Author: jing9
Date: Tue Feb 18 19:13:41 2014
New Revision: 1569482

URL: http://svn.apache.org/r1569482
Log:
HDFS-5893. Merge change r1569478 from branch-2.

Modified:
    hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
    hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java
    hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestByteRangeInputStream.java
    hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java

Modified: hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1569482&r1=1569481&r2=1569482&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Tue Feb 18 19:13:41 2014
@@ -173,6 +173,10 @@ Release 2.4.0 - UNRELEASED
 
     HDFS-5803. TestBalancer.testBalancer0 fails. (Chen He via kihwal)
 
+    HDFS-5893. HftpFileSystem.RangeHeaderUrlOpener uses the default
+    URLConnectionFactory which does not import SSL certificates. (Haohui Mai via
+    jing9)
+
   BREAKDOWN OF HDFS-5698 SUBTASKS AND RELATED JIRAS
 
     HDFS-5717. Save FSImage header in protobuf. (Haohui Mai via jing9)
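
For context on the entry above: before this change, RangeHeaderUrlOpener built its connections from the process-wide URLConnectionFactory.DEFAULT_SYSTEM_CONNECTION_FACTORY, which relies on the JVM's default SSL context and never loads the cluster's configured truststore, so hsftp:// data reads failed the TLS handshake even though metadata calls through the file system's own factory worked. The sketch below contrasts the two factories. It is illustrative only: the constant and openConnection(url) appear in the diff, while newDefaultURLConnectionFactory(conf) and the host name are assumptions about how a Configuration-backed factory is obtained.

    // Sketch only; not part of the patch. Assumes newDefaultURLConnectionFactory(conf)
    // as the way to obtain a Configuration-backed factory, and a placeholder host.
    import java.net.HttpURLConnection;
    import java.net.URL;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.web.URLConnectionFactory;

    public class ConnectionFactorySketch {
      public static void main(String[] args) throws Exception {
        URL url = new URL("https://nn.example.com:50470/data/test");

        // Before HDFS-5893: range reads always went through the shared default
        // factory, which uses the JVM's default SSL context and ignores
        // ssl-client.xml, so a cluster-specific certificate chain is rejected.
        HttpURLConnection viaDefault = (HttpURLConnection)
            URLConnectionFactory.DEFAULT_SYSTEM_CONNECTION_FACTORY.openConnection(url);

        // After the patch: HftpFileSystem hands its own factory, built from the
        // cluster Configuration, down to RangeHeaderUrlOpener.
        Configuration conf = new Configuration();
        URLConnectionFactory configured =
            URLConnectionFactory.newDefaultURLConnectionFactory(conf);
        HttpURLConnection viaConfigured =
            (HttpURLConnection) configured.openConnection(url);

        viaDefault.disconnect();
        viaConfigured.disconnect();
      }
    }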

Modified: hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java?rev=1569482&r1=1569481&r2=1569482&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java (original)
+++ hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java Tue Feb 18 19:13:41 2014
@@ -27,7 +27,6 @@ import javax.servlet.http.HttpServletRes
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
@@ -61,18 +60,13 @@ public class FileDataServlet extends Dfs
     } else {
       hostname = host.getIpAddr();
     }
-    int port = host.getInfoPort();
-    if ("https".equals(scheme)) {
-      final Integer portObject = (Integer) getServletContext().getAttribute(
-          DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY);
-      if (portObject != null) {
-        port = portObject;
-      }
-    }
+
+    int port = "https".equals(scheme) ? host.getInfoSecurePort() : host
+        .getInfoPort();
 
     String dtParam = "";
     if (dt != null) {
-      dtParam=JspHelper.getDelegationTokenUrlParam(dt);
+      dtParam = JspHelper.getDelegationTokenUrlParam(dt);
     }
 
     // Add namenode address to the url params
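
The rewritten hunk above drops the servlet-context lookup of the namenode-wide HTTPS port and instead uses the secure info port that each datanode registers about itself, which also behaves correctly when datanodes listen on different HTTPS ports. A minimal sketch of the new selection rule, using only the DatanodeID accessors that appear in the diff:

    // Sketch of the port-selection rule now used in FileDataServlet:
    // the datanode's own secure info port for https redirects, its
    // plain info port otherwise.
    import org.apache.hadoop.hdfs.protocol.DatanodeID;

    public class RedirectPortSketch {
      static int redirectPort(String scheme, DatanodeID host) {
        return "https".equals(scheme) ? host.getInfoSecurePort()
                                      : host.getInfoPort();
      }
    }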

Modified: hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java?rev=1569482&r1=1569481&r2=1569482&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java (original)
+++ hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java Tue Feb 18 19:13:41 2014
@@ -344,14 +344,15 @@ public class HftpFileSystem extends File
   }
 
   static class RangeHeaderUrlOpener extends ByteRangeInputStream.URLOpener {
-    URLConnectionFactory connectionFactory = URLConnectionFactory.DEFAULT_SYSTEM_CONNECTION_FACTORY;
+    private final URLConnectionFactory connFactory;
 
-    RangeHeaderUrlOpener(final URL url) {
+    RangeHeaderUrlOpener(URLConnectionFactory connFactory, final URL url) {
       super(url);
+      this.connFactory = connFactory;
     }
 
     protected HttpURLConnection openConnection() throws IOException {
-      return (HttpURLConnection)connectionFactory.openConnection(url);
+      return (HttpURLConnection)connFactory.openConnection(url);
     }
 
     /** Use HTTP Range header for specifying offset. */
@@ -381,8 +382,9 @@ public class HftpFileSystem extends File
       super(o, r);
     }
 
-    RangeHeaderInputStream(final URL url) {
-      this(new RangeHeaderUrlOpener(url), new RangeHeaderUrlOpener(null));
+    RangeHeaderInputStream(URLConnectionFactory connFactory, final URL url) {
+      this(new RangeHeaderUrlOpener(connFactory, url),
+          new RangeHeaderUrlOpener(connFactory, null));
     }
 
     @Override
@@ -397,7 +399,7 @@ public class HftpFileSystem extends File
     String path = "/data" + ServletUtil.encodePath(f.toUri().getPath());
     String query = addDelegationTokenParam("ugi=" + getEncodedUgiParameter());
     URL u = getNamenodeURL(path, query);
-    return new FSDataInputStream(new RangeHeaderInputStream(u));
+    return new FSDataInputStream(new RangeHeaderInputStream(connectionFactory, u));
   }
 
   @Override
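
With this change the factory is threaded explicitly through RangeHeaderUrlOpener and RangeHeaderInputStream, so every range request reuses the SSL settings of the owning HftpFileSystem rather than silently falling back to the system default. Both classes are package-private, so the wiring sketch below compiles only inside org.apache.hadoop.hdfs.web; it simply spells out the constructor chain from the hunks above, with the factory and URL as assumed inputs.

    // Sketch (same package only): the constructor chain introduced by the patch.
    package org.apache.hadoop.hdfs.web;

    import java.io.IOException;
    import java.net.URL;

    import org.apache.hadoop.fs.FSDataInputStream;

    class RangeOpenerWiringSketch {
      // `factory` stands for the file system's Configuration-backed connection
      // factory; `u` for the /data/<path>?ugi=... URL that open() builds.
      static FSDataInputStream openSketch(URLConnectionFactory factory, URL u)
          throws IOException {
        return new FSDataInputStream(
            new HftpFileSystem.RangeHeaderInputStream(factory, u));
      }
    }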

Modified: hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestByteRangeInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestByteRangeInputStream.java?rev=1569482&r1=1569481&r2=1569482&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestByteRangeInputStream.java (original)
+++ hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestByteRangeInputStream.java Tue Feb 18 19:13:41 2014
@@ -97,12 +97,13 @@ public static class MockHttpURLConnectio
 
   @Test
   public void testByteRange() throws IOException {
+    URLConnectionFactory factory = mock(URLConnectionFactory.class);
     HftpFileSystem.RangeHeaderUrlOpener ospy = spy(
-        new HftpFileSystem.RangeHeaderUrlOpener(new URL("http://test/")));
+        new HftpFileSystem.RangeHeaderUrlOpener(factory, new URL("http://test/")));
     doReturn(new MockHttpURLConnection(ospy.getURL())).when(ospy)
         .openConnection();
     HftpFileSystem.RangeHeaderUrlOpener rspy = spy(
-        new HftpFileSystem.RangeHeaderUrlOpener((URL) null));
+        new HftpFileSystem.RangeHeaderUrlOpener(factory, (URL) null));
     doReturn(new MockHttpURLConnection(rspy.getURL())).when(rspy)
         .openConnection();
     ByteRangeInputStream is = new HftpFileSystem.RangeHeaderInputStream(ospy, rspy);
@@ -171,12 +172,15 @@ public static class MockHttpURLConnectio
       assertEquals("Should fail because incorrect response code was sent",
                    "HTTP_OK expected, received 206", e.getMessage());
     }
+    is.close();
   }
 
   @Test
   public void testPropagatedClose() throws IOException {
-    ByteRangeInputStream brs = spy(
-        new HftpFileSystem.RangeHeaderInputStream(new URL("http://test/")));
+    URLConnectionFactory factory = mock(URLConnectionFactory.class);
+
+    ByteRangeInputStream brs = spy(new HftpFileSystem.RangeHeaderInputStream(
+        factory, new URL("http://test/")));
 
     InputStream mockStream = mock(InputStream.class);
     doReturn(mockStream).when(brs).openInputStream();

Modified: hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java?rev=1569482&r1=1569481&r2=1569482&view=diff
==============================================================================
--- hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java (original)
+++ hadoop/common/branches/branch-2.4/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java Tue Feb 18 19:13:41 2014
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.web;
 
 import java.io.File;
 import java.io.InputStream;
+import java.io.OutputStream;
 import java.net.InetSocketAddress;
 import java.net.URI;
 
@@ -30,6 +31,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -65,9 +67,11 @@ public class TestHttpsFileSystem {
 
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
     cluster.waitActive();
-    cluster.getFileSystem().create(new Path("/test")).close();
+    OutputStream os = cluster.getFileSystem().create(new Path("/test"));
+    os.write(23);
+    os.close();
     InetSocketAddress addr = cluster.getNameNode().getHttpsAddress();
-    nnAddr = addr.getHostName() + ":" + addr.getPort();
+    nnAddr = NetUtils.getHostPortString(addr);
     conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, nnAddr);
   }
 
@@ -82,6 +86,9 @@ public class TestHttpsFileSystem {
   public void testHsftpFileSystem() throws Exception {
     FileSystem fs = FileSystem.get(new URI("hsftp://" + nnAddr), conf);
     Assert.assertTrue(fs.exists(new Path("/test")));
+    InputStream is = fs.open(new Path("/test"));
+    Assert.assertEquals(23, is.read());
+    is.close();
     fs.close();
   }
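
The added write and read matter because fs.exists() only exercises the metadata path, which already went through the file system's own factory; reading a byte forces a /data redirect through RangeHeaderInputStream and therefore through the SSL-aware factory that this patch wires in. A user-level sketch of the same flow follows; the host, port and the assumption that conf carries the ssl-client.xml settings (generated in the test via KeyStoreTestUtil) are placeholders.

    // User-level sketch of the behaviour the new assertions lock in: an hsftp://
    // read must succeed against a cluster whose certificate is only trusted via
    // the configured truststore. Host/port are placeholders.
    import java.io.InputStream;
    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class HsftpReadSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(); // assumed to include ssl-client.xml settings
        FileSystem fs = FileSystem.get(new URI("hsftp://nn.example.com:50470"), conf);

        InputStream in = fs.open(new Path("/test"));
        System.out.println("first byte: " + in.read()); // 23 in the test above
        in.close();
        fs.close();
      }
    }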
 

