Author: jing9
Date: Wed Nov 20 21:51:58 2013
New Revision: 1543962

URL: http://svn.apache.org/r1543962
Log:
HDFS-3987. Support webhdfs over HTTPS. Contributed by Haohui Mai.
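What the change enables, as a minimal client-side sketch (the hostname, port, and path below are hypothetical, and the client's SSL trust-store setup is assumed to be configured separately; not part of the patch):

    import java.io.OutputStream;
    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class SWebHdfsClientSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // "swebhdfs" resolves to SWebHdfsFileSystem through the
        // META-INF/services registration added by this change.
        FileSystem fs = FileSystem.get(
            URI.create("swebhdfs://nn.example.com:50470/"), conf);
        try {
          OutputStream out = fs.create(new Path("/tmp/hello"));
          out.write("hello over HTTPS".getBytes("UTF-8"));
          out.close();
        } finally {
          fs.close();
        }
      }
    }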
Added:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/SWebHdfsFileSystem.java
Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/TokenAspect.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfs.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientRetries.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogs.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Wed Nov 20 21:51:58 2013
@@ -414,6 +414,8 @@ Release 2.3.0 - UNRELEASED
     HDFS-5382. Implement the UI of browsing filesystems in HTML 5 page.
     (Haohui Mai via jing9)
 
+    HDFS-3987. Support webhdfs over HTTPS. (Haohui Mai via jing9)
+
   IMPROVEMENTS
 
     HDFS-5267. Remove volatile from LightWeightHashSet.
     (Junping Du via llu)

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java Wed Nov 20 21:51:58 2013
@@ -73,6 +73,8 @@ import org.apache.hadoop.hdfs.protocol.L
 import org.apache.hadoop.hdfs.protocolPB.ClientDatanodeProtocolTranslatorPB;
 import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.web.SWebHdfsFileSystem;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.net.NetUtils;
@@ -605,12 +607,19 @@ public class DFSUtil {
    * Returns list of InetSocketAddress corresponding to HA NN HTTP addresses from
    * the configuration.
    *
-   * @param conf configuration
    * @return list of InetSocketAddresses
    */
-  public static Map<String, Map<String, InetSocketAddress>> getHaNnHttpAddresses(
-      Configuration conf) {
-    return getAddresses(conf, null, DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
+  public static Map<String, Map<String, InetSocketAddress>> getHaNnWebHdfsAddresses(
+      Configuration conf, String scheme) {
+    if (WebHdfsFileSystem.SCHEME.equals(scheme)) {
+      return getAddresses(conf, null,
+          DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
+    } else if (SWebHdfsFileSystem.SCHEME.equals(scheme)) {
+      return getAddresses(conf, null,
+          DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY);
+    } else {
+      throw new IllegalArgumentException("Unsupported scheme: " + scheme);
+    }
   }
 
   /**
@@ -619,18 +628,28 @@ public class DFSUtil {
    * cluster, the resolver further resolves the logical name (i.e., the authority
    * in the URL) into real namenode addresses.
    */
-  public static InetSocketAddress[] resolve(URI uri, int schemeDefaultPort,
-      Configuration conf) throws IOException {
+  public static InetSocketAddress[] resolveWebHdfsUri(URI uri, Configuration conf)
+      throws IOException {
+    int defaultPort;
+    String scheme = uri.getScheme();
+    if (WebHdfsFileSystem.SCHEME.equals(scheme)) {
+      defaultPort = DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT;
+    } else if (SWebHdfsFileSystem.SCHEME.equals(scheme)) {
+      defaultPort = DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT;
+    } else {
+      throw new IllegalArgumentException("Unsupported scheme: " + scheme);
+    }
+
     ArrayList<InetSocketAddress> ret = new ArrayList<InetSocketAddress>();
 
     if (!HAUtil.isLogicalUri(conf, uri)) {
       InetSocketAddress addr = NetUtils.createSocketAddr(uri.getAuthority(),
-          schemeDefaultPort);
+          defaultPort);
       ret.add(addr);
     } else {
       Map<String, Map<String, InetSocketAddress>> addresses = DFSUtil
-          .getHaNnHttpAddresses(conf);
+          .getHaNnWebHdfsAddresses(conf, scheme);
 
       for (Map<String, InetSocketAddress> addrs : addresses.values()) {
         for (InetSocketAddress addr : addrs.values()) {
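The resolver above is now scheme-aware: webhdfs URIs resolve against dfs.namenode.http-address and swebhdfs URIs against dfs.namenode.https-address, with HA logical names expanded into every configured NameNode address. A sketch of a caller, not part of the patch (DFSUtil is an HDFS-internal API, and the logical name "mycluster" is hypothetical):

    import java.net.InetSocketAddress;
    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSUtil;

    public class ResolveSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Non-HA authorities get the scheme's default port; an HA logical
        // name yields one entry per configured namenode.
        URI uri = URI.create("swebhdfs://mycluster");
        for (InetSocketAddress nn : DFSUtil.resolveWebHdfsUri(uri, conf)) {
          System.out.println(nn);
        }
      }
    }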
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java Wed Nov 20 21:51:58 2013
@@ -804,6 +804,10 @@ public class NameNode implements NameNod
     return httpServer.getHttpAddress();
   }
 
+  /**
+   * @return NameNode HTTPS address, used by the Web UI, image transfer,
+   *    and HTTP-based file system clients like Hftp and WebHDFS
+   */
   public InetSocketAddress getHttpsAddress() {
     return httpServer.getHttpsAddress();
   }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Wed Nov 20 21:51:58 2013
@@ -66,6 +66,7 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.hdfs.web.JsonUtil;
 import org.apache.hadoop.hdfs.web.ParamFilter;
+import org.apache.hadoop.hdfs.web.SWebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
 import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
@@ -96,6 +97,7 @@ import org.apache.hadoop.hdfs.web.resour
 import org.apache.hadoop.hdfs.web.resources.TokenArgumentParam;
 import org.apache.hadoop.hdfs.web.resources.UriFsPathParam;
 import org.apache.hadoop.hdfs.web.resources.UserParam;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.net.NodeBase;
 import org.apache.hadoop.security.Credentials;
@@ -210,7 +212,8 @@ public class NamenodeWebHdfsMethods {
     final Credentials c = DelegationTokenSecretManager.createCredentials(
         namenode, ugi, renewer != null? renewer: ugi.getShortUserName());
     final Token<? extends TokenIdentifier> t = c.getAllTokens().iterator().next();
-    t.setKind(WebHdfsFileSystem.TOKEN_KIND);
+    Text kind = request.getScheme().equals("http") ? WebHdfsFileSystem.TOKEN_KIND
+        : SWebHdfsFileSystem.TOKEN_KIND;
+    t.setKind(kind);
     return t;
   }
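Because the NameNode now stamps the token kind from the request transport, a delegation token fetched over HTTPS renews over HTTPS. A sketch (not from the patch) of what a client of an swebhdfs filesystem would observe; the renewer name is hypothetical:

    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hdfs.web.SWebHdfsFileSystem;
    import org.apache.hadoop.security.token.Token;

    public class TokenKindSketch {
      // fs is expected to be an swebhdfs:// filesystem.
      public static void check(FileSystem fs) throws Exception {
        Token<?> token = fs.getDelegationToken("renewer");
        // Issued over HTTPS, the token carries "SWEBHDFS delegation", so
        // TokenAspect can later map the kind back to an swebhdfs:// URI.
        if (!SWebHdfsFileSystem.TOKEN_KIND.equals(token.getKind())) {
          throw new IllegalStateException("unexpected kind: " + token.getKind());
        }
      }
    }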
Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/SWebHdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/SWebHdfsFileSystem.java?rev=1543962&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/SWebHdfsFileSystem.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/SWebHdfsFileSystem.java Wed Nov 20 21:51:58 2013
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web;
+
+import java.io.IOException;
+import java.security.GeneralSecurityException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.io.Text;
+
+public class SWebHdfsFileSystem extends WebHdfsFileSystem {
+
+  public static final Text TOKEN_KIND = new Text("SWEBHDFS delegation");
+  public static final String SCHEME = "swebhdfs";
+
+  @Override
+  public String getScheme() {
+    return SCHEME;
+  }
+
+  @Override
+  protected String getTransportScheme() {
+    return "https";
+  }
+
+  @Override
+  protected synchronized void initializeTokenAspect() {
+    tokenAspect = new TokenAspect<WebHdfsFileSystem>(this, TOKEN_KIND);
+  }
+
+  @Override
+  protected void initializeConnectionFactory(Configuration conf)
+      throws IOException {
+    connectionFactory = new URLConnectionFactory(
+        URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT);
+    try {
+      connectionFactory.setConnConfigurator(URLConnectionFactory
+          .newSslConnConfigurator(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
+              conf));
+    } catch (GeneralSecurityException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  protected int getDefaultPort() {
+    return getConf().getInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY,
+        DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT);
+  }
+}
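The subclass overrides only five extension points: the filesystem scheme, the transport scheme, the token kind, an SSL-aware connection factory, and the default port. A configuration sketch, not part of the patch (the address is hypothetical; getDefaultPort() above only applies when the URI omits a port):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;

    public class SWebHdfsConfSketch {
      // Point swebhdfs at a non-default HTTPS endpoint.
      public static Configuration configure() {
        Configuration conf = new Configuration();
        conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
            "nn.example.com:50470");
        return conf;
      }
    }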
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/TokenAspect.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/TokenAspect.java?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/TokenAspect.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/TokenAspect.java Wed Nov 20 21:51:58 2013
@@ -58,7 +58,8 @@ final class TokenAspect<T extends FileSy
     public boolean handleKind(Text kind) {
       return kind.equals(HftpFileSystem.TOKEN_KIND)
           || kind.equals(HsftpFileSystem.TOKEN_KIND)
-          || kind.equals(WebHdfsFileSystem.TOKEN_KIND);
+          || kind.equals(WebHdfsFileSystem.TOKEN_KIND)
+          || kind.equals(SWebHdfsFileSystem.TOKEN_KIND);
     }
 
     @Override
@@ -83,6 +84,8 @@ final class TokenAspect<T extends FileSy
         uri = DFSUtil.createUri(HsftpFileSystem.SCHEME, address);
       } else if (kind.equals(WebHdfsFileSystem.TOKEN_KIND)) {
         uri = DFSUtil.createUri(WebHdfsFileSystem.SCHEME, address);
+      } else if (kind.equals(SWebHdfsFileSystem.TOKEN_KIND)) {
+        uri = DFSUtil.createUri(SWebHdfsFileSystem.SCHEME, address);
       } else {
         throw new IllegalArgumentException("Unsupported scheme");
       }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java Wed Nov 20 21:51:58 2013
@@ -56,7 +56,6 @@ import org.apache.hadoop.hdfs.HAUtil;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.server.namenode.SafeModeException;
-import org.apache.hadoop.hdfs.web.TokenAspect.DTSelecorByKind;
 import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
 import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
 import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
@@ -98,7 +97,6 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.util.Progressable;
 import org.mortbay.util.ajax.JSON;
 
-import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Charsets;
 import com.google.common.collect.Lists;
 
@@ -118,8 +116,7 @@ public class WebHdfsFileSystem extends F
   /** Delegation token kind */
   public static final Text TOKEN_KIND = new Text("WEBHDFS delegation");
 
-  protected TokenAspect<WebHdfsFileSystem> tokenAspect = new TokenAspect<WebHdfsFileSystem>(
-      this, TOKEN_KIND);
+  protected TokenAspect<WebHdfsFileSystem> tokenAspect;
 
   private UserGroupInformation ugi;
   private URI uri;
@@ -140,17 +137,44 @@ public class WebHdfsFileSystem extends F
     return SCHEME;
   }
 
+  /**
+   * return the underlying transport protocol (http / https).
+   */
+  protected String getTransportScheme() {
+    return "http";
+  }
+
+  /**
+   * Initialize tokenAspect. This function is intended to
+   * be overridden by SWebHdfsFileSystem.
+   */
+  protected synchronized void initializeTokenAspect() {
+    tokenAspect = new TokenAspect<WebHdfsFileSystem>(this, TOKEN_KIND);
+  }
+
+  /**
+   * Initialize connectionFactory. This function is intended to
+   * be overridden by SWebHdfsFileSystem.
+   */
+  protected void initializeConnectionFactory(Configuration conf)
+      throws IOException {
+    connectionFactory = URLConnectionFactory.DEFAULT_CONNECTION_FACTORY;
+  }
+
   @Override
   public synchronized void initialize(URI uri, Configuration conf
       ) throws IOException {
     super.initialize(uri, conf);
     setConf(conf);
+    initializeTokenAspect();
+    initializeConnectionFactory(conf);
+
     ugi = UserGroupInformation.getCurrentUser();
+
     try {
       this.uri = new URI(uri.getScheme(), uri.getAuthority(), null,
           null, null);
-      this.nnAddrs = DFSUtil.resolve(this.uri, getDefaultPort(), conf);
+      this.nnAddrs = DFSUtil.resolveWebHdfsUri(this.uri, conf);
     } catch (URISyntaxException e) {
       throw new IllegalArgumentException(e);
     }
@@ -342,7 +366,7 @@ public class WebHdfsFileSystem extends F
    */
   private URL getNamenodeURL(String path, String query) throws IOException {
     InetSocketAddress nnAddr = getCurrentNNAddr();
-    final URL url = new URL("http", nnAddr.getHostName(),
+    final URL url = new URL(getTransportScheme(), nnAddr.getHostName(),
           nnAddr.getPort(), path + '?' + query);
     if (LOG.isTraceEnabled()) {
       LOG.trace("url=" + url);
@@ -840,7 +864,9 @@ public class WebHdfsFileSystem extends F
   @Override
   public void close() throws IOException {
     super.close();
-    tokenAspect.removeRenewAction();
+    synchronized (this) {
+      tokenAspect.removeRenewAction();
+    }
   }
 
   class OffsetUrlOpener extends ByteRangeInputStream.URLOpener {
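The refactoring turns WebHdfsFileSystem into a template: getTransportScheme(), initializeTokenAspect(), and initializeConnectionFactory() are the only hooks a transport variant needs, and every namenode URL is built from getTransportScheme(). An illustration only, not part of the patch ("xwebhdfs" is a made-up scheme; a real variant would also override the token aspect and connection factory, as SWebHdfsFileSystem does):

    import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;

    public class XWebHdfsSketch extends WebHdfsFileSystem {
      @Override
      public String getScheme() {
        return "xwebhdfs";
      }

      @Override
      protected String getTransportScheme() {
        return "https";  // every namenode URL is built with this
      }
    }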
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem Wed Nov 20 21:51:58 2013
@@ -17,3 +17,4 @@ org.apache.hadoop.hdfs.DistributedFileSy
 org.apache.hadoop.hdfs.web.HftpFileSystem
 org.apache.hadoop.hdfs.web.HsftpFileSystem
 org.apache.hadoop.hdfs.web.WebHdfsFileSystem
+org.apache.hadoop.hdfs.web.SWebHdfsFileSystem
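The service entry is what lets FileSystem map the "swebhdfs" scheme to SWebHdfsFileSystem: implementations are discovered through ServiceLoader and indexed by getScheme(), so no fs.swebhdfs.impl setting is needed. A sketch of the lookup, not part of the patch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class SchemeLookupSketch {
      public static void main(String[] args) throws Exception {
        Class<?> cls = FileSystem.getFileSystemClass("swebhdfs",
            new Configuration());
        // Prints org.apache.hadoop.hdfs.web.SWebHdfsFileSystem
        System.out.println(cls.getName());
      }
    }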
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfs.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfs.java?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfs.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSymlinkHdfs.java Wed Nov 20 21:51:58 2013
@@ -89,7 +89,7 @@ abstract public class TestSymlinkHdfs ex
     conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
     conf.set(FsPermission.UMASK_LABEL, "000");
     cluster = new MiniDFSCluster.Builder(conf).build();
-    webhdfs = WebHdfsTestUtil.getWebHdfsFileSystem(conf);
+    webhdfs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME);
     dfs = cluster.getFileSystem();
   }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientRetries.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientRetries.java?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientRetries.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSClientRetries.java Wed Nov 20 21:51:58 2013
@@ -73,6 +73,7 @@ import org.apache.hadoop.hdfs.protocol.L
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.NotReplicatedYetException;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
 import org.apache.hadoop.io.EnumSetWritable;
 import org.apache.hadoop.io.IOUtils;
@@ -886,8 +887,8 @@ public class TestDFSClientRetries {
     try {
       cluster.waitActive();
       final DistributedFileSystem dfs = cluster.getFileSystem();
-      final FileSystem fs = isWebHDFS?
-          WebHdfsTestUtil.getWebHdfsFileSystem(conf): dfs;
+      final FileSystem fs = isWebHDFS ? WebHdfsTestUtil.getWebHdfsFileSystem(
+          conf, WebHdfsFileSystem.SCHEME) : dfs;
       final URI uri = dfs.getUri();
       assertTrue(HdfsUtils.isHealthy(uri));
@@ -1091,7 +1092,7 @@ public class TestDFSClientRetries {
     final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
         username, new String[]{"supergroup"});
-    return isWebHDFS? WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, conf)
+    return isWebHDFS? WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, conf, WebHdfsFileSystem.SCHEME)
         : DFSTestUtil.getFileSystemAs(ugi, conf);
   }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java Wed Nov 20 21:51:58 2013
@@ -556,7 +556,7 @@ public class TestDFSUtil {
     Configuration conf = createWebHDFSHAConfiguration(LOGICAL_HOST_NAME, NS1_NN1_ADDR, NS1_NN2_ADDR);
     Map<String, Map<String, InetSocketAddress>> map =
-        DFSUtil.getHaNnHttpAddresses(conf);
+        DFSUtil.getHaNnWebHdfsAddresses(conf, "webhdfs");
 
     assertEquals(NS1_NN1_ADDR, map.get("ns1").get("nn1").toString());
     assertEquals(NS1_NN2_ADDR, map.get("ns1").get("nn2").toString());
@@ -574,7 +574,7 @@ public class TestDFSUtil {
     Configuration conf = createWebHDFSHAConfiguration(LOGICAL_HOST_NAME, NS1_NN1_ADDR, NS1_NN2_ADDR);
     URI uri = new URI("webhdfs://ns1");
     assertTrue(HAUtil.isLogicalUri(conf, uri));
-    InetSocketAddress[] addrs = DFSUtil.resolve(uri, DEFAULT_PORT, conf);
+    InetSocketAddress[] addrs = DFSUtil.resolveWebHdfsUri(uri, conf);
     assertArrayEquals(new InetSocketAddress[] {
       new InetSocketAddress(NS1_NN1_HOST, DEFAULT_PORT),
       new InetSocketAddress(NS1_NN2_HOST, DEFAULT_PORT),

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java Wed Nov 20 21:51:58 2013
@@ -147,7 +147,7 @@ public class TestDelegationTokenForProxy
   public void testWebHdfsDoAs() throws Exception {
     WebHdfsTestUtil.LOG.info("START: testWebHdfsDoAs()");
     WebHdfsTestUtil.LOG.info("ugi.getShortUserName()=" + ugi.getShortUserName());
-    final WebHdfsFileSystem webhdfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, config);
+    final WebHdfsFileSystem webhdfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, config, WebHdfsFileSystem.SCHEME);
     final Path root = new Path("/");
     cluster.getFileSystem().setPermission(root, new FsPermission((short)0777));
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogs.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogs.java?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogs.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogs.java Wed Nov 20 21:51:58 2013
@@ -163,7 +163,7 @@ public class TestAuditLogs {
     setupAuditLogs();
 
-    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf);
+    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
     InputStream istream = webfs.open(file);
     int val = istream.read();
     istream.close();
@@ -182,7 +182,7 @@ public class TestAuditLogs {
     setupAuditLogs();
 
-    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf);
+    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
     FileStatus st = webfs.getFileStatus(file);
 
     verifyAuditLogs(true);
@@ -222,7 +222,7 @@ public class TestAuditLogs {
     setupAuditLogs();
     try {
-      WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf);
+      WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
       InputStream istream = webfs.open(file);
       int val = istream.read();
       fail("open+read must not succeed, got " + val);

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java Wed Nov 20 21:51:58 2013
@@ -65,6 +65,7 @@ public class TestHttpsFileSystem {
     cluster.getFileSystem().create(new Path("/test")).close();
     InetSocketAddress addr = cluster.getNameNode().getHttpsAddress();
     nnAddr = addr.getHostName() + ":" + addr.getPort();
+    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, nnAddr);
   }
 
   @AfterClass
@@ -80,4 +81,15 @@ public class TestHttpsFileSystem {
     Assert.assertTrue(fs.exists(new Path("/test")));
     fs.close();
   }
+
+  @Test
+  public void testSWebHdfsFileSystem() throws Exception {
+    FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, "swebhdfs");
+    final Path f = new Path("/testswebhdfs");
+    FSDataOutputStream os = fs.create(f);
+    os.write(23);
+    os.close();
+    Assert.assertTrue(fs.exists(f));
+    fs.close();
+  }
 }
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java Wed Nov 20 21:51:58 2013
@@ -101,7 +101,7 @@ public class TestWebHDFS {
     try {
       cluster.waitActive();
 
-      final FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf);
+      final FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME);
       final Path dir = new Path("/test/largeFile");
       Assert.assertTrue(fs.mkdirs(dir));
 
@@ -229,9 +229,9 @@ public class TestWebHDFS {
         new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
     try {
       cluster.waitActive();
-      WebHdfsTestUtil.getWebHdfsFileSystem(conf).setPermission(
-          new Path("/"),
-          new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
+      WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME)
+          .setPermission(new Path("/"),
+              new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
 
       // trick the NN into not believing it's not the superuser so we can
       // tell if the correct user is used by listStatus
@@ -243,8 +243,9 @@ public class TestWebHDFS {
           .doAs(new PrivilegedExceptionAction<Void>() {
         @Override
         public Void run() throws IOException, URISyntaxException {
-          FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf);
-          Path d = new Path("/my-dir");
+          FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,
+              WebHdfsFileSystem.SCHEME);
+          Path d = new Path("/my-dir");
           Assert.assertTrue(fs.mkdirs(d));
           for (int i=0; i < listLimit*3; i++) {
             Path p = new Path(d, "file-"+i);

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java Wed Nov 20 21:51:58 2013
@@ -82,7 +82,7 @@ public class TestWebHdfsFileSystemContra
     final UserGroupInformation current = UserGroupInformation.getCurrentUser();
     ugi = UserGroupInformation.createUserForTesting(
         current.getShortUserName() + "x", new String[]{"user"});
-    fs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, conf);
+    fs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, conf, WebHdfsFileSystem.SCHEME);
     defaultWorkingDirectory = fs.getWorkingDirectory().toUri().getPath();
   }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java Wed Nov 20 21:51:58 2013
@@ -18,35 +18,32 @@
 package org.apache.hadoop.hdfs.web;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
-import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.ServerSocket;
 import java.net.Socket;
-import java.net.SocketAddress;
 import java.net.SocketTimeoutException;
 import java.nio.channels.SocketChannel;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.NetUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
 /**
  * This test suite checks that WebHdfsFileSystem sets connection timeouts and
@@ -77,7 +74,7 @@ public class TestWebHdfsTimeouts {
     serverSocket = new ServerSocket(0, CONNECTION_BACKLOG);
     nnHttpAddress = new InetSocketAddress("localhost", serverSocket.getLocalPort());
     conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "localhost:" + serverSocket.getLocalPort());
-    fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf);
+    fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME);
     fs.connectionFactory = connectionFactory;
     clients = new ArrayList<SocketChannel>();
     serverThread = null;
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java?rev=1543962&r1=1543961&r2=1543962&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java Wed Nov 20 21:51:58 2013
@@ -46,20 +46,36 @@ public class WebHdfsTestUtil {
     return conf;
   }
 
-  public static WebHdfsFileSystem getWebHdfsFileSystem(final Configuration conf
-      ) throws IOException, URISyntaxException {
-    final String uri = WebHdfsFileSystem.SCHEME + "://"
-        + conf.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
+  public static WebHdfsFileSystem getWebHdfsFileSystem(
+      final Configuration conf, String scheme) throws IOException,
+      URISyntaxException {
+    final String uri;
+
+    if (WebHdfsFileSystem.SCHEME.equals(scheme)) {
+      uri = WebHdfsFileSystem.SCHEME + "://"
+          + conf.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
+    } else if (SWebHdfsFileSystem.SCHEME.equals(scheme)) {
+      uri = SWebHdfsFileSystem.SCHEME + "://"
+          + conf.get(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY);
+    } else {
+      throw new IllegalArgumentException("unknown scheme: " + scheme);
+    }
     return (WebHdfsFileSystem)FileSystem.get(new URI(uri), conf);
   }
 
   public static WebHdfsFileSystem getWebHdfsFileSystemAs(
       final UserGroupInformation ugi, final Configuration conf
+      ) throws IOException, InterruptedException {
+    return getWebHdfsFileSystemAs(ugi, conf, WebHdfsFileSystem.SCHEME);
+  }
+
+  public static WebHdfsFileSystem getWebHdfsFileSystemAs(
+      final UserGroupInformation ugi, final Configuration conf,
+      final String scheme
       ) throws IOException, InterruptedException {
     return ugi.doAs(new PrivilegedExceptionAction<WebHdfsFileSystem>() {
       @Override
       public WebHdfsFileSystem run() throws Exception {
-        return getWebHdfsFileSystem(conf);
+        return getWebHdfsFileSystem(conf, scheme);
      }
    });
  }
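A sketch of how a test would use the new scheme-aware helper, mirroring testSWebHdfsFileSystem above (assumes a MiniDFSCluster already started with HTTPS enabled and dfs.namenode.https-address pointing at it; the path is hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
    import org.junit.Assert;

    public class SWebHdfsTestSketch {
      static void roundTrip(Configuration conf) throws Exception {
        // Builds an swebhdfs:// URI from dfs.namenode.https-address.
        FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, "swebhdfs");
        Path p = new Path("/round-trip");
        fs.create(p).close();
        Assert.assertTrue(fs.exists(p));
        fs.close();
      }
    }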