Author: jing9
Date: Tue Dec 17 21:18:38 2013
New Revision: 1551715

URL: http://svn.apache.org/r1551715
Log: HDFS-5536. Merge change r1547925 from trunk.
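For orientation before the diff: HDFS-5536 introduces a single dfs.http.policy setting (values HTTP_ONLY, HTTPS_ONLY, HTTP_AND_HTTPS) that supersedes the older dfs.https.enable and hadoop.ssl.enabled switches for the NameNode and DataNode web endpoints. The short Java sketch below is illustrative only and not part of the commit; the class name HttpPolicyExample is made up, while the configuration key and policy names come from the diff that follows.

    // Illustrative sketch, not part of r1551715: selecting the new HTTP policy in code.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.http.HttpConfig;

    public class HttpPolicyExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Supported values: HTTP_ONLY (default), HTTPS_ONLY, HTTP_AND_HTTPS.
        conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY,
            HttpConfig.Policy.HTTP_AND_HTTPS.name());
        System.out.println("Configured policy: "
            + conf.get(DFSConfigKeys.DFS_HTTP_POLICY_KEY));
      }
    }

Setting the same key in hdfs-site.xml has the same effect; the key and its values are documented in the hdfs-default.xml change further down.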
Added:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeHttpServer.java
      - copied unchanged from r1547925, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeHttpServer.java

Modified:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HdfsConfiguration.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSClusterWithNodeGroup.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1551715&r1=1551714&r2=1551715&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Tue Dec 17 21:18:38 2013
@@ -165,6 +165,9 @@ Release 2.4.0 - UNRELEASED
     HDFS-5545. Allow specifying endpoints for listeners in HttpServer. (Haohui
     Mai via jing9)

+    HDFS-5536. Implement HTTP policy for Namenode and DataNode. (Haohui Mai via
+    jing9)
+
   OPTIMIZATIONS

     HDFS-5239.
Allow FSNamesystem lock fairness to be configurable (daryn) Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java?rev=1551715&r1=1551714&r2=1551715&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java Tue Dec 17 21:18:38 2013 @@ -21,6 +21,7 @@ package org.apache.hadoop.hdfs; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicyDefault; +import org.apache.hadoop.http.HttpConfig; /** * This class contains constants for configuration keys used @@ -340,6 +341,8 @@ public class DFSConfigKeys extends Commo public static final boolean DFS_SUPPORT_APPEND_DEFAULT = true; public static final String DFS_HTTPS_ENABLE_KEY = "dfs.https.enable"; public static final boolean DFS_HTTPS_ENABLE_DEFAULT = false; + public static final String DFS_HTTP_POLICY_KEY = "dfs.http.policy"; + public static final String DFS_HTTP_POLICY_DEFAULT = HttpConfig.Policy.HTTP_ONLY.name(); public static final String DFS_DEFAULT_CHUNK_VIEW_SIZE_KEY = "dfs.default.chunk.view.size"; public static final int DFS_DEFAULT_CHUNK_VIEW_SIZE_DEFAULT = 32*1024; public static final String DFS_DATANODE_HTTPS_ADDRESS_KEY = "dfs.datanode.https.address"; Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java?rev=1551715&r1=1551714&r2=1551715&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java Tue Dec 17 21:18:38 2013 @@ -18,6 +18,8 @@ package org.apache.hadoop.hdfs; +import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT; +import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_NAMENODES_KEY_PREFIX; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_NAMENODE_ID_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_BACKUP_ADDRESS_KEY; @@ -64,6 +66,7 @@ import org.apache.hadoop.HadoopIllegalAr import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; +import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException; @@ -78,6 +81,7 @@ import org.apache.hadoop.hdfs.server.nam import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.hdfs.web.SWebHdfsFileSystem; import org.apache.hadoop.hdfs.web.WebHdfsFileSystem; +import 
org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.http.HttpServer;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.RPC;
@@ -1409,12 +1413,58 @@ public class DFSUtil {
         defaultKey : DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY;
   }
-  public static HttpServer.Builder loadSslConfToHttpServerBuilder(
-      HttpServer.Builder builder, Configuration sslConf) {
+  /**
+   * Get http policy. Http Policy is chosen as follows:
+   * <ol>
+   * <li>If hadoop.ssl.enabled is set, http endpoints are not started. Only
+   * https endpoints are started on configured https ports</li>
+   * <li>This configuration is overridden by dfs.https.enable configuration, if
+   * it is set to true. In that case, both http and https endpoints are started.</li>
+   * <li>All the above configurations are overridden by dfs.http.policy
+   * configuration. With this configuration you can set http-only, https-only
+   * and http-and-https endpoints.</li>
+   * </ol>
+   * See hdfs-default.xml documentation for more details on each of the above
+   * configuration settings.
+   */
+  public static HttpConfig.Policy getHttpPolicy(Configuration conf) {
+    String httpPolicy = conf.get(DFSConfigKeys.DFS_HTTP_POLICY_KEY,
+        DFSConfigKeys.DFS_HTTP_POLICY_DEFAULT);
+
+    HttpConfig.Policy policy = HttpConfig.Policy.fromString(httpPolicy);
+
+    if (policy == HttpConfig.Policy.HTTP_ONLY) {
+      boolean httpsEnabled = conf.getBoolean(
+          DFSConfigKeys.DFS_HTTPS_ENABLE_KEY,
+          DFSConfigKeys.DFS_HTTPS_ENABLE_DEFAULT);
+
+      boolean hadoopSslEnabled = conf.getBoolean(
+          CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY,
+          CommonConfigurationKeys.HADOOP_SSL_ENABLED_DEFAULT);
+
+      if (hadoopSslEnabled) {
+        LOG.warn(CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY
+            + " is deprecated. Please use "
+            + DFSConfigKeys.DFS_HTTPS_ENABLE_KEY + ".");
+        policy = HttpConfig.Policy.HTTPS_ONLY;
+      } else if (httpsEnabled) {
+        LOG.warn(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY
+            + " is deprecated. 
Please use " + + DFSConfigKeys.DFS_HTTPS_ENABLE_KEY + "."); + policy = HttpConfig.Policy.HTTP_AND_HTTPS; + } + } + + conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, policy.name()); + return policy; + } + + public static HttpServer.Builder loadSslConfToHttpServerBuilder(HttpServer.Builder builder, + Configuration sslConf) { return builder .needsClientAuth( - sslConf.getBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY, - DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT)) + sslConf.getBoolean(DFS_CLIENT_HTTPS_NEED_AUTH_KEY, + DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT)) .keyPassword(sslConf.get("ssl.server.keystore.keypassword")) .keyStore(sslConf.get("ssl.server.keystore.location"), sslConf.get("ssl.server.keystore.password"), Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HdfsConfiguration.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HdfsConfiguration.java?rev=1551715&r1=1551714&r2=1551715&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HdfsConfiguration.java (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HdfsConfiguration.java Tue Dec 17 21:18:38 2013 @@ -19,7 +19,6 @@ package org.apache.hadoop.hdfs; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.conf.Configuration.DeprecationDelta; import org.apache.hadoop.classification.InterfaceAudience; Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java?rev=1551715&r1=1551714&r2=1551715&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java Tue Dec 17 21:18:38 2013 @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hdfs.server.datanode; - import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; @@ -66,6 +65,7 @@ import org.apache.hadoop.hdfs.server.pro import org.apache.hadoop.hdfs.server.protocol.*; import org.apache.hadoop.hdfs.web.WebHdfsFileSystem; import org.apache.hadoop.hdfs.web.resources.Param; +import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.http.HttpServer; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.ReadaheadPool; @@ -181,9 +181,11 @@ public class DataNode extends Configured private DNConf dnConf; private volatile boolean heartbeatsDisabledForTests = false; private DataStorage storage = null; + private HttpServer infoServer = null; private int infoPort; private int infoSecurePort; + DataNodeMetrics metrics; private InetSocketAddress streamingAddr; @@ -285,7 +287,7 @@ public class DataNode extends Configured * explicitly configured in the given config, then it is determined * via the DNS class. 
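The DFSUtil.getHttpPolicy() method added above only consults the two legacy switches when dfs.http.policy resolves to HTTP_ONLY: hadoop.ssl.enabled maps to HTTPS_ONLY, dfs.https.enable maps to HTTP_AND_HTTPS, and the resolved policy is written back into the configuration. The following hedged sketch exercises that behaviour; the driver class name HttpPolicyResolution is made up, everything else is taken from the diff.

    // Illustrative sketch, not part of r1551715: how DFSUtil.getHttpPolicy() resolves
    // the new key against the legacy switches.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.hdfs.DFSUtil;
    import org.apache.hadoop.http.HttpConfig;

    public class HttpPolicyResolution {
      public static void main(String[] args) {
        // 1. Nothing set: the default is HTTP_ONLY.
        Configuration conf = new Configuration(false);
        System.out.println(DFSUtil.getHttpPolicy(conf));   // HTTP_ONLY

        // 2. Legacy dfs.https.enable=true is still honoured (with a deprecation
        //    warning) and yields HTTP_AND_HTTPS.
        conf = new Configuration(false);
        conf.setBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, true);
        System.out.println(DFSUtil.getHttpPolicy(conf));   // HTTP_AND_HTTPS

        // 3. Legacy hadoop.ssl.enabled=true yields HTTPS_ONLY and is checked
        //    before dfs.https.enable.
        conf = new Configuration(false);
        conf.setBoolean(CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY, true);
        conf.setBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, true);
        System.out.println(DFSUtil.getHttpPolicy(conf));   // HTTPS_ONLY

        // 4. An explicit non-default dfs.http.policy wins over both legacy keys,
        //    because they are only consulted when the policy is HTTP_ONLY.
        conf = new Configuration(false);
        conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY,
            HttpConfig.Policy.HTTPS_ONLY.name());
        conf.setBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, true);
        System.out.println(DFSUtil.getHttpPolicy(conf));   // HTTPS_ONLY

        // getHttpPolicy also writes the resolved value back into the configuration.
        System.out.println(conf.get(DFSConfigKeys.DFS_HTTP_POLICY_KEY)); // HTTPS_ONLY
      }
    }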
* - * @param config + * @param config configuration * @return the hostname (NB: may not be a FQDN) * @throws UnknownHostException if the dfs.datanode.dns.interface * option is used and the hostname can not be determined @@ -303,40 +305,54 @@ public class DataNode extends Configured return name; } - + /** + * @see DFSUtil#getHttpPolicy(org.apache.hadoop.conf.Configuration) + * for information related to the different configuration options and + * Http Policy is decided. + */ private void startInfoServer(Configuration conf) throws IOException { - // create a servlet to serve full-file content + HttpServer.Builder builder = new HttpServer.Builder().setName("datanode") + .setConf(conf).setACL(new AccessControlList(conf.get(DFS_ADMIN, " "))); + + HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf); InetSocketAddress infoSocAddr = DataNode.getInfoAddr(conf); String infoHost = infoSocAddr.getHostName(); - int tmpInfoPort = infoSocAddr.getPort(); - HttpServer.Builder builder = new HttpServer.Builder().setName("datanode") - .addEndpoint(URI.create("http://" + NetUtils.getHostPortString(infoSocAddr))) - .setFindPort(tmpInfoPort == 0).setConf(conf) - .setACL(new AccessControlList(conf.get(DFS_ADMIN, " "))); - LOG.info("Opened info server at " + infoHost + ":" + tmpInfoPort); - if (conf.getBoolean(DFS_HTTPS_ENABLE_KEY, false)) { + if (policy.isHttpEnabled()) { + if (secureResources == null) { + int port = infoSocAddr.getPort(); + builder.addEndpoint(URI.create("http://" + infoHost + ":" + port)); + if (port == 0) { + builder.setFindPort(true); + } + } else { + // The http socket is created externally using JSVC, we add it in + // directly. + builder.setConnector(secureResources.getListener()); + } + } + + if (policy.isHttpsEnabled()) { InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf.get( DFS_DATANODE_HTTPS_ADDRESS_KEY, infoHost + ":" + 0)); - builder.addEndpoint(URI.create("https://" - + NetUtils.getHostPortString(secInfoSocAddr))); + Configuration sslConf = new Configuration(false); - sslConf.setBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY, conf - .getBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY, - DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT)); sslConf.addResource(conf.get( DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY, DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT)); + sslConf.setBoolean(DFS_CLIENT_HTTPS_NEED_AUTH_KEY, conf.getBoolean( + DFS_CLIENT_HTTPS_NEED_AUTH_KEY, DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT)); DFSUtil.loadSslConfToHttpServerBuilder(builder, sslConf); - if(LOG.isDebugEnabled()) { - LOG.debug("Datanode listening for SSL on " + secInfoSocAddr); + int port = secInfoSocAddr.getPort(); + if (port == 0) { + builder.setFindPort(true); } - infoSecurePort = secInfoSocAddr.getPort(); + builder.addEndpoint(URI.create("https://" + infoHost + ":" + port)); } - this.infoServer = (secureResources == null) ? 
builder.build() : - builder.setConnector(secureResources.getListener()).build(); + this.infoServer = builder.build(); + this.infoServer.addInternalServlet(null, "/streamFile/*", StreamFile.class); this.infoServer.addInternalServlet(null, "/getFileChecksum/*", FileChecksumServlets.GetServlet.class); @@ -352,9 +368,17 @@ public class DataNode extends Configured WebHdfsFileSystem.PATH_PREFIX + "/*"); } this.infoServer.start(); - this.infoPort = infoServer.getConnectorAddress(0).getPort(); + + int connIdx = 0; + if (policy.isHttpEnabled()) { + infoPort = infoServer.getConnectorAddress(connIdx++).getPort(); + } + + if (policy.isHttpsEnabled()) { + infoSecurePort = infoServer.getConnectorAddress(connIdx).getPort(); + } } - + private void startPlugins(Configuration conf) { plugins = conf.getInstances(DFS_DATANODE_PLUGINS_KEY, ServicePlugin.class); for (ServicePlugin p: plugins) { Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java?rev=1551715&r1=1551714&r2=1551715&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java Tue Dec 17 21:18:38 2013 @@ -16,27 +16,20 @@ */ package org.apache.hadoop.hdfs.server.datanode; -import java.io.IOException; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.nio.channels.ServerSocketChannel; -import java.security.GeneralSecurityException; import org.apache.commons.daemon.Daemon; import org.apache.commons.daemon.DaemonContext; import org.apache.hadoop.conf.Configuration; - import org.apache.hadoop.hdfs.DFSConfigKeys; +import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.http.HttpServer; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.security.ssl.SSLFactory; import org.mortbay.jetty.Connector; -import org.mortbay.jetty.nio.SelectChannelConnector; -import org.mortbay.jetty.security.SslSocketConnector; - -import javax.net.ssl.SSLServerSocketFactory; import com.google.common.annotations.VisibleForTesting; @@ -65,7 +58,6 @@ public class SecureDataNodeStarter imple private String [] args; private SecureResources resources; - private SSLFactory sslFactory; @Override public void init(DaemonContext context) throws Exception { @@ -74,9 +66,7 @@ public class SecureDataNodeStarter imple // Stash command-line arguments for regular datanode args = context.getArguments(); - - sslFactory = new SSLFactory(SSLFactory.Mode.SERVER, conf); - resources = getSecureResources(sslFactory, conf); + resources = getSecureResources(conf); } @Override @@ -84,68 +74,65 @@ public class SecureDataNodeStarter imple System.err.println("Starting regular datanode initialization"); DataNode.secureMain(args, resources); } - - @Override public void destroy() { - sslFactory.destroy(); - } + @Override public void destroy() {} @Override public void stop() throws Exception { /* Nothing to do */ } + /** + * Acquire privileged 
resources (i.e., the privileged ports) for the data + * node. The privileged resources consist of the port of the RPC server and + * the port of HTTP (not HTTPS) server. + */ @VisibleForTesting - public static SecureResources getSecureResources(final SSLFactory sslFactory, - Configuration conf) throws Exception { + public static SecureResources getSecureResources(Configuration conf) + throws Exception { + HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf); + // Obtain secure port for data streaming to datanode InetSocketAddress streamingAddr = DataNode.getStreamingAddr(conf); - int socketWriteTimeout = conf.getInt(DFSConfigKeys.DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY, + int socketWriteTimeout = conf.getInt( + DFSConfigKeys.DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY, HdfsServerConstants.WRITE_TIMEOUT); - + ServerSocket ss = (socketWriteTimeout > 0) ? ServerSocketChannel.open().socket() : new ServerSocket(); ss.bind(streamingAddr, 0); - + // Check that we got the port we need if (ss.getLocalPort() != streamingAddr.getPort()) { - throw new RuntimeException("Unable to bind on specified streaming port in secure " + - "context. Needed " + streamingAddr.getPort() + ", got " + ss.getLocalPort()); + throw new RuntimeException( + "Unable to bind on specified streaming port in secure " + + "context. Needed " + streamingAddr.getPort() + ", got " + + ss.getLocalPort()); } - // Obtain secure listener for web server - Connector listener; - if (HttpConfig.isSecure()) { - try { - sslFactory.init(); - } catch (GeneralSecurityException ex) { - throw new IOException(ex); - } - SslSocketConnector sslListener = new SslSocketConnector() { - @Override - protected SSLServerSocketFactory createFactory() throws Exception { - return sslFactory.createSSLServerSocketFactory(); - } - }; - listener = sslListener; - } else { + System.err.println("Opened streaming server at " + streamingAddr); + + // Bind a port for the web server. The code intends to bind HTTP server to + // privileged port only, as the client can authenticate the server using + // certificates if they are communicating through SSL. + Connector listener = null; + if (policy.isHttpEnabled()) { listener = HttpServer.createDefaultChannelConnector(); - } + InetSocketAddress infoSocAddr = DataNode.getInfoAddr(conf); + listener.setHost(infoSocAddr.getHostName()); + listener.setPort(infoSocAddr.getPort()); + // Open listener here in order to bind to port as root + listener.open(); + if (listener.getPort() != infoSocAddr.getPort()) { + throw new RuntimeException("Unable to bind on specified info port in secure " + + "context. Needed " + streamingAddr.getPort() + ", got " + ss.getLocalPort()); + } + System.err.println("Successfully obtained privileged resources (streaming port = " + + ss + " ) (http listener port = " + listener.getConnection() +")"); - InetSocketAddress infoSocAddr = DataNode.getInfoAddr(conf); - listener.setHost(infoSocAddr.getHostName()); - listener.setPort(infoSocAddr.getPort()); - // Open listener here in order to bind to port as root - listener.open(); - if (listener.getPort() != infoSocAddr.getPort()) { - throw new RuntimeException("Unable to bind on specified info port in secure " + - "context. 
Needed " + streamingAddr.getPort() + ", got " + ss.getLocalPort()); - } - System.err.println("Successfully obtained privileged resources (streaming port = " - + ss + " ) (http listener port = " + listener.getConnection() +")"); - - if ((ss.getLocalPort() > 1023 || listener.getPort() > 1023) && - UserGroupInformation.isSecurityEnabled()) { - throw new RuntimeException("Cannot start secure datanode with unprivileged ports"); + if ((ss.getLocalPort() > 1023 || listener.getPort() > 1023) && + UserGroupInformation.isSecurityEnabled()) { + throw new RuntimeException("Cannot start secure datanode with unprivileged ports"); + } + System.err.println("Opened info server at " + infoSocAddr); } - System.err.println("Opened streaming server at " + streamingAddr); - System.err.println("Opened info server at " + infoSocAddr); + return new SecureResources(ss, listener); } Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java?rev=1551715&r1=1551714&r2=1551715&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupNode.java Tue Dec 17 21:18:38 2013 @@ -122,11 +122,6 @@ public class BackupNode extends NameNode String addr = conf.get(BN_HTTP_ADDRESS_NAME_KEY, BN_HTTP_ADDRESS_DEFAULT); return NetUtils.createSocketAddr(addr); } - - @Override // NameNode - protected void setHttpServerAddress(Configuration conf){ - conf.set(BN_HTTP_ADDRESS_NAME_KEY, NetUtils.getHostPortString(getHttpAddress())); - } @Override // NameNode protected void loadNamesystem(Configuration conf) throws IOException { @@ -163,6 +158,10 @@ public class BackupNode extends NameNode registerWith(nsInfo); // Checkpoint daemon should start after the rpc server started runCheckpointDaemon(conf); + InetSocketAddress addr = getHttpAddress(); + if (addr != null) { + conf.set(BN_HTTP_ADDRESS_NAME_KEY, NetUtils.getHostPortString(getHttpAddress())); + } } @Override Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream.java?rev=1551715&r1=1551714&r2=1551715&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream.java (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogFileInputStream.java Tue Dec 17 21:18:38 2013 @@ -118,7 +118,7 @@ public class EditLogFileInputStream exte */ public static EditLogInputStream fromUrl( URLConnectionFactory connectionFactory, URL url, long startTxId, - long endTxId, boolean inProgress) { + long endTxId, boolean inProgress) { return new EditLogFileInputStream(new URLLog(connectionFactory, url), startTxId, endTxId, inProgress); } Modified: 
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=1551715&r1=1551714&r2=1551715&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java Tue Dec 17 21:18:38 2013 @@ -17,6 +17,10 @@ */ package org.apache.hadoop.hdfs.server.namenode; +import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY; +import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_DEFAULT; +import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY; + import java.io.File; import java.io.IOException; import java.io.PrintStream; @@ -432,17 +436,11 @@ public class NameNode implements NameNod return getHttpAddress(conf); } - /** @return the NameNode HTTP address set in the conf. */ + /** @return the NameNode HTTP address. */ public static InetSocketAddress getHttpAddress(Configuration conf) { return NetUtils.createSocketAddr( conf.get(DFS_NAMENODE_HTTP_ADDRESS_KEY, DFS_NAMENODE_HTTP_ADDRESS_DEFAULT)); } - - protected void setHttpServerAddress(Configuration conf) { - String hostPort = NetUtils.getHostPortString(getHttpAddress()); - conf.set(DFS_NAMENODE_HTTP_ADDRESS_KEY, hostPort); - LOG.info("Web-server up at: " + hostPort); - } protected void loadNamesystem(Configuration conf) throws IOException { this.namesystem = FSNamesystem.loadFromDisk(conf); @@ -492,7 +490,6 @@ public class NameNode implements NameNod if (NamenodeRole.NAMENODE == role) { startHttpServer(conf); - validateConfigurationSettingsOrAbort(conf); } loadNamesystem(conf); @@ -500,8 +497,6 @@ public class NameNode implements NameNod if (NamenodeRole.NAMENODE == role) { httpServer.setNameNodeAddress(getNameNodeAddress()); httpServer.setFSImage(getFSImage()); - } else { - validateConfigurationSettingsOrAbort(conf); } pauseMonitor = new JvmPauseMonitor(conf); @@ -519,45 +514,6 @@ public class NameNode implements NameNod return new NameNodeRpcServer(conf, this); } - /** - * Verifies that the final Configuration Settings look ok for the NameNode to - * properly start up - * Things to check for include: - * - HTTP Server Port does not equal the RPC Server Port - * @param conf - * @throws IOException - */ - protected void validateConfigurationSettings(final Configuration conf) - throws IOException { - // check to make sure the web port and rpc port do not match - if(getHttpServerAddress(conf).getPort() - == getRpcServerAddress(conf).getPort()) { - String errMsg = "dfs.namenode.rpc-address " + - "("+ getRpcServerAddress(conf) + ") and " + - "dfs.namenode.http-address ("+ getHttpServerAddress(conf) + ") " + - "configuration keys are bound to the same port, unable to start " + - "NameNode. Port: " + getRpcServerAddress(conf).getPort(); - throw new IOException(errMsg); - } - } - - /** - * Validate NameNode configuration. Log a fatal error and abort if - * configuration is invalid. 
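Both DataNode.startInfoServer() (above) and NameNodeHttpServer.start() (below) now follow the same pattern: resolve the policy, add an http and/or https endpoint on HttpServer.Builder, turn on setFindPort(true) when the configured port is 0, and read the bound addresses back with getConnectorAddress() using a running connector index. A condensed, hedged sketch of that pattern follows; the class PolicyDrivenHttpServer, its parameters, and the ssl-server.xml resource name are assumptions, while the builder calls mirror the ones visible in this diff.

    // Illustrative sketch only, not part of r1551715: the policy-driven endpoint
    // wiring now shared by the DataNode and NameNode web servers.
    import java.net.InetSocketAddress;
    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSUtil;
    import org.apache.hadoop.http.HttpConfig;
    import org.apache.hadoop.http.HttpServer;

    public class PolicyDrivenHttpServer {
      public static HttpServer start(Configuration conf, String host,
          int httpPort, int httpsPort) throws Exception {
        HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);

        // The name must match a webapp bundled on the classpath
        // (the real daemons use "hdfs", "datanode" or "secondary").
        HttpServer.Builder builder = new HttpServer.Builder()
            .setName("hdfs").setConf(conf);

        if (policy.isHttpEnabled()) {
          if (httpPort == 0) {
            builder.setFindPort(true);   // port 0 means "pick any free port"
          }
          builder.addEndpoint(URI.create("http://" + host + ":" + httpPort));
        }

        if (policy.isHttpsEnabled()) {
          // Keystore settings come from a separate resource, as in
          // DFSUtil.loadSslConfToHttpServerBuilder above (resource name assumed).
          Configuration sslConf = new Configuration(false);
          sslConf.addResource("ssl-server.xml");
          DFSUtil.loadSslConfToHttpServerBuilder(builder, sslConf);
          if (httpsPort == 0) {
            builder.setFindPort(true);
          }
          builder.addEndpoint(URI.create("https://" + host + ":" + httpsPort));
        }

        HttpServer server = builder.build();
        server.start();

        // Connectors come back in the order the endpoints were added.
        int connIdx = 0;
        if (policy.isHttpEnabled()) {
          InetSocketAddress httpAddr = server.getConnectorAddress(connIdx++);
          System.out.println("HTTP bound to " + httpAddr);
        }
        if (policy.isHttpsEnabled()) {
          InetSocketAddress httpsAddr = server.getConnectorAddress(connIdx);
          System.out.println("HTTPS bound to " + httpsAddr);
        }
        return server;
      }
    }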
- * - * @param conf Configuration to validate - * @throws IOException thrown if conf is invalid - */ - private void validateConfigurationSettingsOrAbort(Configuration conf) - throws IOException { - try { - validateConfigurationSettings(conf); - } catch (IOException e) { - LOG.fatal(e.toString()); - throw e; - } - } - /** Start the services common to active and standby states */ private void startCommonServices(Configuration conf) throws IOException { namesystem.startCommonServices(conf, haContext); @@ -636,7 +592,6 @@ public class NameNode implements NameNod httpServer = new NameNodeHttpServer(conf, this, getHttpServerAddress(conf)); httpServer.start(); httpServer.setStartupProgress(startupProgress); - setHttpServerAddress(conf); } private void stopHttpServer() { @@ -658,7 +613,7 @@ public class NameNode implements NameNod * <li>{@link StartupOption#CHECKPOINT CHECKPOINT} - start checkpoint node</li> * <li>{@link StartupOption#UPGRADE UPGRADE} - start the cluster * upgrade and create a snapshot of the current file system state</li> - * <li>{@link StartupOption#RECOVERY RECOVERY} - recover name node + * <li>{@link StartupOption#RECOVER RECOVERY} - recover name node * metadata</li> * <li>{@link StartupOption#ROLLBACK ROLLBACK} - roll the * cluster back to the previous state</li> Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java?rev=1551715&r1=1551714&r2=1551715&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java Tue Dec 17 21:18:38 2013 @@ -18,6 +18,8 @@ package org.apache.hadoop.hdfs.server.namenode; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ADMIN; +import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT; +import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY; import java.io.IOException; import java.net.InetSocketAddress; @@ -38,6 +40,7 @@ import org.apache.hadoop.hdfs.web.AuthFi import org.apache.hadoop.hdfs.web.WebHdfsFileSystem; import org.apache.hadoop.hdfs.web.resources.Param; import org.apache.hadoop.hdfs.web.resources.UserParam; +import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.http.HttpServer; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.SecurityUtil; @@ -61,79 +64,118 @@ public class NameNodeHttpServer { public static final String FSIMAGE_ATTRIBUTE_KEY = "name.system.image"; protected static final String NAMENODE_ATTRIBUTE_KEY = "name.node"; public static final String STARTUP_PROGRESS_ATTRIBUTE_KEY = "startup.progress"; - - public NameNodeHttpServer( - Configuration conf, - NameNode nn, + + NameNodeHttpServer(Configuration conf, NameNode nn, InetSocketAddress bindAddress) { this.conf = conf; this.nn = nn; this.bindAddress = bindAddress; } - + + private void initWebHdfs(Configuration conf) throws IOException { + if (WebHdfsFileSystem.isEnabled(conf, HttpServer.LOG)) { + // set user pattern based on configuration file + 
UserParam.setUserPattern(conf.get(DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY, DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT)); + //add SPNEGO authentication filter for webhdfs + final String name = "SPNEGO"; + final String classname = AuthFilter.class.getName(); + final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*"; + Map<String, String> params = getAuthFilterParams(conf); + HttpServer.defineFilter(httpServer.getWebAppContext(), name, classname, params, + new String[]{pathSpec}); + HttpServer.LOG.info("Added filter '" + name + "' (class=" + classname + ")"); + + // add webhdfs packages + httpServer.addJerseyResourcePackage( + NamenodeWebHdfsMethods.class.getPackage().getName() + + ";" + Param.class.getPackage().getName(), pathSpec); + } + } + + /** + * @see DFSUtil#getHttpPolicy(org.apache.hadoop.conf.Configuration) + * for information related to the different configuration options and + * Http Policy is decided. + */ void start() throws IOException { + HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf); final String infoHost = bindAddress.getHostName(); - int infoPort = bindAddress.getPort(); - HttpServer.Builder builder = new HttpServer.Builder().setName("hdfs") - .addEndpoint(URI.create(("http://" + NetUtils.getHostPortString(bindAddress)))) - .setFindPort(infoPort == 0).setConf(conf).setACL( - new AccessControlList(conf.get(DFS_ADMIN, " "))) + + HttpServer.Builder builder = new HttpServer.Builder() + .setName("hdfs") + .setConf(conf) + .setACL(new AccessControlList(conf.get(DFS_ADMIN, " "))) .setSecurityEnabled(UserGroupInformation.isSecurityEnabled()) .setUsernameConfKey( DFSConfigKeys.DFS_NAMENODE_INTERNAL_SPNEGO_USER_NAME_KEY) - .setKeytabConfKey(DFSUtil.getSpnegoKeytabKey(conf, - DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY)); + .setKeytabConfKey( + DFSUtil.getSpnegoKeytabKey(conf, + DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY)); + + if (policy.isHttpEnabled()) { + int port = bindAddress.getPort(); + if (port == 0) { + builder.setFindPort(true); + } + builder.addEndpoint(URI.create("http://" + infoHost + ":" + port)); + } - boolean certSSL = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, false); - if (certSSL) { - httpsAddress = NetUtils.createSocketAddr(conf.get( + if (policy.isHttpsEnabled()) { + final String httpsAddrString = conf.get( DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, - DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT)); + DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT); + InetSocketAddress addr = NetUtils.createSocketAddr(httpsAddrString); - builder.addEndpoint(URI.create("https://" - + NetUtils.getHostPortString(httpsAddress))); Configuration sslConf = new Configuration(false); - sslConf.setBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY, conf - .getBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY, - DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT)); + sslConf.addResource(conf.get( DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY, DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT)); + + sslConf.addResource(conf.get( + DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY, + DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT)); + sslConf.setBoolean(DFS_CLIENT_HTTPS_NEED_AUTH_KEY, conf.getBoolean( + DFS_CLIENT_HTTPS_NEED_AUTH_KEY, DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT)); DFSUtil.loadSslConfToHttpServerBuilder(builder, sslConf); + + if (addr.getPort() == 0) { + builder.setFindPort(true); + } + + builder.addEndpoint(URI.create("https://" + + NetUtils.getHostPortString(addr))); } httpServer = builder.build(); - if 
(WebHdfsFileSystem.isEnabled(conf, HttpServer.LOG)) { - // set user pattern based on configuration file - UserParam.setUserPattern(conf.get(DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_KEY, DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT)); - // add SPNEGO authentication filter for webhdfs - final String name = "SPNEGO"; - final String classname = AuthFilter.class.getName(); - final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*"; - Map<String, String> params = getAuthFilterParams(conf); - HttpServer.defineFilter(httpServer.getWebAppContext(), name, classname, params, - new String[]{pathSpec}); - HttpServer.LOG.info("Added filter '" + name + "' (class=" + classname + ")"); + if (policy.isHttpsEnabled()) { + // assume same ssl port for all datanodes + InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf.get( + DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, infoHost + ":" + + DFSConfigKeys.DFS_DATANODE_HTTPS_DEFAULT_PORT)); + httpServer.setAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY, + datanodeSslPort.getPort()); + } - // add webhdfs packages - httpServer.addJerseyResourcePackage( - NamenodeWebHdfsMethods.class.getPackage().getName() - + ";" + Param.class.getPackage().getName(), pathSpec); - } + initWebHdfs(conf); httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, nn); httpServer.setAttribute(JspHelper.CURRENT_CONF, conf); setupServlets(httpServer, conf); httpServer.start(); - httpAddress = httpServer.getConnectorAddress(0); - if (certSSL) { - httpsAddress = httpServer.getConnectorAddress(1); - // assume same ssl port for all datanodes - InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf.get( - DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, infoHost + ":" + 50475)); - httpServer.setAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY, datanodeSslPort - .getPort()); + + int connIdx = 0; + if (policy.isHttpEnabled()) { + httpAddress = httpServer.getConnectorAddress(connIdx++); + conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, + NetUtils.getHostPortString(httpAddress)); + } + + if (policy.isHttpsEnabled()) { + httpsAddress = httpServer.getConnectorAddress(connIdx); + conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, + NetUtils.getHostPortString(httpsAddress)); } } @@ -169,18 +211,17 @@ public class NameNodeHttpServer { return params; } - - public void stop() throws Exception { + void stop() throws Exception { if (httpServer != null) { httpServer.stop(); } } - public InetSocketAddress getHttpAddress() { + InetSocketAddress getHttpAddress() { return httpAddress; } - public InetSocketAddress getHttpsAddress() { + InetSocketAddress getHttpsAddress() { return httpsAddress; } @@ -189,7 +230,7 @@ public class NameNodeHttpServer { * * @param fsImage FSImage to set */ - public void setFSImage(FSImage fsImage) { + void setFSImage(FSImage fsImage) { httpServer.setAttribute(FSIMAGE_ATTRIBUTE_KEY, fsImage); } @@ -198,7 +239,7 @@ public class NameNodeHttpServer { * * @param nameNodeAddress InetSocketAddress to set */ - public void setNameNodeAddress(InetSocketAddress nameNodeAddress) { + void setNameNodeAddress(InetSocketAddress nameNodeAddress) { httpServer.setAttribute(NAMENODE_ADDRESS_ATTRIBUTE_KEY, NetUtils.getConnectAddress(nameNodeAddress)); } @@ -208,7 +249,7 @@ public class NameNodeHttpServer { * * @param prog StartupProgress to set */ - public void setStartupProgress(StartupProgress prog) { + void setStartupProgress(StartupProgress prog) { httpServer.setAttribute(STARTUP_PROGRESS_ATTRIBUTE_KEY, prog); } @@ -238,7 +279,7 @@ public class NameNodeHttpServer { 
ContentSummaryServlet.class, false); } - public static FSImage getFsImageFromContext(ServletContext context) { + static FSImage getFsImageFromContext(ServletContext context) { return (FSImage)context.getAttribute(FSIMAGE_ATTRIBUTE_KEY); } @@ -246,7 +287,7 @@ public class NameNodeHttpServer { return (NameNode)context.getAttribute(NAMENODE_ATTRIBUTE_KEY); } - public static Configuration getConfFromContext(ServletContext context) { + static Configuration getConfFromContext(ServletContext context) { return (Configuration)context.getAttribute(JspHelper.CURRENT_CONF); } @@ -262,7 +303,7 @@ public class NameNodeHttpServer { * @param context ServletContext to get * @return StartupProgress associated with context */ - public static StartupProgress getStartupProgressFromContext( + static StartupProgress getStartupProgressFromContext( ServletContext context) { return (StartupProgress)context.getAttribute(STARTUP_PROGRESS_ATTRIBUTE_KEY); } Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java?rev=1551715&r1=1551714&r2=1551715&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java Tue Dec 17 21:18:38 2013 @@ -30,7 +30,6 @@ import java.io.FilenameFilter; import java.io.IOException; import java.net.InetSocketAddress; import java.net.URI; -import java.net.URISyntaxException; import java.security.PrivilegedAction; import java.security.PrivilegedExceptionAction; import java.util.Collection; @@ -257,12 +256,7 @@ public class SecondaryNameNode implement // initialize the webserver for uploading files. 
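With NameNode.setHttpServerAddress() removed, NameNodeHttpServer.start() (above) is now the place where the actually bound HTTP and HTTPS addresses are written back into the configuration; that is what lets MiniDFSCluster (later in this diff) recover ephemeral ports, and a port clash with the RPC server now surfaces as a BindException at bind time rather than through the deleted validateConfigurationSettings() check. A small hedged sketch of reading the rebound addresses; the class name is illustrative, the keys are from the diff.

    // Illustrative sketch, not part of r1551715: recovering the ports actually bound
    // when dfs.namenode.http-address / dfs.namenode.https-address were set to port 0.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;

    public class BoundAddressLookup {
      // Call after NameNodeHttpServer.start() (or after a MiniDFSCluster comes up);
      // start() rewrites these keys with the real host:port values. A key stays
      // unchanged when the corresponding scheme is disabled by dfs.http.policy.
      public static void printBoundAddresses(Configuration conf) {
        String http = conf.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
        String https = conf.get(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY);
        System.out.println("NameNode web UI (http):  " + http);
        System.out.println("NameNode web UI (https): " + https);
      }
    }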
int tmpInfoPort = infoSocAddr.getPort(); - URI httpEndpoint; - try { - httpEndpoint = new URI("http://" + NetUtils.getHostPortString(infoSocAddr)); - } catch (URISyntaxException e) { - throw new IOException(e); - } + URI httpEndpoint = URI.create("http://" + NetUtils.getHostPortString(infoSocAddr)); infoServer = new HttpServer.Builder().setName("secondary") .addEndpoint(httpEndpoint) @@ -273,6 +267,7 @@ public class SecondaryNameNode implement DFSConfigKeys.DFS_SECONDARY_NAMENODE_INTERNAL_SPNEGO_USER_NAME_KEY) .setKeytabConfKey(DFSUtil.getSpnegoKeytabKey(conf, DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY)).build(); + infoServer.setAttribute("secondary.name.node", this); infoServer.setAttribute("name.system.image", checkpointImage); infoServer.setAttribute(JspHelper.CURRENT_CONF, conf); Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java?rev=1551715&r1=1551714&r2=1551715&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Tue Dec 17 21:18:38 2013 @@ -245,8 +245,12 @@ public class NamenodeWebHdfsMethods { + Param.toSortedString("&", parameters); final String uripath = WebHdfsFileSystem.PATH_PREFIX + path; - final URI uri = new URI("http", null, dn.getHostName(), dn.getInfoPort(), - uripath, query, null); + final String scheme = request.getScheme(); + int port = "http".equals(scheme) ? dn.getInfoPort() : dn + .getInfoSecurePort(); + final URI uri = new URI(scheme, null, dn.getHostName(), port, uripath, + query, null); + if (LOG.isTraceEnabled()) { LOG.trace("redirectURI=" + uri); } Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml?rev=1551715&r1=1551714&r2=1551715&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml Tue Dec 17 21:18:38 2013 @@ -137,7 +137,20 @@ <property> <name>dfs.https.enable</name> <value>false</value> + <description> + Deprecated. Use "dfs.http.policy" instead. 
+ </description> +</property> + +<property> + <name>dfs.http.policy</name> + <value>HTTP_ONLY</value> <description>Decide if HTTPS(SSL) is supported on HDFS + This configures the HTTP endpoint for HDFS daemons: + The following values are supported: + - HTTP_ONLY : Service is provided only on http + - HTTPS_ONLY : Service is provided only on https + - HTTP_AND_HTTPS : Service is provided both on http and https </description> </property> Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java?rev=1551715&r1=1551714&r2=1551715&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java Tue Dec 17 21:18:38 2013 @@ -33,6 +33,7 @@ import static org.apache.hadoop.hdfs.DFS import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_CHECKPOINT_DIR_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_INTERVAL_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY; +import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SAFEMODE_EXTENSION_KEY; @@ -901,12 +902,17 @@ public class MiniDFSCluster { // After the NN has started, set back the bound ports into // the conf - conf.set(DFSUtil.addKeySuffixes( - DFS_NAMENODE_RPC_ADDRESS_KEY, nameserviceId, nnId), - nn.getNameNodeAddressHostPortString()); - conf.set(DFSUtil.addKeySuffixes( - DFS_NAMENODE_HTTP_ADDRESS_KEY, nameserviceId, nnId), NetUtils - .getHostPortString(nn.getHttpAddress())); + conf.set(DFSUtil.addKeySuffixes(DFS_NAMENODE_RPC_ADDRESS_KEY, + nameserviceId, nnId), nn.getNameNodeAddressHostPortString()); + if (nn.getHttpAddress() != null) { + conf.set(DFSUtil.addKeySuffixes(DFS_NAMENODE_HTTP_ADDRESS_KEY, + nameserviceId, nnId), NetUtils.getHostPortString(nn.getHttpAddress())); + } + if (nn.getHttpsAddress() != null) { + conf.set(DFSUtil.addKeySuffixes(DFS_NAMENODE_HTTPS_ADDRESS_KEY, + nameserviceId, nnId), NetUtils.getHostPortString(nn.getHttpsAddress())); + } + DFSUtil.setGenericConf(conf, nameserviceId, nnId, DFS_NAMENODE_HTTP_ADDRESS_KEY); nameNodes[nnIndex] = new NameNodeInfo(nn, nameserviceId, nnId, @@ -1182,9 +1188,8 @@ public class MiniDFSCluster { SecureResources secureResources = null; if (UserGroupInformation.isSecurityEnabled()) { - SSLFactory sslFactory = new SSLFactory(SSLFactory.Mode.SERVER, dnConf); try { - secureResources = SecureDataNodeStarter.getSecureResources(sslFactory, dnConf); + secureResources = SecureDataNodeStarter.getSecureResources(dnConf); } catch (Exception ex) { ex.printStackTrace(); } Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSClusterWithNodeGroup.java URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSClusterWithNodeGroup.java?rev=1551715&r1=1551714&r2=1551715&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSClusterWithNodeGroup.java (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSClusterWithNodeGroup.java Tue Dec 17 21:18:38 2013 @@ -158,9 +158,8 @@ public class MiniDFSClusterWithNodeGroup SecureResources secureResources = null; if (UserGroupInformation.isSecurityEnabled()) { - SSLFactory sslFactory = new SSLFactory(SSLFactory.Mode.SERVER, dnConf); try { - secureResources = SecureDataNodeStarter.getSecureResources(sslFactory, dnConf); + secureResources = SecureDataNodeStarter.getSecureResources(dnConf); } catch (Exception ex) { ex.printStackTrace(); } Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java?rev=1551715&r1=1551714&r2=1551715&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java (original) +++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java Tue Dec 17 21:18:38 2013 @@ -22,6 +22,7 @@ import static org.junit.Assert.fail; import java.io.File; import java.io.IOException; +import java.net.BindException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; @@ -50,7 +51,7 @@ public class TestValidateConfigurationSe * an exception * is thrown when trying to re-use the same port */ - @Test + @Test(expected = BindException.class) public void testThatMatchingRPCandHttpPortsThrowException() throws IOException { @@ -63,14 +64,7 @@ public class TestValidateConfigurationSe FileSystem.setDefaultUri(conf, "hdfs://localhost:9000"); conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "127.0.0.1:9000"); DFSTestUtil.formatNameNode(conf); - try { - NameNode nameNode = new NameNode(conf); - fail("Should have throw the exception since the ports match"); - } catch (IOException e) { - // verify we're getting the right IOException - assertTrue(e.toString().contains("dfs.namenode.rpc-address (")); - System.out.println("Got expected exception: " + e.toString()); - } + new NameNode(conf); } /** Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java?rev=1551715&r1=1551714&r2=1551715&view=diff ============================================================================== --- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java (original) +++ 
hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java Tue Dec 17 21:18:38 2013 @@ -28,6 +28,7 @@ import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.MiniDFSCluster; +import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.security.ssl.KeyStoreTestUtil; import org.junit.AfterClass; import org.junit.Assert; @@ -49,7 +50,7 @@ public class TestHttpsFileSystem { public static void setUp() throws Exception { conf = new Configuration(); conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true); - conf.setBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, true); + conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name()); conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0"); File base = new File(BASEDIR);
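The TestHttpsFileSystem change above swaps the deprecated boolean for dfs.http.policy=HTTPS_ONLY. To close, a hedged sketch of the kind of setup such a test performs; the class name, the target/test/https-example directory, and the single-DataNode topology are assumptions, while the configuration keys and the KeyStoreTestUtil helper appear in the test's imports.

    // Illustrative sketch, not part of r1551715: bringing up a MiniDFSCluster that
    // serves its web endpoints over HTTPS only, in the style of TestHttpsFileSystem.
    import java.io.File;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileUtil;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.hdfs.MiniDFSCluster;
    import org.apache.hadoop.http.HttpConfig;
    import org.apache.hadoop.security.ssl.KeyStoreTestUtil;

    public class HttpsOnlyClusterExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
        conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY,
            HttpConfig.Policy.HTTPS_ONLY.name());
        conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");

        // Generate a throw-away keystore/truststore for the run (directory name
        // is an assumption; the real test uses its own BASEDIR).
        File base = new File("target/test/https-example");
        FileUtil.fullyDelete(base);
        base.mkdirs();
        String keystoresDir = base.getAbsolutePath();
        String sslConfDir =
            KeyStoreTestUtil.getClasspathDir(HttpsOnlyClusterExample.class);
        KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false);

        MiniDFSCluster cluster =
            new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
        cluster.waitActive();
        // The cluster should have written the bound ephemeral port back here.
        System.out.println("NameNode https address: "
            + conf.get(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY));
        cluster.shutdown();
      }
    }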