Author: eli
Date: Fri May 4 21:58:44 2012
New Revision: 1334216
URL: http://svn.apache.org/viewvc?rev=1334216&view=rev
Log:
HDFS-2617. Replaced Kerberized SSL for image transfer and fsck with
SPNEGO-based solution. Contributed by Jakob Homan, Alejandro Abdelnur, and
Aaron T. Myers
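
For illustration only: with this change, a secure cluster configures SPNEGO
principals for the NameNode and Secondary NameNode web endpoints. A minimal
hdfs-site.xml sketch (principal and keytab values here are placeholders, not
part of this commit):

    <property>
      <name>dfs.web.authentication.kerberos.principal</name>
      <value>HTTP/_HOST@EXAMPLE.COM</value>
    </property>
    <property>
      <name>dfs.web.authentication.kerberos.keytab</name>
      <value>/etc/security/keytab/spnego.service.keytab</value>
    </property>

Per the hdfs-default.xml change at the end of this commit,
dfs.namenode.kerberos.internal.spnego.principal and
dfs.secondary.namenode.kerberos.internal.spnego.principal default to
${dfs.web.authentication.kerberos.principal}, so they normally need no
explicit value.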
Modified:
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/BootstrapStandby.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StandbyCheckpointer.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1334216&r1=1334215&r2=1334216&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Fri May 4 21:58:44 2012
@@ -435,6 +435,9 @@ Release 2.0.0 - UNRELEASED
HDFS-2476. More CPU efficient data structure for under-replicated,
over-replicated, and invalidated blocks. (Tomasz Nykiel via todd)
+ HDFS-2617. Replaced Kerberized SSL for image transfer and fsck
+ with SPNEGO-based solution. (jghoman, tucu, and atm via eli)
+
BUG FIXES
HDFS-2481. Unknown protocol:
org.apache.hadoop.hdfs.protocol.ClientProtocol.
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java?rev=1334216&r1=1334215&r2=1334216&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java Fri May 4 21:58:44 2012
@@ -319,10 +319,10 @@ public class DFSConfigKeys extends Commo
  public static final String  DFS_DATANODE_USER_NAME_KEY = "dfs.datanode.kerberos.principal";
  public static final String  DFS_NAMENODE_KEYTAB_FILE_KEY = "dfs.namenode.keytab.file";
  public static final String  DFS_NAMENODE_USER_NAME_KEY = "dfs.namenode.kerberos.principal";
-  public static final String  DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY = "dfs.namenode.kerberos.https.principal";
+  public static final String  DFS_NAMENODE_INTERNAL_SPENGO_USER_NAME_KEY = "dfs.namenode.kerberos.internal.spnego.principal";
  public static final String  DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY = "dfs.secondary.namenode.keytab.file";
  public static final String  DFS_SECONDARY_NAMENODE_USER_NAME_KEY = "dfs.secondary.namenode.kerberos.principal";
-  public static final String  DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY = "dfs.secondary.namenode.kerberos.https.principal";
+  public static final String  DFS_SECONDARY_NAMENODE_INTERNAL_SPENGO_USER_NAME_KEY = "dfs.secondary.namenode.kerberos.internal.spnego.principal";
  public static final String  DFS_NAMENODE_NAME_CACHE_THRESHOLD_KEY = "dfs.namenode.name.cache.threshold";
  public static final int     DFS_NAMENODE_NAME_CACHE_THRESHOLD_DEFAULT = 10;
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=1334216&r1=1334215&r2=1334216&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HftpFileSystem.java Fri May 4 21:58:44 2012
@@ -144,7 +144,7 @@ public class HftpFileSystem extends File
}
protected URI getNamenodeSecureUri(URI uri) {
- return DFSUtil.createUri("https", getNamenodeSecureAddr(uri));
+ return DFSUtil.createUri("http", getNamenodeSecureAddr(uri));
}
@Override
@@ -247,7 +247,7 @@ public class HftpFileSystem extends File
c = DelegationTokenFetcher.getDTfromRemote(nnHttpUrl, renewer);
} catch (Exception e) {
LOG.info("Couldn't get a delegation token from " + nnHttpUrl +
- " using https.");
+ " using http.");
if(LOG.isDebugEnabled()) {
LOG.debug("error was ", e);
}
@@ -686,11 +686,11 @@ public class HftpFileSystem extends File
Configuration conf) throws IOException {
// update the kerberos credentials, if they are coming from a keytab
UserGroupInformation.getLoginUser().reloginFromKeytab();
- // use https to renew the token
+ // use http to renew the token
InetSocketAddress serviceAddr = SecurityUtil.getTokenServiceAddr(token);
return
DelegationTokenFetcher.renewDelegationToken
- (DFSUtil.createUri("https", serviceAddr).toString(),
+ (DFSUtil.createUri("http", serviceAddr).toString(),
(Token<DelegationTokenIdentifier>) token);
}
@@ -700,10 +700,10 @@ public class HftpFileSystem extends File
Configuration conf) throws IOException {
// update the kerberos credentials, if they are coming from a keytab
UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
- // use https to cancel the token
+ // use http to cancel the token
InetSocketAddress serviceAddr = SecurityUtil.getTokenServiceAddr(token);
DelegationTokenFetcher.cancelDelegationToken
- (DFSUtil.createUri("https", serviceAddr).toString(),
+ (DFSUtil.createUri("http", serviceAddr).toString(),
(Token<DelegationTokenIdentifier>) token);
}
}
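
For context, the renew/cancel paths above now build plain-http URIs. A
self-contained sketch of DFSUtil.createUri() as used in the hunks above; the
address is a placeholder:

    import java.net.InetSocketAddress;
    import java.net.URI;

    import org.apache.hadoop.hdfs.DFSUtil;

    public class CreateUriSketch {
      public static void main(String[] args) {
        // Placeholder NameNode web address, for illustration only.
        InetSocketAddress addr = new InetSocketAddress("nn.example.com", 50070);
        // After HDFS-2617, token renew/cancel use the "http" scheme here
        // instead of "https".
        URI uri = DFSUtil.createUri("http", addr);
        System.out.println(uri); // prints http://nn.example.com:50070
      }
    }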
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java?rev=1334216&r1=1334215&r2=1334216&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java Fri May 4 21:58:44 2012
@@ -27,6 +27,8 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
+
+import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.commons.logging.Log;
@@ -34,7 +36,6 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HAUtil;
import org.apache.hadoop.hdfs.server.common.JspHelper;
@@ -83,11 +84,11 @@ public class GetImageServlet extends Htt
(Configuration)getServletContext().getAttribute(JspHelper.CURRENT_CONF);
if(UserGroupInformation.isSecurityEnabled() &&
- !isValidRequestor(request.getRemoteUser(), conf)) {
+ !isValidRequestor(request.getUserPrincipal().getName(), conf)) {
response.sendError(HttpServletResponse.SC_FORBIDDEN,
"Only Namenode and Secondary Namenode may access this servlet");
LOG.warn("Received non-NN/SNN request for image or edits from "
- + request.getRemoteHost());
+ + request.getUserPrincipal().getName() + " at " +
request.getRemoteHost());
return;
}
@@ -156,15 +157,10 @@ public class GetImageServlet extends Htt
}
// issue a HTTP get request to download the new fsimage
- MD5Hash downloadImageDigest = reloginIfNecessary().doAs(
- new PrivilegedExceptionAction<MD5Hash>() {
- @Override
- public MD5Hash run() throws Exception {
- return TransferFsImage.downloadImageToStorage(
+ MD5Hash downloadImageDigest =
+ TransferFsImage.downloadImageToStorage(
parsedParams.getInfoServer(), txid,
nnImage.getStorage(), true);
- }
- });
nnImage.saveDigestAndRenameCheckpointImage(txid,
downloadImageDigest);
// Now that we have a new checkpoint, we might be able to
@@ -176,18 +172,6 @@ public class GetImageServlet extends Htt
}
return null;
}
-
- // We may have lost our ticket since the last time we tried to open
- // an http connection, so log in just in case.
- private UserGroupInformation reloginIfNecessary() throws IOException {
- // This method is only called on the NN, therefore it is safe to
- // use these key values.
- return UserGroupInformation.loginUserFromKeytabAndReturnUGI(
- SecurityUtil.getServerPrincipal(conf
- .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
- NameNode.getAddress(conf).getHostName()),
- conf.get(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY));
- }
});
} catch (Throwable t) {
@@ -234,18 +218,10 @@ public class GetImageServlet extends Htt
validRequestors.add(
SecurityUtil.getServerPrincipal(conf
- .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY), NameNode
- .getAddress(conf).getHostName()));
- validRequestors.add(
- SecurityUtil.getServerPrincipal(conf
.get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY), NameNode
.getAddress(conf).getHostName()));
validRequestors.add(
SecurityUtil.getServerPrincipal(conf
- .get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
- SecondaryNameNode.getHttpAddress(conf).getHostName()));
- validRequestors.add(
- SecurityUtil.getServerPrincipal(conf
.get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_USER_NAME_KEY),
SecondaryNameNode.getHttpAddress(conf).getHostName()));
@@ -253,21 +229,17 @@ public class GetImageServlet extends Htt
Configuration otherNnConf = HAUtil.getConfForOtherNode(conf);
validRequestors.add(
SecurityUtil.getServerPrincipal(otherNnConf
- .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
- NameNode.getAddress(otherNnConf).getHostName()));
- validRequestors.add(
- SecurityUtil.getServerPrincipal(otherNnConf
.get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY),
NameNode.getAddress(otherNnConf).getHostName()));
}
for(String v : validRequestors) {
if(v != null && v.equals(remoteUser)) {
- if(LOG.isDebugEnabled()) LOG.debug("isValidRequestor is allowing: " +
remoteUser);
+ if(LOG.isInfoEnabled()) LOG.info("GetImageServlet allowing: " +
remoteUser);
return true;
}
}
- if(LOG.isDebugEnabled()) LOG.debug("isValidRequestor is rejecting: " +
remoteUser);
+ if(LOG.isInfoEnabled()) LOG.info("GetImageServlet rejecting: " +
remoteUser);
return false;
}
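
For context, a minimal sketch (a hypothetical servlet, not from this commit)
of the authorization pattern above: once an AuthenticationFilter fronts the
servlet, the SPNEGO-authenticated caller is read from getUserPrincipal()
rather than getRemoteUser():

    import java.io.IOException;

    import javax.servlet.http.HttpServlet;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;

    public class RequestorCheckSketch extends HttpServlet {
      @Override
      protected void doGet(HttpServletRequest request, HttpServletResponse response)
          throws IOException {
        // Set by the SPNEGO filter: the full Kerberos principal, e.g. "nn/host@REALM".
        String remoteUser = request.getUserPrincipal().getName();
        if (!isValidRequestor(remoteUser)) {
          response.sendError(HttpServletResponse.SC_FORBIDDEN,
              "Only Namenode and Secondary Namenode may access this servlet");
          return;
        }
        // ... serve the image or edits ...
      }

      // Placeholder check; the real servlet compares against the configured
      // NN/SNN principals resolved via SecurityUtil.getServerPrincipal().
      private boolean isValidRequestor(String principal) {
        return principal != null && principal.startsWith("nn/");
      }
    }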
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=1334216&r1=1334215&r2=1334216&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java Fri May 4 21:58:44 2012
@@ -164,10 +164,8 @@ public class NameNode {
DFS_NAMENODE_CHECKPOINT_EDITS_DIR_KEY,
DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
DFS_NAMENODE_HTTP_ADDRESS_KEY,
- DFS_NAMENODE_HTTPS_ADDRESS_KEY,
DFS_NAMENODE_KEYTAB_FILE_KEY,
DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
- DFS_NAMENODE_SECONDARY_HTTPS_PORT_KEY,
DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY,
DFS_NAMENODE_BACKUP_ADDRESS_KEY,
DFS_NAMENODE_BACKUP_HTTP_ADDRESS_KEY,
@@ -361,8 +359,9 @@ public class NameNode {
}
protected void setHttpServerAddress(Configuration conf) {
- conf.set(DFS_NAMENODE_HTTP_ADDRESS_KEY,
- NetUtils.getHostPortString(getHttpAddress()));
+ String hostPort = NetUtils.getHostPortString(getHttpAddress());
+ conf.set(DFS_NAMENODE_HTTP_ADDRESS_KEY, hostPort);
+ LOG.info("Web-server up at: " + hostPort);
}
protected void loadNamesystem(Configuration conf) throws IOException {
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java?rev=1334216&r1=1334215&r2=1334216&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java Fri May 4 21:58:44 2012
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hdfs.server.namenode;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ADMIN;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT;
@@ -43,6 +44,7 @@ import org.apache.hadoop.http.HttpServer
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authorize.AccessControlList;
/**
@@ -78,127 +80,101 @@ public class NameNodeHttpServer {
conf.get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY),
nn.getNameNodeAddress().getHostName());
}
-
+
public void start() throws IOException {
final String infoHost = bindAddress.getHostName();
-
- if(UserGroupInformation.isSecurityEnabled()) {
- String httpsUser = SecurityUtil.getServerPrincipal(conf
- .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY), infoHost);
- if (httpsUser == null) {
- LOG.warn(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY
- + " not defined in config. Starting http server as "
- + getDefaultServerPrincipal()
- + ": Kerberized SSL may be not function correctly.");
- } else {
- // Kerberized SSL servers must be run from the host principal...
- LOG.info("Logging in as " + httpsUser + " to start http server.");
- SecurityUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY,
- DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY, infoHost);
- }
- }
+ int infoPort = bindAddress.getPort();
- UserGroupInformation ugi = UserGroupInformation.getLoginUser();
- try {
- this.httpServer = ugi.doAs(new PrivilegedExceptionAction<HttpServer>() {
- @Override
- public HttpServer run() throws IOException, InterruptedException {
- int infoPort = bindAddress.getPort();
- httpServer = new HttpServer("hdfs", infoHost, infoPort,
- infoPort == 0, conf,
- new AccessControlList(conf.get(DFSConfigKeys.DFS_ADMIN, " "))) {
- {
- if (WebHdfsFileSystem.isEnabled(conf, LOG)) {
- //add SPNEGO authentication filter for webhdfs
- final String name = "SPNEGO";
- final String classname = AuthFilter.class.getName();
- final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
- Map<String, String> params = getAuthFilterParams(conf);
- defineFilter(webAppContext, name, classname, params,
- new String[]{pathSpec});
- LOG.info("Added filter '" + name + "' (class=" + classname +
")");
-
- // add webhdfs packages
- addJerseyResourcePackage(
- NamenodeWebHdfsMethods.class.getPackage().getName()
- + ";" + Param.class.getPackage().getName(), pathSpec);
- }
+ httpServer = new HttpServer("hdfs", infoHost, infoPort,
+ infoPort == 0, conf,
+ new AccessControlList(conf.get(DFS_ADMIN, "
"))) {
+ {
+ // Add SPNEGO support to NameNode
+ if (UserGroupInformation.isSecurityEnabled()) {
+          Map<String, String> params = new HashMap<String, String>();
+          String principalInConf = conf.get(
+              DFSConfigKeys.DFS_NAMENODE_INTERNAL_SPENGO_USER_NAME_KEY);
+          if (principalInConf != null && !principalInConf.isEmpty()) {
+            params.put("kerberos.principal",
+                SecurityUtil.getServerPrincipal(principalInConf, infoHost));
+            String httpKeytab = conf.get(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY);
+            if (httpKeytab != null && !httpKeytab.isEmpty()) {
+              params.put("kerberos.keytab", httpKeytab);
+            }
-        private Map<String, String> getAuthFilterParams(Configuration conf)
-            throws IOException {
-          Map<String, String> params = new HashMap<String, String>();
-          String principalInConf = conf
-              .get(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY);
-          if (principalInConf != null && !principalInConf.isEmpty()) {
-            params
-                .put(
-                    DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
-                    SecurityUtil.getServerPrincipal(principalInConf,
-                        infoHost));
-          }
-          String httpKeytab = conf
-              .get(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY);
-          if (httpKeytab != null && !httpKeytab.isEmpty()) {
-            params.put(
-                DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
-                httpKeytab);
-          }
-          return params;
-        }
-      };
+ params.put(AuthenticationFilter.AUTH_TYPE, "kerberos");
-      boolean certSSL = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, false);
- boolean useKrb = UserGroupInformation.isSecurityEnabled();
- if (certSSL || useKrb) {
- boolean needClientAuth = conf.getBoolean(
- DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
- DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT);
- InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf
- .get(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
- DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT));
- Configuration sslConf = new HdfsConfiguration(false);
- if (certSSL) {
-          sslConf.addResource(conf.get(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
-              DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT));
- }
- httpServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth,
- useKrb);
- // assume same ssl port for all datanodes
- InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(
-            conf.get(DFS_DATANODE_HTTPS_ADDRESS_KEY,
-                infoHost + ":" + DFSConfigKeys.DFS_DATANODE_HTTPS_DEFAULT_PORT));
- httpServer.setAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY,
- datanodeSslPort.getPort());
+ defineFilter(webAppContext, SPNEGO_FILTER,
+ AuthenticationFilter.class.getName(), params, null);
}
- httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, nn);
- httpServer.setAttribute(NAMENODE_ADDRESS_ATTRIBUTE_KEY,
- nn.getNameNodeAddress());
- httpServer.setAttribute(FSIMAGE_ATTRIBUTE_KEY, nn.getFSImage());
- httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
- setupServlets(httpServer, conf);
- httpServer.start();
-
- // The web-server port can be ephemeral... ensure we have the correct
- // info
- infoPort = httpServer.getPort();
- httpAddress = new InetSocketAddress(infoHost, infoPort);
- LOG.info(nn.getRole() + " Web-server up at: " + httpAddress);
- return httpServer;
}
- });
- } catch (InterruptedException e) {
- throw new IOException(e);
- } finally {
-      if(UserGroupInformation.isSecurityEnabled() &&
-          conf.get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY) != null) {
-        // Go back to being the correct Namenode principal
-        LOG.info("Logging back in as NameNode user following http server start");
- nn.loginAsNameNodeUser(conf);
+ if (WebHdfsFileSystem.isEnabled(conf, LOG)) {
+ //add SPNEGO authentication filter for webhdfs
+ final String name = "SPNEGO";
+ final String classname = AuthFilter.class.getName();
+ final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
+ Map<String, String> params = getAuthFilterParams(conf);
+ defineFilter(webAppContext, name, classname, params,
+ new String[]{pathSpec});
+ LOG.info("Added filter '" + name + "' (class=" + classname + ")");
+
+ // add webhdfs packages
+ addJerseyResourcePackage(
+ NamenodeWebHdfsMethods.class.getPackage().getName()
+ + ";" + Param.class.getPackage().getName(), pathSpec);
+ }
+ }
+
+ private Map<String, String> getAuthFilterParams(Configuration conf)
+ throws IOException {
+ Map<String, String> params = new HashMap<String, String>();
+ String principalInConf = conf
+ .get(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY);
+ if (principalInConf != null && !principalInConf.isEmpty()) {
+ params
+ .put(
+ DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
+ SecurityUtil.getServerPrincipal(principalInConf,
+ bindAddress.getHostName()));
+ }
+ String httpKeytab = conf
+ .get(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY);
+ if (httpKeytab != null && !httpKeytab.isEmpty()) {
+ params.put(
+ DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY,
+ httpKeytab);
+ }
+ return params;
+ }
+ };
+
+ boolean certSSL = conf.getBoolean("dfs.https.enable", false);
+ if (certSSL) {
+ boolean needClientAuth = conf.getBoolean("dfs.https.need.client.auth",
false);
+ InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(infoHost +
":" + conf.get(
+ "dfs.https.port", infoHost + ":" + 0));
+ Configuration sslConf = new Configuration(false);
+ if (certSSL) {
+ sslConf.addResource(conf.get("dfs.https.server.keystore.resource",
+ "ssl-server.xml"));
}
+ httpServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth);
+ // assume same ssl port for all datanodes
+ InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf.get(
+ "dfs.datanode.https.address", infoHost + ":" + 50475));
+ httpServer.setAttribute("datanode.https.port", datanodeSslPort
+ .getPort());
}
+ httpServer.setAttribute("name.node", nn);
+ httpServer.setAttribute("name.node.address", bindAddress);
+ httpServer.setAttribute("name.system.image", nn.getFSImage());
+ httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
+ setupServlets(httpServer, conf);
+ httpServer.start();
+    httpAddress = new InetSocketAddress(bindAddress.getAddress(), httpServer.getPort());
}
-
+
+
public void stop() throws Exception {
if (httpServer != null) {
httpServer.stop();
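
The filter setup above resolves the configured principal against the local
hostname. A self-contained sketch of that resolution (hostname and realm are
placeholders):

    import org.apache.hadoop.security.SecurityUtil;

    public class PrincipalResolutionSketch {
      public static void main(String[] args) throws Exception {
        // getServerPrincipal() substitutes the given hostname for the _HOST
        // pattern in the configured principal; values here are placeholders.
        String resolved = SecurityUtil.getServerPrincipal(
            "HTTP/_HOST@EXAMPLE.COM", "nn01.example.com");
        System.out.println(resolved); // HTTP/nn01.example.com@EXAMPLE.COM
      }
    }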
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java?rev=1334216&r1=1334215&r2=1334216&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java Fri May 4 21:58:44 2012
@@ -25,8 +25,10 @@ import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;
import java.util.Collection;
import java.util.Date;
+import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
+import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -44,6 +46,7 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.fs.FileSystem;
import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HAUtil;
import org.apache.hadoop.hdfs.NameNodeProxies;
@@ -63,9 +66,9 @@ import org.apache.hadoop.ipc.RemoteExcep
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.source.JvmMetrics;
import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.Krb5AndCertsSslSocketConnector;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.util.Daemon;
@@ -108,7 +111,6 @@ public class SecondaryNameNode implement
private volatile boolean shouldRun;
private HttpServer infoServer;
private int infoPort;
- private int imagePort;
private String infoBindAddress;
private Collection<URI> checkpointDirs;
@@ -229,63 +231,47 @@ public class SecondaryNameNode implement
// Initialize other scheduling parameters from the configuration
checkpointConf = new CheckpointConf(conf);
-
+
// initialize the webserver for uploading files.
- // Kerberized SSL servers must be run from the host principal...
- UserGroupInformation httpUGI =
- UserGroupInformation.loginUserFromKeytabAndReturnUGI(
- SecurityUtil.getServerPrincipal(conf
- .get(DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
- infoBindAddress),
- conf.get(DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY));
- try {
- infoServer = httpUGI.doAs(new PrivilegedExceptionAction<HttpServer>() {
- @Override
- public HttpServer run() throws IOException, InterruptedException {
- LOG.info("Starting web server as: " +
- UserGroupInformation.getCurrentUser().getUserName());
-
- int tmpInfoPort = infoSocAddr.getPort();
- infoServer = new HttpServer("secondary", infoBindAddress,
tmpInfoPort,
- tmpInfoPort == 0, conf,
- new AccessControlList(conf.get(DFS_ADMIN, " ")));
-
- if(UserGroupInformation.isSecurityEnabled()) {
- SecurityUtil.initKrb5CipherSuites();
- InetSocketAddress secInfoSocAddr =
- NetUtils.createSocketAddr(infoBindAddress + ":"+ conf.getInt(
- DFS_NAMENODE_SECONDARY_HTTPS_PORT_KEY,
- DFS_NAMENODE_SECONDARY_HTTPS_PORT_DEFAULT));
- imagePort = secInfoSocAddr.getPort();
- infoServer.addSslListener(secInfoSocAddr, conf, false, true);
+ int tmpInfoPort = infoSocAddr.getPort();
+ infoServer = new HttpServer("secondary", infoBindAddress, tmpInfoPort,
+ tmpInfoPort == 0, conf,
+ new AccessControlList(conf.get(DFS_ADMIN, "
"))) {
+ {
+ if (UserGroupInformation.isSecurityEnabled()) {
+ Map<String, String> params = new HashMap<String, String>();
+          String principalInConf = conf.get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_INTERNAL_SPENGO_USER_NAME_KEY);
+          if (principalInConf != null && !principalInConf.isEmpty()) {
+            params.put("kerberos.principal",
+                SecurityUtil.getServerPrincipal(principalInConf, infoSocAddr.getHostName()));
}
-
- infoServer.setAttribute("secondary.name.node",
SecondaryNameNode.this);
- infoServer.setAttribute("name.system.image", checkpointImage);
- infoServer.setAttribute(JspHelper.CURRENT_CONF, conf);
- infoServer.addInternalServlet("getimage", "/getimage",
- GetImageServlet.class, true);
- infoServer.start();
- return infoServer;
+          String httpKeytab = conf.get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KEYTAB_FILE_KEY);
+ if (httpKeytab != null && !httpKeytab.isEmpty()) {
+ params.put("kerberos.keytab", httpKeytab);
+ }
+ params.put(AuthenticationFilter.AUTH_TYPE, "kerberos");
+
+          defineFilter(webAppContext, SPNEGO_FILTER, AuthenticationFilter.class.getName(),
+              params, null);
}
- });
- } catch (InterruptedException e) {
- throw new RuntimeException(e);
- }
-
+ }
+ };
+ infoServer.setAttribute("secondary.name.node", this);
+ infoServer.setAttribute("name.system.image", checkpointImage);
+ infoServer.setAttribute(JspHelper.CURRENT_CONF, conf);
+ infoServer.addInternalServlet("getimage", "/getimage",
+ GetImageServlet.class, true);
+ infoServer.start();
+
LOG.info("Web server init done");
// The web-server port can be ephemeral... ensure we have the correct info
infoPort = infoServer.getPort();
- if (!UserGroupInformation.isSecurityEnabled()) {
- imagePort = infoPort;
- }
-
-    conf.set(DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, infoBindAddress + ":" +infoPort);
-    LOG.info("Secondary Web-server up at: " + infoBindAddress + ":" +infoPort);
-    LOG.info("Secondary image servlet up at: " + infoBindAddress + ":" + imagePort);
+
+    conf.set(DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, infoBindAddress + ":" + infoPort);
+    LOG.info("Secondary Web-server up at: " + infoBindAddress + ":" + infoPort);
LOG.info("Checkpoint Period :" + checkpointConf.getPeriod() + " secs " +
- "(" + checkpointConf.getPeriod()/60 + " min)");
+ "(" + checkpointConf.getPeriod() / 60 + " min)");
LOG.info("Log Size Trigger :" + checkpointConf.getTxnCount() + " txns");
}
@@ -434,7 +420,7 @@ public class SecondaryNameNode implement
throw new IOException("This is not a DFS");
}
- String configuredAddress = DFSUtil.getInfoServer(null, conf, true);
+ String configuredAddress = DFSUtil.getInfoServer(null, conf, false);
String address = DFSUtil.substituteForWildcardAddress(configuredAddress,
fsName.getHost());
LOG.debug("Will connect to NameNode at HTTP address: " + address);
@@ -446,7 +432,7 @@ public class SecondaryNameNode implement
* for image transfers
*/
private InetSocketAddress getImageListenAddress() {
- return new InetSocketAddress(infoBindAddress, imagePort);
+ return new InetSocketAddress(infoBindAddress, infoPort);
}
/**
@@ -507,7 +493,7 @@ public class SecondaryNameNode implement
/**
- * @param argv The parameters passed to this program.
+ * @param opts The parameters passed to this program.
* @exception Exception if the filesystem does not exist.
* @return 0 on success, non zero on error.
*/
@@ -709,7 +695,7 @@ public class SecondaryNameNode implement
* Construct a checkpoint image.
* @param conf Node configuration.
* @param imageDirs URIs of storage for image.
- * @param editDirs URIs of storage for edit logs.
+ * @param editsDirs URIs of storage for edit logs.
* @throws IOException If storage cannot be access.
*/
CheckpointStorage(Configuration conf,
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java?rev=1334216&r1=1334215&r2=1334216&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/TransferFsImage.java Fri May 4 21:58:44 2012
@@ -201,19 +201,17 @@ public class TransferFsImage {
String queryString, List<File> localPaths,
NNStorage dstStorage, boolean getChecksum) throws IOException {
byte[] buf = new byte[HdfsConstants.IO_FILE_BUFFER_SIZE];
-    String proto = UserGroupInformation.isSecurityEnabled() ? "https://" : "http://";
- StringBuilder str = new StringBuilder(proto+nnHostPort+"/getimage?");
- str.append(queryString);
+ String str = "http://" + nnHostPort + "/getimage?" + queryString;
+ LOG.info("Opening connection to " + str);
//
// open connection to remote server
//
- URL url = new URL(str.toString());
-
- // Avoid Krb bug with cross-realm hosts
- SecurityUtil.fetchServiceTicket(url);
- HttpURLConnection connection = (HttpURLConnection) url.openConnection();
-
+ URL url = new URL(str);
+
+ HttpURLConnection connection = (HttpURLConnection)
+ SecurityUtil.openSecureHttpConnection(url);
+
if (connection.getResponseCode() != HttpURLConnection.HTTP_OK) {
throw new HttpGetFailedException(
"Image transfer servlet at " + url +
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/BootstrapStandby.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/BootstrapStandby.java?rev=1334216&r1=1334215&r2=1334216&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/BootstrapStandby.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/BootstrapStandby.java Fri May 4 21:58:44 2012
@@ -95,7 +95,6 @@ public class BootstrapStandby implements
static final int ERR_CODE_LOGS_UNAVAILABLE = 6;
public int run(String[] args) throws Exception {
- SecurityUtil.initKrb5CipherSuites();
parseArgs(args);
parseConfAndFindOtherNN();
NameNode.checkAllowFormat(conf);
@@ -322,7 +321,7 @@ public class BootstrapStandby implements
"Could not determine valid IPC address for other NameNode (%s)" +
", got: %s", otherNNId, otherIpcAddr);
- otherHttpAddr = DFSUtil.getInfoServer(null, otherNode, true);
+ otherHttpAddr = DFSUtil.getInfoServer(null, otherNode, false);
otherHttpAddr = DFSUtil.substituteForWildcardAddress(otherHttpAddr,
otherIpcAddr.getHostName());
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StandbyCheckpointer.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StandbyCheckpointer.java?rev=1334216&r1=1334215&r2=1334216&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StandbyCheckpointer.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/StandbyCheckpointer.java Fri May 4 21:58:44 2012
@@ -92,7 +92,7 @@ public class StandbyCheckpointer {
}
private String getHttpAddress(Configuration conf) {
- String configuredAddr = DFSUtil.getInfoServer(null, conf, true);
+ String configuredAddr = DFSUtil.getInfoServer(null, conf, false);
// Use the hostname from the RPC address as a default, in case
// the HTTP address is configured to 0.0.0.0.
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java?rev=1334216&r1=1334215&r2=1334216&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java Fri May 4 21:58:44 2012
@@ -504,7 +504,7 @@ public class DFSAdmin extends FsShell {
*/
public int fetchImage(String[] argv, int idx) throws IOException {
String infoServer = DFSUtil.getInfoServer(
- HAUtil.getAddressOfActive(getDFS()), getConf(), true);
+ HAUtil.getAddressOfActive(getDFS()), getConf(), false);
TransferFsImage.downloadMostRecentImageToDirectory(infoServer,
new File(argv[idx]));
return 0;
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java?rev=1334216&r1=1334215&r2=1334216&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSck.java Fri May 4 21:58:44 2012
@@ -153,8 +153,7 @@ public class DFSck extends Configured im
url.append("&startblockafter=").append(String.valueOf(cookie));
}
URL path = new URL(url.toString());
- SecurityUtil.fetchServiceTicket(path);
- URLConnection connection = path.openConnection();
+ URLConnection connection = SecurityUtil.openSecureHttpConnection(path);
InputStream stream = connection.getInputStream();
BufferedReader input = new BufferedReader(new InputStreamReader(
stream, "UTF-8"));
@@ -222,16 +221,11 @@ public class DFSck extends Configured im
return null;
}
- return DFSUtil.getInfoServer(HAUtil.getAddressOfActive(fs), conf, true);
+ return DFSUtil.getInfoServer(HAUtil.getAddressOfActive(fs), conf, false);
}
private int doWork(final String[] args) throws IOException {
- String proto = "http://";
- if (UserGroupInformation.isSecurityEnabled()) {
- SecurityUtil.initKrb5CipherSuites();
- proto = "https://";
- }
- final StringBuilder url = new StringBuilder(proto);
+ final StringBuilder url = new StringBuilder("http://");
String namenodeAddress = getCurrentNamenodeAddress();
if (namenodeAddress == null) {
@@ -279,8 +273,7 @@ public class DFSck extends Configured im
return listCorruptFileBlocks(dir, url.toString());
}
URL path = new URL(url.toString());
- SecurityUtil.fetchServiceTicket(path);
- URLConnection connection = path.openConnection();
+ URLConnection connection = SecurityUtil.openSecureHttpConnection(path);
InputStream stream = connection.getInputStream();
BufferedReader input = new BufferedReader(new InputStreamReader(
stream, "UTF-8"));
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java?rev=1334216&r1=1334215&r2=1334216&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java Fri May 4 21:58:44 2012
@@ -72,11 +72,6 @@ public class DelegationTokenFetcher {
private static final String RENEW = "renew";
private static final String PRINT = "print";
- static {
- // Enable Kerberos sockets
- System.setProperty("https.cipherSuites", "TLS_KRB5_WITH_3DES_EDE_CBC_SHA");
- }
-
private static void printUsage(PrintStream err) throws IOException {
err.println("fetchdt retrieves delegation tokens from the NameNode");
err.println();
@@ -106,7 +101,7 @@ public class DelegationTokenFetcher {
final Configuration conf = new HdfsConfiguration();
Options fetcherOptions = new Options();
fetcherOptions.addOption(WEBSERVICE, true,
- "HTTPS url to reach the NameNode at");
+ "HTTP url to reach the NameNode at");
fetcherOptions.addOption(RENEWER, true,
"Name of the delegation token renewer");
fetcherOptions.addOption(CANCEL, false, "cancel the token");
@@ -224,8 +219,7 @@ public class DelegationTokenFetcher {
}
URL remoteURL = new URL(url.toString());
- SecurityUtil.fetchServiceTicket(remoteURL);
-      URLConnection connection = URLUtils.openConnection(remoteURL);
+      URLConnection connection = SecurityUtil.openSecureHttpConnection(remoteURL);
InputStream in = connection.getInputStream();
Credentials ts = new Credentials();
dis = new DataInputStream(in);
@@ -264,7 +258,7 @@ public class DelegationTokenFetcher {
try {
URL url = new URL(buf.toString());
-      SecurityUtil.fetchServiceTicket(url);
+      connection = (HttpURLConnection) SecurityUtil.openSecureHttpConnection(url);
       connection = (HttpURLConnection)URLUtils.openConnection(url);
if (connection.getResponseCode() != HttpURLConnection.HTTP_OK) {
throw new IOException("Error renewing token: " +
@@ -358,8 +352,7 @@ public class DelegationTokenFetcher {
HttpURLConnection connection=null;
try {
URL url = new URL(buf.toString());
- SecurityUtil.fetchServiceTicket(url);
-      connection = (HttpURLConnection)URLUtils.openConnection(url);
+      connection = (HttpURLConnection) SecurityUtil.openSecureHttpConnection(url);
if (connection.getResponseCode() != HttpURLConnection.HTTP_OK) {
throw new IOException("Error cancelling token: " +
connection.getResponseMessage());
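
With the static Kerberos-cipher hack removed above, fetchdt talks to the
NameNode over plain HTTP. A usage sketch (host, port, and output path are
placeholders):

    $ hdfs fetchdt --webservice http://nn.example.com:50070 /tmp/nn.token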
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml?rev=1334216&r1=1334215&r2=1334216&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml Fri May 4 21:58:44 2012
@@ -858,4 +858,15 @@
</description>
</property>
+<property>
+ <name>dfs.namenode.kerberos.internal.spnego.principal</name>
+ <value>${dfs.web.authentication.kerberos.principal}</value>
+</property>
+
+<property>
+ <name>dfs.secondary.namenode.kerberos.internal.spnego.principal</name>
+ <value>${dfs.web.authentication.kerberos.principal}</value>
+</property>
+
+
</configuration>