hadoop git commit: HADOOP-12082. Support multiple authentication schemes via AuthenticationFilter. Contributed by Hrishikesh Gadre.

2016-10-25 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 6bb23a14b -> b913b62aa


HADOOP-12082. Support multiple authentication schemes via AuthenticationFilter. 
Contributed by Hrishikesh Gadre.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b913b62a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b913b62a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b913b62a

Branch: refs/heads/branch-2
Commit: b913b62aafed8ac5638e858fa51d5943ce757196
Parents: 6bb23a1
Author: Benoy Antony <be...@apache.org>
Authored: Tue Oct 25 08:30:02 2016 -0700
Committer: Benoy Antony <be...@apache.org>
Committed: Tue Oct 25 08:30:02 2016 -0700

--
 hadoop-common-project/hadoop-auth/pom.xml   |  24 ++
 .../client/KerberosAuthenticator.java   |   8 +-
 .../server/AuthenticationFilter.java|  47 ++-
 .../server/AuthenticationHandler.java   |   2 +-
 .../server/AuthenticationHandlerUtil.java   | 105 ++
 .../server/CompositeAuthenticationHandler.java  |  30 ++
 .../authentication/server/HttpConstants.java|  55 +++
 .../server/LdapAuthenticationHandler.java   | 339 +++
 .../MultiSchemeAuthenticationHandler.java   | 209 
 .../authentication/server/package-info.java |  27 ++
 .../src/site/markdown/Configuration.md  | 137 
 .../client/TestKerberosAuthenticator.java   |  71 +++-
 .../authentication/server/LdapConstants.java|  31 ++
 .../server/TestLdapAuthenticationHandler.java   | 159 +
 .../TestMultiSchemeAuthenticationHandler.java   | 189 +++
 .../DelegationTokenAuthenticationFilter.java|   9 +-
 .../DelegationTokenAuthenticationHandler.java   |  25 +-
 ...emeDelegationTokenAuthenticationHandler.java | 182 ++
 hadoop-project/pom.xml  |   3 +
 19 files changed, 1630 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b913b62a/hadoop-common-project/hadoop-auth/pom.xml
--
diff --git a/hadoop-common-project/hadoop-auth/pom.xml 
b/hadoop-common-project/hadoop-auth/pom.xml
index e138382..1f702b2 100644
--- a/hadoop-common-project/hadoop-auth/pom.xml
+++ b/hadoop-common-project/hadoop-auth/pom.xml
@@ -154,6 +154,30 @@
       <artifactId>curator-test</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.directory.server</groupId>
+      <artifactId>apacheds-server-integ</artifactId>
+      <version>${apacheds.version}</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.directory.api</groupId>
+          <artifactId>api-ldap-schema-data</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.directory.server</groupId>
+      <artifactId>apacheds-core-integ</artifactId>
+      <version>${apacheds.version}</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.directory.api</groupId>
+          <artifactId>api-ldap-schema-data</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
   </dependencies>
 
   <build>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b913b62a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
index a69ee46..ceec927 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
@@ -14,6 +14,7 @@
 package org.apache.hadoop.security.authentication.client;
 
 import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.security.authentication.server.HttpConstants;
 import org.apache.hadoop.security.authentication.util.AuthToken;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.ietf.jgss.GSSContext;
@@ -57,17 +58,18 @@ public class KerberosAuthenticator implements Authenticator {
 
   /**
    * HTTP header used by the SPNEGO server endpoint during an authentication sequence.
    */
-  public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
+  public static final String WWW_AUTHENTICATE =
+      HttpConstants.WWW_AUTHENTICATE_HEADER;
 
   /**
    * HTTP header used by the SPNEGO client endpoint during an authentication sequence.
    */
-  public static final String AUTHORIZATION = "Authorization";
+  public static final String AUTHORIZATION = HttpConstants.AUTHORIZATION_HEADER;
 
   /**
    * HTTP header prefix used by the SPNEGO client/server endpoints during an authentication sequence.
    */
-  public static final String NEGOTIATE = "Negotiate";
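
At a high level, HADOOP-12082 lets a single AuthenticationFilter endpoint accept several HTTP
authentication schemes (e.g. Negotiate for Kerberos, Basic for LDAP) and route each request to
the handler registered for the scheme named in its Authorization header. A minimal,
self-contained sketch of that dispatch pattern follows; the class and method names are
hypothetical, and the MultiSchemeAuthenticationHandler added by this patch differs in detail.

import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

// Illustrative only: pick a per-scheme handler based on the scheme token
// that prefixes the Authorization header value.
public class SchemeDispatcher {

  /** Hypothetical per-scheme authenticator. */
  public interface SchemeHandler {
    String authenticate(String credentials);
  }

  private final Map<String, SchemeHandler> handlers = new HashMap<>();

  public void register(String scheme, SchemeHandler handler) {
    handlers.put(scheme.toLowerCase(Locale.ENGLISH), handler);
  }

  // Returns the authenticated principal, or null to signal that the caller
  // should answer 401 with one WWW-Authenticate header per supported scheme.
  public String authenticate(String authorizationHeader) {
    if (authorizationHeader == null) {
      return null;
    }
    int space = authorizationHeader.indexOf(' ');
    String scheme = (space < 0 ? authorizationHeader
        : authorizationHeader.substring(0, space)).toLowerCase(Locale.ENGLISH);
    SchemeHandler handler = handlers.get(scheme);
    if (handler == null) {
      return null;
    }
    String credentials = space < 0 ? "" : authorizationHeader.substring(space + 1);
    return handler.authenticate(credentials);
  }
}

With a Kerberos handler registered under "negotiate" and an LDAP handler under "basic", the
same endpoint can serve SPNEGO-capable clients and plain curl -u clients alike.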

hadoop git commit: HADOOP-12082 Support multiple authentication schemes via AuthenticationFilter. Contributed by Hrishikesh Gadre.

2016-10-21 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 b82364759 -> 653ceab7d


HADOOP-12082 Support multiple authentication schemes via AuthenticationFilter. 
Contributed by Hrishikesh Gadre.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/653ceab7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/653ceab7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/653ceab7

Branch: refs/heads/branch-2.8
Commit: 653ceab7d35670d488e1dfc7500ca281bc4327e0
Parents: b823647
Author: Benoy Antony <be...@apache.org>
Authored: Fri Oct 21 19:43:22 2016 -0700
Committer: Benoy Antony <be...@apache.org>
Committed: Fri Oct 21 19:43:22 2016 -0700

--
 hadoop-common-project/hadoop-auth/pom.xml   |  24 ++
 .../client/KerberosAuthenticator.java   |   8 +-
 .../server/AuthenticationFilter.java|  47 ++-
 .../server/AuthenticationHandler.java   |   2 +-
 .../server/AuthenticationHandlerUtil.java   | 105 ++
 .../server/CompositeAuthenticationHandler.java  |  30 ++
 .../authentication/server/HttpConstants.java|  55 +++
 .../server/LdapAuthenticationHandler.java   | 339 +++
 .../MultiSchemeAuthenticationHandler.java   | 209 
 .../authentication/server/package-info.java |  27 ++
 .../src/site/markdown/Configuration.md  | 137 
 .../client/TestKerberosAuthenticator.java   |  71 +++-
 .../authentication/server/LdapConstants.java|  31 ++
 .../server/TestLdapAuthenticationHandler.java   | 159 +
 .../TestMultiSchemeAuthenticationHandler.java   | 189 +++
 .../DelegationTokenAuthenticationFilter.java|   9 +-
 .../DelegationTokenAuthenticationHandler.java   |  25 +-
 ...emeDelegationTokenAuthenticationHandler.java | 182 ++
 hadoop-project/pom.xml  |   3 +
 19 files changed, 1630 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/653ceab7/hadoop-common-project/hadoop-auth/pom.xml
--
diff --git a/hadoop-common-project/hadoop-auth/pom.xml 
b/hadoop-common-project/hadoop-auth/pom.xml
index f59cf84..9d99a05 100644
--- a/hadoop-common-project/hadoop-auth/pom.xml
+++ b/hadoop-common-project/hadoop-auth/pom.xml
@@ -154,6 +154,30 @@
       <artifactId>curator-test</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.directory.server</groupId>
+      <artifactId>apacheds-server-integ</artifactId>
+      <version>${apacheds.version}</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.directory.api</groupId>
+          <artifactId>api-ldap-schema-data</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.directory.server</groupId>
+      <artifactId>apacheds-core-integ</artifactId>
+      <version>${apacheds.version}</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.directory.api</groupId>
+          <artifactId>api-ldap-schema-data</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
   </dependencies>
 
   <build>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/653ceab7/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
index a69ee46..ceec927 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
@@ -14,6 +14,7 @@
 package org.apache.hadoop.security.authentication.client;
 
 import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.security.authentication.server.HttpConstants;
 import org.apache.hadoop.security.authentication.util.AuthToken;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.ietf.jgss.GSSContext;
@@ -57,17 +58,18 @@ public class KerberosAuthenticator implements Authenticator {
 
   /**
    * HTTP header used by the SPNEGO server endpoint during an authentication sequence.
    */
-  public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
+  public static final String WWW_AUTHENTICATE =
+      HttpConstants.WWW_AUTHENTICATE_HEADER;
 
   /**
    * HTTP header used by the SPNEGO client endpoint during an authentication sequence.
    */
-  public static final String AUTHORIZATION = "Authorization";
+  public static final String AUTHORIZATION = HttpConstants.AUTHORIZATION_HEADER;
 
   /**
    * HTTP header prefix used by the SPNEGO client/server endpoints during an authentication sequence.
    */
-  public static final String NEGOTIATE = "Negotiate";

hadoop git commit: HADOOP-12082 Support multiple authentication schemes via AuthenticationFilter

2016-10-18 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/trunk d75cbc574 -> 4bca38524


HADOOP-12082 Support multiple authentication schemes via AuthenticationFilter


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4bca3852
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4bca3852
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4bca3852

Branch: refs/heads/trunk
Commit: 4bca385241c0fc8ff168c7b0f2984a7aed2c7492
Parents: d75cbc5
Author: Benoy Antony <be...@apache.org>
Authored: Tue Oct 18 18:32:01 2016 -0700
Committer: Benoy Antony <be...@apache.org>
Committed: Tue Oct 18 18:32:01 2016 -0700

--
 hadoop-common-project/hadoop-auth/pom.xml   |  42 +++
 .../client/KerberosAuthenticator.java   |   8 +-
 .../server/AuthenticationFilter.java|  47 ++-
 .../server/AuthenticationHandler.java   |   2 +-
 .../server/AuthenticationHandlerUtil.java   | 105 ++
 .../server/CompositeAuthenticationHandler.java  |  30 ++
 .../authentication/server/HttpConstants.java|  55 +++
 .../server/LdapAuthenticationHandler.java   | 339 +++
 .../MultiSchemeAuthenticationHandler.java   | 209 
 .../authentication/server/package-info.java |  27 ++
 .../src/site/markdown/Configuration.md  | 137 
 .../client/TestKerberosAuthenticator.java   |  71 +++-
 .../authentication/server/LdapConstants.java|  31 ++
 .../server/TestLdapAuthenticationHandler.java   | 159 +
 .../TestMultiSchemeAuthenticationHandler.java   | 189 +++
 .../DelegationTokenAuthenticationFilter.java|   9 +-
 .../DelegationTokenAuthenticationHandler.java   |  25 +-
 ...emeDelegationTokenAuthenticationHandler.java | 182 ++
 hadoop-project/pom.xml  |   4 +
 19 files changed, 1649 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4bca3852/hadoop-common-project/hadoop-auth/pom.xml
--
diff --git a/hadoop-common-project/hadoop-auth/pom.xml 
b/hadoop-common-project/hadoop-auth/pom.xml
index 4cbdc49..0b37715 100644
--- a/hadoop-common-project/hadoop-auth/pom.xml
+++ b/hadoop-common-project/hadoop-auth/pom.xml
@@ -135,6 +135,48 @@
       <groupId>org.apache.kerby</groupId>
       <artifactId>kerb-simplekdc</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.directory.server</groupId>
+      <artifactId>apacheds-core</artifactId>
+      <version>${apacheds.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.directory.server</groupId>
+      <artifactId>apacheds-protocol-ldap</artifactId>
+      <version>${apacheds.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.directory.server</groupId>
+      <artifactId>apacheds-ldif-partition</artifactId>
+      <version>${apacheds.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.directory.api</groupId>
+      <artifactId>api-ldap-codec-core</artifactId>
+      <version>${ldap-api.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.directory.api</groupId>
+      <artifactId>api-ldap-model</artifactId>
+      <version>${ldap-api.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.directory.server</groupId>
+      <artifactId>apacheds-server-integ</artifactId>
+      <version>${apacheds.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.directory.server</groupId>
+      <artifactId>apacheds-core-integ</artifactId>
+      <version>${apacheds.version}</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4bca3852/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
index a69ee46..ceec927 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
@@ -14,6 +14,7 @@
 package org.apache.hadoop.security.authentication.client;
 
 import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.security.authentication.server.HttpConstants;
 import org.apache.hadoop.security.authentication.util.AuthToken;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.ietf.jgss.GSSContext;
@@ -57,17 +58,18 @@ public class KerberosAuthenticator implements Authenticator {
 
   /**
    * HTTP header used by the SPNEGO server endpoint during an authentication sequence.
    */
-  public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
+  public static final String WWW_AUTHENTICATE =
+      HttpConstants.WWW_AUTHENTICATE_HEADER;
 
   /**
    * HTTP header used by the SPNEGO client endpoint during an authentication sequence.
    */
-  public static final String AUTHORIZATION = "Authorization";

hadoop git commit: HDFS-10735 Distcp using webhdfs on secure HA clusters fails with StandbyException

2016-10-14 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 6e0b55fe7 -> 156b92e36


HDFS-10735 Distcp using webhdfs on secure HA clusters fails with 
StandbyException

(cherry picked from commit 701c27a7762294e1a5fb2b3ac81f5534aa37f667)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/156b92e3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/156b92e3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/156b92e3

Branch: refs/heads/branch-2
Commit: 156b92e36bb04252a1c3ee7b334809021d963254
Parents: 6e0b55f
Author: Benoy Antony <be...@apache.org>
Authored: Fri Oct 14 10:26:39 2016 -0700
Committer: Benoy Antony <be...@apache.org>
Committed: Fri Oct 14 10:29:42 2016 -0700

--
 .../java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java   | 8 
 1 file changed, 8 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/156b92e3/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index 92e6901..7f9a02e 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -90,6 +90,7 @@ import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryUtils;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.ipc.StandbyException;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.SecurityUtil;
@@ -479,6 +480,13 @@ public class WebHdfsFileSystem extends FileSystem
   }
 
   IOException re = JsonUtilClient.toRemoteException(m);
+
+  //check if exception is due to communication with a Standby name node
+  if (re.getMessage() != null && re.getMessage().endsWith(
+  StandbyException.class.getSimpleName())) {
+LOG.trace("Detected StandbyException", re);
+throw new IOException(re);
+  }
   // extract UGI-related exceptions and unwrap InvalidToken
   // the NN mangles these exceptions but the DN does not and may need
   // to re-fetch a token if either report the token is expired
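
The functional change is the early rethrow: a RemoteException whose message names
StandbyException is surfaced as a plain IOException, so the client-side failover retry policy
can send the retry to the other NameNode instead of failing the operation. Restated standalone
(an illustrative sketch; the class name is matched as a string literal here only to keep the
example self-contained, equivalent to StandbyException.class.getSimpleName()):

import java.io.IOException;

final class StandbyCheck {
  // Mirrors the check in the diff above.
  static void rethrowIfStandby(IOException remote) throws IOException {
    String msg = remote.getMessage();
    if (msg != null && msg.endsWith("StandbyException")) {
      throw new IOException(remote);
    }
  }
}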





hadoop git commit: HDFS-10735 Distcp using webhdfs on secure HA clusters fails with StandbyException

2016-10-14 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/trunk 8a9f6635a -> 701c27a77


HDFS-10735 Distcp using webhdfs on secure HA clusters fails with 
StandbyException


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/701c27a7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/701c27a7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/701c27a7

Branch: refs/heads/trunk
Commit: 701c27a7762294e1a5fb2b3ac81f5534aa37f667
Parents: 8a9f663
Author: Benoy Antony <be...@apache.org>
Authored: Fri Oct 14 10:26:39 2016 -0700
Committer: Benoy Antony <be...@apache.org>
Committed: Fri Oct 14 10:26:39 2016 -0700

--
 .../java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java   | 8 
 1 file changed, 8 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/701c27a7/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index 19de5b5..af43d56 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -91,6 +91,7 @@ import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryUtils;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.ipc.StandbyException;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.SecurityUtil;
@@ -471,6 +472,13 @@ public class WebHdfsFileSystem extends FileSystem
   }
 
   IOException re = JsonUtilClient.toRemoteException(m);
+
+  //check if exception is due to communication with a Standby name node
+  if (re.getMessage() != null && re.getMessage().endsWith(
+  StandbyException.class.getSimpleName())) {
+LOG.trace("Detected StandbyException", re);
+throw new IOException(re);
+  }
   // extract UGI-related exceptions and unwrap InvalidToken
   // the NN mangles these exceptions but the DN does not and may need
   // to re-fetch a token if either report the token is expired





hadoop git commit: HADOOP-12929. JWTRedirectAuthenticationHandler must accommodate null expiration time. Contributed by Larry McCay.

2016-03-21 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 29872506b -> 1b263ea49


HADOOP-12929. JWTRedirectAuthenticationHandler must accommodate null expiration 
time. Contributed by Larry McCay.

(cherry picked from commit e7ed05e4f5b0421e93f2f2cadc5beda3d28b9911)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1b263ea4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1b263ea4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1b263ea4

Branch: refs/heads/branch-2.8
Commit: 1b263ea49056a370e2308f0f88bbb68d538e4c03
Parents: 2987250
Author: Benoy Antony <be...@apache.org>
Authored: Mon Mar 21 13:19:43 2016 -0700
Committer: Benoy Antony <be...@apache.org>
Committed: Mon Mar 21 13:31:55 2016 -0700

--
 .../JWTRedirectAuthenticationHandler.java   | 16 ++---
 .../TestJWTRedirectAuthentictionHandler.java| 37 
 2 files changed, 33 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1b263ea4/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/JWTRedirectAuthenticationHandler.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/JWTRedirectAuthenticationHandler.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/JWTRedirectAuthenticationHandler.java
index cbe923b..61f5b9e 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/JWTRedirectAuthenticationHandler.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/JWTRedirectAuthenticationHandler.java
@@ -26,20 +26,10 @@ import java.util.List;
 import java.util.Properties;
 import java.text.ParseException;
 
-import java.io.ByteArrayInputStream;
-import java.io.UnsupportedEncodingException;
-import java.security.PublicKey;
-import java.security.cert.CertificateFactory;
-import java.security.cert.X509Certificate;
-import java.security.cert.CertificateException;
 import java.security.interfaces.RSAPublicKey;
 
-import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
-import org.apache.hadoop.security.authentication.server.AltKerberosAuthenticationHandler;
-import org.apache.hadoop.security.authentication.server.AuthenticationToken;
 import org.apache.hadoop.security.authentication.util.CertificateUtil;
-import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -83,7 +73,8 @@ public class JWTRedirectAuthenticationHandler extends
   private static Logger LOG = LoggerFactory
   .getLogger(JWTRedirectAuthenticationHandler.class);
 
-  public static final String AUTHENTICATION_PROVIDER_URL = "authentication.provider.url";
+  public static final String AUTHENTICATION_PROVIDER_URL =
+  "authentication.provider.url";
   public static final String PUBLIC_KEY_PEM = "public.key.pem";
   public static final String EXPECTED_JWT_AUDIENCES = "expected.jwt.audiences";
   public static final String JWT_COOKIE_NAME = "jwt.cookie.name";
@@ -205,7 +196,6 @@ public class JWTRedirectAuthenticationHandler extends
   protected String getJWTFromCookie(HttpServletRequest req) {
 String serializedJWT = null;
 Cookie[] cookies = req.getCookies();
-String userName = null;
 if (cookies != null) {
   for (Cookie cookie : cookies) {
 if (cookieName.equals(cookie.getName())) {
@@ -350,7 +340,7 @@ public class JWTRedirectAuthenticationHandler extends
 boolean valid = false;
 try {
   Date expires = jwtToken.getJWTClaimsSet().getExpirationTime();
-  if (expires != null && new Date().before(expires)) {
+  if (expires == null || new Date().before(expires)) {
 LOG.debug("JWT token expiration date has been "
 + "successfully validated");
 valid = true;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1b263ea4/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestJWTRedirectAuthentictionHandler.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestJWTRedirectAuthentictionHandler.java
 
b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestJWTRedirectAuthentictionHandler.java
index 019ecb4..97a8a9d 100644
--- 
a/had
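
The behavioral fix is the switch from && to ||: a JWT that carries no expiration claim is now
treated as non-expiring rather than invalid, while a token that does carry one must still be in
the future. The rule in isolation (hypothetical helper, for illustration):

import java.util.Date;

final class ExpiryRule {
  // Valid when the token has no expiration claim, or the claim is in the future.
  static boolean isNotExpired(Date expires) {
    return expires == null || new Date().before(expires);
  }
}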

hadoop git commit: HADOOP-12929. JWTRedirectAuthenticationHandler must accommodate null expiration time. Contributed by Larry McCay.

2016-03-21 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 2ce24bf21 -> 4dd02f948


HADOOP-12929. JWTRedirectAuthenticationHandler must accommodate null expiration 
time. Contributed by Larry McCay.

(cherry picked from commit e7ed05e4f5b0421e93f2f2cadc5beda3d28b9911)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4dd02f94
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4dd02f94
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4dd02f94

Branch: refs/heads/branch-2
Commit: 4dd02f948482a4e329bfc6c8baca33517d931121
Parents: 2ce24bf
Author: Benoy Antony <be...@apache.org>
Authored: Mon Mar 21 13:19:43 2016 -0700
Committer: Benoy Antony <be...@apache.org>
Committed: Mon Mar 21 13:21:54 2016 -0700

--
 .../JWTRedirectAuthenticationHandler.java   | 16 ++---
 .../TestJWTRedirectAuthentictionHandler.java| 37 
 2 files changed, 33 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4dd02f94/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/JWTRedirectAuthenticationHandler.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/JWTRedirectAuthenticationHandler.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/JWTRedirectAuthenticationHandler.java
index cbe923b..61f5b9e 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/JWTRedirectAuthenticationHandler.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/JWTRedirectAuthenticationHandler.java
@@ -26,20 +26,10 @@ import java.util.List;
 import java.util.Properties;
 import java.text.ParseException;
 
-import java.io.ByteArrayInputStream;
-import java.io.UnsupportedEncodingException;
-import java.security.PublicKey;
-import java.security.cert.CertificateFactory;
-import java.security.cert.X509Certificate;
-import java.security.cert.CertificateException;
 import java.security.interfaces.RSAPublicKey;
 
-import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
-import org.apache.hadoop.security.authentication.server.AltKerberosAuthenticationHandler;
-import org.apache.hadoop.security.authentication.server.AuthenticationToken;
 import org.apache.hadoop.security.authentication.util.CertificateUtil;
-import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -83,7 +73,8 @@ public class JWTRedirectAuthenticationHandler extends
   private static Logger LOG = LoggerFactory
   .getLogger(JWTRedirectAuthenticationHandler.class);
 
-  public static final String AUTHENTICATION_PROVIDER_URL = "authentication.provider.url";
+  public static final String AUTHENTICATION_PROVIDER_URL =
+  "authentication.provider.url";
   public static final String PUBLIC_KEY_PEM = "public.key.pem";
   public static final String EXPECTED_JWT_AUDIENCES = "expected.jwt.audiences";
   public static final String JWT_COOKIE_NAME = "jwt.cookie.name";
@@ -205,7 +196,6 @@ public class JWTRedirectAuthenticationHandler extends
   protected String getJWTFromCookie(HttpServletRequest req) {
 String serializedJWT = null;
 Cookie[] cookies = req.getCookies();
-String userName = null;
 if (cookies != null) {
   for (Cookie cookie : cookies) {
 if (cookieName.equals(cookie.getName())) {
@@ -350,7 +340,7 @@ public class JWTRedirectAuthenticationHandler extends
 boolean valid = false;
 try {
   Date expires = jwtToken.getJWTClaimsSet().getExpirationTime();
-  if (expires != null && new Date().before(expires)) {
+  if (expires == null || new Date().before(expires)) {
 LOG.debug("JWT token expiration date has been "
 + "successfully validated");
 valid = true;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4dd02f94/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestJWTRedirectAuthentictionHandler.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestJWTRedirectAuthentictionHandler.java
 
b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestJWTRedirectAuthentictionHandler.java
index 019ecb4..97a8a9d 100644
--- 
a/had

hadoop git commit: HADOOP-12929. JWTRedirectAuthenticationHandler must accommodate null expiration time. Contributed by Larry McCay.

2016-03-21 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/trunk 680716f31 -> e7ed05e4f


HADOOP-12929. JWTRedirectAuthenticationHandler must accommodate null expiration 
time. Contributed by Larry McCay.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e7ed05e4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e7ed05e4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e7ed05e4

Branch: refs/heads/trunk
Commit: e7ed05e4f5b0421e93f2f2cadc5beda3d28b9911
Parents: 680716f
Author: Benoy Antony <be...@apache.org>
Authored: Mon Mar 21 13:19:43 2016 -0700
Committer: Benoy Antony <be...@apache.org>
Committed: Mon Mar 21 13:19:43 2016 -0700

--
 .../JWTRedirectAuthenticationHandler.java   | 16 ++---
 .../TestJWTRedirectAuthentictionHandler.java| 37 
 2 files changed, 33 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e7ed05e4/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/JWTRedirectAuthenticationHandler.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/JWTRedirectAuthenticationHandler.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/JWTRedirectAuthenticationHandler.java
index cbe923b..61f5b9e 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/JWTRedirectAuthenticationHandler.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/JWTRedirectAuthenticationHandler.java
@@ -26,20 +26,10 @@ import java.util.List;
 import java.util.Properties;
 import java.text.ParseException;
 
-import java.io.ByteArrayInputStream;
-import java.io.UnsupportedEncodingException;
-import java.security.PublicKey;
-import java.security.cert.CertificateFactory;
-import java.security.cert.X509Certificate;
-import java.security.cert.CertificateException;
 import java.security.interfaces.RSAPublicKey;
 
-import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
-import org.apache.hadoop.security.authentication.server.AltKerberosAuthenticationHandler;
-import org.apache.hadoop.security.authentication.server.AuthenticationToken;
 import org.apache.hadoop.security.authentication.util.CertificateUtil;
-import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -83,7 +73,8 @@ public class JWTRedirectAuthenticationHandler extends
   private static Logger LOG = LoggerFactory
   .getLogger(JWTRedirectAuthenticationHandler.class);
 
-  public static final String AUTHENTICATION_PROVIDER_URL = "authentication.provider.url";
+  public static final String AUTHENTICATION_PROVIDER_URL =
+  "authentication.provider.url";
   public static final String PUBLIC_KEY_PEM = "public.key.pem";
   public static final String EXPECTED_JWT_AUDIENCES = "expected.jwt.audiences";
   public static final String JWT_COOKIE_NAME = "jwt.cookie.name";
@@ -205,7 +196,6 @@ public class JWTRedirectAuthenticationHandler extends
   protected String getJWTFromCookie(HttpServletRequest req) {
 String serializedJWT = null;
 Cookie[] cookies = req.getCookies();
-String userName = null;
 if (cookies != null) {
   for (Cookie cookie : cookies) {
 if (cookieName.equals(cookie.getName())) {
@@ -350,7 +340,7 @@ public class JWTRedirectAuthenticationHandler extends
 boolean valid = false;
 try {
   Date expires = jwtToken.getJWTClaimsSet().getExpirationTime();
-  if (expires != null && new Date().before(expires)) {
+  if (expires == null || new Date().before(expires)) {
 LOG.debug("JWT token expiration date has been "
 + "successfully validated");
 valid = true;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e7ed05e4/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestJWTRedirectAuthentictionHandler.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestJWTRedirectAuthentictionHandler.java
 
b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/server/TestJWTRedirectAuthentictionHandler.java
index 019ecb4..97a8a9d 100644
--- 
a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/serv

[1/3] hadoop git commit: HADOOP-12587. Hadoop AuthToken refuses to work without a maxinactive attribute in issued token. (Benoy Antony)

2016-01-14 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 3266709b5 -> 6cfeb84a3


HADOOP-12587. Hadoop AuthToken refuses to work without a maxinactive attribute 
in issued token. (Benoy Antony)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6cfeb84a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6cfeb84a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6cfeb84a

Branch: refs/heads/branch-2.8
Commit: 6cfeb84a3677665d608f5d3feb76efd25445a725
Parents: 6ceee07
Author: Benoy Antony <be...@apache.org>
Authored: Sat Jan 9 13:39:18 2016 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Thu Jan 14 14:42:04 2016 -0800

--
 .../server/AuthenticationFilter.java|  11 +-
 .../security/authentication/util/AuthToken.java |  16 +-
 .../server/TestAuthenticationFilter.java| 167 ---
 .../src/site/markdown/HttpAuthentication.md |   2 +-
 4 files changed, 161 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cfeb84a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
index e0da38b..4bdc808 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
@@ -150,7 +150,7 @@ public class AuthenticationFilter implements Filter {
* that indicates the max inactive interval of the generated token.
*/
   public static final String
-  AUTH_TOKEN_MAX_INACTIVE_INTERVAL = "token.MaxInactiveInterval";
+  AUTH_TOKEN_MAX_INACTIVE_INTERVAL = "token.max-inactive-interval";
 
   /**
* Constant for the configuration property that indicates the validity of 
the generated token.
@@ -234,9 +234,11 @@ public class AuthenticationFilter implements Filter {
 } else {
   authHandlerClassName = authHandlerName;
 }
-
 maxInactiveInterval = Long.parseLong(config.getProperty(
-AUTH_TOKEN_MAX_INACTIVE_INTERVAL, "1800")) * 1000; // 30 minutes;
+AUTH_TOKEN_MAX_INACTIVE_INTERVAL, "-1")); // By default, disable.
+if (maxInactiveInterval > 0) {
+  maxInactiveInterval *= 1000;
+}
 validity = Long.parseLong(config.getProperty(AUTH_TOKEN_VALIDITY, "36000"))
 * 1000; //10 hours
 initializeSecretProvider(filterConfig);
@@ -559,7 +561,7 @@ public class AuthenticationFilter implements Filter {
   }
   token = authHandler.authenticate(httpRequest, httpResponse);
   if (token != null && token != AuthenticationToken.ANONYMOUS) {
-if (token.getMaxInactives() != 0) {
+if (token.getMaxInactives() > 0) {
   token.setMaxInactives(System.currentTimeMillis()
   + getMaxInactiveInterval() * 1000);
 }
@@ -603,6 +605,7 @@ public class AuthenticationFilter implements Filter {
   && getMaxInactiveInterval() > 0) {
 token.setMaxInactives(System.currentTimeMillis()
 + getMaxInactiveInterval() * 1000);
+token.setExpires(token.getExpires());
 newToken = true;
   }
   if (newToken && !token.isExpired()

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6cfeb84a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/AuthToken.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/AuthToken.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/AuthToken.java
index 4fbe599..e959f65 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/AuthToken.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/AuthToken.java
@@ -39,8 +39,7 @@ public class AuthToken implements Principal {
   private static final String TYPE = "t";
 
   private final static Set<String> ATTRIBUTES =
-    new HashSet<String>(Arrays.asList(USER_NAME, PRINCIPAL,
-        MAX_INACTIVES, EXPIRES, TYPE));
+  new HashSet<>(Arrays.asList(USER_NAME, 
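
Two things change together in this patch: the property is renamed from
"token.MaxInactiveInterval" to "token.max-inactive-interval", and its default flips from 1800
seconds to -1, so inactivity tracking is disabled unless explicitly configured. A sketch of the
resulting parsing behavior (hypothetical helper, for illustration):

import java.util.Properties;

final class MaxInactiveConfig {
  static final String KEY = "token.max-inactive-interval";

  // Reads the interval in seconds; -1 (the new default) disables the feature,
  // and only positive values are converted to milliseconds.
  static long parseMillis(Properties config) {
    long interval = Long.parseLong(config.getProperty(KEY, "-1"));
    return interval > 0 ? interval * 1000 : interval;
  }
}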

[2/3] hadoop git commit: HDFS-9597. BaseReplicationPolicyTest should update data node stats after adding a data node. Contributed by Wei-Chiu Chuang.

2016-01-14 Thread benoy
HDFS-9597. BaseReplicationPolicyTest should update data node stats after adding 
a data node. Contributed by Wei-Chiu Chuang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6ceee073
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6ceee073
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6ceee073

Branch: refs/heads/branch-2.8
Commit: 6ceee073574214502e05b10b0147a34ad910936f
Parents: 5a50651
Author: Benoy Antony <be...@apache.org>
Authored: Fri Dec 25 08:55:39 2015 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Thu Jan 14 14:42:04 2016 -0800

--
 .../hdfs/server/blockmanagement/BaseReplicationPolicyTest.java | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6ceee073/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
index 6174447..7dc52fa 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
@@ -98,6 +98,8 @@ abstract public class BaseReplicationPolicyTest {
   cluster.add(dataNodes[i]);
   bm.getDatanodeManager().getHeartbeatManager().addDatanode(
   dataNodes[i]);
+  bm.getDatanodeManager().getHeartbeatManager().updateDnStat(
+  dataNodes[i]);
 }
 updateHeartbeatWithUsage();
   }
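
This test fix pairs with HDFS-9034 below: once per-StorageType stat accounting moved out of
addDatanode() into a separate updateDnStat() call, every registration path, in production code
and in tests, has to invoke both for the node to show up in the statistics. Schematically
(stand-in interface, for illustration):

interface HeartbeatStats {
  void addDatanode(Object node);
  void updateDnStat(Object node);
}

final class Registration {
  static void register(HeartbeatStats hm, Object node) {
    hm.addDatanode(node);
    hm.updateDnStat(node); // without this, stats omit the new node
  }
}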



[3/3] hadoop git commit: HDFS-9034. StorageTypeStats Metric should not count failed storage. Contributed by Surendra Singh Lilhore.

2016-01-14 Thread benoy
HDFS-9034. StorageTypeStats Metric should not count failed storage. Contributed 
by Surendra Singh Lilhore.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5a50651e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5a50651e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5a50651e

Branch: refs/heads/branch-2.8
Commit: 5a50651e5bd7dc77fd80f79c80b2388cacd117d3
Parents: 3266709
Author: Benoy Antony <be...@apache.org>
Authored: Tue Dec 22 15:28:17 2015 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Thu Jan 14 14:42:04 2016 -0800

--
 .../server/blockmanagement/DatanodeManager.java |  1 +
 .../server/blockmanagement/DatanodeStats.java   | 19 --
 .../blockmanagement/HeartbeatManager.java   |  6 +-
 .../blockmanagement/TestBlockStatsMXBean.java   | 62 
 4 files changed, 81 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5a50651e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
index 59190ee..9f878d1 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
@@ -967,6 +967,7 @@ public class DatanodeManager {
 // no need to update its timestamp
 // because its is done when the descriptor is created
 heartbeatManager.addDatanode(nodeDescr);
+heartbeatManager.updateDnStat(nodeDescr);
 incrementVersionCount(nodeReg.getSoftwareVersion());
 startDecommissioningIfExcluded(nodeDescr);
 success = true;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5a50651e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeStats.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeStats.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeStats.java
index 4c39c41..bcc9bba 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeStats.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeStats.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.blockmanagement;
 
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.DFSUtilClient;
+import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 
 import java.util.EnumMap;
 import java.util.HashSet;
@@ -61,8 +62,10 @@ class DatanodeStats {
 }
     Set<StorageType> storageTypes = new HashSet<>();
 for (DatanodeStorageInfo storageInfo : node.getStorageInfos()) {
-  statsMap.addStorage(storageInfo, node);
-  storageTypes.add(storageInfo.getStorageType());
+  if (storageInfo.getState() != DatanodeStorage.State.FAILED) {
+statsMap.addStorage(storageInfo, node);
+storageTypes.add(storageInfo.getStorageType());
+  }
 }
 for (StorageType storageType : storageTypes) {
   statsMap.addNode(storageType, node);
@@ -86,8 +89,10 @@ class DatanodeStats {
 }
     Set<StorageType> storageTypes = new HashSet<>();
 for (DatanodeStorageInfo storageInfo : node.getStorageInfos()) {
-  statsMap.subtractStorage(storageInfo, node);
-  storageTypes.add(storageInfo.getStorageType());
+  if (storageInfo.getState() != DatanodeStorage.State.FAILED) {
+statsMap.subtractStorage(storageInfo, node);
+storageTypes.add(storageInfo.getStorageType());
+  }
 }
 for (StorageType storageType : storageTypes) {
   statsMap.subtractNode(storageType, node);
@@ -202,10 +207,12 @@ class DatanodeStats {
 
 private void subtractNode(StorageType storageType,
 final DatanodeDescriptor node) {
-  StorageTypeStats storageTypeStats =
-  storageTypeStatsMap.get(storageType);
+  StorageTypeStats storageTypeStats = storageTypeStatsMap.get(storageType);
   if (storageTypeStats != null) {
 storageTypeStats.subtractNode(node);
+if (storageTypeStats.getNodesInService() == 0) {
+  storageTypeStatsMap.remove(storageType);
+}
   }
 }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/b
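
The heart of HDFS-9034 is the guard on storage state: capacity and node counts per StorageType
are accumulated only over storages that are not FAILED, and a StorageType entry is dropped once
its last in-service node is subtracted. The filter restated with stand-in types, for
illustration:

import java.util.EnumSet;
import java.util.Set;

final class StatsFilter {
  enum State { NORMAL, READ_ONLY, FAILED }
  enum StorageType { DISK, SSD, ARCHIVE }

  static final class Storage {
    final StorageType type;
    final State state;
    Storage(StorageType type, State state) {
      this.type = type;
      this.state = state;
    }
  }

  // Collect the storage types a node contributes to, skipping failed storages
  // so a dead disk no longer inflates the per-type stats.
  static Set<StorageType> liveTypes(Iterable<Storage> storages) {
    Set<StorageType> types = EnumSet.noneOf(StorageType.class);
    for (Storage s : storages) {
      if (s.state != State.FAILED) {
        types.add(s.type);
      }
    }
    return types;
  }
}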

hadoop git commit: HADOOP-12587. Hadoop AuthToken refuses to work without a maxinactive attribute in issued token. (Benoy Antony)

2016-01-09 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 80734bc34 -> 4e5f77b7f


HADOOP-12587. Hadoop AuthToken refuses to work without a maxinactive attribute 
in issued token. (Benoy Antony)

(cherry picked from commit dec8dfdfa66c37f8cc8c0900fd12f98c7529b99e)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4e5f77b7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4e5f77b7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4e5f77b7

Branch: refs/heads/branch-2
Commit: 4e5f77b7f5c9f1145743d960846233c06c304622
Parents: 80734bc
Author: Benoy Antony <be...@apache.org>
Authored: Sat Jan 9 13:39:18 2016 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Sat Jan 9 13:43:15 2016 -0800

--
 .../server/AuthenticationFilter.java|  11 +-
 .../security/authentication/util/AuthToken.java |  16 +-
 .../server/TestAuthenticationFilter.java| 167 ---
 .../src/site/markdown/HttpAuthentication.md |   2 +-
 4 files changed, 161 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e5f77b7/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
index e0da38b..4bdc808 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
@@ -150,7 +150,7 @@ public class AuthenticationFilter implements Filter {
* that indicates the max inactive interval of the generated token.
*/
   public static final String
-  AUTH_TOKEN_MAX_INACTIVE_INTERVAL = "token.MaxInactiveInterval";
+  AUTH_TOKEN_MAX_INACTIVE_INTERVAL = "token.max-inactive-interval";
 
   /**
* Constant for the configuration property that indicates the validity of 
the generated token.
@@ -234,9 +234,11 @@ public class AuthenticationFilter implements Filter {
 } else {
   authHandlerClassName = authHandlerName;
 }
-
 maxInactiveInterval = Long.parseLong(config.getProperty(
-AUTH_TOKEN_MAX_INACTIVE_INTERVAL, "1800")) * 1000; // 30 minutes;
+AUTH_TOKEN_MAX_INACTIVE_INTERVAL, "-1")); // By default, disable.
+if (maxInactiveInterval > 0) {
+  maxInactiveInterval *= 1000;
+}
 validity = Long.parseLong(config.getProperty(AUTH_TOKEN_VALIDITY, "36000"))
 * 1000; //10 hours
 initializeSecretProvider(filterConfig);
@@ -559,7 +561,7 @@ public class AuthenticationFilter implements Filter {
   }
   token = authHandler.authenticate(httpRequest, httpResponse);
   if (token != null && token != AuthenticationToken.ANONYMOUS) {
-if (token.getMaxInactives() != 0) {
+if (token.getMaxInactives() > 0) {
   token.setMaxInactives(System.currentTimeMillis()
   + getMaxInactiveInterval() * 1000);
 }
@@ -603,6 +605,7 @@ public class AuthenticationFilter implements Filter {
   && getMaxInactiveInterval() > 0) {
 token.setMaxInactives(System.currentTimeMillis()
 + getMaxInactiveInterval() * 1000);
+token.setExpires(token.getExpires());
 newToken = true;
   }
   if (newToken && !token.isExpired()

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e5f77b7/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/AuthToken.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/AuthToken.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/AuthToken.java
index 4fbe599..e959f65 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/AuthToken.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/AuthToken.java
@@ -39,8 +39,7 @@ public class AuthToken implements Principal {
   private static final String TYPE = "t";
 
   private final static Set<String> ATTRIBUTES =
-    new HashSet<String>(Arrays.asList(USER_NAME, PRINCIPAL,
-        MAX_INACTIVES, EXPIRES, TYPE));

hadoop git commit: HADOOP-12587. Hadoop AuthToken refuses to work without a maxinactive attribute in issued token. (Benoy Antony)

2016-01-09 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/trunk d7ed04758 -> dec8dfdfa


HADOOP-12587. Hadoop AuthToken refuses to work without a maxinactive attribute 
in issued token. (Benoy Antony)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/dec8dfdf
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/dec8dfdf
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/dec8dfdf

Branch: refs/heads/trunk
Commit: dec8dfdfa66c37f8cc8c0900fd12f98c7529b99e
Parents: d7ed047
Author: Benoy Antony <be...@apache.org>
Authored: Sat Jan 9 13:39:18 2016 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Sat Jan 9 13:41:18 2016 -0800

--
 .../server/AuthenticationFilter.java|  11 +-
 .../security/authentication/util/AuthToken.java |  16 +-
 .../server/TestAuthenticationFilter.java| 167 ---
 .../src/site/markdown/HttpAuthentication.md |   2 +-
 4 files changed, 161 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/dec8dfdf/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
index e0da38b..4bdc808 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
@@ -150,7 +150,7 @@ public class AuthenticationFilter implements Filter {
* that indicates the max inactive interval of the generated token.
*/
   public static final String
-  AUTH_TOKEN_MAX_INACTIVE_INTERVAL = "token.MaxInactiveInterval";
+  AUTH_TOKEN_MAX_INACTIVE_INTERVAL = "token.max-inactive-interval";
 
   /**
* Constant for the configuration property that indicates the validity of 
the generated token.
@@ -234,9 +234,11 @@ public class AuthenticationFilter implements Filter {
 } else {
   authHandlerClassName = authHandlerName;
 }
-
 maxInactiveInterval = Long.parseLong(config.getProperty(
-AUTH_TOKEN_MAX_INACTIVE_INTERVAL, "1800")) * 1000; // 30 minutes;
+AUTH_TOKEN_MAX_INACTIVE_INTERVAL, "-1")); // By default, disable.
+if (maxInactiveInterval > 0) {
+  maxInactiveInterval *= 1000;
+}
 validity = Long.parseLong(config.getProperty(AUTH_TOKEN_VALIDITY, "36000"))
 * 1000; //10 hours
 initializeSecretProvider(filterConfig);
@@ -559,7 +561,7 @@ public class AuthenticationFilter implements Filter {
   }
   token = authHandler.authenticate(httpRequest, httpResponse);
   if (token != null && token != AuthenticationToken.ANONYMOUS) {
-if (token.getMaxInactives() != 0) {
+if (token.getMaxInactives() > 0) {
   token.setMaxInactives(System.currentTimeMillis()
   + getMaxInactiveInterval() * 1000);
 }
@@ -603,6 +605,7 @@ public class AuthenticationFilter implements Filter {
   && getMaxInactiveInterval() > 0) {
 token.setMaxInactives(System.currentTimeMillis()
 + getMaxInactiveInterval() * 1000);
+token.setExpires(token.getExpires());
 newToken = true;
   }
   if (newToken && !token.isExpired()

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dec8dfdf/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/AuthToken.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/AuthToken.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/AuthToken.java
index 4fbe599..e959f65 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/AuthToken.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/AuthToken.java
@@ -39,8 +39,7 @@ public class AuthToken implements Principal {
   private static final String TYPE = "t";
 
   private final static Set<String> ATTRIBUTES =
-    new HashSet<String>(Arrays.asList(USER_NAME, PRINCIPAL,
-        MAX_INACTIVES, EXPIRES, TYPE));
+  new HashSet<>(Arrays.asList(USER_NAME, PRINCI

hadoop git commit: HDFS-9597. BaseReplicationPolicyTest should update data node stats after adding a data node. Contributed by Wei-Chiu Chuang.

2015-12-25 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/trunk 2c17b8156 -> 8cfd67239


HDFS-9597. BaseReplicationPolicyTest should update data node stats after adding 
a data node. Contributed by Wei-Chiu Chuang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8cfd6723
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8cfd6723
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8cfd6723

Branch: refs/heads/trunk
Commit: 8cfd672397efd91c471c417afef3aff85f64b506
Parents: 2c17b81
Author: Benoy Antony <be...@apache.org>
Authored: Fri Dec 25 08:55:39 2015 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Fri Dec 25 08:55:39 2015 -0800

--
 .../hdfs/server/blockmanagement/BaseReplicationPolicyTest.java | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8cfd6723/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
index 6174447..7dc52fa 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
@@ -98,6 +98,8 @@ abstract public class BaseReplicationPolicyTest {
   cluster.add(dataNodes[i]);
   bm.getDatanodeManager().getHeartbeatManager().addDatanode(
   dataNodes[i]);
+  bm.getDatanodeManager().getHeartbeatManager().updateDnStat(
+  dataNodes[i]);
 }
 updateHeartbeatWithUsage();
   }



hadoop git commit: HDFS-9597. BaseReplicationPolicyTest should update data node stats after adding a data node. Contributed by Wei-Chiu Chuang.

2015-12-25 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 8baf9369b -> 42160d358


HDFS-9597. BaseReplicationPolicyTest should update data node stats after adding 
a data node. Contributed by Wei-Chiu Chuang.

(cherry picked from commit 8cfd672397efd91c471c417afef3aff85f64b506)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/42160d35
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/42160d35
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/42160d35

Branch: refs/heads/branch-2
Commit: 42160d358a8d010539603f777eafcc806d983c21
Parents: 8baf936
Author: Benoy Antony <be...@apache.org>
Authored: Fri Dec 25 08:55:39 2015 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Fri Dec 25 08:57:34 2015 -0800

--
 .../hdfs/server/blockmanagement/BaseReplicationPolicyTest.java | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/42160d35/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
index 6174447..7dc52fa 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/BaseReplicationPolicyTest.java
@@ -98,6 +98,8 @@ abstract public class BaseReplicationPolicyTest {
   cluster.add(dataNodes[i]);
   bm.getDatanodeManager().getHeartbeatManager().addDatanode(
   dataNodes[i]);
+  bm.getDatanodeManager().getHeartbeatManager().updateDnStat(
+  dataNodes[i]);
 }
 updateHeartbeatWithUsage();
   }
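
For context, a minimal standalone sketch of the two-step registration pattern this fix completes. HeartbeatManager#addDatanode only registers the node; updateDnStat folds its capacity into the aggregate stats, so a test that skips the second call computes placement against stale totals. Toy types below are illustrative stand-ins, not the real HDFS classes:

import java.util.ArrayList;
import java.util.List;

public class RegistrationSketch {
  // Illustrative stand-in for DatanodeDescriptor.
  static class ToyDatanode {
    final long capacityBytes;
    ToyDatanode(long capacityBytes) { this.capacityBytes = capacityBytes; }
  }

  // Illustrative stand-in for HeartbeatManager.
  static class ToyHeartbeatManager {
    private final List<ToyDatanode> nodes = new ArrayList<>();
    private long totalCapacity; // aggregate stat, maintained separately

    void addDatanode(ToyDatanode dn) { nodes.add(dn); } // registration only
    void updateDnStat(ToyDatanode dn) { totalCapacity += dn.capacityBytes; }
    long getTotalCapacity() { return totalCapacity; }
  }

  public static void main(String[] args) {
    ToyHeartbeatManager hm = new ToyHeartbeatManager();
    ToyDatanode dn = new ToyDatanode(1024L);
    hm.addDatanode(dn);
    // Without this second call the aggregate stays at 0, which is exactly
    // the stale-stats situation the test fix avoids.
    hm.updateDnStat(dn);
    System.out.println(hm.getTotalCapacity()); // 1024
  }
}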



hadoop git commit: HDFS-9034. StorageTypeStats Metric should not count failed storage. Contributed by Surendra Singh Lilhore.

2015-12-22 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 131260f0a -> 0d249a5e2


HDFS-9034. StorageTypeStats Metric should not count failed storage. Contributed 
by Surendra Singh Lilhore.

(cherry picked from commit df83230948204ee2d2b06ecc66ce0163e2df27ef)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0d249a5e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0d249a5e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0d249a5e

Branch: refs/heads/branch-2
Commit: 0d249a5e287a54b7540f2ea7937d14d0fd44bce0
Parents: 131260f
Author: Benoy Antony <be...@apache.org>
Authored: Tue Dec 22 15:28:17 2015 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Tue Dec 22 15:29:36 2015 -0800

--
 .../server/blockmanagement/DatanodeManager.java |  1 +
 .../server/blockmanagement/DatanodeStats.java   | 19 --
 .../blockmanagement/HeartbeatManager.java   |  6 +-
 .../blockmanagement/TestBlockStatsMXBean.java   | 62 
 4 files changed, 81 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0d249a5e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
index 802bb76..d302b9b 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
@@ -953,6 +953,7 @@ public class DatanodeManager {
 // no need to update its timestamp
 // because its is done when the descriptor is created
 heartbeatManager.addDatanode(nodeDescr);
+heartbeatManager.updateDnStat(nodeDescr);
 incrementVersionCount(nodeReg.getSoftwareVersion());
 startDecommissioningIfExcluded(nodeDescr);
 success = true;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0d249a5e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeStats.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeStats.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeStats.java
index 4c39c41..bcc9bba 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeStats.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeStats.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.blockmanagement;
 
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.DFSUtilClient;
+import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 
 import java.util.EnumMap;
 import java.util.HashSet;
@@ -61,8 +62,10 @@ class DatanodeStats {
 }
 Set<StorageType> storageTypes = new HashSet<>();
 for (DatanodeStorageInfo storageInfo : node.getStorageInfos()) {
-  statsMap.addStorage(storageInfo, node);
-  storageTypes.add(storageInfo.getStorageType());
+  if (storageInfo.getState() != DatanodeStorage.State.FAILED) {
+statsMap.addStorage(storageInfo, node);
+storageTypes.add(storageInfo.getStorageType());
+  }
 }
 for (StorageType storageType : storageTypes) {
   statsMap.addNode(storageType, node);
@@ -86,8 +89,10 @@ class DatanodeStats {
 }
 Set<StorageType> storageTypes = new HashSet<>();
 for (DatanodeStorageInfo storageInfo : node.getStorageInfos()) {
-  statsMap.subtractStorage(storageInfo, node);
-  storageTypes.add(storageInfo.getStorageType());
+  if (storageInfo.getState() != DatanodeStorage.State.FAILED) {
+statsMap.subtractStorage(storageInfo, node);
+storageTypes.add(storageInfo.getStorageType());
+  }
 }
 for (StorageType storageType : storageTypes) {
   statsMap.subtractNode(storageType, node);
@@ -202,10 +207,12 @@ class DatanodeStats {
 
 private void subtractNode(StorageType storageType,
 final DatanodeDescriptor node) {
-  StorageTypeStats storageTypeStats =
-  storageTypeStatsMap.get(storageType);
+  StorageTypeStats storageTypeStats = storageTypeStatsMap.get(storageType);
   if (storageTypeStats != null) {
 storageTypeStats.subtractNode(node);
+if (storageTypeSta
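
The essence of the fix is the FAILED-state guard around both aggregation loops above. A self-contained sketch of that guard; the enum and Storage class below are illustrative stand-ins, not the real HDFS types:

import java.util.EnumMap;
import java.util.Map;

public class FailedStorageSketch {
  enum StorageType { DISK, SSD }
  enum State { NORMAL, FAILED }

  // Illustrative stand-in for DatanodeStorageInfo.
  static class Storage {
    final StorageType type; final State state; final long capacity;
    Storage(StorageType t, State s, long c) { type = t; state = s; capacity = c; }
  }

  public static void main(String[] args) {
    Storage[] storages = {
        new Storage(StorageType.DISK, State.NORMAL, 100),
        new Storage(StorageType.DISK, State.FAILED, 100), // must not count
        new Storage(StorageType.SSD, State.NORMAL, 50),
    };
    Map<StorageType, Long> capacityByType = new EnumMap<>(StorageType.class);
    for (Storage s : storages) {
      if (s.state != State.FAILED) { // the guard the patch introduces
        capacityByType.merge(s.type, s.capacity, Long::sum);
      }
    }
    System.out.println(capacityByType); // {DISK=100, SSD=50}
  }
}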

hadoop git commit: HADOOP-12050. Enable MaxInactiveInterval for hadoop http auth token. Contributed by Huizhi Lu.

2015-08-18 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/trunk 14215c8ef -> 71aedfabf


HADOOP-12050. Enable MaxInactiveInterval for hadoop http auth token. 
Contributed by Huizhi Lu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/71aedfab
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/71aedfab
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/71aedfab

Branch: refs/heads/trunk
Commit: 71aedfabf39e03104c8d22456e95ef6349aae6c0
Parents: 14215c8
Author: Benoy Antony <be...@apache.org>
Authored: Tue Aug 18 13:43:34 2015 -0700
Committer: Benoy Antony <be...@apache.org>
Committed: Tue Aug 18 13:43:34 2015 -0700

--
 .../server/AuthenticationFilter.java|  63 +--
 .../server/AuthenticationToken.java |  12 ++
 .../security/authentication/util/AuthToken.java |  34 +++-
 .../server/TestAuthenticationFilter.java| 163 ++-
 .../src/site/markdown/HttpAuthentication.md |   8 +-
 5 files changed, 258 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/71aedfab/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
index bf44f48..e0da38b 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
@@ -146,6 +146,13 @@ public class AuthenticationFilter implements Filter {
   public static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET + 
".file";
 
   /**
+   * Constant for the configuration property
+   * that indicates the max inactive interval of the generated token.
+   */
+  public static final String
+  AUTH_TOKEN_MAX_INACTIVE_INTERVAL = "token.MaxInactiveInterval";
+
+  /**
* Constant for the configuration property that indicates the validity of 
the generated token.
*/
   public static final String AUTH_TOKEN_VALIDITY = "token.validity";
@@ -190,6 +197,7 @@ public class AuthenticationFilter implements Filter {
   private Signer signer;
   private SignerSecretProvider secretProvider;
   private AuthenticationHandler authHandler;
+  private long maxInactiveInterval;
   private long validity;
   private String cookieDomain;
   private String cookiePath;
@@ -227,6 +235,8 @@ public class AuthenticationFilter implements Filter {
   authHandlerClassName = authHandlerName;
 }
 
+maxInactiveInterval = Long.parseLong(config.getProperty(
+AUTH_TOKEN_MAX_INACTIVE_INTERVAL, "1800")) * 1000; // 30 minutes
 validity = Long.parseLong(config.getProperty(AUTH_TOKEN_VALIDITY, "36000"))
 * 1000; //10 hours
 initializeSecretProvider(filterConfig);
@@ -355,6 +365,15 @@ public class AuthenticationFilter implements Filter {
   }
 
   /**
+   * Returns the max inactive interval time of the generated tokens.
+   *
+   * @return the max inactive interval time of the generated tokens in seconds.
+   */
+  protected long getMaxInactiveInterval() {
+return maxInactiveInterval / 1000;
+  }
+
+  /**
* Returns the validity time of the generated tokens.
*
* @return the validity time of the generated tokens, in seconds.
@@ -510,8 +529,10 @@ public class AuthenticationFilter implements Filter {
* @throws ServletException thrown if a processing error occurred.
*/
   @Override
-  public void doFilter(ServletRequest request, ServletResponse response, 
FilterChain filterChain)
-  throws IOException, ServletException {
+  public void doFilter(ServletRequest request,
+   ServletResponse response,
+   FilterChain filterChain)
+   throws IOException, ServletException {
 boolean unauthorizedResponse = true;
 int errCode = HttpServletResponse.SC_UNAUTHORIZED;
 AuthenticationException authenticationEx = null;
@@ -533,19 +554,27 @@ public class AuthenticationFilter implements Filter {
   if (authHandler.managementOperation(token, httpRequest, httpResponse)) {
 if (token == null) {
   if (LOG.isDebugEnabled()) {
-LOG.debug("Request [{}] triggering authentication", 
getRequestURL(httpRequest));
+LOG.debug("Request [{}] triggering authentication",
+getRequestURL(httpRequest));
   }
   token = authHandler.authenticate(httpRequest, httpResponse
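
A minimal sketch of the expiry policy the new token.MaxInactiveInterval property adds alongside the existing token.validity cap: a token dies after the inactivity window passes without use, and in any case after the hard validity limit. Constants mirror the defaults in the diff; the method is illustrative, not the actual AuthenticationToken API:

public class InactiveIntervalSketch {
  static final long MAX_INACTIVE_MS = 1_800_000L;  // 30 min, per "1800" default
  static final long VALIDITY_MS = 36_000_000L;     // 10 h, per "36000" default

  static boolean isExpired(long issuedAtMs, long lastAccessMs, long nowMs) {
    return nowMs - lastAccessMs > MAX_INACTIVE_MS  // idle too long
        || nowMs - issuedAtMs > VALIDITY_MS;       // past the hard cap
  }

  public static void main(String[] args) {
    long issued = 0L;
    // A recently used token survives well past 30 minutes of age...
    System.out.println(isExpired(issued, 1_700_000L, 1_800_000L)); // false
    // ...but one idle for more than 30 minutes is rejected.
    System.out.println(isExpired(issued, 0L, 2_000_000L));         // true
  }
}

Active requests keep pushing lastAccess forward, so a busy client never re-authenticates until the 10-hour cap, while an abandoned token becomes useless after 30 minutes.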

hadoop git commit: HADOOP-12050. Enable MaxInactiveInterval for hadoop http auth token. Contributed by Huizhi Lu.

2015-08-18 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 40d8faf86 -> d84e4a90d


HADOOP-12050. Enable MaxInactiveInterval for hadoop http auth token. 
Contributed by Huizhi Lu.

(cherry picked from commit 71aedfabf39e03104c8d22456e95ef6349aae6c0)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d84e4a90
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d84e4a90
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d84e4a90

Branch: refs/heads/branch-2
Commit: d84e4a90d04f85ab59913e2d7f49ba6fef5f5a94
Parents: 40d8faf
Author: Benoy Antony <be...@apache.org>
Authored: Tue Aug 18 13:43:34 2015 -0700
Committer: Benoy Antony <be...@apache.org>
Committed: Tue Aug 18 13:53:47 2015 -0700

--
 .../server/AuthenticationFilter.java|  63 +--
 .../server/AuthenticationToken.java |  12 ++
 .../security/authentication/util/AuthToken.java |  34 +++-
 .../server/TestAuthenticationFilter.java| 163 ++-
 .../src/site/markdown/HttpAuthentication.md |   8 +-
 5 files changed, 258 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d84e4a90/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
index bf44f48..e0da38b 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
@@ -146,6 +146,13 @@ public class AuthenticationFilter implements Filter {
   public static final String SIGNATURE_SECRET_FILE = SIGNATURE_SECRET + 
".file";
 
   /**
+   * Constant for the configuration property
+   * that indicates the max inactive interval of the generated token.
+   */
+  public static final String
+  AUTH_TOKEN_MAX_INACTIVE_INTERVAL = "token.MaxInactiveInterval";
+
+  /**
* Constant for the configuration property that indicates the validity of 
the generated token.
*/
   public static final String AUTH_TOKEN_VALIDITY = "token.validity";
@@ -190,6 +197,7 @@ public class AuthenticationFilter implements Filter {
   private Signer signer;
   private SignerSecretProvider secretProvider;
   private AuthenticationHandler authHandler;
+  private long maxInactiveInterval;
   private long validity;
   private String cookieDomain;
   private String cookiePath;
@@ -227,6 +235,8 @@ public class AuthenticationFilter implements Filter {
   authHandlerClassName = authHandlerName;
 }
 
+maxInactiveInterval = Long.parseLong(config.getProperty(
+AUTH_TOKEN_MAX_INACTIVE_INTERVAL, "1800")) * 1000; // 30 minutes
 validity = Long.parseLong(config.getProperty(AUTH_TOKEN_VALIDITY, "36000"))
 * 1000; //10 hours
 initializeSecretProvider(filterConfig);
@@ -355,6 +365,15 @@ public class AuthenticationFilter implements Filter {
   }
 
   /**
+   * Returns the max inactive interval time of the generated tokens.
+   *
+   * @return the max inactive interval time of the generated tokens in seconds.
+   */
+  protected long getMaxInactiveInterval() {
+return maxInactiveInterval / 1000;
+  }
+
+  /**
* Returns the validity time of the generated tokens.
*
* @return the validity time of the generated tokens, in seconds.
@@ -510,8 +529,10 @@ public class AuthenticationFilter implements Filter {
* @throws ServletException thrown if a processing error occurred.
*/
   @Override
-  public void doFilter(ServletRequest request, ServletResponse response, 
FilterChain filterChain)
-  throws IOException, ServletException {
+  public void doFilter(ServletRequest request,
+   ServletResponse response,
+   FilterChain filterChain)
+   throws IOException, ServletException {
 boolean unauthorizedResponse = true;
 int errCode = HttpServletResponse.SC_UNAUTHORIZED;
 AuthenticationException authenticationEx = null;
@@ -533,19 +554,27 @@ public class AuthenticationFilter implements Filter {
   if (authHandler.managementOperation(token, httpRequest, httpResponse)) {
 if (token == null) {
   if (LOG.isDebugEnabled()) {
-LOG.debug("Request [{}] triggering authentication", 
getRequestURL(httpRequest));
+LOG.debug("Request [{}] triggering authentication",
+getRequestURL(httpRequest

hadoop git commit: HADOOP-12049. Control http authentication cookie persistence via configuration. Contributed by Huizhi Lu.

2015-06-24 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/trunk afe9ea3c1 -> a815cc157


HADOOP-12049. Control http authentication cookie persistence via configuration. 
Contributed by Huizhi Lu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a815cc15
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a815cc15
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a815cc15

Branch: refs/heads/trunk
Commit: a815cc157ceb24e02189634a85abed8e874568e0
Parents: afe9ea3
Author: Benoy Antony <be...@apache.org>
Authored: Wed Jun 24 15:59:39 2015 -0700
Committer: Benoy Antony <be...@apache.org>
Committed: Wed Jun 24 15:59:39 2015 -0700

--
 .../server/AuthenticationFilter.java|  28 ++-
 .../http/TestAuthenticationSessionCookie.java   | 187 +++
 .../apache/hadoop/http/TestHttpCookieFlag.java  |   2 +-
 3 files changed, 213 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a815cc15/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
index 0f86623..bf44f48 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
@@ -161,6 +161,12 @@ public class AuthenticationFilter implements Filter {
   public static final String COOKIE_PATH = "cookie.path";
 
   /**
+   * Constant for the configuration property
+   * that indicates the persistence of the HTTP cookie.
+   */
+  public static final String COOKIE_PERSISTENT = "cookie.persistent";
+
+  /**
* Constant for the configuration property that indicates the name of the
* SignerSecretProvider class to use.
* Possible values are: string, random, zookeeper, or a classname.
@@ -187,6 +193,7 @@ public class AuthenticationFilter implements Filter {
   private long validity;
   private String cookieDomain;
   private String cookiePath;
+  private boolean isCookiePersistent;
   private boolean isInitializedByTomcat;
 
   /**
@@ -228,6 +235,9 @@ public class AuthenticationFilter implements Filter {
 
 cookieDomain = config.getProperty(COOKIE_DOMAIN, null);
 cookiePath = config.getProperty(COOKIE_PATH, null);
+isCookiePersistent = Boolean.parseBoolean(
+config.getProperty(COOKIE_PERSISTENT, "false"));
+
   }
 
   protected void initializeAuthHandler(String authHandlerClassName, 
FilterConfig filterConfig)
@@ -372,6 +382,15 @@ public class AuthenticationFilter implements Filter {
   }
 
   /**
+   * Returns the cookie persistence to use for the HTTP cookie.
+   *
+   * @return the cookie persistence to use for the HTTP cookie.
+   */
+  protected boolean isCookiePersistent() {
+return isCookiePersistent;
+  }
+
+  /**
* Destroys the filter.
* <p>
* It invokes the {@link AuthenticationHandler#destroy()} method to release 
any resources it may hold.
@@ -549,7 +568,8 @@ public class AuthenticationFilter implements Filter {
   if (newToken && !token.isExpired() && token != 
AuthenticationToken.ANONYMOUS) {
 String signedToken = signer.sign(token.toString());
 createAuthCookie(httpResponse, signedToken, getCookieDomain(),
-getCookiePath(), token.getExpires(), isHttps);
+getCookiePath(), token.getExpires(),
+isCookiePersistent(), isHttps);
   }
   doFilter(filterChain, httpRequest, httpResponse);
 }
@@ -569,7 +589,7 @@ public class AuthenticationFilter implements Filter {
 if (unauthorizedResponse) {
   if (!httpResponse.isCommitted()) {
 createAuthCookie(httpResponse, "", getCookieDomain(),
-getCookiePath(), 0, isHttps);
+getCookiePath(), 0, isCookiePersistent(), isHttps);
 // If response code is 401. Then WWW-Authenticate Header should be
 // present.. reset to 403 if not found..
 if ((errCode == HttpServletResponse.SC_UNAUTHORIZED)
@@ -614,6 +634,7 @@ public class AuthenticationFilter implements Filter {
* @param isSecure is the cookie secure?
* @param token the token.
* @param expires the cookie expiration time.
+   * @param isCookiePersistent whether the cookie is persistent or not.
*
* XXX the following code duplicate some logic in Jetty / Servlet API,
* because
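
A rough sketch of the behavior the new cookie.persistent knob selects: only a persistent cookie carries an Expires attribute, while a session cookie omits it and is dropped when the browser closes. The header-building helper below is a simplification of createAuthCookie, not the actual implementation, and the hadoop.auth cookie name is assumed from the auth client:

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;

public class CookiePersistenceSketch {
  static String authCookie(String token, long expiresMs, boolean persistent) {
    StringBuilder sb =
        new StringBuilder("hadoop.auth=").append(token).append("; HttpOnly");
    if (persistent && expiresMs > 0) { // only persistent cookies get Expires
      SimpleDateFormat fmt = new SimpleDateFormat(
          "EEE, dd-MMM-yyyy HH:mm:ss zzz", Locale.US);
      fmt.setTimeZone(TimeZone.getTimeZone("GMT"));
      sb.append("; Expires=").append(fmt.format(new Date(expiresMs)));
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    long inTenHours = System.currentTimeMillis() + 36_000_000L;
    System.out.println(authCookie("t0k3n", inTenHours, false)); // session cookie
    System.out.println(authCookie("t0k3n", inTenHours, true));  // survives browser restarts
  }
}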

hadoop git commit: HADOOP-12049. Control http authentication cookie persistence via configuration. Contributed by Huizhi Lu.

2015-06-24 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 431f68530 -> aef9ab212


HADOOP-12049. Control http authentication cookie persistence via configuration. 
Contributed by Huizhi Lu.

(cherry picked from commit a815cc157ceb24e02189634a85abed8e874568e0)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/aef9ab21
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/aef9ab21
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/aef9ab21

Branch: refs/heads/branch-2
Commit: aef9ab2128ec6e2bb30dd41c3850c80eea5ed6bd
Parents: 431f685
Author: Benoy Antony <be...@apache.org>
Authored: Wed Jun 24 15:59:39 2015 -0700
Committer: Benoy Antony <be...@apache.org>
Committed: Wed Jun 24 16:41:23 2015 -0700

--
 .../server/AuthenticationFilter.java|  28 ++-
 .../http/TestAuthenticationSessionCookie.java   | 187 +++
 .../apache/hadoop/http/TestHttpCookieFlag.java  |   2 +-
 3 files changed, 213 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/aef9ab21/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
index 0f86623..bf44f48 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
@@ -161,6 +161,12 @@ public class AuthenticationFilter implements Filter {
   public static final String COOKIE_PATH = "cookie.path";
 
   /**
+   * Constant for the configuration property
+   * that indicates the persistence of the HTTP cookie.
+   */
+  public static final String COOKIE_PERSISTENT = "cookie.persistent";
+
+  /**
* Constant for the configuration property that indicates the name of the
* SignerSecretProvider class to use.
* Possible values are: string, random, zookeeper, or a classname.
@@ -187,6 +193,7 @@ public class AuthenticationFilter implements Filter {
   private long validity;
   private String cookieDomain;
   private String cookiePath;
+  private boolean isCookiePersistent;
   private boolean isInitializedByTomcat;
 
   /**
@@ -228,6 +235,9 @@ public class AuthenticationFilter implements Filter {
 
 cookieDomain = config.getProperty(COOKIE_DOMAIN, null);
 cookiePath = config.getProperty(COOKIE_PATH, null);
+isCookiePersistent = Boolean.parseBoolean(
+config.getProperty(COOKIE_PERSISTENT, "false"));
+
   }
 
   protected void initializeAuthHandler(String authHandlerClassName, 
FilterConfig filterConfig)
@@ -372,6 +382,15 @@ public class AuthenticationFilter implements Filter {
   }
 
   /**
+   * Returns the cookie persistence to use for the HTTP cookie.
+   *
+   * @return the cookie persistence to use for the HTTP cookie.
+   */
+  protected boolean isCookiePersistent() {
+return isCookiePersistent;
+  }
+
+  /**
* Destroys the filter.
* <p>
* It invokes the {@link AuthenticationHandler#destroy()} method to release 
any resources it may hold.
@@ -549,7 +568,8 @@ public class AuthenticationFilter implements Filter {
   if (newToken && !token.isExpired() && token != 
AuthenticationToken.ANONYMOUS) {
 String signedToken = signer.sign(token.toString());
 createAuthCookie(httpResponse, signedToken, getCookieDomain(),
-getCookiePath(), token.getExpires(), isHttps);
+getCookiePath(), token.getExpires(),
+isCookiePersistent(), isHttps);
   }
   doFilter(filterChain, httpRequest, httpResponse);
 }
@@ -569,7 +589,7 @@ public class AuthenticationFilter implements Filter {
 if (unauthorizedResponse) {
   if (!httpResponse.isCommitted()) {
 createAuthCookie(httpResponse, "", getCookieDomain(),
-getCookiePath(), 0, isHttps);
+getCookiePath(), 0, isCookiePersistent(), isHttps);
 // If response code is 401. Then WWW-Authenticate Header should be
 // present.. reset to 403 if not found..
 if ((errCode == HttpServletResponse.SC_UNAUTHORIZED)
@@ -614,6 +634,7 @@ public class AuthenticationFilter implements Filter {
* @param isSecure is the cookie secure?
* @param token the token.
* @param expires the cookie expiration time.
+   * @param isCookiePersistent whether the cookie is persistent or not.
*
* XXX

hadoop git commit: HDFS-7467. Provide storage tier information for a directory via fsck. (Benoy Antony)

2015-02-25 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 d389a1ae9 -> 4e400030f


HDFS-7467. Provide storage tier information for a directory via fsck. (Benoy 
Antony)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4e400030
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4e400030
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4e400030

Branch: refs/heads/branch-2
Commit: 4e400030f6bfb3cf2c689aefb526d5296ee077ec
Parents: d389a1a
Author: Benoy Antony <be...@apache.org>
Authored: Wed Feb 25 16:19:35 2015 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Wed Feb 25 16:46:41 2015 -0800

--
 .../hdfs/server/namenode/NamenodeFsck.java  |  23 +-
 .../server/namenode/StoragePolicySummary.java   | 257 +++
 .../org/apache/hadoop/hdfs/tools/DFSck.java |   4 +-
 .../hadoop/hdfs/server/namenode/TestFsck.java   |  78 +-
 .../namenode/TestStoragePolicySummary.java  | 201 +++
 5 files changed, 549 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4e400030/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
index f96618b..8133529 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
@@ -25,6 +25,7 @@ import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.Socket;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Date;
 import java.util.Iterator;
@@ -45,6 +46,7 @@ import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.RemotePeerFactory;
+import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.net.Peer;
 import org.apache.hadoop.hdfs.net.TcpPeerServer;
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -127,6 +129,7 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
   private boolean showBlocks = false;
   private boolean showLocations = false;
   private boolean showRacks = false;
+  private boolean showStoragePolcies = false;
   private boolean showCorruptFileBlocks = false;
 
   /**
@@ -163,6 +166,7 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
   private List<String> snapshottableDirs = null;
 
   private final BlockPlacementPolicy bpPolicy;
+  private StoragePolicySummary storageTypeSummary = null;
 
   /**
* Filesystem checker.
@@ -199,6 +203,7 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
   else if (key.equals("blocks")) { this.showBlocks = true; }
   else if (key.equals("locations")) { this.showLocations = true; }
   else if (key.equals("racks")) { this.showRacks = true; }
+  else if (key.equals("storagepolicies")) { this.showStoragePolcies = 
true; }
   else if (key.equals("openforwrite")) {this.showOpenFiles = true; }
   else if (key.equals("listcorruptfileblocks")) {
 this.showCorruptFileBlocks = true;
@@ -332,6 +337,11 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
   return;
 }
 
+if (this.showStoragePolcies) {
+  storageTypeSummary = new StoragePolicySummary(
+  namenode.getNamesystem().getBlockManager().getStoragePolicies());
+}
+
 Result res = new Result(conf);
 
 check(path, file, res);
@@ -340,6 +350,10 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
 out.println(" Number of data-nodes:\t\t" + totalDatanodes);
 out.println(" Number of racks:\t\t" + networktopology.getNumOfRacks());
 
+if (this.showStoragePolcies) {
+  out.print(storageTypeSummary.toString());
+}
+
 out.println("FSCK ended at " + new Date() + " in "
 + (Time.now() - startTime + " milliseconds"));
 
@@ -487,7 +501,8 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
   boolean isCorrupt = lBlk.isCorrupt();
   String blkName = block.toString();
   DatanodeInfo[] locs = lBlk.getLocations();
-  NumberReplicas numberReplicas = 
namenode.getNamesystem().getBlockManager().countNodes(block.getLocalBlock());
+  NumberReplicas numberReplicas =
+  
namenode.getNamesystem().getBlockManager().countNodes(block.getLocalBlock());
   int
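
The new report is requested with an extra fsck flag, e.g. hdfs fsck /path -storagepolicies, per the DFSck change in this commit. A hypothetical mini-version of the tally StoragePolicySummary builds, counting blocks by the multiset of storage types their replicas occupy; the types and data below are made up for illustration:

import java.util.Arrays;
import java.util.Map;
import java.util.TreeMap;

public class StorageTierTally {
  enum StorageType { DISK, SSD, ARCHIVE }

  public static void main(String[] args) {
    // Pretend replica placements for three blocks.
    StorageType[][] blockReplicaTypes = {
        {StorageType.DISK, StorageType.DISK, StorageType.DISK},
        {StorageType.SSD, StorageType.DISK, StorageType.DISK},
        {StorageType.DISK, StorageType.DISK, StorageType.DISK},
    };
    Map<String, Integer> tally = new TreeMap<>();
    for (StorageType[] replicas : blockReplicaTypes) {
      StorageType[] key = replicas.clone();
      Arrays.sort(key); // order-insensitive, so DISK,SSD,DISK == DISK,DISK,SSD
      tally.merge(Arrays.toString(key), 1, Integer::sum);
    }
    System.out.println(tally); // {[DISK, DISK, DISK]=2, [DISK, DISK, SSD]=1}
  }
}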

hadoop git commit: HDFS-7467. Provide storage tier information for a directory via fsck. (Benoy Antony)

2015-02-25 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/trunk caa42adf2 -> d140d76a4


HDFS-7467. Provide storage tier information for a directory via fsck. (Benoy 
Antony)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d140d76a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d140d76a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d140d76a

Branch: refs/heads/trunk
Commit: d140d76a43c88e326b9c2818578f22bd3563b969
Parents: caa42ad
Author: Benoy Antony <be...@apache.org>
Authored: Wed Feb 25 16:19:35 2015 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Wed Feb 25 16:19:35 2015 -0800

--
 .../hdfs/server/namenode/NamenodeFsck.java  |  23 +-
 .../server/namenode/StoragePolicySummary.java   | 257 +++
 .../org/apache/hadoop/hdfs/tools/DFSck.java |   2 +
 .../hadoop/hdfs/server/namenode/TestFsck.java   |  78 +-
 .../namenode/TestStoragePolicySummary.java  | 201 +++
 5 files changed, 548 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d140d76a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
index dc9494d..5134f3c 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
@@ -25,6 +25,7 @@ import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.Socket;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Date;
 import java.util.Iterator;
@@ -45,6 +46,7 @@ import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.RemotePeerFactory;
+import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.net.Peer;
 import org.apache.hadoop.hdfs.net.TcpPeerServer;
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -127,6 +129,7 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
   private boolean showBlocks = false;
   private boolean showLocations = false;
   private boolean showRacks = false;
+  private boolean showStoragePolcies = false;
   private boolean showprogress = false;
   private boolean showCorruptFileBlocks = false;
 
@@ -164,6 +167,7 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
   private List<String> snapshottableDirs = null;
 
   private final BlockPlacementPolicy bpPolicy;
+  private StoragePolicySummary storageTypeSummary = null;
 
   /**
* Filesystem checker.
@@ -200,6 +204,7 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
   else if (key.equals("blocks")) { this.showBlocks = true; }
   else if (key.equals("locations")) { this.showLocations = true; }
   else if (key.equals("racks")) { this.showRacks = true; }
+  else if (key.equals("storagepolicies")) { this.showStoragePolcies = 
true; }
   else if (key.equals("showprogress")) { this.showprogress = true; }
   else if (key.equals("openforwrite")) {this.showOpenFiles = true; }
   else if (key.equals("listcorruptfileblocks")) {
@@ -334,6 +339,11 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
   return;
 }
 
+if (this.showStoragePolcies) {
+  storageTypeSummary = new StoragePolicySummary(
+  namenode.getNamesystem().getBlockManager().getStoragePolicies());
+}
+
 Result res = new Result(conf);
 
 check(path, file, res);
@@ -342,6 +352,10 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
 out.println(" Number of data-nodes:\t\t" + totalDatanodes);
 out.println(" Number of racks:\t\t" + networktopology.getNumOfRacks());
 
+if (this.showStoragePolcies) {
+  out.print(storageTypeSummary.toString());
+}
+
 out.println("FSCK ended at " + new Date() + " in "
 + (Time.now() - startTime + " milliseconds"));
 
@@ -492,7 +506,8 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
   boolean isCorrupt = lBlk.isCorrupt();
   String blkName = block.toString();
   DatanodeInfo[] locs = lBlk.getLocations();
-  NumberReplicas numberReplicas = 
namenode.getNamesystem().getBlockManager().countNodes(block.getLocalBlock());
+  NumberReplicas numberReplicas =
+  
namenode.getNamesystem().getBlockManager

hadoop git commit: HADOOP-11494. Lock acquisition on WrappedInputStream#unwrappedRpcBuffer may race with another thread. Contributed by Ted Yu.

2015-02-02 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/trunk ffc75d6eb -> 3472e3bd6


HADOOP-11494. Lock acquisition on WrappedInputStream#unwrappedRpcBuffer may 
race with another thread. Contributed by Ted Yu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3472e3bd
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3472e3bd
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3472e3bd

Branch: refs/heads/trunk
Commit: 3472e3bd6c50558870b86c9ccfea5072385fa991
Parents: ffc75d6
Author: Benoy Antony <be...@apache.org>
Authored: Mon Feb 2 10:34:47 2015 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Mon Feb 2 10:34:47 2015 -0800

--
 .../org/apache/hadoop/security/SaslRpcClient.java | 18 --
 1 file changed, 8 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3472e3bd/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
index dfb0898..4a1a397 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
@@ -573,17 +573,15 @@ public class SaslRpcClient {
 }
 
 @Override
-public int read(byte[] buf, int off, int len) throws IOException {
-  synchronized(unwrappedRpcBuffer) {
-// fill the buffer with the next RPC message
-if (unwrappedRpcBuffer.remaining() == 0) {
-  readNextRpcPacket();
-}
-// satisfy as much of the request as possible
-int readLen = Math.min(len, unwrappedRpcBuffer.remaining());
-unwrappedRpcBuffer.get(buf, off, readLen);
-return readLen;
+public synchronized int read(byte[] buf, int off, int len) throws 
IOException {
+  // fill the buffer with the next RPC message
+  if (unwrappedRpcBuffer.remaining() == 0) {
+readNextRpcPacket();
   }
+  // satisfy as much of the request as possible
+  int readLen = Math.min(len, unwrappedRpcBuffer.remaining());
+  unwrappedRpcBuffer.get(buf, off, readLen);
+  return readLen;
 }
 
 // all messages must be RPC SASL wrapped, else an exception is thrown
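
The race being fixed: readNextRpcPacket() reassigns unwrappedRpcBuffer, so synchronizing on the buffer object lets two threads lock different ByteBuffer instances and proceed concurrently; a synchronized method locks one stable monitor. A standalone illustration of the two patterns (toy class, not the SaslRpcClient code itself):

import java.nio.ByteBuffer;

public class LockTargetSketch {
  private ByteBuffer buf = ByteBuffer.allocate(0);

  // Broken pattern: the monitor changes identity under contention.
  int readRacy(byte[] out) {
    synchronized (buf) {            // thread A locks buffer #1
      if (buf.remaining() == 0) {
        buf = ByteBuffer.wrap(new byte[]{1, 2, 3}); // field now points at #2,
      }                             // so thread B can lock #2 concurrently
      int n = Math.min(out.length, buf.remaining());
      buf.get(out, 0, n);
      return n;
    }
  }

  // Fixed pattern: a single monitor (this) regardless of reassignment.
  synchronized int readSafe(byte[] out) {
    if (buf.remaining() == 0) {
      buf = ByteBuffer.wrap(new byte[]{1, 2, 3});
    }
    int n = Math.min(out.length, buf.remaining());
    buf.get(out, 0, n);
    return n;
  }

  public static void main(String[] args) {
    System.out.println(new LockTargetSketch().readSafe(new byte[2])); // 2
  }
}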



hadoop git commit: HADOOP-11494. Lock acquisition on WrappedInputStream#unwrappedRpcBuffer may race with another thread. Contributed by Ted Yu.

2015-02-02 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 648510e03 -> f3ed5b68c


HADOOP-11494. Lock acquisition on WrappedInputStream#unwrappedRpcBuffer may 
race with another thread. Contributed by Ted Yu.

(cherry picked from commit 3472e3bd6c50558870b86c9ccfea5072385fa991)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f3ed5b68
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f3ed5b68
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f3ed5b68

Branch: refs/heads/branch-2
Commit: f3ed5b68ca0963290298c180db9ab9dd20a6dd2d
Parents: 648510e
Author: Benoy Antony <be...@apache.org>
Authored: Mon Feb 2 10:34:47 2015 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Mon Feb 2 10:46:54 2015 -0800

--
 .../org/apache/hadoop/security/SaslRpcClient.java | 18 --
 1 file changed, 8 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f3ed5b68/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
index dfb0898..4a1a397 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
@@ -573,17 +573,15 @@ public class SaslRpcClient {
 }
 
 @Override
-public int read(byte[] buf, int off, int len) throws IOException {
-  synchronized(unwrappedRpcBuffer) {
-// fill the buffer with the next RPC message
-if (unwrappedRpcBuffer.remaining() == 0) {
-  readNextRpcPacket();
-}
-// satisfy as much of the request as possible
-int readLen = Math.min(len, unwrappedRpcBuffer.remaining());
-unwrappedRpcBuffer.get(buf, off, readLen);
-return readLen;
+public synchronized int read(byte[] buf, int off, int len) throws 
IOException {
+  // fill the buffer with the next RPC message
+  if (unwrappedRpcBuffer.remaining() == 0) {
+readNextRpcPacket();
   }
+  // satisfy as much of the request as possible
+  int readLen = Math.min(len, unwrappedRpcBuffer.remaining());
+  unwrappedRpcBuffer.get(buf, off, readLen);
+  return readLen;
 }
 
 // all messages must be RPC SASL wrapped, else an exception is thrown



hadoop git commit: HADOOP-10651. Add ability to restrict service access using IP addresses and hostnames. (Benoy Antony)

2015-01-08 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/trunk dc2eaa26b -> 20625c8f0


HADOOP-10651. Add ability to restrict service access using IP addresses and 
hostnames. (Benoy Antony)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/20625c8f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/20625c8f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/20625c8f

Branch: refs/heads/trunk
Commit: 20625c8f048701c9516da159b24c0b33983e4bb7
Parents: dc2eaa2
Author: Benoy Antony <be...@apache.org>
Authored: Thu Jan 8 10:06:48 2015 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Thu Jan 8 10:06:48 2015 -0800

--
 .../authorize/ServiceAuthorizationManager.java  |  58 +-
 .../org/apache/hadoop/util/MachineList.java |   3 +-
 .../src/site/apt/ServiceLevelAuth.apt.vm|  25 +++
 .../authorize/TestServiceAuthorization.java | 189 +--
 4 files changed, 259 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/20625c8f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
index 272538a..5d29516 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.MachineList;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -44,6 +45,7 @@ import com.google.common.annotations.VisibleForTesting;
 @InterfaceStability.Evolving
 public class ServiceAuthorizationManager {
   static final String BLOCKED = ".blocked";
+  static final String HOSTS = ".hosts";
 
   private static final String HADOOP_POLICY_FILE = "hadoop-policy.xml";
 
@@ -51,6 +53,10 @@ public class ServiceAuthorizationManager {
   // and second ACL specifies blocked entries.
  private volatile Map<Class<?>, AccessControlList[]> protocolToAcls =
new IdentityHashMap<Class<?>, AccessControlList[]>();
+  // For each class, first MachineList in the array specifies the allowed 
entries
+  // and second MachineList specifies blocked entries.
+  private volatile Map<Class<?>, MachineList[]> protocolToMachineLists =
+new IdentityHashMap<Class<?>, MachineList[]>();
   
   /**
* Configuration key for controlling service-level authorization for Hadoop.
@@ -85,7 +91,8 @@ public class ServiceAuthorizationManager {
InetAddress addr
) throws AuthorizationException {
 AccessControlList[] acls = protocolToAcls.get(protocol);
-if (acls == null) {
+MachineList[] hosts = protocolToMachineLists.get(protocol);
+if (acls == null || hosts == null) {
   throw new AuthorizationException("Protocol " + protocol + 
" is not known.");
 }
@@ -115,6 +122,16 @@ public class ServiceAuthorizationManager {
" is not authorized for protocol " + protocol + 
   ", expected client Kerberos principal is " + clientPrincipal);
 }
+if (addr != null) {
+  String hostAddress = addr.getHostAddress();
+  if (hosts.length != 2 || !hosts[0].includes(hostAddress) ||
+  hosts[1].includes(hostAddress)) {
+AUDITLOG.warn(AUTHZ_FAILED_FOR + " for protocol=" + protocol
++ " from host = " + hostAddress);
+throw new AuthorizationException("Host " + hostAddress +
+" is not authorized for protocol " + protocol);
+  }
+}
 AUDITLOG.info(AUTHZ_SUCCESSFUL_FOR + user + " for protocol=" + protocol);
   }
 
@@ -135,6 +152,8 @@ public class ServiceAuthorizationManager {
   PolicyProvider provider) {
 final Map<Class<?>, AccessControlList[]> newAcls =
   new IdentityHashMap<Class<?>, AccessControlList[]>();
+final Map<Class<?>, MachineList[]> newMachineLists =
+  new IdentityHashMap<Class<?>, MachineList[]>();
 
 String defaultAcl = conf.get(
 
CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_DEFAULT_ACL,
@@ -143,6 +162,13 @@ public class ServiceAuthorizationManager {
 String defaultBlockedAcl = conf.get

hadoop git commit: HADOOP-10651. Add ability to restrict service access using IP addresses and hostnames. (Benoy Antony)

2015-01-08 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 d2fbba790 -> 832ae27f8


HADOOP-10651. Add ability to restrict service access using IP addresses and 
hostnames. (Benoy Antony)

(cherry picked from commit 20625c8f048701c9516da159b24c0b33983e4bb7)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/832ae27f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/832ae27f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/832ae27f

Branch: refs/heads/branch-2
Commit: 832ae27f83856dfec1eb8f898fb1f9cfd04a1877
Parents: d2fbba7
Author: Benoy Antony <be...@apache.org>
Authored: Thu Jan 8 10:06:48 2015 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Thu Jan 8 10:09:55 2015 -0800

--
 .../authorize/ServiceAuthorizationManager.java  |  58 +-
 .../org/apache/hadoop/util/MachineList.java |   3 +-
 .../src/site/apt/ServiceLevelAuth.apt.vm|  25 +++
 .../authorize/TestServiceAuthorization.java | 189 +--
 4 files changed, 259 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/832ae27f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
index 272538a..5d29516 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.MachineList;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -44,6 +45,7 @@ import com.google.common.annotations.VisibleForTesting;
 @InterfaceStability.Evolving
 public class ServiceAuthorizationManager {
   static final String BLOCKED = ".blocked";
+  static final String HOSTS = ".hosts";
 
   private static final String HADOOP_POLICY_FILE = "hadoop-policy.xml";
 
@@ -51,6 +53,10 @@ public class ServiceAuthorizationManager {
   // and second ACL specifies blocked entries.
  private volatile Map<Class<?>, AccessControlList[]> protocolToAcls =
new IdentityHashMap<Class<?>, AccessControlList[]>();
+  // For each class, first MachineList in the array specifies the allowed 
entries
+  // and second MachineList specifies blocked entries.
+  private volatile Map<Class<?>, MachineList[]> protocolToMachineLists =
+new IdentityHashMap<Class<?>, MachineList[]>();
   
   /**
* Configuration key for controlling service-level authorization for Hadoop.
@@ -85,7 +91,8 @@ public class ServiceAuthorizationManager {
InetAddress addr
) throws AuthorizationException {
 AccessControlList[] acls = protocolToAcls.get(protocol);
-if (acls == null) {
+MachineList[] hosts = protocolToMachineLists.get(protocol);
+if (acls == null || hosts == null) {
   throw new AuthorizationException("Protocol " + protocol + 
" is not known.");
 }
@@ -115,6 +122,16 @@ public class ServiceAuthorizationManager {
" is not authorized for protocol " + protocol + 
   ", expected client Kerberos principal is " + clientPrincipal);
 }
+if (addr != null) {
+  String hostAddress = addr.getHostAddress();
+  if (hosts.length != 2 || !hosts[0].includes(hostAddress) ||
+  hosts[1].includes(hostAddress)) {
+AUDITLOG.warn(AUTHZ_FAILED_FOR + " for protocol=" + protocol
++ " from host = " + hostAddress);
+throw new AuthorizationException("Host " + hostAddress +
+" is not authorized for protocol " + protocol);
+  }
+}
 AUDITLOG.info(AUTHZ_SUCCESSFUL_FOR + user + " for protocol=" + protocol);
   }
 
@@ -135,6 +152,8 @@ public class ServiceAuthorizationManager {
   PolicyProvider provider) {
 final Map<Class<?>, AccessControlList[]> newAcls =
   new IdentityHashMap<Class<?>, AccessControlList[]>();
+final Map<Class<?>, MachineList[]> newMachineLists =
+  new IdentityHashMap<Class<?>, MachineList[]>();
 
 String defaultAcl = conf.get(
 
CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_AUTHORIZATION_DEFAULT_ACL,
@@ -143,6 +162,13 @@ public class ServiceAuthorizationManager {
 String defaultBlockedAcl

hadoop git commit: HADOOP-11402. Negative user-to-group cache entries are never cleared for never-again-accessed users. Contributed by Varun Saxena.

2015-01-05 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/trunk 94d342e60 -> 53caeaa16


HADOOP-11402. Negative user-to-group cache entries are never cleared for 
never-again-accessed users. Contributed by Varun Saxena.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/53caeaa1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/53caeaa1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/53caeaa1

Branch: refs/heads/trunk
Commit: 53caeaa16b1450b54e994c77f5d0c8a767b88d57
Parents: 94d342e
Author: Benoy Antony <be...@apache.org>
Authored: Mon Jan 5 15:06:46 2015 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Mon Jan 5 15:06:46 2015 -0800

--
 .../java/org/apache/hadoop/security/Groups.java | 36 +--
 .../hadoop/security/TestGroupsCaching.java  | 48 
 2 files changed, 71 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/53caeaa1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
index f3c5094..9fd39b0 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
@@ -23,12 +23,14 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
+import java.util.Set;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Ticker;
 import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.Cache;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
 import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -60,14 +62,13 @@ public class Groups {
   private final GroupMappingServiceProvider impl;
 
  private final LoadingCache<String, List<String>> cache;
-  private final ConcurrentHashMap<String, Long> negativeCacheMask =
-new ConcurrentHashMap<String, Long>();
  private final Map<String, List<String>> staticUserToGroupsMap =
  new HashMap<String, List<String>>();
   private final long cacheTimeout;
   private final long negativeCacheTimeout;
   private final long warningDeltaMs;
   private final Timer timer;
+  private Set<String> negativeCache;
 
   public Groups(Configuration conf) {
 this(conf, new Timer());
@@ -99,11 +100,24 @@ public class Groups {
   .expireAfterWrite(10 * cacheTimeout, TimeUnit.MILLISECONDS)
   .build(new GroupCacheLoader());
 
+if (negativeCacheTimeout > 0) {
+  Cache<String, Boolean> tempMap = CacheBuilder.newBuilder()
+.expireAfterWrite(negativeCacheTimeout, TimeUnit.MILLISECONDS)
+.ticker(new TimerToTickerAdapter(timer))
+.build();
+  negativeCache = Collections.newSetFromMap(tempMap.asMap());
+}
+
 if(LOG.isDebugEnabled())
   LOG.debug("Group mapping impl=" + impl.getClass().getName() +
   "; cacheTimeout=" + cacheTimeout + "; warningDeltaMs=" +
   warningDeltaMs);
   }
+  
+  @VisibleForTesting
+  Set<String> getNegativeCache() {
+return negativeCache;
+  }
 
   /*
* Parse the hadoop.user.group.static.mapping.overrides configuration to
@@ -159,13 +173,8 @@ public class Groups {
 
 // Check the negative cache first
 if (isNegativeCacheEnabled()) {
-  Long expirationTime = negativeCacheMask.get(user);
-  if (expirationTime != null) {
-if (timer.monotonicNow() < expirationTime) {
-  throw noGroupsForUser(user);
-} else {
-  negativeCacheMask.remove(user, expirationTime);
-}
+  if (negativeCache.contains(user)) {
+throw noGroupsForUser(user);
   }
 }
 
@@ -212,8 +221,7 @@ public class Groups {
 
   if (groups.isEmpty()) {
 if (isNegativeCacheEnabled()) {
-  long expirationTime = timer.monotonicNow() + negativeCacheTimeout;
-  negativeCacheMask.put(user, expirationTime);
+  negativeCache.add(user);
 }
 
 // We throw here to prevent Cache from retaining an empty group
@@ -252,7 +260,9 @@ public class Groups {
   LOG.warn("Error refreshing groups cache", e);
 }
 cache.invalidateAll();
-negativeCacheMask.clear();
+if(isNegativeCacheEnabled()) {
+  negativeCache.clear();
+}
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/53caeaa1/hadoop-common-project/hadoop-common/src/test
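
The core trick of the patch: back the negative cache with a Guava Cache using expireAfterWrite and expose it as a Set through Collections.newSetFromMap, so entries for never-again-accessed users age out on their own instead of waiting for a lookup to evict them. A runnable distillation (requires Guava on the classpath; the 100 ms timeout is just for demonstration):

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.util.Collections;
import java.util.Set;
import java.util.concurrent.TimeUnit;

public class SelfExpiringSetSketch {
  public static void main(String[] args) throws InterruptedException {
    Cache<String, Boolean> backing = CacheBuilder.newBuilder()
        .expireAfterWrite(100, TimeUnit.MILLISECONDS)
        .build();
    // The Set view writes Boolean.TRUE into the cache; expiry applies as usual.
    Set<String> negativeCache = Collections.newSetFromMap(backing.asMap());

    negativeCache.add("ghost-user");
    System.out.println(negativeCache.contains("ghost-user")); // true
    Thread.sleep(200); // let the entry age out
    System.out.println(negativeCache.contains("ghost-user")); // false
  }
}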

hadoop git commit: HADOOP-11402. Negative user-to-group cache entries are never cleared for never-again-accessed users. Contributed by Varun Saxena.

2015-01-05 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 d070597a0 -> 0c2d996c2


HADOOP-11402. Negative user-to-group cache entries are never cleared for 
never-again-accessed users. Contributed by Varun Saxena.

(cherry picked from commit 53caeaa16b1450b54e994c77f5d0c8a767b88d57)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0c2d996c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0c2d996c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0c2d996c

Branch: refs/heads/branch-2
Commit: 0c2d996c2ce2f3e1c959a04f15bb6fb0ba9c4b1f
Parents: d070597
Author: Benoy Antony <be...@apache.org>
Authored: Mon Jan 5 15:06:46 2015 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Mon Jan 5 15:09:19 2015 -0800

--
 .../java/org/apache/hadoop/security/Groups.java | 36 +--
 .../hadoop/security/TestGroupsCaching.java  | 48 
 2 files changed, 71 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2d996c/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
index f3c5094..9fd39b0 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Groups.java
@@ -23,12 +23,14 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
+import java.util.Set;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Ticker;
 import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.Cache;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
 import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -60,14 +62,13 @@ public class Groups {
   private final GroupMappingServiceProvider impl;
 
  private final LoadingCache<String, List<String>> cache;
-  private final ConcurrentHashMap<String, Long> negativeCacheMask =
-new ConcurrentHashMap<String, Long>();
  private final Map<String, List<String>> staticUserToGroupsMap =
  new HashMap<String, List<String>>();
   private final long cacheTimeout;
   private final long negativeCacheTimeout;
   private final long warningDeltaMs;
   private final Timer timer;
+  private Set<String> negativeCache;
 
   public Groups(Configuration conf) {
 this(conf, new Timer());
@@ -99,11 +100,24 @@ public class Groups {
   .expireAfterWrite(10 * cacheTimeout, TimeUnit.MILLISECONDS)
   .build(new GroupCacheLoader());
 
+if (negativeCacheTimeout > 0) {
+  Cache<String, Boolean> tempMap = CacheBuilder.newBuilder()
+.expireAfterWrite(negativeCacheTimeout, TimeUnit.MILLISECONDS)
+.ticker(new TimerToTickerAdapter(timer))
+.build();
+  negativeCache = Collections.newSetFromMap(tempMap.asMap());
+}
+
 if(LOG.isDebugEnabled())
   LOG.debug("Group mapping impl=" + impl.getClass().getName() +
   "; cacheTimeout=" + cacheTimeout + "; warningDeltaMs=" +
   warningDeltaMs);
   }
+  
+  @VisibleForTesting
+  Set<String> getNegativeCache() {
+return negativeCache;
+  }
 
   /*
* Parse the hadoop.user.group.static.mapping.overrides configuration to
@@ -159,13 +173,8 @@ public class Groups {
 
 // Check the negative cache first
 if (isNegativeCacheEnabled()) {
-  Long expirationTime = negativeCacheMask.get(user);
-  if (expirationTime != null) {
-if (timer.monotonicNow() < expirationTime) {
-  throw noGroupsForUser(user);
-} else {
-  negativeCacheMask.remove(user, expirationTime);
-}
+  if (negativeCache.contains(user)) {
+throw noGroupsForUser(user);
   }
 }
 
@@ -212,8 +221,7 @@ public class Groups {
 
   if (groups.isEmpty()) {
 if (isNegativeCacheEnabled()) {
-  long expirationTime = timer.monotonicNow() + negativeCacheTimeout;
-  negativeCacheMask.put(user, expirationTime);
+  negativeCache.add(user);
 }
 
 // We throw here to prevent Cache from retaining an empty group
@@ -252,7 +260,9 @@ public class Groups {
   LOG.warn("Error refreshing groups cache", e);
 }
 cache.invalidateAll();
-negativeCacheMask.clear();
+if(isNegativeCacheEnabled()) {
+  negativeCache.clear();
+}
   }
 
   /**

http://git-wip-us.apache.org

hadoop git commit: HADOOP-10852 Fix thread safety issues in NetgroupCache. (Benoy Antony)

2014-12-15 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/trunk e597249d3 -> a095622f3


HADOOP-10852 Fix thread safety issues in NetgroupCache. (Benoy Antony)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a095622f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a095622f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a095622f

Branch: refs/heads/trunk
Commit: a095622f36c5e9fff3ec02b14b800038a81f6286
Parents: e597249
Author: Benoy Antony <be...@apache.org>
Authored: Mon Dec 15 14:00:25 2014 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Mon Dec 15 14:00:25 2014 -0800

--
 .../apache/hadoop/security/NetgroupCache.java   | 61 +++-
 1 file changed, 33 insertions(+), 28 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a095622f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
index bd9c448..4495a66 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
@@ -17,11 +17,11 @@
  */
 package org.apache.hadoop.security;
 
+import java.util.Collections;
+import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
-import java.util.HashSet;
 import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -36,14 +36,9 @@ import org.apache.hadoop.classification.InterfaceStability;
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Unstable
 public class NetgroupCache {
-  private static boolean netgroupToUsersMapUpdated = true;
-  private static Map<String, Set<String>> netgroupToUsersMap =
-new ConcurrentHashMap<String, Set<String>>();
-
-  private static Map<String, Set<String>> userToNetgroupsMap =
+  private static ConcurrentHashMap<String, Set<String>> userToNetgroupsMap =
 new ConcurrentHashMap<String, Set<String>>();
 
-
   /**
* Get netgroups for a given user
*
@@ -52,21 +47,11 @@ public class NetgroupCache {
*/
   public static void getNetgroups(final String user,
    List<String> groups) {
-if(netgroupToUsersMapUpdated) {
-  netgroupToUsersMapUpdated = false; // at the beginning to avoid race
-  //update userToNetgroupsMap
-  for(String netgroup : netgroupToUsersMap.keySet()) {
-for(String netuser : netgroupToUsersMap.get(netgroup)) {
-  // add to userToNetgroupsMap
-  if(!userToNetgroupsMap.containsKey(netuser)) {
-userToNetgroupsMap.put(netuser, new HashSet<String>());
-  }
-  userToNetgroupsMap.get(netuser).add(netgroup);
-}
-  }
-}
-if(userToNetgroupsMap.containsKey(user)) {
-  groups.addAll(userToNetgroupsMap.get(user));
+Set<String> userGroups = userToNetgroupsMap.get(user);
+//ConcurrentHashMap does not allow null values; 
+//So null value check can be used to check if the key exists
+if (userGroups != null) {
+  groups.addAll(userGroups);
 }
   }
 
@@ -76,7 +61,15 @@ public class NetgroupCache {
* @return list of cached groups
*/
   public static List<String> getNetgroupNames() {
-return new LinkedList<String>(netgroupToUsersMap.keySet());
+return new LinkedList<String>(getGroups());
+  }
+
+  private static Set<String> getGroups() {
+Set<String> allGroups = new HashSet<String>();
+for (Set<String> userGroups : userToNetgroupsMap.values()) {
+  allGroups.addAll(userGroups);
+}
+return allGroups;
   }
 
   /**
@@ -86,14 +79,13 @@ public class NetgroupCache {
* @return true if group is cached, false otherwise
*/
   public static boolean isCached(String group) {
-return netgroupToUsersMap.containsKey(group);
+return getGroups().contains(group);
   }
 
   /**
* Clear the cache
*/
   public static void clear() {
-netgroupToUsersMap.clear();
 userToNetgroupsMap.clear();
   }
 
@@ -104,7 +96,20 @@ public class NetgroupCache {
* @param users list of users for a given group
*/
   public static void add(String group, List<String> users) {
-netgroupToUsersMap.put(group, new HashSet<String>(users));
-netgroupToUsersMapUpdated = true; // at the end to avoid race
+for (String user : users) {
+  Set<String> userGroups = userToNetgroupsMap.get(user);
+  // ConcurrentHashMap does not allow null values; 
+  // So null value check can be used to check
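The archive truncates the diff at this point, but the surviving lines show the shape of the fix: the racy pair of static maps plus the netgroupToUsersMapUpdated flag are replaced by a single ConcurrentHashMap keyed by user. A hedged sketch of how such an add() is typically completed with putIfAbsent follows; the remainder of the actual patch is not shown above, so treat this body as an assumption rather than the committed code.

import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

public class ConcurrentMultimapSketch {
  private static final ConcurrentHashMap<String, Set<String>> userToNetgroupsMap =
      new ConcurrentHashMap<String, Set<String>>();

  // Hypothetical completion of add(): associate each user with the group
  // without holding a lock across the whole update.
  public static void add(String group, List<String> users) {
    for (String user : users) {
      Set<String> userGroups = userToNetgroupsMap.get(user);
      if (userGroups == null) {
        // A concurrent set backed by the key set of a ConcurrentHashMap.
        Set<String> fresh =
            Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
        // putIfAbsent settles the race when two threads create the set at once.
        Set<String> existing = userToNetgroupsMap.putIfAbsent(user, fresh);
        userGroups = (existing == null) ? fresh : existing;
      }
      userGroups.add(group);
    }
  }
}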

hadoop git commit: HADOOP-10852 Fix thread safety issues in NetgroupCache. (Benoy Antony)

2014-12-15 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 b1ccf68db -> 2eabbae29


HADOOP-10852 Fix thread safety issues in NetgroupCache. (Benoy Antony)

(cherry picked from commit a095622f36c5e9fff3ec02b14b800038a81f6286)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2eabbae2
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2eabbae2
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2eabbae2

Branch: refs/heads/branch-2
Commit: 2eabbae29288dd597395648529f4db04d95265cf
Parents: b1ccf68
Author: Benoy Antony <be...@apache.org>
Authored: Mon Dec 15 14:00:25 2014 -0800
Committer: Benoy Antony <be...@apache.org>
Committed: Mon Dec 15 14:02:15 2014 -0800

--
 .../apache/hadoop/security/NetgroupCache.java   | 61 +++-
 1 file changed, 33 insertions(+), 28 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2eabbae2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
index bd9c448..4495a66 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/NetgroupCache.java
@@ -17,11 +17,11 @@
  */
 package org.apache.hadoop.security;
 
+import java.util.Collections;
+import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.Map;
 import java.util.Set;
-import java.util.HashSet;
 import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -36,14 +36,9 @@ import org.apache.hadoop.classification.InterfaceStability;
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Unstable
 public class NetgroupCache {
-  private static boolean netgroupToUsersMapUpdated = true;
-  private static Map<String, Set<String>> netgroupToUsersMap =
-new ConcurrentHashMap<String, Set<String>>();
-
-  private static Map<String, Set<String>> userToNetgroupsMap =
+  private static ConcurrentHashMap<String, Set<String>> userToNetgroupsMap =
 new ConcurrentHashMap<String, Set<String>>();
 
-
   /**
* Get netgroups for a given user
*
@@ -52,21 +47,11 @@ public class NetgroupCache {
*/
   public static void getNetgroups(final String user,
    List<String> groups) {
-if(netgroupToUsersMapUpdated) {
-  netgroupToUsersMapUpdated = false; // at the beginning to avoid race
-  //update userToNetgroupsMap
-  for(String netgroup : netgroupToUsersMap.keySet()) {
-for(String netuser : netgroupToUsersMap.get(netgroup)) {
-  // add to userToNetgroupsMap
-  if(!userToNetgroupsMap.containsKey(netuser)) {
-userToNetgroupsMap.put(netuser, new HashSet<String>());
-  }
-  userToNetgroupsMap.get(netuser).add(netgroup);
-}
-  }
-}
-if(userToNetgroupsMap.containsKey(user)) {
-  groups.addAll(userToNetgroupsMap.get(user));
+Set<String> userGroups = userToNetgroupsMap.get(user);
+//ConcurrentHashMap does not allow null values; 
+//So null value check can be used to check if the key exists
+if (userGroups != null) {
+  groups.addAll(userGroups);
 }
   }
 
@@ -76,7 +61,15 @@ public class NetgroupCache {
* @return list of cached groups
*/
   public static List<String> getNetgroupNames() {
-return new LinkedList<String>(netgroupToUsersMap.keySet());
+return new LinkedList<String>(getGroups());
+  }
+
+  private static Set<String> getGroups() {
+Set<String> allGroups = new HashSet<String>();
+for (Set<String> userGroups : userToNetgroupsMap.values()) {
+  allGroups.addAll(userGroups);
+}
+return allGroups;
   }
 
   /**
@@ -86,14 +79,13 @@ public class NetgroupCache {
* @return true if group is cached, false otherwise
*/
   public static boolean isCached(String group) {
-return netgroupToUsersMap.containsKey(group);
+return getGroups().contains(group);
   }
 
   /**
* Clear the cache
*/
   public static void clear() {
-netgroupToUsersMap.clear();
 userToNetgroupsMap.clear();
   }
 
@@ -104,7 +96,20 @@ public class NetgroupCache {
* @param users list of users for a given group
*/
   public static void add(String group, List<String> users) {
-netgroupToUsersMap.put(group, new HashSet<String>(users));
-netgroupToUsersMapUpdated = true; // at the end to avoid race
+for (String user : users) {
+  Set<String> userGroups = userToNetgroupsMap.get(user);
+  // ConcurrentHashMap does

git commit: HDFS-7184. Allow data migration tool to run as a daemon. (Benoy Antony)

2014-10-20 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/trunk d5084b9fa -> e4d6a8785


HDFS-7184. Allow data migration tool to run as a daemon. (Benoy Antony)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e4d6a878
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e4d6a878
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e4d6a878

Branch: refs/heads/trunk
Commit: e4d6a878541cc07fada2bd07dedc4740570a472e
Parents: d5084b9
Author: Benoy Antony <be...@apache.org>
Authored: Mon Oct 20 12:15:58 2014 -0700
Committer: Benoy Antony <be...@apache.org>
Committed: Mon Oct 20 12:15:58 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e4d6a878/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
index a5cb73c..e88ae08 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
@@ -164,6 +164,7 @@ case ${COMMAND} in
 CLASS=org.apache.hadoop.hdfs.tools.snapshot.LsSnapshottableDir
   ;;
   mover)
+daemon=true
 CLASS=org.apache.hadoop.hdfs.server.mover.Mover
 hadoop_debug "Appending HADOOP_MOVER_OPTS onto HADOOP_OPTS"
 HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_MOVER_OPTS}"



git commit: HDFS-6799. The invalidate method in SimulatedFSDataset failed to remove (invalidate) blocks from the file system. Contributed by Megasthenis Asteris.

2014-09-16 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/trunk 7971c97ec -> 02adf7185


HDFS-6799. The invalidate method in SimulatedFSDataset failed to remove 
(invalidate) blocks from the file system. Contributed by Megasthenis Asteris.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/02adf718
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/02adf718
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/02adf718

Branch: refs/heads/trunk
Commit: 02adf7185de626492bba2b3718959457e958a7be
Parents: 7971c97
Author: Benoy Antony <be...@apache.org>
Authored: Tue Sep 16 16:24:23 2014 -0700
Committer: Benoy Antony <be...@apache.org>
Committed: Tue Sep 16 16:24:23 2014 -0700

--
 .../hadoop/hdfs/server/datanode/SimulatedFSDataset.java   | 7 ---
 1 file changed, 4 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/02adf718/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
index 8ad4510..d0fad6e 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
@@ -482,9 +482,10 @@ public class SimulatedFSDataset implements 
FsDatasetSpi<FsVolumeSpi> {
   }
 
   @Override // FsDatasetSpi
-  public synchronized void unfinalizeBlock(ExtendedBlock b) {
+  public synchronized void unfinalizeBlock(ExtendedBlock b) throws IOException{
 if (isValidRbw(b)) {
-  blockMap.remove(b.getLocalBlock());
+  final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
+  map.remove(b.getLocalBlock());
 }
   }
 
@@ -624,7 +625,7 @@ public class SimulatedFSDataset implements 
FsDatasetSpi<FsVolumeSpi> {
 continue;
   }
   storage.free(bpid, binfo.getNumBytes());
-  blockMap.remove(b);
+  map.remove(b);
 }
 if (error) {
   throw new IOException("Invalidate: Missing blocks.");
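The underlying bug: invalidate() and unfinalizeBlock() kept operating on a single shared blockMap field even though SimulatedFSDataset maintains one block map per block pool, so blocks outside that one map were never removed. Below is a minimal sketch of the keyed-lookup pattern the fix applies; the types are simplified, and getMap plus the fields are stand-ins for the real SimulatedFSDataset internals, not its API.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

public class PerPoolMapSketch {
  // One block map per block pool, keyed by block pool id (bpid).
  private final Map<String, Map<Long, String>> blockMaps =
      new HashMap<String, Map<Long, String>>();

  // Stand-in for the getMap(bpid) helper: fail fast on an unknown pool.
  private Map<Long, String> getMap(String bpid) throws IOException {
    Map<Long, String> map = blockMaps.get(bpid);
    if (map == null) {
      throw new IOException("Non-existent block pool " + bpid);
    }
    return map;
  }

  // The fix in miniature: remove from the pool's own map, never from a
  // field that may point at a stale or wrong pool's map.
  public synchronized void invalidate(String bpid, long blockId) throws IOException {
    getMap(bpid).remove(blockId);
  }
}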



git commit: HDFS-6799. The invalidate method in SimulatedFSDataset failed to remove (invalidate) blocks from the file system. Contributed by Megasthenis Asteris.

2014-09-16 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 5d897026e -> 215216abe


HDFS-6799. The invalidate method in SimulatedFSDataset failed to remove 
(invalidate) blocks from the file system. Contributed by Megasthenis Asteris.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/215216ab
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/215216ab
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/215216ab

Branch: refs/heads/branch-2
Commit: 215216abeb657de2d0ac8df52e7af5c4a04ba8cb
Parents: 5d89702
Author: Benoy Antony <be...@apache.org>
Authored: Tue Sep 16 16:24:23 2014 -0700
Committer: Benoy Antony <be...@apache.org>
Committed: Tue Sep 16 16:27:25 2014 -0700

--
 .../hadoop/hdfs/server/datanode/SimulatedFSDataset.java   | 7 ---
 1 file changed, 4 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/215216ab/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
index 8ad4510..d0fad6e 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java
@@ -482,9 +482,10 @@ public class SimulatedFSDataset implements 
FsDatasetSpiFsVolumeSpi {
   }
 
   @Override // FsDatasetSpi
-  public synchronized void unfinalizeBlock(ExtendedBlock b) {
+  public synchronized void unfinalizeBlock(ExtendedBlock b) throws IOException{
 if (isValidRbw(b)) {
-  blockMap.remove(b.getLocalBlock());
+  final Map<Block, BInfo> map = getMap(b.getBlockPoolId());
+  map.remove(b.getLocalBlock());
 }
   }
 
@@ -624,7 +625,7 @@ public class SimulatedFSDataset implements 
FsDatasetSpi<FsVolumeSpi> {
 continue;
   }
   storage.free(bpid, binfo.getNumBytes());
-  blockMap.remove(b);
+  map.remove(b);
 }
 if (error) {
   throw new IOException("Invalidate: Missing blocks.");



git commit: HADOOP-10833. Remove unused cache in UserProvider. (Benoy Antony)

2014-08-30 Thread benoy
Repository: hadoop
Updated Branches:
  refs/heads/trunk 0f34e6f38 -> 258c7d0f5


HADOOP-10833. Remove unused cache in UserProvider. (Benoy Antony)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/258c7d0f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/258c7d0f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/258c7d0f

Branch: refs/heads/trunk
Commit: 258c7d0f53fbdf4b0b9cae901701176e3e70c4fe
Parents: 0f34e6f
Author: Benoy Antony <be...@apache.org>
Authored: Sat Aug 30 12:49:19 2014 -0700
Committer: Benoy Antony <be...@apache.org>
Committed: Sat Aug 30 12:49:19 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++
 .../java/org/apache/hadoop/security/alias/UserProvider.java | 5 -
 2 files changed, 2 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/258c7d0f/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 3b44b8b..f3ef49d 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -537,6 +537,8 @@ Release 2.6.0 - UNRELEASED
 schedules incoming calls and multiplexes outgoing calls. (Chris Li via
 Arpit Agarwal)
 
+HADOOP-10833. Remove unused cache in UserProvider. (Benoy Antony)
+
   BUG FIXES
 
 HADOOP-10781. Unportable getgrouplist() usage breaks FreeBSD (Dmitry

http://git-wip-us.apache.org/repos/asf/hadoop/blob/258c7d0f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
index 99d6d00..262cbad 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java
@@ -21,9 +21,7 @@ package org.apache.hadoop.security.alias;
 import java.io.IOException;
 import java.net.URI;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
@@ -41,8 +39,6 @@ public class UserProvider extends CredentialProvider {
  public static final String SCHEME_NAME = "user";
   private final UserGroupInformation user;
   private final Credentials credentials;
-  private final Map<String, CredentialEntry> cache = new HashMap<String, 
-  CredentialEntry>();
 
   private UserProvider() throws IOException {
 user = UserGroupInformation.getCurrentUser();
@@ -86,7 +82,6 @@ public class UserProvider extends CredentialProvider {
   throw new IOException("Credential " + name +
" does not exist in " + this);
 }
-cache.remove(name);
   }
 
   @Override



svn commit: r1619953 - in /hadoop/common/site/main: author/src/documentation/content/xdocs/who.xml publish/who.html publish/who.pdf

2014-08-22 Thread benoy
Author: benoy
Date: Sat Aug 23 00:11:00 2014
New Revision: 1619953

URL: http://svn.apache.org/r1619953
Log:
Added benoy to committers list.

Modified:
hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml
hadoop/common/site/main/publish/who.html
hadoop/common/site/main/publish/who.pdf

Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml?rev=1619953&r1=1619952&r2=1619953&view=diff
==
--- hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml 
(original)
+++ hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml Sat 
Aug 23 00:11:00 2014
@@ -80,6 +80,14 @@
   </tr>
 
   <tr>
+<td>benoy</td>
+<td><a href="http://people.apache.org/~benoy">Benoy Antony</a></td>
+<td>eBay</td>
+<td></td>
+<td>-8</td>
+  </tr>
+
+  <tr>
 <td>bikas</td>
 <td>Bikas Saha</td>
 <td>Hortonworks</td>

Modified: hadoop/common/site/main/publish/who.html
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/who.html?rev=1619953&r1=1619952&r2=1619953&view=diff
==
--- hadoop/common/site/main/publish/who.html (original)
+++ hadoop/common/site/main/publish/who.html Sat Aug 23 00:11:00 2014
@@ -341,6 +341,17 @@ document.write("Last Published: " + docu
   
 <tr>
 
+<td colspan="1" rowspan="1">benoy</td>
+<td colspan="1" rowspan="1"><a 
href="http://people.apache.org/~benoy">Benoy Antony</a></td>
+<td colspan="1" rowspan="1">eBay</td>
+<td colspan="1" rowspan="1"></td>
+<td colspan="1" rowspan="1">-8</td>
+  
+</tr>
+
+  
+<tr>
+
 <td colspan="1" rowspan="1">bikas</td>
 <td colspan="1" rowspan="1">Bikas Saha</td>
 <td colspan="1" rowspan="1">Hortonworks</td>
@@ -849,7 +860,7 @@ document.write("Last Published: " + docu
 </div>
 
 
-<a name="N105BC"></a><a name="Emeritus+Hadoop+PMC+Members"></a>
+<a name="N105D9"></a><a name="Emeritus+Hadoop+PMC+Members"></a>
 <h2 class="h3">Emeritus Hadoop PMC Members</h2>
 <div class="section">
 <ul>
@@ -864,7 +875,7 @@ document.write("Last Published: " + docu
 </div>
 

-<a name="N105CF"></a><a name="Hadoop+Committers"></a>
+<a name="N105EC"></a><a name="Hadoop+Committers"></a>
 <h2 class="h3">Hadoop Committers</h2>
 <div class="section">
 <p>Hadoop's active committers include:</p>
@@ -1755,7 +1766,7 @@ document.write("Last Published: " + docu
 </div>
 

-<a name="N10EC3"></a><a name="Emeritus+Hadoop+Committers"></a>
+<a name="N10EE0"></a><a name="Emeritus+Hadoop+Committers"></a>
 <h2 class="h3">Emeritus Hadoop Committers</h2>
 <div class="section">
 <p>Hadoop committers who are no longer active include:</p>
Modified: hadoop/common/site/main/publish/who.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/who.pdf?rev=1619953&r1=1619952&r2=1619953&view=diff
==
Binary files - no diff available.




svn commit: r1619959 - in /hadoop/common/site/main: author/src/documentation/content/xdocs/who.xml publish/who.html publish/who.pdf

2014-08-22 Thread benoy
Author: benoy
Date: Sat Aug 23 03:45:59 2014
New Revision: 1619959

URL: http://svn.apache.org/r1619959
Log:
Added benoy to committers list. Removed from PMC list.

Modified:
hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml
hadoop/common/site/main/publish/who.html
hadoop/common/site/main/publish/who.pdf

Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml?rev=1619959&r1=1619958&r2=1619959&view=diff
==
--- hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml 
(original)
+++ hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml Sat 
Aug 23 03:45:59 2014
@@ -80,14 +80,6 @@
   </tr>
 
   <tr>
-<td>benoy</td>
-<td><a href="http://people.apache.org/~benoy">Benoy Antony</a></td>
-<td>eBay</td>
-<td></td>
-<td>-8</td>
-  </tr>
-
-  <tr>
 <td>bikas</td>
 <td>Bikas Saha</td>
 <td>Hortonworks</td>
@@ -546,6 +538,14 @@
  <td></td>
  <td>-8</td>
</tr>
+   
+   <tr>
+ <td>benoy</td>
+ <td><a href="http://people.apache.org/~benoy">Benoy Antony</a></td>
+ <td>eBay</td>
+ <td></td>
+ <td>-8</td>
+   </tr>
 
<tr>
  <td>bikas</td>

Modified: hadoop/common/site/main/publish/who.html
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/who.html?rev=1619959&r1=1619958&r2=1619959&view=diff
==
--- hadoop/common/site/main/publish/who.html (original)
+++ hadoop/common/site/main/publish/who.html Sat Aug 23 03:45:59 2014
@@ -341,17 +341,6 @@ document.write("Last Published: " + docu
   
 <tr>
 
-<td colspan="1" rowspan="1">benoy</td>
-<td colspan="1" rowspan="1"><a 
href="http://people.apache.org/~benoy">Benoy Antony</a></td>
-<td colspan="1" rowspan="1">eBay</td>
-<td colspan="1" rowspan="1"></td>
-<td colspan="1" rowspan="1">-8</td>
-  
-</tr>
-
-  
-<tr>
-
 <td colspan="1" rowspan="1">bikas</td>
 <td colspan="1" rowspan="1">Bikas Saha</td>
 <td colspan="1" rowspan="1">Hortonworks</td>
@@ -860,7 +849,7 @@ document.write("Last Published: " + docu
 </div>
 
 
-<a name="N105D9"></a><a name="Emeritus+Hadoop+PMC+Members"></a>
+<a name="N105BC"></a><a name="Emeritus+Hadoop+PMC+Members"></a>
 <h2 class="h3">Emeritus Hadoop PMC Members</h2>
 <div class="section">
 <ul>
@@ -875,7 +864,7 @@ document.write("Last Published: " + docu
 </div>
 

-<a name="N105EC"></a><a name="Hadoop+Committers"></a>
+<a name="N105CF"></a><a name="Hadoop+Committers"></a>
 <h2 class="h3">Hadoop Committers</h2>
 <div class="section">
 <p>Hadoop's active committers include:</p>
@@ -979,6 +968,17 @@ document.write("Last Published: " + docu
  <td colspan="1" rowspan="1">-8</td>

 </tr>
+   
+   
+<tr>
+ 
+<td colspan="1" rowspan="1">benoy</td>
+ <td colspan="1" rowspan="1"><a 
href="http://people.apache.org/~benoy">Benoy Antony</a></td>
+ <td colspan="1" rowspan="1">eBay</td>
+ <td colspan="1" rowspan="1"></td>
+ <td colspan="1" rowspan="1">-8</td>
+   
+</tr>
 

 <tr>

Modified: hadoop/common/site/main/publish/who.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/who.pdf?rev=1619959&r1=1619958&r2=1619959&view=diff
==
Binary files - no diff available.