[2/2] hadoop git commit: HADOOP-11363 Hadoop maven surefire-plugin uses must set heap size.
HADOOP-11363 Hadoop maven surefire-plugin uses must set heap size. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/394c70c2 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/394c70c2 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/394c70c2 Branch: refs/heads/trunk Commit: 394c70c21d1fd6ffa728b982c2b194a80e14b8f8 Parents: db73cc9 Author: Steve Loughran ste...@apache.org Authored: Tue Dec 9 11:04:59 2014 + Committer: Steve Loughran ste...@apache.org Committed: Tue Dec 9 11:05:16 2014 + -- hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++ hadoop-project/pom.xml | 10 -- 2 files changed, 10 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/394c70c2/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 4b998d0..c5aa954 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -524,6 +524,8 @@ Release 2.7.0 - UNRELEASED HADOOP-11329. Add JAVA_LIBRARY_PATH to KMS startup options. (Arun Suresh via wang) +HADOOP-11363 Hadoop maven surefire-plugin uses must set heap size. 
(stevel) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/394c70c2/hadoop-project/pom.xml -- diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 3b52dc3..7c492c8 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -84,6 +84,12 @@ -- enforced.java.version[${javac.version},)/enforced.java.version enforced.maven.version[3.0.2,)/enforced.maven.version + +!-- Plugin versions and config -- +maven-surefire-plugin.argLine-Xmx4096m -XX:MaxPermSize=768m -XX:+HeapDumpOnOutOfMemoryError/maven-surefire-plugin.argLine +maven-surefire-plugin.version2.17/maven-surefire-plugin.version + maven-surefire-report-plugin.version${maven-surefire-plugin.version}/maven-surefire-report-plugin.version + maven-failsafe-plugin.version${maven-surefire-plugin.version}/maven-failsafe-plugin.version /properties dependencyManagement @@ -950,7 +956,7 @@ plugin groupIdorg.apache.maven.plugins/groupId artifactIdmaven-surefire-plugin/artifactId - version2.16/version + version${maven-surefire-plugin.version}/version /plugin plugin groupIdorg.apache.maven.plugins/groupId @@ -1061,7 +1067,7 @@ configuration reuseForksfalse/reuseForks forkedProcessTimeoutInSeconds900/forkedProcessTimeoutInSeconds - argLine-Xmx1024m -XX:+HeapDumpOnOutOfMemoryError/argLine + argLine${maven-surefire-plugin.argLine}/argLine environmentVariables !-- HADOOP_HOME required for tests on Windows to find winutils -- HADOOP_HOME${hadoop.common.build.dir}/HADOOP_HOME
[1/2] hadoop git commit: HADOOP-11363 Hadoop maven surefire-plugin uses must set heap size.
Repository: hadoop Updated Branches: refs/heads/branch-2 9d72b0282 - a7be36384 refs/heads/trunk db73cc912 - 394c70c21 HADOOP-11363 Hadoop maven surefire-plugin uses must set heap size. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a7be3638 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a7be3638 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a7be3638 Branch: refs/heads/branch-2 Commit: a7be36384ae3aa1eed4b08154cb63710110b466d Parents: 9d72b02 Author: Steve Loughran ste...@apache.org Authored: Tue Dec 9 11:04:59 2014 + Committer: Steve Loughran ste...@apache.org Committed: Tue Dec 9 11:04:59 2014 + -- hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++ hadoop-project/pom.xml | 10 -- 2 files changed, 10 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/a7be3638/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index e842fe6..3d13918 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -161,6 +161,8 @@ Release 2.7.0 - UNRELEASED HADOOP-11329. Add JAVA_LIBRARY_PATH to KMS startup options. (Arun Suresh via wang) +HADOOP-11363 Hadoop maven surefire-plugin uses must set heap size. 
(stevel) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/a7be3638/hadoop-project/pom.xml -- diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 76b8645..d50b72c 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -84,6 +84,12 @@ -- enforced.java.version[${javac.version},)/enforced.java.version enforced.maven.version[3.0.2,)/enforced.maven.version + +!-- Plugin versions and config -- +maven-surefire-plugin.argLine-Xmx4096m -XX:MaxPermSize=768m -XX:+HeapDumpOnOutOfMemoryError/maven-surefire-plugin.argLine +maven-surefire-plugin.version2.17/maven-surefire-plugin.version + maven-surefire-report-plugin.version${maven-surefire-plugin.version}/maven-surefire-report-plugin.version + maven-failsafe-plugin.version${maven-surefire-plugin.version}/maven-failsafe-plugin.version /properties dependencyManagement @@ -948,7 +954,7 @@ plugin groupIdorg.apache.maven.plugins/groupId artifactIdmaven-surefire-plugin/artifactId - version2.16/version + version${maven-surefire-plugin.version}/version /plugin plugin groupIdorg.apache.maven.plugins/groupId @@ -1097,7 +1103,7 @@ configuration reuseForksfalse/reuseForks forkedProcessTimeoutInSeconds900/forkedProcessTimeoutInSeconds - argLine-Xmx1024m -XX:+HeapDumpOnOutOfMemoryError/argLine + argLine${maven-surefire-plugin.argLine}/argLine environmentVariables !-- HADOOP_HOME required for tests on Windows to find winutils -- HADOOP_HOME${hadoop.common.build.dir}/HADOOP_HOME
[2/2] hadoop git commit: HADOOP-10134 [JDK8] Fix Javadoc errors caused by incorrect or illegal tags in doc comments.
HADOOP-10134 [JDK8] Fix Javadoc errors caused by incorrect or illegal tags in doc comments. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f71eb51a Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f71eb51a Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f71eb51a Branch: refs/heads/trunk Commit: f71eb51ab8109c14e8e921751dd5de603bdf2bde Parents: 394c70c Author: Steve Loughran ste...@apache.org Authored: Tue Dec 9 11:15:19 2014 + Committer: Steve Loughran ste...@apache.org Committed: Tue Dec 9 11:15:35 2014 + -- .../authentication/client/AuthenticatedURL.java | 13 +++--- .../authentication/client/Authenticator.java| 2 +- .../client/KerberosAuthenticator.java | 10 ++--- .../client/PseudoAuthenticator.java | 10 ++--- .../AltKerberosAuthenticationHandler.java | 5 +-- .../server/AuthenticationFilter.java| 46 ++-- .../server/AuthenticationHandler.java | 21 - .../server/AuthenticationToken.java | 4 +- .../server/KerberosAuthenticationHandler.java | 11 +++-- .../server/PseudoAuthenticationHandler.java | 17 .../authentication/util/KerberosName.java | 4 +- .../authentication/util/KerberosUtil.java | 8 ++-- .../security/authentication/util/Signer.java| 2 - .../util/ZKSignerSecretProvider.java| 8 ++-- hadoop-common-project/hadoop-common/CHANGES.txt | 3 ++ .../java/org/apache/hadoop/minikdc/MiniKdc.java | 10 ++--- .../apache/hadoop/maven/plugin/util/Exec.java | 4 +- .../hadoop/maven/plugin/util/FileSetUtils.java | 2 +- 18 files changed, 86 insertions(+), 94 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/f71eb51a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticatedURL.java -- diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticatedURL.java 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticatedURL.java index 61c3c6d..c50a516 100644 --- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticatedURL.java +++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticatedURL.java @@ -24,19 +24,18 @@ import java.util.Map; /** * The {@link AuthenticatedURL} class enables the use of the JDK {@link URL} class * against HTTP endpoints protected with the {@link AuthenticationFilter}. - * p/ + * p * The authentication mechanisms supported by default are Hadoop Simple authentication * (also known as pseudo authentication) and Kerberos SPNEGO authentication. - * p/ + * p * Additional authentication mechanisms can be supported via {@link Authenticator} implementations. - * p/ + * p * The default {@link Authenticator} is the {@link KerberosAuthenticator} class which supports * automatic fallback from Kerberos SPNEGO to Hadoop Simple authentication. - * p/ + * p * codeAuthenticatedURL/code instances are not thread-safe. - * p/ + * p * The usage pattern of the {@link AuthenticatedURL} is: - * p/ * pre * * // establishing an initial connection @@ -240,7 +239,7 @@ public class AuthenticatedURL { /** * Helper method that extracts an authentication token received from a connection. - * p/ + * p * This method is used by {@link Authenticator} implementations. * * @param conn connection to extract the authentication token from. 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/f71eb51a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/Authenticator.java -- diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/Authenticator.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/Authenticator.java index e7bae4a..6828970 100644 --- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/Authenticator.java +++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/Authenticator.java @@ -19,7 +19,7 @@ import java.net.URL; /** * Interface for client authentication mechanisms. - * p/ + * p * Implementations are use-once instances, they don't need to be thread safe. */ public interface Authenticator {
[1/2] hadoop git commit: HADOOP-10134 [JDK8] Fix Javadoc errors caused by incorrect or illegal tags in doc comments.
Repository: hadoop Updated Branches: refs/heads/branch-2 a7be36384 - 2bc3f1064 refs/heads/trunk 394c70c21 - f71eb51ab HADOOP-10134 [JDK8] Fix Javadoc errors caused by incorrect or illegal tags in doc comments. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2bc3f106 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2bc3f106 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2bc3f106 Branch: refs/heads/branch-2 Commit: 2bc3f1064f5a5eea1c6b384f68b2d1c2fb427e8f Parents: a7be363 Author: Steve Loughran ste...@apache.org Authored: Tue Dec 9 11:15:19 2014 + Committer: Steve Loughran ste...@apache.org Committed: Tue Dec 9 11:15:19 2014 + -- .../authentication/client/AuthenticatedURL.java | 13 +++--- .../authentication/client/Authenticator.java| 2 +- .../client/KerberosAuthenticator.java | 10 ++--- .../client/PseudoAuthenticator.java | 10 ++--- .../AltKerberosAuthenticationHandler.java | 5 +-- .../server/AuthenticationFilter.java| 46 ++-- .../server/AuthenticationHandler.java | 21 - .../server/AuthenticationToken.java | 4 +- .../server/KerberosAuthenticationHandler.java | 11 +++-- .../server/PseudoAuthenticationHandler.java | 17 .../authentication/util/KerberosName.java | 4 +- .../authentication/util/KerberosUtil.java | 8 ++-- .../security/authentication/util/Signer.java| 2 - .../util/ZKSignerSecretProvider.java| 8 ++-- hadoop-common-project/hadoop-common/CHANGES.txt | 3 ++ .../java/org/apache/hadoop/minikdc/MiniKdc.java | 10 ++--- .../apache/hadoop/maven/plugin/util/Exec.java | 4 +- .../hadoop/maven/plugin/util/FileSetUtils.java | 2 +- 18 files changed, 86 insertions(+), 94 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/2bc3f106/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticatedURL.java -- diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticatedURL.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticatedURL.java index 61c3c6d..c50a516 100644 --- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticatedURL.java +++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/AuthenticatedURL.java @@ -24,19 +24,18 @@ import java.util.Map; /** * The {@link AuthenticatedURL} class enables the use of the JDK {@link URL} class * against HTTP endpoints protected with the {@link AuthenticationFilter}. - * p/ + * p * The authentication mechanisms supported by default are Hadoop Simple authentication * (also known as pseudo authentication) and Kerberos SPNEGO authentication. - * p/ + * p * Additional authentication mechanisms can be supported via {@link Authenticator} implementations. - * p/ + * p * The default {@link Authenticator} is the {@link KerberosAuthenticator} class which supports * automatic fallback from Kerberos SPNEGO to Hadoop Simple authentication. - * p/ + * p * codeAuthenticatedURL/code instances are not thread-safe. - * p/ + * p * The usage pattern of the {@link AuthenticatedURL} is: - * p/ * pre * * // establishing an initial connection @@ -240,7 +239,7 @@ public class AuthenticatedURL { /** * Helper method that extracts an authentication token received from a connection. - * p/ + * p * This method is used by {@link Authenticator} implementations. * * @param conn connection to extract the authentication token from. 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2bc3f106/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/Authenticator.java -- diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/Authenticator.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/Authenticator.java index e7bae4a..6828970 100644 --- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/Authenticator.java +++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/Authenticator.java @@ -19,7 +19,7 @@ import java.net.URL; /** * Interface for client authentication mechanisms. - * p/ + * p * Implementations are use-once instances, they don't need to be thread safe. */ public
hadoop git commit: HADOOP-11352 Clean up test-patch.sh to disable +1 contrib tests
Repository: hadoop Updated Branches: refs/heads/trunk f71eb51ab - 82707b441 HADOOP-11352 Clean up test-patch.sh to disable +1 contrib tests Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/82707b44 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/82707b44 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/82707b44 Branch: refs/heads/trunk Commit: 82707b44196d63150a8668fa97aa824344b3f9a0 Parents: f71eb51 Author: Steve Loughran ste...@apache.org Authored: Tue Dec 9 11:32:51 2014 + Committer: Steve Loughran ste...@apache.org Committed: Tue Dec 9 11:32:51 2014 + -- dev-support/test-patch.sh | 71 hadoop-common-project/hadoop-common/CHANGES.txt | 3 + 2 files changed, 3 insertions(+), 71 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/82707b44/dev-support/test-patch.sh -- diff --git a/dev-support/test-patch.sh b/dev-support/test-patch.sh index e6512ab..ece3ddf 100755 --- a/dev-support/test-patch.sh +++ b/dev-support/test-patch.sh @@ -858,74 +858,6 @@ findModules () { echo $CHANGED_MODULES } ### -### Run the test-contrib target -runContribTests () { - echo - echo - echo == - echo == - echo Running contrib tests. - echo == - echo == - echo - echo - - if [[ `$GREP -c 'test-contrib' build.xml` == 0 ]] ; then -echo No contrib tests in this project. -return 0 - fi - - ### Kill any rogue build processes from the last attempt - $PS auxwww | $GREP ${PROJECT_NAME}PatchProcess | $AWK '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} /dev/null - - #echo $ANT_HOME/bin/ant -Dversion=${VERSION} $ECLIPSE_PROPERTY -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no test-contrib - #$ANT_HOME/bin/ant -Dversion=${VERSION} $ECLIPSE_PROPERTY -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no test-contrib - echo NOP - if [[ $? != 0 ]] ; then -JIRA_COMMENT=$JIRA_COMMENT - -{color:red}-1 contrib tests{color}. The patch failed contrib unit tests. 
-return 1 - fi - JIRA_COMMENT=$JIRA_COMMENT - -{color:green}+1 contrib tests{color}. The patch passed contrib unit tests. - return 0 -} - -### -### Run the inject-system-faults target -checkInjectSystemFaults () { - echo - echo - echo == - echo == - echo Checking the integrity of system test framework code. - echo == - echo == - echo - echo - - ### Kill any rogue build processes from the last attempt - $PS auxwww | $GREP ${PROJECT_NAME}PatchProcess | $AWK '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} /dev/null - - #echo $ANT_HOME/bin/ant -Dversion=${VERSION} -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME inject-system-faults - #$ANT_HOME/bin/ant -Dversion=${VERSION} -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME inject-system-faults - echo NOP - return 0 - if [[ $? != 0 ]] ; then -JIRA_COMMENT=$JIRA_COMMENT - -{color:red}-1 system test framework{color}. The patch failed system test framework compile. -return 1 - fi - JIRA_COMMENT=$JIRA_COMMENT - -{color:green}+1 system test framework{color}. The patch passed system test framework compile. - return 0 -} - -### ### Submit a comment to the defect's Jira submitJiraComment () { local result=$1 @@ -1059,10 +991,7 @@ checkReleaseAuditWarnings if [[ $JENKINS == true || $RUN_TESTS == true ]] ; then runTests (( RESULT = RESULT + $? )) - runContribTests - (( RESULT = RESULT + $? )) fi -checkInjectSystemFaults (( RESULT = RESULT + $? )) JIRA_COMMENT_FOOTER=Test results: $BUILD_URL/testReport/ $JIRA_COMMENT_FOOTER http://git-wip-us.apache.org/repos/asf/hadoop/blob/82707b44/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git
hadoop git commit: HADOOP-10476. Bumping the findbugs version to 3.0.0. Contributed by Haohui Mai.
Repository: hadoop Updated Branches: refs/heads/trunk 82707b441 - b55769369 HADOOP-10476. Bumping the findbugs version to 3.0.0. Contributed by Haohui Mai. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b5576936 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b5576936 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b5576936 Branch: refs/heads/trunk Commit: b55769369bcb977bdd04af9889dbf63b7c856b8d Parents: 82707b4 Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 10:38:24 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 10:38:24 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++ hadoop-project/pom.xml | 5 +++-- 2 files changed, 5 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/b5576936/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index cf3e531..a0a10b8 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -424,6 +424,8 @@ Release 2.7.0 - UNRELEASED HADOOP-11287. Simplify UGI#reloginFromKeytab for Java 7+. (Li Lu via wheat9) +HADOOP-10476. Bumping the findbugs version to 3.0.0. (wheat9) + OPTIMIZATIONS HADOOP-11323. WritableComparator#compare keeps reference to byte array. 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b5576936/hadoop-project/pom.xml -- diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 7c492c8..c3881e8 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -71,6 +71,7 @@ protoc.path${env.HADOOP_PROTOC_PATH}/protoc.path zookeeper.version3.4.6/zookeeper.version +findbugs.version3.0.0/findbugs.version tomcat.version6.0.41/tomcat.version @@ -843,7 +844,7 @@ dependency groupIdcom.google.code.findbugs/groupId artifactIdjsr305/artifactId -version1.3.9/version +version${findbugs.version}/version /dependency dependency groupIdjavax.xml.bind/groupId @@ -981,7 +982,7 @@ plugin groupIdorg.codehaus.mojo/groupId artifactIdfindbugs-maven-plugin/artifactId - version2.3.2/version + version${findbugs.version}/version /plugin plugin groupIdorg.apache.maven.plugins/groupId
hadoop git commit: HADOOP-10476. Bumping the findbugs version to 3.0.0. Contributed by Haohui Mai.
Repository: hadoop Updated Branches: refs/heads/branch-2 2bc3f1064 - c9e9f82ec HADOOP-10476. Bumping the findbugs version to 3.0.0. Contributed by Haohui Mai. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c9e9f82e Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c9e9f82e Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c9e9f82e Branch: refs/heads/branch-2 Commit: c9e9f82ec6a76bd80fc04ec301a4c9b3d28306c9 Parents: 2bc3f10 Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 10:38:24 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 10:38:43 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++ hadoop-project/pom.xml | 5 +++-- 2 files changed, 5 insertions(+), 2 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e9f82e/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 1706b69..b1cdfb1 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -55,6 +55,8 @@ Release 2.7.0 - UNRELEASED HADOOP-11287. Simplify UGI#reloginFromKeytab for Java 7+. (Li Lu via wheat9) +HADOOP-10476. Bumping the findbugs version to 3.0.0. (wheat9) + OPTIMIZATIONS HADOOP-11323. WritableComparator#compare keeps reference to byte array. 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c9e9f82e/hadoop-project/pom.xml -- diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index d50b72c..6527b92 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -71,6 +71,7 @@ protoc.path${env.HADOOP_PROTOC_PATH}/protoc.path zookeeper.version3.4.6/zookeeper.version +findbugs.version3.0.0/findbugs.version tomcat.version6.0.41/tomcat.version @@ -836,7 +837,7 @@ dependency groupIdcom.google.code.findbugs/groupId artifactIdjsr305/artifactId -version1.3.9/version +version${findbugs.version}/version /dependency dependency groupIdjavax.xml.bind/groupId @@ -979,7 +980,7 @@ plugin groupIdorg.codehaus.mojo/groupId artifactIdfindbugs-maven-plugin/artifactId - version2.3.2/version + version${findbugs.version}/version /plugin plugin groupIdorg.apache.maven.plugins/groupId
hadoop git commit: HADOOP-11367. Fix warnings from findbugs 3.0 in hadoop-streaming. Contributed by Li Lu.
Repository: hadoop Updated Branches: refs/heads/branch-2 c9e9f82ec - 1b8b51704 HADOOP-11367. Fix warnings from findbugs 3.0 in hadoop-streaming. Contributed by Li Lu. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1b8b5170 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1b8b5170 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1b8b5170 Branch: refs/heads/branch-2 Commit: 1b8b51704a583d1ae448f73c6c565b0a20853ad1 Parents: c9e9f82 Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 10:41:35 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 10:42:17 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++ .../src/main/java/org/apache/hadoop/streaming/Environment.java | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/1b8b5170/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index b1cdfb1..3ce2ae7 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -168,6 +168,8 @@ Release 2.7.0 - UNRELEASED HADOOP-10134 [JDK8] Fix Javadoc errors caused by incorrect or illegal tags in doc comments. (apurtell via stevel) +HADOOP-11367. Fix warnings from findbugs 3.0 in hadoop-streaming. 
(Li Lu via wheat9) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/1b8b5170/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java -- diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java index bd76c31..98d8aa03 100644 --- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java +++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java @@ -20,6 +20,7 @@ package org.apache.hadoop.streaming; import java.io.*; import java.net.InetAddress; +import java.nio.charset.Charset; import java.util.*; import org.apache.hadoop.classification.InterfaceAudience; @@ -62,7 +63,8 @@ public class Environment extends Properties { // Read the environment variables Process pid = Runtime.getRuntime().exec(command); -BufferedReader in = new BufferedReader(new InputStreamReader(pid.getInputStream())); +BufferedReader in = new BufferedReader( +new InputStreamReader(pid.getInputStream(), Charset.forName(UTF-8))); try { while (true) { String line = in.readLine();
hadoop git commit: HADOOP-11367. Fix warnings from findbugs 3.0 in hadoop-streaming. Contributed by Li Lu.
Repository: hadoop Updated Branches: refs/heads/trunk b55769369 - 7bceb13ba HADOOP-11367. Fix warnings from findbugs 3.0 in hadoop-streaming. Contributed by Li Lu. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7bceb13b Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7bceb13b Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7bceb13b Branch: refs/heads/trunk Commit: 7bceb13ba9634123a92a091f93b3b04c14473678 Parents: b557693 Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 10:41:35 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 10:41:35 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++ .../src/main/java/org/apache/hadoop/streaming/Environment.java | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/7bceb13b/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index a0a10b8..425cab7 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -534,6 +534,8 @@ Release 2.7.0 - UNRELEASED HADOOP-10134 [JDK8] Fix Javadoc errors caused by incorrect or illegal tags in doc comments. (apurtell via stevel) +HADOOP-11367. Fix warnings from findbugs 3.0 in hadoop-streaming. 
(Li Lu via wheat9) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/7bceb13b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java -- diff --git a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java index bd76c31..98d8aa03 100644 --- a/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java +++ b/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/Environment.java @@ -20,6 +20,7 @@ package org.apache.hadoop.streaming; import java.io.*; import java.net.InetAddress; +import java.nio.charset.Charset; import java.util.*; import org.apache.hadoop.classification.InterfaceAudience; @@ -62,7 +63,8 @@ public class Environment extends Properties { // Read the environment variables Process pid = Runtime.getRuntime().exec(command); -BufferedReader in = new BufferedReader(new InputStreamReader(pid.getInputStream())); +BufferedReader in = new BufferedReader( +new InputStreamReader(pid.getInputStream(), Charset.forName(UTF-8))); try { while (true) { String line = in.readLine();
hadoop git commit: HADOOP-11369. Fix new findbugs warnings in hadoop-mapreduce-client, non-core directories. Contributed by Li Lu.
Repository: hadoop Updated Branches: refs/heads/branch-2 1b8b51704 - 759179e63 HADOOP-11369. Fix new findbugs warnings in hadoop-mapreduce-client, non-core directories. Contributed by Li Lu. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/759179e6 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/759179e6 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/759179e6 Branch: refs/heads/branch-2 Commit: 759179e6316b1bc86ee9ea63fb23d3959fe1e287 Parents: 1b8b517 Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 10:46:13 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 10:46:25 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 3 +++ .../hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java | 3 +-- .../org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java | 1 - .../org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/759179e6/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 3ce2ae7..98751c5 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -170,6 +170,9 @@ Release 2.7.0 - UNRELEASED HADOOP-11367. Fix warnings from findbugs 3.0 in hadoop-streaming. (Li Lu via wheat9) +HADOOP-11369. Fix new findbugs warnings in hadoop-mapreduce-client, +non-core directories. 
(Li Lu via wheat9) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/759179e6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java -- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java index 45ddb9e..97de8fa 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java @@ -870,8 +870,7 @@ public class JobHistoryEventHandler extends AbstractService TaskAttemptStartedEvent tase = (TaskAttemptStartedEvent) event; tEvent.addEventInfo(TASK_TYPE, tase.getTaskType().toString()); tEvent.addEventInfo(TASK_ATTEMPT_ID, -tase.getTaskAttemptId().toString() == null ? 
- : tase.getTaskAttemptId().toString()); +tase.getTaskAttemptId().toString()); tEvent.addEventInfo(START_TIME, tase.getStartTime()); tEvent.addEventInfo(HTTP_PORT, tase.getHttpPort()); tEvent.addEventInfo(TRACKER_NAME, tase.getTrackerName()); http://git-wip-us.apache.org/repos/asf/hadoop/blob/759179e6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java -- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java index 6c58a68..cd4e272 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java @@ -76,7 +76,6 @@ public abstract class RMCommunicator extends AbstractService protected EventHandler eventHandler; protected ApplicationMasterProtocol scheduler; private final ClientService clientService; - protected int lastResponseID; private Resource maxContainerCapability; protected MapApplicationAccessType, String applicationACLs; private volatile long lastHeartbeatTime;
hadoop git commit: HADOOP-11369. Fix new findbugs warnings in hadoop-mapreduce-client, non-core directories. Contributed by Li Lu.
Repository: hadoop Updated Branches: refs/heads/trunk 7bceb13ba - d777a1e4c HADOOP-11369. Fix new findbugs warnings in hadoop-mapreduce-client, non-core directories. Contributed by Li Lu. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d777a1e4 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d777a1e4 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d777a1e4 Branch: refs/heads/trunk Commit: d777a1e4ca8e7cf0ce8967f79dd475468906c733 Parents: 7bceb13 Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 10:46:13 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 10:46:13 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 3 +++ .../hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java | 3 +-- .../org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java | 1 - .../org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/d777a1e4/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 425cab7..5e2ff8d 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -536,6 +536,9 @@ Release 2.7.0 - UNRELEASED HADOOP-11367. Fix warnings from findbugs 3.0 in hadoop-streaming. (Li Lu via wheat9) +HADOOP-11369. Fix new findbugs warnings in hadoop-mapreduce-client, +non-core directories. 
(Li Lu via wheat9) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/d777a1e4/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java -- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java index 45ddb9e..97de8fa 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java @@ -870,8 +870,7 @@ public class JobHistoryEventHandler extends AbstractService TaskAttemptStartedEvent tase = (TaskAttemptStartedEvent) event; tEvent.addEventInfo(TASK_TYPE, tase.getTaskType().toString()); tEvent.addEventInfo(TASK_ATTEMPT_ID, -tase.getTaskAttemptId().toString() == null ? 
- : tase.getTaskAttemptId().toString()); +tase.getTaskAttemptId().toString()); tEvent.addEventInfo(START_TIME, tase.getStartTime()); tEvent.addEventInfo(HTTP_PORT, tase.getHttpPort()); tEvent.addEventInfo(TRACKER_NAME, tase.getTrackerName()); http://git-wip-us.apache.org/repos/asf/hadoop/blob/d777a1e4/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java -- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java index 6c58a68..cd4e272 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java @@ -76,7 +76,6 @@ public abstract class RMCommunicator extends AbstractService protected EventHandler eventHandler; protected ApplicationMasterProtocol scheduler; private final ClientService clientService; - protected int lastResponseID; private Resource maxContainerCapability; protected MapApplicationAccessType, String applicationACLs; private volatile long lastHeartbeatTime;
hadoop git commit: HADOOP-11368. Fix SSLFactory truststore reloader thread leak in KMSClientProvider. Contributed by Arun Suresh.
Repository: hadoop Updated Branches: refs/heads/branch-2 759179e63 - deaa172e7 HADOOP-11368. Fix SSLFactory truststore reloader thread leak in KMSClientProvider. Contributed by Arun Suresh. (cherry picked from commit 74d4bfded98239507511dedb515bc6a54958d5a8) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/deaa172e Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/deaa172e Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/deaa172e Branch: refs/heads/branch-2 Commit: deaa172e7a2ab09656cc9eb431a3e68a73e0bd96 Parents: 759179e Author: Andrew Wang w...@apache.org Authored: Tue Dec 9 10:46:50 2014 -0800 Committer: Andrew Wang w...@apache.org Committed: Tue Dec 9 10:47:33 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 3 +++ .../crypto/key/kms/KMSClientProvider.java | 4 +++ .../hadoop/crypto/key/kms/server/TestKMS.java | 26 3 files changed, 33 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/deaa172e/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 98751c5..9f404f0 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -173,6 +173,9 @@ Release 2.7.0 - UNRELEASED HADOOP-11369. Fix new findbugs warnings in hadoop-mapreduce-client, non-core directories. (Li Lu via wheat9) +HADOOP-11368. Fix SSLFactory truststore reloader thread leak in +KMSClientProvider. 
(Arun Suresh via wang) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/deaa172e/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java -- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java index e9e8af4..19cce7e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java @@ -827,6 +827,10 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension, encKeyVersionQueue.shutdown(); } catch (Exception e) { throw new IOException(e); +} finally { + if (sslFactory != null) { +sslFactory.destroy(); + } } } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/deaa172e/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java -- diff --git a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java index 61ce807..f487e98 100644 --- a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java +++ b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java @@ -303,6 +303,32 @@ public class TestKMS { url.getProtocol().equals(https)); final URI uri = createKMSUri(getKMSUrl()); +if (ssl) { + KeyProvider testKp = new KMSClientProvider(uri, conf); + ThreadGroup threadGroup = Thread.currentThread().getThreadGroup(); + while (threadGroup.getParent() != null) { +threadGroup = threadGroup.getParent(); + } + Thread[] threads = new 
Thread[threadGroup.activeCount()]; + threadGroup.enumerate(threads); + Thread reloaderThread = null; + for (Thread thread : threads) { +if ((thread.getName() != null) + (thread.getName().contains(Truststore reloader thread))) { + reloaderThread = thread; +} + } + Assert.assertTrue(Reloader is not alive, reloaderThread.isAlive()); + testKp.close(); + boolean reloaderStillAlive = true; + for (int i = 0; i 10; i++) { +reloaderStillAlive = reloaderThread.isAlive(); +if (!reloaderStillAlive) break; +Thread.sleep(1000); + } + Assert.assertFalse(Reloader is still alive, reloaderStillAlive); +} + if (kerberos) { for (String user : new String[]{client, client/host}) {
hadoop git commit: HADOOP-11368. Fix SSLFactory truststore reloader thread leak in KMSClientProvider. Contributed by Arun Suresh.
Repository: hadoop Updated Branches: refs/heads/trunk d777a1e4c - 74d4bfded HADOOP-11368. Fix SSLFactory truststore reloader thread leak in KMSClientProvider. Contributed by Arun Suresh. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/74d4bfde Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/74d4bfde Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/74d4bfde Branch: refs/heads/trunk Commit: 74d4bfded98239507511dedb515bc6a54958d5a8 Parents: d777a1e Author: Andrew Wang w...@apache.org Authored: Tue Dec 9 10:46:50 2014 -0800 Committer: Andrew Wang w...@apache.org Committed: Tue Dec 9 10:47:24 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 3 +++ .../crypto/key/kms/KMSClientProvider.java | 4 +++ .../hadoop/crypto/key/kms/server/TestKMS.java | 26 3 files changed, 33 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/74d4bfde/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 5e2ff8d..2051698 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -539,6 +539,9 @@ Release 2.7.0 - UNRELEASED HADOOP-11369. Fix new findbugs warnings in hadoop-mapreduce-client, non-core directories. (Li Lu via wheat9) +HADOOP-11368. Fix SSLFactory truststore reloader thread leak in +KMSClientProvider. 
(Arun Suresh via wang) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/74d4bfde/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java -- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java index cb03683..50dd1ad 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java @@ -827,6 +827,10 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension, encKeyVersionQueue.shutdown(); } catch (Exception e) { throw new IOException(e); +} finally { + if (sslFactory != null) { +sslFactory.destroy(); + } } } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/74d4bfde/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java -- diff --git a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java index 61ce807..f487e98 100644 --- a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java +++ b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java @@ -303,6 +303,32 @@ public class TestKMS { url.getProtocol().equals(https)); final URI uri = createKMSUri(getKMSUrl()); +if (ssl) { + KeyProvider testKp = new KMSClientProvider(uri, conf); + ThreadGroup threadGroup = Thread.currentThread().getThreadGroup(); + while (threadGroup.getParent() != null) { +threadGroup = threadGroup.getParent(); + } + Thread[] threads = new 
Thread[threadGroup.activeCount()]; + threadGroup.enumerate(threads); + Thread reloaderThread = null; + for (Thread thread : threads) { +if ((thread.getName() != null) + (thread.getName().contains(Truststore reloader thread))) { + reloaderThread = thread; +} + } + Assert.assertTrue(Reloader is not alive, reloaderThread.isAlive()); + testKp.close(); + boolean reloaderStillAlive = true; + for (int i = 0; i 10; i++) { +reloaderStillAlive = reloaderThread.isAlive(); +if (!reloaderStillAlive) break; +Thread.sleep(1000); + } + Assert.assertFalse(Reloader is still alive, reloaderStillAlive); +} + if (kerberos) { for (String user : new String[]{client, client/host}) { doAs(user, new PrivilegedExceptionActionVoid() {
hadoop git commit: HADOOP-11372. Fix new findbugs warnings in mapreduce-examples. Contributed by Li Lu.
Repository: hadoop Updated Branches: refs/heads/branch-2 deaa172e7 - 49aacee2c HADOOP-11372. Fix new findbugs warnings in mapreduce-examples. Contributed by Li Lu. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/49aacee2 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/49aacee2 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/49aacee2 Branch: refs/heads/branch-2 Commit: 49aacee2cb3ac0edaf9c1319323f3a829f002a42 Parents: deaa172 Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 10:48:35 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 10:49:11 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 3 +++ .../src/main/java/org/apache/hadoop/examples/pi/Parser.java | 5 ++--- .../java/org/apache/hadoop/examples/pi/math/Bellard.java| 9 - 3 files changed, 13 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/49aacee2/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 9f404f0..48a7fb8 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -176,6 +176,9 @@ Release 2.7.0 - UNRELEASED HADOOP-11368. Fix SSLFactory truststore reloader thread leak in KMSClientProvider. (Arun Suresh via wang) +HADOOP-11372. Fix new findbugs warnings in mapreduce-examples. 
+(Li Lu via wheat9) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/49aacee2/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java -- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java index 187520a..a2db9d1 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java @@ -151,11 +151,10 @@ public final class Parser { static T extends CombinableT MapParameter, T combine(MapParameter, ListT m) { final MapParameter, T combined = new TreeMapParameter, T(); for(Parameter p : Parameter.values()) { + //note: results would never be null due to the design of Util.combine final ListT results = Util.combine(m.get(p)); Util.out.format(%-6s = , p); - if (results == null) -Util.out.println(null); - else if (results.size() != 1) + if (results.size() != 1) Util.out.println(results.toString().replace(, , ,\n )); else { final T r = results.get(0); http://git-wip-us.apache.org/repos/asf/hadoop/blob/49aacee2/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/Bellard.java -- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/Bellard.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/Bellard.java index 90b608f..d909d92 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/Bellard.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/Bellard.java @@ -25,6 +25,7 
@@ import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.TreeMap; +import java.util.NoSuchElementException; import org.apache.hadoop.examples.pi.Container; import org.apache.hadoop.examples.pi.Util; @@ -255,7 +256,13 @@ public final class Bellard { public boolean hasNext() {return i parts.length;} /** {@inheritDoc} */ @Override -public Summation next() {return parts[i++];} +public Summation next() throws NoSuchElementException { + if (hasNext()) { +return parts[i++]; + } else { +throw new NoSuchElementException(Sum's iterator does not have next!); + } +} /** Unsupported */ @Override public void remove() {throw new UnsupportedOperationException();}
hadoop git commit: HADOOP-11372. Fix new findbugs warnings in mapreduce-examples. Contributed by Li Lu.
Repository: hadoop Updated Branches: refs/heads/trunk 74d4bfded - be86237c0 HADOOP-11372. Fix new findbugs warnings in mapreduce-examples. Contributed by Li Lu. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/be86237c Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/be86237c Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/be86237c Branch: refs/heads/trunk Commit: be86237c09533a6691ed9eb4864643657331a11a Parents: 74d4bfd Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 10:48:35 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 10:49:55 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 3 +++ .../src/main/java/org/apache/hadoop/examples/pi/Parser.java | 5 ++--- .../java/org/apache/hadoop/examples/pi/math/Bellard.java| 9 - 3 files changed, 13 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/be86237c/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 2051698..4b23471 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -542,6 +542,9 @@ Release 2.7.0 - UNRELEASED HADOOP-11368. Fix SSLFactory truststore reloader thread leak in KMSClientProvider. (Arun Suresh via wang) +HADOOP-11372. Fix new findbugs warnings in mapreduce-examples. 
+(Li Lu via wheat9) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/be86237c/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java -- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java index 187520a..a2db9d1 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/Parser.java @@ -151,11 +151,10 @@ public final class Parser { static T extends CombinableT MapParameter, T combine(MapParameter, ListT m) { final MapParameter, T combined = new TreeMapParameter, T(); for(Parameter p : Parameter.values()) { + //note: results would never be null due to the design of Util.combine final ListT results = Util.combine(m.get(p)); Util.out.format(%-6s = , p); - if (results == null) -Util.out.println(null); - else if (results.size() != 1) + if (results.size() != 1) Util.out.println(results.toString().replace(, , ,\n )); else { final T r = results.get(0); http://git-wip-us.apache.org/repos/asf/hadoop/blob/be86237c/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/Bellard.java -- diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/Bellard.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/Bellard.java index 90b608f..d909d92 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/Bellard.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/Bellard.java @@ -25,6 +25,7 
@@ import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.TreeMap; +import java.util.NoSuchElementException; import org.apache.hadoop.examples.pi.Container; import org.apache.hadoop.examples.pi.Util; @@ -255,7 +256,13 @@ public final class Bellard { public boolean hasNext() {return i parts.length;} /** {@inheritDoc} */ @Override -public Summation next() {return parts[i++];} +public Summation next() throws NoSuchElementException { + if (hasNext()) { +return parts[i++]; + } else { +throw new NoSuchElementException(Sum's iterator does not have next!); + } +} /** Unsupported */ @Override public void remove() {throw new UnsupportedOperationException();}
hadoop git commit: Incorrect locking in FsVolumeList#checkDirs can hang datanodes (Noah Lorang via Colin P. McCabe)
Repository: hadoop Updated Branches: refs/heads/trunk be86237c0 - d8352b9b2 Incorrect locking in FsVolumeList#checkDirs can hang datanodes (Noah Lorang via Colin P. McCabe) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d8352b9b Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d8352b9b Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d8352b9b Branch: refs/heads/trunk Commit: d8352b9b2b99aa46679c5880a724ba3f0ceb41ff Parents: be86237 Author: Colin Patrick Mccabe cmcc...@cloudera.com Authored: Tue Dec 9 10:55:17 2014 -0800 Committer: Colin Patrick Mccabe cmcc...@cloudera.com Committed: Tue Dec 9 10:56:46 2014 -0800 -- hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 ++ .../datanode/fsdataset/impl/FsVolumeList.java | 56 ++-- 2 files changed, 31 insertions(+), 28 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/d8352b9b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index 55026a2..626d90a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -574,6 +574,9 @@ Release 2.6.1 - UNRELEASED HDFS-4882. Prevent the Namenode's LeaseManager from looping forever in checkLeases (Ravi Prakash via Colin P. McCabe) +HDFS-7489. Incorrect locking in FsVolumeList#checkDirs can hang datanodes +(Noah Lorang via Colin P. 
McCabe) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/d8352b9b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java index 837ddf7..55329ae 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java @@ -36,6 +36,7 @@ class FsVolumeList { * This list is replaced on modification holding this lock. */ volatile ListFsVolumeImpl volumes = null; + private Object checkDirsMutex = new Object(); private final VolumeChoosingPolicyFsVolumeImpl blockChooser; private volatile int numFailedVolumes; @@ -167,40 +168,39 @@ class FsVolumeList { * Calls {@link FsVolumeImpl#checkDirs()} on each volume, removing any * volumes from the active list that result in a DiskErrorException. * - * This method is synchronized to allow only one instance of checkDirs() - * call + * Use checkDirsMutext to allow only one instance of checkDirs() call + * * @return list of all the removed volumes. 
*/ - synchronized ListFsVolumeImpl checkDirs() { -ArrayListFsVolumeImpl removedVols = null; - -// Make a copy of volumes for performing modification -final ListFsVolumeImpl volumeList = new ArrayListFsVolumeImpl(volumes); + ListFsVolumeImpl checkDirs() { +synchronized(checkDirsMutex) { + ArrayListFsVolumeImpl removedVols = null; + + // Make a copy of volumes for performing modification + final ListFsVolumeImpl volumeList = new ArrayListFsVolumeImpl(volumes); -for(IteratorFsVolumeImpl i = volumeList.iterator(); i.hasNext(); ) { - final FsVolumeImpl fsv = i.next(); - try { -fsv.checkDirs(); - } catch (DiskErrorException e) { -FsDatasetImpl.LOG.warn(Removing failed volume + fsv + : ,e); -if (removedVols == null) { - removedVols = new ArrayListFsVolumeImpl(1); + for(IteratorFsVolumeImpl i = volumeList.iterator(); i.hasNext(); ) { +final FsVolumeImpl fsv = i.next(); +try { + fsv.checkDirs(); +} catch (DiskErrorException e) { + FsDatasetImpl.LOG.warn(Removing failed volume + fsv + : ,e); + if (removedVols == null) { +removedVols = new ArrayListFsVolumeImpl(1); + } + removedVols.add(fsv); + removeVolume(fsv.getBasePath()); + numFailedVolumes++; } -removedVols.add(fsv); -fsv.shutdown(); -i.remove(); // Remove the volume -numFailedVolumes++; } -} - -if (removedVols != null removedVols.size() 0) { - // Replace volume list - volumes = Collections.unmodifiableList(volumeList); -
hadoop git commit: HADOOP-11273. TestMiniKdc failure: login options not compatible with IBM JDK. Contributed by Gao Zhong Liang.
Repository: hadoop Updated Branches: refs/heads/branch-2 49aacee2c - fa9ca2501 HADOOP-11273. TestMiniKdc failure: login options not compatible with IBM JDK. Contributed by Gao Zhong Liang. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fa9ca250 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fa9ca250 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fa9ca250 Branch: refs/heads/branch-2 Commit: fa9ca25019b931ce1de09dc5429b3268a9e01b6c Parents: 49aacee Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 10:57:32 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 10:57:41 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 3 +++ .../org/apache/hadoop/minikdc/TestMiniKdc.java | 22 +--- 2 files changed, 17 insertions(+), 8 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/fa9ca250/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 48a7fb8..35b2807 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -179,6 +179,9 @@ Release 2.7.0 - UNRELEASED HADOOP-11372. Fix new findbugs warnings in mapreduce-examples. (Li Lu via wheat9) +HADOOP-11273. TestMiniKdc failure: login options not compatible with IBM +JDK. 
(Gao Zhong Liang via wheat9) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/fa9ca250/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java -- diff --git a/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java b/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java index c052bb1..fac7f0f 100644 --- a/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java +++ b/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java @@ -37,7 +37,8 @@ import java.util.HashMap; import java.util.Arrays; public class TestMiniKdc extends KerberosSecurityTestcase { - + private static final boolean IBM_JAVA = System.getProperty(java.vendor) + .contains(IBM); @Test public void testMiniKdcStart() { MiniKdc kdc = getKdc(); @@ -94,15 +95,20 @@ public class TestMiniKdc extends KerberosSecurityTestcase { @Override public AppConfigurationEntry[] getAppConfigurationEntry(String name) { MapString, String options = new HashMapString, String(); - options.put(keyTab, keytab); options.put(principal, principal); - options.put(useKeyTab, true); - options.put(storeKey, true); - options.put(doNotPrompt, true); - options.put(useTicketCache, true); - options.put(renewTGT, true); options.put(refreshKrb5Config, true); - options.put(isInitiator, Boolean.toString(isInitiator)); + if (IBM_JAVA) { +options.put(useKeytab, keytab); +options.put(credsType, both); + } else { +options.put(keyTab, keytab); +options.put(useKeyTab, true); +options.put(storeKey, true); +options.put(doNotPrompt, true); +options.put(useTicketCache, true); +options.put(renewTGT, true); +options.put(isInitiator, Boolean.toString(isInitiator)); + } String ticketCache = System.getenv(KRB5CCNAME); if (ticketCache != null) { options.put(ticketCache, ticketCache);
hadoop git commit: HADOOP-11273. TestMiniKdc failure: login options not compatible with IBM JDK. Contributed by Gao Zhong Liang.
Repository: hadoop Updated Branches: refs/heads/trunk d8352b9b2 - d08fc9aca HADOOP-11273. TestMiniKdc failure: login options not compatible with IBM JDK. Contributed by Gao Zhong Liang. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d08fc9ac Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d08fc9ac Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d08fc9ac Branch: refs/heads/trunk Commit: d08fc9aca807af5a240f5e1904d9c0ba027196b8 Parents: d8352b9 Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 10:57:32 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 10:58:34 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 3 +++ .../org/apache/hadoop/minikdc/TestMiniKdc.java | 22 +--- 2 files changed, 17 insertions(+), 8 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/d08fc9ac/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 4b23471..b030bf7 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -545,6 +545,9 @@ Release 2.7.0 - UNRELEASED HADOOP-11372. Fix new findbugs warnings in mapreduce-examples. (Li Lu via wheat9) +HADOOP-11273. TestMiniKdc failure: login options not compatible with IBM +JDK. 
(Gao Zhong Liang via wheat9) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/d08fc9ac/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java -- diff --git a/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java b/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java index c052bb1..fac7f0f 100644 --- a/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java +++ b/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java @@ -37,7 +37,8 @@ import java.util.HashMap; import java.util.Arrays; public class TestMiniKdc extends KerberosSecurityTestcase { - + private static final boolean IBM_JAVA = System.getProperty(java.vendor) + .contains(IBM); @Test public void testMiniKdcStart() { MiniKdc kdc = getKdc(); @@ -94,15 +95,20 @@ public class TestMiniKdc extends KerberosSecurityTestcase { @Override public AppConfigurationEntry[] getAppConfigurationEntry(String name) { MapString, String options = new HashMapString, String(); - options.put(keyTab, keytab); options.put(principal, principal); - options.put(useKeyTab, true); - options.put(storeKey, true); - options.put(doNotPrompt, true); - options.put(useTicketCache, true); - options.put(renewTGT, true); options.put(refreshKrb5Config, true); - options.put(isInitiator, Boolean.toString(isInitiator)); + if (IBM_JAVA) { +options.put(useKeytab, keytab); +options.put(credsType, both); + } else { +options.put(keyTab, keytab); +options.put(useKeyTab, true); +options.put(storeKey, true); +options.put(doNotPrompt, true); +options.put(useTicketCache, true); +options.put(renewTGT, true); +options.put(isInitiator, Boolean.toString(isInitiator)); + } String ticketCache = System.getenv(KRB5CCNAME); if (ticketCache != null) { options.put(ticketCache, ticketCache);
hadoop git commit: HDFS-7489. Incorrect locking in FsVolumeList#checkDirs can hang datanodes (Noah Lorang via Colin P. McCabe) (cherry picked from commit d8352b9b2b99aa46679c5880a724ba3f0ceb41ff)
Repository: hadoop Updated Branches: refs/heads/branch-2 fa9ca2501 - a037d6030 HDFS-7489. Incorrect locking in FsVolumeList#checkDirs can hang datanodes (Noah Lorang via Colin P. McCabe) (cherry picked from commit d8352b9b2b99aa46679c5880a724ba3f0ceb41ff) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a037d603 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a037d603 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a037d603 Branch: refs/heads/branch-2 Commit: a037d6030b5ae9422fdb265f5e4880d515be9e37 Parents: fa9ca25 Author: Colin Patrick Mccabe cmcc...@cloudera.com Authored: Tue Dec 9 10:55:17 2014 -0800 Committer: Colin Patrick Mccabe cmcc...@cloudera.com Committed: Tue Dec 9 10:58:56 2014 -0800 -- hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 ++ .../datanode/fsdataset/impl/FsVolumeList.java | 56 ++-- 2 files changed, 31 insertions(+), 28 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/a037d603/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index 037dd8c..7219553 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -317,6 +317,9 @@ Release 2.6.1 - UNRELEASED HDFS-4882. Prevent the Namenode's LeaseManager from looping forever in checkLeases (Ravi Prakash via Colin P. McCabe) +HDFS-7489. Incorrect locking in FsVolumeList#checkDirs can hang datanodes +(Noah Lorang via Colin P. 
McCabe) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/a037d603/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java index 837ddf7..55329ae 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java @@ -36,6 +36,7 @@ class FsVolumeList { * This list is replaced on modification holding this lock. */ volatile ListFsVolumeImpl volumes = null; + private Object checkDirsMutex = new Object(); private final VolumeChoosingPolicyFsVolumeImpl blockChooser; private volatile int numFailedVolumes; @@ -167,40 +168,39 @@ class FsVolumeList { * Calls {@link FsVolumeImpl#checkDirs()} on each volume, removing any * volumes from the active list that result in a DiskErrorException. * - * This method is synchronized to allow only one instance of checkDirs() - * call + * Use checkDirsMutext to allow only one instance of checkDirs() call + * * @return list of all the removed volumes. 
*/ - synchronized ListFsVolumeImpl checkDirs() { -ArrayListFsVolumeImpl removedVols = null; - -// Make a copy of volumes for performing modification -final ListFsVolumeImpl volumeList = new ArrayListFsVolumeImpl(volumes); + ListFsVolumeImpl checkDirs() { +synchronized(checkDirsMutex) { + ArrayListFsVolumeImpl removedVols = null; + + // Make a copy of volumes for performing modification + final ListFsVolumeImpl volumeList = new ArrayListFsVolumeImpl(volumes); -for(IteratorFsVolumeImpl i = volumeList.iterator(); i.hasNext(); ) { - final FsVolumeImpl fsv = i.next(); - try { -fsv.checkDirs(); - } catch (DiskErrorException e) { -FsDatasetImpl.LOG.warn(Removing failed volume + fsv + : ,e); -if (removedVols == null) { - removedVols = new ArrayListFsVolumeImpl(1); + for(IteratorFsVolumeImpl i = volumeList.iterator(); i.hasNext(); ) { +final FsVolumeImpl fsv = i.next(); +try { + fsv.checkDirs(); +} catch (DiskErrorException e) { + FsDatasetImpl.LOG.warn(Removing failed volume + fsv + : ,e); + if (removedVols == null) { +removedVols = new ArrayListFsVolumeImpl(1); + } + removedVols.add(fsv); + removeVolume(fsv.getBasePath()); + numFailedVolumes++; } -removedVols.add(fsv); -fsv.shutdown(); -i.remove(); // Remove the volume -numFailedVolumes++; } -} - -if (removedVols != null removedVols.size() 0) { - // Replace
hadoop git commit: HDFS-7489. Incorrect locking in FsVolumeList#checkDirs can hang datanodes (Noah Lorang via Colin P. McCabe) (cherry picked from commit d8352b9b2b99aa46679c5880a724ba3f0ceb41ff) (che
Repository: hadoop Updated Branches: refs/heads/branch-2.6 575d7bd5b - d93bfbace HDFS-7489. Incorrect locking in FsVolumeList#checkDirs can hang datanodes (Noah Lorang via Colin P. McCabe) (cherry picked from commit d8352b9b2b99aa46679c5880a724ba3f0ceb41ff) (cherry picked from commit a037d6030b5ae9422fdb265f5e4880d515be9e37) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d93bfbac Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d93bfbac Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d93bfbac Branch: refs/heads/branch-2.6 Commit: d93bfbace41fc93e1b2188bf48259ae8728b8295 Parents: 575d7bd Author: Colin Patrick Mccabe cmcc...@cloudera.com Authored: Tue Dec 9 10:55:17 2014 -0800 Committer: Colin Patrick Mccabe cmcc...@cloudera.com Committed: Tue Dec 9 10:59:49 2014 -0800 -- hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 ++ .../datanode/fsdataset/impl/FsVolumeList.java | 56 ++-- 2 files changed, 31 insertions(+), 28 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/d93bfbac/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index 1da5603..a542c66 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -18,6 +18,9 @@ Release 2.6.1 - UNRELEASED HDFS-4882. Prevent the Namenode's LeaseManager from looping forever in checkLeases (Ravi Prakash via Colin P. McCabe) +HDFS-7489. Incorrect locking in FsVolumeList#checkDirs can hang datanodes +(Noah Lorang via Colin P. 
McCabe) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/d93bfbac/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java index 837ddf7..55329ae 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java @@ -36,6 +36,7 @@ class FsVolumeList { * This list is replaced on modification holding this lock. */ volatile ListFsVolumeImpl volumes = null; + private Object checkDirsMutex = new Object(); private final VolumeChoosingPolicyFsVolumeImpl blockChooser; private volatile int numFailedVolumes; @@ -167,40 +168,39 @@ class FsVolumeList { * Calls {@link FsVolumeImpl#checkDirs()} on each volume, removing any * volumes from the active list that result in a DiskErrorException. * - * This method is synchronized to allow only one instance of checkDirs() - * call + * Use checkDirsMutext to allow only one instance of checkDirs() call + * * @return list of all the removed volumes. 
*/ - synchronized ListFsVolumeImpl checkDirs() { -ArrayListFsVolumeImpl removedVols = null; - -// Make a copy of volumes for performing modification -final ListFsVolumeImpl volumeList = new ArrayListFsVolumeImpl(volumes); + ListFsVolumeImpl checkDirs() { +synchronized(checkDirsMutex) { + ArrayListFsVolumeImpl removedVols = null; + + // Make a copy of volumes for performing modification + final ListFsVolumeImpl volumeList = new ArrayListFsVolumeImpl(volumes); -for(IteratorFsVolumeImpl i = volumeList.iterator(); i.hasNext(); ) { - final FsVolumeImpl fsv = i.next(); - try { -fsv.checkDirs(); - } catch (DiskErrorException e) { -FsDatasetImpl.LOG.warn(Removing failed volume + fsv + : ,e); -if (removedVols == null) { - removedVols = new ArrayListFsVolumeImpl(1); + for(IteratorFsVolumeImpl i = volumeList.iterator(); i.hasNext(); ) { +final FsVolumeImpl fsv = i.next(); +try { + fsv.checkDirs(); +} catch (DiskErrorException e) { + FsDatasetImpl.LOG.warn(Removing failed volume + fsv + : ,e); + if (removedVols == null) { +removedVols = new ArrayListFsVolumeImpl(1); + } + removedVols.add(fsv); + removeVolume(fsv.getBasePath()); + numFailedVolumes++; } -removedVols.add(fsv); -fsv.shutdown(); -i.remove(); // Remove the volume -numFailedVolumes++; } -} - -
hadoop git commit: HDFS-7498. Simplify the logic in INodesInPath. Contributed by Jing Zhao.
Repository: hadoop Updated Branches: refs/heads/trunk d08fc9aca - 5776a41da HDFS-7498. Simplify the logic in INodesInPath. Contributed by Jing Zhao. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5776a41d Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5776a41d Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5776a41d Branch: refs/heads/trunk Commit: 5776a41da08af653206bb94d7c76c9c4dcce059a Parents: d08fc9a Author: Jing Zhao ji...@apache.org Authored: Tue Dec 9 11:37:39 2014 -0800 Committer: Jing Zhao ji...@apache.org Committed: Tue Dec 9 11:37:39 2014 -0800 -- .../main/java/org/apache/hadoop/fs/Path.java| 1 - hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 2 + .../java/org/apache/hadoop/hdfs/DFSUtil.java| 15 +- .../server/namenode/EncryptionZoneManager.java | 10 +- .../hdfs/server/namenode/FSDirConcatOp.java | 5 +- .../hdfs/server/namenode/FSDirMkdirOp.java | 42 ++-- .../hdfs/server/namenode/FSDirRenameOp.java | 51 ++-- .../hdfs/server/namenode/FSDirSnapshotOp.java | 2 +- .../server/namenode/FSDirStatAndListingOp.java | 10 +- .../hdfs/server/namenode/FSDirectory.java | 95 .../hdfs/server/namenode/FSEditLogLoader.java | 4 +- .../hdfs/server/namenode/FSNamesystem.java | 8 +- .../server/namenode/FSPermissionChecker.java| 51 ++-- .../hdfs/server/namenode/INodesInPath.java | 240 +-- .../server/namenode/TestSnapshotPathINodes.java | 134 +-- 15 files changed, 320 insertions(+), 350 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/5776a41d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java -- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java index 54ddeda..caeb7a1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java @@ -60,7 +60,6 @@ public class Path implements Comparable { /** * Pathnames with scheme and relative path are illegal. - * @param path to be checked */ void checkNotSchemeWithRelative() { if (toUri().isAbsolute() !isUriPathAbsolute()) { http://git-wip-us.apache.org/repos/asf/hadoop/blob/5776a41d/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index 626d90a..9398429 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -447,6 +447,8 @@ Release 2.7.0 - UNRELEASED HDFS-7486. Consolidate XAttr-related implementation into a single class. (wheat9) +HDFS-7498. Simplify the logic in INodesInPath. (jing9) + OPTIMIZATIONS HDFS-7454. Reduce memory footprint for AclEntries in NameNode. http://git-wip-us.apache.org/repos/asf/hadoop/blob/5776a41d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java index f1bfcb4..8b3f512 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java @@ -341,15 +341,20 @@ public class DFSUtil { /** * Given a list of path components returns a path as a UTF8 String */ - public static String byteArray2PathString(byte[][] pathComponents) { + public static String byteArray2PathString(byte[][] pathComponents, + int offset, int length) { if (pathComponents.length == 0) { return ; -} else if (pathComponents.length == 1 +} +Preconditions.checkArgument(offset = 0 offset pathComponents.length); +Preconditions.checkArgument(length = 0 offset + length = +pathComponents.length); +if 
(pathComponents.length == 1 (pathComponents[0] == null || pathComponents[0].length == 0)) { return Path.SEPARATOR; } StringBuilder result = new StringBuilder(); -for (int i = 0; i pathComponents.length; i++) { +for (int i = offset; i offset + length; i++) { result.append(new String(pathComponents[i], Charsets.UTF_8)); if (i pathComponents.length - 1) { result.append(Path.SEPARATOR_CHAR); @@ -358,6 +363,10 @@
hadoop git commit: HDFS-7498. Simplify the logic in INodesInPath. Contributed by Jing Zhao.
Repository: hadoop Updated Branches: refs/heads/branch-2 a037d6030 - e8e86e3ec HDFS-7498. Simplify the logic in INodesInPath. Contributed by Jing Zhao. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e8e86e3e Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e8e86e3e Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e8e86e3e Branch: refs/heads/branch-2 Commit: e8e86e3ec7c97cf776c0c129388245c90f2e3b3b Parents: a037d60 Author: Jing Zhao ji...@apache.org Authored: Tue Dec 9 11:37:39 2014 -0800 Committer: Jing Zhao ji...@apache.org Committed: Tue Dec 9 11:38:07 2014 -0800 -- .../main/java/org/apache/hadoop/fs/Path.java| 1 - hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 2 + .../java/org/apache/hadoop/hdfs/DFSUtil.java| 15 +- .../server/namenode/EncryptionZoneManager.java | 10 +- .../hdfs/server/namenode/FSDirConcatOp.java | 5 +- .../hdfs/server/namenode/FSDirMkdirOp.java | 42 ++-- .../hdfs/server/namenode/FSDirRenameOp.java | 51 ++-- .../hdfs/server/namenode/FSDirSnapshotOp.java | 2 +- .../server/namenode/FSDirStatAndListingOp.java | 10 +- .../hdfs/server/namenode/FSDirectory.java | 95 .../hdfs/server/namenode/FSEditLogLoader.java | 4 +- .../hdfs/server/namenode/FSNamesystem.java | 8 +- .../server/namenode/FSPermissionChecker.java| 51 ++-- .../hdfs/server/namenode/INodesInPath.java | 240 +-- .../server/namenode/TestSnapshotPathINodes.java | 134 +-- 15 files changed, 320 insertions(+), 350 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/e8e86e3e/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java -- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java index 54ddeda..caeb7a1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java @@ -60,7 +60,6 @@ public class Path implements Comparable { /** * Pathnames with scheme and relative path are illegal. - * @param path to be checked */ void checkNotSchemeWithRelative() { if (toUri().isAbsolute() !isUriPathAbsolute()) { http://git-wip-us.apache.org/repos/asf/hadoop/blob/e8e86e3e/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index 7219553..a04e7cc 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -190,6 +190,8 @@ Release 2.7.0 - UNRELEASED HDFS-7486. Consolidate XAttr-related implementation into a single class. (wheat9) +HDFS-7498. Simplify the logic in INodesInPath. (jing9) + OPTIMIZATIONS HDFS-7454. Reduce memory footprint for AclEntries in NameNode. http://git-wip-us.apache.org/repos/asf/hadoop/blob/e8e86e3e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java index 151028f..846231a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java @@ -342,15 +342,20 @@ public class DFSUtil { /** * Given a list of path components returns a path as a UTF8 String */ - public static String byteArray2PathString(byte[][] pathComponents) { + public static String byteArray2PathString(byte[][] pathComponents, + int offset, int length) { if (pathComponents.length == 0) { return ; -} else if (pathComponents.length == 1 +} +Preconditions.checkArgument(offset = 0 offset pathComponents.length); +Preconditions.checkArgument(length = 0 offset + length = +pathComponents.length); +if 
(pathComponents.length == 1 (pathComponents[0] == null || pathComponents[0].length == 0)) { return Path.SEPARATOR; } StringBuilder result = new StringBuilder(); -for (int i = 0; i pathComponents.length; i++) { +for (int i = offset; i offset + length; i++) { result.append(new String(pathComponents[i], Charsets.UTF_8)); if (i pathComponents.length - 1) { result.append(Path.SEPARATOR_CHAR); @@ -359,6 +364,10
hadoop git commit: HADOOP-11379. Fix new findbugs warnings in hadoop-auth*. Contributed by Li Lu.
Repository: hadoop Updated Branches: refs/heads/branch-2 e8e86e3ec - b68d51e89 HADOOP-11379. Fix new findbugs warnings in hadoop-auth*. Contributed by Li Lu. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b68d51e8 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b68d51e8 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b68d51e8 Branch: refs/heads/branch-2 Commit: b68d51e8981450ba61bb2ad6c2782fef738ac00a Parents: e8e86e3 Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 13:08:51 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 13:09:01 2014 -0800 -- .../hadoop/security/authentication/examples/WhoClient.java | 5 - .../authentication/util/RandomSignerSecretProvider.java | 4 +++- .../org/apache/hadoop/security/authentication/util/Signer.java | 3 ++- .../authentication/util/StringSignerSecretProvider.java | 3 ++- .../security/authentication/util/ZKSignerSecretProvider.java| 3 ++- hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++ 6 files changed, 15 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/b68d51e8/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java -- diff --git a/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java b/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java index 2299ae1..f5cff2b 100644 --- a/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java +++ b/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java @@ -19,6 +19,7 @@ import java.io.BufferedReader; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URL; +import 
java.nio.charset.Charset; /** * Example that uses codeAuthenticatedURL/code. @@ -39,7 +40,9 @@ public class WhoClient { System.out.println(Status code: + conn.getResponseCode() + + conn.getResponseMessage()); System.out.println(); if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) { -BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream())); +BufferedReader reader = new BufferedReader( +new InputStreamReader( +conn.getInputStream(), Charset.forName(UTF-8))); String line = reader.readLine(); while (line != null) { System.out.println(line); http://git-wip-us.apache.org/repos/asf/hadoop/blob/b68d51e8/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/RandomSignerSecretProvider.java -- diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/RandomSignerSecretProvider.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/RandomSignerSecretProvider.java index 29e5661..41059a7 100644 --- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/RandomSignerSecretProvider.java +++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/RandomSignerSecretProvider.java @@ -14,6 +14,8 @@ package org.apache.hadoop.security.authentication.util; import com.google.common.annotations.VisibleForTesting; + +import java.nio.charset.Charset; import java.util.Random; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -46,6 +48,6 @@ public class RandomSignerSecretProvider extends RolloverSignerSecretProvider { @Override protected byte[] generateNewSecret() { -return Long.toString(rand.nextLong()).getBytes(); +return Long.toString(rand.nextLong()).getBytes(Charset.forName(UTF-8)); } } 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b68d51e8/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/Signer.java -- diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/Signer.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/Signer.java index f639503..aa63e40 100644 ---
hadoop git commit: HADOOP-11379. Fix new findbugs warnings in hadoop-auth*. Contributed by Li Lu.
Repository: hadoop Updated Branches: refs/heads/trunk 5776a41da - 6df457a3d HADOOP-11379. Fix new findbugs warnings in hadoop-auth*. Contributed by Li Lu. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6df457a3 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6df457a3 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6df457a3 Branch: refs/heads/trunk Commit: 6df457a3d7661a890e84fc89567f29d0fe23c970 Parents: 5776a41 Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 13:08:51 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 13:08:51 2014 -0800 -- .../hadoop/security/authentication/examples/WhoClient.java | 5 - .../authentication/util/RandomSignerSecretProvider.java | 4 +++- .../org/apache/hadoop/security/authentication/util/Signer.java | 3 ++- .../authentication/util/StringSignerSecretProvider.java | 3 ++- .../security/authentication/util/ZKSignerSecretProvider.java| 3 ++- hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++ 6 files changed, 15 insertions(+), 5 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/6df457a3/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java -- diff --git a/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java b/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java index 2299ae1..f5cff2b 100644 --- a/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java +++ b/hadoop-common-project/hadoop-auth-examples/src/main/java/org/apache/hadoop/security/authentication/examples/WhoClient.java @@ -19,6 +19,7 @@ import java.io.BufferedReader; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URL; +import 
java.nio.charset.Charset; /** * Example that uses codeAuthenticatedURL/code. @@ -39,7 +40,9 @@ public class WhoClient { System.out.println(Status code: + conn.getResponseCode() + + conn.getResponseMessage()); System.out.println(); if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) { -BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream())); +BufferedReader reader = new BufferedReader( +new InputStreamReader( +conn.getInputStream(), Charset.forName(UTF-8))); String line = reader.readLine(); while (line != null) { System.out.println(line); http://git-wip-us.apache.org/repos/asf/hadoop/blob/6df457a3/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/RandomSignerSecretProvider.java -- diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/RandomSignerSecretProvider.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/RandomSignerSecretProvider.java index 29e5661..41059a7 100644 --- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/RandomSignerSecretProvider.java +++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/RandomSignerSecretProvider.java @@ -14,6 +14,8 @@ package org.apache.hadoop.security.authentication.util; import com.google.common.annotations.VisibleForTesting; + +import java.nio.charset.Charset; import java.util.Random; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -46,6 +48,6 @@ public class RandomSignerSecretProvider extends RolloverSignerSecretProvider { @Override protected byte[] generateNewSecret() { -return Long.toString(rand.nextLong()).getBytes(); +return Long.toString(rand.nextLong()).getBytes(Charset.forName(UTF-8)); } } 
http://git-wip-us.apache.org/repos/asf/hadoop/blob/6df457a3/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/Signer.java -- diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/Signer.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/Signer.java index f639503..aa63e40 100644 ---
hadoop git commit: HADOOP-11378. Fix new findbugs warnings in hadoop-kms. Contributed by Li Lu.
Repository: hadoop Updated Branches: refs/heads/branch-2 b68d51e89 - 784f48147 HADOOP-11378. Fix new findbugs warnings in hadoop-kms. Contributed by Li Lu. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/784f4814 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/784f4814 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/784f4814 Branch: refs/heads/branch-2 Commit: 784f481473276f25ca8f967230829d13e5dfff20 Parents: b68d51e Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 13:10:03 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 13:10:09 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++ .../org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/784f4814/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 2d4a1d3..1cf5bd7 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -184,6 +184,8 @@ Release 2.7.0 - UNRELEASED HADOOP-11379. Fix new findbugs warnings in hadoop-auth*. (Li Lu via wheat9) +HADOOP-11378. Fix new findbugs warnings in hadoop-kms. 
(Li Lu via wheat9) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/784f4814/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java -- diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java index 3674e7a..31fac9f 100644 --- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java +++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJSONWriter.java @@ -32,6 +32,7 @@ import java.io.OutputStreamWriter; import java.io.Writer; import java.lang.annotation.Annotation; import java.lang.reflect.Type; +import java.nio.charset.Charset; import java.util.List; import java.util.Map; @@ -62,7 +63,8 @@ public class KMSJSONWriter implements MessageBodyWriterObject { Annotation[] annotations, MediaType mediaType, MultivaluedMapString, Object stringObjectMultivaluedMap, OutputStream outputStream) throws IOException, WebApplicationException { -Writer writer = new OutputStreamWriter(outputStream); +Writer writer = new OutputStreamWriter(outputStream, Charset +.forName(UTF-8)); ObjectMapper jsonMapper = new ObjectMapper(); jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, obj); }
hadoop git commit: YARN-2910. FSLeafQueue can throw ConcurrentModificationException. (Wilfred Spiegelenburg via kasha)
Repository: hadoop Updated Branches: refs/heads/branch-2 784f48147 - 1986ea8dd YARN-2910. FSLeafQueue can throw ConcurrentModificationException. (Wilfred Spiegelenburg via kasha) (cherry picked from commit a2e07a54561a57a83b943628ebbc53ed5ba52718) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1986ea8d Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1986ea8d Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1986ea8d Branch: refs/heads/branch-2 Commit: 1986ea8dd223267ced3e3aef69980b46e2fef740 Parents: 784f481 Author: Karthik Kambatla ka...@apache.org Authored: Tue Dec 9 14:00:31 2014 -0800 Committer: Karthik Kambatla ka...@apache.org Committed: Tue Dec 9 14:00:52 2014 -0800 -- hadoop-yarn-project/CHANGES.txt | 3 + .../scheduler/fair/FSAppAttempt.java| 2 +- .../scheduler/fair/FSLeafQueue.java | 151 +-- .../scheduler/fair/TestFSLeafQueue.java | 93 +++- 4 files changed, 199 insertions(+), 50 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/1986ea8d/hadoop-yarn-project/CHANGES.txt -- diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index d8b64fb..8cdf40b 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -173,6 +173,9 @@ Release 2.7.0 - UNRELEASED YARN-2931. PublicLocalizer may fail until directory is initialized by LocalizeRunner. (Anubhav Dhoot via kasha) +YARN-2910. FSLeafQueue can throw ConcurrentModificationException. 
+(Wilfred Spiegelenburg via kasha) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/1986ea8d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSAppAttempt.java -- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSAppAttempt.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSAppAttempt.java index b9966e7..b23ec3e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSAppAttempt.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSAppAttempt.java @@ -172,7 +172,7 @@ public class FSAppAttempt extends SchedulerApplicationAttempt } @Override - public synchronized Resource getHeadroom() { + public Resource getHeadroom() { final FSQueue queue = (FSQueue) this.queue; SchedulingPolicy policy = queue.getPolicy(); http://git-wip-us.apache.org/repos/asf/hadoop/blob/1986ea8d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSLeafQueue.java -- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSLeafQueue.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSLeafQueue.java index 345ea8b..bbf1be7 
100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSLeafQueue.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSLeafQueue.java @@ -23,6 +23,9 @@ import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.List; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.logging.Log; @@ -50,6 +53,10 @@ public class FSLeafQueue extends FSQueue { new ArrayListFSAppAttempt(); private final ListFSAppAttempt nonRunnableApps = new ArrayListFSAppAttempt(); + // get a lock with fair distribution for app
hadoop git commit: HADOOP-11349. RawLocalFileSystem leaks file descriptor while creating a file if creat succeeds but chmod fails. (Varun Saxena via Colin P. McCabe)
Repository: hadoop Updated Branches: refs/heads/trunk a2e07a545 - 03867eb1b HADOOP-11349. RawLocalFileSystem leaks file descriptor while creating a file if creat succeeds but chmod fails. (Varun Saxena via Colin P. McCabe) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/03867eb1 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/03867eb1 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/03867eb1 Branch: refs/heads/trunk Commit: 03867eb1bb173c66b5eb3bebf2fe03a1188635b5 Parents: a2e07a5 Author: Colin Patrick Mccabe cmcc...@cloudera.com Authored: Tue Dec 9 14:31:44 2014 -0800 Committer: Colin Patrick Mccabe cmcc...@cloudera.com Committed: Tue Dec 9 14:31:44 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 4 .../apache/hadoop/fs/RawLocalFileSystem.java| 25 2 files changed, 25 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/03867eb1/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 40aab85..0019b3a 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -552,6 +552,10 @@ Release 2.7.0 - UNRELEASED HADOOP-11378. Fix new findbugs warnings in hadoop-kms. (Li Lu via wheat9) +HADOOP-11349. RawLocalFileSystem leaks file descriptor while creating a +file if creat succeeds but chmod fails. (Varun Saxena via Colin P. 
McCabe) + + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/03867eb1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java -- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java index b6b6f59..858789e 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java @@ -41,6 +41,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.nativeio.NativeIO; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.Shell; @@ -295,8 +296,16 @@ public class RawLocalFileSystem extends FileSystem { FSDataOutputStream out = create(f, overwrite, bufferSize, replication, blockSize, progress); -setPermission(f, permission); -return out; +boolean success = false; +try { + setPermission(f, permission); + success = true; + return out; +} finally { + if (!success) { +IOUtils.cleanup(LOG, out); + } +} } @Override @@ -306,8 +315,16 @@ public class RawLocalFileSystem extends FileSystem { Progressable progress) throws IOException { FSDataOutputStream out = create(f, overwrite, false, bufferSize, replication, blockSize, progress); -setPermission(f, permission); -return out; +boolean success = false; +try { + setPermission(f, permission); + success = true; + return out; +} finally { + if (!success) { +IOUtils.cleanup(LOG, out); + } +} } @Override
hadoop git commit: HADOOP-11349. RawLocalFileSystem leaks file descriptor while creating a file if creat succeeds but chmod fails. (Varun Saxena via Colin P. McCabe) (cherry picked from commit 03867eb
Repository: hadoop Updated Branches: refs/heads/branch-2 1986ea8dd - 3637bc8cd HADOOP-11349. RawLocalFileSystem leaks file descriptor while creating a file if creat succeeds but chmod fails. (Varun Saxena via Colin P. McCabe) (cherry picked from commit 03867eb1bb173c66b5eb3bebf2fe03a1188635b5) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3637bc8c Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3637bc8c Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3637bc8c Branch: refs/heads/branch-2 Commit: 3637bc8cd9b7f2b81b5de555f212f37561fd82fb Parents: 1986ea8 Author: Colin Patrick Mccabe cmcc...@cloudera.com Authored: Tue Dec 9 14:31:44 2014 -0800 Committer: Colin Patrick Mccabe cmcc...@cloudera.com Committed: Tue Dec 9 14:32:25 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 4 .../apache/hadoop/fs/RawLocalFileSystem.java| 25 2 files changed, 25 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/3637bc8c/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 1cf5bd7..a5cb164 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -186,6 +186,10 @@ Release 2.7.0 - UNRELEASED HADOOP-11378. Fix new findbugs warnings in hadoop-kms. (Li Lu via wheat9) +HADOOP-11349. RawLocalFileSystem leaks file descriptor while creating a +file if creat succeeds but chmod fails. (Varun Saxena via Colin P. 
McCabe) + + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/3637bc8c/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java -- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java index ca815a3..be32031 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java @@ -41,6 +41,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.nativeio.NativeIO; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.Shell; @@ -295,8 +296,16 @@ public class RawLocalFileSystem extends FileSystem { FSDataOutputStream out = create(f, overwrite, bufferSize, replication, blockSize, progress); -setPermission(f, permission); -return out; +boolean success = false; +try { + setPermission(f, permission); + success = true; + return out; +} finally { + if (!success) { +IOUtils.cleanup(LOG, out); + } +} } @Override @@ -306,8 +315,16 @@ public class RawLocalFileSystem extends FileSystem { Progressable progress) throws IOException { FSDataOutputStream out = create(f, overwrite, false, bufferSize, replication, blockSize, progress); -setPermission(f, permission); -return out; +boolean success = false; +try { + setPermission(f, permission); + success = true; + return out; +} finally { + if (!success) { +IOUtils.cleanup(LOG, out); + } +} } @Override
hadoop git commit: YARN-2930. Fixed TestRMRestart#testRMRestartRecoveringNodeLabelManager intermittent failure. Contributed by Wangda Tan (cherry picked from commit 2ed90a57fdd31d194b4a690df68b158ed97
Repository: hadoop Updated Branches: refs/heads/branch-2 3637bc8cd - 0f47e4e74 YARN-2930. Fixed TestRMRestart#testRMRestartRecoveringNodeLabelManager intermittent failure. Contributed by Wangda Tan (cherry picked from commit 2ed90a57fdd31d194b4a690df68b158ed9743dba) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0f47e4e7 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0f47e4e7 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0f47e4e7 Branch: refs/heads/branch-2 Commit: 0f47e4e74584f98cef894881b0395492c6f541f1 Parents: 3637bc8 Author: Jian He jia...@apache.org Authored: Tue Dec 9 16:47:24 2014 -0800 Committer: Jian He jia...@apache.org Committed: Tue Dec 9 16:48:46 2014 -0800 -- hadoop-yarn-project/CHANGES.txt | 3 +++ .../yarn/server/resourcemanager/TestRMRestart.java| 14 ++ 2 files changed, 17 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f47e4e7/hadoop-yarn-project/CHANGES.txt -- diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index 8cdf40b..0a38d15 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -176,6 +176,9 @@ Release 2.7.0 - UNRELEASED YARN-2910. FSLeafQueue can throw ConcurrentModificationException. (Wilfred Spiegelenburg via kasha) +YARN-2930. Fixed TestRMRestart#testRMRestartRecoveringNodeLabelManager +intermittent failure. 
(Wangda Tan via jianhe) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f47e4e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java -- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java index 29f0208..fcb2be7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java @@ -39,6 +39,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import org.apache.commons.io.FileUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.io.DataOutputBuffer; @@ -2048,6 +2049,19 @@ public class TestRMRestart extends ParameterizedSchedulerTestBase { // 4. 
Get cluster and node label, it should be present by recovering it @Test(timeout = 2) public void testRMRestartRecoveringNodeLabelManager() throws Exception { +// Initialize FS node label store root dir to a random tmp dir +File nodeLabelFsStoreDir = +new File(target, this.getClass().getSimpleName() ++ -testRMRestartRecoveringNodeLabelManager); +if (nodeLabelFsStoreDir.exists()) { + FileUtils.deleteDirectory(nodeLabelFsStoreDir); +} +nodeLabelFsStoreDir.deleteOnExit(); + +String nodeLabelFsStoreDirURI = nodeLabelFsStoreDir.toURI().toString(); +conf.set(YarnConfiguration.FS_NODE_LABELS_STORE_ROOT_DIR, +nodeLabelFsStoreDirURI); + MemoryRMStateStore memStore = new MemoryRMStateStore(); memStore.init(conf); MockRM rm1 = new MockRM(conf, memStore) {
hadoop git commit: YARN-2930. Fixed TestRMRestart#testRMRestartRecoveringNodeLabelManager intermittent failure. Contributed by Wangda Tan
Repository: hadoop Updated Branches: refs/heads/trunk 03867eb1b - 2ed90a57f YARN-2930. Fixed TestRMRestart#testRMRestartRecoveringNodeLabelManager intermittent failure. Contributed by Wangda Tan Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2ed90a57 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2ed90a57 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2ed90a57 Branch: refs/heads/trunk Commit: 2ed90a57fdd31d194b4a690df68b158ed9743dba Parents: 03867eb Author: Jian He jia...@apache.org Authored: Tue Dec 9 16:47:24 2014 -0800 Committer: Jian He jia...@apache.org Committed: Tue Dec 9 16:48:04 2014 -0800 -- hadoop-yarn-project/CHANGES.txt | 3 +++ .../yarn/server/resourcemanager/TestRMRestart.java| 14 ++ 2 files changed, 17 insertions(+) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/2ed90a57/hadoop-yarn-project/CHANGES.txt -- diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index d87322f..0173782 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -206,6 +206,9 @@ Release 2.7.0 - UNRELEASED YARN-2910. FSLeafQueue can throw ConcurrentModificationException. (Wilfred Spiegelenburg via kasha) +YARN-2930. Fixed TestRMRestart#testRMRestartRecoveringNodeLabelManager +intermittent failure. 
(Wangda Tan via jianhe) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/2ed90a57/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java -- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java index 29f0208..fcb2be7 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java @@ -39,6 +39,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import org.apache.commons.io.FileUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.io.DataOutputBuffer; @@ -2048,6 +2049,19 @@ public class TestRMRestart extends ParameterizedSchedulerTestBase { // 4. 
Get cluster and node label, it should be present by recovering it @Test(timeout = 2) public void testRMRestartRecoveringNodeLabelManager() throws Exception { +// Initialize FS node label store root dir to a random tmp dir +File nodeLabelFsStoreDir = +new File(target, this.getClass().getSimpleName() ++ -testRMRestartRecoveringNodeLabelManager); +if (nodeLabelFsStoreDir.exists()) { + FileUtils.deleteDirectory(nodeLabelFsStoreDir); +} +nodeLabelFsStoreDir.deleteOnExit(); + +String nodeLabelFsStoreDirURI = nodeLabelFsStoreDir.toURI().toString(); +conf.set(YarnConfiguration.FS_NODE_LABELS_STORE_ROOT_DIR, +nodeLabelFsStoreDirURI); + MemoryRMStateStore memStore = new MemoryRMStateStore(); memStore.init(conf); MockRM rm1 = new MockRM(conf, memStore) {
hadoop git commit: YARN-2924. Fixed RMAdminCLI to not convert node labels to lower case. Contributed by Wangda Tan (cherry picked from commit 437322afcaa4b1b260501af160283c97eb589419)
Repository: hadoop Updated Branches: refs/heads/branch-2 0f47e4e74 - b31dfb17d YARN-2924. Fixed RMAdminCLI to not convert node labels to lower case. Contributed by Wangda Tan (cherry picked from commit 437322afcaa4b1b260501af160283c97eb589419) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b31dfb17 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b31dfb17 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b31dfb17 Branch: refs/heads/branch-2 Commit: b31dfb17d84136c1c5f31b0f00e0a62d60aec690 Parents: 0f47e4e Author: Jian He jia...@apache.org Authored: Tue Dec 9 17:56:04 2014 -0800 Committer: Jian He jia...@apache.org Committed: Tue Dec 9 17:56:41 2014 -0800 -- hadoop-yarn-project/CHANGES.txt | 3 +++ .../main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java | 2 +- .../java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java | 4 ++-- 3 files changed, 6 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/b31dfb17/hadoop-yarn-project/CHANGES.txt -- diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index 0a38d15..fafd129 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -179,6 +179,9 @@ Release 2.7.0 - UNRELEASED YARN-2930. Fixed TestRMRestart#testRMRestartRecoveringNodeLabelManager intermittent failure. (Wangda Tan via jianhe) +YARN-2924. Fixed RMAdminCLI to not convert node labels to lower case. 
+(Wangda Tan via jianhe) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/b31dfb17/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java -- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java index 89d87cf..c7cc4d2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java @@ -399,7 +399,7 @@ public class RMAdminCLI extends HAAdmin { for (int i = 1; i splits.length; i++) { if (!splits[i].trim().isEmpty()) { - map.get(nodeId).add(splits[i].trim().toLowerCase()); + map.get(nodeId).add(splits[i].trim()); } } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/b31dfb17/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java -- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java index 6176a3e..bee114b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java @@ -468,9 +468,9 @@ public class TestRMAdminCLI { @Test public void testReplaceLabelsOnNode() throws Exception { // Successfully replace labels -dummyNodeLabelsManager.addToCluserNodeLabels(ImmutableSet.of(x, y)); 
+dummyNodeLabelsManager.addToCluserNodeLabels(ImmutableSet.of(x, Y)); String[] args = -{ -replaceLabelsOnNode, node1,x,y node2,y, +{ -replaceLabelsOnNode, node1,x,Y node2,Y, -directlyAccessNodeLabelStore }; assertEquals(0, rmAdminCLI.run(args)); assertTrue(dummyNodeLabelsManager.getNodeLabels().containsKey(
hadoop git commit: YARN-2924. Fixed RMAdminCLI to not convert node labels to lower case. Contributed by Wangda Tan
Repository: hadoop Updated Branches: refs/heads/trunk 2ed90a57f - 437322afc YARN-2924. Fixed RMAdminCLI to not convert node labels to lower case. Contributed by Wangda Tan Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/437322af Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/437322af Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/437322af Branch: refs/heads/trunk Commit: 437322afcaa4b1b260501af160283c97eb589419 Parents: 2ed90a5 Author: Jian He jia...@apache.org Authored: Tue Dec 9 17:56:04 2014 -0800 Committer: Jian He jia...@apache.org Committed: Tue Dec 9 17:56:04 2014 -0800 -- hadoop-yarn-project/CHANGES.txt | 3 +++ .../main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java | 2 +- .../java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java | 4 ++-- 3 files changed, 6 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/437322af/hadoop-yarn-project/CHANGES.txt -- diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt index 0173782..81d5707 100644 --- a/hadoop-yarn-project/CHANGES.txt +++ b/hadoop-yarn-project/CHANGES.txt @@ -209,6 +209,9 @@ Release 2.7.0 - UNRELEASED YARN-2930. Fixed TestRMRestart#testRMRestartRecoveringNodeLabelManager intermittent failure. (Wangda Tan via jianhe) +YARN-2924. Fixed RMAdminCLI to not convert node labels to lower case. 
+(Wangda Tan via jianhe) + Release 2.6.0 - 2014-11-18 INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/437322af/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java -- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java index 89d87cf..c7cc4d2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/RMAdminCLI.java @@ -399,7 +399,7 @@ public class RMAdminCLI extends HAAdmin { for (int i = 1; i splits.length; i++) { if (!splits[i].trim().isEmpty()) { - map.get(nodeId).add(splits[i].trim().toLowerCase()); + map.get(nodeId).add(splits[i].trim()); } } } http://git-wip-us.apache.org/repos/asf/hadoop/blob/437322af/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java -- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java index 6176a3e..bee114b 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestRMAdminCLI.java @@ -468,9 +468,9 @@ public class TestRMAdminCLI { @Test public void testReplaceLabelsOnNode() throws Exception { // Successfully replace labels -dummyNodeLabelsManager.addToCluserNodeLabels(ImmutableSet.of(x, y)); 
+dummyNodeLabelsManager.addToCluserNodeLabels(ImmutableSet.of(x, Y)); String[] args = -{ -replaceLabelsOnNode, node1,x,y node2,y, +{ -replaceLabelsOnNode, node1,x,Y node2,Y, -directlyAccessNodeLabelStore }; assertEquals(0, rmAdminCLI.run(args)); assertTrue(dummyNodeLabelsManager.getNodeLabels().containsKey(
hadoop git commit: HDFS-7481. Add ACL indicator to the 'Permission Denied' exception. (Contributed by Vinayakumar B )
Repository: hadoop Updated Branches: refs/heads/trunk 437322afc - d93f3b981 HDFS-7481. Add ACL indicator to the 'Permission Denied' exception. (Contributed by Vinayakumar B ) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d93f3b98 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d93f3b98 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d93f3b98 Branch: refs/heads/trunk Commit: d93f3b9815f90d24c838574a56013e6dc60dc5ad Parents: 437322a Author: Vinayakumar B vinayakum...@apache.org Authored: Wed Dec 10 08:27:15 2014 +0530 Committer: Vinayakumar B vinayakum...@apache.org Committed: Wed Dec 10 08:27:15 2014 +0530 -- hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 +++ .../hadoop/hdfs/server/namenode/FSPermissionChecker.java | 11 ++- 2 files changed, 13 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/d93f3b98/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index 9398429..d141439 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -558,6 +558,9 @@ Release 2.7.0 - UNRELEASED HDFS-7473. Document setting dfs.namenode.fs-limits.max-directory-items to 0 is invalid. (Akira AJISAKA via cnauroth) +HDFS-7481. Add ACL indicator to the Permission Denied exception. 
+(vinayakumarb) + Release 2.6.1 - UNRELEASED INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/d93f3b98/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java index 8de8c54..0508484 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java @@ -47,6 +47,12 @@ class FSPermissionChecker { /** @return a string for throwing {@link AccessControlException} */ private String toAccessControlString(INode inode, int snapshotId, FsAction access, FsPermission mode) { +return toAccessControlString(inode, snapshotId, access, mode, false); + } + + /** @return a string for throwing {@link AccessControlException} */ + private String toAccessControlString(INode inode, int snapshotId, FsAction access, + FsPermission mode, boolean deniedFromAcl) { StringBuilder sb = new StringBuilder(Permission denied: ) .append(user=).append(user).append(, ) .append(access=).append(access).append(, ) @@ -55,6 +61,9 @@ class FSPermissionChecker { .append(inode.getGroupName(snapshotId)).append(':') .append(inode.isDirectory() ? 'd' : '-') .append(mode); +if (deniedFromAcl) { + sb.append(+); +} return sb.toString(); } @@ -338,7 +347,7 @@ class FSPermissionChecker { } throw new AccessControlException( - toAccessControlString(inode, snapshotId, access, mode)); + toAccessControlString(inode, snapshotId, access, mode, true)); } /** Guarded by {@link FSNamesystem#readLock()} */
hadoop git commit: HDFS-7481. Add ACL indicator to the 'Permission Denied' exception. (Contributed by Vinayakumar B )
Repository: hadoop Updated Branches: refs/heads/branch-2 b31dfb17d - 19627e589 HDFS-7481. Add ACL indicator to the 'Permission Denied' exception. (Contributed by Vinayakumar B ) (cherry picked from commit d93f3b9815f90d24c838574a56013e6dc60dc5ad) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/19627e58 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/19627e58 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/19627e58 Branch: refs/heads/branch-2 Commit: 19627e5897388dadb85bee5f022ee195d189844b Parents: b31dfb1 Author: Vinayakumar B vinayakum...@apache.org Authored: Wed Dec 10 08:27:15 2014 +0530 Committer: Vinayakumar B vinayakum...@apache.org Committed: Wed Dec 10 08:32:39 2014 +0530 -- hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 +++ .../hadoop/hdfs/server/namenode/FSPermissionChecker.java | 11 ++- 2 files changed, 13 insertions(+), 1 deletion(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/19627e58/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index a04e7cc..2496083 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -301,6 +301,9 @@ Release 2.7.0 - UNRELEASED HDFS-7473. Document setting dfs.namenode.fs-limits.max-directory-items to 0 is invalid. (Akira AJISAKA via cnauroth) +HDFS-7481. Add ACL indicator to the Permission Denied exception. 
+(vinayakumarb) + Release 2.6.1 - UNRELEASED INCOMPATIBLE CHANGES http://git-wip-us.apache.org/repos/asf/hadoop/blob/19627e58/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java index 8de8c54..0508484 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker.java @@ -47,6 +47,12 @@ class FSPermissionChecker { /** @return a string for throwing {@link AccessControlException} */ private String toAccessControlString(INode inode, int snapshotId, FsAction access, FsPermission mode) { +return toAccessControlString(inode, snapshotId, access, mode, false); + } + + /** @return a string for throwing {@link AccessControlException} */ + private String toAccessControlString(INode inode, int snapshotId, FsAction access, + FsPermission mode, boolean deniedFromAcl) { StringBuilder sb = new StringBuilder(Permission denied: ) .append(user=).append(user).append(, ) .append(access=).append(access).append(, ) @@ -55,6 +61,9 @@ class FSPermissionChecker { .append(inode.getGroupName(snapshotId)).append(':') .append(inode.isDirectory() ? 'd' : '-') .append(mode); +if (deniedFromAcl) { + sb.append(+); +} return sb.toString(); } @@ -338,7 +347,7 @@ class FSPermissionChecker { } throw new AccessControlException( - toAccessControlString(inode, snapshotId, access, mode)); + toAccessControlString(inode, snapshotId, access, mode, true)); } /** Guarded by {@link FSNamesystem#readLock()} */
hadoop git commit: HDFS-7502. Fix findbugs warning in hdfs-nfs project. Contributed by Brandon Li.
Repository: hadoop Updated Branches: refs/heads/branch-2 19627e589 - c2514aaa8 HDFS-7502. Fix findbugs warning in hdfs-nfs project. Contributed by Brandon Li. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c2514aaa Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c2514aaa Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c2514aaa Branch: refs/heads/branch-2 Commit: c2514aaa8352dc0f33c8962fae51f97f01201475 Parents: 19627e5 Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 20:42:42 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 20:43:20 2014 -0800 -- .../apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java | 15 +-- hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 +++ 2 files changed, 12 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/c2514aaa/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java index c860dd5..aaac797 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java +++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java @@ -25,6 +25,7 @@ import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.nio.ByteBuffer; +import java.nio.charset.Charset; import java.util.EnumSet; import org.apache.commons.logging.Log; @@ -651,15 +652,16 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface { } int rtmax = config.getInt(NfsConfigKeys.DFS_NFS_MAX_READ_TRANSFER_SIZE_KEY, NfsConfigKeys.DFS_NFS_MAX_READ_TRANSFER_SIZE_DEFAULT); - if (rtmax target.getBytes().length) { -LOG.error(Link 
size: + target.getBytes().length + if (rtmax target.getBytes(Charset.forName(UTF-8)).length) { +LOG.error(Link size: ++ target.getBytes(Charset.forName(UTF-8)).length + is larger than max transfer size: + rtmax); return new READLINK3Response(Nfs3Status.NFS3ERR_IO, postOpAttr, new byte[0]); } return new READLINK3Response(Nfs3Status.NFS3_OK, postOpAttr, - target.getBytes()); + target.getBytes(Charset.forName(UTF-8))); } catch (IOException e) { LOG.warn(Readlink error: + e.getClass(), e); @@ -1462,7 +1464,8 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface { throw io; } // This happens when startAfter was just deleted - LOG.info(Cookie couldn't be found: + new String(startAfter) + LOG.info(Cookie couldn't be found: + + new String(startAfter, Charset.forName(UTF-8)) + , do listing from beginning); dlisting = dfsClient .listPaths(dirFileIdPath, HdfsFileStatus.EMPTY_NAME); @@ -1571,7 +1574,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface { startAfter = HdfsFileStatus.EMPTY_NAME; } else { String inodeIdPath = Nfs3Utils.getFileIdPath(cookie); -startAfter = inodeIdPath.getBytes(); +startAfter = inodeIdPath.getBytes(Charset.forName(UTF-8)); } dlisting = listPaths(dfsClient, dirFileIdPath, startAfter); @@ -1733,7 +1736,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface { startAfter = HdfsFileStatus.EMPTY_NAME; } else { String inodeIdPath = Nfs3Utils.getFileIdPath(cookie); -startAfter = inodeIdPath.getBytes(); +startAfter = inodeIdPath.getBytes(Charset.forName(UTF-8)); } dlisting = listPaths(dfsClient, dirFileIdPath, startAfter); http://git-wip-us.apache.org/repos/asf/hadoop/blob/c2514aaa/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index 2496083..8d1facd 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -304,6 +304,9 @@ Release 2.7.0 - 
UNRELEASED HDFS-7481. Add ACL indicator to the Permission Denied exception. (vinayakumarb) +HDFS-7502. Fix findbugs warning in hdfs-nfs project. +(Brandon Li via wheat9) + Release 2.6.1 - UNRELEASED INCOMPATIBLE CHANGES
hadoop git commit: HDFS-7502. Fix findbugs warning in hdfs-nfs project. Contributed by Brandon Li.
Repository: hadoop Updated Branches: refs/heads/trunk d93f3b981 - 195f31a8e HDFS-7502. Fix findbugs warning in hdfs-nfs project. Contributed by Brandon Li. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/195f31a8 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/195f31a8 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/195f31a8 Branch: refs/heads/trunk Commit: 195f31a8ef6b15e1962ab945b2f83af98e0058c6 Parents: d93f3b9 Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 20:42:42 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 20:42:42 2014 -0800 -- .../apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java | 15 +-- hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 +++ 2 files changed, 12 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/195f31a8/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java index c860dd5..aaac797 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java +++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java @@ -25,6 +25,7 @@ import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.nio.ByteBuffer; +import java.nio.charset.Charset; import java.util.EnumSet; import org.apache.commons.logging.Log; @@ -651,15 +652,16 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface { } int rtmax = config.getInt(NfsConfigKeys.DFS_NFS_MAX_READ_TRANSFER_SIZE_KEY, NfsConfigKeys.DFS_NFS_MAX_READ_TRANSFER_SIZE_DEFAULT); - if (rtmax target.getBytes().length) { -LOG.error(Link size: + 
target.getBytes().length + if (rtmax target.getBytes(Charset.forName(UTF-8)).length) { +LOG.error(Link size: ++ target.getBytes(Charset.forName(UTF-8)).length + is larger than max transfer size: + rtmax); return new READLINK3Response(Nfs3Status.NFS3ERR_IO, postOpAttr, new byte[0]); } return new READLINK3Response(Nfs3Status.NFS3_OK, postOpAttr, - target.getBytes()); + target.getBytes(Charset.forName(UTF-8))); } catch (IOException e) { LOG.warn(Readlink error: + e.getClass(), e); @@ -1462,7 +1464,8 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface { throw io; } // This happens when startAfter was just deleted - LOG.info(Cookie couldn't be found: + new String(startAfter) + LOG.info(Cookie couldn't be found: + + new String(startAfter, Charset.forName(UTF-8)) + , do listing from beginning); dlisting = dfsClient .listPaths(dirFileIdPath, HdfsFileStatus.EMPTY_NAME); @@ -1571,7 +1574,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface { startAfter = HdfsFileStatus.EMPTY_NAME; } else { String inodeIdPath = Nfs3Utils.getFileIdPath(cookie); -startAfter = inodeIdPath.getBytes(); +startAfter = inodeIdPath.getBytes(Charset.forName(UTF-8)); } dlisting = listPaths(dfsClient, dirFileIdPath, startAfter); @@ -1733,7 +1736,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface { startAfter = HdfsFileStatus.EMPTY_NAME; } else { String inodeIdPath = Nfs3Utils.getFileIdPath(cookie); -startAfter = inodeIdPath.getBytes(); +startAfter = inodeIdPath.getBytes(Charset.forName(UTF-8)); } dlisting = listPaths(dfsClient, dirFileIdPath, startAfter); http://git-wip-us.apache.org/repos/asf/hadoop/blob/195f31a8/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index d141439..9f3f9ee 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -561,6 +561,9 @@ Release 2.7.0 - 
UNRELEASED HDFS-7481. Add ACL indicator to the Permission Denied exception. (vinayakumarb) +HDFS-7502. Fix findbugs warning in hdfs-nfs project. +(Brandon Li via wheat9) + Release 2.6.1 - UNRELEASED INCOMPATIBLE CHANGES
hadoop git commit: HADOOP-11381. Fix findbugs warnings in hadoop-distcp, hadoop-aws, hadoop-azure, and hadoop-openstack. Contributed by Li Lu.
Repository: hadoop Updated Branches: refs/heads/branch-2 c2514aaa8 - a1e4a12dc HADOOP-11381. Fix findbugs warnings in hadoop-distcp, hadoop-aws, hadoop-azure, and hadoop-openstack. Contributed by Li Lu. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a1e4a12d Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a1e4a12d Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a1e4a12d Branch: refs/heads/branch-2 Commit: a1e4a12dc07fd6541c4d22318d0cc3070fc1d2dc Parents: c2514aa Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 20:45:21 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 20:46:44 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++ .../src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java | 4 .../java/org/apache/hadoop/tools/FileBasedCopyListing.java| 4 +++- .../hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java | 7 --- .../java/org/apache/hadoop/fs/swift/util/SwiftTestUtils.java | 4 ++-- 5 files changed, 15 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/a1e4a12d/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index a5cb164..d02a1e0 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -189,6 +189,8 @@ Release 2.7.0 - UNRELEASED HADOOP-11349. RawLocalFileSystem leaks file descriptor while creating a file if creat succeeds but chmod fails. (Varun Saxena via Colin P. McCabe) +HADOOP-11381. Fix findbugs warnings in hadoop-distcp, hadoop-aws, +hadoop-azure, and hadoop-openstack. 
(Li Lu via wheat9) Release 2.6.0 - 2014-11-18 http://git-wip-us.apache.org/repos/asf/hadoop/blob/a1e4a12d/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java -- diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java index 6bdd233..457351d 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java @@ -875,6 +875,8 @@ public class S3AFileSystem extends FileSystem { case ProgressEvent.PART_COMPLETED_EVENT_CODE: statistics.incrementWriteOps(1); break; + default: +break; } } }; @@ -933,6 +935,8 @@ public class S3AFileSystem extends FileSystem { case ProgressEvent.PART_COMPLETED_EVENT_CODE: statistics.incrementWriteOps(1); break; + default: +break; } } }; http://git-wip-us.apache.org/repos/asf/hadoop/blob/a1e4a12d/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/FileBasedCopyListing.java -- diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/FileBasedCopyListing.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/FileBasedCopyListing.java index 0fe93c2..2bc343e 100644 --- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/FileBasedCopyListing.java +++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/FileBasedCopyListing.java @@ -27,6 +27,7 @@ import org.apache.hadoop.security.Credentials; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; +import java.nio.charset.Charset; import java.util.ArrayList; import java.util.List; @@ -74,7 +75,8 @@ public class FileBasedCopyListing extends CopyListing { FileSystem fs = sourceListing.getFileSystem(getConf()); BufferedReader input = null; try { - input = new BufferedReader(new InputStreamReader(fs.open(sourceListing))); + 
input = new BufferedReader(new InputStreamReader(fs.open(sourceListing), + Charset.forName(UTF-8))); String line = input.readLine(); while (line != null) { result.add(new Path(line)); http://git-wip-us.apache.org/repos/asf/hadoop/blob/a1e4a12d/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java -- diff --git a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java
hadoop git commit: HADOOP-11381. Fix findbugs warnings in hadoop-distcp, hadoop-aws, hadoop-azure, and hadoop-openstack. Contributed by Li Lu.
Repository: hadoop Updated Branches: refs/heads/trunk 195f31a8e - 2e98ad34c HADOOP-11381. Fix findbugs warnings in hadoop-distcp, hadoop-aws, hadoop-azure, and hadoop-openstack. Contributed by Li Lu. Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2e98ad34 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2e98ad34 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2e98ad34 Branch: refs/heads/trunk Commit: 2e98ad34ce64a9e5184c53447004de20a637f927 Parents: 195f31a Author: Haohui Mai whe...@apache.org Authored: Tue Dec 9 20:45:21 2014 -0800 Committer: Haohui Mai whe...@apache.org Committed: Tue Dec 9 20:45:21 2014 -0800 -- hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++ .../src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java | 4 .../org/apache/hadoop/fs/azure/NativeAzureFileSystem.java | 5 +++-- .../java/org/apache/hadoop/fs/azure/SelfRenewingLease.java| 7 --- .../java/org/apache/hadoop/tools/FileBasedCopyListing.java| 4 +++- .../hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java | 7 --- .../java/org/apache/hadoop/fs/swift/util/SwiftTestUtils.java | 4 ++-- 7 files changed, 22 insertions(+), 11 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/2e98ad34/hadoop-common-project/hadoop-common/CHANGES.txt -- diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index 0019b3a..9065ff5 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -555,6 +555,8 @@ Release 2.7.0 - UNRELEASED HADOOP-11349. RawLocalFileSystem leaks file descriptor while creating a file if creat succeeds but chmod fails. (Varun Saxena via Colin P. McCabe) +HADOOP-11381. Fix findbugs warnings in hadoop-distcp, hadoop-aws, +hadoop-azure, and hadoop-openstack. 
(Li Lu via wheat9) Release 2.6.0 - 2014-11-18 http://git-wip-us.apache.org/repos/asf/hadoop/blob/2e98ad34/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java -- diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java index 6bdd233..457351d 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java @@ -875,6 +875,8 @@ public class S3AFileSystem extends FileSystem { case ProgressEvent.PART_COMPLETED_EVENT_CODE: statistics.incrementWriteOps(1); break; + default: +break; } } }; @@ -933,6 +935,8 @@ public class S3AFileSystem extends FileSystem { case ProgressEvent.PART_COMPLETED_EVENT_CODE: statistics.incrementWriteOps(1); break; + default: +break; } } }; http://git-wip-us.apache.org/repos/asf/hadoop/blob/2e98ad34/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java -- diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java index ad2e2e6..c136002 100644 --- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java +++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java @@ -25,6 +25,7 @@ import java.io.InputStream; import java.io.OutputStream; import java.net.URI; import java.net.URISyntaxException; +import java.nio.charset.Charset; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; @@ -153,7 +154,7 @@ public class NativeAzureFileSystem extends FileSystem { Error reading pending rename file contents -- + maximum file size exceeded); } - String contents = new String(bytes, 0, l); + String contents = 
new String(bytes, 0, l, Charset.forName(UTF-8)); // parse the JSON ObjectMapper objMapper = new ObjectMapper(); @@ -253,7 +254,7 @@ public class NativeAzureFileSystem extends FileSystem { // Write file. try { output = fs.create(path); -output.write(contents.getBytes()); +output.write(contents.getBytes(Charset.forName(UTF-8))); } catch (IOException e) { throw new IOException(Unable to write