hadoop git commit: YARN-3552. RM Web UI shows -1 running containers for completed apps. Contributed by Rohith (cherry picked from commit 9356cf8676fd18f78655e8a6f2e6c946997dbd40)

2015-05-05 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 01bdfd794 -> 0f30913ea


YARN-3552. RM Web UI shows -1 running containers for completed apps. 
Contributed by Rohith
(cherry picked from commit 9356cf8676fd18f78655e8a6f2e6c946997dbd40)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0f30913e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0f30913e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0f30913e

Branch: refs/heads/branch-2
Commit: 0f30913ea8167e1191036cb47cfaeff3aedc6a2e
Parents: 01bdfd7
Author: Jason Lowe jl...@apache.org
Authored: Tue May 5 13:42:32 2015 +
Committer: Jason Lowe jl...@apache.org
Committed: Tue May 5 13:43:57 2015 +

--
 hadoop-yarn-project/CHANGES.txt   | 3 +++
 .../server/resourcemanager/webapp/FairSchedulerAppsBlock.java | 3 ++-
 .../hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java| 3 ++-
 3 files changed, 7 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f30913e/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index f9eb34c..9f7af1e 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -260,6 +260,9 @@ Release 2.8.0 - UNRELEASED
 YARN-2725. Added test cases of retrying creating znode in ZKRMStateStore.
 (Tsuyoshi Ozawa via jianhe)
 
+YARN-3552. RM Web UI shows -1 running containers for completed apps
+(Rohith via jlowe)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f30913e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/FairSchedulerAppsBlock.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/FairSchedulerAppsBlock.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/FairSchedulerAppsBlock.java
index 9650dfe..8f8ece0 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/FairSchedulerAppsBlock.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/FairSchedulerAppsBlock.java
@@ -133,7 +133,8 @@ public class FairSchedulerAppsBlock extends HtmlBlock {
   .append(appInfo.getFinishTime()).append(\,\)
   .append(appInfo.getState()).append(\,\)
   .append(appInfo.getFinalStatus()).append(\,\)
-  .append(appInfo.getRunningContainers()).append(\,\)
+  .append(appInfo.getRunningContainers() == -1 ? N/A : String
+ .valueOf(appInfo.getRunningContainers())).append(\,\)
   // Progress bar
   .append(br title=').append(percent)
   .append(' div class=').append(C_PROGRESSBAR).append(' title=')

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f30913e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java
index 715d9d9..3054150 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java
@@ -110,7 +110,8 @@ public class RMAppsBlock extends AppsBlock {
 .append(\,\)
 .append(app.getFinalAppStatus())
 .append(\,\)
-.append(String.valueOf(app.getRunningContainers()))
+.append(app.getRunningContainers() == -1 ? N/A : String
+.valueOf(app.getRunningContainers()))
 .append(\,\)
 // Progress bar
 .append(br title=').append(percent).append(' div class=')



hadoop git commit: YARN-3552. RM Web UI shows -1 running containers for completed apps. Contributed by Rohith

2015-05-05 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/trunk 318081ccd -> 9356cf867


YARN-3552. RM Web UI shows -1 running containers for completed apps. 
Contributed by Rohith


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9356cf86
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9356cf86
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9356cf86

Branch: refs/heads/trunk
Commit: 9356cf8676fd18f78655e8a6f2e6c946997dbd40
Parents: 318081c
Author: Jason Lowe jl...@apache.org
Authored: Tue May 5 13:42:32 2015 +
Committer: Jason Lowe jl...@apache.org
Committed: Tue May 5 13:42:32 2015 +

--
 hadoop-yarn-project/CHANGES.txt   | 3 +++
 .../server/resourcemanager/webapp/FairSchedulerAppsBlock.java | 3 ++-
 .../hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java| 3 ++-
 3 files changed, 7 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9356cf86/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 287205d..97d3208 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -305,6 +305,9 @@ Release 2.8.0 - UNRELEASED
 YARN-2725. Added test cases of retrying creating znode in ZKRMStateStore.
 (Tsuyoshi Ozawa via jianhe)
 
+YARN-3552. RM Web UI shows -1 running containers for completed apps
+(Rohith via jlowe)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9356cf86/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/FairSchedulerAppsBlock.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/FairSchedulerAppsBlock.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/FairSchedulerAppsBlock.java
index 9650dfe..8f8ece0 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/FairSchedulerAppsBlock.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/FairSchedulerAppsBlock.java
@@ -133,7 +133,8 @@ public class FairSchedulerAppsBlock extends HtmlBlock {
   .append(appInfo.getFinishTime()).append(\,\)
   .append(appInfo.getState()).append(\,\)
   .append(appInfo.getFinalStatus()).append(\,\)
-  .append(appInfo.getRunningContainers()).append(\,\)
+  .append(appInfo.getRunningContainers() == -1 ? N/A : String
+ .valueOf(appInfo.getRunningContainers())).append(\,\)
   // Progress bar
   .append(br title=').append(percent)
   .append(' div class=').append(C_PROGRESSBAR).append(' title=')

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9356cf86/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java
index 715d9d9..3054150 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java
@@ -110,7 +110,8 @@ public class RMAppsBlock extends AppsBlock {
 .append(\,\)
 .append(app.getFinalAppStatus())
 .append(\,\)
-.append(String.valueOf(app.getRunningContainers()))
+.append(app.getRunningContainers() == -1 ? N/A : String
+.valueOf(app.getRunningContainers()))
 .append(\,\)
 // Progress bar
 .append(br title=').append(percent).append(' div class=')



[07/17] hadoop git commit: YARN-3396. Handle URISyntaxException in ResourceLocalizationService. (Contributed by Brahma Reddy Battula)

2015-05-05 Thread jitendra
YARN-3396. Handle URISyntaxException in ResourceLocalizationService. 
(Contributed by Brahma Reddy Battula)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/38102420
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/38102420
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/38102420

Branch: refs/heads/HDFS-7240
Commit: 38102420621308f5ba91cdeb6a18a63aa5acf640
Parents: 05adc76
Author: Junping Du junping...@apache.org
Authored: Tue May 5 10:18:23 2015 -0700
Committer: Junping Du junping...@apache.org
Committed: Tue May 5 10:18:23 2015 -0700

--
 hadoop-yarn-project/CHANGES.txt  | 2 ++
 .../localizer/ResourceLocalizationService.java   | 8 ++--
 2 files changed, 8 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/38102420/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 97d3208..a6b7f17 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -180,6 +180,8 @@ Release 2.8.0 - UNRELEASED
 YARN-3363. add localization and container launch time to ContainerMetrics
 at NM to show these timing information for each active container.
 (zxu via rkanter)
+YARN-3396. Handle URISyntaxException in ResourceLocalizationService. 
+(Brahma Reddy Battula via junping_du)
 
   OPTIMIZATIONS
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/38102420/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
index e9c45f3..17ea1a9 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
@@ -995,7 +995,9 @@ public class ResourceLocalizationService extends 
CompositeService
 try {
   req = new LocalResourceRequest(rsrc);
 } catch (URISyntaxException e) {
-  // TODO fail? Already translated several times...
+  LOG.error(
+  Got exception in parsing URL of LocalResource:
+  + rsrc.getResource(), e);
 }
 LocalizerResourceRequestEvent assoc = scheduled.get(req);
 if (assoc == null) {
@@ -1069,7 +1071,9 @@ public class ResourceLocalizationService extends 
CompositeService
   LOG.error(Inorrect path for PRIVATE localization.
   + next.getResource().getFile(), e);
 } catch (URISyntaxException e) {
-//TODO fail? Already translated several times...
+  LOG.error(
+  Got exception in parsing URL of LocalResource:
+  + next.getResource(), e);
 }
   }
 



[10/17] hadoop git commit: HADOOP-11904. test-patch.sh goes into an infinite loop on non-maven builds (aw)

2015-05-05 Thread jitendra
HADOOP-11904. test-patch.sh goes into an infinite loop on non-maven builds (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3ff91e9e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3ff91e9e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3ff91e9e

Branch: refs/heads/HDFS-7240
Commit: 3ff91e9e9302d94b0d18cccebd02d3815c06ce90
Parents: fcd4cb7
Author: Allen Wittenauer a...@apache.org
Authored: Tue May 5 11:02:15 2015 -0700
Committer: Allen Wittenauer a...@apache.org
Committed: Tue May 5 11:02:15 2015 -0700

--
 dev-support/test-patch.sh   | 8 +++-
 hadoop-common-project/hadoop-common/CHANGES.txt | 3 +++
 2 files changed, 10 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3ff91e9e/dev-support/test-patch.sh
--
diff --git a/dev-support/test-patch.sh b/dev-support/test-patch.sh
index 5107718..9f48c64 100755
--- a/dev-support/test-patch.sh
+++ b/dev-support/test-patch.sh
@@ -915,6 +915,13 @@ function git_checkout
   fi
 
   add_jira_footer git revision ${PATCH_BRANCH} / ${GIT_REVISION}
+
+  if [[ ! -f ${BASEDIR}/pom.xml ]]; then
+hadoop_error ERROR: This verison of test-patch.sh only supports 
Maven-based builds. Aborting.
+add_jira_table -1 pre-patch Unsupported build system.
+output_to_jira 1
+cleanup_and_exit 1
+  fi
   return 0
 }
 
@@ -1331,7 +1338,6 @@ function apply_patch_file
 ## @return   none; otherwise relaunches
 function check_reexec
 {
-  set +x
   local commentfile=${PATCH_DIR}/tp.${RANDOM}
 
   if [[ ${REEXECED} == true ]]; then

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3ff91e9e/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 49106ae..5b2654a 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -604,6 +604,9 @@ Release 2.8.0 - UNRELEASED
 HADOOP-11916. TestStringUtils#testLowerAndUpperStrings failed on MAC
 due to a JVM bug. (Ming Ma via ozawa)
 
+HADOOP-11904. test-patch.sh goes into an infinite loop on non-maven
+builds (aw)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES



[09/17] hadoop git commit: HDFS-8305: HDFS INotify: the destination field of RenameOp should always end with the file name (cmccabe)

2015-05-05 Thread jitendra
HDFS-8305: HDFS INotify: the destination field of RenameOp should always end 
with the file name (cmccabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fcd4cb75
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fcd4cb75
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fcd4cb75

Branch: refs/heads/HDFS-7240
Commit: fcd4cb751665adb241081e42b3403c3856b6c6fe
Parents: b7dd3a4
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Tue May 5 10:50:09 2015 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Tue May 5 10:50:09 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt  |  3 +++
 .../hadoop/hdfs/server/namenode/FSDirRenameOp.java   |  2 +-
 .../apache/hadoop/hdfs/server/namenode/FSEditLog.java| 10 +++---
 .../hadoop/hdfs/TestDFSInotifyEventInputStream.java  | 11 +++
 4 files changed, 22 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fcd4cb75/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index cd9b7b8..08ab7e7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -682,6 +682,9 @@ Release 2.7.1 - UNRELEASED
 HDFS-8091: ACLStatus and XAttributes should be presented to
 INodeAttributesProvider before returning to client (asuresh)
 
+HDFS-8305: HDFS INotify: the destination field of RenameOp should always
+end with the file name (cmccabe)
+
 Release 2.7.0 - 2015-04-20
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fcd4cb75/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
index c57cae2..4a20a62 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
@@ -476,7 +476,7 @@ class FSDirRenameOp {
   fsd.writeUnlock();
 }
 if (stat) {
-  fsd.getEditLog().logRename(src, dst, mtime, logRetryCache);
+  fsd.getEditLog().logRename(src, actualDst, mtime, logRetryCache);
   return true;
 }
 return false;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fcd4cb75/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
index bda827a..28e150c 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
@@ -815,7 +815,9 @@ public class FSEditLog implements LogsPurgeable {
   }
   
   /** 
-   * Add rename record to edit log
+   * Add rename record to edit log.
+   *
+   * The destination should be the file name, not the destination directory.
* TODO: use String parameters until just before writing to disk
*/
   void logRename(String src, String dst, long timestamp, boolean toLogRpcIds) {
@@ -826,9 +828,11 @@ public class FSEditLog implements LogsPurgeable {
 logRpcIds(op, toLogRpcIds);
 logEdit(op);
   }
-  
+
   /** 
-   * Add rename record to edit log
+   * Add rename record to edit log.
+   *
+   * The destination should be the file name, not the destination directory.
*/
   void logRename(String src, String dst, long timestamp, boolean toLogRpcIds,
   Options.Rename... options) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fcd4cb75/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSInotifyEventInputStream.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSInotifyEventInputStream.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSInotifyEventInputStream.java
index 6e91e06..ba33bd3 100644
--- 

[12/17] hadoop git commit: HADOOP-11917. test-patch.sh should work with ${BASEDIR}/patchprocess setups (aw)

2015-05-05 Thread jitendra
HADOOP-11917. test-patch.sh should work with ${BASEDIR}/patchprocess setups (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d33419ae
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d33419ae
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d33419ae

Branch: refs/heads/HDFS-7240
Commit: d33419ae01c528073f9f00ef1aadf153fed41222
Parents: 24d3a2d
Author: Allen Wittenauer a...@apache.org
Authored: Tue May 5 11:26:31 2015 -0700
Committer: Allen Wittenauer a...@apache.org
Committed: Tue May 5 11:26:31 2015 -0700

--
 .gitignore  |  1 +
 dev-support/test-patch.sh   | 78 +---
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +
 pom.xml | 23 +++---
 4 files changed, 84 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d33419ae/.gitignore
--
diff --git a/.gitignore b/.gitignore
index a49ad4b..779f507 100644
--- a/.gitignore
+++ b/.gitignore
@@ -23,3 +23,4 @@ 
hadoop-tools/hadoop-openstack/src/test/resources/contract-test-options.xml
 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/tla/yarnregistry.toolbox
 yarnregistry.pdf
 hadoop-tools/hadoop-aws/src/test/resources/contract-test-options.xml
+patchprocess/

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d33419ae/dev-support/test-patch.sh
--
diff --git a/dev-support/test-patch.sh b/dev-support/test-patch.sh
index 9f48c64..3759e9f 100755
--- a/dev-support/test-patch.sh
+++ b/dev-support/test-patch.sh
@@ -535,6 +535,26 @@ function echo_and_redirect
   ${@}  ${logfile} 21
 }
 
+## @description is PATCH_DIR relative to BASEDIR?
+## @audiencepublic
+## @stability   stable
+## @replaceable yes
+## @returns 1 - no, PATCH_DIR
+## @returns 0 - yes, PATCH_DIR - BASEDIR
+function relative_patchdir
+{
+  local p=${PATCH_DIR#${BASEDIR}}
+
+  if [[ ${#p} -eq ${#PATCH_DIR} ]]; then
+echo ${p}
+return 1
+  fi
+  p=${p#/}
+  echo ${p}
+  return 0
+}
+
+
 ## @description  Print the usage information
 ## @audience public
 ## @stabilitystable
@@ -697,7 +717,8 @@ function parse_args
 esac
   done
 
-  # if we get a relative path, turn it absolute
+  # we need absolute dir for ${BASEDIR}
+  cd ${CWD}
   BASEDIR=$(cd -P -- ${BASEDIR} /dev/null  pwd -P)
 
   if [[ ${BUILD_NATIVE} == true ]] ; then
@@ -723,6 +744,7 @@ function parse_args
 JENKINS=false
   fi
 
+  cd ${CWD}
   if [[ ! -d ${PATCH_DIR} ]]; then
 mkdir -p ${PATCH_DIR}
 if [[ $? == 0 ]] ; then
@@ -733,6 +755,9 @@ function parse_args
 fi
   fi
 
+  # we need absolute dir for PATCH_DIR
+  PATCH_DIR=$(cd -P -- ${PATCH_DIR} /dev/null  pwd -P)
+
   GITDIFFLINES=${PATCH_DIR}/gitdifflines.txt
 }
 
@@ -821,17 +846,36 @@ function find_changed_modules
 function git_checkout
 {
   local currentbranch
+  local exemptdir
 
   big_console_header Confirming git environment
 
+  cd ${BASEDIR}
+  if [[ ! -d .git ]]; then
+hadoop_error ERROR: ${BASEDIR} is not a git repo.
+cleanup_and_exit 1
+  fi
+
   if [[ ${RESETREPO} == true ]] ; then
-cd ${BASEDIR}
 ${GIT} reset --hard
 if [[ $? != 0 ]]; then
   hadoop_error ERROR: git reset is failing
   cleanup_and_exit 1
 fi
-${GIT} clean -xdf
+
+# if PATCH_DIR is in BASEDIR, then we don't want
+# git wiping it out.
+exemptdir=$(relative_patchdir)
+if [[ $? == 1 ]]; then
+  ${GIT} clean -xdf
+else
+  # we do, however, want it emptied of all _files_.
+  # we need to leave _directories_ in case we are in
+  # re-exec mode (which places a directory full of stuff in it)
+  hadoop_debug Exempting ${exemptdir} from clean
+  rm ${PATCH_DIR}/* 2/dev/null
+  ${GIT} clean -xdf -e ${exemptdir}
+fi
 if [[ $? != 0 ]]; then
   hadoop_error ERROR: git clean is failing
   cleanup_and_exit 1
@@ -875,11 +919,6 @@ function git_checkout
 fi
 
   else
-cd ${BASEDIR}
-if [[ ! -d .git ]]; then
-  hadoop_error ERROR: ${BASEDIR} is not a git repo.
-  cleanup_and_exit 1
-fi
 
 status=$(${GIT} status --porcelain)
 if [[ ${status} !=   -z ${DIRTY_WORKSPACE} ]] ; then
@@ -1000,6 +1039,16 @@ function verify_valid_branch
   local check=$2
   local i
 
+  # shortcut some common
+  # non-resolvable names
+  if [[ -z ${check} ]]; then
+return 1
+  fi
+
+  if [[ ${check} == patch ]]; then
+return 1
+  fi
+
   if [[ ${check} =~ ^git ]]; then
 ref=$(echo ${check} | cut -f2 -dt)
 count=$(echo ${ref} | wc -c | tr -d ' ')
@@ -2207,9 +2256,16 @@ function cleanup_and_exit
 
   if [[ ${JENKINS} == true ]] ; then
 if [[ -e 

[11/17] hadoop git commit: HDFS-7758. Retire FsDatasetSpi#getVolumes() and use FsDatasetSpi#getVolumeRefs() instead (Lei (Eddy) Xu via Colin P. McCabe)

2015-05-05 Thread jitendra
HDFS-7758. Retire FsDatasetSpi#getVolumes() and use 
FsDatasetSpi#getVolumeRefs() instead (Lei (Eddy) Xu via Colin P. McCabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/24d3a2d4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/24d3a2d4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/24d3a2d4

Branch: refs/heads/HDFS-7240
Commit: 24d3a2d4fdd836ac9a5bc755a7fb9354f7a582b1
Parents: 3ff91e9
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Tue May 5 10:55:04 2015 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Tue May 5 11:08:59 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |   3 +
 .../hdfs/server/datanode/DirectoryScanner.java  |  69 +-
 .../server/datanode/fsdataset/FsDatasetSpi.java |  99 +-
 .../datanode/fsdataset/FsVolumeReference.java   |  13 +-
 .../datanode/fsdataset/impl/FsDatasetImpl.java  |  29 ++---
 .../datanode/fsdataset/impl/FsVolumeImpl.java   |   7 +-
 .../datanode/fsdataset/impl/FsVolumeList.java   |   7 +-
 .../org/apache/hadoop/hdfs/MiniDFSCluster.java  |  23 ++--
 .../hdfs/MiniDFSClusterWithNodeGroup.java   |  17 +--
 .../TestNameNodePrunesMissingStorages.java  |  18 +--
 .../server/datanode/SimulatedFSDataset.java |   2 +-
 .../TestBlockHasMultipleReplicasOnSameDN.java   |  14 +-
 .../hdfs/server/datanode/TestBlockScanner.java  |   8 +-
 .../datanode/TestDataNodeHotSwapVolumes.java|  21 ++-
 .../datanode/TestDataNodeVolumeFailure.java |   9 +-
 .../server/datanode/TestDirectoryScanner.java   | 130 +++
 .../hdfs/server/datanode/TestDiskError.java |  12 +-
 .../datanode/TestIncrementalBlockReports.java   |   6 +-
 .../datanode/TestIncrementalBrVariations.java   |  81 ++--
 .../server/datanode/TestTriggerBlockReport.java |   8 +-
 .../extdataset/ExternalDatasetImpl.java |   2 +-
 .../fsdataset/impl/LazyPersistTestCase.java |  72 +-
 .../fsdataset/impl/TestDatanodeRestart.java |  20 ++-
 .../fsdataset/impl/TestFsDatasetImpl.java   |  39 --
 .../fsdataset/impl/TestFsVolumeList.java|   9 +-
 .../fsdataset/impl/TestRbwSpaceReservation.java |  43 +++---
 .../fsdataset/impl/TestWriteToReplica.java  |  70 ++
 .../hdfs/server/mover/TestStorageMover.java |  19 ++-
 28 files changed, 515 insertions(+), 335 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d3a2d4/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 08ab7e7..c89e6fe 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -507,6 +507,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-8237. Move all protocol classes used by ClientProtocol to hdfs-client.
 (wheat9)
 
+HDFS-7758. Retire FsDatasetSpi#getVolumes() and use
+FsDatasetSpi#getVolumeRefs() instead (Lei (Eddy) Xu via Colin P. McCabe)
+
   OPTIMIZATIONS
 
 HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d3a2d4/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
index 62885a9..8453094 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
@@ -527,59 +527,48 @@ public class DirectoryScanner implements Runnable {
 diffRecord.add(new ScanInfo(blockId, null, null, vol));
   }
 
-  /** Is the given volume still valid in the dataset? */
-  private static boolean isValid(final FsDatasetSpi? dataset,
-  final FsVolumeSpi volume) {
-for (FsVolumeSpi vol : dataset.getVolumes()) {
-  if (vol == volume) {
-return true;
-  }
-}
-return false;
-  }
-
   /** Get lists of blocks on the disk sorted by blockId, per blockpool */
   private MapString, ScanInfo[] getDiskReport() {
+ScanInfoPerBlockPool list = new ScanInfoPerBlockPool();
+ScanInfoPerBlockPool[] dirReports = null;
 // First get list of data directories
-final List? extends FsVolumeSpi volumes = dataset.getVolumes();
+try (FsDatasetSpi.FsVolumeReferences volumes =
+dataset.getFsVolumeReferences()) {
 
-// Use an array since the 

[16/17] hadoop git commit: MAPREDUCE-6192. Create unit test to automatically compare MR related classes and mapred-default.xml (rchiang via rkanter)

2015-05-05 Thread jitendra
MAPREDUCE-6192. Create unit test to automatically compare MR related classes 
and mapred-default.xml (rchiang via rkanter)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9809a16d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9809a16d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9809a16d

Branch: refs/heads/HDFS-7240
Commit: 9809a16d3c8068beccbf0106e99c7ede6ba11e0f
Parents: 0100b15
Author: Robert Kanter rkan...@apache.org
Authored: Mon May 4 17:48:10 2015 -0700
Committer: Robert Kanter rkan...@apache.org
Committed: Tue May 5 14:43:28 2015 -0700

--
 .../conf/TestConfigurationFieldsBase.java   | 58 ++-
 hadoop-mapreduce-project/CHANGES.txt|  3 +
 .../mapred/TestMapreduceConfigFields.java   | 76 
 3 files changed, 135 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9809a16d/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
index c3fe3a3..2e4d8b1 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
@@ -147,6 +147,12 @@ public abstract class TestConfigurationFieldsBase {
   private SetString xmlFieldsMissingInConfiguration = null;
 
   /**
+   * Member variable for debugging base class operation
+   */
+  protected boolean configDebug = false;
+  protected boolean xmlDebug = false;
+
+  /**
* Abstract method to be used by subclasses for initializing base
* members.
*/
@@ -168,13 +174,16 @@ public abstract class TestConfigurationFieldsBase {
 HashMapString,String retVal = new HashMapString,String();
 
 // Setup regexp for valid properties
-String propRegex = ^[A-Za-z_-]+(\\.[A-Za-z_-]+)+$;
+String propRegex = ^[A-Za-z][A-Za-z0-9_-]+(\\.[A-Za-z0-9_-]+)+$;
 Pattern p = Pattern.compile(propRegex);
 
 // Iterate through class member variables
 int totalFields = 0;
 String value;
 for (Field f : fields) {
+  if (configDebug) {
+System.out.println(Field:  + f);
+  }
   // Filter out anything that isn't public static final
   if (!Modifier.isStatic(f.getModifiers()) ||
   !Modifier.isPublic(f.getModifiers()) ||
@@ -192,6 +201,9 @@ public abstract class TestConfigurationFieldsBase {
   } catch (IllegalAccessException iaException) {
 continue;
   }
+  if (configDebug) {
+System.out.println(  Value:  + value);
+  }
   // Special Case: Detect and ignore partial properties (ending in x)
   //   or file properties (ending in .xml)
   if (value.endsWith(.xml) ||
@@ -221,11 +233,23 @@ public abstract class TestConfigurationFieldsBase {
   //  something like: blah.blah2(.blah3.blah4...)
   Matcher m = p.matcher(value);
   if (!m.find()) {
+if (configDebug) {
+  System.out.println(  Passes Regex: false);
+}
 continue;
   }
+  if (configDebug) {
+System.out.println(  Passes Regex: true);
+  }
 
   // Save member variable/value as hash
-  retVal.put(value,f.getName());
+  if (!retVal.containsKey(value)) {
+retVal.put(value,f.getName());
+  } else {
+if (configDebug) {
+  System.out.println(ERROR: Already found key for property  + value);
+}
+  }
 }
 
 return retVal;
@@ -256,6 +280,9 @@ public abstract class TestConfigurationFieldsBase {
   // Ignore known xml props
   if (xmlPropsToSkipCompare != null) {
 if (xmlPropsToSkipCompare.contains(key)) {
+  if (xmlDebug) {
+System.out.println(  Skipping Full Key:  + key);
+  }
   continue;
 }
   }
@@ -270,14 +297,23 @@ public abstract class TestConfigurationFieldsBase {
}
   }
   if (skipPrefix) {
+if (xmlDebug) {
+  System.out.println(  Skipping Prefix Key:  + key);
+}
 continue;
   }
   if (conf.onlyKeyExists(key)) {
 retVal.put(key,null);
+if (xmlDebug) {
+  System.out.println(  XML Key,Null Value:  + key);
+}
   } else {
 String value = conf.get(key);
 if (value!=null) {
   retVal.put(key,entry.getValue());
+  if (xmlDebug) {
+System.out.println(  XML Key,Valid Value:  + 

[14/17] hadoop git commit: HDFS-7847. Modify NNThroughputBenchmark to be able to operate on a remote NameNode (Charles Lamb via Colin P. McCabe)

2015-05-05 Thread jitendra
HDFS-7847. Modify NNThroughputBenchmark to be able to operate on a remote 
NameNode (Charles Lamb via Colin P. McCabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ffce9a34
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ffce9a34
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ffce9a34

Branch: refs/heads/HDFS-7240
Commit: ffce9a3413277a69444fcb890460c885de56db69
Parents: e4c3b52
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Tue May 5 11:27:36 2015 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Tue May 5 11:34:58 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |   3 +
 .../org/apache/hadoop/hdfs/DFSTestUtil.java |  40 ++
 .../server/namenode/NNThroughputBenchmark.java  | 136 +--
 3 files changed, 137 insertions(+), 42 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ffce9a34/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index c89e6fe..01de9b1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -510,6 +510,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-7758. Retire FsDatasetSpi#getVolumes() and use
 FsDatasetSpi#getVolumeRefs() instead (Lei (Eddy) Xu via Colin P. McCabe)
 
+HDFS-7847. Modify NNThroughputBenchmark to be able to operate on a remote
+NameNode (Charles Lamb via Colin P. McCabe)
+
   OPTIMIZATIONS
 
 HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ffce9a34/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
index a8df991..cfee997 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
@@ -48,6 +48,7 @@ import java.lang.reflect.Modifier;
 import java.net.HttpURLConnection;
 import java.net.InetSocketAddress;
 import java.net.Socket;
+import java.net.URI;
 import java.net.URL;
 import java.net.URLConnection;
 import java.nio.ByteBuffer;
@@ -64,6 +65,7 @@ import java.util.Random;
 import java.util.Set;
 import java.util.UUID;
 import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
@@ -129,12 +131,14 @@ import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import 
org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
+import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.nativeio.NativeIO;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.net.unix.DomainSocket;
 import org.apache.hadoop.net.unix.TemporarySocketDirectory;
+import org.apache.hadoop.security.RefreshUserMappingsProtocol;
 import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
@@ -147,6 +151,7 @@ import org.apache.log4j.Level;
 import org.junit.Assume;
 import org.mockito.internal.util.reflection.Whitebox;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Charsets;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
@@ -1756,6 +1761,41 @@ public class DFSTestUtil {
   }
 
   /**
+   * Get the NamenodeProtocol RPC proxy for the NN associated with this
+   * DFSClient object
+   *
+   * @param nameNodeUri the URI of the NN to get a proxy for.
+   *
+   * @return the Namenode RPC proxy associated with this DFSClient object
+   */
+  @VisibleForTesting
+  public static NamenodeProtocol getNamenodeProtocolProxy(Configuration conf,
+  URI nameNodeUri, UserGroupInformation ugi)
+  throws IOException {
+return NameNodeProxies.createNonHAProxy(conf,
+NameNode.getAddress(nameNodeUri), NamenodeProtocol.class, ugi, false).
+getProxy();
+  }
+
+  /**
+   * Get the RefreshUserMappingsProtocol RPC proxy for the NN associated with
+   * this DFSClient object
+   *
+   * @param 

[17/17] hadoop git commit: HDFS-8314. Move HdfsServerConstants#IO_FILE_BUFFER_SIZE and SMALL_BUFFER_SIZE to the users. Contributed by Li Lu.

2015-05-05 Thread jitendra
HDFS-8314. Move HdfsServerConstants#IO_FILE_BUFFER_SIZE and SMALL_BUFFER_SIZE 
to the users. Contributed by Li Lu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4da8490b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4da8490b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4da8490b

Branch: refs/heads/HDFS-7240
Commit: 4da8490b512a33a255ed27309860859388d7c168
Parents: 9809a16
Author: Haohui Mai whe...@apache.org
Authored: Tue May 5 15:41:22 2015 -0700
Committer: Haohui Mai whe...@apache.org
Committed: Tue May 5 15:41:22 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../java/org/apache/hadoop/hdfs/DFSClient.java  |  6 +++--
 .../java/org/apache/hadoop/hdfs/DFSUtil.java| 11 +
 .../org/apache/hadoop/hdfs/DataStreamer.java|  7 +++---
 .../hadoop/hdfs/server/balancer/Dispatcher.java |  7 --
 .../hdfs/server/common/HdfsServerConstants.java |  8 --
 .../server/datanode/BlockMetadataHeader.java|  7 --
 .../hdfs/server/datanode/BlockReceiver.java |  5 ++--
 .../hdfs/server/datanode/BlockSender.java   | 14 ---
 .../hadoop/hdfs/server/datanode/DataNode.java   |  2 +-
 .../hdfs/server/datanode/DataXceiver.java   | 26 
 .../datanode/fsdataset/impl/BlockPoolSlice.java |  7 --
 .../datanode/fsdataset/impl/FsDatasetImpl.java  | 26 
 .../impl/RamDiskAsyncLazyPersistService.java|  8 --
 .../hdfs/server/namenode/TransferFsImage.java   | 10 +---
 .../org/apache/hadoop/hdfs/DFSTestUtil.java |  2 +-
 16 files changed, 95 insertions(+), 54 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4da8490b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index cc6758f..92a82c8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -513,6 +513,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-7847. Modify NNThroughputBenchmark to be able to operate on a remote
 NameNode (Charles Lamb via Colin P. McCabe)
 
+HDFS-8314. Move HdfsServerConstants#IO_FILE_BUFFER_SIZE and
+SMALL_BUFFER_SIZE to the users. (Li Lu via wheat9)
+
   OPTIMIZATIONS
 
 HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4da8490b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
index 99b8d2c..c145959 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
@@ -238,6 +238,7 @@ public class DFSClient implements java.io.Closeable, 
RemotePeerFactory,
   new DFSHedgedReadMetrics();
   private static ThreadPoolExecutor HEDGED_READ_THREAD_POOL;
   private final Sampler? traceSampler;
+  private final int smallBufferSize;
 
   public DfsClientConf getConf() {
 return dfsClientConf;
@@ -309,6 +310,7 @@ public class DFSClient implements java.io.Closeable, 
RemotePeerFactory,
 this.stats = stats;
 this.socketFactory = NetUtils.getSocketFactory(conf, ClientProtocol.class);
 this.dtpReplaceDatanodeOnFailure = ReplaceDatanodeOnFailure.get(conf);
+this.smallBufferSize = DFSUtil.getSmallBufferSize(conf);
 
 this.ugi = UserGroupInformation.getCurrentUser();
 
@@ -1902,7 +1904,7 @@ public class DFSClient implements java.io.Closeable, 
RemotePeerFactory,
   //connect to a datanode
   IOStreamPair pair = connectToDN(datanodes[j], timeout, lb);
   out = new DataOutputStream(new BufferedOutputStream(pair.out,
-  HdfsServerConstants.SMALL_BUFFER_SIZE));
+  smallBufferSize));
   in = new DataInputStream(pair.in);
 
   if (LOG.isDebugEnabled()) {
@@ -2067,7 +2069,7 @@ public class DFSClient implements java.io.Closeable, 
RemotePeerFactory,
 
 try {
   DataOutputStream out = new DataOutputStream(new 
BufferedOutputStream(pair.out,
-  HdfsServerConstants.SMALL_BUFFER_SIZE));
+  smallBufferSize));
   DataInputStream in = new DataInputStream(pair.in);
   
   new Sender(out).readBlock(lb.getBlock(), lb.getBlockToken(), clientName,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4da8490b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java

[05/17] hadoop git commit: HADOOP-11120. hadoop fs -rmr gives wrong advice. Contributed by Juliet Houghland.

2015-05-05 Thread jitendra
HADOOP-11120. hadoop fs -rmr gives wrong advice. Contributed by Juliet 
Houghland.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/05adc76a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/05adc76a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/05adc76a

Branch: refs/heads/HDFS-7240
Commit: 05adc76ace6bf28e4a3ff874044c2c41e3eba63f
Parents: 9356cf8
Author: Andrew Wang w...@apache.org
Authored: Tue May 5 08:37:37 2015 -0700
Committer: Andrew Wang w...@apache.org
Committed: Tue May 5 08:37:37 2015 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt| 2 ++
 .../src/main/java/org/apache/hadoop/fs/shell/Delete.java   | 2 +-
 .../hadoop-common/src/test/resources/testConf.xml  | 2 +-
 3 files changed, 4 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/05adc76a/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 0b3c971..8b0e67c 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -528,6 +528,8 @@ Release 2.8.0 - UNRELEASED
 HADOOP-11328. ZKFailoverController does not log Exception when doRun raises
 errors. (Tianyin Xu via ozawa)
 
+HADOOP-11120. hadoop fs -rmr gives wrong advice. (Juliet Hougland via wang)
+
   OPTIMIZATIONS
 
 HADOOP-11785. Reduce the number of listStatus operation in distcp

http://git-wip-us.apache.org/repos/asf/hadoop/blob/05adc76a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
index 6798fbe..f882817 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
@@ -141,7 +141,7 @@ class Delete {
 
 @Override
 public String getReplacementCommand() {
-  return "rm -r";
+  return "-rm -r";
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/05adc76a/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml 
b/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
index 9b72960..57cce14 100644
--- a/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
+++ b/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
@@ -453,7 +453,7 @@
 </comparator>
 <comparator>
   <type>RegexpComparator</type>
-  <expected-output>^\s*\(DEPRECATED\) Same as 'rm -r'\s*</expected-output>
+  <expected-output>^\s*\(DEPRECATED\) Same as '-rm -r'\s*</expected-output>
 </comparator>
   </comparators>
 </test>



[15/17] hadoop git commit: HDFS-8219. setStoragePolicy with folder behavior is different after cluster restart. (surendra singh lilhore via Xiaoyu Yao)

2015-05-05 Thread jitendra
HDFS-8219. setStoragePolicy with folder behavior is different after cluster 
restart. (surendra singh lilhore via Xiaoyu Yao)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0100b155
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0100b155
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0100b155

Branch: refs/heads/HDFS-7240
Commit: 0100b155019496d077f958904de7d385697d65d9
Parents: ffce9a3
Author: Xiaoyu Yao x...@apache.org
Authored: Tue May 5 13:41:14 2015 -0700
Committer: Xiaoyu Yao x...@apache.org
Committed: Tue May 5 13:41:14 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../hadoop/hdfs/server/namenode/FSEditLog.java  |  2 +-
 .../hadoop/hdfs/TestBlockStoragePolicy.java | 45 
 3 files changed, 49 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0100b155/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 01de9b1..cc6758f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -626,6 +626,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-8290. WebHDFS calls before namesystem initialization can cause
 NullPointerException. (cnauroth)
 
+HDFS-8219. setStoragePolicy with folder behavior is different after 
cluster restart.
+(surendra singh lilhore via Xiaoyu Yao)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0100b155/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
index 28e150c..83e52bc 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
@@ -739,7 +739,7 @@ public class FSEditLog implements LogsPurgeable {
   .setClientMachine(
   newNode.getFileUnderConstructionFeature().getClientMachine())
   .setOverwrite(overwrite)
-  .setStoragePolicyId(newNode.getStoragePolicyID());
+  .setStoragePolicyId(newNode.getLocalStoragePolicyID());
 
 AclFeature f = newNode.getAclFeature();
 if (f != null) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0100b155/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockStoragePolicy.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockStoragePolicy.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockStoragePolicy.java
index 9621dc8..5e3b55f 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockStoragePolicy.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockStoragePolicy.java
@@ -26,6 +26,7 @@ import java.util.*;
 
 import com.google.common.collect.Lists;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.StorageType;
@@ -1172,4 +1173,48 @@ public class TestBlockStoragePolicy {
   cluster.shutdown();
 }
   }
+
+  @Test
+  public void testGetFileStoragePolicyAfterRestartNN() throws Exception {
+//HDFS8219
+final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
+.numDataNodes(REPLICATION)
+.storageTypes(
+new StorageType[] {StorageType.DISK, StorageType.ARCHIVE})
+.build();
+cluster.waitActive();
+final DistributedFileSystem fs = cluster.getFileSystem();
+try {
+  final String file = /testScheduleWithinSameNode/file;
+  Path dir = new Path(/testScheduleWithinSameNode);
+  fs.mkdirs(dir);
+  // 2. Set Dir policy
+  fs.setStoragePolicy(dir, COLD);
+  // 3. Create file
+  final FSDataOutputStream out = fs.create(new Path(file));
+  out.writeChars(testScheduleWithinSameNode);
+  out.close();
+  // 4. Set Dir policy
+  fs.setStoragePolicy(dir, HOT);
+  HdfsFileStatus status = fs.getClient().getFileInfo(file);
+  // 5. get file policy, it should be parent policy.
+  Assert
+  .assertTrue(
+  File storage policy 

[01/17] hadoop git commit: MAPREDUCE-6165. [JDK8] TestCombineFileInputFormat failed on JDK8. Contributed by Akira AJISAKA.

2015-05-05 Thread jitendra
Repository: hadoop
Updated Branches:
  refs/heads/HDFS-7240 d701acc9c -> 4da8490b5


MAPREDUCE-6165. [JDK8] TestCombineFileInputFormat failed on JDK8. Contributed 
by Akira AJISAKA.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/551615fa
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/551615fa
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/551615fa

Branch: refs/heads/HDFS-7240
Commit: 551615fa13f65ae996bae9c1bacff189539b6557
Parents: d701acc
Author: Tsuyoshi Ozawa oz...@apache.org
Authored: Tue May 5 10:23:13 2015 +0900
Committer: Tsuyoshi Ozawa oz...@apache.org
Committed: Tue May 5 10:23:13 2015 +0900

--
 hadoop-mapreduce-project/CHANGES.txt|3 +
 .../lib/input/CombineFileInputFormat.java   |   26 +-
 .../lib/input/TestCombineFileInputFormat.java   | 1138 --
 3 files changed, 805 insertions(+), 362 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/551615fa/hadoop-mapreduce-project/CHANGES.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.txt 
b/hadoop-mapreduce-project/CHANGES.txt
index 481757a..002fbe6 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -368,6 +368,9 @@ Release 2.8.0 - UNRELEASED
 MAPREDUCE-5649. Reduce cannot use more than 2G memory for the final merge
 (Gera Shegalov via jlowe)
 
+MAPREDUCE-6165. [JDK8] TestCombineFileInputFormat failed on JDK8.
+(Akira AJISAKA via ozawa)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/551615fa/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
index 040c54b..b2b7656 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/input/CombineFileInputFormat.java
@@ -29,7 +29,6 @@ import java.util.HashMap;
 import java.util.Set;
 import java.util.Iterator;
 import java.util.Map;
-import java.util.Map.Entry;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -289,6 +288,26 @@ public abstract class CombineFileInputFormatK, V
  maxSize, minSizeNode, minSizeRack, splits);
   }
 
+  /**
+   * Process all the nodes and create splits that are local to a node.
+   * Generate one split per node iteration, and walk over nodes multiple times
+   * to distribute the splits across nodes.
+   * <p>
+   * Note: The order of processing the nodes is undetermined because the
+   * implementation of nodeToBlocks is {@link java.util.HashMap} and its order
+   * of the entries is undetermined.
+   * @param nodeToBlocks Mapping from a node to the list of blocks that
+   * it contains.
+   * @param blockToNodes Mapping from a block to the nodes on which
+   * it has replicas.
+   * @param rackToBlocks Mapping from a rack name to the list of blocks it has.
+   * @param totLength Total length of the input files.
+   * @param maxSize Max size of each split.
+   *If set to 0, disable smoothing load.
+   * @param minSizeNode Minimum split size per node.
+   * @param minSizeRack Minimum split size per rack.
+   * @param splits New splits created by this method are added to the list.
+   */
   @VisibleForTesting
   void createSplits(MapString, SetOneBlockInfo nodeToBlocks,
  MapOneBlockInfo, String[] blockToNodes,
@@ -309,11 +328,6 @@ public abstract class CombineFileInputFormatK, V
Set<String> completedNodes = new HashSet<String>();
 
 while(true) {
-  // it is allowed for maxSize to be 0. Disable smoothing load for such 
cases
-
-  // process all nodes and create splits that are local to a node. Generate
-  // one split per node iteration, and walk over nodes multiple times to
-  // distribute the splits across nodes. 
   for (IteratorMap.EntryString, SetOneBlockInfo iter = nodeToBlocks
   .entrySet().iterator(); iter.hasNext();) {
 Map.EntryString, SetOneBlockInfo one = iter.next();


[04/17] hadoop git commit: YARN-3552. RM Web UI shows -1 running containers for completed apps. Contributed by Rohith

2015-05-05 Thread jitendra
YARN-3552. RM Web UI shows -1 running containers for completed apps. 
Contributed by Rohith


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9356cf86
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9356cf86
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9356cf86

Branch: refs/heads/HDFS-7240
Commit: 9356cf8676fd18f78655e8a6f2e6c946997dbd40
Parents: 318081c
Author: Jason Lowe jl...@apache.org
Authored: Tue May 5 13:42:32 2015 +
Committer: Jason Lowe jl...@apache.org
Committed: Tue May 5 13:42:32 2015 +

--
 hadoop-yarn-project/CHANGES.txt   | 3 +++
 .../server/resourcemanager/webapp/FairSchedulerAppsBlock.java | 3 ++-
 .../hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java| 3 ++-
 3 files changed, 7 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9356cf86/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 287205d..97d3208 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -305,6 +305,9 @@ Release 2.8.0 - UNRELEASED
 YARN-2725. Added test cases of retrying creating znode in ZKRMStateStore.
 (Tsuyoshi Ozawa via jianhe)
 
+YARN-3552. RM Web UI shows -1 running containers for completed apps
+(Rohith via jlowe)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9356cf86/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/FairSchedulerAppsBlock.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/FairSchedulerAppsBlock.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/FairSchedulerAppsBlock.java
index 9650dfe..8f8ece0 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/FairSchedulerAppsBlock.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/FairSchedulerAppsBlock.java
@@ -133,7 +133,8 @@ public class FairSchedulerAppsBlock extends HtmlBlock {
   .append(appInfo.getFinishTime()).append(\,\)
   .append(appInfo.getState()).append(\,\)
   .append(appInfo.getFinalStatus()).append(\,\)
-  .append(appInfo.getRunningContainers()).append("\",\"")
+  .append(appInfo.getRunningContainers() == -1 ? "N/A" : String
+ .valueOf(appInfo.getRunningContainers())).append("\",\"")
   // Progress bar
   .append(br title=').append(percent)
   .append(' div class=').append(C_PROGRESSBAR).append(' title=')

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9356cf86/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java
index 715d9d9..3054150 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMAppsBlock.java
@@ -110,7 +110,8 @@ public class RMAppsBlock extends AppsBlock {
 .append(\,\)
 .append(app.getFinalAppStatus())
 .append(\,\)
-.append(String.valueOf(app.getRunningContainers()))
+.append(app.getRunningContainers() == -1 ? "N/A" : String
+.valueOf(app.getRunningContainers()))
 .append(\,\)
 // Progress bar
 .append(br title=').append(percent).append(' div class=')



[06/17] hadoop git commit: HADOOP-11911. test-patch should allow configuration of default branch (Sean Busbey via aw)

2015-05-05 Thread jitendra
HADOOP-11911. test-patch should allow configuration of default branch (Sean 
Busbey via aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9b01f81e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9b01f81e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9b01f81e

Branch: refs/heads/HDFS-7240
Commit: 9b01f81eb874cd63e6b9ae2d09d94fc8bf4fcd7d
Parents: 3810242
Author: Allen Wittenauer a...@apache.org
Authored: Tue May 5 09:59:20 2015 -0700
Committer: Allen Wittenauer a...@apache.org
Committed: Tue May 5 09:59:31 2015 -0700

--
 dev-support/test-patch.sh   | 21 
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +++
 2 files changed, 16 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9b01f81e/dev-support/test-patch.sh
--
diff --git a/dev-support/test-patch.sh b/dev-support/test-patch.sh
index b6e1b03..5107718 100755
--- a/dev-support/test-patch.sh
+++ b/dev-support/test-patch.sh
@@ -43,6 +43,7 @@ function setup_defaults
   ECLIPSE_HOME=${ECLIPSE_HOME:-}
   BUILD_NATIVE=${BUILD_NATIVE:-true}
   PATCH_BRANCH=
+  PATCH_BRANCH_DEFAULT=trunk
   CHANGED_MODULES=
   USER_MODULE_LIST=
   OFFLINE=false
@@ -551,7 +552,8 @@ function hadoop_usage
   echo
   echo Options:
   echo --basedir=dirThe directory to apply the patch to (default 
current directory)
-  echo --branch=dir Forcibly set the branch
+  echo --branch=ref Forcibly set the branch
+  echo --branch-default=ref If the branch isn't forced and we don't detect 
one in the patch name, use this branch (default 'trunk')
   echo --build-native=bool  If true, then build native components (default 
'true')
   echo --debugIf set, then output some extra stuff to stderr
   echo --dirty-workspace  Allow the local git workspace to have 
uncommitted changes
@@ -604,6 +606,9 @@ function parse_args
   --branch=*)
 PATCH_BRANCH=${i#*=}
   ;;
+  --branch-default=*)
+PATCH_BRANCH_DEFAULT=${i#*=}
+  ;;
   --build-native=*)
 BUILD_NATIVE=${i#*=}
   ;;
@@ -832,9 +837,9 @@ function git_checkout
   cleanup_and_exit 1
 fi
 
-${GIT} checkout --force trunk
+${GIT} checkout --force ${PATCH_BRANCH_DEFAULT}
 if [[ $? != 0 ]]; then
-  hadoop_error ERROR: git checkout --force trunk is failing
+  hadoop_error ERROR: git checkout --force ${PATCH_BRANCH_DEFAULT} is 
failing
   cleanup_and_exit 1
 fi
 
@@ -859,8 +864,8 @@ function git_checkout
   cleanup_and_exit 1
 fi
 
-# we need to explicitly fetch in case the
-# git ref hasn't been brought in tree yet
+# if we've selected a feature branch that has new changes
+# since our last build, we'll need to rebase to see those changes.
 if [[ ${OFFLINE} == false ]]; then
   ${GIT} pull --rebase
   if [[ $? != 0 ]]; then
@@ -1011,7 +1016,7 @@ function verify_valid_branch
 ## @stabilityevolving
 ## @replaceable  no
 ## @return   0 on success, with PATCH_BRANCH updated appropriately
-## @return   1 on failure, with PATCH_BRANCH updated to trunk
+## @return   1 on failure, with PATCH_BRANCH updated to 
PATCH_BRANCH_DEFAULT
 function determine_branch
 {
   local allbranches
@@ -1075,7 +1080,7 @@ function determine_branch
 fi
   done
 
-  PATCH_BRANCH=trunk
+  PATCH_BRANCH=${PATCH_BRANCH_DEFAULT}
 
   popd /dev/null
 }
@@ -1365,7 +1370,7 @@ function check_reexec
 
   exec ${PATCH_DIR}/dev-support-test/test-patch.sh \
 --reexec \
---branch ${PATCH_BRANCH} \
+--branch ${PATCH_BRANCH} \
 --patch-dir=${PATCH_DIR} \
   ${USER_PARAMS[@]}
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9b01f81e/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 8b0e67c..49106ae 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -530,6 +530,9 @@ Release 2.8.0 - UNRELEASED
 
 HADOOP-11120. hadoop fs -rmr gives wrong advice. (Juliet Hougland via wang)
 
+HADOOP-11911. test-patch should allow configuration of default branch
+(Sean Busbey via aw)
+
   OPTIMIZATIONS
 
 HADOOP-11785. Reduce the number of listStatus operation in distcp



[Hadoop Wiki] Update of TestPatchTips by SomeOtherAccount

2015-05-05 Thread Apache Wiki
Dear Wiki user,

You have subscribed to a wiki page or wiki category on Hadoop Wiki for change 
notification.

The TestPatchTips page has been changed by SomeOtherAccount:
https://wiki.apache.org/hadoop/TestPatchTips

New page:
=== Introduction ===
In the Hadoop source tree is {{{dev-support/test-patch.sh}}}.  This script used 
by the Jenkins servers to run the automated QA tests.  It is possible and 
highly recommended to run this script locally prior to uploading a patch to 
JIRA.

In order to get the full power of the tool set, you'll want to make sure that 
both {{{findbugs}}} and {{{shellcheck}}} are installed.

== Using test-patch.sh ==
Running {{{test-patch.sh}}} will show a usage message that describes all of its 
options. While there are many listed, there are a few key ones:

 * {{{--basedir}}} = location of the source repo
 * {{{--dirty-workspace}}} = the repo isn't pristine, but run anyway
 * {{{--reset-repo}}} = the repo is allowed to be modified NOTE: This will 
'''DESTROY''' any changes in the given repo!
 * {{{--run-tests}}} = run appropriate unit tests
 * filename or JIRA # or HTTP URL = the location of the patch that needs to be 
tested

Apply and run just the basic checks in a checkout that has other stuff in it:

{{{
$ dev-support/test-patch.sh --dirty-workspace /tmp/patchfile
}}}
Apply and run the full unit test:

{{{
$ dev-support/test-patch.sh --dirty-workspace --run-tests /tmp/patchfile
}}}
Download a patch from a JIRA and run just the basic checks in a checkout that 
can be destroyed:

{{{
$ dev-support/test-patch.sh --resetrepo HADOOP-11820
}}}

'''Recommended Usage'''

In general, the easiest way to use {{{test-patch.sh}}} is to use two repos.  
One repo is used to build patches.  The other repo is used to test them.

{{{
$ git diff --no-prefix trunk  /tmp/1.patch

$ dev-support/test-patch.sh --resetrepo --runtests --basedir=/test/repo 
/tmp/1.patch
}}}
This will run the freshly built patch against the tests in a fresh repo.


hadoop git commit: HDFS-8314. Move HdfsServerConstants#IO_FILE_BUFFER_SIZE and SMALL_BUFFER_SIZE to the users. Contributed by Li Lu.

2015-05-05 Thread wheat9
Repository: hadoop
Updated Branches:
  refs/heads/trunk 9809a16d3 - 4da8490b5


HDFS-8314. Move HdfsServerConstants#IO_FILE_BUFFER_SIZE and SMALL_BUFFER_SIZE 
to the users. Contributed by Li Lu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4da8490b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4da8490b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4da8490b

Branch: refs/heads/trunk
Commit: 4da8490b512a33a255ed27309860859388d7c168
Parents: 9809a16
Author: Haohui Mai whe...@apache.org
Authored: Tue May 5 15:41:22 2015 -0700
Committer: Haohui Mai whe...@apache.org
Committed: Tue May 5 15:41:22 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../java/org/apache/hadoop/hdfs/DFSClient.java  |  6 +++--
 .../java/org/apache/hadoop/hdfs/DFSUtil.java| 11 +
 .../org/apache/hadoop/hdfs/DataStreamer.java|  7 +++---
 .../hadoop/hdfs/server/balancer/Dispatcher.java |  7 --
 .../hdfs/server/common/HdfsServerConstants.java |  8 --
 .../server/datanode/BlockMetadataHeader.java|  7 --
 .../hdfs/server/datanode/BlockReceiver.java |  5 ++--
 .../hdfs/server/datanode/BlockSender.java   | 14 ---
 .../hadoop/hdfs/server/datanode/DataNode.java   |  2 +-
 .../hdfs/server/datanode/DataXceiver.java   | 26 
 .../datanode/fsdataset/impl/BlockPoolSlice.java |  7 --
 .../datanode/fsdataset/impl/FsDatasetImpl.java  | 26 
 .../impl/RamDiskAsyncLazyPersistService.java|  8 --
 .../hdfs/server/namenode/TransferFsImage.java   | 10 +---
 .../org/apache/hadoop/hdfs/DFSTestUtil.java |  2 +-
 16 files changed, 95 insertions(+), 54 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4da8490b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index cc6758f..92a82c8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -513,6 +513,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-7847. Modify NNThroughputBenchmark to be able to operate on a remote
 NameNode (Charles Lamb via Colin P. McCabe)
 
+HDFS-8314. Move HdfsServerConstants#IO_FILE_BUFFER_SIZE and
+SMALL_BUFFER_SIZE to the users. (Li Lu via wheat9)
+
   OPTIMIZATIONS
 
 HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4da8490b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
index 99b8d2c..c145959 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
@@ -238,6 +238,7 @@ public class DFSClient implements java.io.Closeable, 
RemotePeerFactory,
   new DFSHedgedReadMetrics();
   private static ThreadPoolExecutor HEDGED_READ_THREAD_POOL;
   private final Sampler? traceSampler;
+  private final int smallBufferSize;
 
   public DfsClientConf getConf() {
 return dfsClientConf;
@@ -309,6 +310,7 @@ public class DFSClient implements java.io.Closeable, 
RemotePeerFactory,
 this.stats = stats;
 this.socketFactory = NetUtils.getSocketFactory(conf, ClientProtocol.class);
 this.dtpReplaceDatanodeOnFailure = ReplaceDatanodeOnFailure.get(conf);
+this.smallBufferSize = DFSUtil.getSmallBufferSize(conf);
 
 this.ugi = UserGroupInformation.getCurrentUser();
 
@@ -1902,7 +1904,7 @@ public class DFSClient implements java.io.Closeable, 
RemotePeerFactory,
   //connect to a datanode
   IOStreamPair pair = connectToDN(datanodes[j], timeout, lb);
   out = new DataOutputStream(new BufferedOutputStream(pair.out,
-  HdfsServerConstants.SMALL_BUFFER_SIZE));
+  smallBufferSize));
   in = new DataInputStream(pair.in);
 
   if (LOG.isDebugEnabled()) {
@@ -2067,7 +2069,7 @@ public class DFSClient implements java.io.Closeable, 
RemotePeerFactory,
 
 try {
   DataOutputStream out = new DataOutputStream(new 
BufferedOutputStream(pair.out,
-  HdfsServerConstants.SMALL_BUFFER_SIZE));
+  smallBufferSize));
   DataInputStream in = new DataInputStream(pair.in);
   
   new Sender(out).readBlock(lb.getBlock(), lb.getBlockToken(), clientName,


hadoop git commit: HDFS-8314. Move HdfsServerConstants#IO_FILE_BUFFER_SIZE and SMALL_BUFFER_SIZE to the users. Contributed by Li Lu.

2015-05-05 Thread wheat9
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 bd207b6cc - a4f868f0b


HDFS-8314. Move HdfsServerConstants#IO_FILE_BUFFER_SIZE and SMALL_BUFFER_SIZE 
to the users. Contributed by Li Lu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a4f868f0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a4f868f0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a4f868f0

Branch: refs/heads/branch-2
Commit: a4f868f0b8dfba3d6e290030fdca90612ca14a66
Parents: bd207b6
Author: Haohui Mai whe...@apache.org
Authored: Tue May 5 15:41:22 2015 -0700
Committer: Haohui Mai whe...@apache.org
Committed: Tue May 5 15:56:08 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../java/org/apache/hadoop/hdfs/DFSClient.java  |  6 ++--
 .../java/org/apache/hadoop/hdfs/DFSUtil.java| 11 
 .../org/apache/hadoop/hdfs/DataStreamer.java|  7 ++---
 .../hadoop/hdfs/server/balancer/Dispatcher.java |  7 +++--
 .../hdfs/server/common/HdfsServerConstants.java |  8 --
 .../server/datanode/BlockMetadataHeader.java|  7 +++--
 .../hdfs/server/datanode/BlockReceiver.java |  5 ++--
 .../hdfs/server/datanode/BlockSender.java   | 14 +++---
 .../hadoop/hdfs/server/datanode/DataNode.java   |  2 +-
 .../hdfs/server/datanode/DataXceiver.java   | 26 +++---
 .../datanode/fsdataset/impl/BlockPoolSlice.java |  7 +++--
 .../datanode/fsdataset/impl/FsDatasetImpl.java  | 29 
 .../impl/RamDiskAsyncLazyPersistService.java|  9 --
 .../hdfs/server/namenode/TransferFsImage.java   | 10 ---
 .../org/apache/hadoop/hdfs/DFSTestUtil.java |  2 +-
 .../org/apache/hadoop/hdfs/TestFileStatus.java  |  2 +-
 17 files changed, 99 insertions(+), 56 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a4f868f0/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 0aaba2f..f253383 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -183,6 +183,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-7758. Retire FsDatasetSpi#getVolumes() and use
 FsDatasetSpi#getVolumeRefs() instead (Lei (Eddy) Xu via Colin P. McCabe)
 
+HDFS-8314. Move HdfsServerConstants#IO_FILE_BUFFER_SIZE and
+SMALL_BUFFER_SIZE to the users. (Li Lu via wheat9)
+
   OPTIMIZATIONS
 
 HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a4f868f0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
index acfb41b..265d094 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
@@ -240,6 +240,7 @@ public class DFSClient implements java.io.Closeable, 
RemotePeerFactory,
   new DFSHedgedReadMetrics();
   private static ThreadPoolExecutor HEDGED_READ_THREAD_POOL;
   private final Sampler? traceSampler;
+  private final int smallBufferSize;
 
   public DfsClientConf getConf() {
 return dfsClientConf;
@@ -311,6 +312,7 @@ public class DFSClient implements java.io.Closeable, 
RemotePeerFactory,
 this.stats = stats;
 this.socketFactory = NetUtils.getSocketFactory(conf, ClientProtocol.class);
 this.dtpReplaceDatanodeOnFailure = ReplaceDatanodeOnFailure.get(conf);
+this.smallBufferSize = DFSUtil.getSmallBufferSize(conf);
 
 this.ugi = UserGroupInformation.getCurrentUser();
 
@@ -1901,7 +1903,7 @@ public class DFSClient implements java.io.Closeable, 
RemotePeerFactory,
   //connect to a datanode
   IOStreamPair pair = connectToDN(datanodes[j], timeout, lb);
   out = new DataOutputStream(new BufferedOutputStream(pair.out,
-  HdfsServerConstants.SMALL_BUFFER_SIZE));
+  smallBufferSize));
   in = new DataInputStream(pair.in);
 
   if (LOG.isDebugEnabled()) {
@@ -2066,7 +2068,7 @@ public class DFSClient implements java.io.Closeable, 
RemotePeerFactory,
 
 try {
   DataOutputStream out = new DataOutputStream(new 
BufferedOutputStream(pair.out,
-  HdfsServerConstants.SMALL_BUFFER_SIZE));
+  smallBufferSize));
   DataInputStream in = new DataInputStream(pair.in);
   
   new Sender(out).readBlock(lb.getBlock(), lb.getBlockToken(), clientName,


[Hadoop Wiki] Update of 2015MayBugBash by SomeOtherAccount

2015-05-05 Thread Apache Wiki
Dear Wiki user,

You have subscribed to a wiki page or wiki category on Hadoop Wiki for change 
notification.

The 2015MayBugBash page has been changed by SomeOtherAccount:
https://wiki.apache.org/hadoop/2015MayBugBash?action=diffrev1=5rev2=6

1. Name the patch file (something).patch
1. Verify the patch applies cleanly
1. Fix any pre-existing comments
-   1. Test the patch locally using {{{test-patch.sh}}}
+   1. Test the patch locally using {{{test-patch.sh}}} [See TestPatchTips for 
more!]
   1. '''Before uploading''', did you run {{{test-patch.sh}}}?
   1. Upload the reworked patch back into JIRA.
   1. Set the label to '''BB2015-05-RFC'''.


[13/17] hadoop git commit: YARN-3343. Increased TestCapacitySchedulerNodeLabelUpdate#testNodeUpdate timeout. Contributed by Rohith Sharmaks

2015-05-05 Thread jitendra
YARN-3343. Increased TestCapacitySchedulerNodeLabelUpdate#testNodeUpdate 
timeout. Contributed by Rohith Sharmaks


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e4c3b52c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e4c3b52c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e4c3b52c

Branch: refs/heads/HDFS-7240
Commit: e4c3b52c896291012f869ebc0a21e85e643fadd1
Parents: d33419a
Author: Jian He jia...@apache.org
Authored: Tue May 5 11:33:47 2015 -0700
Committer: Jian He jia...@apache.org
Committed: Tue May 5 11:33:47 2015 -0700

--
 hadoop-yarn-project/CHANGES.txt   | 3 +++
 .../scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java  | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e4c3b52c/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 6dac3c8..00dd205 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -313,6 +313,9 @@ Release 2.8.0 - UNRELEASED
 YARN-2123. Progress bars in Web UI always at 100% due to non-US locale.
 (Akira AJISAKA via xgong)
 
+YARN-3343. Increased TestCapacitySchedulerNodeLabelUpdate#testNodeUpdate
+timeout. (Rohith Sharmaks via jianhe)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e4c3b52c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java
index c5439d8..e60e496 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java
@@ -95,7 +95,7 @@ public class TestCapacitySchedulerNodeLabelUpdate {
 .getMemory());
   }
 
-  @Test (timeout = 30000)
+  @Test (timeout = 60000)
   public void testNodeUpdate() throws Exception {
// set node - label
mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y", 
"z"));



[08/17] hadoop git commit: YARN-2123. Progress bars in Web UI always at 100% due to non-US locale. Contributed by Akira AJISAKA

2015-05-05 Thread jitendra
YARN-2123. Progress bars in Web UI always at 100% due to non-US locale.
Contributed by Akira AJISAKA


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b7dd3a4f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b7dd3a4f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b7dd3a4f

Branch: refs/heads/HDFS-7240
Commit: b7dd3a4f04f712b7594c4e6e7ce50fd314f7c342
Parents: 9b01f81
Author: Xuan xg...@apache.org
Authored: Tue May 5 10:40:16 2015 -0700
Committer: Xuan xg...@apache.org
Committed: Tue May 5 10:40:16 2015 -0700

--
 .../org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java | 4 ++--
 .../apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java   | 4 ++--
 .../apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java  | 9 +
 hadoop-yarn-project/CHANGES.txt | 3 +++
 .../main/java/org/apache/hadoop/yarn/util/StringHelper.java | 5 -
 .../org/apache/hadoop/yarn/server/webapp/AppsBlock.java | 3 ++-
 .../server/resourcemanager/resource/ResourceWeights.java| 2 +-
 .../resourcemanager/webapp/CapacitySchedulerPage.java   | 9 +
 .../server/resourcemanager/webapp/DefaultSchedulerPage.java | 7 ---
 .../resourcemanager/webapp/FairSchedulerAppsBlock.java  | 3 ++-
 .../server/resourcemanager/webapp/FairSchedulerPage.java| 7 ---
 .../yarn/server/resourcemanager/webapp/RMAppsBlock.java | 3 ++-
 12 files changed, 32 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b7dd3a4f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
index 8aa8bb6..e293fd2 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
-import static org.apache.hadoop.yarn.util.StringHelper.percent;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
@@ -31,6 +30,7 @@ import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.webapp.SubView;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
@@ -77,7 +77,7 @@ public class TaskPage extends AppView {
 
   for (TaskAttempt attempt : getTaskAttempts()) {
 TaskAttemptInfo ta = new TaskAttemptInfo(attempt, true);
-String progress = percent(ta.getProgress() / 100);
+String progress = StringUtils.formatPercent(ta.getProgress() / 100, 2);
 
 String nodeHttpAddr = ta.getNode();
String diag = ta.getNote() == null ? "" : ta.getNote();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b7dd3a4f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
index 64aae59..7c1aa49 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.mapreduce.v2.app.webapp;
 import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_STATE;
 import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_TYPE;
 import 

[02/17] hadoop git commit: HADOOP-11916. TestStringUtils#testLowerAndUpperStrings failed on MAC due to a JVM bug. Contributed by Ming Ma.

2015-05-05 Thread jitendra
HADOOP-11916. TestStringUtils#testLowerAndUpperStrings failed on MAC due to a 
JVM bug. Contributed by Ming Ma.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/338e88a1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/338e88a1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/338e88a1

Branch: refs/heads/HDFS-7240
Commit: 338e88a19eeb01364c7f5bcdc5f4b5c35d53852d
Parents: 551615f
Author: Tsuyoshi Ozawa oz...@apache.org
Authored: Tue May 5 12:39:24 2015 +0900
Committer: Tsuyoshi Ozawa oz...@apache.org
Committed: Tue May 5 12:39:24 2015 +0900

--
 hadoop-common-project/hadoop-common/CHANGES.txt  | 3 +++
 .../src/test/java/org/apache/hadoop/util/TestStringUtils.java| 4 
 2 files changed, 7 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/338e88a1/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 2bf790a..0b3c971 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -596,6 +596,9 @@ Release 2.8.0 - UNRELEASED
 
 HADOOP-11900. Add failIfNoTests=false to hadoop-build-tools pom. (gera)
 
+HADOOP-11916. TestStringUtils#testLowerAndUpperStrings failed on MAC
+due to a JVM bug. (Ming Ma via ozawa)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/338e88a1/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
index 515c3e0..5b0715f 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringUtils.java
@@ -37,6 +37,7 @@ import java.util.regex.Pattern;
 
 import org.apache.hadoop.test.UnitTestcaseTimeLimit;
 import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
+import org.junit.Assume;
 import org.junit.Test;
 
 public class TestStringUtils extends UnitTestcaseTimeLimit {
@@ -416,6 +417,9 @@ public class TestStringUtils extends UnitTestcaseTimeLimit {
 
   @Test
   public void testLowerAndUpperStrings() {
+// Due to java bug 
http://bugs.java.com/bugdatabase/view_bug.do?bug_id=8047340,
+// The test will fail with Turkish locality on Mac OS.
+Assume.assumeTrue(Shell.LINUX);
 Locale defaultLocale = Locale.getDefault();
 try {
  Locale.setDefault(new Locale("tr", "TR"));



[03/17] hadoop git commit: HDFS-7916. 'reportBadBlocks' from datanodes to standby Node BPServiceActor goes for infinite loop (Contributed by Vinayakumar B) Reverted earlier commit

2015-05-05 Thread jitendra
HDFS-7916. 'reportBadBlocks' from datanodes to standby Node BPServiceActor goes 
for infinite loop (Contributed by Vinayakumar B)
Reverted earlier commit


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/318081cc
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/318081cc
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/318081cc

Branch: refs/heads/HDFS-7240
Commit: 318081ccd7af1ec02ec18f35ea95c579326be728
Parents: 338e88a
Author: Vinayakumar B vinayakum...@apache.org
Authored: Tue May 5 11:05:37 2015 +0530
Committer: Vinayakumar B vinayakum...@apache.org
Committed: Tue May 5 11:05:37 2015 +0530

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt  | 3 ---
 .../apache/hadoop/hdfs/server/datanode/ReportBadBlockAction.java | 4 
 2 files changed, 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/318081cc/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 365b005..cd9b7b8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -664,9 +664,6 @@ Release 2.7.1 - UNRELEASED
 HDFS-8179. DFSClient#getServerDefaults returns null within 1
 hour of system start. (Xiaoyu Yao via Arpit Agarwal)
 
-HDFS-7916. 'reportBadBlocks' from datanodes to standby Node BPServiceActor
-goes for infinite loop (vinayakumarb)
-
 HDFS-8163. Using monotonicNow for block report scheduling causes
 test failures on recently restarted systems. (Arpit Agarwal)
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/318081cc/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReportBadBlockAction.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReportBadBlockAction.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReportBadBlockAction.java
index 991b56d..7155eae 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReportBadBlockAction.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReportBadBlockAction.java
@@ -26,7 +26,6 @@ import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import 
org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
-import org.apache.hadoop.ipc.StandbyException;
 
 /**
  * ReportBadBlockAction is an instruction issued by {{BPOfferService}} to
@@ -60,9 +59,6 @@ public class ReportBadBlockAction implements 
BPServiceActorAction {
 
 try {
   bpNamenode.reportBadBlocks(locatedBlock);
-} catch (StandbyException e) {
-  DataNode.LOG.warn("Failed to report bad block " + block
-  + " to standby namenode");
 } catch (IOException e) {
   throw new BPServiceActorActionException("Failed to report bad block "
   + block + " to namenode: ");



hadoop git commit: HADOOP-11912. Extra configuration key used in TraceUtils should respect prefix (Masatake Iwasaki via Colin P. McCabe)

2015-05-05 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 b8e450763 - 606e4f494


HADOOP-11912. Extra configuration key used in TraceUtils should respect prefix 
(Masatake Iwasaki via Colin P. McCabe)

(cherry picked from commit 90b384564875bb353224630e501772b46d4ca9c5)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/606e4f49
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/606e4f49
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/606e4f49

Branch: refs/heads/branch-2
Commit: 606e4f4940a78af96411dc72900b195831d077a6
Parents: b8e4507
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Tue May 5 17:40:31 2015 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Tue May 5 17:50:04 2015 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +++
 .../main/java/org/apache/hadoop/tracing/TraceUtils.java | 12 +---
 .../java/org/apache/hadoop/tracing/TestTraceUtils.java  |  2 +-
 3 files changed, 9 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/606e4f49/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 9b73647..4895c15 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -162,6 +162,9 @@ Release 2.8.0 - UNRELEASED
 
 HADOOP-11926. test-patch.sh mv does wrong math (aw)
 
+HADOOP-11912. Extra configuration key used in TraceUtils should respect
+prefix (Masatake Iwasaki via Colin P. McCabe)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/606e4f49/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
index fa52ac6..52b5d47 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
@@ -47,18 +47,16 @@ public class TraceUtils {
 return new HTraceConfiguration() {
   @Override
   public String get(String key) {
-if (extraMap.containsKey(key)) {
-  return extraMap.get(key);
-}
-return conf.get(prefix + key, "");
+return get(key, "");
   }
 
   @Override
   public String get(String key, String defaultValue) {
-if (extraMap.containsKey(key)) {
-  return extraMap.get(key);
+String prefixedKey = prefix + key;
+if (extraMap.containsKey(prefixedKey)) {
+  return extraMap.get(prefixedKey);
 }
-return conf.get(prefix + key, defaultValue);
+return conf.get(prefixedKey, defaultValue);
   }
 };
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/606e4f49/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/TestTraceUtils.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/TestTraceUtils.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/TestTraceUtils.java
index 80d64b1..400d003 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/TestTraceUtils.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/TestTraceUtils.java
@@ -46,7 +46,7 @@ public class TestTraceUtils {
 conf.set(TEST_PREFIX + key, oldValue);
 LinkedListConfigurationPair extraConfig =
 new LinkedListConfigurationPair();
-extraConfig.add(new ConfigurationPair(key, newValue));
+extraConfig.add(new ConfigurationPair(TEST_PREFIX + key, newValue));
 HTraceConfiguration wrapped = TraceUtils.wrapHadoopConf(TEST_PREFIX, conf, 
extraConfig);
 assertEquals(newValue, wrapped.get(key));
   }



hadoop git commit: HADOOP-11912. Extra configuration key used in TraceUtils should respect prefix (Masatake Iwasaki via Colin P. McCabe)

2015-05-05 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/trunk 4402e4c63 - 90b384564


HADOOP-11912. Extra configuration key used in TraceUtils should respect prefix 
(Masatake Iwasaki via Colin P. McCabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/90b38456
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/90b38456
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/90b38456

Branch: refs/heads/trunk
Commit: 90b384564875bb353224630e501772b46d4ca9c5
Parents: 4402e4c
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Tue May 5 17:40:31 2015 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Tue May 5 17:40:31 2015 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +++
 .../main/java/org/apache/hadoop/tracing/TraceUtils.java | 12 +---
 .../java/org/apache/hadoop/tracing/TestTraceUtils.java  |  2 +-
 3 files changed, 9 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/90b38456/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 9353862..adbe8b5 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -612,6 +612,9 @@ Release 2.8.0 - UNRELEASED
 
 HADOOP-11926. test-patch.sh mv does wrong math (aw)
 
+HADOOP-11912. Extra configuration key used in TraceUtils should respect
+prefix (Masatake Iwasaki via Colin P. McCabe)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/90b38456/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
index fa52ac6..52b5d47 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceUtils.java
@@ -47,18 +47,16 @@ public class TraceUtils {
 return new HTraceConfiguration() {
   @Override
   public String get(String key) {
-if (extraMap.containsKey(key)) {
-  return extraMap.get(key);
-}
-return conf.get(prefix + key, );
+return get(key, );
   }
 
   @Override
   public String get(String key, String defaultValue) {
-if (extraMap.containsKey(key)) {
-  return extraMap.get(key);
+String prefixedKey = prefix + key;
+if (extraMap.containsKey(prefixedKey)) {
+  return extraMap.get(prefixedKey);
 }
-return conf.get(prefix + key, defaultValue);
+return conf.get(prefixedKey, defaultValue);
   }
 };
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/90b38456/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/TestTraceUtils.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/TestTraceUtils.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/TestTraceUtils.java
index 80d64b1..400d003 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/TestTraceUtils.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tracing/TestTraceUtils.java
@@ -46,7 +46,7 @@ public class TestTraceUtils {
 conf.set(TEST_PREFIX + key, oldValue);
 LinkedListConfigurationPair extraConfig =
 new LinkedListConfigurationPair();
-extraConfig.add(new ConfigurationPair(key, newValue));
+extraConfig.add(new ConfigurationPair(TEST_PREFIX + key, newValue));
 HTraceConfiguration wrapped = TraceUtils.wrapHadoopConf(TEST_PREFIX, conf, 
extraConfig);
 assertEquals(newValue, wrapped.get(key));
   }



hadoop git commit: HDFS-7672. Handle write failure for stripping blocks and refactor the existing code in DFSStripedOutputStream and StripedDataStreamer.

2015-05-05 Thread szetszwo
Repository: hadoop
Updated Branches:
  refs/heads/HDFS-7285 eeb8ed772 -> a5b52fc10


HDFS-7672. Handle write failure for stripping blocks and refactor the existing 
code in DFSStripedOutputStream and StripedDataStreamer.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a5b52fc1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a5b52fc1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a5b52fc1

Branch: refs/heads/HDFS-7285
Commit: a5b52fc107fd060cc30ea18097c01d44237cb1bc
Parents: eeb8ed7
Author: Tsz-Wo Nicholas Sze szets...@hortonworks.com
Authored: Tue May 5 16:26:49 2015 -0700
Committer: Tsz-Wo Nicholas Sze szets...@hortonworks.com
Committed: Tue May 5 16:26:49 2015 -0700

--
 .../hadoop-hdfs/CHANGES-HDFS-EC-7285.txt|   3 +
 .../org/apache/hadoop/hdfs/DFSOutputStream.java |  69 +--
 .../hadoop/hdfs/DFSStripedOutputStream.java | 501 ---
 .../java/org/apache/hadoop/hdfs/DFSUtil.java|  10 +-
 .../org/apache/hadoop/hdfs/DataStreamer.java|  17 +-
 .../apache/hadoop/hdfs/StripedDataStreamer.java | 156 ++
 .../org/apache/hadoop/hdfs/MiniDFSCluster.java  |   2 -
 .../hadoop/hdfs/TestDFSStripedOutputStream.java |  18 +-
 .../TestDFSStripedOutputStreamWithFailure.java  | 323 
 9 files changed, 765 insertions(+), 334 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a5b52fc1/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
index a8df3f2..7efaa5a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
@@ -172,3 +172,6 @@
 
 HDFS-8324. Add trace info to DFSClient#getErasureCodingZoneInfo(..) 
(vinayakumarb via 
 umamahesh)
+
+HDFS-7672. Handle write failure for stripping blocks and refactor the
+existing code in DFSStripedOutputStream and StripedDataStreamer.  
(szetszwo)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a5b52fc1/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSOutputStream.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSOutputStream.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSOutputStream.java
index 0280d71..8580357 100755
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSOutputStream.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSOutputStream.java
@@ -24,6 +24,8 @@ import java.nio.channels.ClosedChannelException;
 import java.util.EnumSet;
 import java.util.concurrent.atomic.AtomicReference;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.crypto.CryptoProtocolVersion;
@@ -86,6 +88,8 @@ import com.google.common.base.Preconditions;
 @InterfaceAudience.Private
 public class DFSOutputStream extends FSOutputSummer
 implements Syncable, CanSetDropBehind {
+  static final Log LOG = LogFactory.getLog(DFSOutputStream.class);
+
   /**
* Number of times to retry creating a file when there are transient 
* errors (typically related to encryption zones and KeyProvider operations).
@@ -419,24 +423,35 @@ public class DFSOutputStream extends FSOutputSummer
 streamer.incBytesCurBlock(len);
 
 // If packet is full, enqueue it for transmission
-//
 if (currentPacket.getNumChunks() == currentPacket.getMaxChunks() ||
 streamer.getBytesCurBlock() == blockSize) {
-  if (DFSClient.LOG.isDebugEnabled()) {
-DFSClient.LOG.debug(DFSClient writeChunk packet full seqno= +
-currentPacket.getSeqno() +
-, src= + src +
-, bytesCurBlock= + streamer.getBytesCurBlock() +
-, blockSize= + blockSize +
-, appendChunk= + streamer.getAppendChunk());
-  }
-  streamer.waitAndQueuePacket(currentPacket);
-  currentPacket = null;
+  enqueueCurrentPacketFull();
+}
+  }
 
-  adjustChunkBoundary();
+  void enqueueCurrentPacket() throws IOException {
+streamer.waitAndQueuePacket(currentPacket);
+currentPacket = null;
+  }
 
-  endBlock();
+  void enqueueCurrentPacketFull() throws IOException {
+if (LOG.isDebugEnabled()) {
+  LOG.debug(enqueue full  + currentPacket + , src= + src
+  + , bytesCurBlock= + streamer.getBytesCurBlock()
+  + , blockSize= + blockSize
+  + , 

hadoop git commit: HADOOP-11926. test-patch.sh mv does wrong math (aw)

2015-05-05 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/trunk 4da8490b5 -> 4402e4c63


HADOOP-11926. test-patch.sh mv does wrong math (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4402e4c6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4402e4c6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4402e4c6

Branch: refs/heads/trunk
Commit: 4402e4c633808556d49854df45683688b6a9ce84
Parents: 4da8490
Author: Allen Wittenauer a...@apache.org
Authored: Tue May 5 17:01:42 2015 -0700
Committer: Allen Wittenauer a...@apache.org
Committed: Tue May 5 17:01:42 2015 -0700

--
 dev-support/test-patch.sh   | 2 +-
 hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++
 2 files changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4402e4c6/dev-support/test-patch.sh
--
diff --git a/dev-support/test-patch.sh b/dev-support/test-patch.sh
index 3759e9f..d2f0244 100755
--- a/dev-support/test-patch.sh
+++ b/dev-support/test-patch.sh
@@ -2262,7 +2262,7 @@ function cleanup_and_exit
 # Jenkins or whatever already knows where it is at
 # since it told us to put it there!
 relative_patchdir /dev/null
-if [[ $? == 0 ]]; then
+if [[ $? == 1 ]]; then
   hadoop_debug mv ${PATCH_DIR} ${BASEDIR}
   mv ${PATCH_DIR} ${BASEDIR}
 fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4402e4c6/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 1b33b37..9353862 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -610,6 +610,8 @@ Release 2.8.0 - UNRELEASED
 HADOOP-11904. test-patch.sh goes into an infinite loop on non-maven
 builds (aw)
 
+HADOOP-11926. test-patch.sh mv does wrong math (aw)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES



hadoop git commit: HDFS-7348. Erasure Coding: DataNode reconstruct striped blocks. Contributed by Yi Liu.

2015-05-05 Thread zhz
Repository: hadoop
Updated Branches:
  refs/heads/HDFS-7285 a5b52fc10 -> 850d7fac9


HDFS-7348. Erasure Coding: DataNode reconstruct striped blocks. Contributed by 
Yi Liu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/850d7fac
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/850d7fac
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/850d7fac

Branch: refs/heads/HDFS-7285
Commit: 850d7fac953b20b7ea76f3b66ba498eb62ad663f
Parents: a5b52fc
Author: Zhe Zhang z...@apache.org
Authored: Tue May 5 16:33:56 2015 -0700
Committer: Zhe Zhang z...@apache.org
Committed: Tue May 5 16:33:56 2015 -0700

--
 .../hadoop-hdfs/CHANGES-HDFS-EC-7285.txt|   3 +
 .../org/apache/hadoop/hdfs/BlockReader.java |   6 +
 .../apache/hadoop/hdfs/BlockReaderLocal.java|   5 +
 .../hadoop/hdfs/BlockReaderLocalLegacy.java |   5 +
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   |   6 +
 .../java/org/apache/hadoop/hdfs/DFSPacket.java  |  10 +-
 .../apache/hadoop/hdfs/RemoteBlockReader.java   |   5 +
 .../apache/hadoop/hdfs/RemoteBlockReader2.java  |   5 +
 .../hadoop/hdfs/server/datanode/DNConf.java |  27 +
 .../hadoop/hdfs/server/datanode/DataNode.java   |  31 +-
 .../erasurecode/ErasureCodingWorker.java| 893 ++-
 .../hadoop/hdfs/util/StripedBlockUtil.java  |  49 +-
 .../src/main/resources/hdfs-default.xml |  31 +-
 .../hadoop/hdfs/TestRecoverStripedFile.java | 356 
 14 files changed, 1377 insertions(+), 55 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/850d7fac/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
index 7efaa5a..0d2d448 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
@@ -175,3 +175,6 @@
 
 HDFS-7672. Handle write failure for stripping blocks and refactor the
 existing code in DFSStripedOutputStream and StripedDataStreamer.  
(szetszwo)
+
+HDFS-7348. Erasure Coding: DataNode reconstruct striped blocks. 
+(Yi Liu via Zhe Zhang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/850d7fac/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReader.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReader.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReader.java
index aa3e8ba..0a5511e 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReader.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReader.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.ByteBufferReadable;
 import org.apache.hadoop.fs.ReadOption;
 import org.apache.hadoop.hdfs.shortcircuit.ClientMmap;
+import org.apache.hadoop.util.DataChecksum;
 
 /**
  * A BlockReader is responsible for reading a single block
@@ -99,4 +100,9 @@ public interface BlockReader extends ByteBufferReadable {
*  supported.
*/
   ClientMmap getClientMmap(EnumSetReadOption opts);
+
+  /**
+   * @return  The DataChecksum used by the read block
+   */
+  DataChecksum getDataChecksum();
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/850d7fac/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocal.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocal.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocal.java
index d913f3a..0b2420d 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocal.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocal.java
@@ -738,4 +738,9 @@ class BlockReaderLocal implements BlockReader {
   void forceUnanchorable() {
 replica.getSlot().makeUnanchorable();
   }
+
+  @Override
+  public DataChecksum getDataChecksum() {
+return checksum;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/850d7fac/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocalLegacy.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocalLegacy.java
 

hadoop git commit: YARN-3582. NPE in WebAppProxyServlet. Contributed by Jian He

2015-05-05 Thread xgong
Repository: hadoop
Updated Branches:
  refs/heads/trunk 90b384564 -> a583a4069


YARN-3582. NPE in WebAppProxyServlet. Contributed by Jian He


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a583a406
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a583a406
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a583a406

Branch: refs/heads/trunk
Commit: a583a40693f5c56c40b39fd12cfa0bb7174fc526
Parents: 90b3845
Author: Xuan xg...@apache.org
Authored: Tue May 5 21:42:19 2015 -0700
Committer: Xuan xg...@apache.org
Committed: Tue May 5 21:42:19 2015 -0700

--
 hadoop-yarn-project/CHANGES.txt   | 2 ++
 .../hadoop/yarn/server/webproxy/WebAppProxyServlet.java   | 7 +--
 .../hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java   | 7 +++
 3 files changed, 14 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a583a406/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 00dd205..97b7ee4 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -316,6 +316,8 @@ Release 2.8.0 - UNRELEASED
 YARN-3343. Increased TestCapacitySchedulerNodeLabelUpdate#testNodeUpdate
 timeout. (Rohith Sharmaks via jianhe)
 
+YARN-3582. NPE in WebAppProxyServlet. (jian he via xgong)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a583a406/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
index fd98c80..d45beb6 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
@@ -248,8 +248,11 @@ public class WebAppProxyServlet extends HttpServlet {
   final String remoteUser = req.getRemoteUser();
   final String pathInfo = req.getPathInfo();
 
-  String[] parts = pathInfo.split(/, 3);
-  if(parts.length  2) {
+  String[] parts = null;
+  if (pathInfo != null) {
+parts = pathInfo.split(/, 3);
+  }
+  if(parts == null || parts.length  2) {
 LOG.warn({} gave an invalid proxy path {}, remoteUser,  pathInfo);
 notFound(resp, Your path appears to be formatted incorrectly.);
 return;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a583a406/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java
index aa6d918..2a2ca2c 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/test/java/org/apache/hadoop/yarn/server/webproxy/TestWebAppProxyServlet.java
@@ -131,6 +131,13 @@ public class TestWebAppProxyServlet {
 
 // wrong url
 try {
+  // wrong url without app ID
+  URL emptyUrl = new URL(http://localhost:; + proxyPort + /proxy);
+  HttpURLConnection emptyProxyConn = (HttpURLConnection) emptyUrl
+  .openConnection();
+  emptyProxyConn.connect();;
+  assertEquals(HttpURLConnection.HTTP_NOT_FOUND, 
emptyProxyConn.getResponseCode());
+
   // wrong url. Set wrong app ID
   URL wrongUrl = new URL(http://localhost:; + proxyPort + /proxy/app);
   HttpURLConnection proxyConn = (HttpURLConnection) wrongUrl



hadoop git commit: HADOOP-11120. hadoop fs -rmr gives wrong advice. Contributed by Juliet Houghland.

2015-05-05 Thread wang
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 0f30913ea -> 84d8ba4f7


HADOOP-11120. hadoop fs -rmr gives wrong advice. Contributed by Juliet 
Houghland.

(cherry picked from commit 05adc76ace6bf28e4a3ff874044c2c41e3eba63f)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/84d8ba4f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/84d8ba4f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/84d8ba4f

Branch: refs/heads/branch-2
Commit: 84d8ba4f722ce806391f3434268f08a96aae8741
Parents: 0f30913
Author: Andrew Wang w...@apache.org
Authored: Tue May 5 08:37:37 2015 -0700
Committer: Andrew Wang w...@apache.org
Committed: Tue May 5 08:37:41 2015 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt| 2 ++
 .../src/main/java/org/apache/hadoop/fs/shell/Delete.java   | 2 +-
 .../hadoop-common/src/test/resources/testConf.xml  | 2 +-
 3 files changed, 4 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/84d8ba4f/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index fd5bb4b..d885c41 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -70,6 +70,8 @@ Release 2.8.0 - UNRELEASED
 HADOOP-11328. ZKFailoverController does not log Exception when doRun raises
 errors. (Tianyin Xu via ozawa)
 
+HADOOP-11120. hadoop fs -rmr gives wrong advice. (Juliet Hougland via wang)
+
   OPTIMIZATIONS
 
 HADOOP-11785. Reduce the number of listStatus operation in distcp

http://git-wip-us.apache.org/repos/asf/hadoop/blob/84d8ba4f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
index 6798fbe..f882817 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
@@ -141,7 +141,7 @@ class Delete {
 
 @Override
 public String getReplacementCommand() {
-  return rm -r;
+  return -rm -r;
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/84d8ba4f/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml 
b/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
index 3729cf9..d36efd5 100644
--- a/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
+++ b/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
@@ -429,7 +429,7 @@
 /comparator
 comparator
   typeRegexpComparator/type
-  expected-output^\s*\(DEPRECATED\) Same as 'rm 
-r'\s*/expected-output
+  expected-output^\s*\(DEPRECATED\) Same as '-rm 
-r'\s*/expected-output
 /comparator
   /comparators
 /test



hadoop git commit: HADOOP-11120. hadoop fs -rmr gives wrong advice. Contributed by Juliet Houghland.

2015-05-05 Thread wang
Repository: hadoop
Updated Branches:
  refs/heads/trunk 9356cf867 -> 05adc76ac


HADOOP-11120. hadoop fs -rmr gives wrong advice. Contributed by Juliet 
Houghland.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/05adc76a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/05adc76a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/05adc76a

Branch: refs/heads/trunk
Commit: 05adc76ace6bf28e4a3ff874044c2c41e3eba63f
Parents: 9356cf8
Author: Andrew Wang w...@apache.org
Authored: Tue May 5 08:37:37 2015 -0700
Committer: Andrew Wang w...@apache.org
Committed: Tue May 5 08:37:37 2015 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt| 2 ++
 .../src/main/java/org/apache/hadoop/fs/shell/Delete.java   | 2 +-
 .../hadoop-common/src/test/resources/testConf.xml  | 2 +-
 3 files changed, 4 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/05adc76a/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 0b3c971..8b0e67c 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -528,6 +528,8 @@ Release 2.8.0 - UNRELEASED
 HADOOP-11328. ZKFailoverController does not log Exception when doRun raises
 errors. (Tianyin Xu via ozawa)
 
+HADOOP-11120. hadoop fs -rmr gives wrong advice. (Juliet Hougland via wang)
+
   OPTIMIZATIONS
 
 HADOOP-11785. Reduce the number of listStatus operation in distcp

http://git-wip-us.apache.org/repos/asf/hadoop/blob/05adc76a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
index 6798fbe..f882817 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
@@ -141,7 +141,7 @@ class Delete {
 
 @Override
 public String getReplacementCommand() {
-  return rm -r;
+  return -rm -r;
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/05adc76a/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml 
b/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
index 9b72960..57cce14 100644
--- a/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
+++ b/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
@@ -453,7 +453,7 @@
 /comparator
 comparator
   typeRegexpComparator/type
-  expected-output^\s*\(DEPRECATED\) Same as 'rm 
-r'\s*/expected-output
+  expected-output^\s*\(DEPRECATED\) Same as '-rm 
-r'\s*/expected-output
 /comparator
   /comparators
 /test



hadoop git commit: HDFS-8324. Add trace info to DFSClient#getErasureCodingZoneInfo(..). Contributed by Vinayakumar B

2015-05-05 Thread umamahesh
Repository: hadoop
Updated Branches:
  refs/heads/HDFS-7285 edfaff3c7 -> eeb8ed772


HDFS-8324. Add trace info to DFSClient#getErasureCodingZoneInfo(..). 
Contributed by Vinayakumar B


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/eeb8ed77
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/eeb8ed77
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/eeb8ed77

Branch: refs/heads/HDFS-7285
Commit: eeb8ed77202bb82e6a13e971faaf86ea93823e63
Parents: edfaff3
Author: Uma Maheswara Rao G umamah...@apache.org
Authored: Tue May 5 19:25:21 2015 +0530
Committer: Uma Maheswara Rao G umamah...@apache.org
Committed: Tue May 5 19:25:21 2015 +0530

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt  | 3 +++
 .../src/main/java/org/apache/hadoop/hdfs/DFSClient.java   | 3 +++
 2 files changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/eeb8ed77/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
index ef760fc..a8df3f2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
@@ -169,3 +169,6 @@
 
 HDFS-8242. Erasure Coding: XML based end-to-end test for ECCli commands
 (Rakesh R via vinayakumarb)
+
+HDFS-8324. Add trace info to DFSClient#getErasureCodingZoneInfo(..) 
(vinayakumarb via 
+umamahesh)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eeb8ed77/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
index cfa14b8..729ebf8 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
@@ -3313,11 +3313,14 @@ public class DFSClient implements java.io.Closeable, 
RemotePeerFactory,
*/
   public ECZoneInfo getErasureCodingZoneInfo(String src) throws IOException {
 checkOpen();
+TraceScope scope = getPathTraceScope(getErasureCodingZoneInfo, src);
 try {
   return namenode.getErasureCodingZoneInfo(src);
 } catch (RemoteException re) {
   throw re.unwrapRemoteException(FileNotFoundException.class,
   AccessControlException.class, UnresolvedPathException.class);
+} finally {
+  scope.close();
 }
   }
 }



hadoop git commit: HADOOP-11904. test-patch.sh goes into an infinite loop on non-maven builds (aw)

2015-05-05 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/trunk fcd4cb751 -> 3ff91e9e9


HADOOP-11904. test-patch.sh goes into an infinite loop on non-maven builds (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3ff91e9e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3ff91e9e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3ff91e9e

Branch: refs/heads/trunk
Commit: 3ff91e9e9302d94b0d18cccebd02d3815c06ce90
Parents: fcd4cb7
Author: Allen Wittenauer a...@apache.org
Authored: Tue May 5 11:02:15 2015 -0700
Committer: Allen Wittenauer a...@apache.org
Committed: Tue May 5 11:02:15 2015 -0700

--
 dev-support/test-patch.sh   | 8 +++-
 hadoop-common-project/hadoop-common/CHANGES.txt | 3 +++
 2 files changed, 10 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3ff91e9e/dev-support/test-patch.sh
--
diff --git a/dev-support/test-patch.sh b/dev-support/test-patch.sh
index 5107718..9f48c64 100755
--- a/dev-support/test-patch.sh
+++ b/dev-support/test-patch.sh
@@ -915,6 +915,13 @@ function git_checkout
   fi
 
   add_jira_footer git revision ${PATCH_BRANCH} / ${GIT_REVISION}
+
+  if [[ ! -f ${BASEDIR}/pom.xml ]]; then
+hadoop_error ERROR: This verison of test-patch.sh only supports 
Maven-based builds. Aborting.
+add_jira_table -1 pre-patch Unsupported build system.
+output_to_jira 1
+cleanup_and_exit 1
+  fi
   return 0
 }
 
@@ -1331,7 +1338,6 @@ function apply_patch_file
 ## @return   none; otherwise relaunches
 function check_reexec
 {
-  set +x
   local commentfile=${PATCH_DIR}/tp.${RANDOM}
 
   if [[ ${REEXECED} == true ]]; then

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3ff91e9e/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 49106ae..5b2654a 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -604,6 +604,9 @@ Release 2.8.0 - UNRELEASED
 HADOOP-11916. TestStringUtils#testLowerAndUpperStrings failed on MAC
 due to a JVM bug. (Ming Ma via ozawa)
 
+HADOOP-11904. test-patch.sh goes into an infinite loop on non-maven
+builds (aw)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES



hadoop git commit: HDFS-8305: HDFS INotify: the destination field of RenameOp should always end with the file name (cmccabe)

2015-05-05 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 51df9e837 -> 1f01d8347


HDFS-8305: HDFS INotify: the destination field of RenameOp should always end 
with the file name (cmccabe)

(cherry picked from commit fcd4cb751665adb241081e42b3403c3856b6c6fe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1f01d834
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1f01d834
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1f01d834

Branch: refs/heads/branch-2
Commit: 1f01d8347a57bb95936f6d1fa5dc33b20239d6b2
Parents: 51df9e8
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Tue May 5 10:50:09 2015 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Tue May 5 11:06:47 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt  |  3 +++
 .../hadoop/hdfs/server/namenode/FSDirRenameOp.java   |  2 +-
 .../apache/hadoop/hdfs/server/namenode/FSEditLog.java| 10 +++---
 .../hadoop/hdfs/TestDFSInotifyEventInputStream.java  | 11 +++
 4 files changed, 22 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1f01d834/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 7b18365..d75ad50 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -358,6 +358,9 @@ Release 2.7.1 - UNRELEASED
 HDFS-8091: ACLStatus and XAttributes should be presented to
 INodeAttributesProvider before returning to client (asuresh)
 
+HDFS-8305: HDFS INotify: the destination field of RenameOp should always
+end with the file name (cmccabe)
+
 Release 2.7.0 - 2015-04-20
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1f01d834/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
index c57cae2..4a20a62 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
@@ -476,7 +476,7 @@ class FSDirRenameOp {
   fsd.writeUnlock();
 }
 if (stat) {
-  fsd.getEditLog().logRename(src, dst, mtime, logRetryCache);
+  fsd.getEditLog().logRename(src, actualDst, mtime, logRetryCache);
   return true;
 }
 return false;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1f01d834/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
index 257a8fe..9ec4902 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
@@ -816,7 +816,9 @@ public class FSEditLog implements LogsPurgeable {
   }
   
   /** 
-   * Add rename record to edit log
+   * Add rename record to edit log.
+   *
+   * The destination should be the file name, not the destination directory.
* TODO: use String parameters until just before writing to disk
*/
   void logRename(String src, String dst, long timestamp, boolean toLogRpcIds) {
@@ -827,9 +829,11 @@ public class FSEditLog implements LogsPurgeable {
 logRpcIds(op, toLogRpcIds);
 logEdit(op);
   }
-  
+
   /** 
-   * Add rename record to edit log
+   * Add rename record to edit log.
+   *
+   * The destination should be the file name, not the destination directory.
*/
   void logRename(String src, String dst, long timestamp, boolean toLogRpcIds,
   Options.Rename... options) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1f01d834/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSInotifyEventInputStream.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSInotifyEventInputStream.java
 

hadoop git commit: HDFS-8305: HDFS INotify: the destination field of RenameOp should always end with the file name (cmccabe)

2015-05-05 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.7 69d837428 -> e056e0a3c


HDFS-8305: HDFS INotify: the destination field of RenameOp should always end 
with the file name (cmccabe)

(cherry picked from commit fcd4cb751665adb241081e42b3403c3856b6c6fe)
(cherry picked from commit 1f01d8347a57bb95936f6d1fa5dc33b20239d6b2)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e056e0a3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e056e0a3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e056e0a3

Branch: refs/heads/branch-2.7
Commit: e056e0a3c1bcd300cf950d32e943dac457423f9b
Parents: 69d8374
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Tue May 5 10:50:09 2015 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Tue May 5 11:07:13 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt  |  3 +++
 .../hadoop/hdfs/server/namenode/FSDirRenameOp.java   |  2 +-
 .../apache/hadoop/hdfs/server/namenode/FSEditLog.java| 10 +++---
 .../hadoop/hdfs/TestDFSInotifyEventInputStream.java  | 11 +++
 4 files changed, 22 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e056e0a3/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 2c209cd..eb02759 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -62,6 +62,9 @@ Release 2.7.1 - UNRELEASED
 HDFS-8091: ACLStatus and XAttributes should be presented to
 INodeAttributesProvider before returning to client (asuresh)
 
+HDFS-8305: HDFS INotify: the destination field of RenameOp should always
+end with the file name (cmccabe)
+
 Release 2.7.0 - 2015-04-20
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e056e0a3/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
index c57cae2..4a20a62 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
@@ -476,7 +476,7 @@ class FSDirRenameOp {
   fsd.writeUnlock();
 }
 if (stat) {
-  fsd.getEditLog().logRename(src, dst, mtime, logRetryCache);
+  fsd.getEditLog().logRename(src, actualDst, mtime, logRetryCache);
   return true;
 }
 return false;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e056e0a3/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
index 8912532..57c5832 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
@@ -816,7 +816,9 @@ public class FSEditLog implements LogsPurgeable {
   }
   
   /** 
-   * Add rename record to edit log
+   * Add rename record to edit log.
+   *
+   * The destination should be the file name, not the destination directory.
* TODO: use String parameters until just before writing to disk
*/
   void logRename(String src, String dst, long timestamp, boolean toLogRpcIds) {
@@ -827,9 +829,11 @@ public class FSEditLog implements LogsPurgeable {
 logRpcIds(op, toLogRpcIds);
 logEdit(op);
   }
-  
+
   /** 
-   * Add rename record to edit log
+   * Add rename record to edit log.
+   *
+   * The destination should be the file name, not the destination directory.
*/
   void logRename(String src, String dst, long timestamp, boolean toLogRpcIds,
   Options.Rename... options) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e056e0a3/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSInotifyEventInputStream.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSInotifyEventInputStream.java
 

hadoop git commit: HADOOP-11917. test-patch.sh should work with ${BASEDIR}/patchprocess setups (aw)

2015-05-05 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/trunk 24d3a2d4f -> d33419ae0


HADOOP-11917. test-patch.sh should work with ${BASEDIR}/patchprocess setups (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d33419ae
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d33419ae
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d33419ae

Branch: refs/heads/trunk
Commit: d33419ae01c528073f9f00ef1aadf153fed41222
Parents: 24d3a2d
Author: Allen Wittenauer a...@apache.org
Authored: Tue May 5 11:26:31 2015 -0700
Committer: Allen Wittenauer a...@apache.org
Committed: Tue May 5 11:26:31 2015 -0700

--
 .gitignore  |  1 +
 dev-support/test-patch.sh   | 78 +---
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +
 pom.xml | 23 +++---
 4 files changed, 84 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d33419ae/.gitignore
--
diff --git a/.gitignore b/.gitignore
index a49ad4b..779f507 100644
--- a/.gitignore
+++ b/.gitignore
@@ -23,3 +23,4 @@ 
hadoop-tools/hadoop-openstack/src/test/resources/contract-test-options.xml
 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/tla/yarnregistry.toolbox
 yarnregistry.pdf
 hadoop-tools/hadoop-aws/src/test/resources/contract-test-options.xml
+patchprocess/

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d33419ae/dev-support/test-patch.sh
--
diff --git a/dev-support/test-patch.sh b/dev-support/test-patch.sh
index 9f48c64..3759e9f 100755
--- a/dev-support/test-patch.sh
+++ b/dev-support/test-patch.sh
@@ -535,6 +535,26 @@ function echo_and_redirect
   ${@}  ${logfile} 21
 }
 
+## @description is PATCH_DIR relative to BASEDIR?
+## @audiencepublic
+## @stability   stable
+## @replaceable yes
+## @returns 1 - no, PATCH_DIR
+## @returns 0 - yes, PATCH_DIR - BASEDIR
+function relative_patchdir
+{
+  local p=${PATCH_DIR#${BASEDIR}}
+
+  if [[ ${#p} -eq ${#PATCH_DIR} ]]; then
+echo ${p}
+return 1
+  fi
+  p=${p#/}
+  echo ${p}
+  return 0
+}
+
+
 ## @description  Print the usage information
 ## @audience public
 ## @stabilitystable
@@ -697,7 +717,8 @@ function parse_args
 esac
   done
 
-  # if we get a relative path, turn it absolute
+  # we need absolute dir for ${BASEDIR}
+  cd ${CWD}
   BASEDIR=$(cd -P -- ${BASEDIR} /dev/null  pwd -P)
 
   if [[ ${BUILD_NATIVE} == true ]] ; then
@@ -723,6 +744,7 @@ function parse_args
 JENKINS=false
   fi
 
+  cd ${CWD}
   if [[ ! -d ${PATCH_DIR} ]]; then
 mkdir -p ${PATCH_DIR}
 if [[ $? == 0 ]] ; then
@@ -733,6 +755,9 @@ function parse_args
 fi
   fi
 
+  # we need absolute dir for PATCH_DIR
+  PATCH_DIR=$(cd -P -- ${PATCH_DIR} /dev/null  pwd -P)
+
   GITDIFFLINES=${PATCH_DIR}/gitdifflines.txt
 }
 
@@ -821,17 +846,36 @@ function find_changed_modules
 function git_checkout
 {
   local currentbranch
+  local exemptdir
 
   big_console_header Confirming git environment
 
+  cd ${BASEDIR}
+  if [[ ! -d .git ]]; then
+hadoop_error ERROR: ${BASEDIR} is not a git repo.
+cleanup_and_exit 1
+  fi
+
   if [[ ${RESETREPO} == true ]] ; then
-cd ${BASEDIR}
 ${GIT} reset --hard
 if [[ $? != 0 ]]; then
   hadoop_error ERROR: git reset is failing
   cleanup_and_exit 1
 fi
-${GIT} clean -xdf
+
+# if PATCH_DIR is in BASEDIR, then we don't want
+# git wiping it out.
+exemptdir=$(relative_patchdir)
+if [[ $? == 1 ]]; then
+  ${GIT} clean -xdf
+else
+  # we do, however, want it emptied of all _files_.
+  # we need to leave _directories_ in case we are in
+  # re-exec mode (which places a directory full of stuff in it)
+  hadoop_debug Exempting ${exemptdir} from clean
+  rm ${PATCH_DIR}/* 2/dev/null
+  ${GIT} clean -xdf -e ${exemptdir}
+fi
 if [[ $? != 0 ]]; then
   hadoop_error ERROR: git clean is failing
   cleanup_and_exit 1
@@ -875,11 +919,6 @@ function git_checkout
 fi
 
   else
-cd ${BASEDIR}
-if [[ ! -d .git ]]; then
-  hadoop_error ERROR: ${BASEDIR} is not a git repo.
-  cleanup_and_exit 1
-fi
 
 status=$(${GIT} status --porcelain)
 if [[ ${status} !=   -z ${DIRTY_WORKSPACE} ]] ; then
@@ -1000,6 +1039,16 @@ function verify_valid_branch
   local check=$2
   local i
 
+  # shortcut some common
+  # non-resolvable names
+  if [[ -z ${check} ]]; then
+return 1
+  fi
+
+  if [[ ${check} == patch ]]; then
+return 1
+  fi
+
   if [[ ${check} =~ ^git ]]; then
 ref=$(echo ${check} | cut -f2 -dt)
 count=$(echo ${ref} | wc -c | tr -d ' ')
@@ -2207,9 +2256,16 @@ 

hadoop git commit: YARN-3396. Handle URISyntaxException in ResourceLocalizationService. (Contributed by Brahma Reddy Battula)

2015-05-05 Thread junping_du
Repository: hadoop
Updated Branches:
  refs/heads/trunk 05adc76ac -> 381024206


YARN-3396. Handle URISyntaxException in ResourceLocalizationService. 
(Contributed by Brahma Reddy Battula)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/38102420
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/38102420
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/38102420

Branch: refs/heads/trunk
Commit: 38102420621308f5ba91cdeb6a18a63aa5acf640
Parents: 05adc76
Author: Junping Du junping...@apache.org
Authored: Tue May 5 10:18:23 2015 -0700
Committer: Junping Du junping...@apache.org
Committed: Tue May 5 10:18:23 2015 -0700

--
 hadoop-yarn-project/CHANGES.txt  | 2 ++
 .../localizer/ResourceLocalizationService.java   | 8 ++--
 2 files changed, 8 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/38102420/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 97d3208..a6b7f17 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -180,6 +180,8 @@ Release 2.8.0 - UNRELEASED
 YARN-3363. add localization and container launch time to ContainerMetrics
 at NM to show these timing information for each active container.
 (zxu via rkanter)
+YARN-3396. Handle URISyntaxException in ResourceLocalizationService. 
+(Brahma Reddy Battula via junping_du)
 
   OPTIMIZATIONS
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/38102420/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
index e9c45f3..17ea1a9 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
@@ -995,7 +995,9 @@ public class ResourceLocalizationService extends 
CompositeService
 try {
   req = new LocalResourceRequest(rsrc);
 } catch (URISyntaxException e) {
-  // TODO fail? Already translated several times...
+  LOG.error(
+  Got exception in parsing URL of LocalResource:
+  + rsrc.getResource(), e);
 }
 LocalizerResourceRequestEvent assoc = scheduled.get(req);
 if (assoc == null) {
@@ -1069,7 +1071,9 @@ public class ResourceLocalizationService extends 
CompositeService
   LOG.error(Inorrect path for PRIVATE localization.
   + next.getResource().getFile(), e);
 } catch (URISyntaxException e) {
-//TODO fail? Already translated several times...
+  LOG.error(
+  Got exception in parsing URL of LocalResource:
+  + next.getResource(), e);
 }
   }
 



hadoop git commit: HADOOP-11911. test-patch should allow configuration of default branch (Sean Busbey via aw)

2015-05-05 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/trunk 381024206 -> 9b01f81eb


HADOOP-11911. test-patch should allow configuration of default branch (Sean 
Busbey via aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9b01f81e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9b01f81e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9b01f81e

Branch: refs/heads/trunk
Commit: 9b01f81eb874cd63e6b9ae2d09d94fc8bf4fcd7d
Parents: 3810242
Author: Allen Wittenauer a...@apache.org
Authored: Tue May 5 09:59:20 2015 -0700
Committer: Allen Wittenauer a...@apache.org
Committed: Tue May 5 09:59:31 2015 -0700

--
 dev-support/test-patch.sh   | 21 
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +++
 2 files changed, 16 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9b01f81e/dev-support/test-patch.sh
--
diff --git a/dev-support/test-patch.sh b/dev-support/test-patch.sh
index b6e1b03..5107718 100755
--- a/dev-support/test-patch.sh
+++ b/dev-support/test-patch.sh
@@ -43,6 +43,7 @@ function setup_defaults
   ECLIPSE_HOME=${ECLIPSE_HOME:-}
   BUILD_NATIVE=${BUILD_NATIVE:-true}
   PATCH_BRANCH=
+  PATCH_BRANCH_DEFAULT=trunk
   CHANGED_MODULES=
   USER_MODULE_LIST=
   OFFLINE=false
@@ -551,7 +552,8 @@ function hadoop_usage
   echo
   echo Options:
   echo --basedir=dirThe directory to apply the patch to (default 
current directory)
-  echo --branch=dir Forcibly set the branch
+  echo --branch=ref Forcibly set the branch
+  echo --branch-default=ref If the branch isn't forced and we don't detect 
one in the patch name, use this branch (default 'trunk')
   echo --build-native=bool  If true, then build native components (default 
'true')
   echo --debugIf set, then output some extra stuff to stderr
   echo --dirty-workspace  Allow the local git workspace to have 
uncommitted changes
@@ -604,6 +606,9 @@ function parse_args
   --branch=*)
 PATCH_BRANCH=${i#*=}
   ;;
+  --branch-default=*)
+PATCH_BRANCH_DEFAULT=${i#*=}
+  ;;
   --build-native=*)
 BUILD_NATIVE=${i#*=}
   ;;
@@ -832,9 +837,9 @@ function git_checkout
   cleanup_and_exit 1
 fi
 
-${GIT} checkout --force trunk
+${GIT} checkout --force ${PATCH_BRANCH_DEFAULT}
 if [[ $? != 0 ]]; then
-  hadoop_error ERROR: git checkout --force trunk is failing
+  hadoop_error ERROR: git checkout --force ${PATCH_BRANCH_DEFAULT} is 
failing
   cleanup_and_exit 1
 fi
 
@@ -859,8 +864,8 @@ function git_checkout
   cleanup_and_exit 1
 fi
 
-# we need to explicitly fetch in case the
-# git ref hasn't been brought in tree yet
+# if we've selected a feature branch that has new changes
+# since our last build, we'll need to rebase to see those changes.
 if [[ ${OFFLINE} == false ]]; then
   ${GIT} pull --rebase
   if [[ $? != 0 ]]; then
@@ -1011,7 +1016,7 @@ function verify_valid_branch
 ## @stabilityevolving
 ## @replaceable  no
 ## @return   0 on success, with PATCH_BRANCH updated appropriately
-## @return   1 on failure, with PATCH_BRANCH updated to trunk
+## @return   1 on failure, with PATCH_BRANCH updated to 
PATCH_BRANCH_DEFAULT
 function determine_branch
 {
   local allbranches
@@ -1075,7 +1080,7 @@ function determine_branch
 fi
   done
 
-  PATCH_BRANCH=trunk
+  PATCH_BRANCH=${PATCH_BRANCH_DEFAULT}
 
   popd /dev/null
 }
@@ -1365,7 +1370,7 @@ function check_reexec
 
   exec ${PATCH_DIR}/dev-support-test/test-patch.sh \
 --reexec \
---branch ${PATCH_BRANCH} \
+--branch ${PATCH_BRANCH} \
 --patch-dir=${PATCH_DIR} \
   ${USER_PARAMS[@]}
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9b01f81e/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 8b0e67c..49106ae 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -530,6 +530,9 @@ Release 2.8.0 - UNRELEASED
 
 HADOOP-11120. hadoop fs -rmr gives wrong advice. (Juliet Hougland via wang)
 
+HADOOP-11911. test-patch should allow configuration of default branch
+(Sean Busbey via aw)
+
   OPTIMIZATIONS
 
 HADOOP-11785. Reduce the number of listStatus operation in distcp



hadoop git commit: HADOOP-11911. test-patch should allow configuration of default branch (Sean Busbey via aw)

2015-05-05 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 6ea007ba9 -> 02892f674


HADOOP-11911. test-patch should allow configuration of default branch (Sean 
Busbey via aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/02892f67
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/02892f67
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/02892f67

Branch: refs/heads/branch-2
Commit: 02892f674bb4d3440e418053b92db5140dca0c39
Parents: 6ea007b
Author: Allen Wittenauer a...@apache.org
Authored: Tue May 5 10:03:22 2015 -0700
Committer: Allen Wittenauer a...@apache.org
Committed: Tue May 5 10:03:22 2015 -0700

--
 dev-support/test-patch.sh   | 21 
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +++
 2 files changed, 16 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/02892f67/dev-support/test-patch.sh
--
diff --git a/dev-support/test-patch.sh b/dev-support/test-patch.sh
index b6e1b03..5107718 100755
--- a/dev-support/test-patch.sh
+++ b/dev-support/test-patch.sh
@@ -43,6 +43,7 @@ function setup_defaults
   ECLIPSE_HOME=${ECLIPSE_HOME:-}
   BUILD_NATIVE=${BUILD_NATIVE:-true}
   PATCH_BRANCH=
+  PATCH_BRANCH_DEFAULT=trunk
   CHANGED_MODULES=
   USER_MODULE_LIST=
   OFFLINE=false
@@ -551,7 +552,8 @@ function hadoop_usage
   echo
   echo Options:
   echo --basedir=dirThe directory to apply the patch to (default 
current directory)
-  echo --branch=dir Forcibly set the branch
+  echo --branch=ref Forcibly set the branch
+  echo --branch-default=ref If the branch isn't forced and we don't detect 
one in the patch name, use this branch (default 'trunk')
   echo --build-native=bool  If true, then build native components (default 
'true')
   echo --debugIf set, then output some extra stuff to stderr
   echo --dirty-workspace  Allow the local git workspace to have 
uncommitted changes
@@ -604,6 +606,9 @@ function parse_args
   --branch=*)
 PATCH_BRANCH=${i#*=}
   ;;
+  --branch-default=*)
+PATCH_BRANCH_DEFAULT=${i#*=}
+  ;;
   --build-native=*)
 BUILD_NATIVE=${i#*=}
   ;;
@@ -832,9 +837,9 @@ function git_checkout
   cleanup_and_exit 1
 fi
 
-${GIT} checkout --force trunk
+${GIT} checkout --force ${PATCH_BRANCH_DEFAULT}
 if [[ $? != 0 ]]; then
-  hadoop_error ERROR: git checkout --force trunk is failing
+  hadoop_error ERROR: git checkout --force ${PATCH_BRANCH_DEFAULT} is 
failing
   cleanup_and_exit 1
 fi
 
@@ -859,8 +864,8 @@ function git_checkout
   cleanup_and_exit 1
 fi
 
-# we need to explicitly fetch in case the
-# git ref hasn't been brought in tree yet
+# if we've selected a feature branch that has new changes
+# since our last build, we'll need to rebase to see those changes.
 if [[ ${OFFLINE} == false ]]; then
   ${GIT} pull --rebase
   if [[ $? != 0 ]]; then
@@ -1011,7 +1016,7 @@ function verify_valid_branch
 ## @stabilityevolving
 ## @replaceable  no
 ## @return   0 on success, with PATCH_BRANCH updated appropriately
-## @return   1 on failure, with PATCH_BRANCH updated to trunk
+## @return   1 on failure, with PATCH_BRANCH updated to 
PATCH_BRANCH_DEFAULT
 function determine_branch
 {
   local allbranches
@@ -1075,7 +1080,7 @@ function determine_branch
 fi
   done
 
-  PATCH_BRANCH=trunk
+  PATCH_BRANCH=${PATCH_BRANCH_DEFAULT}
 
   popd /dev/null
 }
@@ -1365,7 +1370,7 @@ function check_reexec
 
   exec ${PATCH_DIR}/dev-support-test/test-patch.sh \
 --reexec \
---branch ${PATCH_BRANCH} \
+--branch ${PATCH_BRANCH} \
 --patch-dir=${PATCH_DIR} \
   ${USER_PARAMS[@]}
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/02892f67/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index d885c41..34795a8 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -72,6 +72,9 @@ Release 2.8.0 - UNRELEASED
 
 HADOOP-11120. hadoop fs -rmr gives wrong advice. (Juliet Hougland via wang)
 
+HADOOP-11911. test-patch should allow configuration of default branch
+(Sean Busbey via aw)
+
   OPTIMIZATIONS
 
 HADOOP-11785. Reduce the number of listStatus operation in distcp



hadoop git commit: YARN-3396. Handle URISyntaxException in ResourceLocalizationService. (Contributed by Brahma Reddy Battula) (cherry picked from commit 38102420621308f5ba91cdeb6a18a63aa5acf640)

2015-05-05 Thread junping_du
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 84d8ba4f7 -> 6ea007ba9


YARN-3396. Handle URISyntaxException in ResourceLocalizationService. 
(Contributed by Brahma Reddy Battula)
(cherry picked from commit 38102420621308f5ba91cdeb6a18a63aa5acf640)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6ea007ba
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6ea007ba
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6ea007ba

Branch: refs/heads/branch-2
Commit: 6ea007ba91bbe460de7509351562ee06b3f77533
Parents: 84d8ba4
Author: Junping Du junping...@apache.org
Authored: Tue May 5 10:18:23 2015 -0700
Committer: Junping Du junping...@apache.org
Committed: Tue May 5 10:19:16 2015 -0700

--
 hadoop-yarn-project/CHANGES.txt  | 2 ++
 .../localizer/ResourceLocalizationService.java   | 8 ++--
 2 files changed, 8 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6ea007ba/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 9f7af1e..88533fb 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -132,6 +132,8 @@ Release 2.8.0 - UNRELEASED
 YARN-3363. add localization and container launch time to ContainerMetrics
 at NM to show these timing information for each active container.
 (zxu via rkanter)
+YARN-3396. Handle URISyntaxException in ResourceLocalizationService. 
+(Brahma Reddy Battula via junping_du)
 
 YARN-2980. Move health check script related functionality to hadoop-common
 (Varun Saxena via aw)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6ea007ba/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
index e9c45f3..17ea1a9 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java
@@ -995,7 +995,9 @@ public class ResourceLocalizationService extends 
CompositeService
 try {
   req = new LocalResourceRequest(rsrc);
 } catch (URISyntaxException e) {
-  // TODO fail? Already translated several times...
+  LOG.error(
+  Got exception in parsing URL of LocalResource:
+  + rsrc.getResource(), e);
 }
 LocalizerResourceRequestEvent assoc = scheduled.get(req);
 if (assoc == null) {
@@ -1069,7 +1071,9 @@ public class ResourceLocalizationService extends 
CompositeService
   LOG.error(Inorrect path for PRIVATE localization.
   + next.getResource().getFile(), e);
 } catch (URISyntaxException e) {
-//TODO fail? Already translated several times...
+  LOG.error(
+  Got exception in parsing URL of LocalResource:
+  + next.getResource(), e);
 }
   }
 



hadoop git commit: YARN-2123. Progress bars in Web UI always at 100% due to non-US locale. Contributed by Akira AJISAKA

2015-05-05 Thread xgong
Repository: hadoop
Updated Branches:
  refs/heads/trunk 9b01f81eb -> b7dd3a4f0


YARN-2123. Progress bars in Web UI always at 100% due to non-US locale.
Contributed by Akira AJISAKA


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b7dd3a4f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b7dd3a4f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b7dd3a4f

Branch: refs/heads/trunk
Commit: b7dd3a4f04f712b7594c4e6e7ce50fd314f7c342
Parents: 9b01f81
Author: Xuan xg...@apache.org
Authored: Tue May 5 10:40:16 2015 -0700
Committer: Xuan xg...@apache.org
Committed: Tue May 5 10:40:16 2015 -0700

--
 .../org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java | 4 ++--
 .../apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java   | 4 ++--
 .../apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java  | 9 +
 hadoop-yarn-project/CHANGES.txt | 3 +++
 .../main/java/org/apache/hadoop/yarn/util/StringHelper.java | 5 -
 .../org/apache/hadoop/yarn/server/webapp/AppsBlock.java | 3 ++-
 .../server/resourcemanager/resource/ResourceWeights.java| 2 +-
 .../resourcemanager/webapp/CapacitySchedulerPage.java   | 9 +
 .../server/resourcemanager/webapp/DefaultSchedulerPage.java | 7 ---
 .../resourcemanager/webapp/FairSchedulerAppsBlock.java  | 3 ++-
 .../server/resourcemanager/webapp/FairSchedulerPage.java| 7 ---
 .../yarn/server/resourcemanager/webapp/RMAppsBlock.java | 3 ++-
 12 files changed, 32 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b7dd3a4f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
index 8aa8bb6..e293fd2 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
-import static org.apache.hadoop.yarn.util.StringHelper.percent;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
@@ -31,6 +30,7 @@ import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.webapp.SubView;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
@@ -77,7 +77,7 @@ public class TaskPage extends AppView {
 
   for (TaskAttempt attempt : getTaskAttempts()) {
 TaskAttemptInfo ta = new TaskAttemptInfo(attempt, true);
-String progress = percent(ta.getProgress() / 100);
+String progress = StringUtils.formatPercent(ta.getProgress() / 100, 2);
 
 String nodeHttpAddr = ta.getNode();
 String diag = ta.getNote() == null ?  : ta.getNote();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b7dd3a4f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
index 64aae59..7c1aa49 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.mapreduce.v2.app.webapp;
 import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_STATE;
 import 

hadoop git commit: HDFS-8242. Erasure Coding: XML based end-to-end test for ECCli commands (Contributed by Rakesh R)

2015-05-05 Thread vinayakumarb
Repository: hadoop
Updated Branches:
  refs/heads/HDFS-7285 858800e89 - edfaff3c7


HDFS-8242. Erasure Coding: XML based end-to-end test for ECCli commands 
(Contributed by Rakesh R)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/edfaff3c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/edfaff3c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/edfaff3c

Branch: refs/heads/HDFS-7285
Commit: edfaff3c74a1bfc2dfea1d936a348ea140df04b0
Parents: 858800e
Author: Vinayakumar B vinayakum...@apache.org
Authored: Tue May 5 11:54:30 2015 +0530
Committer: Vinayakumar B vinayakum...@apache.org
Committed: Tue May 5 11:54:30 2015 +0530

--
 .../hadoop-hdfs/CHANGES-HDFS-EC-7285.txt|   3 +
 .../hdfs/tools/erasurecode/ECCommand.java   |   9 +-
 .../hadoop/cli/CLITestCmdErasureCoding.java |  38 +++
 .../apache/hadoop/cli/TestErasureCodingCLI.java | 114 +++
 .../cli/util/CLICommandErasureCodingCli.java|  21 ++
 .../cli/util/ErasureCodingCliCmdExecutor.java   |  37 ++
 .../test/resources/testErasureCodingConf.xml| 342 +++
 7 files changed, 561 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/edfaff3c/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
index faec023..ef760fc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-EC-7285.txt
@@ -166,3 +166,6 @@
 (jing9)
 
 HDFS-8137. Send the EC schema to DataNode via EC encoding/recovering 
command(umamahesh)
+
+HDFS-8242. Erasure Coding: XML based end-to-end test for ECCli commands
+(Rakesh R via vinayakumarb)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/edfaff3c/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/erasurecode/ECCommand.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/erasurecode/ECCommand.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/erasurecode/ECCommand.java
index 84c2275..802a46d 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/erasurecode/ECCommand.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/erasurecode/ECCommand.java
@@ -17,7 +17,9 @@
 package org.apache.hadoop.hdfs.tools.erasurecode;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.LinkedList;
+import java.util.List;
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -120,11 +122,12 @@ public abstract class ECCommand extends Command {
 sb.append("Schema '");
 sb.append(schemaName);
 sb.append("' does not match any of the supported schemas.");
-sb.append("Please select any one of [");
+sb.append(" Please select any one of ");
+List<String> schemaNames = new ArrayList<String>();
 for (ECSchema ecSchema : ecSchemas) {
-  sb.append(ecSchema.getSchemaName());
-  sb.append(", ");
+  schemaNames.add(ecSchema.getSchemaName());
 }
+sb.append(schemaNames);
 throw new HadoopIllegalArgumentException(sb.toString());
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/edfaff3c/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/CLITestCmdErasureCoding.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/CLITestCmdErasureCoding.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/CLITestCmdErasureCoding.java
new file mode 100644
index 000..6c06a8d
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/CLITestCmdErasureCoding.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * p/
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * p/
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the 

hadoop git commit: HDFS-8219. setStoragePolicy with folder behavior is different after cluster restart. (surendra singh lilhore via Xiaoyu Yao)

2015-05-05 Thread xyao
Repository: hadoop
Updated Branches:
  refs/heads/trunk ffce9a341 - 0100b1550


HDFS-8219. setStoragePolicy with folder behavior is different after cluster 
restart. (surendra singh lilhore via Xiaoyu Yao)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0100b155
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0100b155
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0100b155

Branch: refs/heads/trunk
Commit: 0100b155019496d077f958904de7d385697d65d9
Parents: ffce9a3
Author: Xiaoyu Yao x...@apache.org
Authored: Tue May 5 13:41:14 2015 -0700
Committer: Xiaoyu Yao x...@apache.org
Committed: Tue May 5 13:41:14 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../hadoop/hdfs/server/namenode/FSEditLog.java  |  2 +-
 .../hadoop/hdfs/TestBlockStoragePolicy.java | 45 
 3 files changed, 49 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0100b155/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 01de9b1..cc6758f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -626,6 +626,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-8290. WebHDFS calls before namesystem initialization can cause
 NullPointerException. (cnauroth)
 
+HDFS-8219. setStoragePolicy with folder behavior is different after 
cluster restart.
+(surendra singh lilhore via Xiaoyu Yao)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0100b155/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
index 28e150c..83e52bc 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
@@ -739,7 +739,7 @@ public class FSEditLog implements LogsPurgeable {
   .setClientMachine(
   newNode.getFileUnderConstructionFeature().getClientMachine())
   .setOverwrite(overwrite)
-  .setStoragePolicyId(newNode.getStoragePolicyID());
+  .setStoragePolicyId(newNode.getLocalStoragePolicyID());
 
 AclFeature f = newNode.getAclFeature();
 if (f != null) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0100b155/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockStoragePolicy.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockStoragePolicy.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockStoragePolicy.java
index 9621dc8..5e3b55f 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockStoragePolicy.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockStoragePolicy.java
@@ -26,6 +26,7 @@ import java.util.*;
 
 import com.google.common.collect.Lists;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.StorageType;
@@ -1172,4 +1173,48 @@ public class TestBlockStoragePolicy {
   cluster.shutdown();
 }
   }
+
+  @Test
+  public void testGetFileStoragePolicyAfterRestartNN() throws Exception {
+//HDFS8219
+final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
+.numDataNodes(REPLICATION)
+.storageTypes(
+new StorageType[] {StorageType.DISK, StorageType.ARCHIVE})
+.build();
+cluster.waitActive();
+final DistributedFileSystem fs = cluster.getFileSystem();
+try {
+  final String file = "/testScheduleWithinSameNode/file";
+  Path dir = new Path("/testScheduleWithinSameNode");
+  fs.mkdirs(dir);
+  // 2. Set Dir policy
+  fs.setStoragePolicy(dir, "COLD");
+  // 3. Create file
+  final FSDataOutputStream out = fs.create(new Path(file));
+  out.writeChars("testScheduleWithinSameNode");
+  out.close();
+  // 4. Set Dir policy
+  fs.setStoragePolicy(dir, "HOT");
+  HdfsFileStatus status = fs.getClient().getFileInfo(file);
+  // 5. get file policy, it should be parent 

hadoop git commit: HDFS-8219. setStoragePolicy with folder behavior is different after cluster restart. (surendra singh lilhore via Xiaoyu Yao)

2015-05-05 Thread xyao
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 eb0c6d2ee - e68e8b3b5


HDFS-8219. setStoragePolicy with folder behavior is different after cluster 
restart. (surendra singh lilhore via Xiaoyu Yao)

(cherry picked from commit 0100b155019496d077f958904de7d385697d65d9)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e68e8b3b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e68e8b3b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e68e8b3b

Branch: refs/heads/branch-2
Commit: e68e8b3b5cff85bfd8bb5b00b9033f63577856d6
Parents: eb0c6d2
Author: Xiaoyu Yao x...@apache.org
Authored: Tue May 5 13:41:14 2015 -0700
Committer: Xiaoyu Yao x...@apache.org
Committed: Tue May 5 13:42:22 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../hadoop/hdfs/server/namenode/FSEditLog.java  |  2 +-
 .../hadoop/hdfs/TestBlockStoragePolicy.java | 45 
 3 files changed, 49 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e68e8b3b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 75d0871..0aaba2f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -299,6 +299,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-8290. WebHDFS calls before namesystem initialization can cause
 NullPointerException. (cnauroth)
 
+HDFS-8219. setStoragePolicy with folder behavior is different after 
cluster restart.
+(surendra singh lilhore via Xiaoyu Yao)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e68e8b3b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
index 9ec4902..0e097c7 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
@@ -740,7 +740,7 @@ public class FSEditLog implements LogsPurgeable {
   .setClientMachine(
   newNode.getFileUnderConstructionFeature().getClientMachine())
   .setOverwrite(overwrite)
-  .setStoragePolicyId(newNode.getStoragePolicyID());
+  .setStoragePolicyId(newNode.getLocalStoragePolicyID());
 
 AclFeature f = newNode.getAclFeature();
 if (f != null) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e68e8b3b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockStoragePolicy.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockStoragePolicy.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockStoragePolicy.java
index 821d2db..4c3e7f0 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockStoragePolicy.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockStoragePolicy.java
@@ -26,6 +26,7 @@ import java.util.*;
 
 import com.google.common.collect.Lists;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.StorageType;
@@ -1177,4 +1178,48 @@ public class TestBlockStoragePolicy {
   cluster.shutdown();
 }
   }
+
+  @Test
+  public void testGetFileStoragePolicyAfterRestartNN() throws Exception {
+//HDFS8219
+final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
+.numDataNodes(REPLICATION)
+.storageTypes(
+new StorageType[] {StorageType.DISK, StorageType.ARCHIVE})
+.build();
+cluster.waitActive();
+final DistributedFileSystem fs = cluster.getFileSystem();
+try {
+  final String file = "/testScheduleWithinSameNode/file";
+  Path dir = new Path("/testScheduleWithinSameNode");
+  fs.mkdirs(dir);
+  // 2. Set Dir policy
+  fs.setStoragePolicy(dir, "COLD");
+  // 3. Create file
+  final FSDataOutputStream out = fs.create(new Path(file));
+  out.writeChars("testScheduleWithinSameNode");
+  out.close();
+  // 4. Set Dir policy
+  fs.setStoragePolicy(dir, "HOT");
+  HdfsFileStatus status = 

hadoop git commit: HADOOP-11904. test-patch.sh goes into an infinite loop on non-maven builds (aw)

2015-05-05 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 02cdcaec5 - 51df9e837


HADOOP-11904. test-patch.sh goes into an infinite loop on non-maven builds (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/51df9e83
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/51df9e83
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/51df9e83

Branch: refs/heads/branch-2
Commit: 51df9e83791712fd6c12589ca6d6df7d96b42d01
Parents: 02cdcae
Author: Allen Wittenauer a...@apache.org
Authored: Tue May 5 11:05:44 2015 -0700
Committer: Allen Wittenauer a...@apache.org
Committed: Tue May 5 11:05:56 2015 -0700

--
 dev-support/test-patch.sh   | 8 +++-
 hadoop-common-project/hadoop-common/CHANGES.txt | 3 +++
 2 files changed, 10 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/51df9e83/dev-support/test-patch.sh
--
diff --git a/dev-support/test-patch.sh b/dev-support/test-patch.sh
index 5107718..9f48c64 100755
--- a/dev-support/test-patch.sh
+++ b/dev-support/test-patch.sh
@@ -915,6 +915,13 @@ function git_checkout
   fi
 
   add_jira_footer git revision ${PATCH_BRANCH} / ${GIT_REVISION}
+
+  if [[ ! -f ${BASEDIR}/pom.xml ]]; then
+hadoop_error "ERROR: This version of test-patch.sh only supports Maven-based builds. Aborting."
+add_jira_table -1 pre-patch "Unsupported build system."
+output_to_jira 1
+cleanup_and_exit 1
+  fi
   return 0
 }
 
@@ -1331,7 +1338,6 @@ function apply_patch_file
 ## @return   none; otherwise relaunches
 function check_reexec
 {
-  set +x
   local commentfile=${PATCH_DIR}/tp.${RANDOM}
 
   if [[ ${REEXECED} == true ]]; then

http://git-wip-us.apache.org/repos/asf/hadoop/blob/51df9e83/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 34795a8..5a73826 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -149,6 +149,9 @@ Release 2.8.0 - UNRELEASED
 HADOOP-11916. TestStringUtils#testLowerAndUpperStrings failed on MAC
 due to a JVM bug. (Ming Ma via ozawa)
 
+HADOOP-11904. test-patch.sh goes into an infinite loop on non-maven
+builds (aw)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES



svn commit: r1677873 - in /hadoop/common/site/main/publish: ./ docs/r0.23.10/ docs/r0.23.6/ docs/r0.23.7/ docs/r0.23.8/ docs/r0.23.9/ docs/r1.1.1/ docs/r1.1.2/ docs/r1.2.0/ docs/r2.0.2-alpha/ docs/r2.

2015-05-05 Thread omalley
Author: omalley
Date: Tue May  5 18:14:19 2015
New Revision: 1677873

URL: http://svn.apache.org/r1677873
Log:
HADOOP-11902. Prune old javadoc versions.

Removed:
hadoop/common/site/main/publish/docs/r0.23.10/
hadoop/common/site/main/publish/docs/r0.23.6/
hadoop/common/site/main/publish/docs/r0.23.7/
hadoop/common/site/main/publish/docs/r0.23.8/
hadoop/common/site/main/publish/docs/r0.23.9/
hadoop/common/site/main/publish/docs/r1.1.1/
hadoop/common/site/main/publish/docs/r1.1.2/
hadoop/common/site/main/publish/docs/r1.2.0/
hadoop/common/site/main/publish/docs/r2.0.2-alpha/
hadoop/common/site/main/publish/docs/r2.0.3-alpha/
hadoop/common/site/main/publish/docs/r2.0.4-alpha/
hadoop/common/site/main/publish/docs/r2.0.5-alpha/
hadoop/common/site/main/publish/docs/r2.0.6-alpha/
hadoop/common/site/main/publish/docs/r2.1.0-beta/
hadoop/common/site/main/publish/docs/r2.2.0/
hadoop/common/site/main/publish/docs/r2.3.0/
hadoop/common/site/main/publish/docs/r2.4.0/
hadoop/common/site/main/publish/docs/r2.5.0/
hadoop/common/site/main/publish/docs/r2.5.1/
Modified:
hadoop/common/site/main/publish/.htaccess

Modified: hadoop/common/site/main/publish/.htaccess
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/.htaccess?rev=1677873r1=1677872r2=1677873view=diff
==
--- hadoop/common/site/main/publish/.htaccess (original)
+++ hadoop/common/site/main/publish/.htaccess Tue May  5 18:14:19 2015
@@ -27,3 +27,24 @@ RedirectMatch Permanent ^/zookeeper/(.*)
 
 # Redirect HDFS javadoc links to HDFS user guide
 RedirectMatch Permanent ^/docs/(.+)/api/org/apache/hadoop/hdfs/.* 
http://hadoop.apache.org/docs/$1/hdfs_user_guide.html
+
+#Redirect old versions of the doc
+RedirectMatch Permanent ^/docs/r0.23.6/(.*)$ 
http://hadoop.apache.org/docs/r0.23.11/$1
+RedirectMatch Permanent ^/docs/r0.23.7/(.*)$ 
http://hadoop.apache.org/docs/r0.23.11/$1
+RedirectMatch Permanent ^/docs/r0.23.8/(.*)$ 
http://hadoop.apache.org/docs/r0.23.11/$1
+RedirectMatch Permanent ^/docs/r0.23.9/(.*)$ 
http://hadoop.apache.org/docs/r0.23.11/$1
+RedirectMatch Permanent ^/docs/r0.23.10/(.*)$ 
http://hadoop.apache.org/docs/r0.23.11/$1
+RedirectMatch Permanent ^/docs/r1.1.1/(.*)$ 
http://hadoop.apache.org/docs/r1.2.1/$1
+RedirectMatch Permanent ^/docs/r1.1.2/(.*)$ 
http://hadoop.apache.org/docs/r1.2.1/$1
+RedirectMatch Permanent ^/docs/r1.2.0/(.*)$ 
http://hadoop.apache.org/docs/r1.2.1/$1
+RedirectMatch Permanent ^/docs/r2.0.2-alpha/(.*)$ 
http://hadoop.apache.org/docs/r2.4.1/$1
+RedirectMatch Permanent ^/docs/r2.0.3-alpha/(.*)$ 
http://hadoop.apache.org/docs/r2.4.1/$1
+RedirectMatch Permanent ^/docs/r2.0.4-alpha/(.*)$ 
http://hadoop.apache.org/docs/r2.4.1/$1
+RedirectMatch Permanent ^/docs/r2.0.5-alpha/(.*)$ 
http://hadoop.apache.org/docs/r2.4.1/$1
+RedirectMatch Permanent ^/docs/r2.0.6-alpha/(.*)$ 
http://hadoop.apache.org/docs/r2.4.1/$1
+RedirectMatch Permanent ^/docs/r2.1.0-beta/(.*)$ 
http://hadoop.apache.org/docs/r2.4.1/$1
+RedirectMatch Permanent ^/docs/r2.2.0/(.*)$ 
http://hadoop.apache.org/docs/r2.4.1/$1
+RedirectMatch Permanent ^/docs/r2.3.0/(.*)$ 
http://hadoop.apache.org/docs/r2.4.1/$1
+RedirectMatch Permanent ^/docs/r2.4.0/(.*)$ 
http://hadoop.apache.org/docs/r2.4.1/$1
+RedirectMatch Permanent ^/docs/r2.5.0/(.*)$ 
http://hadoop.apache.org/docs/r2.5.2/$1
+RedirectMatch Permanent ^/docs/r2.5.1/(.*)$ 
http://hadoop.apache.org/docs/r2.5.2/$1




hadoop git commit: HDFS-8305: HDFS INotify: the destination field of RenameOp should always end with the file name (cmccabe)

2015-05-05 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/trunk b7dd3a4f0 - fcd4cb751


HDFS-8305: HDFS INotify: the destination field of RenameOp should always end 
with the file name (cmccabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fcd4cb75
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fcd4cb75
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fcd4cb75

Branch: refs/heads/trunk
Commit: fcd4cb751665adb241081e42b3403c3856b6c6fe
Parents: b7dd3a4
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Tue May 5 10:50:09 2015 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Tue May 5 10:50:09 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt  |  3 +++
 .../hadoop/hdfs/server/namenode/FSDirRenameOp.java   |  2 +-
 .../apache/hadoop/hdfs/server/namenode/FSEditLog.java| 10 +++---
 .../hadoop/hdfs/TestDFSInotifyEventInputStream.java  | 11 +++
 4 files changed, 22 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fcd4cb75/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index cd9b7b8..08ab7e7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -682,6 +682,9 @@ Release 2.7.1 - UNRELEASED
 HDFS-8091: ACLStatus and XAttributes should be presented to
 INodeAttributesProvider before returning to client (asuresh)
 
+HDFS-8305: HDFS INotify: the destination field of RenameOp should always
+end with the file name (cmccabe)
+
 Release 2.7.0 - 2015-04-20
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fcd4cb75/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
index c57cae2..4a20a62 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java
@@ -476,7 +476,7 @@ class FSDirRenameOp {
   fsd.writeUnlock();
 }
 if (stat) {
-  fsd.getEditLog().logRename(src, dst, mtime, logRetryCache);
+  fsd.getEditLog().logRename(src, actualDst, mtime, logRetryCache);
   return true;
 }
 return false;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fcd4cb75/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
index bda827a..28e150c 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLog.java
@@ -815,7 +815,9 @@ public class FSEditLog implements LogsPurgeable {
   }
   
   /** 
-   * Add rename record to edit log
+   * Add rename record to edit log.
+   *
+   * The destination should be the file name, not the destination directory.
* TODO: use String parameters until just before writing to disk
*/
   void logRename(String src, String dst, long timestamp, boolean toLogRpcIds) {
@@ -826,9 +828,11 @@ public class FSEditLog implements LogsPurgeable {
 logRpcIds(op, toLogRpcIds);
 logEdit(op);
   }
-  
+
   /** 
-   * Add rename record to edit log
+   * Add rename record to edit log.
+   *
+   * The destination should be the file name, not the destination directory.
*/
   void logRename(String src, String dst, long timestamp, boolean toLogRpcIds,
   Options.Rename... options) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fcd4cb75/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSInotifyEventInputStream.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSInotifyEventInputStream.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSInotifyEventInputStream.java
index 6e91e06..ba33bd3 100644
--- 

hadoop git commit: HADOOP-11917. test-patch.sh should work with ${BASEDIR}/patchprocess setups (aw)

2015-05-05 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 1f6bcf94c - 50a5d0b62


HADOOP-11917. test-patch.sh should work with ${BASEDIR}/patchprocess setups (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/50a5d0b6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/50a5d0b6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/50a5d0b6

Branch: refs/heads/branch-2
Commit: 50a5d0b62b88a24f2db36d097fdd28d972a07dfe
Parents: 1f6bcf9
Author: Allen Wittenauer a...@apache.org
Authored: Tue May 5 11:28:16 2015 -0700
Committer: Allen Wittenauer a...@apache.org
Committed: Tue May 5 11:28:16 2015 -0700

--
 .gitignore  |  1 +
 dev-support/test-patch.sh   | 78 +---
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +
 pom.xml | 21 +++---
 4 files changed, 83 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/50a5d0b6/.gitignore
--
diff --git a/.gitignore b/.gitignore
index 15c040c..e6a05de 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,3 +22,4 @@ 
hadoop-tools/hadoop-openstack/src/test/resources/contract-test-options.xml
 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/main/tla/yarnregistry.toolbox
 yarnregistry.pdf
 hadoop-tools/hadoop-aws/src/test/resources/contract-test-options.xml
+patchprocess/

http://git-wip-us.apache.org/repos/asf/hadoop/blob/50a5d0b6/dev-support/test-patch.sh
--
diff --git a/dev-support/test-patch.sh b/dev-support/test-patch.sh
index 9f48c64..3759e9f 100755
--- a/dev-support/test-patch.sh
+++ b/dev-support/test-patch.sh
@@ -535,6 +535,26 @@ function echo_and_redirect
  "${@}" > "${logfile}" 2>&1
 }
 
+## @description is PATCH_DIR relative to BASEDIR?
+## @audiencepublic
+## @stability   stable
+## @replaceable yes
+## @returns 1 - no, PATCH_DIR
+## @returns 0 - yes, PATCH_DIR - BASEDIR
+function relative_patchdir
+{
+  local p=${PATCH_DIR#${BASEDIR}}
+
+  if [[ ${#p} -eq ${#PATCH_DIR} ]]; then
+echo ${p}
+return 1
+  fi
+  p=${p#/}
+  echo ${p}
+  return 0
+}
+
+
 ## @description  Print the usage information
 ## @audience public
 ## @stabilitystable
@@ -697,7 +717,8 @@ function parse_args
 esac
   done
 
-  # if we get a relative path, turn it absolute
+  # we need absolute dir for ${BASEDIR}
+  cd ${CWD}
  BASEDIR=$(cd -P -- "${BASEDIR}" >/dev/null && pwd -P)
 
   if [[ ${BUILD_NATIVE} == true ]] ; then
@@ -723,6 +744,7 @@ function parse_args
 JENKINS=false
   fi
 
+  cd ${CWD}
   if [[ ! -d ${PATCH_DIR} ]]; then
 mkdir -p ${PATCH_DIR}
 if [[ $? == 0 ]] ; then
@@ -733,6 +755,9 @@ function parse_args
 fi
   fi
 
+  # we need absolute dir for PATCH_DIR
+  PATCH_DIR=$(cd -P -- "${PATCH_DIR}" >/dev/null && pwd -P)
+
   GITDIFFLINES=${PATCH_DIR}/gitdifflines.txt
 }
 
@@ -821,17 +846,36 @@ function find_changed_modules
 function git_checkout
 {
   local currentbranch
+  local exemptdir
 
   big_console_header Confirming git environment
 
+  cd ${BASEDIR}
+  if [[ ! -d .git ]]; then
+hadoop_error ERROR: ${BASEDIR} is not a git repo.
+cleanup_and_exit 1
+  fi
+
   if [[ ${RESETREPO} == true ]] ; then
-cd ${BASEDIR}
 ${GIT} reset --hard
 if [[ $? != 0 ]]; then
   hadoop_error ERROR: git reset is failing
   cleanup_and_exit 1
 fi
-${GIT} clean -xdf
+
+# if PATCH_DIR is in BASEDIR, then we don't want
+# git wiping it out.
+exemptdir=$(relative_patchdir)
+if [[ $? == 1 ]]; then
+  ${GIT} clean -xdf
+else
+  # we do, however, want it emptied of all _files_.
+  # we need to leave _directories_ in case we are in
+  # re-exec mode (which places a directory full of stuff in it)
+  hadoop_debug Exempting ${exemptdir} from clean
+  rm "${PATCH_DIR}"/* 2>/dev/null
+  ${GIT} clean -xdf -e ${exemptdir}
+fi
 if [[ $? != 0 ]]; then
   hadoop_error ERROR: git clean is failing
   cleanup_and_exit 1
@@ -875,11 +919,6 @@ function git_checkout
 fi
 
   else
-cd ${BASEDIR}
-if [[ ! -d .git ]]; then
-  hadoop_error ERROR: ${BASEDIR} is not a git repo.
-  cleanup_and_exit 1
-fi
 
 status=$(${GIT} status --porcelain)
    if [[ "${status}" != "" && -z ${DIRTY_WORKSPACE} ]] ; then
@@ -1000,6 +1039,16 @@ function verify_valid_branch
   local check=$2
   local i
 
+  # shortcut some common
+  # non-resolvable names
+  if [[ -z ${check} ]]; then
+return 1
+  fi
+
+  if [[ ${check} == patch ]]; then
+return 1
+  fi
+
   if [[ ${check} =~ ^git ]]; then
 ref=$(echo ${check} | cut -f2 -dt)
 count=$(echo ${ref} | wc -c | tr -d ' ')
@@ -2207,9 +2256,16 

hadoop git commit: HDFS-7847. Modify NNThroughputBenchmark to be able to operate on a remote NameNode (Charles Lamb via Colin P. McCabe)

2015-05-05 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/trunk e4c3b52c8 - ffce9a341


HDFS-7847. Modify NNThroughputBenchmark to be able to operate on a remote 
NameNode (Charles Lamb via Colin P. McCabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ffce9a34
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ffce9a34
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ffce9a34

Branch: refs/heads/trunk
Commit: ffce9a3413277a69444fcb890460c885de56db69
Parents: e4c3b52
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Tue May 5 11:27:36 2015 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Tue May 5 11:34:58 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |   3 +
 .../org/apache/hadoop/hdfs/DFSTestUtil.java |  40 ++
 .../server/namenode/NNThroughputBenchmark.java  | 136 +--
 3 files changed, 137 insertions(+), 42 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ffce9a34/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index c89e6fe..01de9b1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -510,6 +510,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-7758. Retire FsDatasetSpi#getVolumes() and use
 FsDatasetSpi#getVolumeRefs() instead (Lei (Eddy) Xu via Colin P. McCabe)
 
+HDFS-7847. Modify NNThroughputBenchmark to be able to operate on a remote
+NameNode (Charles Lamb via Colin P. McCabe)
+
   OPTIMIZATIONS
 
 HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ffce9a34/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
index a8df991..cfee997 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/DFSTestUtil.java
@@ -48,6 +48,7 @@ import java.lang.reflect.Modifier;
 import java.net.HttpURLConnection;
 import java.net.InetSocketAddress;
 import java.net.Socket;
+import java.net.URI;
 import java.net.URL;
 import java.net.URLConnection;
 import java.nio.ByteBuffer;
@@ -64,6 +65,7 @@ import java.util.Random;
 import java.util.Set;
 import java.util.UUID;
 import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
@@ -129,12 +131,14 @@ import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import 
org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
+import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.nativeio.NativeIO;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.net.unix.DomainSocket;
 import org.apache.hadoop.net.unix.TemporarySocketDirectory;
+import org.apache.hadoop.security.RefreshUserMappingsProtocol;
 import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
@@ -147,6 +151,7 @@ import org.apache.log4j.Level;
 import org.junit.Assume;
 import org.mockito.internal.util.reflection.Whitebox;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Charsets;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
@@ -1756,6 +1761,41 @@ public class DFSTestUtil {
   }
 
   /**
+   * Get the NamenodeProtocol RPC proxy for the NN associated with this
+   * DFSClient object
+   *
+   * @param nameNodeUri the URI of the NN to get a proxy for.
+   *
+   * @return the Namenode RPC proxy associated with this DFSClient object
+   */
+  @VisibleForTesting
+  public static NamenodeProtocol getNamenodeProtocolProxy(Configuration conf,
+  URI nameNodeUri, UserGroupInformation ugi)
+  throws IOException {
+return NameNodeProxies.createNonHAProxy(conf,
+NameNode.getAddress(nameNodeUri), NamenodeProtocol.class, ugi, false).
+getProxy();
+  }
+
+  /**
+   * Get the RefreshUserMappingsProtocol RPC proxy 

hadoop git commit: YARN-3343. Increased TestCapacitySchedulerNodeLabelUpdate#testNodeUpdate timeout. Contributed by Rohith Sharmaks (cherry picked from commit e4c3b52c896291012f869ebc0a21e85e643fadd1)

2015-05-05 Thread jianhe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 50a5d0b62 -> eb0c6d2ee


YARN-3343. Increased TestCapacitySchedulerNodeLabelUpdate#testNodeUpdate 
timeout. Contributed by Rohith Sharmaks
(cherry picked from commit e4c3b52c896291012f869ebc0a21e85e643fadd1)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/eb0c6d2e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/eb0c6d2e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/eb0c6d2e

Branch: refs/heads/branch-2
Commit: eb0c6d2ee7e53f3ee0fb768f14d60d3e87ba3a1d
Parents: 50a5d0b
Author: Jian He jia...@apache.org
Authored: Tue May 5 11:33:47 2015 -0700
Committer: Jian He jia...@apache.org
Committed: Tue May 5 11:34:11 2015 -0700

--
 hadoop-yarn-project/CHANGES.txt   | 3 +++
 .../scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java  | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/eb0c6d2e/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 3193681..b6e3b86 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -268,6 +268,9 @@ Release 2.8.0 - UNRELEASED
 YARN-2123. Progress bars in Web UI always at 100% due to non-US locale.
 (Akira AJISAKA via xgong)
 
+YARN-3343. Increased TestCapacitySchedulerNodeLabelUpdate#testNodeUpdate
+timeout. (Rohith Sharmaks via jianhe)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eb0c6d2e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java
index c5439d8..e60e496 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java
@@ -95,7 +95,7 @@ public class TestCapacitySchedulerNodeLabelUpdate {
 .getMemory());
   }
 
-  @Test (timeout = 30000)
+  @Test (timeout = 60000)
   public void testNodeUpdate() throws Exception {
 // set node - label
 mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y", 
"z"));



hadoop git commit: YARN-2123. Progress bars in Web UI always at 100% due to non-US locale. Contributed by Akira AJISAKA

2015-05-05 Thread xgong
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 02892f674 -> 02cdcaec5


YARN-2123. Progress bars in Web UI always at 100% due to non-US locale.
Contributed by Akira AJISAKA

(cherry picked from commit b7dd3a4f04f712b7594c4e6e7ce50fd314f7c342)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/02cdcaec
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/02cdcaec
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/02cdcaec

Branch: refs/heads/branch-2
Commit: 02cdcaec586f157a90dbc5a2435610313279b075
Parents: 02892f6
Author: Xuan xg...@apache.org
Authored: Tue May 5 10:40:16 2015 -0700
Committer: Xuan xg...@apache.org
Committed: Tue May 5 10:45:18 2015 -0700

--
 .../org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java | 4 ++--
 .../apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java   | 4 ++--
 .../apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java  | 9 +
 hadoop-yarn-project/CHANGES.txt | 3 +++
 .../main/java/org/apache/hadoop/yarn/util/StringHelper.java | 5 -
 .../org/apache/hadoop/yarn/server/webapp/AppsBlock.java | 3 ++-
 .../server/resourcemanager/resource/ResourceWeights.java| 2 +-
 .../resourcemanager/webapp/CapacitySchedulerPage.java   | 9 +
 .../server/resourcemanager/webapp/DefaultSchedulerPage.java | 7 ---
 .../resourcemanager/webapp/FairSchedulerAppsBlock.java  | 3 ++-
 .../server/resourcemanager/webapp/FairSchedulerPage.java| 7 ---
 .../yarn/server/resourcemanager/webapp/RMAppsBlock.java | 3 ++-
 12 files changed, 32 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/02cdcaec/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
index 8aa8bb6..e293fd2 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
-import static org.apache.hadoop.yarn.util.StringHelper.percent;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
@@ -31,6 +30,7 @@ import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.webapp.SubView;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
@@ -77,7 +77,7 @@ public class TaskPage extends AppView {
 
   for (TaskAttempt attempt : getTaskAttempts()) {
 TaskAttemptInfo ta = new TaskAttemptInfo(attempt, true);
-String progress = percent(ta.getProgress() / 100);
+String progress = StringUtils.formatPercent(ta.getProgress() / 100, 2);
 
 String nodeHttpAddr = ta.getNode();
 String diag = ta.getNote() == null ? "" : ta.getNote();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/02cdcaec/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
index 64aae59..7c1aa49 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.mapreduce.v2.app.webapp;
 import 

hadoop git commit: HDFS-7758. Retire FsDatasetSpi#getVolumes() and use FsDatasetSpi#getVolumeRefs() instead (Lei (Eddy) Xu via Colin P. McCabe)

2015-05-05 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/trunk 3ff91e9e9 -> 24d3a2d4f


HDFS-7758. Retire FsDatasetSpi#getVolumes() and use 
FsDatasetSpi#getVolumeRefs() instead (Lei (Eddy) Xu via Colin P. McCabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/24d3a2d4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/24d3a2d4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/24d3a2d4

Branch: refs/heads/trunk
Commit: 24d3a2d4fdd836ac9a5bc755a7fb9354f7a582b1
Parents: 3ff91e9
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Tue May 5 10:55:04 2015 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Tue May 5 11:08:59 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |   3 +
 .../hdfs/server/datanode/DirectoryScanner.java  |  69 +-
 .../server/datanode/fsdataset/FsDatasetSpi.java |  99 +-
 .../datanode/fsdataset/FsVolumeReference.java   |  13 +-
 .../datanode/fsdataset/impl/FsDatasetImpl.java  |  29 ++---
 .../datanode/fsdataset/impl/FsVolumeImpl.java   |   7 +-
 .../datanode/fsdataset/impl/FsVolumeList.java   |   7 +-
 .../org/apache/hadoop/hdfs/MiniDFSCluster.java  |  23 ++--
 .../hdfs/MiniDFSClusterWithNodeGroup.java   |  17 +--
 .../TestNameNodePrunesMissingStorages.java  |  18 +--
 .../server/datanode/SimulatedFSDataset.java |   2 +-
 .../TestBlockHasMultipleReplicasOnSameDN.java   |  14 +-
 .../hdfs/server/datanode/TestBlockScanner.java  |   8 +-
 .../datanode/TestDataNodeHotSwapVolumes.java|  21 ++-
 .../datanode/TestDataNodeVolumeFailure.java |   9 +-
 .../server/datanode/TestDirectoryScanner.java   | 130 +++
 .../hdfs/server/datanode/TestDiskError.java |  12 +-
 .../datanode/TestIncrementalBlockReports.java   |   6 +-
 .../datanode/TestIncrementalBrVariations.java   |  81 ++--
 .../server/datanode/TestTriggerBlockReport.java |   8 +-
 .../extdataset/ExternalDatasetImpl.java |   2 +-
 .../fsdataset/impl/LazyPersistTestCase.java |  72 +-
 .../fsdataset/impl/TestDatanodeRestart.java |  20 ++-
 .../fsdataset/impl/TestFsDatasetImpl.java   |  39 --
 .../fsdataset/impl/TestFsVolumeList.java|   9 +-
 .../fsdataset/impl/TestRbwSpaceReservation.java |  43 +++---
 .../fsdataset/impl/TestWriteToReplica.java  |  70 ++
 .../hdfs/server/mover/TestStorageMover.java |  19 ++-
 28 files changed, 515 insertions(+), 335 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d3a2d4/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 08ab7e7..c89e6fe 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -507,6 +507,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-8237. Move all protocol classes used by ClientProtocol to hdfs-client.
 (wheat9)
 
+HDFS-7758. Retire FsDatasetSpi#getVolumes() and use
+FsDatasetSpi#getVolumeRefs() instead (Lei (Eddy) Xu via Colin P. McCabe)
+
   OPTIMIZATIONS
 
 HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d3a2d4/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
index 62885a9..8453094 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
@@ -527,59 +527,48 @@ public class DirectoryScanner implements Runnable {
 diffRecord.add(new ScanInfo(blockId, null, null, vol));
   }
 
-  /** Is the given volume still valid in the dataset? */
-  private static boolean isValid(final FsDatasetSpi<?> dataset,
-  final FsVolumeSpi volume) {
-for (FsVolumeSpi vol : dataset.getVolumes()) {
-  if (vol == volume) {
-return true;
-  }
-}
-return false;
-  }
-
   /** Get lists of blocks on the disk sorted by blockId, per blockpool */
   private Map<String, ScanInfo[]> getDiskReport() {
+ScanInfoPerBlockPool list = new ScanInfoPerBlockPool();
+ScanInfoPerBlockPool[] dirReports = null;
 // First get list of data directories
-final List<? extends FsVolumeSpi> volumes = dataset.getVolumes();
+try (FsDatasetSpi.FsVolumeReferences volumes =
+ 

hadoop git commit: HDFS-7758. Retire FsDatasetSpi#getVolumes() and use FsDatasetSpi#getVolumeRefs() instead (Lei (Eddy) Xu via Colin P. McCabe)

2015-05-05 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 1f01d8347 -> 1f6bcf94c


HDFS-7758. Retire FsDatasetSpi#getVolumes() and use 
FsDatasetSpi#getVolumeRefs() instead (Lei (Eddy) Xu via Colin P. McCabe)

(cherry picked from commit 24d3a2d4fdd836ac9a5bc755a7fb9354f7a582b1)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1f6bcf94
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1f6bcf94
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1f6bcf94

Branch: refs/heads/branch-2
Commit: 1f6bcf94ccbdac5b4099d28d4623eb03c4dd7316
Parents: 1f01d83
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Tue May 5 10:55:04 2015 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Tue May 5 11:09:28 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |   3 +
 .../hdfs/server/datanode/DirectoryScanner.java  |  69 +-
 .../server/datanode/fsdataset/FsDatasetSpi.java |  99 +-
 .../datanode/fsdataset/FsVolumeReference.java   |  13 +-
 .../datanode/fsdataset/impl/FsDatasetImpl.java  |  29 ++---
 .../datanode/fsdataset/impl/FsVolumeImpl.java   |   7 +-
 .../datanode/fsdataset/impl/FsVolumeList.java   |   7 +-
 .../org/apache/hadoop/hdfs/MiniDFSCluster.java  |  23 ++--
 .../hdfs/MiniDFSClusterWithNodeGroup.java   |  17 +--
 .../TestNameNodePrunesMissingStorages.java  |  18 +--
 .../server/datanode/SimulatedFSDataset.java |   2 +-
 .../TestBlockHasMultipleReplicasOnSameDN.java   |  14 +-
 .../hdfs/server/datanode/TestBlockScanner.java  |   8 +-
 .../datanode/TestDataNodeHotSwapVolumes.java|  21 ++-
 .../datanode/TestDataNodeVolumeFailure.java |   9 +-
 .../server/datanode/TestDirectoryScanner.java   | 130 +++
 .../hdfs/server/datanode/TestDiskError.java |  12 +-
 .../datanode/TestIncrementalBlockReports.java   |   6 +-
 .../datanode/TestIncrementalBrVariations.java   |  81 ++--
 .../server/datanode/TestTriggerBlockReport.java |   8 +-
 .../extdataset/ExternalDatasetImpl.java |   2 +-
 .../fsdataset/impl/LazyPersistTestCase.java |  72 +-
 .../fsdataset/impl/TestDatanodeRestart.java |  20 ++-
 .../fsdataset/impl/TestFsDatasetImpl.java   |  39 --
 .../fsdataset/impl/TestFsVolumeList.java|   9 +-
 .../fsdataset/impl/TestRbwSpaceReservation.java |  43 +++---
 .../fsdataset/impl/TestWriteToReplica.java  |  70 ++
 .../hdfs/server/mover/TestStorageMover.java |  19 ++-
 28 files changed, 515 insertions(+), 335 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1f6bcf94/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index d75ad50..75d0871 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -180,6 +180,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-8237. Move all protocol classes used by ClientProtocol to hdfs-client.
 (wheat9)
 
+HDFS-7758. Retire FsDatasetSpi#getVolumes() and use
+FsDatasetSpi#getVolumeRefs() instead (Lei (Eddy) Xu via Colin P. McCabe)
+
   OPTIMIZATIONS
 
 HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1f6bcf94/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
index 62885a9..8453094 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
@@ -527,59 +527,48 @@ public class DirectoryScanner implements Runnable {
 diffRecord.add(new ScanInfo(blockId, null, null, vol));
   }
 
-  /** Is the given volume still valid in the dataset? */
-  private static boolean isValid(final FsDatasetSpi<?> dataset,
-  final FsVolumeSpi volume) {
-for (FsVolumeSpi vol : dataset.getVolumes()) {
-  if (vol == volume) {
-return true;
-  }
-}
-return false;
-  }
-
   /** Get lists of blocks on the disk sorted by blockId, per blockpool */
   private Map<String, ScanInfo[]> getDiskReport() {
+ScanInfoPerBlockPool list = new ScanInfoPerBlockPool();
+ScanInfoPerBlockPool[] dirReports = null;
 // First get list of data directories
-final List<? extends FsVolumeSpi> volumes = 

hadoop git commit: YARN-3343. Increased TestCapacitySchedulerNodeLabelUpdate#testNodeUpdate timeout. Contributed by Rohith Sharmaks

2015-05-05 Thread jianhe
Repository: hadoop
Updated Branches:
  refs/heads/trunk d33419ae0 -> e4c3b52c8


YARN-3343. Increased TestCapacitySchedulerNodeLabelUpdate#testNodeUpdate 
timeout. Contributed by Rohith Sharmaks


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e4c3b52c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e4c3b52c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e4c3b52c

Branch: refs/heads/trunk
Commit: e4c3b52c896291012f869ebc0a21e85e643fadd1
Parents: d33419a
Author: Jian He jia...@apache.org
Authored: Tue May 5 11:33:47 2015 -0700
Committer: Jian He jia...@apache.org
Committed: Tue May 5 11:33:47 2015 -0700

--
 hadoop-yarn-project/CHANGES.txt   | 3 +++
 .../scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java  | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e4c3b52c/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 6dac3c8..00dd205 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -313,6 +313,9 @@ Release 2.8.0 - UNRELEASED
 YARN-2123. Progress bars in Web UI always at 100% due to non-US locale.
 (Akira AJISAKA via xgong)
 
+YARN-3343. Increased TestCapacitySchedulerNodeLabelUpdate#testNodeUpdate
+timeout. (Rohith Sharmaks via jianhe)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e4c3b52c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java
index c5439d8..e60e496 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacitySchedulerNodeLabelUpdate.java
@@ -95,7 +95,7 @@ public class TestCapacitySchedulerNodeLabelUpdate {
 .getMemory());
   }
 
-  @Test (timeout = 30000)
+  @Test (timeout = 60000)
   public void testNodeUpdate() throws Exception {
 // set node - label
 mgr.addToCluserNodeLabelsWithDefaultExclusivity(ImmutableSet.of("x", "y", 
"z"));



hadoop git commit: HADOOP-11925. backport trunk's smart-apply-patch.sh to branch-2 (aw)

2015-05-05 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 e68e8b3b5 -> f95fd5b6d


HADOOP-11925. backport trunk's smart-apply-patch.sh to branch-2 (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f95fd5b6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f95fd5b6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f95fd5b6

Branch: refs/heads/branch-2
Commit: f95fd5b6df01dd1fea2e137da816da8dbd3101ad
Parents: e68e8b3
Author: Allen Wittenauer a...@apache.org
Authored: Tue May 5 14:11:08 2015 -0700
Committer: Allen Wittenauer a...@apache.org
Committed: Tue May 5 14:11:08 2015 -0700

--
 dev-support/smart-apply-patch.sh| 96 ++--
 hadoop-common-project/hadoop-common/CHANGES.txt |  2 +
 2 files changed, 91 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f95fd5b6/dev-support/smart-apply-patch.sh
--
diff --git a/dev-support/smart-apply-patch.sh b/dev-support/smart-apply-patch.sh
index f91745f..449fc22 100755
--- a/dev-support/smart-apply-patch.sh
+++ b/dev-support/smart-apply-patch.sh
@@ -11,14 +11,48 @@
 #   See the License for the specific language governing permissions and
 #   limitations under the License.
 
-set -e
+#
+# Determine if the patch file is a git diff file with prefixes.
+# These files are generated via git diff *without* the --no-prefix option.
+#
+# We can apply these patches more easily because we know that the a/ and b/
+# prefixes in the diff lines stands for the project root directory.
+# So we don't have to hunt for the project root.
+# And of course, we know that the patch file was generated using git, so we
+# know git apply can handle it properly.
+#
+# Arguments: file name.
+# Return: 0 if it is a git diff; 1 otherwise.
+#
+is_git_diff_with_prefix() {
+  DIFF_TYPE=unknown
+  while read -r line; do
+if [[ $line =~ ^diff\  ]]; then
+  if [[ $line =~ ^diff\ \-\-git ]]; then
+DIFF_TYPE=git
+  else
+return 1 # All diff lines must be diff --git lines.
+  fi
+fi
+if [[ $line =~ ^\+\+\+\  ]] ||
+   [[ $line =~ ^\-\-\-\  ]]; then
+  if ! [[ $line =~ ^[ab]/ || $line =~ ^/dev/null ]]; then
+return 1 # All +++ and --- lines must start with a/ or b/ or be 
/dev/null.
+  fi
+fi
+  done  $1
+  [ x$DIFF_TYPE == xgit ] || return 1
+  return 0 # return true (= 0 in bash)
+}
 
 PATCH_FILE=$1
+DRY_RUN=$2
 if [ -z $PATCH_FILE ]; then
   echo usage: $0 patch-file
   exit 1
 fi
 
+TMPDIR=${TMPDIR:-/tmp}
 PATCH=${PATCH:-patch} # allow overriding patch binary
 
 # Cleanup handler for temporary files
@@ -31,13 +65,56 @@ trap cleanup 1 HUP INT QUIT TERM
 
 # Allow passing - for stdin patches
 if [ $PATCH_FILE == - ]; then
-  PATCH_FILE=/tmp/tmp.in.$$
+  PATCH_FILE=$TMPDIR/smart-apply.in.$RANDOM
   cat /dev/fd/0  $PATCH_FILE
   TOCLEAN=$TOCLEAN $PATCH_FILE
 fi
 
+ISSUE_RE='^(HADOOP|YARN|MAPREDUCE|HDFS)-[0-9]+$'
+if [[ ${PATCH_FILE} =~ ^http || ${PATCH_FILE} =~ ${ISSUE_RE} ]]; then
+  # Allow downloading of patches
+  PFILE=$TMPDIR/smart-apply.in.$RANDOM
+  TOCLEAN=$TOCLEAN $PFILE
+  if [[ ${PATCH_FILE} =~ ^http ]]; then
+patchURL=${PATCH_FILE}
+  else # Get URL of patch from JIRA
+wget -q -O ${PFILE} "http://issues.apache.org/jira/browse/${PATCH_FILE}"
+if [[ $? != 0 ]]; then
+  echo "Unable to determine what ${PATCH_FILE} may reference." 1>&2
+  cleanup 1
+elif [[ $(grep -c 'Patch Available' ${PFILE}) == 0 ]]; then
+  echo "${PATCH_FILE} is not \"Patch Available\".  Exiting." 1>&2
+  cleanup 1
+fi
+relativePatchURL=$(grep -o '/jira/secure/attachment/[0-9]*/[^]*' 
${PFILE} | grep -v -e 'htm[l]*$' | sort | tail -1 | grep -o 
'/jira/secure/attachment/[0-9]*/[^]*')
+patchURL="http://issues.apache.org${relativePatchURL}"
+  fi
+  if [[ -n $DRY_RUN ]]; then
+echo Downloading ${patchURL}
+  fi
+  wget -q -O ${PFILE} ${patchURL}
+  if [[ $? != 0 ]]; then
+echo "${PATCH_FILE} could not be downloaded." 1>&2
+cleanup 1
+  fi
+  PATCH_FILE=${PFILE}
+fi
+
+# Special case for git-diff patches without --no-prefix
+if is_git_diff_with_prefix $PATCH_FILE; then
+  GIT_FLAGS=--binary -p1 -v
+  if [[ -z $DRY_RUN ]]; then
+  GIT_FLAGS=$GIT_FLAGS --stat --apply 
+  echo Going to apply git patch with: git apply ${GIT_FLAGS}
+  else
+  GIT_FLAGS=$GIT_FLAGS --check 
+  fi
+  git apply ${GIT_FLAGS} ${PATCH_FILE}
+  exit $?
+fi
+
 # Come up with a list of changed files into $TMP
-TMP=/tmp/tmp.paths.$$
+TMP=$TMPDIR/smart-apply.paths.$RANDOM
 TOCLEAN=$TOCLEAN $TMP
 
 if $PATCH -p0 -E --dry-run  $PATCH_FILE 21  $TMP; then
@@ -46,10 +123,10 @@ if $PATCH -p0 -E --dry-run  $PATCH_FILE 21  $TMP; then
   # is adding new files and they would apply anywhere. So try to 

hadoop git commit: HADOOP-11898. add nfs3 and portmap starting command in hadoop-daemon.sh in branch-2. Contributed by Brandon Li

2015-05-05 Thread brandonli
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 f95fd5b6d -> a9d12128f


HADOOP-11898. add nfs3 and portmap starting command in hadoop-daemon.sh in 
branch-2. Contributed by Brandon Li


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a9d12128
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a9d12128
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a9d12128

Branch: refs/heads/branch-2
Commit: a9d12128f320362c5a77d2269a3199e79db9d845
Parents: f95fd5b
Author: Brandon Li brando...@apache.org
Authored: Tue May 5 14:23:13 2015 -0700
Committer: Brandon Li brando...@apache.org
Committed: Tue May 5 14:23:13 2015 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt   | 3 +++
 hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a9d12128/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index c2e5fcb..feb68ae 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -157,6 +157,9 @@ Release 2.8.0 - UNRELEASED
 HADOOP-11904. test-patch.sh goes into an infinite loop on non-maven
 builds (aw)
 
+HADOOP-11898. add nfs3 and portmap starting command in hadoop-daemon.sh
+in branch-2 (brandonli)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a9d12128/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
--
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh 
b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
index 6a4cd69..d2ef1cf 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
@@ -150,7 +150,7 @@ case $startStop in
 echo starting $command, logging to $log
 cd $HADOOP_PREFIX
 case $command in
-  
namenode|secondarynamenode|datanode|journalnode|dfs|dfsadmin|fsck|balancer|zkfc)
+  
namenode|secondarynamenode|datanode|journalnode|dfs|dfsadmin|fsck|balancer|zkfc|portmap|nfs3)
 if [ -z $HADOOP_HDFS_HOME ]; then
   hdfsScript=$HADOOP_PREFIX/bin/hdfs
 else



hadoop git commit: MAPREDUCE-6192. Create unit test to automatically compare MR related classes and mapred-default.xml (rchiang via rkanter)

2015-05-05 Thread rkanter
Repository: hadoop
Updated Branches:
  refs/heads/trunk 0100b1550 -> 9809a16d3


MAPREDUCE-6192. Create unit test to automatically compare MR related classes 
and mapred-default.xml (rchiang via rkanter)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9809a16d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9809a16d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9809a16d

Branch: refs/heads/trunk
Commit: 9809a16d3c8068beccbf0106e99c7ede6ba11e0f
Parents: 0100b15
Author: Robert Kanter rkan...@apache.org
Authored: Mon May 4 17:48:10 2015 -0700
Committer: Robert Kanter rkan...@apache.org
Committed: Tue May 5 14:43:28 2015 -0700

--
 .../conf/TestConfigurationFieldsBase.java   | 58 ++-
 hadoop-mapreduce-project/CHANGES.txt|  3 +
 .../mapred/TestMapreduceConfigFields.java   | 76 
 3 files changed, 135 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9809a16d/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
index c3fe3a3..2e4d8b1 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
@@ -147,6 +147,12 @@ public abstract class TestConfigurationFieldsBase {
   private Set<String> xmlFieldsMissingInConfiguration = null;
 
   /**
+   * Member variable for debugging base class operation
+   */
+  protected boolean configDebug = false;
+  protected boolean xmlDebug = false;
+
+  /**
* Abstract method to be used by subclasses for initializing base
* members.
*/
@@ -168,13 +174,16 @@ public abstract class TestConfigurationFieldsBase {
 HashMap<String,String> retVal = new HashMap<String,String>();
 
 // Setup regexp for valid properties
-String propRegex = "^[A-Za-z_-]+(\\.[A-Za-z_-]+)+$";
+String propRegex = "^[A-Za-z][A-Za-z0-9_-]+(\\.[A-Za-z0-9_-]+)+$";
 Pattern p = Pattern.compile(propRegex);
 
 // Iterate through class member variables
 int totalFields = 0;
 String value;
 for (Field f : fields) {
+  if (configDebug) {
+System.out.println("Field: " + f);
+  }
   // Filter out anything that isn't public static final
   if (!Modifier.isStatic(f.getModifiers()) ||
   !Modifier.isPublic(f.getModifiers()) ||
@@ -192,6 +201,9 @@ public abstract class TestConfigurationFieldsBase {
   } catch (IllegalAccessException iaException) {
 continue;
   }
+  if (configDebug) {
+System.out.println("  Value: " + value);
+  }
   // Special Case: Detect and ignore partial properties (ending in x)
   //   or file properties (ending in .xml)
   if (value.endsWith(".xml") ||
@@ -221,11 +233,23 @@ public abstract class TestConfigurationFieldsBase {
   //  something like: blah.blah2(.blah3.blah4...)
   Matcher m = p.matcher(value);
   if (!m.find()) {
+if (configDebug) {
+  System.out.println("  Passes Regex: false");
+}
 continue;
   }
+  if (configDebug) {
+System.out.println("  Passes Regex: true");
+  }
 
   // Save member variable/value as hash
-  retVal.put(value,f.getName());
+  if (!retVal.containsKey(value)) {
+retVal.put(value,f.getName());
+  } else {
+if (configDebug) {
+  System.out.println("ERROR: Already found key for property " + value);
+}
+  }
 }
 
 return retVal;
@@ -256,6 +280,9 @@ public abstract class TestConfigurationFieldsBase {
   // Ignore known xml props
   if (xmlPropsToSkipCompare != null) {
 if (xmlPropsToSkipCompare.contains(key)) {
+  if (xmlDebug) {
+System.out.println("  Skipping Full Key: " + key);
+  }
   continue;
 }
   }
@@ -270,14 +297,23 @@ public abstract class TestConfigurationFieldsBase {
}
   }
   if (skipPrefix) {
+if (xmlDebug) {
+  System.out.println("  Skipping Prefix Key: " + key);
+}
 continue;
   }
   if (conf.onlyKeyExists(key)) {
 retVal.put(key,null);
+if (xmlDebug) {
+System.out.println("  XML Key,Null Value: " + key);
+}
   } else {
 String value = conf.get(key);
 if (value!=null) {
   retVal.put(key,entry.getValue());
+

hadoop git commit: MAPREDUCE-6192. Create unit test to automatically compare MR related classes and mapred-default.xml (rchiang via rkanter)

2015-05-05 Thread rkanter
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 a9d12128f -> bd207b6cc


MAPREDUCE-6192. Create unit test to automatically compare MR related classes 
and mapred-default.xml (rchiang via rkanter)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bd207b6c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bd207b6c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bd207b6c

Branch: refs/heads/branch-2
Commit: bd207b6cc0c223df6b3288c8a0249ee065ac9371
Parents: a9d1212
Author: Robert Kanter rkan...@apache.org
Authored: Tue May 5 14:47:52 2015 -0700
Committer: Robert Kanter rkan...@apache.org
Committed: Tue May 5 14:47:52 2015 -0700

--
 .../conf/TestConfigurationFieldsBase.java   | 58 +-
 hadoop-mapreduce-project/CHANGES.txt|  3 +
 .../mapred/TestMapreduceConfigFields.java   | 83 
 3 files changed, 142 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/bd207b6c/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
index c3fe3a3..2e4d8b1 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java
@@ -147,6 +147,12 @@ public abstract class TestConfigurationFieldsBase {
  private Set<String> xmlFieldsMissingInConfiguration = null;
 
   /**
+   * Member variable for debugging base class operation
+   */
+  protected boolean configDebug = false;
+  protected boolean xmlDebug = false;
+
+  /**
* Abstract method to be used by subclasses for initializing base
* members.
*/
@@ -168,13 +174,16 @@ public abstract class TestConfigurationFieldsBase {
 HashMap<String,String> retVal = new HashMap<String,String>();
 
 // Setup regexp for valid properties
-String propRegex = "^[A-Za-z_-]+(\\.[A-Za-z_-]+)+$";
+String propRegex = "^[A-Za-z][A-Za-z0-9_-]+(\\.[A-Za-z0-9_-]+)+$";
 Pattern p = Pattern.compile(propRegex);
 
 // Iterate through class member variables
 int totalFields = 0;
 String value;
 for (Field f : fields) {
+  if (configDebug) {
+System.out.println("Field: " + f);
+  }
   // Filter out anything that isn't public static final
   if (!Modifier.isStatic(f.getModifiers()) ||
   !Modifier.isPublic(f.getModifiers()) ||
@@ -192,6 +201,9 @@ public abstract class TestConfigurationFieldsBase {
   } catch (IllegalAccessException iaException) {
 continue;
   }
+  if (configDebug) {
+System.out.println("  Value: " + value);
+  }
   // Special Case: Detect and ignore partial properties (ending in x)
   //   or file properties (ending in .xml)
   if (value.endsWith(".xml") ||
@@ -221,11 +233,23 @@ public abstract class TestConfigurationFieldsBase {
   //  something like: blah.blah2(.blah3.blah4...)
   Matcher m = p.matcher(value);
   if (!m.find()) {
+if (configDebug) {
+  System.out.println("  Passes Regex: false");
+}
 continue;
   }
+  if (configDebug) {
+System.out.println("  Passes Regex: true");
+  }
 
   // Save member variable/value as hash
-  retVal.put(value,f.getName());
+  if (!retVal.containsKey(value)) {
+retVal.put(value,f.getName());
+  } else {
+if (configDebug) {
+  System.out.println("ERROR: Already found key for property " + value);
+}
+  }
 }
 
 return retVal;
@@ -256,6 +280,9 @@ public abstract class TestConfigurationFieldsBase {
   // Ignore known xml props
   if (xmlPropsToSkipCompare != null) {
 if (xmlPropsToSkipCompare.contains(key)) {
+  if (xmlDebug) {
+System.out.println("  Skipping Full Key: " + key);
+  }
   continue;
 }
   }
@@ -270,14 +297,23 @@ public abstract class TestConfigurationFieldsBase {
}
   }
   if (skipPrefix) {
+if (xmlDebug) {
+  System.out.println("  Skipping Prefix Key: " + key);
+}
 continue;
   }
   if (conf.onlyKeyExists(key)) {
 retVal.put(key,null);
+if (xmlDebug) {
+System.out.println("  XML Key,Null Value: " + key);
+}
   } else {
 String value = conf.get(key);
 if (value!=null) {
   retVal.put(key,entry.getValue());
+