svn commit: r1662636 - in /hadoop/common/site/main: author/src/documentation/content/xdocs/who.xml publish/who.html publish/who.pdf

2015-02-26 Thread yjzhangal
Author: yjzhangal
Date: Fri Feb 27 05:41:49 2015
New Revision: 1662636

URL: http://svn.apache.org/r1662636
Log:
Fix name order in committer list.

Modified:
hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml
hadoop/common/site/main/publish/who.html
hadoop/common/site/main/publish/who.pdf

Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml?rev=1662636&r1=1662635&r2=1662636&view=diff
==
--- hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml 
(original)
+++ hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml Fri 
Feb 27 05:41:49 2015
@@ -1172,14 +1172,6 @@
     </tr>
 
     <tr>
-      <td>yliu</td>
-      <td><a href="http://people.apache.org/~yliu">Yi Liu</a></td>
-      <td>Intel</td>
-      <td></td>
-      <td>+8</td>
-   </tr>
-
-   <tr>
       <td>yjzhangal</td>
       <td><a href="http://people.apache.org/~yjzhangal">Yongjun Zhang</a></td>
       <td>Cloudera</td>
@@ -1188,6 +1180,14 @@
     </tr>
 
     <tr>
+      <td>yliu</td>
+      <td><a href="http://people.apache.org/~yliu">Yi Liu</a></td>
+      <td>Intel</td>
+      <td></td>
+      <td>+8</td>
+   </tr>
+
+   <tr>
       <td>zjshen</td>
       <td><a href="http://people.apache.org/~zjshen">Zhijie Shen</a></td>
       <td>Hortonworks</td>

Modified: hadoop/common/site/main/publish/who.html
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/who.html?rev=1662636&r1=1662635&r2=1662636&view=diff
==
--- hadoop/common/site/main/publish/who.html (original)
+++ hadoop/common/site/main/publish/who.html Fri Feb 27 05:41:49 2015
@@ -1838,22 +1838,22 @@ document.write("Last Published: " + docu
 
 <tr>
 
-      <td colspan="1" rowspan="1">yliu</td>
-      <td colspan="1" rowspan="1"><a href="http://people.apache.org/~yliu">Yi Liu</a></td>
-      <td colspan="1" rowspan="1">Intel</td>
+      <td colspan="1" rowspan="1">yjzhangal</td>
+      <td colspan="1" rowspan="1"><a href="http://people.apache.org/~yjzhangal">Yongjun Zhang</a></td>
+      <td colspan="1" rowspan="1">Cloudera</td>
       <td colspan="1" rowspan="1"></td>
-      <td colspan="1" rowspan="1">+8</td>
+      <td colspan="1" rowspan="1">-8</td>
 
 </tr>
 
 
 <tr>
 
-      <td colspan="1" rowspan="1">yjzhangal</td>
-      <td colspan="1" rowspan="1"><a href="http://people.apache.org/~yjzhangal">Yongjun Zhang</a></td>
-      <td colspan="1" rowspan="1">Cloudera</td>
+      <td colspan="1" rowspan="1">yliu</td>
+      <td colspan="1" rowspan="1"><a href="http://people.apache.org/~yliu">Yi Liu</a></td>
+      <td colspan="1" rowspan="1">Intel</td>
       <td colspan="1" rowspan="1"></td>
-      <td colspan="1" rowspan="1">-8</td>
+      <td colspan="1" rowspan="1">+8</td>
 
 </tr>
 

Modified: hadoop/common/site/main/publish/who.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/who.pdf?rev=1662636&r1=1662635&r2=1662636&view=diff
==
Binary files - no diff available.




hadoop git commit: YARN-3217. Remove httpclient dependency from hadoop-yarn-server-web-proxy. Contributed by Brahma Reddy Battula.

2015-02-26 Thread ozawa
Repository: hadoop
Updated Branches:
  refs/heads/trunk 0d4296f0e -> 773b6515a


YARN-3217. Remove httpclient dependency from hadoop-yarn-server-web-proxy. 
Contributed by Brahma Reddy Battula.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/773b6515
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/773b6515
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/773b6515

Branch: refs/heads/trunk
Commit: 773b6515ac51af3484824bd6f57685a9726a1e70
Parents: 0d4296f
Author: Tsuyoshi Ozawa oz...@apache.org
Authored: Fri Feb 27 00:22:46 2015 +0900
Committer: Tsuyoshi Ozawa oz...@apache.org
Committed: Fri Feb 27 00:24:29 2015 +0900

--
 hadoop-yarn-project/CHANGES.txt |  3 ++
 .../hadoop-yarn-server-web-proxy/pom.xml|  4 --
 .../server/webproxy/WebAppProxyServlet.java | 46 ++--
 3 files changed, 26 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/773b6515/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index ac3cbb2..a635592 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -327,6 +327,9 @@ Release 2.7.0 - UNRELEASED
 YARN-2797. Add -help to yarn logs and nodes CLI command. 
 (Jagadesh Kiran N via devaraj)
 
+YARN-3217. Remove httpclient dependency from hadoop-yarn-server-web-proxy.
+(Brahma Reddy Battula via ozawa).
+
   OPTIMIZATIONS
 
 YARN-2990. FairScheduler's delay-scheduling always waits for node-local 
and 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/773b6515/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
index fdba1fe..9801064 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
@@ -79,10 +79,6 @@
     </dependency>
 
     <dependency>
-      <groupId>commons-httpclient</groupId>
-      <artifactId>commons-httpclient</artifactId>
-    </dependency>
-    <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
     </dependency>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/773b6515/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
index 47f7769..fd98c80 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
@@ -40,13 +40,6 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import javax.ws.rs.core.UriBuilder;
 
-import org.apache.commons.httpclient.Header;
-import org.apache.commons.httpclient.HostConfiguration;
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.HttpMethod;
-import org.apache.commons.httpclient.cookie.CookiePolicy;
-import org.apache.commons.httpclient.methods.GetMethod;
-import org.apache.commons.httpclient.params.HttpClientParams;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
@@ -59,8 +52,15 @@ import org.apache.hadoop.yarn.util.TrackingUriPlugin;
 import org.apache.hadoop.yarn.webapp.MimeType;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
+import org.apache.http.Header;
+import org.apache.http.HttpResponse;
 import org.apache.http.NameValuePair;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.params.ClientPNames;
+import org.apache.http.client.params.CookiePolicy;
 import org.apache.http.client.utils.URLEncodedUtils;
+import org.apache.http.conn.params.ConnRoutePNames;
+import 
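The rest of the WebAppProxyServlet diff is cut off above, but the import changes show the shape of the migration: the servlet moves from the retired commons-httpclient 3.x classes to the Apache HttpClient 4.x equivalents. A minimal, self-contained sketch of that 4.x style follows; the host name, URI, and parameter choices are illustrative assumptions, not the actual WebAppProxyServlet code.

```java
import java.io.IOException;
import java.io.InputStream;

import org.apache.http.Header;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.params.ClientPNames;
import org.apache.http.client.params.CookiePolicy;
import org.apache.http.conn.params.ConnRoutePNames;
import org.apache.http.impl.client.DefaultHttpClient;

public class HttpClient4GetSketch {
  public static void main(String[] args) throws IOException {
    // 3.x style (removed by this patch) was roughly:
    //   HttpClient client = new HttpClient();
    //   GetMethod get = new GetMethod(uri);
    //   client.executeMethod(hostConfig, get);
    DefaultHttpClient client = new DefaultHttpClient();
    // Keep cookie handling and redirects out of the proxied request.
    client.getParams().setParameter(ClientPNames.COOKIE_POLICY,
        CookiePolicy.IGNORE_COOKIES);
    client.getParams().setBooleanParameter(ClientPNames.HANDLE_REDIRECTS, false);
    // Hypothetical proxy host; the real servlet derives this from its config.
    client.getParams().setParameter(ConnRoutePNames.DEFAULT_PROXY,
        new HttpHost("proxy.example.com", 8080));

    HttpGet get = new HttpGet("http://example.com/cluster/app/application_1_0001");
    get.setHeader("Accept", "text/html");
    try {
      HttpResponse resp = client.execute(get);
      for (Header h : resp.getAllHeaders()) {
        System.out.println(h.getName() + ": " + h.getValue());
      }
      // Callers would stream the entity back to the requester here.
      InputStream in = resp.getEntity().getContent();
      in.close();
    } finally {
      client.getConnectionManager().shutdown();
    }
  }
}
```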

hadoop git commit: HADOOP-11637. bash location hard-coded in shell scripts (aw)

2015-02-26 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/trunk 773b6515a -> dce8b9c4d


HADOOP-11637. bash location hard-coded in shell scripts (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/dce8b9c4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/dce8b9c4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/dce8b9c4

Branch: refs/heads/trunk
Commit: dce8b9c4d0b2da1780f743d81e840ca0fdfc62cf
Parents: 773b651
Author: Allen Wittenauer a...@apache.org
Authored: Thu Feb 26 09:29:16 2015 -0800
Committer: Allen Wittenauer a...@apache.org
Committed: Thu Feb 26 09:29:16 2015 -0800

--
 hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++
 .../hadoop-common/src/main/bin/hadoop-functions.sh  | 2 +-
 .../hadoop-common/src/site/markdown/RackAwareness.md| 5 +++--
 hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh   | 2 +-
 hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh | 2 +-
 hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh   | 2 +-
 .../hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh  | 2 +-
 .../hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh| 2 +-
 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh  | 2 +-
 hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh   | 2 +-
 hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh  | 2 +-
 11 files changed, 14 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce8b9c4/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 39062a8..ca27463 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -404,6 +404,8 @@ Trunk (Unreleased)
 
     HADOOP-11625. Minor fixes to command manual & SLA doc (aw)
 
+HADOOP-11637. bash location hard-coded in shell scripts (aw)
+
   OPTIMIZATIONS
 
 HADOOP-7761. Improve the performance of raw comparisons. (todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce8b9c4/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh 
b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
index cec6b2c..bccbe25 100644
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
 # this work for additional information regarding copyright ownership.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce8b9c4/hadoop-common-project/hadoop-common/src/site/markdown/RackAwareness.md
--
diff --git 
a/hadoop-common-project/hadoop-common/src/site/markdown/RackAwareness.md 
b/hadoop-common-project/hadoop-common/src/site/markdown/RackAwareness.md
index c5ab19a..09f5610 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/RackAwareness.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/RackAwareness.md
@@ -105,7 +105,7 @@ bash Example
 
 
 ```bash
-#!/bin/bash
+#!/usr/bin/env bash
 # Here's a bash example to show just how simple these scripts can be
 # Assuming we have flat network with everything on a single switch, we can 
fake a rack topology.
 # This could occur in a lab environment where we have limited nodes,like 2-8 
physical machines on a unmanaged switch.
@@ -133,4 +133,5 @@ bash Example
 #fails to split on four dots, it will still print '/rack-' last field value
 
 echo $@ | xargs -n 1 | awk -F '.' '{print "/rack-"$NF}'
-```
\ No newline at end of file
+```
+

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce8b9c4/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
--
diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh 
b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
index de9554f..41449ef 100644
--- a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
+++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.


hadoop git commit: HDFS-7819. Log WARN message for the blocks which are not in Block ID based layout (Rakesh R via Colin P. McCabe)

2015-02-26 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/trunk dce8b9c4d -> f0c980abe


HDFS-7819. Log WARN message for the blocks which are not in Block ID based 
layout (Rakesh R via Colin P. McCabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f0c980ab
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f0c980ab
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f0c980ab

Branch: refs/heads/trunk
Commit: f0c980abed3843923e0eb16b626fa27334195eda
Parents: dce8b9c
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Thu Feb 26 11:58:29 2015 -0800
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Thu Feb 26 11:58:29 2015 -0800

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../hdfs/server/datanode/DirectoryScanner.java  | 26 +---
 2 files changed, 25 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f0c980ab/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index e09714f..54b4057 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -679,6 +679,9 @@ Release 2.7.0 - UNRELEASED
 HDFS-7832. Show 'Last Modified' in Namenode's 'Browse Filesystem'
 (vinayakumarb)
 
+HDFS-7819. Log WARN message for the blocks which are not in Block ID based
+layout (Rakesh R via Colin P. McCabe)
+
   OPTIMIZATIONS
 
 HDFS-7454. Reduce memory footprint for AclEntries in NameNode.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f0c980ab/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
index 71f976b..09c2914 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
@@ -597,14 +597,15 @@ public class DirectoryScanner implements Runnable {
       for (String bpid : bpList) {
         LinkedList<ScanInfo> report = new LinkedList<ScanInfo>();
         File bpFinalizedDir = volume.getFinalizedDir(bpid);
-        result.put(bpid, compileReport(volume, bpFinalizedDir, report));
+        result.put(bpid,
+            compileReport(volume, bpFinalizedDir, bpFinalizedDir, report));
       }
       return result;
     }
 
     /** Compile list {@link ScanInfo} for the blocks in the directory <dir> */
-    private LinkedList<ScanInfo> compileReport(FsVolumeSpi vol, File dir,
-        LinkedList<ScanInfo> report) {
+    private LinkedList<ScanInfo> compileReport(FsVolumeSpi vol,
+        File bpFinalizedDir, File dir, LinkedList<ScanInfo> report) {
       File[] files;
       try {
         files = FileUtil.listFiles(dir);
@@ -622,12 +623,14 @@
        */
       for (int i = 0; i < files.length; i++) {
         if (files[i].isDirectory()) {
-          compileReport(vol, files[i], report);
+          compileReport(vol, bpFinalizedDir, files[i], report);
           continue;
         }
         if (!Block.isBlockFilename(files[i])) {
           if (isBlockMetaFile("blk_", files[i].getName())) {
             long blockId = Block.getBlockId(files[i].getName());
+            verifyFileLocation(files[i].getParentFile(), bpFinalizedDir,
+                blockId);
             report.add(new ScanInfo(blockId, null, files[i], vol));
           }
           continue;
@@ -646,9 +649,24 @@
             break;
           }
         }
+        verifyFileLocation(blockFile.getParentFile(), bpFinalizedDir,
+            blockId);
         report.add(new ScanInfo(blockId, blockFile, metaFile, vol));
       }
       return report;
     }
+
+    /**
+     * Verify whether the actual directory location of block file has the
+     * expected directory path computed using its block ID.
+     */
+    private void verifyFileLocation(File actualBlockDir,
+        File bpFinalizedDir, long blockId) {
+      File blockDir = DatanodeUtil.idToBlockDir(bpFinalizedDir, blockId);
+      if (actualBlockDir.compareTo(blockDir) != 0) {
+        LOG.warn("Block: " + blockId
+            + " has to be upgraded to block ID-based layout");
+      }
+    }
   }
 }
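The new check compares where a finalized block file actually sits against the subdirectory its block ID maps to under the block-ID-based layout, and logs the WARN above when they differ. A rough, self-contained sketch of the idea follows; the bit masks, "subdir" prefix, and paths are assumptions for illustration only, not the real DatanodeUtil.idToBlockDir implementation.

```java
import java.io.File;

public class BlockLayoutCheckSketch {
  // Assumed layout: two subdirectory levels derived from bits of the block ID.
  // The real mapping lives in DatanodeUtil.idToBlockDir; these masks are illustrative.
  static File idToBlockDir(File finalizedDir, long blockId) {
    int d1 = (int) ((blockId >> 16) & 0x1F);
    int d2 = (int) ((blockId >> 8) & 0x1F);
    return new File(finalizedDir, "subdir" + d1 + File.separator + "subdir" + d2);
  }

  // Mirrors the intent of verifyFileLocation: warn when the block file's
  // parent directory is not the directory computed from its ID.
  static void checkLocation(File actualBlockDir, File finalizedDir, long blockId) {
    File expected = idToBlockDir(finalizedDir, blockId);
    if (actualBlockDir.compareTo(expected) != 0) {
      System.out.println("WARN Block: " + blockId
          + " has to be upgraded to block ID-based layout");
    }
  }

  public static void main(String[] args) {
    File finalized = new File("/data/dn/current/BP-1/current/finalized");
    long blockId = 1073741825L;
    // A block still sitting directly under finalized/ (pre-upgrade layout)
    // triggers the warning; one under its computed subdirectory does not.
    checkLocation(finalized, finalized, blockId);
    checkLocation(idToBlockDir(finalized, blockId), finalized, blockId);
  }
}
```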



hadoop git commit: HDFS-7819. Log WARN message for the blocks which are not in Block ID based layout (Rakesh R via Colin P. McCabe)

2015-02-26 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 dc348f489 -> 8b3b9568b


HDFS-7819. Log WARN message for the blocks which are not in Block ID based 
layout (Rakesh R via Colin P. McCabe)

(cherry picked from commit f0c980abed3843923e0eb16b626fa27334195eda)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8b3b9568
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8b3b9568
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8b3b9568

Branch: refs/heads/branch-2
Commit: 8b3b9568b684820800f59eae3e48e0d058a8f21b
Parents: dc348f4
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Thu Feb 26 11:58:29 2015 -0800
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Thu Feb 26 12:03:11 2015 -0800

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../hdfs/server/datanode/DirectoryScanner.java  | 26 +---
 2 files changed, 25 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b3b9568/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index f59bb71..79cf934 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -381,6 +381,9 @@ Release 2.7.0 - UNRELEASED
 HDFS-7832. Show 'Last Modified' in Namenode's 'Browse Filesystem'
 (vinayakumarb)
 
+HDFS-7819. Log WARN message for the blocks which are not in Block ID based
+layout (Rakesh R via Colin P. McCabe)
+
   OPTIMIZATIONS
 
 HDFS-7454. Reduce memory footprint for AclEntries in NameNode.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8b3b9568/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
index 71f976b..09c2914 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
@@ -597,14 +597,15 @@ public class DirectoryScanner implements Runnable {
       for (String bpid : bpList) {
         LinkedList<ScanInfo> report = new LinkedList<ScanInfo>();
         File bpFinalizedDir = volume.getFinalizedDir(bpid);
-        result.put(bpid, compileReport(volume, bpFinalizedDir, report));
+        result.put(bpid,
+            compileReport(volume, bpFinalizedDir, bpFinalizedDir, report));
       }
       return result;
     }
 
     /** Compile list {@link ScanInfo} for the blocks in the directory <dir> */
-    private LinkedList<ScanInfo> compileReport(FsVolumeSpi vol, File dir,
-        LinkedList<ScanInfo> report) {
+    private LinkedList<ScanInfo> compileReport(FsVolumeSpi vol,
+        File bpFinalizedDir, File dir, LinkedList<ScanInfo> report) {
       File[] files;
       try {
         files = FileUtil.listFiles(dir);
@@ -622,12 +623,14 @@
        */
       for (int i = 0; i < files.length; i++) {
         if (files[i].isDirectory()) {
-          compileReport(vol, files[i], report);
+          compileReport(vol, bpFinalizedDir, files[i], report);
           continue;
         }
         if (!Block.isBlockFilename(files[i])) {
           if (isBlockMetaFile("blk_", files[i].getName())) {
             long blockId = Block.getBlockId(files[i].getName());
+            verifyFileLocation(files[i].getParentFile(), bpFinalizedDir,
+                blockId);
             report.add(new ScanInfo(blockId, null, files[i], vol));
           }
           continue;
@@ -646,9 +649,24 @@
             break;
           }
         }
+        verifyFileLocation(blockFile.getParentFile(), bpFinalizedDir,
+            blockId);
         report.add(new ScanInfo(blockId, blockFile, metaFile, vol));
       }
       return report;
     }
+
+    /**
+     * Verify whether the actual directory location of block file has the
+     * expected directory path computed using its block ID.
+     */
+    private void verifyFileLocation(File actualBlockDir,
+        File bpFinalizedDir, long blockId) {
+      File blockDir = DatanodeUtil.idToBlockDir(bpFinalizedDir, blockId);
+      if (actualBlockDir.compareTo(blockDir) != 0) {
+        LOG.warn("Block: " + blockId
+            + " has to be upgraded to block ID-based layout");
+      }
+    }
  

[2/2] hadoop git commit: HADOOP-9922. hadoop windows native build will fail in 32 bit machine. Contributed by Kiran Kumar M R.

2015-02-26 Thread cnauroth
HADOOP-9922. hadoop windows native build will fail in 32 bit machine. 
Contributed by Kiran Kumar M R.

(cherry picked from commit 2214dab60ff11b8de74c9d661585452a078fe0c1)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/264c9677
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/264c9677
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/264c9677

Branch: refs/heads/branch-2
Commit: 264c96777e02454fe1f7af96dafe614d5913449e
Parents: 8b3b956
Author: cnauroth cnaur...@apache.org
Authored: Thu Feb 26 12:41:33 2015 -0800
Committer: cnauroth cnaur...@apache.org
Committed: Thu Feb 26 12:41:48 2015 -0800

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +
 .../hadoop-common/src/main/native/native.sln|  8 +--
 .../src/main/native/native.vcxproj  | 40 
 .../src/main/winutils/include/winutils.h|  5 +-
 .../src/main/winutils/libwinutils.c |  2 +-
 .../src/main/winutils/libwinutils.vcxproj   | 64 +++-
 .../hadoop-common/src/main/winutils/service.c   |  8 +--
 .../hadoop-common/src/main/winutils/task.c  |  2 +-
 .../src/main/winutils/winutils.sln  | 10 +++
 .../src/main/winutils/winutils.vcxproj  | 61 ++-
 10 files changed, 189 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/264c9677/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 34eeb45..e81ab67 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -605,6 +605,9 @@ Release 2.7.0 - UNRELEASED
 HADOOP-11629. WASB filesystem should not start BandwidthGaugeUpdater if
 fs.azure.skip.metrics set to true. (Shanyu Zhao via cnauroth)
 
+HADOOP-9922. hadoop windows native build will fail in 32 bit machine.
+(Kiran Kumar M R via cnauroth)
+
 Release 2.6.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/264c9677/hadoop-common-project/hadoop-common/src/main/native/native.sln
--
diff --git a/hadoop-common-project/hadoop-common/src/main/native/native.sln 
b/hadoop-common-project/hadoop-common/src/main/native/native.sln
index 40a7821..54bc17e 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/native.sln
+++ b/hadoop-common-project/hadoop-common/src/main/native/native.sln
@@ -31,14 +31,14 @@ Global
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Mixed 
Platforms.ActiveCfg = Release|x64
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Mixed 
Platforms.Build.0 = Release|x64
-   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.ActiveCfg = 
Release|x64
-   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.Build.0 = 
Release|x64
+   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.ActiveCfg = 
Release|Win32
+   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.Build.0 = 
Release|Win32
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|x64.ActiveCfg = 
Release|x64
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|x64.Build.0 = 
Release|x64
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Mixed 
Platforms.ActiveCfg = Release|x64
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Mixed 
Platforms.Build.0 = Release|x64
-   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.ActiveCfg 
= Release|x64
-   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.Build.0 = 
Release|x64
+   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.ActiveCfg 
= Release|Win32
+   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.Build.0 = 
Release|Win32
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|x64.ActiveCfg = 
Release|x64
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|x64.Build.0 = 
Release|x64
EndGlobalSection

http://git-wip-us.apache.org/repos/asf/hadoop/blob/264c9677/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
--
diff --git a/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj 
b/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
index 2d60e56..0912c6a 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
+++ b/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
@@ -19,6 +19,10 @@
 
 Project 

[1/2] hadoop git commit: HADOOP-9922. hadoop windows native build will fail in 32 bit machine. Contributed by Kiran Kumar M R.

2015-02-26 Thread cnauroth
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 8b3b9568b -> 264c96777
  refs/heads/trunk f0c980abe -> 2214dab60


HADOOP-9922. hadoop windows native build will fail in 32 bit machine. 
Contributed by Kiran Kumar M R.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2214dab6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2214dab6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2214dab6

Branch: refs/heads/trunk
Commit: 2214dab60ff11b8de74c9d661585452a078fe0c1
Parents: f0c980a
Author: cnauroth cnaur...@apache.org
Authored: Thu Feb 26 12:41:33 2015 -0800
Committer: cnauroth cnaur...@apache.org
Committed: Thu Feb 26 12:41:33 2015 -0800

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +
 .../hadoop-common/src/main/native/native.sln|  8 +--
 .../src/main/native/native.vcxproj  | 40 
 .../src/main/winutils/include/winutils.h|  5 +-
 .../src/main/winutils/libwinutils.c |  2 +-
 .../src/main/winutils/libwinutils.vcxproj   | 64 +++-
 .../hadoop-common/src/main/winutils/service.c   |  8 +--
 .../hadoop-common/src/main/winutils/task.c  |  2 +-
 .../src/main/winutils/winutils.sln  | 10 +++
 .../src/main/winutils/winutils.vcxproj  | 61 ++-
 10 files changed, 189 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2214dab6/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index ca27463..1d9a6d4 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -1010,6 +1010,9 @@ Release 2.7.0 - UNRELEASED
 HADOOP-11629. WASB filesystem should not start BandwidthGaugeUpdater if
 fs.azure.skip.metrics set to true. (Shanyu Zhao via cnauroth)
 
+HADOOP-9922. hadoop windows native build will fail in 32 bit machine.
+(Kiran Kumar M R via cnauroth)
+
 Release 2.6.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2214dab6/hadoop-common-project/hadoop-common/src/main/native/native.sln
--
diff --git a/hadoop-common-project/hadoop-common/src/main/native/native.sln 
b/hadoop-common-project/hadoop-common/src/main/native/native.sln
index 40a7821..54bc17e 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/native.sln
+++ b/hadoop-common-project/hadoop-common/src/main/native/native.sln
@@ -31,14 +31,14 @@ Global
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Mixed 
Platforms.ActiveCfg = Release|x64
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Mixed 
Platforms.Build.0 = Release|x64
-   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.ActiveCfg = 
Release|x64
-   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.Build.0 = 
Release|x64
+   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.ActiveCfg = 
Release|Win32
+   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.Build.0 = 
Release|Win32
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|x64.ActiveCfg = 
Release|x64
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|x64.Build.0 = 
Release|x64
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Mixed 
Platforms.ActiveCfg = Release|x64
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Mixed 
Platforms.Build.0 = Release|x64
-   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.ActiveCfg 
= Release|x64
-   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.Build.0 = 
Release|x64
+   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.ActiveCfg 
= Release|Win32
+   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.Build.0 = 
Release|Win32
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|x64.ActiveCfg = 
Release|x64
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|x64.Build.0 = 
Release|x64
EndGlobalSection

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2214dab6/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
--
diff --git a/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj 
b/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
index 2d60e56..0912c6a 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
+++ 

Git Push Summary

2015-02-26 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/HDFS-7836 [created] 2214dab60


[1/2] hadoop git commit: HDFS-7774. Unresolved symbols error while compiling HDFS on Windows 7/32 bit. Contributed by Kiran Kumar M R.

2015-02-26 Thread cnauroth
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 264c96777 -> 5dfb793b0
  refs/heads/trunk 2214dab60 -> c6d5b37a1


HDFS-7774. Unresolved symbols error while compiling HDFS on Windows 7/32 bit. 
Contributed by Kiran Kumar M R.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c6d5b37a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c6d5b37a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c6d5b37a

Branch: refs/heads/trunk
Commit: c6d5b37a1c4c3acc190f5f4e27109594efb7be8c
Parents: 2214dab
Author: cnauroth cnaur...@apache.org
Authored: Thu Feb 26 13:37:46 2015 -0800
Committer: cnauroth cnaur...@apache.org
Committed: Thu Feb 26 13:37:46 2015 -0800

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt  | 3 +++
 hadoop-hdfs-project/hadoop-hdfs/pom.xml  | 5 -
 .../hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c  | 2 +-
 .../main/native/libhdfs/os/windows/thread_local_storage.c| 8 
 4 files changed, 16 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6d5b37a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 54b4057..ae83898 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1040,6 +1040,9 @@ Release 2.7.0 - UNRELEASED
 HDFS-7843. A truncated file is corrupted after rollback from a rolling
 upgrade.  (szetszwo)
 
+HDFS-7774. Unresolved symbols error while compiling HDFS on Windows 7/32 
bit.
+(Kiran Kumar M R via cnauroth)
+
 BREAKDOWN OF HDFS-7584 SUBTASKS AND RELATED JIRAS
 
   HDFS-7720. Quota by Storage Type API, tools and ClientNameNode

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6d5b37a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml 
b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
index 5efce5c..2d402a9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -440,10 +440,13 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
                 </goals>
                 <configuration>
                   <target>
+                    <condition property="generator" value="Visual Studio 10" else="Visual Studio 10 Win64">
+                      <equals arg1="Win32" arg2="${env.PLATFORM}" />
+                    </condition>
                     <mkdir dir="${project.build.directory}/native"/>
                     <exec executable="cmake" dir="${project.build.directory}/native"
                         failonerror="true">
-                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_LIBWEBHDFS=${require.libwebhdfs} -DREQUIRE_FUSE=${require.fuse} -G 'Visual Studio 10 Win64'"/>
+                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_LIBWEBHDFS=${require.libwebhdfs} -DREQUIRE_FUSE=${require.fuse} -G '${generator}'"/>
                     </exec>
                     <exec executable="msbuild" dir="${project.build.directory}/native"
                         failonerror="true">

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6d5b37a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c
index 90450d8..f5cc2a7 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c
@@ -28,7 +28,7 @@
  * @param toRun thread to run
  * @return DWORD result of running thread (always 0)
  */
-static DWORD runThread(LPVOID toRun) {
+static DWORD WINAPI runThread(LPVOID toRun) {
   const thread *t = toRun;
  t->start(t->arg);
   return 0;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6d5b37a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread_local_storage.c
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread_local_storage.c
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread_local_storage.c
index 70ad152..4c415e1 100644
--- 

[2/2] hadoop git commit: HDFS-7774. Unresolved symbols error while compiling HDFS on Windows 7/32 bit. Contributed by Kiran Kumar M R.

2015-02-26 Thread cnauroth
HDFS-7774. Unresolved symbols error while compiling HDFS on Windows 7/32 bit. 
Contributed by Kiran Kumar M R.

(cherry picked from commit c6d5b37a1c4c3acc190f5f4e27109594efb7be8c)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5dfb793b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5dfb793b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5dfb793b

Branch: refs/heads/branch-2
Commit: 5dfb793b05a1a200aafa0645775d80b1fa656d66
Parents: 264c967
Author: cnauroth cnaur...@apache.org
Authored: Thu Feb 26 13:37:46 2015 -0800
Committer: cnauroth cnaur...@apache.org
Committed: Thu Feb 26 13:37:58 2015 -0800

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt  | 3 +++
 hadoop-hdfs-project/hadoop-hdfs/pom.xml  | 5 -
 .../hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c  | 2 +-
 .../main/native/libhdfs/os/windows/thread_local_storage.c| 8 
 4 files changed, 16 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5dfb793b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 79cf934..70aad62 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -741,6 +741,9 @@ Release 2.7.0 - UNRELEASED
 HDFS-7843. A truncated file is corrupted after rollback from a rolling
 upgrade.  (szetszwo)
 
+HDFS-7774. Unresolved symbols error while compiling HDFS on Windows 7/32 
bit.
+(Kiran Kumar M R via cnauroth)
+
 BREAKDOWN OF HDFS-7584 SUBTASKS AND RELATED JIRAS
 
   HDFS-7720. Quota by Storage Type API, tools and ClientNameNode

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5dfb793b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml 
b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
index 71f2b42..dbaa42a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -438,10 +438,13 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
                 </goals>
                 <configuration>
                   <target>
+                    <condition property="generator" value="Visual Studio 10" else="Visual Studio 10 Win64">
+                      <equals arg1="Win32" arg2="${env.PLATFORM}" />
+                    </condition>
                     <mkdir dir="${project.build.directory}/native"/>
                     <exec executable="cmake" dir="${project.build.directory}/native"
                         failonerror="true">
-                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_LIBWEBHDFS=${require.libwebhdfs} -DREQUIRE_FUSE=${require.fuse} -G 'Visual Studio 10 Win64'"/>
+                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_LIBWEBHDFS=${require.libwebhdfs} -DREQUIRE_FUSE=${require.fuse} -G '${generator}'"/>
                     </exec>
                     <exec executable="msbuild" dir="${project.build.directory}/native"
                         failonerror="true">

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5dfb793b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c
index 90450d8..f5cc2a7 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c
@@ -28,7 +28,7 @@
  * @param toRun thread to run
  * @return DWORD result of running thread (always 0)
  */
-static DWORD runThread(LPVOID toRun) {
+static DWORD WINAPI runThread(LPVOID toRun) {
   const thread *t = toRun;
  t->start(t->arg);
   return 0;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5dfb793b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread_local_storage.c
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread_local_storage.c
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread_local_storage.c
index 70ad152..4c415e1 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread_local_storage.c
+++ 

hadoop git commit: HDFS-7832. Show 'Last Modified' in Namenode's 'Browse Filesystem' (Contributed by Vinayakumar B)

2015-02-26 Thread vinayakumarb
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 0512e50d6 -> f0f3d1937


HDFS-7832. Show 'Last Modified' in Namenode's 'Browse Filesystem' (Contributed 
by Vinayakumar B)

(cherry picked from commit 166eecf687765f369cd5bf210cc6d7bf493121ea)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f0f3d193
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f0f3d193
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f0f3d193

Branch: refs/heads/branch-2
Commit: f0f3d1937db2f43479b3daa6890dce1f86c1aa23
Parents: 0512e50
Author: Vinayakumar B vinayakum...@apache.org
Authored: Thu Feb 26 14:36:09 2015 +0530
Committer: Vinayakumar B vinayakum...@apache.org
Committed: Thu Feb 26 14:37:03 2015 +0530

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 +++
 .../hadoop-hdfs/src/main/webapps/hdfs/explorer.html | 2 ++
 .../hadoop-hdfs/src/main/webapps/hdfs/explorer.js   | 9 -
 3 files changed, 13 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f0f3d193/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index cda4b7d..f59bb71 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -378,6 +378,9 @@ Release 2.7.0 - UNRELEASED
 HDFS-7537. Add UNDER MIN REPL'D BLOCKS count to fsck.  (GAO Rui via
 szetszwo)
 
+HDFS-7832. Show 'Last Modified' in Namenode's 'Browse Filesystem'
+(vinayakumarb)
+
   OPTIMIZATIONS
 
 HDFS-7454. Reduce memory footprint for AclEntries in NameNode.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f0f3d193/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
index e1fdfa3..7b34044 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
@@ -112,6 +112,7 @@
         <th>Owner</th>
         <th>Group</th>
         <th>Size</th>
+        <th>Last Modified</th>
         <th>Replication</th>
         <th>Block Size</th>
         <th>Name</th>
@@ -124,6 +125,7 @@
         <td>{owner}</td>
         <td>{group}</td>
         <td>{length|fmt_bytes}</td>
+        <td>{#helper_date_tostring value="{modificationTime}"/}</td>
         <td>{replication}</td>
         <td>{blockSize|fmt_bytes}</td>
         <td><a style="cursor:pointer" inode-type="{type}" class="explorer-browse-links" inode-path="{pathSuffix}">{pathSuffix}</a></td>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f0f3d193/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
index 87d47fa..0a53dcd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
@@ -143,6 +143,12 @@
   }
 
   function browse_directory(dir) {
+var HELPERS = {
+  'helper_date_tostring' : function (chunk, ctx, bodies, params) {
+var value = dust.helpers.tap(params.value, chunk, ctx);
+return chunk.write('' + new Date(Number(value)).toLocaleString());
+  }
+};
 var url = '/webhdfs/v1' + dir + '?op=LISTSTATUS';
 $.get(url, function(data) {
      var d = get_response(data, "FileStatuses");
@@ -154,7 +160,8 @@
   current_directory = dir;
   $('#directory').val(dir);
   window.location.hash = dir;
-  dust.render('explorer', d, function(err, out) {
+  var base = dust.makeBase(HELPERS);
+  dust.render('explorer', base.push(d), function(err, out) {
 $('#panel').html(out);
 
 $('.explorer-browse-links').click(function() {



hadoop git commit: HDFS-7832. Show 'Last Modified' in Namenode's 'Browse Filesystem' (Contributed by Vinayakumar B)

2015-02-26 Thread vinayakumarb
Repository: hadoop
Updated Branches:
  refs/heads/trunk 71385f9b7 -> 166eecf68


HDFS-7832. Show 'Last Modified' in Namenode's 'Browse Filesystem' (Contributed 
by Vinayakumar B)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/166eecf6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/166eecf6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/166eecf6

Branch: refs/heads/trunk
Commit: 166eecf687765f369cd5bf210cc6d7bf493121ea
Parents: 71385f9
Author: Vinayakumar B vinayakum...@apache.org
Authored: Thu Feb 26 14:36:09 2015 +0530
Committer: Vinayakumar B vinayakum...@apache.org
Committed: Thu Feb 26 14:36:09 2015 +0530

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 +++
 .../hadoop-hdfs/src/main/webapps/hdfs/explorer.html | 2 ++
 .../hadoop-hdfs/src/main/webapps/hdfs/explorer.js   | 9 -
 3 files changed, 13 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/166eecf6/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 4523bf4..e09714f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -676,6 +676,9 @@ Release 2.7.0 - UNRELEASED
 HDFS-7537. Add UNDER MIN REPL'D BLOCKS count to fsck.  (GAO Rui via
 szetszwo)
 
+HDFS-7832. Show 'Last Modified' in Namenode's 'Browse Filesystem'
+(vinayakumarb)
+
   OPTIMIZATIONS
 
 HDFS-7454. Reduce memory footprint for AclEntries in NameNode.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/166eecf6/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
index e1fdfa3..7b34044 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
@@ -112,6 +112,7 @@
         <th>Owner</th>
         <th>Group</th>
         <th>Size</th>
+        <th>Last Modified</th>
         <th>Replication</th>
         <th>Block Size</th>
         <th>Name</th>
@@ -124,6 +125,7 @@
         <td>{owner}</td>
         <td>{group}</td>
         <td>{length|fmt_bytes}</td>
+        <td>{#helper_date_tostring value="{modificationTime}"/}</td>
         <td>{replication}</td>
         <td>{blockSize|fmt_bytes}</td>
         <td><a style="cursor:pointer" inode-type="{type}" class="explorer-browse-links" inode-path="{pathSuffix}">{pathSuffix}</a></td>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/166eecf6/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
index 87d47fa..0a53dcd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
@@ -143,6 +143,12 @@
   }
 
   function browse_directory(dir) {
+var HELPERS = {
+  'helper_date_tostring' : function (chunk, ctx, bodies, params) {
+var value = dust.helpers.tap(params.value, chunk, ctx);
+return chunk.write('' + new Date(Number(value)).toLocaleString());
+  }
+};
 var url = '/webhdfs/v1' + dir + '?op=LISTSTATUS';
 $.get(url, function(data) {
      var d = get_response(data, "FileStatuses");
@@ -154,7 +160,8 @@
   current_directory = dir;
   $('#directory').val(dir);
   window.location.hash = dir;
-  dust.render('explorer', d, function(err, out) {
+  var base = dust.makeBase(HELPERS);
+  dust.render('explorer', base.push(d), function(err, out) {
 $('#panel').html(out);
 
 $('.explorer-browse-links').click(function() {



hadoop git commit: YARN-3256. TestClientToAMTokens#testClientTokenRace is not running against all Schedulers even when using ParameterizedSchedulerTestBase. Contributed by Anubhav Dhoot.

2015-02-26 Thread devaraj
Repository: hadoop
Updated Branches:
  refs/heads/trunk 166eecf68 -> 0d4296f0e


YARN-3256. TestClientToAMTokens#testClientTokenRace is not running against
all Schedulers even when using ParameterizedSchedulerTestBase. Contributed
by Anubhav Dhoot.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0d4296f0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0d4296f0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0d4296f0

Branch: refs/heads/trunk
Commit: 0d4296f0e0f545267f2e39a868d4ffefc9844db8
Parents: 166eecf
Author: Devaraj K deva...@apache.org
Authored: Thu Feb 26 15:45:41 2015 +0530
Committer: Devaraj K deva...@apache.org
Committed: Thu Feb 26 15:45:41 2015 +0530

--
 hadoop-yarn-project/CHANGES.txt  | 4 
 .../server/resourcemanager/security/TestClientToAMTokens.java| 3 ---
 2 files changed, 4 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0d4296f0/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index e5148eb..ac3cbb2 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -658,6 +658,10 @@ Release 2.7.0 - UNRELEASED
 YARN-3239. WebAppProxy does not support a final tracking url which has
 query fragments and params (Jian He via jlowe)
 
+YARN-3256. TestClientToAMTokens#testClientTokenRace is not running against 
+all Schedulers even when using ParameterizedSchedulerTestBase. 
+(Anubhav Dhoot via devaraj)
+
 Release 2.6.0 - 2014-11-18
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0d4296f0/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
index 78bc728..499b4d1 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
@@ -33,7 +33,6 @@ import java.net.InetSocketAddress;
 import java.nio.ByteBuffer;
 import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
-import java.util.Arrays;
 import java.util.Timer;
 import java.util.TimerTask;
 
@@ -43,7 +42,6 @@ import org.junit.Assert;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.Server;
@@ -421,7 +419,6 @@ public class TestClientToAMTokens extends 
ParameterizedSchedulerTestBase {
   @Test(timeout=2)
   public void testClientTokenRace() throws Exception {
 
-final Configuration conf = new Configuration();
 conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
      "kerberos");
 UserGroupInformation.setConfiguration(conf);
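
The fix drops the local `final Configuration conf = new Configuration()`, so the test now uses the configuration injected by ParameterizedSchedulerTestBase, which is what makes the test actually repeat once per scheduler. A minimal JUnit 4 sketch of that parameterized-test pattern follows; the class, field, and parameter values are illustrative assumptions, not the YARN test code.

```java
import java.util.Arrays;
import java.util.Collection;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

// Each parameter value produces a separate run of every @Test method.
@RunWith(Parameterized.class)
public class ParameterizedConfSketchTest {

  @Parameters(name = "scheduler={0}")
  public static Collection<Object[]> schedulers() {
    return Arrays.asList(new Object[][] {{"capacity"}, {"fair"}});
  }

  // Supplied by the runner for each parameter; tests must keep using this
  // value instead of building a fresh one, or the per-scheduler setting is
  // silently lost -- the bug this change fixes, in spirit.
  protected final String schedulerConf;

  public ParameterizedConfSketchTest(String schedulerConf) {
    this.schedulerConf = schedulerConf;
  }

  @Test
  public void runsOncePerScheduler() {
    // Correct: read the parameterized value.
    System.out.println("running against scheduler: " + schedulerConf);
    // Wrong (the old bug, in spirit): String conf = "capacity"; // shadows it
  }
}
```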



hadoop git commit: YARN-3256. TestClientToAMTokens#testClientTokenRace is not running against all Schedulers even when using ParameterizedSchedulerTestBase. Contributed by Anubhav Dhoot.

2015-02-26 Thread devaraj
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 f0f3d1937 -> f83d2e441


YARN-3256. TestClientToAMTokens#testClientTokenRace is not running against
all Schedulers even when using ParameterizedSchedulerTestBase. Contributed
by Anubhav Dhoot.

(cherry picked from commit 0d4296f0e0f545267f2e39a868d4ffefc9844db8)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f83d2e44
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f83d2e44
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f83d2e44

Branch: refs/heads/branch-2
Commit: f83d2e4410e5dadaf4c787fd1875d92e11882013
Parents: f0f3d19
Author: Devaraj K deva...@apache.org
Authored: Thu Feb 26 15:45:41 2015 +0530
Committer: Devaraj K deva...@apache.org
Committed: Thu Feb 26 15:47:32 2015 +0530

--
 hadoop-yarn-project/CHANGES.txt  | 4 
 .../server/resourcemanager/security/TestClientToAMTokens.java| 3 ---
 2 files changed, 4 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f83d2e44/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 9046482..e399f17 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -619,6 +619,10 @@ Release 2.7.0 - UNRELEASED
 YARN-3239. WebAppProxy does not support a final tracking url which has
 query fragments and params (Jian He via jlowe)
 
+YARN-3256. TestClientToAMTokens#testClientTokenRace is not running against 
+all Schedulers even when using ParameterizedSchedulerTestBase. 
+(Anubhav Dhoot via devaraj)
+
 Release 2.6.0 - 2014-11-18
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f83d2e44/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
index 78bc728..499b4d1 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
@@ -33,7 +33,6 @@ import java.net.InetSocketAddress;
 import java.nio.ByteBuffer;
 import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
-import java.util.Arrays;
 import java.util.Timer;
 import java.util.TimerTask;
 
@@ -43,7 +42,6 @@ import org.junit.Assert;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.Server;
@@ -421,7 +419,6 @@ public class TestClientToAMTokens extends 
ParameterizedSchedulerTestBase {
   @Test(timeout=2)
   public void testClientTokenRace() throws Exception {
 
-final Configuration conf = new Configuration();
 conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
      "kerberos");
 UserGroupInformation.setConfiguration(conf);



hadoop git commit: YARN-3087. Made the REST server of per-node aggregator work alone in NM daemon. Conntributed by Li Lu.

2015-02-26 Thread zjshen
Repository: hadoop
Updated Branches:
  refs/heads/YARN-2928 7788f9773 -> 41a08ad40


YARN-3087. Made the REST server of per-node aggregator work alone in NM daemon. 
Conntributed by Li Lu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/41a08ad4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/41a08ad4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/41a08ad4

Branch: refs/heads/YARN-2928
Commit: 41a08ad404d4278fe598d6c222b2ae0e84bae0df
Parents: 7788f97
Author: Zhijie Shen zjs...@apache.org
Authored: Thu Feb 26 15:21:42 2015 -0800
Committer: Zhijie Shen zjs...@apache.org
Committed: Thu Feb 26 15:21:42 2015 -0800

--
 hadoop-yarn-project/CHANGES.txt |  3 +
 .../HierarchicalTimelineEntity.java | 14 -
 .../records/timelineservice/TimelineEntity.java | 58 ---
 .../records/timelineservice/TimelineEvent.java  | 14 -
 .../records/timelineservice/TimelineMetric.java | 28 +++--
 .../server/nodemanager/webapp/WebServer.java|  6 --
 .../aggregator/PerNodeAggregatorServer.java | 61 
 .../aggregator/PerNodeAggregatorWebService.java | 22 +++
 8 files changed, 150 insertions(+), 56 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/41a08ad4/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 996289a..de088bf 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -17,6 +17,9 @@ Branch YARN-2928: Timeline Server Next Generation: Phase 1
 YARN-3240. Implement client API to put generic entities. (Zhijie Shen via
 junping_du)
 
+YARN-3087. Made the REST server of per-node aggregator work alone in NM
+daemon. (Li Lu via zjshen)
+
   IMPROVEMENTS
 
   OPTIMIZATIONS

http://git-wip-us.apache.org/repos/asf/hadoop/blob/41a08ad4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/HierarchicalTimelineEntity.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/HierarchicalTimelineEntity.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/HierarchicalTimelineEntity.java
index 1a62a5d..01d85cf 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/HierarchicalTimelineEntity.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/HierarchicalTimelineEntity.java
@@ -33,7 +33,7 @@ import java.util.Set;
 @InterfaceStability.Unstable
 public abstract class HierarchicalTimelineEntity extends TimelineEntity {
   private Identifier parent;
-  private MapString, SetString children = new HashMap();
+  private HashMapString, SetString children = new HashMap();
 
   HierarchicalTimelineEntity(String type) {
 super(type);
@@ -56,14 +56,24 @@ public abstract class HierarchicalTimelineEntity extends 
TimelineEntity {
 parent.setId(id);
   }
 
+  // required by JAXB
+  @InterfaceAudience.Private
   @XmlElement(name = "children")
+  public HashMap<String, Set<String>> getChildrenJAXB() {
+return children;
+  }
+
   public Map<String, Set<String>> getChildren() {
 return children;
   }
 
   public void setChildren(Map<String, Set<String>> children) {
 validateChildren(children);
-this.children = children;
+if (children != null && !(children instanceof HashMap)) {
+  this.children = new HashMap<String, Set<String>>(children);
+} else {
+  this.children = (HashMap) children;
+}
   }
 
   public void addChildren(Map<String, Set<String>> children) {
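For readers skimming the hunk above, a minimal standalone sketch of the pattern being introduced: the public API keeps the Map interface, a concrete HashMap-typed getter is added for JAXB (which binds concrete types more reliably than interfaces), and the setter defensively copies into a HashMap. Class and field names here are illustrative only, not Hadoop APIs.

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

public class ChildrenHolder {
  // Stored as a concrete HashMap so JAXB can marshal it directly.
  private HashMap<String, Set<String>> children = new HashMap<>();

  // Getter exposed for JAXB; returns the concrete type.
  public HashMap<String, Set<String>> getChildrenJAXB() {
    return children;
  }

  // Public API continues to use the Map interface.
  public Map<String, Set<String>> getChildren() {
    return children;
  }

  // Defensive copy: any Map implementation is accepted, but a HashMap is stored.
  public void setChildren(Map<String, Set<String>> children) {
    if (children != null && !(children instanceof HashMap)) {
      this.children = new HashMap<>(children);
    } else {
      this.children = (HashMap<String, Set<String>>) children;
    }
  }
}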

http://git-wip-us.apache.org/repos/asf/hadoop/blob/41a08ad4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
index d6d54e8..1afb564 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
@@ -65,12 +65,12 @@ public 

hadoop git commit: MAPREDUCE-6223. TestJobConf#testNegativeValueForTaskVmem failures. (Varun Saxena via kasha)

2015-02-26 Thread kasha
Repository: hadoop
Updated Branches:
  refs/heads/trunk c6d5b37a1 - 1047c883b


MAPREDUCE-6223. TestJobConf#testNegativeValueForTaskVmem failures. (Varun 
Saxena via kasha)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1047c883
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1047c883
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1047c883

Branch: refs/heads/trunk
Commit: 1047c883ba01a252dbd4203e525fb1ff8ea313aa
Parents: c6d5b37
Author: Karthik Kambatla ka...@apache.org
Authored: Thu Feb 26 14:24:19 2015 -0800
Committer: Karthik Kambatla ka...@apache.org
Committed: Thu Feb 26 14:24:19 2015 -0800

--
 hadoop-mapreduce-project/CHANGES.txt|  3 +++
 .../java/org/apache/hadoop/mapred/JobConf.java  |  6 ++---
 .../org/apache/hadoop/conf/TestJobConf.java | 23 ++--
 3 files changed, 17 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1047c883/hadoop-mapreduce-project/CHANGES.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.txt 
b/hadoop-mapreduce-project/CHANGES.txt
index d2c4ab8..6cb1754 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -390,6 +390,9 @@ Release 2.7.0 - UNRELEASED
 MAPREDUCE-2815. JavaDoc does not generate correctly for
 MultithreadedMapRunner. (Chris Palmer via aajisaka)
 
+MAPREDUCE-6223. TestJobConf#testNegativeValueForTaskVmem failures. 
+(Varun Saxena via kasha)
+
 Release 2.6.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1047c883/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
index 98a643f..315c829 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
@@ -1826,8 +1826,7 @@ public class JobConf extends Configuration {
   public long getMemoryForMapTask() {
 long value = getDeprecatedMemoryValue();
 if (value < 0) {
-  return getLong(JobConf.MAPRED_JOB_MAP_MEMORY_MB_PROPERTY,
-  JobContext.DEFAULT_MAP_MEMORY_MB);
+  return getMemoryRequired(TaskType.MAP);
 }
 return value;
   }
@@ -1853,8 +1852,7 @@ public class JobConf extends Configuration {
   public long getMemoryForReduceTask() {
 long value = getDeprecatedMemoryValue();
 if (value < 0) {
-  return getLong(JobConf.MAPRED_JOB_REDUCE_MEMORY_MB_PROPERTY,
-  JobContext.DEFAULT_REDUCE_MEMORY_MB);
+  return getMemoryRequired(TaskType.REDUCE);
 }
 return value;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1047c883/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestJobConf.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestJobConf.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestJobConf.java
index e380d92..f67ba1f 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestJobConf.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestJobConf.java
@@ -19,9 +19,7 @@ package org.apache.hadoop.conf;
 
 import org.junit.Assert;
 import org.junit.Test;
-
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobContext;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 
 public class TestJobConf {
@@ -87,8 +85,10 @@ public class TestJobConf {
 configuration.set("mapred.task.maxvmem" , String.valueOf(-1));
 configuration.set(MRJobConfig.MAP_MEMORY_MB, "-1");
 configuration.set(MRJobConfig.REDUCE_MEMORY_MB, "-1");
-Assert.assertEquals(configuration.getMemoryForMapTask(),-1);
-Assert.assertEquals(configuration.getMemoryForReduceTask(),-1);
+Assert.assertEquals(configuration.getMemoryForMapTask(),
+MRJobConfig.DEFAULT_MAP_MEMORY_MB);
+

[04/17] hadoop git commit: HDFS-7843. A truncated file is corrupted after rollback from a rolling upgrade.

2015-02-26 Thread zjshen
HDFS-7843. A truncated file is corrupted after rollback from a rolling upgrade.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/606f5b51
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/606f5b51
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/606f5b51

Branch: refs/heads/YARN-2928
Commit: 606f5b517ffbeae0140a8c80b4cddc012c7fb3c4
Parents: d140d76
Author: Tsz-Wo Nicholas Sze szets...@hortonworks.com
Authored: Thu Feb 26 10:14:40 2015 +0800
Committer: Tsz-Wo Nicholas Sze szets...@hortonworks.com
Committed: Thu Feb 26 10:14:40 2015 +0800

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../hdfs/server/namenode/FSNamesystem.java  |  3 ++
 .../apache/hadoop/hdfs/TestRollingUpgrade.java  | 48 ++--
 3 files changed, 40 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/606f5b51/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index e0f9267..f8b0c37 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1026,6 +1026,9 @@ Release 2.7.0 - UNRELEASED
 HDFS-7763. fix zkfc hung issue due to not catching exception in a corner
 case. (Liang Xie via wang)
 
+HDFS-7843. A truncated file is corrupted after rollback from a rolling
+upgrade.  (szetszwo)
+
 BREAKDOWN OF HDFS-7584 SUBTASKS AND RELATED JIRAS
 
   HDFS-7720. Quota by Storage Type API, tools and ClientNameNode

http://git-wip-us.apache.org/repos/asf/hadoop/blob/606f5b51/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index 120a597..bbab09e 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -2077,6 +2077,9 @@ public class FSNamesystem implements Namesystem, 
FSNamesystemMBean,
 if(!isUpgradeFinalized()) {
   return true;
 }
+if (isRollingUpgrade()) {
+  return true;
+}
 return file.isBlockInLatestSnapshot(blk);
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/606f5b51/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestRollingUpgrade.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestRollingUpgrade.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestRollingUpgrade.java
index 8e7b4b1..9746049 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestRollingUpgrade.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestRollingUpgrade.java
@@ -23,9 +23,11 @@ import java.io.IOException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster.DataNodeProperties;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.RollingUpgradeAction;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo;
@@ -36,6 +38,7 @@ import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.FSImage;
 import org.apache.hadoop.hdfs.server.namenode.NNStorage;
 import org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode;
+import org.apache.hadoop.hdfs.server.namenode.TestFileTruncate;
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
 import org.apache.hadoop.io.IOUtils;
 import org.junit.Assert;
@@ -260,42 +263,50 @@ public class TestRollingUpgrade {
 final Configuration conf = new HdfsConfiguration();
 MiniDFSCluster cluster = null;
 try {
-  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
+  cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
   cluster.waitActive();
 
   final Path foo = new Path(/foo);
   final Path bar = new Path(/bar);
   cluster.getFileSystem().mkdirs(foo);
 
-  startRollingUpgrade(foo, 

[10/17] hadoop git commit: YARN-3217. Remove httpclient dependency from hadoop-yarn-server-web-proxy. Contributed by Brahma Reddy Battula.

2015-02-26 Thread zjshen
YARN-3217. Remove httpclient dependency from hadoop-yarn-server-web-proxy. 
Contributed by Brahma Reddy Battula.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/773b6515
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/773b6515
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/773b6515

Branch: refs/heads/YARN-2928
Commit: 773b6515ac51af3484824bd6f57685a9726a1e70
Parents: 0d4296f
Author: Tsuyoshi Ozawa oz...@apache.org
Authored: Fri Feb 27 00:22:46 2015 +0900
Committer: Tsuyoshi Ozawa oz...@apache.org
Committed: Fri Feb 27 00:24:29 2015 +0900

--
 hadoop-yarn-project/CHANGES.txt |  3 ++
 .../hadoop-yarn-server-web-proxy/pom.xml|  4 --
 .../server/webproxy/WebAppProxyServlet.java | 46 ++--
 3 files changed, 26 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/773b6515/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index ac3cbb2..a635592 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -327,6 +327,9 @@ Release 2.7.0 - UNRELEASED
 YARN-2797. Add -help to yarn logs and nodes CLI command. 
 (Jagadesh Kiran N via devaraj)
 
+YARN-3217. Remove httpclient dependency from hadoop-yarn-server-web-proxy.
+(Brahma Reddy Battula via ozawa).
+
   OPTIMIZATIONS
 
 YARN-2990. FairScheduler's delay-scheduling always waits for node-local 
and 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/773b6515/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
index fdba1fe..9801064 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
@@ -79,10 +79,6 @@
 </dependency>
 
     <dependency>
-      <groupId>commons-httpclient</groupId>
-      <artifactId>commons-httpclient</artifactId>
-    </dependency>
-    <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
     </dependency>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/773b6515/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
index 47f7769..fd98c80 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
@@ -40,13 +40,6 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import javax.ws.rs.core.UriBuilder;
 
-import org.apache.commons.httpclient.Header;
-import org.apache.commons.httpclient.HostConfiguration;
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.HttpMethod;
-import org.apache.commons.httpclient.cookie.CookiePolicy;
-import org.apache.commons.httpclient.methods.GetMethod;
-import org.apache.commons.httpclient.params.HttpClientParams;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
@@ -59,8 +52,15 @@ import org.apache.hadoop.yarn.util.TrackingUriPlugin;
 import org.apache.hadoop.yarn.webapp.MimeType;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
+import org.apache.http.Header;
+import org.apache.http.HttpResponse;
 import org.apache.http.NameValuePair;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.params.ClientPNames;
+import org.apache.http.client.params.CookiePolicy;
 import org.apache.http.client.utils.URLEncodedUtils;
+import org.apache.http.conn.params.ConnRoutePNames;
+import org.apache.http.impl.client.DefaultHttpClient;
 import org.slf4j.Logger;
 

[09/17] hadoop git commit: YARN-3256. TestClientToAMTokens#testClientTokenRace is not running against all Schedulers even when using ParameterizedSchedulerTestBase. Contributed by Anubhav Dhoot.

2015-02-26 Thread zjshen
YARN-3256. TestClientToAMTokens#testClientTokenRace is not running against
all Schedulers even when using ParameterizedSchedulerTestBase. Contributed
by Anubhav Dhoot.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0d4296f0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0d4296f0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0d4296f0

Branch: refs/heads/YARN-2928
Commit: 0d4296f0e0f545267f2e39a868d4ffefc9844db8
Parents: 166eecf
Author: Devaraj K deva...@apache.org
Authored: Thu Feb 26 15:45:41 2015 +0530
Committer: Devaraj K deva...@apache.org
Committed: Thu Feb 26 15:45:41 2015 +0530

--
 hadoop-yarn-project/CHANGES.txt  | 4 
 .../server/resourcemanager/security/TestClientToAMTokens.java| 3 ---
 2 files changed, 4 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0d4296f0/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index e5148eb..ac3cbb2 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -658,6 +658,10 @@ Release 2.7.0 - UNRELEASED
 YARN-3239. WebAppProxy does not support a final tracking url which has
 query fragments and params (Jian He via jlowe)
 
+YARN-3256. TestClientToAMTokens#testClientTokenRace is not running against 
+all Schedulers even when using ParameterizedSchedulerTestBase. 
+(Anubhav Dhoot via devaraj)
+
 Release 2.6.0 - 2014-11-18
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0d4296f0/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
index 78bc728..499b4d1 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java
@@ -33,7 +33,6 @@ import java.net.InetSocketAddress;
 import java.nio.ByteBuffer;
 import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
-import java.util.Arrays;
 import java.util.Timer;
 import java.util.TimerTask;
 
@@ -43,7 +42,6 @@ import org.junit.Assert;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.Server;
@@ -421,7 +419,6 @@ public class TestClientToAMTokens extends 
ParameterizedSchedulerTestBase {
   @Test(timeout=2)
   public void testClientTokenRace() throws Exception {
 
-final Configuration conf = new Configuration();
 conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
  "kerberos");
 UserGroupInformation.setConfiguration(conf);



[05/17] hadoop git commit: HDFS-7460. Rewrite httpfs to use new shell framework (John Smith via aw)

2015-02-26 Thread zjshen
HDFS-7460. Rewrite httpfs to use new shell framework (John Smith via aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8c4f76aa
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8c4f76aa
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8c4f76aa

Branch: refs/heads/YARN-2928
Commit: 8c4f76aa20e75635bd6d3de14924ec246a8a071a
Parents: 606f5b5
Author: Allen Wittenauer a...@apache.org
Authored: Wed Feb 25 18:57:41 2015 -0800
Committer: Allen Wittenauer a...@apache.org
Committed: Wed Feb 25 18:57:41 2015 -0800

--
 hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml  |   2 +-
 .../src/main/conf/httpfs-env.sh |  51 +++--
 .../src/main/libexec/httpfs-config.sh   | 208 ---
 .../hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh  | 116 +++
 .../src/main/tomcat/ssl-server.xml  | 135 
 .../src/main/tomcat/ssl-server.xml.conf | 135 
 .../src/site/apt/ServerSetup.apt.vm | 159 --
 .../src/site/apt/UsingHttpTools.apt.vm  |  87 
 .../src/site/apt/index.apt.vm   |  83 
 .../src/site/markdown/ServerSetup.md.vm | 121 +++
 .../src/site/markdown/UsingHttpTools.md |  62 ++
 .../src/site/markdown/index.md  |  52 +
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |   2 +
 13 files changed, 533 insertions(+), 680 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8c4f76aa/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml 
b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
index 4c42ef9..ddc6033 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
@@ -524,7 +524,7 @@
                <copy file="${basedir}/src/main/tomcat/server.xml"
                  toDir="${httpfs.tomcat.dist.dir}/conf"/>
                <delete file="${httpfs.tomcat.dist.dir}/conf/ssl-server.xml"/>
-               <copy file="${basedir}/src/main/tomcat/ssl-server.xml"
+               <copy file="${basedir}/src/main/tomcat/ssl-server.xml.conf"
                  toDir="${httpfs.tomcat.dist.dir}/conf"/>
                <delete file="${httpfs.tomcat.dist.dir}/conf/logging.properties"/>
                <copy file="${basedir}/src/main/tomcat/logging.properties"

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8c4f76aa/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh 
b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh
index a2701d4..0e8cc40 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh
@@ -14,40 +14,59 @@
 #
 
 # Set httpfs specific environment variables here.
-
-# Settings for the Embedded Tomcat that runs HttpFS
-# Java System properties for HttpFS should be specified in this variable
 #
-# export CATALINA_OPTS=
-
-# HttpFS logs directory
+# hadoop-env.sh is read prior to this file.
 #
-# export HTTPFS_LOG=${HTTPFS_HOME}/logs
 
-# HttpFS temporary directory
+# HTTPFS temporary directory
 #
-# export HTTPFS_TEMP=${HTTPFS_HOME}/temp
+# export HTTPFS_TEMP=${HADOOP_PREFIX}/temp
 
-# The HTTP port used by HttpFS
+# The HTTP port used by HTTPFS
 #
 # export HTTPFS_HTTP_PORT=14000
 
-# The Admin port used by HttpFS
+# The Admin port used by HTTPFS
 #
-# export HTTPFS_ADMIN_PORT=`expr ${HTTPFS_HTTP_PORT} + 1`
+# export HTTPFS_ADMIN_PORT=$((HTTPFS_HTTP_PORT + 1))
 
-# The hostname HttpFS server runs on
+# The maximum number of Tomcat handler threads
 #
-# export HTTPFS_HTTP_HOSTNAME=`hostname -f`
+# export HTTPFS_MAX_THREADS=1000
 
-# Indicates if HttpFS is using SSL
+# The hostname HttpFS server runs on
 #
-# export HTTPFS_SSL_ENABLED=false
+# export HTTPFS_HTTP_HOSTNAME=$(hostname -f)
 
 # The location of the SSL keystore if using SSL
 #
 # export HTTPFS_SSL_KEYSTORE_FILE=${HOME}/.keystore
 
+#
 # The password of the SSL keystore if using SSL
 #
 # export HTTPFS_SSL_KEYSTORE_PASS=password
+
+##
+## Tomcat specific settings
+##
+#
+# Location of tomcat
+#
+# export HTTPFS_CATALINA_HOME=${HADOOP_PREFIX}/share/hadoop/httpfs/tomcat
+
+# Java System properties for HTTPFS should be specified in this variable.
+# The java.library.path and hadoop.home.dir properties are automatically
+# configured.  In order to supplement java.library.path,
+# one should add to the JAVA_LIBRARY_PATH env var.
+#
+# export CATALINA_OPTS=
+
+# PID file
+#
+# export 

[06/17] hadoop git commit: HDFS-7537. Add UNDER MIN REPL'D BLOCKS count to fsck. Contributed by GAO Rui

2015-02-26 Thread zjshen
HDFS-7537. Add UNDER MIN REPL'D BLOCKS count to fsck.  Contributed by GAO Rui


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/725cc499
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/725cc499
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/725cc499

Branch: refs/heads/YARN-2928
Commit: 725cc499f00abeeab9f58cbc778e65522eec9d98
Parents: 8c4f76a
Author: Tsz-Wo Nicholas Sze szets...@hortonworks.com
Authored: Thu Feb 26 11:45:56 2015 +0800
Committer: Tsz-Wo Nicholas Sze szets...@hortonworks.com
Committed: Thu Feb 26 11:45:56 2015 +0800

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +
 .../hdfs/server/namenode/NamenodeFsck.java  | 36 +++--
 .../hadoop/hdfs/server/namenode/TestFsck.java   | 81 +++-
 3 files changed, 111 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/725cc499/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 6dc7a0f..4523bf4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -673,6 +673,9 @@ Release 2.7.0 - UNRELEASED
 HDFS-7495. Remove updatePosition argument from DFSInputStream#getBlockAt()
 (cmccabe)
 
+HDFS-7537. Add UNDER MIN REPL'D BLOCKS count to fsck.  (GAO Rui via
+szetszwo)
+
   OPTIMIZATIONS
 
 HDFS-7454. Reduce memory footprint for AclEntries in NameNode.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/725cc499/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
index 5134f3c..36b4461 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
@@ -512,6 +512,9 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
   res.totalReplicas += liveReplicas;
   short targetFileReplication = file.getReplication();
   res.numExpectedReplicas += targetFileReplication;
+  if(liveReplicas < minReplication){
+res.numUnderMinReplicatedBlocks++;
+  }
   if (liveReplicas > targetFileReplication) {
 res.excessiveReplicas += (liveReplicas - targetFileReplication);
 res.numOverReplicatedBlocks += 1;
@@ -858,6 +861,7 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
 long corruptBlocks = 0L;
 long excessiveReplicas = 0L;
 long missingReplicas = 0L;
+long numUnderMinReplicatedBlocks=0L;
 long numOverReplicatedBlocks = 0L;
 long numUnderReplicatedBlocks = 0L;
 long numMisReplicatedBlocks = 0L;  // blocks that do not satisfy block 
placement policy
@@ -874,10 +878,13 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
 long totalReplicas = 0L;
 
 final short replication;
+final int minReplication;
 
 Result(Configuration conf) {
   this.replication = (short)conf.getInt(DFSConfigKeys.DFS_REPLICATION_KEY, 
 
DFSConfigKeys.DFS_REPLICATION_DEFAULT);
+  this.minReplication = 
(short)conf.getInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_KEY,
+
DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_DEFAULT);
 }
 
 /**
@@ -925,15 +932,28 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
 res.append(" (Total open file blocks (not validated): ").append(
 totalOpenFilesBlocks).append(")");
   }
-  if (corruptFiles > 0) {
-res.append("\n  ").append(
-"\n  CORRUPT FILES:\t").append(corruptFiles);
-if (missingSize > 0) {
-  res.append("\n  MISSING BLOCKS:\t").append(missingIds.size()).append(
-  "\n  MISSING SIZE:\t\t").append(missingSize).append(" B");
+  if (corruptFiles > 0 || numUnderMinReplicatedBlocks > 0) {
+res.append("\n  ");
+if(numUnderMinReplicatedBlocks > 0){
+  res.append("\n  UNDER MIN REPL'D BLOCKS:\t").append(numUnderMinReplicatedBlocks);
+  if(totalBlocks > 0){
+res.append(" (").append(
+((float) (numUnderMinReplicatedBlocks * 100) / (float) totalBlocks))
+.append(" %)");
+  }
+  res.append(\n  
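A tiny self-contained illustration of the percentage arithmetic printed by the new fsck output above; the counts are made-up example values, not real fsck data.

public class UnderMinReplPercentExample {
  public static void main(String[] args) {
    long numUnderMinReplicatedBlocks = 3L;  // example value
    long totalBlocks = 200L;                // example value
    StringBuilder res = new StringBuilder();
    res.append("\n  UNDER MIN REPL'D BLOCKS:\t").append(numUnderMinReplicatedBlocks);
    if (totalBlocks > 0) {
      res.append(" (")
          .append((float) (numUnderMinReplicatedBlocks * 100) / (float) totalBlocks)
          .append(" %)");
    }
    System.out.println(res);  // prints: UNDER MIN REPL'D BLOCKS:   3 (1.5 %)
  }
}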

[16/17] hadoop git commit: MAPREDUCE-5612. Add javadoc for TaskCompletionEvent.Status. Contributed by Chris Palmer.

2015-02-26 Thread zjshen
MAPREDUCE-5612. Add javadoc for TaskCompletionEvent.Status. Contributed by 
Chris Palmer.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bfbf076b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bfbf076b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bfbf076b

Branch: refs/heads/YARN-2928
Commit: bfbf076b7dcb71b59d85b05d0a6da88195ea9902
Parents: 1047c88
Author: Akira Ajisaka aajis...@apache.org
Authored: Thu Feb 26 15:05:17 2015 -0800
Committer: Akira Ajisaka aajis...@apache.org
Committed: Thu Feb 26 15:05:17 2015 -0800

--
 hadoop-mapreduce-project/CHANGES.txt|  3 ++
 .../hadoop/mapred/TaskCompletionEvent.java  | 32 +++-
 .../hadoop/mapreduce/TaskCompletionEvent.java   | 32 +++-
 3 files changed, 65 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/bfbf076b/hadoop-mapreduce-project/CHANGES.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.txt 
b/hadoop-mapreduce-project/CHANGES.txt
index 6cb1754..f509d4e 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -308,6 +308,9 @@ Release 2.7.0 - UNRELEASED
 MAPREDUCE-6264. Remove httpclient dependency from hadoop-mapreduce-client.
 (Brahma Reddy Battula via aajisaka)
 
+MAPREDUCE-5612. Add javadoc for TaskCompletionEvent.Status.
+(Chris Palmer via aajisaka)
+
   OPTIMIZATIONS
 
 MAPREDUCE-6169. MergeQueue should release reference to the current item 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bfbf076b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
index dc4d82e..2bb55a2 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
@@ -32,7 +32,37 @@ public class TaskCompletionEvent
 extends org.apache.hadoop.mapreduce.TaskCompletionEvent {
   @InterfaceAudience.Public
   @InterfaceStability.Stable
-  static public enum Status {FAILED, KILLED, SUCCEEDED, OBSOLETE, TIPFAILED};
+  /**
+   *  Task Completion Statuses
+   */
+  static public enum Status {
+/**
+ * Task Event Attempt failed but there are attempts remaining.
+ */
+FAILED,
+/**
+ * Task Event was killed.
+ */
+KILLED,
+/**
+ * Task Event was successful.
+ */
+SUCCEEDED,
+/**
+ * Used to Override a previously successful event status.
+ * Example:  Map attempt runs and a SUCCEEDED event is sent. Later a task
+ * is retroactively failed due to excessive fetch failure during shuffle
+ * phase. When the retroactive attempt failure occurs, an OBSOLETE event is
+ * sent for the map attempt indicating the prior event is no longer valid.
+ */
+OBSOLETE,
+/**
+ * Task Event attempt failed and no further attempts exist.
+ * reached MAX attempts. When a reducer receives a TIPFAILED event it
+ * gives up trying to shuffle data from that map task.
+ */
+TIPFAILED
+  }
   
   public static final TaskCompletionEvent[] EMPTY_ARRAY = 
new TaskCompletionEvent[0];

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bfbf076b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
index 31643a9..21c3823 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
@@ -36,7 +36,37 @@ import org.apache.hadoop.io.WritableUtils;
 public class TaskCompletionEvent 

[17/17] hadoop git commit: Merge remote-tracking branch 'apache/trunk' into YARN-2928

2015-02-26 Thread zjshen
Merge remote-tracking branch 'apache/trunk' into YARN-2928


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7788f977
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7788f977
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7788f977

Branch: refs/heads/YARN-2928
Commit: 7788f9773aab9c312b75834209d5debeda9d2820
Parents: 6bb198e bfbf076
Author: Zhijie Shen zjs...@apache.org
Authored: Thu Feb 26 15:10:08 2015 -0800
Committer: Zhijie Shen zjs...@apache.org
Committed: Thu Feb 26 15:10:08 2015 -0800

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  11 +
 .../src/main/bin/hadoop-functions.sh|   2 +-
 .../crypto/key/kms/KMSClientProvider.java   |  84 -
 .../key/kms/LoadBalancingKMSClientProvider.java | 347 +++
 .../hadoop-common/src/main/native/native.sln|   8 +-
 .../src/main/native/native.vcxproj  |  40 +++
 .../src/main/winutils/include/winutils.h|   5 +-
 .../src/main/winutils/libwinutils.c |   2 +-
 .../src/main/winutils/libwinutils.vcxproj   |  64 +++-
 .../hadoop-common/src/main/winutils/service.c   |   8 +-
 .../hadoop-common/src/main/winutils/task.c  |   2 +-
 .../src/main/winutils/winutils.sln  |  10 +
 .../src/main/winutils/winutils.vcxproj  |  61 +++-
 .../src/site/markdown/RackAwareness.md  |   5 +-
 .../kms/TestLoadBalancingKMSClientProvider.java | 166 +
 .../hadoop-kms/src/main/conf/kms-env.sh |   2 +-
 .../hadoop-kms/src/main/libexec/kms-config.sh   |   2 +-
 .../hadoop-kms/src/main/sbin/kms.sh |   2 +-
 .../hadoop/crypto/key/kms/server/TestKMS.java   | 114 +++---
 hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml  |   2 +-
 .../src/main/conf/httpfs-env.sh |  53 ++-
 .../src/main/libexec/httpfs-config.sh   | 206 +++
 .../hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh  | 114 --
 .../src/main/tomcat/ssl-server.xml  | 135 
 .../src/main/tomcat/ssl-server.xml.conf | 135 
 .../src/site/apt/ServerSetup.apt.vm | 159 -
 .../src/site/apt/UsingHttpTools.apt.vm  |  87 -
 .../src/site/apt/index.apt.vm   |  83 -
 .../src/site/markdown/ServerSetup.md.vm | 121 +++
 .../src/site/markdown/UsingHttpTools.md |  62 
 .../src/site/markdown/index.md  |  52 +++
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  20 ++
 hadoop-hdfs-project/hadoop-hdfs/pom.xml |   5 +-
 .../org/apache/hadoop/hdfs/DFSInputStream.java  |  33 +-
 .../hdfs/server/datanode/DirectoryScanner.java  |  26 +-
 .../hdfs/server/namenode/FSNamesystem.java  |   3 +
 .../hdfs/server/namenode/NamenodeFsck.java  |  59 +++-
 .../server/namenode/StoragePolicySummary.java   | 257 ++
 .../org/apache/hadoop/hdfs/tools/DFSck.java |   2 +
 .../src/main/native/libhdfs/os/windows/thread.c |   2 +-
 .../libhdfs/os/windows/thread_local_storage.c   |   8 +
 .../src/main/webapps/hdfs/explorer.html |   2 +
 .../src/main/webapps/hdfs/explorer.js   |   9 +-
 .../apache/hadoop/hdfs/TestRollingUpgrade.java  |  48 ++-
 .../hadoop/hdfs/server/namenode/TestFsck.java   | 159 -
 .../namenode/TestStoragePolicySummary.java  | 201 +++
 hadoop-mapreduce-project/CHANGES.txt|   6 +
 .../java/org/apache/hadoop/mapred/JobConf.java  |   6 +-
 .../hadoop/mapred/TaskCompletionEvent.java  |  32 +-
 .../hadoop/mapreduce/TaskCompletionEvent.java   |  32 +-
 .../org/apache/hadoop/conf/TestJobConf.java |  23 +-
 .../fs/azure/AzureNativeFileSystemStore.java|  29 +-
 .../hadoop-sls/src/main/bin/rumen2sls.sh|   2 +-
 hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh  |   2 +-
 hadoop-yarn-project/CHANGES.txt |   7 +
 .../security/TestClientToAMTokens.java  |   3 -
 .../hadoop-yarn-server-web-proxy/pom.xml|   4 -
 .../server/webproxy/WebAppProxyServlet.java |  46 +--
 58 files changed, 2290 insertions(+), 880 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7788f977/hadoop-yarn-project/CHANGES.txt
--



[07/17] hadoop git commit: HADOOP-11620. Add support for load balancing across a group of KMS for HA. Contributed by Arun Suresh.

2015-02-26 Thread zjshen
HADOOP-11620. Add support for load balancing across a group of KMS for HA. 
Contributed by Arun Suresh.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/71385f9b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/71385f9b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/71385f9b

Branch: refs/heads/YARN-2928
Commit: 71385f9b70e22618db3f3d2b2c6dca3b1e82c317
Parents: 725cc49
Author: Andrew Wang w...@apache.org
Authored: Wed Feb 25 21:15:44 2015 -0800
Committer: Andrew Wang w...@apache.org
Committed: Wed Feb 25 21:16:37 2015 -0800

--
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 .../crypto/key/kms/KMSClientProvider.java   |  84 -
 .../key/kms/LoadBalancingKMSClientProvider.java | 347 +++
 .../kms/TestLoadBalancingKMSClientProvider.java | 166 +
 .../hadoop/crypto/key/kms/server/TestKMS.java   | 114 +++---
 5 files changed, 654 insertions(+), 60 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/71385f9b/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 0d452f7..39062a8 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -648,6 +648,9 @@ Release 2.7.0 - UNRELEASED
 HADOOP-11506. Configuration variable expansion regex expensive for long
 values. (Gera Shegalov via gera)
 
+HADOOP-11620. Add support for load balancing across a group of KMS for HA.
+(Arun Suresh via wang)
+
   BUG FIXES
 
 HADOOP-11512. Use getTrimmedStrings when reading serialization keys

http://git-wip-us.apache.org/repos/asf/hadoop/blob/71385f9b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
index 97ab253..223e69a 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
@@ -52,6 +52,7 @@ import java.io.Writer;
 import java.lang.reflect.UndeclaredThrowableException;
 import java.net.HttpURLConnection;
 import java.net.InetSocketAddress;
+import java.net.MalformedURLException;
 import java.net.SocketTimeoutException;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -74,6 +75,7 @@ import 
org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
 
 /**
  * KMS client codeKeyProvider/code implementation.
@@ -221,14 +223,71 @@ public class KMSClientProvider extends KeyProvider 
implements CryptoExtension,
*/
   public static class Factory extends KeyProviderFactory {
 
+/**
+ * This provider expects URIs in the following form :
+ * kms://<PROTO>@<AUTHORITY>/<PATH>
+ *
+ * where :
+ * - PROTO = http or https
+ * - AUTHORITY = <HOSTS>[:<PORT>]
+ * - HOSTS = <HOSTNAME>[;<HOSTS>]
+ * - HOSTNAME = string
+ * - PORT = integer
+ *
+ * If multiple hosts are provided, the Factory will create a
+ * {@link LoadBalancingKMSClientProvider} that round-robins requests
+ * across the provided list of hosts.
+ */
 @Override
-public KeyProvider createProvider(URI providerName, Configuration conf)
+public KeyProvider createProvider(URI providerUri, Configuration conf)
 throws IOException {
-  if (SCHEME_NAME.equals(providerName.getScheme())) {
-return new KMSClientProvider(providerName, conf);
+  if (SCHEME_NAME.equals(providerUri.getScheme())) {
+URL origUrl = new URL(extractKMSPath(providerUri).toString());
+String authority = origUrl.getAuthority();
+// check for ';' which delimits the backup hosts
+if (Strings.isNullOrEmpty(authority)) {
+  throw new IOException(
+  "No valid authority in kms uri [" + origUrl + "]");
+}
+// Check if port is present in authority
+// In the current scheme, all hosts have to run on the same port
+int port = -1;
+String hostsPart = authority;
+if (authority.contains(":")) {
+  String[] t = authority.split(":");
+  try {
+port = Integer.parseInt(t[1]);
+  } catch (Exception e) {

[12/17] hadoop git commit: HDFS-7819. Log WARN message for the blocks which are not in Block ID based layout (Rakesh R via Colin P. McCabe)

2015-02-26 Thread zjshen
HDFS-7819. Log WARN message for the blocks which are not in Block ID based 
layout (Rakesh R via Colin P. McCabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f0c980ab
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f0c980ab
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f0c980ab

Branch: refs/heads/YARN-2928
Commit: f0c980abed3843923e0eb16b626fa27334195eda
Parents: dce8b9c
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Thu Feb 26 11:58:29 2015 -0800
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Thu Feb 26 11:58:29 2015 -0800

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../hdfs/server/datanode/DirectoryScanner.java  | 26 +---
 2 files changed, 25 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f0c980ab/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index e09714f..54b4057 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -679,6 +679,9 @@ Release 2.7.0 - UNRELEASED
 HDFS-7832. Show 'Last Modified' in Namenode's 'Browse Filesystem'
 (vinayakumarb)
 
+HDFS-7819. Log WARN message for the blocks which are not in Block ID based
+layout (Rakesh R via Colin P. McCabe)
+
   OPTIMIZATIONS
 
 HDFS-7454. Reduce memory footprint for AclEntries in NameNode.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f0c980ab/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
index 71f976b..09c2914 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
@@ -597,14 +597,15 @@ public class DirectoryScanner implements Runnable {
   for (String bpid : bpList) {
 LinkedList<ScanInfo> report = new LinkedList<ScanInfo>();
 File bpFinalizedDir = volume.getFinalizedDir(bpid);
-result.put(bpid, compileReport(volume, bpFinalizedDir, report));
+result.put(bpid,
+compileReport(volume, bpFinalizedDir, bpFinalizedDir, report));
   }
   return result;
 }
 
 /** Compile list {@link ScanInfo} for the blocks in the directory dir */
-private LinkedList<ScanInfo> compileReport(FsVolumeSpi vol, File dir,
-LinkedList<ScanInfo> report) {
+private LinkedList<ScanInfo> compileReport(FsVolumeSpi vol,
+File bpFinalizedDir, File dir, LinkedList<ScanInfo> report) {
   File[] files;
   try {
 files = FileUtil.listFiles(dir);
@@ -622,12 +623,14 @@ public class DirectoryScanner implements Runnable {
*/
   for (int i = 0; i < files.length; i++) {
 if (files[i].isDirectory()) {
-  compileReport(vol, files[i], report);
+  compileReport(vol, bpFinalizedDir, files[i], report);
   continue;
 }
 if (!Block.isBlockFilename(files[i])) {
   if (isBlockMetaFile("blk_", files[i].getName())) {
 long blockId = Block.getBlockId(files[i].getName());
+verifyFileLocation(files[i].getParentFile(), bpFinalizedDir,
+blockId);
 report.add(new ScanInfo(blockId, null, files[i], vol));
   }
   continue;
@@ -646,9 +649,24 @@ public class DirectoryScanner implements Runnable {
 break;
   }
 }
+verifyFileLocation(blockFile.getParentFile(), bpFinalizedDir,
+blockId);
 report.add(new ScanInfo(blockId, blockFile, metaFile, vol));
   }
   return report;
 }
+
+/**
+ * Verify whether the actual directory location of block file has the
+ * expected directory path computed using its block ID.
+ */
+private void verifyFileLocation(File actualBlockDir,
+File bpFinalizedDir, long blockId) {
+  File blockDir = DatanodeUtil.idToBlockDir(bpFinalizedDir, blockId);
+  if (actualBlockDir.compareTo(blockDir) != 0) {
+LOG.warn("Block: " + blockId
++ " has to be upgraded to block ID-based layout");
+  }
+}
   }
 }
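A standalone sketch of the kind of check verifyFileLocation performs: compare the directory a block file actually sits in with the directory its block ID maps to, and warn when they differ. The idToBlockDir formula below is a made-up illustration, not the real DatanodeUtil mapping.

import java.io.File;

public class BlockLayoutCheckSketch {
  // Hypothetical two-level mapping from a block ID to its expected directory.
  static File idToBlockDir(File bpFinalizedDir, long blockId) {
    long d1 = (blockId >> 16) & 0xFF;  // illustrative bit slicing only
    long d2 = (blockId >> 8) & 0xFF;
    return new File(bpFinalizedDir, "subdir" + d1 + File.separator + "subdir" + d2);
  }

  static void verifyFileLocation(File actualBlockDir, File bpFinalizedDir, long blockId) {
    File expected = idToBlockDir(bpFinalizedDir, blockId);
    if (actualBlockDir.compareTo(expected) != 0) {
      System.out.println("WARN Block: " + blockId
          + " has to be upgraded to block ID-based layout");
    }
  }

  public static void main(String[] args) {
    File finalized = new File("/data/dn/current/BP-1/current/finalized");
    // A block file sitting directly under finalized/ (old flat layout) differs
    // from its expected subdirectory, so the warning fires.
    verifyFileLocation(finalized, finalized, 1073741825L);
  }
}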



[11/17] hadoop git commit: HADOOP-11637. bash location hard-coded in shell scripts (aw)

2015-02-26 Thread zjshen
HADOOP-11637. bash location hard-coded in shell scripts (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/dce8b9c4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/dce8b9c4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/dce8b9c4

Branch: refs/heads/YARN-2928
Commit: dce8b9c4d0b2da1780f743d81e840ca0fdfc62cf
Parents: 773b651
Author: Allen Wittenauer a...@apache.org
Authored: Thu Feb 26 09:29:16 2015 -0800
Committer: Allen Wittenauer a...@apache.org
Committed: Thu Feb 26 09:29:16 2015 -0800

--
 hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++
 .../hadoop-common/src/main/bin/hadoop-functions.sh  | 2 +-
 .../hadoop-common/src/site/markdown/RackAwareness.md| 5 +++--
 hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh   | 2 +-
 hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh | 2 +-
 hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh   | 2 +-
 .../hadoop-hdfs-httpfs/src/main/conf/httpfs-env.sh  | 2 +-
 .../hadoop-hdfs-httpfs/src/main/libexec/httpfs-config.sh| 2 +-
 hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh  | 2 +-
 hadoop-tools/hadoop-sls/src/main/bin/rumen2sls.sh   | 2 +-
 hadoop-tools/hadoop-sls/src/main/bin/slsrun.sh  | 2 +-
 11 files changed, 14 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce8b9c4/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 39062a8..ca27463 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -404,6 +404,8 @@ Trunk (Unreleased)
 
 HADOOP-11625. Minor fixes to command manual  SLA doc (aw)
 
+HADOOP-11637. bash location hard-coded in shell scripts (aw)
+
   OPTIMIZATIONS
 
 HADOOP-7761. Improve the performance of raw comparisons. (todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce8b9c4/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh 
b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
index cec6b2c..bccbe25 100644
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
 # this work for additional information regarding copyright ownership.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce8b9c4/hadoop-common-project/hadoop-common/src/site/markdown/RackAwareness.md
--
diff --git 
a/hadoop-common-project/hadoop-common/src/site/markdown/RackAwareness.md 
b/hadoop-common-project/hadoop-common/src/site/markdown/RackAwareness.md
index c5ab19a..09f5610 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/RackAwareness.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/RackAwareness.md
@@ -105,7 +105,7 @@ bash Example
 
 
 ```bash
-#!/bin/bash
+#!/usr/bin/env bash
 # Here's a bash example to show just how simple these scripts can be
 # Assuming we have flat network with everything on a single switch, we can 
fake a rack topology.
 # This could occur in a lab environment where we have limited nodes,like 2-8 
physical machines on a unmanaged switch.
@@ -133,4 +133,5 @@ bash Example
 #fails to split on four dots, it will still print '/rack-' last field value
 
 echo $@ | xargs -n 1 | awk -F '.' '{print "/rack-"$NF}'
-```
\ No newline at end of file
+```
+

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dce8b9c4/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
--
diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh 
b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
index de9554f..41449ef 100644
--- a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
+++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 #
 # Licensed under the Apache License, Version 2.0 (the License);
 # you may not use this file except in compliance with the License.


[14/17] hadoop git commit: HDFS-7774. Unresolved symbols error while compiling HDFS on Windows 7/32 bit. Contributed by Kiran Kumar M R.

2015-02-26 Thread zjshen
HDFS-7774. Unresolved symbols error while compiling HDFS on Windows 7/32 bit. 
Contributed by Kiran Kumar M R.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c6d5b37a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c6d5b37a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c6d5b37a

Branch: refs/heads/YARN-2928
Commit: c6d5b37a1c4c3acc190f5f4e27109594efb7be8c
Parents: 2214dab
Author: cnauroth cnaur...@apache.org
Authored: Thu Feb 26 13:37:46 2015 -0800
Committer: cnauroth cnaur...@apache.org
Committed: Thu Feb 26 13:37:46 2015 -0800

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt  | 3 +++
 hadoop-hdfs-project/hadoop-hdfs/pom.xml  | 5 -
 .../hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c  | 2 +-
 .../main/native/libhdfs/os/windows/thread_local_storage.c| 8 
 4 files changed, 16 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6d5b37a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 54b4057..ae83898 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1040,6 +1040,9 @@ Release 2.7.0 - UNRELEASED
 HDFS-7843. A truncated file is corrupted after rollback from a rolling
 upgrade.  (szetszwo)
 
+HDFS-7774. Unresolved symbols error while compiling HDFS on Windows 7/32 
bit.
+(Kiran Kumar M R via cnauroth)
+
 BREAKDOWN OF HDFS-7584 SUBTASKS AND RELATED JIRAS
 
   HDFS-7720. Quota by Storage Type API, tools and ClientNameNode

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6d5b37a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml 
b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
index 5efce5c..2d402a9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -440,10 +440,13 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd;
                 </goals>
                 <configuration>
                   <target>
+                    <condition property="generator" value="Visual Studio 10" else="Visual Studio 10 Win64">
+                      <equals arg1="Win32" arg2="${env.PLATFORM}" />
+                    </condition>
                     <mkdir dir="${project.build.directory}/native"/>
                     <exec executable="cmake" dir="${project.build.directory}/native"
                         failonerror="true">
-                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_LIBWEBHDFS=${require.libwebhdfs} -DREQUIRE_FUSE=${require.fuse} -G 'Visual Studio 10 Win64'"/>
+                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_LIBWEBHDFS=${require.libwebhdfs} -DREQUIRE_FUSE=${require.fuse} -G '${generator}'"/>
                     </exec>
                     <exec executable="msbuild" dir="${project.build.directory}/native"
                         failonerror="true">

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6d5b37a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c
index 90450d8..f5cc2a7 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread.c
@@ -28,7 +28,7 @@
  * @param toRun thread to run
  * @return DWORD result of running thread (always 0)
  */
-static DWORD runThread(LPVOID toRun) {
+static DWORD WINAPI runThread(LPVOID toRun) {
   const thread *t = toRun;
   t-start(t-arg);
   return 0;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6d5b37a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread_local_storage.c
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread_local_storage.c
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread_local_storage.c
index 70ad152..4c415e1 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread_local_storage.c
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/native/libhdfs/os/windows/thread_local_storage.c
@@ -96,13 +96,21 

[13/17] hadoop git commit: HADOOP-9922. hadoop windows native build will fail in 32 bit machine. Contributed by Kiran Kumar M R.

2015-02-26 Thread zjshen
HADOOP-9922. hadoop windows native build will fail in 32 bit machine. 
Contributed by Kiran Kumar M R.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2214dab6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2214dab6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2214dab6

Branch: refs/heads/YARN-2928
Commit: 2214dab60ff11b8de74c9d661585452a078fe0c1
Parents: f0c980a
Author: cnauroth cnaur...@apache.org
Authored: Thu Feb 26 12:41:33 2015 -0800
Committer: cnauroth cnaur...@apache.org
Committed: Thu Feb 26 12:41:33 2015 -0800

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +
 .../hadoop-common/src/main/native/native.sln|  8 +--
 .../src/main/native/native.vcxproj  | 40 
 .../src/main/winutils/include/winutils.h|  5 +-
 .../src/main/winutils/libwinutils.c |  2 +-
 .../src/main/winutils/libwinutils.vcxproj   | 64 +++-
 .../hadoop-common/src/main/winutils/service.c   |  8 +--
 .../hadoop-common/src/main/winutils/task.c  |  2 +-
 .../src/main/winutils/winutils.sln  | 10 +++
 .../src/main/winutils/winutils.vcxproj  | 61 ++-
 10 files changed, 189 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2214dab6/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index ca27463..1d9a6d4 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -1010,6 +1010,9 @@ Release 2.7.0 - UNRELEASED
 HADOOP-11629. WASB filesystem should not start BandwidthGaugeUpdater if
 fs.azure.skip.metrics set to true. (Shanyu Zhao via cnauroth)
 
+HADOOP-9922. hadoop windows native build will fail in 32 bit machine.
+(Kiran Kumar M R via cnauroth)
+
 Release 2.6.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2214dab6/hadoop-common-project/hadoop-common/src/main/native/native.sln
--
diff --git a/hadoop-common-project/hadoop-common/src/main/native/native.sln 
b/hadoop-common-project/hadoop-common/src/main/native/native.sln
index 40a7821..54bc17e 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/native.sln
+++ b/hadoop-common-project/hadoop-common/src/main/native/native.sln
@@ -31,14 +31,14 @@ Global
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Mixed 
Platforms.ActiveCfg = Release|x64
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Mixed 
Platforms.Build.0 = Release|x64
-   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.ActiveCfg = 
Release|x64
-   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.Build.0 = 
Release|x64
+   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.ActiveCfg = 
Release|Win32
+   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.Build.0 = 
Release|Win32
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|x64.ActiveCfg = 
Release|x64
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|x64.Build.0 = 
Release|x64
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Mixed 
Platforms.ActiveCfg = Release|x64
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Mixed 
Platforms.Build.0 = Release|x64
-   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.ActiveCfg 
= Release|x64
-   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.Build.0 = 
Release|x64
+   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.ActiveCfg 
= Release|Win32
+   {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.Build.0 = 
Release|Win32
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|x64.ActiveCfg = 
Release|x64
{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|x64.Build.0 = 
Release|x64
EndGlobalSection

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2214dab6/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
--
diff --git a/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj 
b/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
index 2d60e56..0912c6a 100644
--- a/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
+++ b/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
@@ -19,6 +19,10 @@
 
 Project DefaultTargets=CheckRequireSnappy;Build ToolsVersion=4.0 

[15/17] hadoop git commit: MAPREDUCE-6223. TestJobConf#testNegativeValueForTaskVmem failures. (Varun Saxena via kasha)

2015-02-26 Thread zjshen
MAPREDUCE-6223. TestJobConf#testNegativeValueForTaskVmem failures. (Varun 
Saxena via kasha)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1047c883
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1047c883
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1047c883

Branch: refs/heads/YARN-2928
Commit: 1047c883ba01a252dbd4203e525fb1ff8ea313aa
Parents: c6d5b37
Author: Karthik Kambatla ka...@apache.org
Authored: Thu Feb 26 14:24:19 2015 -0800
Committer: Karthik Kambatla ka...@apache.org
Committed: Thu Feb 26 14:24:19 2015 -0800

--
 hadoop-mapreduce-project/CHANGES.txt|  3 +++
 .../java/org/apache/hadoop/mapred/JobConf.java  |  6 ++---
 .../org/apache/hadoop/conf/TestJobConf.java | 23 ++--
 3 files changed, 17 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1047c883/hadoop-mapreduce-project/CHANGES.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.txt 
b/hadoop-mapreduce-project/CHANGES.txt
index d2c4ab8..6cb1754 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -390,6 +390,9 @@ Release 2.7.0 - UNRELEASED
 MAPREDUCE-2815. JavaDoc does not generate correctly for
 MultithreadedMapRunner. (Chris Palmer via aajisaka)
 
+MAPREDUCE-6223. TestJobConf#testNegativeValueForTaskVmem failures. 
+(Varun Saxena via kasha)
+
 Release 2.6.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1047c883/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
index 98a643f..315c829 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobConf.java
@@ -1826,8 +1826,7 @@ public class JobConf extends Configuration {
   public long getMemoryForMapTask() {
 long value = getDeprecatedMemoryValue();
 if (value < 0) {
-  return getLong(JobConf.MAPRED_JOB_MAP_MEMORY_MB_PROPERTY,
-  JobContext.DEFAULT_MAP_MEMORY_MB);
+  return getMemoryRequired(TaskType.MAP);
 }
 return value;
   }
@@ -1853,8 +1852,7 @@ public class JobConf extends Configuration {
   public long getMemoryForReduceTask() {
 long value = getDeprecatedMemoryValue();
 if (value < 0) {
-  return getLong(JobConf.MAPRED_JOB_REDUCE_MEMORY_MB_PROPERTY,
-  JobContext.DEFAULT_REDUCE_MEMORY_MB);
+  return getMemoryRequired(TaskType.REDUCE);
 }
 return value;
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1047c883/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestJobConf.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestJobConf.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestJobConf.java
index e380d92..f67ba1f 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestJobConf.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/conf/TestJobConf.java
@@ -19,9 +19,7 @@ package org.apache.hadoop.conf;
 
 import org.junit.Assert;
 import org.junit.Test;
-
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobContext;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 
 public class TestJobConf {
@@ -87,8 +85,10 @@ public class TestJobConf {
 configuration.set("mapred.task.maxvmem" , String.valueOf(-1));
 configuration.set(MRJobConfig.MAP_MEMORY_MB,"-1");
 configuration.set(MRJobConfig.REDUCE_MEMORY_MB,"-1");
-Assert.assertEquals(configuration.getMemoryForMapTask(),-1);
-Assert.assertEquals(configuration.getMemoryForReduceTask(),-1);
+Assert.assertEquals(configuration.getMemoryForMapTask(),
+MRJobConfig.DEFAULT_MAP_MEMORY_MB);
+Assert.assertEquals(configuration.getMemoryForReduceTask(),
+
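
The assertion change above follows from the new fallback in JobConf: when the
deprecated mapred.task.maxvmem value is negative, getMemoryForMapTask() and
getMemoryForReduceTask() now delegate to the per-task-type requirement instead
of returning the raw negative number, so the test expects the defaults. A
minimal standalone sketch of that fallback shape (hypothetical class and
constants, not the actual JobConf code):

    // Illustrative only: mirrors the "fall back to a per-task default when the
    // deprecated value is unusable" behaviour asserted by the updated test.
    public class MemoryFallbackSketch {
      static final long DEFAULT_MAP_MEMORY_MB = 1024;     // assumed default
      static final long DEFAULT_REDUCE_MEMORY_MB = 1024;  // assumed default

      enum TaskType { MAP, REDUCE }

      private long deprecatedMemoryMb = -1;  // stand-in for mapred.task.maxvmem
      private long mapMemoryMb = -1;         // stand-in for mapreduce.map.memory.mb
      private long reduceMemoryMb = -1;      // stand-in for mapreduce.reduce.memory.mb

      long getMemoryRequired(TaskType type) {
        long configured = (type == TaskType.MAP) ? mapMemoryMb : reduceMemoryMb;
        long dflt = (type == TaskType.MAP)
            ? DEFAULT_MAP_MEMORY_MB : DEFAULT_REDUCE_MEMORY_MB;
        return configured > 0 ? configured : dflt;
      }

      long getMemoryForMapTask() {
        // The deprecated value wins only when it is a usable (non-negative) number.
        return deprecatedMemoryMb < 0
            ? getMemoryRequired(TaskType.MAP) : deprecatedMemoryMb;
      }

      public static void main(String[] args) {
        // With everything negative, the call resolves to the default, matching
        // the updated TestJobConf expectation.
        System.out.println(new MemoryFallbackSketch().getMemoryForMapTask());  // 1024
      }
    }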

[08/17] hadoop git commit: HDFS-7832. Show 'Last Modified' in Namenode's 'Browse Filesystem' (Contributed by Vinayakumar B)

2015-02-26 Thread zjshen
HDFS-7832. Show 'Last Modified' in Namenode's 'Browse Filesystem' (Contributed 
by Vinayakumar B)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/166eecf6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/166eecf6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/166eecf6

Branch: refs/heads/YARN-2928
Commit: 166eecf687765f369cd5bf210cc6d7bf493121ea
Parents: 71385f9
Author: Vinayakumar B vinayakum...@apache.org
Authored: Thu Feb 26 14:36:09 2015 +0530
Committer: Vinayakumar B vinayakum...@apache.org
Committed: Thu Feb 26 14:36:09 2015 +0530

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 +++
 .../hadoop-hdfs/src/main/webapps/hdfs/explorer.html | 2 ++
 .../hadoop-hdfs/src/main/webapps/hdfs/explorer.js   | 9 -
 3 files changed, 13 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/166eecf6/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 4523bf4..e09714f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -676,6 +676,9 @@ Release 2.7.0 - UNRELEASED
 HDFS-7537. Add UNDER MIN REPL'D BLOCKS count to fsck.  (GAO Rui via
 szetszwo)
 
+HDFS-7832. Show 'Last Modified' in Namenode's 'Browse Filesystem'
+(vinayakumarb)
+
   OPTIMIZATIONS
 
 HDFS-7454. Reduce memory footprint for AclEntries in NameNode.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/166eecf6/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
index e1fdfa3..7b34044 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
@@ -112,6 +112,7 @@
<th>Owner</th>
<th>Group</th>
<th>Size</th>
+<th>Last Modified</th>
<th>Replication</th>
<th>Block Size</th>
<th>Name</th>
@@ -124,6 +125,7 @@
<td>{owner}</td>
<td>{group}</td>
<td>{length|fmt_bytes}</td>
+<td>{#helper_date_tostring value="{modificationTime}"/}</td>
<td>{replication}</td>
<td>{blockSize|fmt_bytes}</td>
<td><a style="cursor:pointer" inode-type="{type}" 
class="explorer-browse-links" inode-path="{pathSuffix}">{pathSuffix}</a></td>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/166eecf6/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
index 87d47fa..0a53dcd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
@@ -143,6 +143,12 @@
   }
 
   function browse_directory(dir) {
+var HELPERS = {
+  'helper_date_tostring' : function (chunk, ctx, bodies, params) {
+var value = dust.helpers.tap(params.value, chunk, ctx);
+return chunk.write('' + new Date(Number(value)).toLocaleString());
+  }
+};
 var url = '/webhdfs/v1' + dir + '?op=LISTSTATUS';
 $.get(url, function(data) {
   var d = get_response(data, FileStatuses);
@@ -154,7 +160,8 @@
   current_directory = dir;
   $('#directory').val(dir);
   window.location.hash = dir;
-  dust.render('explorer', d, function(err, out) {
+  var base = dust.makeBase(HELPERS);
+  dust.render('explorer', base.push(d), function(err, out) {
 $('#panel').html(out);
 
 $('.explorer-browse-links').click(function() {
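
The helper registered above turns the raw modificationTime carried by the
WebHDFS LISTSTATUS response (epoch milliseconds) into a locale-formatted date
for the new column. For reference, the same conversion sketched in Java
(example value and formatter choice are illustrative only):

    import java.time.Instant;
    import java.time.ZoneId;
    import java.time.format.DateTimeFormatter;
    import java.time.format.FormatStyle;

    public class ModTimeFormatSketch {
      public static void main(String[] args) {
        long modificationTime = 1424947200000L;  // epoch millis, as in LISTSTATUS
        String formatted = DateTimeFormatter
            .ofLocalizedDateTime(FormatStyle.MEDIUM)
            .withZone(ZoneId.systemDefault())
            .format(Instant.ofEpochMilli(modificationTime));
        // Analogous to the dust helper's new Date(Number(value)).toLocaleString()
        System.out.println(formatted);
      }
    }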



hadoop git commit: YARN-3251. Fixed a deadlock in CapacityScheduler when computing absoluteMaxAvailableCapacity in LeafQueue (Craig Welch via wangda)

2015-02-26 Thread wangda
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.6 5b3d9bf63 - 881084fe5


YARN-3251. Fixed a deadlock in CapacityScheduler when computing 
absoluteMaxAvailableCapacity in LeafQueue (Craig Welch via wangda)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/881084fe
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/881084fe
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/881084fe

Branch: refs/heads/branch-2.6
Commit: 881084fe5c3118c1f62585aa1b72262d46d74ac6
Parents: 5b3d9bf
Author: Wangda Tan wan...@apache.org
Authored: Thu Feb 26 17:05:25 2015 -0800
Committer: Wangda Tan wan...@apache.org
Committed: Thu Feb 26 17:05:25 2015 -0800

--
 hadoop-yarn-project/CHANGES.txt |  3 +++
 .../scheduler/capacity/LeafQueue.java   | 24 ++--
 2 files changed, 20 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/881084fe/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 3eb6fbc..c603c50 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -619,6 +619,9 @@ Release 2.6.0 - 2014-11-18
 identifiers to be tampered and thus causing app submission failures in
 secure mode. (Jian He via vinodkv)
 
+YARN-3251. Fixed a deadlock in CapacityScheduler when computing 
+absoluteMaxAvailableCapacity in LeafQueue (Craig Welch via wangda)
+ 
   BREAKDOWN OF YARN-1051 SUBTASKS AND RELATED JIRAS
 
 YARN-1707. Introduce APIs to add/remove/resize queues in the

http://git-wip-us.apache.org/repos/asf/hadoop/blob/881084fe/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java
index ffeec63..eddf30f 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java
@@ -115,6 +115,8 @@ public class LeafQueue extends AbstractCSQueue {
   
   private final QueueHeadroomInfo queueHeadroomInfo = new QueueHeadroomInfo();
   
+  private volatile float absoluteMaxAvailCapacity;
+  
   public LeafQueue(CapacitySchedulerContext cs, 
   String queueName, CSQueue parent, CSQueue old) throws IOException {
 super(cs, queueName, parent, old);
@@ -133,6 +135,10 @@ public class LeafQueue extends AbstractCSQueue {
 (float)cs.getConfiguration().getMaximumCapacity(getQueuePath()) / 100;
 float absoluteMaxCapacity = 
 CSQueueUtils.computeAbsoluteMaximumCapacity(maximumCapacity, parent);
+
+// Initially set to absoluteMax, will be updated to more accurate
+// max avail value during assignContainers
+absoluteMaxAvailCapacity = absoluteMaxCapacity;
 
 int userLimit = cs.getConfiguration().getUserLimit(getQueuePath());
 float userLimitFactor = 
@@ -720,8 +726,18 @@ public class LeafQueue extends AbstractCSQueue {
   }
   
   @Override
-  public synchronized CSAssignment assignContainers(Resource clusterResource,
+  public CSAssignment assignContainers(Resource clusterResource,
   FiCaSchedulerNode node, boolean needToUnreserve) {
+//We should not hold a lock on a queue and its parent concurrently - it
+//can lead to deadlocks when calls which walk down the tree occur
+//concurrently (getQueueInfo...)
+absoluteMaxAvailCapacity = CSQueueUtils.getAbsoluteMaxAvailCapacity(
+  resourceCalculator, clusterResource, this);
+return assignContainersInternal(clusterResource, node, needToUnreserve);
+  }
+  
+  private synchronized CSAssignment assignContainersInternal(
+Resource clusterResource, FiCaSchedulerNode node, boolean needToUnreserve) 
{
 
 if(LOG.isDebugEnabled()) {
   LOG.debug(assignContainers: node= + node.getNodeName()
@@ -1012,12 +1028,6 @@ public class LeafQueue extends AbstractCSQueue {
 computeUserLimit(application, clusterResource, required,
 queueUser, requestedLabels);
 
-//Max 
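
The fix encodes a lock-ordering rule: a leaf queue must not call into its
parent (which takes the parent's lock) while holding its own monitor, because
readers such as getQueueInfo walk the tree top-down and take the same locks in
the opposite order. The shape of the workaround - compute the parent-dependent
value before locking, publish it through a volatile field, then enter the
synchronized method - can be sketched as follows (hypothetical classes, not the
actual CapacityScheduler code):

    public class DeadlockAvoidanceSketch {
      static class Parent {
        synchronized float maxAvailableFor(Leaf child) {
          return 0.8f;  // would consult parent-level state under the parent's lock
        }
      }

      static class Leaf {
        private final Parent parent;
        // Written without holding the leaf lock, read later inside it.
        private volatile float absoluteMaxAvailCapacity;

        Leaf(Parent parent) { this.parent = parent; }

        // The entry point is deliberately NOT synchronized, so taking the
        // parent's lock here cannot deadlock against a top-down reader that
        // already holds the parent and is waiting for this leaf.
        void assignContainers() {
          absoluteMaxAvailCapacity = parent.maxAvailableFor(this);
          assignContainersInternal();
        }

        private synchronized void assignContainersInternal() {
          // Per-leaf bookkeeping uses the captured value; the parent lock is
          // never held on this path while the leaf lock is held.
          System.out.println("assigning with max avail = " + absoluteMaxAvailCapacity);
        }
      }

      public static void main(String[] args) {
        new Leaf(new Parent()).assignContainers();
      }
    }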

hadoop git commit: MAPREDUCE-5612. Add javadoc for TaskCompletionEvent.Status. Contributed by Chris Palmer.

2015-02-26 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 5dfb793b0 - 8aa0c7329


MAPREDUCE-5612. Add javadoc for TaskCompletionEvent.Status. Contributed by 
Chris Palmer.

(cherry picked from commit bfbf076b7dcb71b59d85b05d0a6da88195ea9902)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8aa0c732
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8aa0c732
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8aa0c732

Branch: refs/heads/branch-2
Commit: 8aa0c73294dbf6a8aa77a29839b50e8a6368f34c
Parents: 5dfb793
Author: Akira Ajisaka aajis...@apache.org
Authored: Thu Feb 26 15:05:17 2015 -0800
Committer: Akira Ajisaka aajis...@apache.org
Committed: Thu Feb 26 15:06:26 2015 -0800

--
 hadoop-mapreduce-project/CHANGES.txt|  3 ++
 .../hadoop/mapred/TaskCompletionEvent.java  | 32 +++-
 .../hadoop/mapreduce/TaskCompletionEvent.java   | 32 +++-
 3 files changed, 65 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8aa0c732/hadoop-mapreduce-project/CHANGES.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.txt 
b/hadoop-mapreduce-project/CHANGES.txt
index 14018b6..c51bd23 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -66,6 +66,9 @@ Release 2.7.0 - UNRELEASED
 MAPREDUCE-6264. Remove httpclient dependency from hadoop-mapreduce-client.
 (Brahma Reddy Battula via aajisaka)
 
+MAPREDUCE-5612. Add javadoc for TaskCompletionEvent.Status.
+(Chris Palmer via aajisaka)
+
   OPTIMIZATIONS
 
 MAPREDUCE-6169. MergeQueue should release reference to the current item 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8aa0c732/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
index dc4d82e..2bb55a2 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
@@ -32,7 +32,37 @@ public class TaskCompletionEvent
 extends org.apache.hadoop.mapreduce.TaskCompletionEvent {
   @InterfaceAudience.Public
   @InterfaceStability.Stable
-  static public enum Status {FAILED, KILLED, SUCCEEDED, OBSOLETE, TIPFAILED};
+  /**
+   *  Task Completion Statuses
+   */
+  static public enum Status {
+/**
+ * Task Event Attempt failed but there are attempts remaining.
+ */
+FAILED,
+/**
+ * Task Event was killed.
+ */
+KILLED,
+/**
+ * Task Event was successful.
+ */
+SUCCEEDED,
+/**
+ * Used to Override a previously successful event status.
+ * Example:  Map attempt runs and a SUCCEEDED event is sent. Later a task
+ * is retroactively failed due to excessive fetch failure during shuffle
+ * phase. When the retroactive attempt failure occurs, an OBSOLETE event is
+ * sent for the map attempt indicating the prior event is no longer valid.
+ */
+OBSOLETE,
+/**
+ * Task Event attempt failed and no further attempts exist.
+ * reached MAX attempts. When a reducer receives a TIPFAILED event it
+ * gives up trying to shuffle data from that map task.
+ */
+TIPFAILED
+  }
   
   public static final TaskCompletionEvent[] EMPTY_ARRAY = 
new TaskCompletionEvent[0];

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8aa0c732/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
index 31643a9..21c3823 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
+++ 

hadoop git commit: MAPREDUCE-5612. Add javadoc for TaskCompletionEvent.Status. Contributed by Chris Palmer.

2015-02-26 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/trunk 1047c883b - bfbf076b7


MAPREDUCE-5612. Add javadoc for TaskCompletionEvent.Status. Contributed by 
Chris Palmer.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bfbf076b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bfbf076b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bfbf076b

Branch: refs/heads/trunk
Commit: bfbf076b7dcb71b59d85b05d0a6da88195ea9902
Parents: 1047c88
Author: Akira Ajisaka aajis...@apache.org
Authored: Thu Feb 26 15:05:17 2015 -0800
Committer: Akira Ajisaka aajis...@apache.org
Committed: Thu Feb 26 15:05:17 2015 -0800

--
 hadoop-mapreduce-project/CHANGES.txt|  3 ++
 .../hadoop/mapred/TaskCompletionEvent.java  | 32 +++-
 .../hadoop/mapreduce/TaskCompletionEvent.java   | 32 +++-
 3 files changed, 65 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/bfbf076b/hadoop-mapreduce-project/CHANGES.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.txt 
b/hadoop-mapreduce-project/CHANGES.txt
index 6cb1754..f509d4e 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -308,6 +308,9 @@ Release 2.7.0 - UNRELEASED
 MAPREDUCE-6264. Remove httpclient dependency from hadoop-mapreduce-client.
 (Brahma Reddy Battula via aajisaka)
 
+MAPREDUCE-5612. Add javadoc for TaskCompletionEvent.Status.
+(Chris Palmer via aajisaka)
+
   OPTIMIZATIONS
 
 MAPREDUCE-6169. MergeQueue should release reference to the current item 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bfbf076b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
index dc4d82e..2bb55a2 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/TaskCompletionEvent.java
@@ -32,7 +32,37 @@ public class TaskCompletionEvent
 extends org.apache.hadoop.mapreduce.TaskCompletionEvent {
   @InterfaceAudience.Public
   @InterfaceStability.Stable
-  static public enum Status {FAILED, KILLED, SUCCEEDED, OBSOLETE, TIPFAILED};
+  /**
+   *  Task Completion Statuses
+   */
+  static public enum Status {
+/**
+ * Task Event Attempt failed but there are attempts remaining.
+ */
+FAILED,
+/**
+ * Task Event was killed.
+ */
+KILLED,
+/**
+ * Task Event was successful.
+ */
+SUCCEEDED,
+/**
+ * Used to Override a previously successful event status.
+ * Example:  Map attempt runs and a SUCCEEDED event is sent. Later a task
+ * is retroactively failed due to excessive fetch failure during shuffle
+ * phase. When the retroactive attempt failure occurs, an OBSOLETE event is
+ * sent for the map attempt indicating the prior event is no longer valid.
+ */
+OBSOLETE,
+/**
+ * Task Event attempt failed and no further attempts exist.
+ * reached MAX attempts. When a reducer receives a TIPFAILED event it
+ * gives up trying to shuffle data from that map task.
+ */
+TIPFAILED
+  }
   
   public static final TaskCompletionEvent[] EMPTY_ARRAY = 
new TaskCompletionEvent[0];

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bfbf076b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
index 31643a9..21c3823 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCompletionEvent.java
@@ -36,7 +36,37 @@ 
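
For consumers of these events (the reducer's fetch scheduler, or client-side
polling via getTaskCompletionEvents), the new javadoc spells out how each value
should be treated. A minimal illustrative handler over the documented constants
(the switch arms paraphrase the javadoc; the surrounding method is a sketch,
not Hadoop code):

    import org.apache.hadoop.mapred.TaskCompletionEvent;

    public class CompletionEventSketch {
      static void handle(TaskCompletionEvent event) {
        switch (event.getTaskStatus()) {
          case SUCCEEDED:
            // this attempt's output is available to fetch
            break;
          case OBSOLETE:
            // an earlier SUCCEEDED report for this attempt is no longer valid
            break;
          case TIPFAILED:
            // no attempts remain; stop trying to shuffle from this map
            break;
          case FAILED:
          case KILLED:
            // attempt-level failure; other attempts may still succeed
            break;
        }
      }
    }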

[01/17] hadoop git commit: HADOOP-11629. WASB filesystem should not start BandwidthGaugeUpdater if fs.azure.skip.metrics set to true. Contributed by Shanyu Zhao.

2015-02-26 Thread zjshen
Repository: hadoop
Updated Branches:
  refs/heads/YARN-2928 6bb198e72 - 7788f9773


HADOOP-11629. WASB filesystem should not start BandwidthGaugeUpdater if 
fs.azure.skip.metrics set to true. Contributed by Shanyu Zhao.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5731c0e0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5731c0e0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5731c0e0

Branch: refs/heads/YARN-2928
Commit: 5731c0e0d08c3048fafdf62a14ca7611be4df5d7
Parents: 1a68fc4
Author: cnauroth cnaur...@apache.org
Authored: Wed Feb 25 09:08:55 2015 -0800
Committer: cnauroth cnaur...@apache.org
Committed: Wed Feb 25 09:08:55 2015 -0800

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 ++
 .../fs/azure/AzureNativeFileSystemStore.java| 29 ++--
 2 files changed, 23 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5731c0e0/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 988eed0..0d452f7 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -1002,6 +1002,9 @@ Release 2.7.0 - UNRELEASED
 HADOOP-11480. Typo in hadoop-aws/index.md uses wrong scheme for
 test.fs.s3.name. (Ted Yu via aajisaka)
 
+HADOOP-11629. WASB filesystem should not start BandwidthGaugeUpdater if
+fs.azure.skip.metrics set to true. (Shanyu Zhao via cnauroth)
+
 Release 2.6.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5731c0e0/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
--
diff --git 
a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
 
b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
index 2412698..6bed8bb 100644
--- 
a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
+++ 
b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/AzureNativeFileSystemStore.java
@@ -387,9 +387,8 @@ public class AzureNativeFileSystemStore implements 
NativeFileSystemStore {
 if (null == instrumentation) {
      throw new IllegalArgumentException("Null instrumentation");
 }
-
 this.instrumentation = instrumentation;
-this.bandwidthGaugeUpdater = new BandwidthGaugeUpdater(instrumentation);
+
 if (null == this.storageInteractionLayer) {
   this.storageInteractionLayer = new StorageInterfaceImpl();
 }
@@ -405,7 +404,13 @@ public class AzureNativeFileSystemStore implements 
NativeFileSystemStore {
 //
 if (null == conf) {
   throw new IllegalArgumentException(
-  "Cannot initialize WASB file system, URI is null");
+  "Cannot initialize WASB file system, conf is null");
+}
+
+if(!conf.getBoolean(
+NativeAzureFileSystem.SKIP_AZURE_METRICS_PROPERTY_NAME, false)) {
+  //If not skip azure metrics, create bandwidthGaugeUpdater
+  this.bandwidthGaugeUpdater = new BandwidthGaugeUpdater(instrumentation);
 }
 
 // Incoming parameters validated. Capture the URI and the job configuration
@@ -1782,11 +1787,14 @@ public class AzureNativeFileSystemStore implements 
NativeFileSystemStore {
   selfThrottlingWriteFactor);
 }
 
-ResponseReceivedMetricUpdater.hook(
-operationContext,
-instrumentation,
-bandwidthGaugeUpdater);
-
+if(bandwidthGaugeUpdater != null) {
+  //bandwidthGaugeUpdater is null when we config to skip azure metrics
+  ResponseReceivedMetricUpdater.hook(
+ operationContext,
+ instrumentation,
+ bandwidthGaugeUpdater);
+}
+
 // Bind operation context to receive send request callbacks on this 
operation.
 // If reads concurrent to OOB writes are allowed, the interception will 
reset
 // the conditional header on all Azure blob storage read requests.
@@ -2561,7 +2569,10 @@ public class AzureNativeFileSystemStore implements 
NativeFileSystemStore {
 
   @Override
   public void close() {
-bandwidthGaugeUpdater.close();
+if(bandwidthGaugeUpdater != null) {
+  bandwidthGaugeUpdater.close();
+  bandwidthGaugeUpdater = null;
+}
   }
   
   // Finalizer to ensure complete shutdown
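
The change is a straightforward optional-component pattern: build the
BandwidthGaugeUpdater only when fs.azure.skip.metrics is false, and null-guard
every use of it, including close(). A generic sketch of that pattern under the
same config key (hypothetical class names, not the WASB store itself):

    import org.apache.hadoop.conf.Configuration;

    public class OptionalMetricsSketch implements AutoCloseable {
      // Hypothetical stand-in for BandwidthGaugeUpdater
      static class GaugeUpdater implements AutoCloseable {
        void record(long bytes) { /* update bandwidth gauges */ }
        @Override public void close() { /* stop the background updater */ }
      }

      private GaugeUpdater updater;  // stays null when metrics are skipped

      OptionalMetricsSketch(Configuration conf) {
        if (!conf.getBoolean("fs.azure.skip.metrics", false)) {
          updater = new GaugeUpdater();
        }
      }

      void onBytesTransferred(long bytes) {
        if (updater != null) {  // guard every use, as the patch does
          updater.record(bytes);
        }
      }

      @Override
      public void close() {
        if (updater != null) {
          updater.close();
          updater = null;
        }
      }

      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.setBoolean("fs.azure.skip.metrics", true);
        try (OptionalMetricsSketch store = new OptionalMetricsSketch(conf)) {
          store.onBytesTransferred(4096);  // no-op: metrics were skipped
        }
      }
    }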



[03/17] hadoop git commit: HDFS-7467. Provide storage tier information for a directory via fsck. (Benoy Antony)

2015-02-26 Thread zjshen
HDFS-7467. Provide storage tier information for a directory via fsck. (Benoy 
Antony)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d140d76a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d140d76a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d140d76a

Branch: refs/heads/YARN-2928
Commit: d140d76a43c88e326b9c2818578f22bd3563b969
Parents: caa42ad
Author: Benoy Antony be...@apache.org
Authored: Wed Feb 25 16:19:35 2015 -0800
Committer: Benoy Antony be...@apache.org
Committed: Wed Feb 25 16:19:35 2015 -0800

--
 .../hdfs/server/namenode/NamenodeFsck.java  |  23 +-
 .../server/namenode/StoragePolicySummary.java   | 257 +++
 .../org/apache/hadoop/hdfs/tools/DFSck.java |   2 +
 .../hadoop/hdfs/server/namenode/TestFsck.java   |  78 +-
 .../namenode/TestStoragePolicySummary.java  | 201 +++
 5 files changed, 548 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d140d76a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
index dc9494d..5134f3c 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NamenodeFsck.java
@@ -25,6 +25,7 @@ import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.Socket;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Date;
 import java.util.Iterator;
@@ -45,6 +46,7 @@ import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.RemotePeerFactory;
+import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.net.Peer;
 import org.apache.hadoop.hdfs.net.TcpPeerServer;
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -127,6 +129,7 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
   private boolean showBlocks = false;
   private boolean showLocations = false;
   private boolean showRacks = false;
+  private boolean showStoragePolcies = false;
   private boolean showprogress = false;
   private boolean showCorruptFileBlocks = false;
 
@@ -164,6 +167,7 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
   private ListString snapshottableDirs = null;
 
   private final BlockPlacementPolicy bpPolicy;
+  private StoragePolicySummary storageTypeSummary = null;
 
   /**
* Filesystem checker.
@@ -200,6 +204,7 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
  else if (key.equals("blocks")) { this.showBlocks = true; }
  else if (key.equals("locations")) { this.showLocations = true; }
  else if (key.equals("racks")) { this.showRacks = true; }
+  else if (key.equals("storagepolicies")) { this.showStoragePolcies = 
true; }
  else if (key.equals("showprogress")) { this.showprogress = true; }
  else if (key.equals("openforwrite")) {this.showOpenFiles = true; }
  else if (key.equals("listcorruptfileblocks")) {
@@ -334,6 +339,11 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
   return;
 }
 
+if (this.showStoragePolcies) {
+  storageTypeSummary = new StoragePolicySummary(
+  namenode.getNamesystem().getBlockManager().getStoragePolicies());
+}
+
 Result res = new Result(conf);
 
 check(path, file, res);
@@ -342,6 +352,10 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
 out.println(" Number of data-nodes:\t\t" + totalDatanodes);
 out.println(" Number of racks:\t\t" + networktopology.getNumOfRacks());
 
+if (this.showStoragePolcies) {
+  out.print(storageTypeSummary.toString());
+}
+
 out.println("FSCK ended at " + new Date() + " in "
 + (Time.now() - startTime + " milliseconds"));
 
@@ -492,7 +506,8 @@ public class NamenodeFsck implements 
DataEncryptionKeyFactory {
   boolean isCorrupt = lBlk.isCorrupt();
   String blkName = block.toString();
   DatanodeInfo[] locs = lBlk.getLocations();
-  NumberReplicas numberReplicas = 
namenode.getNamesystem().getBlockManager().countNodes(block.getLocalBlock());
+  NumberReplicas numberReplicas =
+  
namenode.getNamesystem().getBlockManager().countNodes(block.getLocalBlock());
   int liveReplicas = 
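
StoragePolicySummary, as wired in above, is essentially a tally of how many
blocks ended up on each combination of replica storage types, so that fsck can
report tier placement for a path (the DFSck change, truncated above, adds the
corresponding command-line switch). A rough standalone sketch of that kind of
accounting (purely illustrative; not the actual StoragePolicySummary
implementation):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class StorageTierTallySketch {
      // blocks per sorted storage-type combination, e.g. [DISK, DISK, SSD] -> 42
      private final Map<List<String>, Long> counts = new HashMap<>();

      void addBlock(String... replicaStorageTypes) {
        String[] combo = replicaStorageTypes.clone();
        Arrays.sort(combo);  // the combination matters, not the replica order
        counts.merge(Arrays.asList(combo), 1L, Long::sum);
      }

      public static void main(String[] args) {
        StorageTierTallySketch tally = new StorageTierTallySketch();
        tally.addBlock("DISK", "DISK", "SSD");
        tally.addBlock("DISK", "DISK", "SSD");
        tally.addBlock("ARCHIVE", "DISK", "DISK");
        tally.counts.forEach((combo, n) -> System.out.println(combo + " : " + n));
      }
    }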

hadoop git commit: YARN-3255. RM, NM, JobHistoryServer, and WebAppProxyServer's main() should support generic options. Contributed by Konstantin Shvachko.

2015-02-26 Thread shv
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 8aa0c7329 - 9e67f2cb0


YARN-3255. RM, NM, JobHistoryServer, and WebAppProxyServer's main() should 
support generic options. Contributed by Konstantin Shvachko.

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9e67f2cb
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9e67f2cb
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9e67f2cb

Branch: refs/heads/branch-2
Commit: 9e67f2cb0d0912e16055b2e2a16de2795d253d40
Parents: 8aa0c73
Author: Konstantin V Shvachko s...@apache.org
Authored: Thu Feb 26 17:26:08 2015 -0800
Committer: Konstantin V Shvachko s...@apache.org
Committed: Thu Feb 26 17:26:08 2015 -0800

--
 .../java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java | 2 ++
 hadoop-yarn-project/CHANGES.txt  | 3 +++
 .../org/apache/hadoop/yarn/server/nodemanager/NodeManager.java   | 4 +++-
 .../hadoop/yarn/server/resourcemanager/ResourceManager.java  | 3 +++
 .../apache/hadoop/yarn/server/webproxy/WebAppProxyServer.java| 2 ++
 5 files changed, 13 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e67f2cb/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
index 6d58040..252ac55 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.service.CompositeService;
 import org.apache.hadoop.util.ExitUtil;
+import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.ShutdownHookManager;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
@@ -216,6 +217,7 @@ public class JobHistoryServer extends CompositeService {
   new CompositeServiceShutdownHook(jobHistoryServer),
   SHUTDOWN_HOOK_PRIORITY);
   YarnConfiguration conf = new YarnConfiguration(new JobConf());
+  new GenericOptionsParser(conf, args);
   jobHistoryServer.init(conf);
   jobHistoryServer.start();
 } catch (Throwable t) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e67f2cb/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index e747888..801192a 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -291,6 +291,9 @@ Release 2.7.0 - UNRELEASED
 YARN-3217. Remove httpclient dependency from hadoop-yarn-server-web-proxy.
 (Brahma Reddy Battula via ozawa).
 
+YARN-3255. RM, NM, JobHistoryServer, and WebAppProxyServer's main()
+should support generic options. (shv)
+
   OPTIMIZATIONS
 
 YARN-2990. FairScheduler's delay-scheduling always waits for node-local 
and 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e67f2cb/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
index 53cbb11..016447c 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.service.CompositeService;
+import 

hadoop git commit: YARN-3255. RM, NM, JobHistoryServer, and WebAppProxyServer's main() should support generic options. Contributed by Konstantin Shvachko.

2015-02-26 Thread shv
Repository: hadoop
Updated Branches:
  refs/heads/trunk bfbf076b7 - 8ca0d957c


YARN-3255. RM, NM, JobHistoryServer, and WebAppProxyServer's main() should 
support generic options. Contributed by Konstantin Shvachko.

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8ca0d957
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8ca0d957
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8ca0d957

Branch: refs/heads/trunk
Commit: 8ca0d957c4b1076e801e1cdce5b44aa805de889c
Parents: bfbf076
Author: Konstantin V Shvachko s...@apache.org
Authored: Thu Feb 26 17:12:19 2015 -0800
Committer: Konstantin V Shvachko s...@apache.org
Committed: Thu Feb 26 17:12:19 2015 -0800

--
 .../java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java | 2 ++
 hadoop-yarn-project/CHANGES.txt  | 3 +++
 .../org/apache/hadoop/yarn/server/nodemanager/NodeManager.java   | 4 +++-
 .../hadoop/yarn/server/resourcemanager/ResourceManager.java  | 3 +++
 .../apache/hadoop/yarn/server/webproxy/WebAppProxyServer.java| 2 ++
 5 files changed, 13 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8ca0d957/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
index 6d58040..252ac55 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.service.CompositeService;
 import org.apache.hadoop.util.ExitUtil;
+import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.ShutdownHookManager;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
@@ -216,6 +217,7 @@ public class JobHistoryServer extends CompositeService {
   new CompositeServiceShutdownHook(jobHistoryServer),
   SHUTDOWN_HOOK_PRIORITY);
   YarnConfiguration conf = new YarnConfiguration(new JobConf());
+  new GenericOptionsParser(conf, args);
   jobHistoryServer.init(conf);
   jobHistoryServer.start();
 } catch (Throwable t) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8ca0d957/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index a635592..40f187b 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -330,6 +330,9 @@ Release 2.7.0 - UNRELEASED
 YARN-3217. Remove httpclient dependency from hadoop-yarn-server-web-proxy.
 (Brahma Reddy Battula via ozawa).
 
+YARN-3255. RM, NM, JobHistoryServer, and WebAppProxyServer's main()
+should support generic options. (shv)
+
   OPTIMIZATIONS
 
 YARN-2990. FairScheduler's delay-scheduling always waits for node-local 
and 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8ca0d957/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
index 7584138..a4be120 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.service.CompositeService;
+import 
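
The wiring is identical in each daemon: feed the command-line args through
GenericOptionsParser against the service Configuration before init(), so the
standard generic options (-D key=value, -conf file.xml, -fs, and so on) take
effect. A minimal sketch of that pattern for a stand-alone main
(GenericOptionsParser is the real hadoop-common class; the printouts are just
for illustration):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.util.GenericOptionsParser;

    public class GenericOptionsMainSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Applies -D, -conf, -fs, ... to conf; anything left over is returned.
        String[] remaining = new GenericOptionsParser(conf, args).getRemainingArgs();

        System.out.println("some.key = " + conf.get("some.key"));
        System.out.println("remaining args: " + remaining.length);
        // A real daemon would now do: server.init(conf); server.start();
      }
    }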

svn commit: r1662634 - in /hadoop/common/site/main: author/src/documentation/content/xdocs/who.xml publish/index.html publish/index.pdf publish/releases.html publish/releases.pdf publish/who.html publ

2015-02-26 Thread yjzhangal
Author: yjzhangal
Date: Fri Feb 27 04:33:56 2015
New Revision: 1662634

URL: http://svn.apache.org/r1662634
Log:
Add Yongjun Zhang to the committer list.


Modified:
hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml
hadoop/common/site/main/publish/index.html
hadoop/common/site/main/publish/index.pdf
hadoop/common/site/main/publish/releases.html
hadoop/common/site/main/publish/releases.pdf
hadoop/common/site/main/publish/who.html
hadoop/common/site/main/publish/who.pdf

Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml?rev=1662634r1=1662633r2=1662634view=diff
==
--- hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml 
(original)
+++ hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml Fri 
Feb 27 04:33:56 2015
@@ -1180,6 +1180,14 @@
</tr>
 
<tr>
+ <td>yjzhangal</td>
+ <td><a href="http://people.apache.org/~yjzhangal">Yongjun 
Zhang</a></td>
+ <td>Cloudera</td>
+ <td></td>
+ <td>-8</td>
+   </tr>
+
+   <tr>
  <td>zjshen</td>
  <td><a href="http://people.apache.org/~zjshen">Zhijie Shen</a></td>
  <td>Hortonworks</td>

Modified: hadoop/common/site/main/publish/index.html
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/index.html?rev=1662634r1=1662633r2=1662634view=diff
==
--- hadoop/common/site/main/publish/index.html (original)
+++ hadoop/common/site/main/publish/index.html Fri Feb 27 04:33:56 2015
@@ -492,9 +492,8 @@ document.write(Last Published:  + docu
 /p
 ul
   
-liHadoop Common/li
-  
-ul
+liHadoop Common
+  ul
 
 liKey management server (beta)/li
 
@@ -502,13 +501,13 @@ document.write(Last Published:  + docu
   
 /ul
   
-liHadoop HDFS/li
+/li
   
-ul
+liHadoop HDFS
+  ul
 
-liHeterogeneous Storage Tiers - Phase 2/li
-  
-ul
+liHeterogeneous Storage Tiers - Phase 2
+  ul
   
 liApplication APIs for heterogeneous storage/li
   
@@ -518,6 +517,8 @@ document.write(Last Published:  + docu
   
 /ul
 
+/li
+
 liSupport for Archival Storage/li
 
 liTransparent data at rest encryption (beta)/li
@@ -532,21 +533,22 @@ document.write(Last Published:  + docu
   
 /ul
   
-liHadoop YARN/li
+/li
   
-ul
+liHadoop YARN
+  ul
 
-liSupport for long running services in YARN/li
-  
-ul
+liSupport for long running services in YARN
+  ul
   
 liService Registry for applications/li
   
 /ul
 
-liSupport for rolling upgrades/li
-  
-ul
+/li
+
+liSupport for rolling upgrades
+  ul
 
 liWork-preserving restarts of ResourceManager/li
 
@@ -554,6 +556,8 @@ document.write(Last Published:  + docu
   
 /ul
 
+/li
+
 liSupport node labels during scheduling/li
 
 liSupport for time-based resource reservations in 
@@ -565,6 +569,8 @@ document.write(Last Published:  + docu
   Docker containers (alpha)/li
   
 /ul
+  
+/li
 
 /ul
 p 

Modified: hadoop/common/site/main/publish/index.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/index.pdf?rev=1662634r1=1662633r2=1662634view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/releases.html
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/releases.html?rev=1662634r1=1662633r2=1662634view=diff
==
--- hadoop/common/site/main/publish/releases.html (original)
+++ hadoop/common/site/main/publish/releases.html Fri Feb 27 04:33:56 2015
@@ -488,9 +488,8 @@ document.write(Last Published:  + docu
 /p
 ul
   
-liHadoop Common/li
-  
-ul
+liHadoop Common
+  ul
 
 li
 a href=https://issues.apache.org/jira/browse/HADOOP-10433;
@@ -502,13 +501,13 @@ document.write(Last Published:  + docu
   
 /ul
   
-liHadoop HDFS/li
+/li
   
-ul
+liHadoop HDFS
+  ul
 
-liHeterogeneous Storage Tiers - Phase 2/li
-  
-ul
+liHeterogeneous Storage Tiers - Phase 2
+  ul
   
 li
 a href=https://issues.apache.org/jira/browse/HDFS-5682;
@@ -524,6 +523,8 @@ document.write(Last Published:  + docu
   
 /ul
 
+/li
+
 li
 a href=https://issues.apache.org/jira/browse/HDFS-6584;