hadoop git commit: HDFS-10225. DataNode hot swap drives should disallow storage type changes. Contributed by Lei (Eddy) Xu.

2016-07-21 Thread xiao
Repository: hadoop
Updated Branches:
  refs/heads/trunk 12aa18447 -> 132deb4ca


HDFS-10225. DataNode hot swap drives should disallow storage type changes. 
Contributed by Lei (Eddy) Xu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/132deb4c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/132deb4c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/132deb4c

Branch: refs/heads/trunk
Commit: 132deb4cacc413a85a6af2f390ec79a76c91961c
Parents: 12aa184
Author: Xiao Chen 
Authored: Thu Jul 21 16:41:02 2016 -0700
Committer: Xiao Chen 
Committed: Thu Jul 21 16:41:02 2016 -0700

--
 .../hadoop/hdfs/server/datanode/DataNode.java   | 14 +++-
 .../datanode/TestDataNodeHotSwapVolumes.java| 23 +++-
 2 files changed, 35 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/132deb4c/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 1cd2dee..ac55397 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -634,7 +634,7 @@ public class DataNode extends ReconfigurableBase
* @param newVolumes a comma separated string that specifies the data 
volumes.
* @return changed volumes.
* @throws IOException if none of the directories are specified in the
-   * configuration.
+   * configuration, or the storage type of a directory is changed.
*/
   @VisibleForTesting
   ChangedVolumes parseChangedVolumes(String newVolumes) throws IOException {
@@ -646,6 +646,12 @@ public class DataNode extends ReconfigurableBase
   throw new IOException("No directory is specified.");
 }
 
+// Use the existing StorageLocation to detect storage type changes.
+Map<String, StorageLocation> existingLocations = new HashMap<>();
+for (StorageLocation loc : getStorageLocations(this.conf)) {
+  existingLocations.put(loc.getFile().getCanonicalPath(), loc);
+}
+
 ChangedVolumes results = new ChangedVolumes();
 results.newLocations.addAll(locations);
 
@@ -659,6 +665,12 @@ public class DataNode extends ReconfigurableBase
 if (location.getFile().getCanonicalPath().equals(
 dir.getRoot().getCanonicalPath())) {
   sl.remove();
+  StorageLocation old = existingLocations.get(
+  location.getFile().getCanonicalPath());
+  if (old != null &&
+  old.getStorageType() != location.getStorageType()) {
+throw new IOException("Changing storage type is not allowed.");
+  }
   results.unchangedLocations.add(location);
   found = true;
   break;
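
A hedged sketch (not part of the patch) of how the new check surfaces through the reconfiguration path; the directory name and the initial [DISK] storage type below are hypothetical:

    // Assume the DataNode was started with
    // dfs.datanode.data.dir = "[DISK]/data/dn1".
    // Re-submitting the same directory under a different storage type
    // now fails fast instead of silently changing the type.
    try {
      dn.parseChangedVolumes("[SSD]/data/dn1");
    } catch (IOException e) {
      // Expected: "Changing storage type is not allowed."
    }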

http://git-wip-us.apache.org/repos/asf/hadoop/blob/132deb4c/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java
index 659806b..c03b02b 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.BlockMissingException;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSTestUtil;
@@ -82,7 +83,6 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 import static org.junit.Assume.assumeTrue;
 import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyBoolean;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.timeout;
@@ -256,6 +256,27 @@ public class TestDataNodeHotSwapVolumes {
 }
   }
 
+  @Test
+  public void testParseStorageTypeChanges() throws IOException {
+startDFSCluster(1, 

svn commit: r1753740 - in /hadoop/common/site/main: author/src/documentation/content/xdocs/ publish/

2016-07-21 Thread xiao
Author: xiao
Date: Thu Jul 21 23:15:48 2016
New Revision: 1753740

URL: http://svn.apache.org/viewvc?rev=1753740&view=rev
Log:
Add xiao to committers list.

Modified:
hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml
hadoop/common/site/main/publish/bylaws.pdf
hadoop/common/site/main/publish/index.pdf
hadoop/common/site/main/publish/issue_tracking.pdf
hadoop/common/site/main/publish/linkmap.pdf
hadoop/common/site/main/publish/mailing_lists.pdf
hadoop/common/site/main/publish/privacy_policy.pdf
hadoop/common/site/main/publish/releases.pdf
hadoop/common/site/main/publish/version_control.pdf
hadoop/common/site/main/publish/who.html
hadoop/common/site/main/publish/who.pdf

Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml?rev=1753740&r1=1753739&r2=1753740&view=diff
==
--- hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml 
(original)
+++ hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml Thu 
Jul 21 23:15:48 2016
@@ -1419,6 +1419,14 @@

 

+       <td>xiao</td>
+       <td>Xiao Chen</td>
+       <td>Cloudera</td>
+       <td>HDFS</td>
+       <td>-8</td>
+     </tr>
+
+     <tr>
        <td>xyao</td>
        <td><a href="http://people.apache.org/~xyao">Xiaoyu Yao</a></td>
        <td>Hortonworks</td>

Modified: hadoop/common/site/main/publish/bylaws.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/bylaws.pdf?rev=1753740&r1=1753739&r2=1753740&view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/index.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/index.pdf?rev=1753740&r1=1753739&r2=1753740&view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/issue_tracking.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/issue_tracking.pdf?rev=1753740&r1=1753739&r2=1753740&view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/linkmap.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/linkmap.pdf?rev=1753740&r1=1753739&r2=1753740&view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/mailing_lists.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/mailing_lists.pdf?rev=1753740&r1=1753739&r2=1753740&view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/privacy_policy.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/privacy_policy.pdf?rev=1753740&r1=1753739&r2=1753740&view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/releases.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/releases.pdf?rev=1753740&r1=1753739&r2=1753740&view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/version_control.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/version_control.pdf?rev=1753740&r1=1753739&r2=1753740&view=diff
==
Binary files - no diff available.

Modified: hadoop/common/site/main/publish/who.html
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/who.html?rev=1753740&r1=1753739&r2=1753740&view=diff
==
--- hadoop/common/site/main/publish/who.html (original)
+++ hadoop/common/site/main/publish/who.html Thu Jul 21 23:15:48 2016
@@ -2179,6 +2179,17 @@ document.write("Last Published: " + docu

 
  
+    <td>xiao</td>
+    <td>Xiao Chen</td>
+    <td>Cloudera</td>
+    <td>HDFS</td>
+    <td>-8</td>
+  </tr>
+
+
+  <tr>
+
+
     <td>xyao</td>
     <td><a href="http://people.apache.org/~xyao">Xiaoyu Yao</a></td>
     <td>Hortonworks</td>
@@ -2258,7 +2269,7 @@ document.write("Last Published: " + docu
 
 

-
+
 Emeritus Hadoop Committers
 
 Hadoop committers who are no longer active include:

Modified: hadoop/common/site/main/publish/who.pdf
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/publish/who.pdf?rev=1753740&r1=1753739&r2=1753740&view=diff
==
Binary files - no diff available.




[Hadoop Wiki] Update of "Roadmap" by AndrewWang

2016-07-21 Thread Apache Wiki
Dear Wiki user,

You have subscribed to a wiki page or wiki category on "Hadoop Wiki" for change 
notification.

The "Roadmap" page has been changed by AndrewWang:
https://wiki.apache.org/hadoop/Roadmap?action=diff&rev1=61&rev2=62

  For more details on how releases are created, see HowToRelease.
  
  == Hadoop 3.x Releases ==
+ === Planned for hadoop-3.0.0 ===
+  * HADOOP
+   * Classpath isolation on by default 
[[https://issues.apache.org/jira/browse/HADOOP-11656|HADOOP-11656]]
+  * HDFS
+  * YARN
+  * MAPREDUCE
+ 
+ 
- === hadoop-3.0 ===
+ === hadoop-3.0.0-alpha1 ===
   * HADOOP
* Move to JDK8+
-   * Classpath isolation on by default 
[[https://issues.apache.org/jira/browse/HADOOP-11656|HADOOP-11656]]
* Shell script rewrite 
[[https://issues.apache.org/jira/browse/HADOOP-9902|HADOOP-9902]]
* Move default ports out of ephemeral range 
[[https://issues.apache.org/jira/browse/HDFS-9427|HDFS-9427]]
   * HDFS
* Removal of hftp in favor of webhdfs 
[[https://issues.apache.org/jira/browse/HDFS-5570|HDFS-5570]]
* Support for more than two standby NameNodes 
[[https://issues.apache.org/jira/browse/HDFS-6440|HDFS-6440]]
* Support for Erasure Codes in HDFS 
[[https://issues.apache.org/jira/browse/HDFS-7285|HDFS-7285]]
+   * Intra-datanode balancer 
[[https://issues.apache.org/jira/browse/HDFS-1312|HDFS-1312]]
   * YARN
+   * YARN Timeline Service v.2 
[[https://issues.apache.org/jira/browse/YARN-2928|YARN-2928]]
   * MAPREDUCE
* Derive heap size or mapreduce.*.memory.mb automatically 
[[https://issues.apache.org/jira/browse/MAPREDUCE-5785|MAPREDUCE-5785]]
  




hadoop git commit: HADOOP-13382. Remove unneeded commons-httpclient dependencies from POM files in Hadoop and sub-projects, while adding it in to the only place it is still needed, hadoop-openstack. C

2016-07-21 Thread mattf
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 af3c860bb -> c96cb3fd4


HADOOP-13382. Remove unneeded commons-httpclient dependencies from POM files in 
Hadoop and sub-projects, while adding it in to the only place it is still 
needed, hadoop-openstack. Contributed by Matt Foley.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c96cb3fd
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c96cb3fd
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c96cb3fd

Branch: refs/heads/branch-2.8
Commit: c96cb3fd48925b3eb2cc4253e81f8bd35462007c
Parents: af3c860
Author: mattf-horton 
Authored: Thu Jul 21 14:43:57 2016 -0700
Committer: mattf-horton 
Committed: Thu Jul 21 16:01:05 2016 -0700

--
 hadoop-project/pom.xml   | 5 -
 hadoop-tools/hadoop-openstack/pom.xml| 1 +
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml | 6 --
 3 files changed, 1 insertion(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c96cb3fd/hadoop-project/pom.xml
--
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 6ec46cd..d8da03f 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -458,11 +458,6 @@
         <version>0.52</version>
       </dependency>
       <dependency>
-        <groupId>commons-httpclient</groupId>
-        <artifactId>commons-httpclient</artifactId>
-        <version>3.1</version>
-      </dependency>
-      <dependency>
         <groupId>org.apache.httpcomponents</groupId>
         <artifactId>httpclient</artifactId>
         <version>4.5.2</version>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c96cb3fd/hadoop-tools/hadoop-openstack/pom.xml
--
diff --git a/hadoop-tools/hadoop-openstack/pom.xml 
b/hadoop-tools/hadoop-openstack/pom.xml
index dbe2398..84dd7eb 100644
--- a/hadoop-tools/hadoop-openstack/pom.xml
+++ b/hadoop-tools/hadoop-openstack/pom.xml
@@ -119,6 +119,7 @@
     <dependency>
       <groupId>commons-httpclient</groupId>
       <artifactId>commons-httpclient</artifactId>
+      <version>3.1</version>
       <scope>compile</scope>
     </dependency>
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c96cb3fd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml
index 04f8640..815f59c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml
@@ -76,12 +76,6 @@
     </dependency>

     <dependency>
-      <groupId>commons-httpclient</groupId>
-      <artifactId>commons-httpclient</artifactId>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>





hadoop git commit: HADOOP-13382. Remove unneeded commons-httpclient dependencies from POM files in Hadoop and sub-projects, while adding it in to the only place it is still needed, hadoop-openstack. C

2016-07-21 Thread mattf
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 382bcf21e -> ea10e1384


HADOOP-13382. Remove unneeded commons-httpclient dependencies from POM files in 
Hadoop and sub-projects, while adding it in to the only place it is still 
needed, hadoop-openstack. Contributed by Matt Foley.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ea10e138
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ea10e138
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ea10e138

Branch: refs/heads/branch-2
Commit: ea10e1384ff65e275219463aafa1460171f476b4
Parents: 382bcf2
Author: mattf-horton 
Authored: Thu Jul 21 14:43:57 2016 -0700
Committer: mattf-horton 
Committed: Thu Jul 21 15:58:05 2016 -0700

--
 hadoop-project/pom.xml   | 5 -
 hadoop-tools/hadoop-openstack/pom.xml| 1 +
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml | 6 --
 3 files changed, 1 insertion(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ea10e138/hadoop-project/pom.xml
--
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 2be74de..a79dbb8 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -458,11 +458,6 @@
         <version>0.52</version>
       </dependency>
       <dependency>
-        <groupId>commons-httpclient</groupId>
-        <artifactId>commons-httpclient</artifactId>
-        <version>3.1</version>
-      </dependency>
-      <dependency>
         <groupId>org.apache.httpcomponents</groupId>
         <artifactId>httpclient</artifactId>
         <version>4.5.2</version>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ea10e138/hadoop-tools/hadoop-openstack/pom.xml
--
diff --git a/hadoop-tools/hadoop-openstack/pom.xml 
b/hadoop-tools/hadoop-openstack/pom.xml
index 33542fa..89d5274 100644
--- a/hadoop-tools/hadoop-openstack/pom.xml
+++ b/hadoop-tools/hadoop-openstack/pom.xml
@@ -119,6 +119,7 @@
     <dependency>
       <groupId>commons-httpclient</groupId>
       <artifactId>commons-httpclient</artifactId>
+      <version>3.1</version>
       <scope>compile</scope>
     </dependency>
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ea10e138/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml
index bf50d65..a4b3bbf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml
@@ -76,12 +76,6 @@
     </dependency>

     <dependency>
-      <groupId>commons-httpclient</groupId>
-      <artifactId>commons-httpclient</artifactId>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>





hadoop git commit: HADOOP-13382. Remove unneeded commons-httpclient dependencies from POM files in Hadoop and sub-projects, while adding it in to the only place it is still needed, hadoop-openstack. C

2016-07-21 Thread mattf
Repository: hadoop
Updated Branches:
  refs/heads/trunk 43cf6b101 -> 12aa18447


HADOOP-13382. Remove unneeded commons-httpclient dependencies from POM files in 
Hadoop and sub-projects, while adding it in to the only place it is still 
needed, hadoop-openstack. Contributed by Matt Foley.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/12aa1844
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/12aa1844
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/12aa1844

Branch: refs/heads/trunk
Commit: 12aa184479675d6c9bd36fd8451f605ee9505b47
Parents: 43cf6b1
Author: mattf-horton 
Authored: Thu Jul 21 14:43:57 2016 -0700
Committer: mattf-horton 
Committed: Thu Jul 21 14:43:57 2016 -0700

--
 hadoop-project/pom.xml   | 5 -
 hadoop-tools/hadoop-openstack/pom.xml| 1 +
 hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml | 6 --
 3 files changed, 1 insertion(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/12aa1844/hadoop-project/pom.xml
--
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index a632bff..318573a 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -486,11 +486,6 @@
         <version>0.52</version>
       </dependency>
       <dependency>
-        <groupId>commons-httpclient</groupId>
-        <artifactId>commons-httpclient</artifactId>
-        <version>3.1</version>
-      </dependency>
-      <dependency>
         <groupId>org.apache.httpcomponents</groupId>
         <artifactId>httpclient</artifactId>
         <version>4.5.2</version>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/12aa1844/hadoop-tools/hadoop-openstack/pom.xml
--
diff --git a/hadoop-tools/hadoop-openstack/pom.xml 
b/hadoop-tools/hadoop-openstack/pom.xml
index 7e7e4fd..b036e84 100644
--- a/hadoop-tools/hadoop-openstack/pom.xml
+++ b/hadoop-tools/hadoop-openstack/pom.xml
@@ -136,6 +136,7 @@
     <dependency>
       <groupId>commons-httpclient</groupId>
       <artifactId>commons-httpclient</artifactId>
+      <version>3.1</version>
       <scope>compile</scope>
     </dependency>
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/12aa1844/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml
index 886c98f..0c78c54 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/pom.xml
@@ -76,12 +76,6 @@
     </dependency>

     <dependency>
-      <groupId>commons-httpclient</groupId>
-      <artifactId>commons-httpclient</artifactId>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>





[3/3] hadoop git commit: HADOOP-13240. TestAclCommands.testSetfaclValidations fail. Contributed by John Zhuge.

2016-07-21 Thread cnauroth
HADOOP-13240. TestAclCommands.testSetfaclValidations fail. Contributed by John 
Zhuge.

(cherry picked from commit 43cf6b101dacd96bacfd199826b717f6946109af)
(cherry picked from commit 382bcf21e7f7f752ba901f856abf080e8700e890)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/af3c860b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/af3c860b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/af3c860b

Branch: refs/heads/branch-2.8
Commit: af3c860bb1910bc33e163a798a3bebe5c99d982b
Parents: 9442e1d
Author: Chris Nauroth 
Authored: Thu Jul 21 14:12:31 2016 -0700
Committer: Chris Nauroth 
Committed: Thu Jul 21 14:12:47 2016 -0700

--
 .../org/apache/hadoop/fs/shell/AclCommands.java |  4 +++
 .../apache/hadoop/fs/shell/TestAclCommands.java | 34 
 2 files changed, 24 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/af3c860b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
index 51a2255..9a54040 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
@@ -213,6 +213,10 @@ class AclCommands extends FsCommand {
   "Missing either  or ");
 }
 aclEntries = AclEntry.parseAclSpec(args.removeFirst(), 
!cf.getOpt("x"));
+if (aclEntries.isEmpty()) {
+  throw new HadoopIllegalArgumentException(
+  "Missing  entry");
+}
   }
 
   if (args.isEmpty()) {
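
The new test cases are not shown in full in this digest; a hedged sketch, in the style of the existing TestAclCommands assertions, of what the added check rejects (the empty --set value is illustrative):

    // An empty <acl_spec> now yields a non-zero exit code, because
    // AclEntry.parseAclSpec("") produces an empty entry list and the
    // new isEmpty() check throws.
    assertFalse("setfacl should fail with an empty aclSpec",
        0 == runCommand(new String[] {"-setfacl", "--set", "", path}));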

http://git-wip-us.apache.org/repos/asf/hadoop/blob/af3c860b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
index 3b9d397..5637e70 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
@@ -43,48 +43,54 @@ import org.apache.hadoop.ipc.RpcNoSuchMethodException;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
 
 public class TestAclCommands {
+  @Rule
+  public TemporaryFolder testFolder = new TemporaryFolder();
+
+  private String path;
 
   private Configuration conf = null;
 
   @Before
   public void setup() throws IOException {
 conf = new Configuration();
+path = testFolder.newFile("file").getPath();
   }
 
   @Test
   public void testGetfaclValidations() throws Exception {
 assertFalse("getfacl should fail without path",
-0 == runCommand(new String[] { "-getfacl" }));
+0 == runCommand(new String[] {"-getfacl"}));
 assertFalse("getfacl should fail with extra argument",
-0 == runCommand(new String[] { "-getfacl", "/test", "extraArg" }));
+0 == runCommand(new String[] {"-getfacl", path, "extraArg"}));
   }
 
   @Test
   public void testSetfaclValidations() throws Exception {
 assertFalse("setfacl should fail without options",
-0 == runCommand(new String[] { "-setfacl", "/" }));
+0 == runCommand(new String[] {"-setfacl", path}));
 assertFalse("setfacl should fail without options -b, -k, -m, -x or --set",
-0 == runCommand(new String[] { "-setfacl", "-R", "/" }));
+0 == runCommand(new String[] {"-setfacl", "-R", path}));
 assertFalse("setfacl should fail without path",
-0 == runCommand(new String[] { "-setfacl" }));
+0 == runCommand(new String[] {"-setfacl"}));
 assertFalse("setfacl should fail without aclSpec",
-0 == runCommand(new String[] { "-setfacl", "-m", "/path" }));
+0 == runCommand(new String[] {"-setfacl", "-m", path}));
 assertFalse("setfacl should fail with conflicting options",
-0 == runCommand(new String[] { "-setfacl", "-m", "/path" }));
+0 == runCommand(new String[] {"-setfacl", "-m", path}));
 assertFalse("setfacl should fail with extra arguments",
-0 == runCommand(new String[] { "-setfacl", "/path", "extra" }));

[1/3] hadoop git commit: HADOOP-13240. TestAclCommands.testSetfaclValidations fail. Contributed by John Zhuge.

2016-07-21 Thread cnauroth
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 43a40fa00 -> 382bcf21e
  refs/heads/branch-2.8 9442e1d48 -> af3c860bb
  refs/heads/trunk ecff7d035 -> 43cf6b101


HADOOP-13240. TestAclCommands.testSetfaclValidations fail. Contributed by John 
Zhuge.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/43cf6b10
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/43cf6b10
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/43cf6b10

Branch: refs/heads/trunk
Commit: 43cf6b101dacd96bacfd199826b717f6946109af
Parents: ecff7d0
Author: Chris Nauroth 
Authored: Thu Jul 21 14:12:31 2016 -0700
Committer: Chris Nauroth 
Committed: Thu Jul 21 14:12:31 2016 -0700

--
 .../org/apache/hadoop/fs/shell/AclCommands.java |  4 +++
 .../apache/hadoop/fs/shell/TestAclCommands.java | 34 
 2 files changed, 24 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/43cf6b10/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
index 51a2255..9a54040 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
@@ -213,6 +213,10 @@ class AclCommands extends FsCommand {
   "Missing either  or ");
 }
 aclEntries = AclEntry.parseAclSpec(args.removeFirst(), 
!cf.getOpt("x"));
+if (aclEntries.isEmpty()) {
+  throw new HadoopIllegalArgumentException(
+  "Missing  entry");
+}
   }
 
   if (args.isEmpty()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43cf6b10/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
index 3b9d397..5637e70 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
@@ -43,48 +43,54 @@ import org.apache.hadoop.ipc.RpcNoSuchMethodException;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
 
 public class TestAclCommands {
+  @Rule
+  public TemporaryFolder testFolder = new TemporaryFolder();
+
+  private String path;
 
   private Configuration conf = null;
 
   @Before
   public void setup() throws IOException {
 conf = new Configuration();
+path = testFolder.newFile("file").getPath();
   }
 
   @Test
   public void testGetfaclValidations() throws Exception {
 assertFalse("getfacl should fail without path",
-0 == runCommand(new String[] { "-getfacl" }));
+0 == runCommand(new String[] {"-getfacl"}));
 assertFalse("getfacl should fail with extra argument",
-0 == runCommand(new String[] { "-getfacl", "/test", "extraArg" }));
+0 == runCommand(new String[] {"-getfacl", path, "extraArg"}));
   }
 
   @Test
   public void testSetfaclValidations() throws Exception {
 assertFalse("setfacl should fail without options",
-0 == runCommand(new String[] { "-setfacl", "/" }));
+0 == runCommand(new String[] {"-setfacl", path}));
 assertFalse("setfacl should fail without options -b, -k, -m, -x or --set",
-0 == runCommand(new String[] { "-setfacl", "-R", "/" }));
+0 == runCommand(new String[] {"-setfacl", "-R", path}));
 assertFalse("setfacl should fail without path",
-0 == runCommand(new String[] { "-setfacl" }));
+0 == runCommand(new String[] {"-setfacl"}));
 assertFalse("setfacl should fail without aclSpec",
-0 == runCommand(new String[] { "-setfacl", "-m", "/path" }));
+0 == runCommand(new String[] {"-setfacl", "-m", path}));
 assertFalse("setfacl should fail with conflicting options",
-0 == runCommand(new String[] { "-setfacl", "-m", "/path" }));
+0 == runCommand(new String[] {"-setfacl", "-m", path}));
 assertFalse("setfacl should fail with extra arguments",
-0 == runCommand(new String[] { 

[2/3] hadoop git commit: HADOOP-13240. TestAclCommands.testSetfaclValidations fail. Contributed by John Zhuge.

2016-07-21 Thread cnauroth
HADOOP-13240. TestAclCommands.testSetfaclValidations fail. Contributed by John 
Zhuge.

(cherry picked from commit 43cf6b101dacd96bacfd199826b717f6946109af)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/382bcf21
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/382bcf21
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/382bcf21

Branch: refs/heads/branch-2
Commit: 382bcf21e7f7f752ba901f856abf080e8700e890
Parents: 43a40fa
Author: Chris Nauroth 
Authored: Thu Jul 21 14:12:31 2016 -0700
Committer: Chris Nauroth 
Committed: Thu Jul 21 14:12:39 2016 -0700

--
 .../org/apache/hadoop/fs/shell/AclCommands.java |  4 +++
 .../apache/hadoop/fs/shell/TestAclCommands.java | 34 
 2 files changed, 24 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/382bcf21/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
index 51a2255..9a54040 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
@@ -213,6 +213,10 @@ class AclCommands extends FsCommand {
   "Missing either  or ");
 }
 aclEntries = AclEntry.parseAclSpec(args.removeFirst(), 
!cf.getOpt("x"));
+if (aclEntries.isEmpty()) {
+  throw new HadoopIllegalArgumentException(
+  "Missing  entry");
+}
   }
 
   if (args.isEmpty()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/382bcf21/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
index 3b9d397..5637e70 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestAclCommands.java
@@ -43,48 +43,54 @@ import org.apache.hadoop.ipc.RpcNoSuchMethodException;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
 
 public class TestAclCommands {
+  @Rule
+  public TemporaryFolder testFolder = new TemporaryFolder();
+
+  private String path;
 
   private Configuration conf = null;
 
   @Before
   public void setup() throws IOException {
 conf = new Configuration();
+path = testFolder.newFile("file").getPath();
   }
 
   @Test
   public void testGetfaclValidations() throws Exception {
 assertFalse("getfacl should fail without path",
-0 == runCommand(new String[] { "-getfacl" }));
+0 == runCommand(new String[] {"-getfacl"}));
 assertFalse("getfacl should fail with extra argument",
-0 == runCommand(new String[] { "-getfacl", "/test", "extraArg" }));
+0 == runCommand(new String[] {"-getfacl", path, "extraArg"}));
   }
 
   @Test
   public void testSetfaclValidations() throws Exception {
 assertFalse("setfacl should fail without options",
-0 == runCommand(new String[] { "-setfacl", "/" }));
+0 == runCommand(new String[] {"-setfacl", path}));
 assertFalse("setfacl should fail without options -b, -k, -m, -x or --set",
-0 == runCommand(new String[] { "-setfacl", "-R", "/" }));
+0 == runCommand(new String[] {"-setfacl", "-R", path}));
 assertFalse("setfacl should fail without path",
-0 == runCommand(new String[] { "-setfacl" }));
+0 == runCommand(new String[] {"-setfacl"}));
 assertFalse("setfacl should fail without aclSpec",
-0 == runCommand(new String[] { "-setfacl", "-m", "/path" }));
+0 == runCommand(new String[] {"-setfacl", "-m", path}));
 assertFalse("setfacl should fail with conflicting options",
-0 == runCommand(new String[] { "-setfacl", "-m", "/path" }));
+0 == runCommand(new String[] {"-setfacl", "-m", path}));
 assertFalse("setfacl should fail with extra arguments",
-0 == runCommand(new String[] { "-setfacl", "/path", "extra" }));
+0 == runCommand(new String[] {"-setfacl", path, "extra"}));
  

hadoop git commit: HADOOP-12009. Clarify FileSystem.listStatus() sorting order & fix FileSystemContractBaseTest:testListStatus. backport to 2.8

2016-07-21 Thread stevel
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 f4d94f224 -> 43a40fa00


HADOOP-12009. Clarify FileSystem.listStatus() sorting order & fix 
FileSystemContractBaseTest:testListStatus. backport to 2.8


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/43a40fa0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/43a40fa0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/43a40fa0

Branch: refs/heads/branch-2
Commit: 43a40fa00978f41cf12124fb79379a26aecfdc5e
Parents: f4d94f2
Author: Jakob Homan 
Authored: Thu Jul 23 17:46:13 2015 -0700
Committer: Steve Loughran 
Committed: Thu Jul 21 21:50:38 2016 +0100

--
 .../main/java/org/apache/hadoop/fs/FileSystem.java | 17 -
 .../src/site/markdown/filesystem/filesystem.md |  4 
 .../hadoop/fs/FileSystemContractBaseTest.java  | 11 ---
 3 files changed, 28 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/43a40fa0/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 007d90c..88fe3eb 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -1520,7 +1520,9 @@ public abstract class FileSystem extends Configured 
implements Closeable {
   /**
* List the statuses of the files/directories in the given path if the path 
is
* a directory.
-   * 
+   * 
+   * Does not guarantee to return the List of files/directories status in a
+   * sorted order.
* @param f given path
* @return the statuses of the files/directories in the given patch
* @throws FileNotFoundException when the path does not exist;
@@ -1562,6 +1564,9 @@ public abstract class FileSystem extends Configured 
implements Closeable {
   /**
* Filter files/directories in the given path using the user-supplied path
* filter.
+   * 
+   * Does not guarantee to return the List of files/directories status in a
+   * sorted order.
* 
* @param f
*  a path name
@@ -1582,6 +1587,9 @@ public abstract class FileSystem extends Configured 
implements Closeable {
   /**
* Filter files/directories in the given list of paths using default
* path filter.
+   * 
+   * Does not guarantee to return the List of files/directories status in a
+   * sorted order.
* 
* @param files
*  a list of paths
@@ -1598,6 +1606,9 @@ public abstract class FileSystem extends Configured 
implements Closeable {
   /**
* Filter files/directories in the given list of paths using user-supplied
* path filter.
+   * 
+   * Does not guarantee to return the List of files/directories status in a
+   * sorted order.
* 
* @param files
*  a list of paths
@@ -1761,6 +1772,8 @@ public abstract class FileSystem extends Configured 
implements Closeable {
* while consuming the entries. Each file system implementation should
* override this method and provide a more efficient implementation, if
* possible. 
+   * Does not guarantee to return the iterator that traverses statuses
+   * of the files in a sorted order.
*
* @param p target path
* @return remote iterator
@@ -1788,6 +1801,8 @@ public abstract class FileSystem extends Configured 
implements Closeable {
 
   /**
* List the statuses and block locations of the files in the given path.
+   * Does not guarantee to return the iterator that traverses statuses
+   * of the files in a sorted order.
* 
* If the path is a directory, 
*   if recursive is false, returns files in the directory;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43a40fa0/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/filesystem.md
--
diff --git 
a/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/filesystem.md
 
b/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/filesystem.md
index d2e1712..778cff4 100644
--- 
a/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/filesystem.md
+++ 
b/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/filesystem.md
@@ -152,6 +152,10 @@ to the same path:
 forall fs in listStatus(Path) :
   fs == getFileStatus(fs.path)
 
+**Ordering of results**: there is no guarantee of ordering of the listed 
entries.
+While HDFS currently returns an alphanumerically sorted 

hadoop git commit: HADOOP-12009. Clarify FileSystem.listStatus() sorting order & fix FileSystemContractBaseTest:testListStatus. backport to 2.8

2016-07-21 Thread stevel
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 3dcbd2df5 -> 9442e1d48


HADOOP-12009. Clarify FileSystem.listStatus() sorting order & fix 
FileSystemContractBaseTest:testListStatus. backport to 2.8


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9442e1d4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9442e1d4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9442e1d4

Branch: refs/heads/branch-2.8
Commit: 9442e1d48ea7303a8765056ff087a513d0478719
Parents: 3dcbd2d
Author: Jakob Homan 
Authored: Thu Jul 23 17:46:13 2015 -0700
Committer: Steve Loughran 
Committed: Thu Jul 21 21:50:25 2016 +0100

--
 .../main/java/org/apache/hadoop/fs/FileSystem.java | 17 -
 .../src/site/markdown/filesystem/filesystem.md |  4 
 .../hadoop/fs/FileSystemContractBaseTest.java  | 11 ---
 3 files changed, 28 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9442e1d4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index e876c3a..14da0d3 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -1515,7 +1515,9 @@ public abstract class FileSystem extends Configured 
implements Closeable {
   /**
* List the statuses of the files/directories in the given path if the path 
is
* a directory.
-   * 
+   * 
+   * Does not guarantee to return the List of files/directories status in a
+   * sorted order.
* @param f given path
* @return the statuses of the files/directories in the given patch
* @throws FileNotFoundException when the path does not exist;
@@ -1557,6 +1559,9 @@ public abstract class FileSystem extends Configured 
implements Closeable {
   /**
* Filter files/directories in the given path using the user-supplied path
* filter.
+   * 
+   * Does not guarantee to return the List of files/directories status in a
+   * sorted order.
* 
* @param f
*  a path name
@@ -1577,6 +1582,9 @@ public abstract class FileSystem extends Configured 
implements Closeable {
   /**
* Filter files/directories in the given list of paths using default
* path filter.
+   * 
+   * Does not guarantee to return the List of files/directories status in a
+   * sorted order.
* 
* @param files
*  a list of paths
@@ -1593,6 +1601,9 @@ public abstract class FileSystem extends Configured 
implements Closeable {
   /**
* Filter files/directories in the given list of paths using user-supplied
* path filter.
+   * 
+   * Does not guarantee to return the List of files/directories status in a
+   * sorted order.
* 
* @param files
*  a list of paths
@@ -1756,6 +1767,8 @@ public abstract class FileSystem extends Configured 
implements Closeable {
* while consuming the entries. Each file system implementation should
* override this method and provide a more efficient implementation, if
* possible. 
+   * Does not guarantee to return the iterator that traverses statuses
+   * of the files in a sorted order.
*
* @param p target path
* @return remote iterator
@@ -1783,6 +1796,8 @@ public abstract class FileSystem extends Configured 
implements Closeable {
 
   /**
* List the statuses and block locations of the files in the given path.
+   * Does not guarantee to return the iterator that traverses statuses
+   * of the files in a sorted order.
* 
* If the path is a directory, 
*   if recursive is false, returns files in the directory;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9442e1d4/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/filesystem.md
--
diff --git 
a/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/filesystem.md
 
b/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/filesystem.md
index d2e1712..778cff4 100644
--- 
a/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/filesystem.md
+++ 
b/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/filesystem.md
@@ -152,6 +152,10 @@ to the same path:
 forall fs in listStatus(Path) :
   fs == getFileStatus(fs.path)
 
+**Ordering of results**: there is no guarantee of ordering of the listed 
entries.
+While HDFS currently returns an alphanumerically 

hadoop git commit: MAPREDUCE-6738. TestJobListCache.testAddExisting failed intermittently in slow VM testbed. Contributed by Junping Du.

2016-07-21 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 6a7fe835f -> 3dcbd2df5


MAPREDUCE-6738. TestJobListCache.testAddExisting failed intermittently in slow 
VM testbed. Contributed by Junping Du.

(cherry picked from commit ecff7d03595b45d7a5d6aed73b9931fef85a56ee)
(cherry picked from commit f4d94f2246da3a015a105cb7f55ec1ebc451e0e7)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3dcbd2df
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3dcbd2df
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3dcbd2df

Branch: refs/heads/branch-2.8
Commit: 3dcbd2df5627c0122596ac2afc937cdbb6c8b3df
Parents: 6a7fe83
Author: Akira Ajisaka 
Authored: Thu Jul 21 11:34:48 2016 -0700
Committer: Akira Ajisaka 
Committed: Thu Jul 21 11:38:26 2016 -0700

--
 .../java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3dcbd2df/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java
index 3ccc222..8479936 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java
@@ -31,7 +31,7 @@ import static org.junit.Assert.*;
 
 public class TestJobListCache {
 
-  @Test (timeout = 1000)
+  @Test (timeout = 5000)
   public void testAddExisting() {
 JobListCache cache = new JobListCache(2, 1000);
 





hadoop git commit: MAPREDUCE-6738. TestJobListCache.testAddExisting failed intermittently in slow VM testbed. Contributed by Junping Du.

2016-07-21 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 1222889f1 -> f4d94f224


MAPREDUCE-6738. TestJobListCache.testAddExisting failed intermittently in slow 
VM testbed. Contributed by Junping Du.

(cherry picked from commit ecff7d03595b45d7a5d6aed73b9931fef85a56ee)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f4d94f22
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f4d94f22
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f4d94f22

Branch: refs/heads/branch-2
Commit: f4d94f2246da3a015a105cb7f55ec1ebc451e0e7
Parents: 1222889
Author: Akira Ajisaka 
Authored: Thu Jul 21 11:34:48 2016 -0700
Committer: Akira Ajisaka 
Committed: Thu Jul 21 11:38:02 2016 -0700

--
 .../java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4d94f22/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java
index 3ccc222..8479936 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java
@@ -31,7 +31,7 @@ import static org.junit.Assert.*;
 
 public class TestJobListCache {
 
-  @Test (timeout = 1000)
+  @Test (timeout = 5000)
   public void testAddExisting() {
 JobListCache cache = new JobListCache(2, 1000);
 





hadoop git commit: MAPREDUCE-6738. TestJobListCache.testAddExisting failed intermittently in slow VM testbed. Contributed by Junping Du.

2016-07-21 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/trunk fcde6940e -> ecff7d035


MAPREDUCE-6738. TestJobListCache.testAddExisting failed intermittently in slow 
VM testbed. Contributed by Junping Du.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ecff7d03
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ecff7d03
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ecff7d03

Branch: refs/heads/trunk
Commit: ecff7d03595b45d7a5d6aed73b9931fef85a56ee
Parents: fcde694
Author: Akira Ajisaka 
Authored: Thu Jul 21 11:34:48 2016 -0700
Committer: Akira Ajisaka 
Committed: Thu Jul 21 11:34:48 2016 -0700

--
 .../java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecff7d03/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java
index 3ccc222..8479936 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobListCache.java
@@ -31,7 +31,7 @@ import static org.junit.Assert.*;
 
 public class TestJobListCache {
 
-  @Test (timeout = 1000)
+  @Test (timeout = 5000)
   public void testAddExisting() {
 JobListCache cache = new JobListCache(2, 1000);
 





hadoop git commit: HDFS-10653. Optimize conversion from path string to components. Contributed by Daryn Sharp.

2016-07-21 Thread zhz
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.7 993801026 -> 2a5b8e5b3


HDFS-10653. Optimize conversion from path string to components. Contributed by 
Daryn Sharp.

(cherry picked from commit bd3dcf46e263b6e6aa3fca6a5d9936cc49e3280f)
(cherry picked from commit 6a7fe835f19141cc633824cd5d21f1e30f014bce)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2a5b8e5b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2a5b8e5b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2a5b8e5b

Branch: refs/heads/branch-2.7
Commit: 2a5b8e5b3a7ac716253f68ccd25f95ae189bc3ea
Parents: 9938010
Author: Jing Zhao 
Authored: Thu Jul 21 11:14:39 2016 -0700
Committer: Zhe Zhang 
Committed: Thu Jul 21 11:30:06 2016 -0700

--
 .../java/org/apache/hadoop/hdfs/DFSUtil.java|  9 ++
 .../hadoop/hdfs/server/namenode/INode.java  | 20 +
 .../server/namenode/TestSnapshotPathINodes.java | 30 +++-
 3 files changed, 26 insertions(+), 33 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2a5b8e5b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
index faa658a..338c215 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
@@ -426,6 +426,15 @@ public class DFSUtil {
   }
 
   /**
+   * Convert a UTF8 string to an array of byte arrays.
+   */
+  public static byte[][] getPathComponents(String path) {
+// avoid intermediate split to String[]
+final byte[] bytes = string2Bytes(path);
+return bytes2byteArray(bytes, bytes.length, (byte)Path.SEPARATOR_CHAR);
+  }
+
+  /**
* Splits the array of bytes into array of arrays of bytes
* on byte separator
* @param bytes the array of bytes to split

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2a5b8e5b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
index e629441..3a4516e 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
@@ -737,18 +737,8 @@ public abstract class INode implements INodeAttributes, 
Diff.Element {
*/
   @VisibleForTesting
   public static byte[][] getPathComponents(String path) {
-return getPathComponents(getPathNames(path));
-  }
-
-  /** Convert strings to byte arrays for path components. */
-  static byte[][] getPathComponents(String[] strings) {
-if (strings.length == 0) {
-  return new byte[][]{null};
-}
-byte[][] bytes = new byte[strings.length][];
-for (int i = 0; i < strings.length; i++)
-  bytes[i] = DFSUtil.string2Bytes(strings[i]);
-return bytes;
+checkAbsolutePath(path);
+return DFSUtil.getPathComponents(path);
   }
 
   /**
@@ -757,10 +747,14 @@ public abstract class INode implements INodeAttributes, 
Diff.Element {
* @return array of path components.
*/
   public static String[] getPathNames(String path) {
+checkAbsolutePath(path);
+return StringUtils.split(path, Path.SEPARATOR_CHAR);
+  }
+
+  private static void checkAbsolutePath(final String path) {
 if (path == null || !path.startsWith(Path.SEPARATOR)) {
   throw new AssertionError("Absolute path required");
 }
-return StringUtils.split(path, Path.SEPARATOR_CHAR);
   }
 
   @Override
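
A hedged sketch of what the new conversion returns, assuming the leading "/" maps to an empty root component as elsewhere in HDFS path handling:

    byte[][] parts = DFSUtil.getPathComponents("/a/b");
    // parts.length == 3: an empty root component, then "a", then "b",
    // each as UTF-8 bytes; no intermediate String[] is allocated.
    assert parts[0].length == 0;
    assert Arrays.equals(parts[1], DFSUtil.string2Bytes("a"));
    assert Arrays.equals(parts[2], DFSUtil.string2Bytes("b"));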

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2a5b8e5b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java
index e416e00..45c65ef 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java
+++ 

hadoop git commit: HDFS-10653. Optimize conversion from path string to components. Contributed by Daryn Sharp.

2016-07-21 Thread zhz
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 2f14d585c -> 6a7fe835f


HDFS-10653. Optimize conversion from path string to components. Contributed by 
Daryn Sharp.

(cherry picked from commit bd3dcf46e263b6e6aa3fca6a5d9936cc49e3280f)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6a7fe835
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6a7fe835
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6a7fe835

Branch: refs/heads/branch-2.8
Commit: 6a7fe835f19141cc633824cd5d21f1e30f014bce
Parents: 2f14d58
Author: Jing Zhao 
Authored: Thu Jul 21 11:14:39 2016 -0700
Committer: Zhe Zhang 
Committed: Thu Jul 21 11:29:27 2016 -0700

--
 .../java/org/apache/hadoop/hdfs/DFSUtil.java|  9 ++
 .../hadoop/hdfs/server/namenode/INode.java  | 20 +
 .../server/namenode/TestSnapshotPathINodes.java | 30 +++-
 3 files changed, 26 insertions(+), 33 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6a7fe835/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
index 0848521..da7d26a 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
@@ -321,6 +321,15 @@ public class DFSUtil {
   }
 
   /**
+   * Convert a UTF8 string to an array of byte arrays.
+   */
+  public static byte[][] getPathComponents(String path) {
+// avoid intermediate split to String[]
+final byte[] bytes = string2Bytes(path);
+return bytes2byteArray(bytes, bytes.length, (byte)Path.SEPARATOR_CHAR);
+  }
+
+  /**
* Splits the array of bytes into array of arrays of bytes
* on byte separator
* @param bytes the array of bytes to split

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6a7fe835/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
index 9d04fbb..b548763 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
@@ -727,18 +727,8 @@ public abstract class INode implements INodeAttributes, 
Diff.Element {
*/
   @VisibleForTesting
   public static byte[][] getPathComponents(String path) {
-return getPathComponents(getPathNames(path));
-  }
-
-  /** Convert strings to byte arrays for path components. */
-  static byte[][] getPathComponents(String[] strings) {
-if (strings.length == 0) {
-  return new byte[][]{null};
-}
-byte[][] bytes = new byte[strings.length][];
-for (int i = 0; i < strings.length; i++)
-  bytes[i] = DFSUtil.string2Bytes(strings[i]);
-return bytes;
+checkAbsolutePath(path);
+return DFSUtil.getPathComponents(path);
   }
 
   /**
@@ -747,11 +737,15 @@ public abstract class INode implements INodeAttributes, 
Diff.Element {
* @return array of path components.
*/
   public static String[] getPathNames(String path) {
+checkAbsolutePath(path);
+return StringUtils.split(path, Path.SEPARATOR_CHAR);
+  }
+
+  private static void checkAbsolutePath(final String path) {
 if (path == null || !path.startsWith(Path.SEPARATOR)) {
   throw new AssertionError("Absolute path required, but got '"
   + path + "'");
 }
-return StringUtils.split(path, Path.SEPARATOR_CHAR);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6a7fe835/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java
index e416e00..45c65ef 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java
@@ 

hadoop git commit: HDFS-10287. MiniDFSCluster should implement AutoCloseable. Contributed by Andras Bokor.

2016-07-21 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 7234fce25 -> 2f14d585c


HDFS-10287. MiniDFSCluster should implement AutoCloseable. Contributed by 
Andras Bokor.

(cherry picked from commit fcde6940e0cbdedb1105007e4857137ecdfa1284)
(cherry picked from commit 1222889f1a3b45df30c48a80d8c2bec7ad45ee43)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2f14d585
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2f14d585
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2f14d585

Branch: refs/heads/branch-2.8
Commit: 2f14d585c43d7ae206601621846199438b305dbf
Parents: 7234fce
Author: Akira Ajisaka 
Authored: Thu Jul 21 11:25:28 2016 -0700
Committer: Akira Ajisaka 
Committed: Thu Jul 21 11:28:46 2016 -0700

--
 .../org/apache/hadoop/hdfs/MiniDFSCluster.java  |  7 ++-
 .../apache/hadoop/hdfs/TestMiniDFSCluster.java  | 53 +++-
 2 files changed, 24 insertions(+), 36 deletions(-)
--
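
With MiniDFSCluster implementing AutoCloseable and close() delegating to shutdown(), callers can manage the cluster with try-with-resources instead of a null-checked finally block, as the test diff below does. A minimal usage sketch, assuming the hadoop-hdfs test artifact is on the classpath:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;

public class MiniClusterAutoCloseSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new HdfsConfiguration();
    try (MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build()) {
      cluster.waitActive();  // block until the NameNode and DataNodes are up
      System.out.println("data dir: " + cluster.getDataDirectory());
    }  // close() runs even if the body throws, so shutdown() is never skipped
  }
}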


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2f14d585/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
index 651b703..3b9df70 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
@@ -137,7 +137,7 @@ import com.google.common.collect.Sets;
  */
 @InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "Hive", "MapReduce", "Pig"})
 @InterfaceStability.Unstable
-public class MiniDFSCluster {
+public class MiniDFSCluster implements AutoCloseable {
 
   private static final String NAMESERVICE_ID_PREFIX = "nameserviceId";
   private static final Log LOG = LogFactory.getLog(MiniDFSCluster.class);
@@ -2959,4 +2959,9 @@ public class MiniDFSCluster {
   writer.close();
 }
   }
+
+  @Override
+  public void close() {
+shutdown();
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2f14d585/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMiniDFSCluster.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMiniDFSCluster.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMiniDFSCluster.java
index ec72d87..4d027dc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMiniDFSCluster.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMiniDFSCluster.java
@@ -64,22 +64,17 @@ public class TestMiniDFSCluster {
   public void testClusterWithoutSystemProperties() throws Throwable {
 String oldPrp = System.getProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA);
 System.clearProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA);
-MiniDFSCluster cluster = null;
-try {
-  Configuration conf = new HdfsConfiguration();
-  File testDataCluster1 = new File(testDataPath, CLUSTER_1);
-  String c1Path = testDataCluster1.getAbsolutePath();
-  conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, c1Path);
-  cluster = new MiniDFSCluster.Builder(conf).build();
+Configuration conf = new HdfsConfiguration();
+File testDataCluster1 = new File(testDataPath, CLUSTER_1);
+String c1Path = testDataCluster1.getAbsolutePath();
+conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, c1Path);
+try (MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build()){
   assertEquals(new File(c1Path + "/data"),
   new File(cluster.getDataDirectory()));
 } finally {
   if (oldPrp != null) {
 System.setProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA, oldPrp);
   }
-  if (cluster != null) {
-cluster.shutdown();
-  }
 }
   }
 
@@ -110,15 +105,12 @@ public class TestMiniDFSCluster {
 File testDataCluster5 = new File(testDataPath, CLUSTER_5);
 String c5Path = testDataCluster5.getAbsolutePath();
 conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, c5Path);
-MiniDFSCluster cluster5 = new MiniDFSCluster.Builder(conf)
-  .numDataNodes(1)
-  .checkDataNodeHostConfig(true)
-  .build();
-try {
+try (MiniDFSCluster cluster5 = new MiniDFSCluster.Builder(conf)
+.numDataNodes(1)
+.checkDataNodeHostConfig(true)
+.build()) {
   assertEquals("DataNode hostname config not respected", "MYHOST",
   cluster5.getDataNodes().get(0).getDatanodeId().getHostName());
-} finally {
-  

hadoop git commit: HDFS-10287. MiniDFSCluster should implement AutoCloseable. Contributed by Andras Bokor.

2016-07-21 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 419d6ce19 -> 1222889f1


HDFS-10287. MiniDFSCluster should implement AutoCloseable. Contributed by 
Andras Bokor.

(cherry picked from commit fcde6940e0cbdedb1105007e4857137ecdfa1284)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1222889f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1222889f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1222889f

Branch: refs/heads/branch-2
Commit: 1222889f1a3b45df30c48a80d8c2bec7ad45ee43
Parents: 419d6ce
Author: Akira Ajisaka 
Authored: Thu Jul 21 11:25:28 2016 -0700
Committer: Akira Ajisaka 
Committed: Thu Jul 21 11:28:19 2016 -0700

--
 .../org/apache/hadoop/hdfs/MiniDFSCluster.java  |  7 ++-
 .../apache/hadoop/hdfs/TestMiniDFSCluster.java  | 53 +++-
 2 files changed, 24 insertions(+), 36 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1222889f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
index 8215b70..248c4f2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
@@ -136,7 +136,7 @@ import com.google.common.collect.Sets;
  */
 @InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "Hive", "MapReduce", "Pig"})
 @InterfaceStability.Unstable
-public class MiniDFSCluster {
+public class MiniDFSCluster implements AutoCloseable {
 
   private static final String NAMESERVICE_ID_PREFIX = "nameserviceId";
   private static final Log LOG = LogFactory.getLog(MiniDFSCluster.class);
@@ -2971,4 +2971,9 @@ public class MiniDFSCluster {
   writer.close();
 }
   }
+
+  @Override
+  public void close() {
+shutdown();
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1222889f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMiniDFSCluster.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMiniDFSCluster.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMiniDFSCluster.java
index ec72d87..4d027dc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMiniDFSCluster.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMiniDFSCluster.java
@@ -64,22 +64,17 @@ public class TestMiniDFSCluster {
   public void testClusterWithoutSystemProperties() throws Throwable {
 String oldPrp = System.getProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA);
 System.clearProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA);
-MiniDFSCluster cluster = null;
-try {
-  Configuration conf = new HdfsConfiguration();
-  File testDataCluster1 = new File(testDataPath, CLUSTER_1);
-  String c1Path = testDataCluster1.getAbsolutePath();
-  conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, c1Path);
-  cluster = new MiniDFSCluster.Builder(conf).build();
+Configuration conf = new HdfsConfiguration();
+File testDataCluster1 = new File(testDataPath, CLUSTER_1);
+String c1Path = testDataCluster1.getAbsolutePath();
+conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, c1Path);
+try (MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build()){
   assertEquals(new File(c1Path + "/data"),
   new File(cluster.getDataDirectory()));
 } finally {
   if (oldPrp != null) {
 System.setProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA, oldPrp);
   }
-  if (cluster != null) {
-cluster.shutdown();
-  }
 }
   }
 
@@ -110,15 +105,12 @@ public class TestMiniDFSCluster {
 File testDataCluster5 = new File(testDataPath, CLUSTER_5);
 String c5Path = testDataCluster5.getAbsolutePath();
 conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, c5Path);
-MiniDFSCluster cluster5 = new MiniDFSCluster.Builder(conf)
-  .numDataNodes(1)
-  .checkDataNodeHostConfig(true)
-  .build();
-try {
+try (MiniDFSCluster cluster5 = new MiniDFSCluster.Builder(conf)
+.numDataNodes(1)
+.checkDataNodeHostConfig(true)
+.build()) {
   assertEquals("DataNode hostname config not respected", "MYHOST",
   cluster5.getDataNodes().get(0).getDatanodeId().getHostName());
-} finally {
-  MiniDFSCluster.shutdownCluster(cluster5);
 }
   }
 
@@ -128,9 +120,8 @@ 

hadoop git commit: HDFS-10287. MiniDFSCluster should implement AutoCloseable. Contributed by Andras Bokor.

2016-07-21 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/trunk bd3dcf46e -> fcde6940e


HDFS-10287. MiniDFSCluster should implement AutoCloseable. Contributed by 
Andras Bokor.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fcde6940
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fcde6940
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fcde6940

Branch: refs/heads/trunk
Commit: fcde6940e0cbdedb1105007e4857137ecdfa1284
Parents: bd3dcf4
Author: Akira Ajisaka 
Authored: Thu Jul 21 11:25:28 2016 -0700
Committer: Akira Ajisaka 
Committed: Thu Jul 21 11:26:08 2016 -0700

--
 .../org/apache/hadoop/hdfs/MiniDFSCluster.java  |  7 ++-
 .../apache/hadoop/hdfs/TestMiniDFSCluster.java  | 53 +++-
 2 files changed, 24 insertions(+), 36 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fcde6940/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
index 3b3a88b..3bb3a10 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java
@@ -136,7 +136,7 @@ import com.google.common.collect.Sets;
  */
 @InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "Hive", "MapReduce", "Pig"})
 @InterfaceStability.Unstable
-public class MiniDFSCluster {
+public class MiniDFSCluster implements AutoCloseable {
 
   private static final String NAMESERVICE_ID_PREFIX = "nameserviceId";
   private static final Log LOG = LogFactory.getLog(MiniDFSCluster.class);
@@ -3057,4 +3057,9 @@ public class MiniDFSCluster {
   writer.close();
 }
   }
+
+  @Override
+  public void close() {
+shutdown();
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fcde6940/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMiniDFSCluster.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMiniDFSCluster.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMiniDFSCluster.java
index ec72d87..4d027dc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMiniDFSCluster.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestMiniDFSCluster.java
@@ -64,22 +64,17 @@ public class TestMiniDFSCluster {
   public void testClusterWithoutSystemProperties() throws Throwable {
 String oldPrp = System.getProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA);
 System.clearProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA);
-MiniDFSCluster cluster = null;
-try {
-  Configuration conf = new HdfsConfiguration();
-  File testDataCluster1 = new File(testDataPath, CLUSTER_1);
-  String c1Path = testDataCluster1.getAbsolutePath();
-  conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, c1Path);
-  cluster = new MiniDFSCluster.Builder(conf).build();
+Configuration conf = new HdfsConfiguration();
+File testDataCluster1 = new File(testDataPath, CLUSTER_1);
+String c1Path = testDataCluster1.getAbsolutePath();
+conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, c1Path);
+try (MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build()){
   assertEquals(new File(c1Path + "/data"),
   new File(cluster.getDataDirectory()));
 } finally {
   if (oldPrp != null) {
 System.setProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA, oldPrp);
   }
-  if (cluster != null) {
-cluster.shutdown();
-  }
 }
   }
 
@@ -110,15 +105,12 @@ public class TestMiniDFSCluster {
 File testDataCluster5 = new File(testDataPath, CLUSTER_5);
 String c5Path = testDataCluster5.getAbsolutePath();
 conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, c5Path);
-MiniDFSCluster cluster5 = new MiniDFSCluster.Builder(conf)
-  .numDataNodes(1)
-  .checkDataNodeHostConfig(true)
-  .build();
-try {
+try (MiniDFSCluster cluster5 = new MiniDFSCluster.Builder(conf)
+.numDataNodes(1)
+.checkDataNodeHostConfig(true)
+.build()) {
   assertEquals("DataNode hostname config not respected", "MYHOST",
   cluster5.getDataNodes().get(0).getDatanodeId().getHostName());
-} finally {
-  MiniDFSCluster.shutdownCluster(cluster5);
 }
   }
 
@@ -128,9 +120,8 @@ public class TestMiniDFSCluster {
 StorageType[][] storageType = new 

hadoop git commit: HDFS-10653. Optimize conversion from path string to components. Contributed by Daryn Sharp.

2016-07-21 Thread jing9
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 eff65463d -> 419d6ce19


HDFS-10653. Optimize conversion from path string to components. Contributed by 
Daryn Sharp.

(cherry picked from commit bd3dcf46e263b6e6aa3fca6a5d9936cc49e3280f)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/419d6ce1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/419d6ce1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/419d6ce1

Branch: refs/heads/branch-2
Commit: 419d6ce19cea0654b32d220af93fe230f94a
Parents: eff6546
Author: Jing Zhao 
Authored: Thu Jul 21 11:14:39 2016 -0700
Committer: Jing Zhao 
Committed: Thu Jul 21 11:15:30 2016 -0700

--
 .../java/org/apache/hadoop/hdfs/DFSUtil.java|  9 ++
 .../hadoop/hdfs/server/namenode/INode.java  | 20 +
 .../server/namenode/TestSnapshotPathINodes.java | 30 +++-
 3 files changed, 26 insertions(+), 33 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/419d6ce1/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
index 0848521..da7d26a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
@@ -321,6 +321,15 @@ public class DFSUtil {
   }
 
   /**
+   * Convert a UTF8 string to an array of byte arrays.
+   */
+  public static byte[][] getPathComponents(String path) {
+// avoid intermediate split to String[]
+final byte[] bytes = string2Bytes(path);
+return bytes2byteArray(bytes, bytes.length, (byte)Path.SEPARATOR_CHAR);
+  }
+
+  /**
* Splits the array of bytes into array of arrays of bytes
* on byte separator
* @param bytes the array of bytes to split

http://git-wip-us.apache.org/repos/asf/hadoop/blob/419d6ce1/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
index c8f36e1..eb910d4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
@@ -727,18 +727,8 @@ public abstract class INode implements INodeAttributes, Diff.Element<byte[]> {
*/
   @VisibleForTesting
   public static byte[][] getPathComponents(String path) {
-return getPathComponents(getPathNames(path));
-  }
-
-  /** Convert strings to byte arrays for path components. */
-  static byte[][] getPathComponents(String[] strings) {
-if (strings.length == 0) {
-  return new byte[][]{null};
-}
-byte[][] bytes = new byte[strings.length][];
-for (int i = 0; i < strings.length; i++)
-  bytes[i] = DFSUtil.string2Bytes(strings[i]);
-return bytes;
+checkAbsolutePath(path);
+return DFSUtil.getPathComponents(path);
   }
 
   /**
@@ -747,11 +737,15 @@ public abstract class INode implements INodeAttributes, Diff.Element<byte[]> {
* @return array of path components.
*/
   public static String[] getPathNames(String path) {
+checkAbsolutePath(path);
+return StringUtils.split(path, Path.SEPARATOR_CHAR);
+  }
+
+  private static void checkAbsolutePath(final String path) {
 if (path == null || !path.startsWith(Path.SEPARATOR)) {
   throw new AssertionError("Absolute path required, but got '"
   + path + "'");
 }
-return StringUtils.split(path, Path.SEPARATOR_CHAR);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/419d6ce1/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java
index e416e00..45c65ef 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java
@@ 

hadoop git commit: YARN-3707. RM Web UI queue filter doesn't work. Contributed by Wangda Tan (cherry picked from commit 446d51591e6e99cc60a85c4b9fbac379a8caa49d) (cherry picked from commit 03af057a477

2016-07-21 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.7 5220d4dc2 -> 993801026


YARN-3707. RM Web UI queue filter doesn't work. Contributed by Wangda Tan
(cherry picked from commit 446d51591e6e99cc60a85c4b9fbac379a8caa49d)
(cherry picked from commit 03af057a477b2a31b66831d66f8a38cff42f2201)

Conflicts:

hadoop-yarn-project/CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/99380102
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/99380102
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/99380102

Branch: refs/heads/branch-2.7
Commit: 99380102642da6ced784ed705a5c21d3e7be5c4c
Parents: 5220d4d
Author: Jason Lowe 
Authored: Thu Jul 21 18:17:29 2016 +
Committer: Jason Lowe 
Committed: Thu Jul 21 18:17:29 2016 +

--
 hadoop-yarn-project/CHANGES.txt| 2 ++
 .../server/resourcemanager/webapp/CapacitySchedulerPage.java   | 6 +++---
 2 files changed, 5 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/99380102/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 4a001fd..a5f0445 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -21,6 +21,8 @@ Release 2.7.4 - UNRELEASED
YARN-5353. ResourceManager can leak delegation tokens when they are shared
across apps. (Jason Lowe via Varun Saxena)
 
+YARN-3707. RM Web UI queue filter doesn't work. (Wangda Tan via jianhe)
+
 Release 2.7.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/99380102/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
index a7a1f18..cf4dc1f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
@@ -303,7 +303,7 @@ class CapacitySchedulerPage extends RmView {
 a(_Q).$style(width(Q_MAX_WIDTH)).
   span().$style(join(width(used), ";left:0%;",
   used > 1 ? Q_OVER : Q_UNDER))._(".")._().
-  span(".q", "root")._().
+  span(".q", "Queue: root")._().
 span().$class("qstats").$style(left(Q_STATS_POS)).
   _(join(percent(used), " used"))._().
 _(QueueBlock.class)._();
@@ -377,8 +377,8 @@ class CapacitySchedulerPage extends RmView {
   "  });",
   "  $('#cs').bind('select_node.jstree', function(e, data) {",
   "var q = $('.q', data.rslt.obj).first().text();",
-  "if (q == 'root') q = '';",
-  "else q = '^' + q.substr(q.lastIndexOf('.') + 1) + '$';",
+  "if (q == 'Queue: root') q = '';",
+  "else q = '^' + q.substr(q.lastIndexOf(':') + 2) + '$';",
   "$('#apps').dataTable().fnFilter(q, 4, true);",
   "  });",
   "  $('#cs').show();",





hadoop git commit: HDFS-10653. Optimize conversion from path string to components. Contributed by Daryn Sharp.

2016-07-21 Thread jing9
Repository: hadoop
Updated Branches:
  refs/heads/trunk 557a245d8 -> bd3dcf46e


HDFS-10653. Optimize conversion from path string to components. Contributed by 
Daryn Sharp.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bd3dcf46
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bd3dcf46
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bd3dcf46

Branch: refs/heads/trunk
Commit: bd3dcf46e263b6e6aa3fca6a5d9936cc49e3280f
Parents: 557a245
Author: Jing Zhao 
Authored: Thu Jul 21 11:14:39 2016 -0700
Committer: Jing Zhao 
Committed: Thu Jul 21 11:14:39 2016 -0700

--
 .../java/org/apache/hadoop/hdfs/DFSUtil.java|  9 ++
 .../hadoop/hdfs/server/namenode/INode.java  | 20 +
 .../server/namenode/TestSnapshotPathINodes.java | 30 +++-
 3 files changed, 26 insertions(+), 33 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/bd3dcf46/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
index f1a6de7..0ba80d9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
@@ -320,6 +320,15 @@ public class DFSUtil {
   }
 
   /**
+   * Convert a UTF8 string to an array of byte arrays.
+   */
+  public static byte[][] getPathComponents(String path) {
+// avoid intermediate split to String[]
+final byte[] bytes = string2Bytes(path);
+return bytes2byteArray(bytes, bytes.length, (byte)Path.SEPARATOR_CHAR);
+  }
+
+  /**
* Splits the array of bytes into array of arrays of bytes
* on byte separator
* @param bytes the array of bytes to split

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bd3dcf46/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
index c8f36e1..eb910d4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
@@ -727,18 +727,8 @@ public abstract class INode implements INodeAttributes, Diff.Element<byte[]> {
*/
   @VisibleForTesting
   public static byte[][] getPathComponents(String path) {
-return getPathComponents(getPathNames(path));
-  }
-
-  /** Convert strings to byte arrays for path components. */
-  static byte[][] getPathComponents(String[] strings) {
-if (strings.length == 0) {
-  return new byte[][]{null};
-}
-byte[][] bytes = new byte[strings.length][];
-for (int i = 0; i < strings.length; i++)
-  bytes[i] = DFSUtil.string2Bytes(strings[i]);
-return bytes;
+checkAbsolutePath(path);
+return DFSUtil.getPathComponents(path);
   }
 
   /**
@@ -747,11 +737,15 @@ public abstract class INode implements INodeAttributes, Diff.Element<byte[]> {
* @return array of path components.
*/
   public static String[] getPathNames(String path) {
+checkAbsolutePath(path);
+return StringUtils.split(path, Path.SEPARATOR_CHAR);
+  }
+
+  private static void checkAbsolutePath(final String path) {
 if (path == null || !path.startsWith(Path.SEPARATOR)) {
   throw new AssertionError("Absolute path required, but got '"
   + path + "'");
 }
-return StringUtils.split(path, Path.SEPARATOR_CHAR);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bd3dcf46/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java
index e416e00..45c65ef 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSnapshotPathINodes.java
@@ -138,8 +138,7 @@ public class TestSnapshotPathINodes {
   @Test 

hadoop git commit: HADOOP-11149. Increase the timeout of TestZKFailoverController. Contributed by Steve Loughran. (cherry picked from commit 579f9030dadac4bf53f38ae8499a95a3bdd43c97)

2016-07-21 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.7 1b8641a3c -> 5220d4dc2


HADOOP-11149. Increase the timeout of TestZKFailoverController. Contributed by 
Steve Loughran.
(cherry picked from commit 579f9030dadac4bf53f38ae8499a95a3bdd43c97)

Conflicts:

hadoop-common-project/hadoop-common/CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5220d4dc
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5220d4dc
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5220d4dc

Branch: refs/heads/branch-2.7
Commit: 5220d4dc27a5c6b050a3406bc37f39ab632f56bb
Parents: 1b8641a
Author: Jason Lowe 
Authored: Thu Jul 21 15:01:56 2016 +
Committer: Jason Lowe 
Committed: Thu Jul 21 15:01:56 2016 +

--
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 .../org/apache/hadoop/ha/MiniZKFCCluster.java   |   8 +-
 .../hadoop/ha/TestZKFailoverController.java | 633 +--
 3 files changed, 308 insertions(+), 336 deletions(-)
--
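
Apart from hardening MiniZKFCCluster.stop() against a null thread list, the patch swaps per-test timeouts for a single class-wide JUnit rule. A minimal sketch of that pattern with a hypothetical test class (JUnit 4):

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;

public class TimeoutRuleSketch {
  // One rule applies the same ceiling to every @Test method in the class.
  @Rule
  public Timeout testTimeout = new Timeout(3 * 60 * 1000);  // 3 minutes, in ms

  @Test
  public void completesQuickly() throws Exception {
    Thread.sleep(10);  // any test that outlives the rule's limit fails with a timeout
  }
}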


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5220d4dc/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index e974931..60cb70e 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -25,6 +25,9 @@ Release 2.7.4 - UNRELEASED
 HADOOP-12991. Conflicting default ports in DelegateToFileSystem.
 (Kai Sasaki via aajisaka)
 
+HADOOP-11149. Increase the timeout of TestZKFailoverController.
+(Steve Loughran via wheat9)
+
 Release 2.7.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5220d4dc/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java
--
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java
index 5aee611..04f6576 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/MiniZKFCCluster.java
@@ -102,9 +102,11 @@ public class MiniZKFCCluster {
* @throws Exception if either of the services had encountered a fatal error
*/
   public void stop() throws Exception {
-for (DummyZKFCThread thr : thrs) {
-  if (thr != null) {
-thr.interrupt();
+if (thrs != null) {
+  for (DummyZKFCThread thr : thrs) {
+if (thr != null) {
+  thr.interrupt();
+}
   }
 }
 if (ctx != null) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5220d4dc/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java
--
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java
index d8271c5..115be0a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/TestZKFailoverController.java
@@ -34,14 +34,23 @@ import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.ZooKeeper;
 import org.apache.zookeeper.data.Stat;
 import org.apache.zookeeper.server.auth.DigestAuthenticationProvider;
+import org.junit.After;
 import org.junit.Before;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.Timeout;
 import org.mockito.Mockito;
 
 public class TestZKFailoverController extends ClientBaseWithFixes {
   private Configuration conf;
   private MiniZKFCCluster cluster;
-  
+
+  /**
+   * Set the timeout for every test
+   */
+  @Rule
+  public Timeout testTimeout = new Timeout(3 * 60 * 1000);
+
   // Set up ZK digest-based credentials for the purposes of the tests,
   // to make sure all of our functionality works with auth and ACLs
   // present.
@@ -74,11 +83,21 @@ public class TestZKFailoverController extends ClientBaseWithFixes {
 this.cluster = new MiniZKFCCluster(conf, getServer(serverFactory));
   }
 
+  @After
+  public void teardown() {
+if (cluster != null) {
+  try {
+cluster.stop();
+  } catch (Exception e) {
+LOG.warn("When stopping the cluster", e);
+  }
+}
+  }
   /**
* Test that the various command lines for 

hadoop git commit: YARN-5092. TestRMDelegationTokens fails intermittently. Contributed by Jason Lowe.

2016-07-21 Thread rohithsharmaks
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 cdcd131d5 -> 7234fce25


YARN-5092. TestRMDelegationTokens fails intermittently. Contributed by Jason 
Lowe.

(cherry picked from commit 557a245d83c358211f51b3ab1950d707a70679d9)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7234fce2
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7234fce2
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7234fce2

Branch: refs/heads/branch-2.8
Commit: 7234fce25b3d3896c68ad85215486d112a74c39f
Parents: cdcd131
Author: Rohith Sharma K S 
Authored: Thu Jul 21 12:47:27 2016 +0530
Committer: Rohith Sharma K S 
Committed: Thu Jul 21 12:49:52 2016 +0530

--
 .../security/TestRMDelegationTokens.java| 36 
 1 file changed, 22 insertions(+), 14 deletions(-)
--
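
The intermittent failures came from shared mutable state: one YarnConfiguration (and a cached login user) carried one test's settings into the next. The fix keeps a pristine testConf and lets each test derive its own copy. A small sketch of that copy-constructor pattern, with illustrative keys and values:

import org.apache.hadoop.conf.Configuration;

public class ConfIsolationSketch {
  public static void main(String[] args) {
    Configuration base = new Configuration(false);  // shared per-class defaults
    base.set("yarn.resourcemanager.store.class",
        "org.apache.hadoop.yarn.server.resourcemanager.recovery.MemoryRMStateStore");

    // Each test works on a copy, so its overrides never leak back to 'base'.
    Configuration perTest = new Configuration(base);
    perTest.set("hadoop.security.authentication", "kerberos");

    System.out.println(base.get("hadoop.security.authentication", "simple"));  // simple
    System.out.println(perTest.get("hadoop.security.authentication"));         // kerberos
  }
}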


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7234fce2/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestRMDelegationTokens.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestRMDelegationTokens.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestRMDelegationTokens.java
index 068d008..80310a5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestRMDelegationTokens.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestRMDelegationTokens.java
@@ -43,7 +43,6 @@ import org.apache.hadoop.yarn.server.resourcemanager.TestRMRestart.TestSecurityM
 import org.apache.hadoop.yarn.server.resourcemanager.recovery.MemoryRMStateStore;
 import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore;
 import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.RMState;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.LogManager;
@@ -54,23 +53,24 @@ import org.junit.Test;
 
 public class TestRMDelegationTokens {
 
-  private YarnConfiguration conf;
+  private YarnConfiguration testConf;
 
   @Before
   public void setup() {
 Logger rootLogger = LogManager.getRootLogger();
 rootLogger.setLevel(Level.DEBUG);
 ExitUtil.disableSystemExit();
-conf = new YarnConfiguration();
-UserGroupInformation.setConfiguration(conf);
-conf.set(YarnConfiguration.RM_STORE, MemoryRMStateStore.class.getName());
-conf.set(YarnConfiguration.RM_SCHEDULER, FairScheduler.class.getName());
+testConf = new YarnConfiguration();
+UserGroupInformation.setLoginUser(null);
+UserGroupInformation.setConfiguration(testConf);
+testConf.set(YarnConfiguration.RM_STORE,
+MemoryRMStateStore.class.getName());
   }
 
   // Test the DT mast key in the state-store when the mast key is being rolled.
   @Test(timeout = 15000)
   public void testRMDTMasterKeyStateOnRollingMasterKey() throws Exception {
-Configuration conf = new Configuration();
+Configuration conf = new Configuration(testConf);
 conf.set("hadoop.security.authentication", "kerberos");
 UserGroupInformation.setConfiguration(conf);
 MemoryRMStateStore memStore = new MemoryRMStateStore();
@@ -93,9 +93,6 @@ public class TestRMDelegationTokens {
 rm1.getRMContext().getRMDelegationTokenSecretManager();
 // assert all master keys are saved
 Assert.assertEquals(dtSecretManager.getAllMasterKeys(), 
rmDTMasterKeyState);
-Set<DelegationKey> expiringKeys = new HashSet<DelegationKey>();
-expiringKeys.addAll(dtSecretManager.getAllMasterKeys());
-
 
 // request to generate a RMDelegationToken
 GetDelegationTokenRequest request = mock(GetDelegationTokenRequest.class);
@@ -131,13 +128,13 @@ public class TestRMDelegationTokens {
   @Test(timeout = 15000)
   public void testRemoveExpiredMasterKeyInRMStateStore() throws Exception {
 MemoryRMStateStore memStore = new MemoryRMStateStore();
-memStore.init(conf);
+memStore.init(testConf);
 RMState rmState = memStore.getState();
 
 Set<DelegationKey> rmDTMasterKeyState =
 rmState.getRMDTSecretManagerState().getMasterKeyState();
 
-MockRM rm1 = new MyMockRM(conf, memStore);
+MockRM rm1 = new MyMockRM(testConf, memStore);
   

hadoop git commit: YARN-5092. TestRMDelegationTokens fails intermittently. Contributed by Jason Lowe.

2016-07-21 Thread rohithsharmaks
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 d9ed29e8c -> eff65463d


YARN-5092. TestRMDelegationTokens fails intermittently. Contributed by Jason 
Lowe.

(cherry picked from commit 557a245d83c358211f51b3ab1950d707a70679d9)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/eff65463
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/eff65463
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/eff65463

Branch: refs/heads/branch-2
Commit: eff65463dea6249c3ca389cb736a78752f680339
Parents: d9ed29e
Author: Rohith Sharma K S 
Authored: Thu Jul 21 12:47:27 2016 +0530
Committer: Rohith Sharma K S 
Committed: Thu Jul 21 12:49:06 2016 +0530

--
 .../security/TestRMDelegationTokens.java| 36 
 1 file changed, 22 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/eff65463/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestRMDelegationTokens.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestRMDelegationTokens.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestRMDelegationTokens.java
index 068d008..80310a5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestRMDelegationTokens.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestRMDelegationTokens.java
@@ -43,7 +43,6 @@ import org.apache.hadoop.yarn.server.resourcemanager.TestRMRestart.TestSecurityM
 import org.apache.hadoop.yarn.server.resourcemanager.recovery.MemoryRMStateStore;
 import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore;
 import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.RMState;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.LogManager;
@@ -54,23 +53,24 @@ import org.junit.Test;
 
 public class TestRMDelegationTokens {
 
-  private YarnConfiguration conf;
+  private YarnConfiguration testConf;
 
   @Before
   public void setup() {
 Logger rootLogger = LogManager.getRootLogger();
 rootLogger.setLevel(Level.DEBUG);
 ExitUtil.disableSystemExit();
-conf = new YarnConfiguration();
-UserGroupInformation.setConfiguration(conf);
-conf.set(YarnConfiguration.RM_STORE, MemoryRMStateStore.class.getName());
-conf.set(YarnConfiguration.RM_SCHEDULER, FairScheduler.class.getName());
+testConf = new YarnConfiguration();
+UserGroupInformation.setLoginUser(null);
+UserGroupInformation.setConfiguration(testConf);
+testConf.set(YarnConfiguration.RM_STORE,
+MemoryRMStateStore.class.getName());
   }
 
   // Test the DT mast key in the state-store when the mast key is being rolled.
   @Test(timeout = 15000)
   public void testRMDTMasterKeyStateOnRollingMasterKey() throws Exception {
-Configuration conf = new Configuration();
+Configuration conf = new Configuration(testConf);
 conf.set("hadoop.security.authentication", "kerberos");
 UserGroupInformation.setConfiguration(conf);
 MemoryRMStateStore memStore = new MemoryRMStateStore();
@@ -93,9 +93,6 @@ public class TestRMDelegationTokens {
 rm1.getRMContext().getRMDelegationTokenSecretManager();
 // assert all master keys are saved
 Assert.assertEquals(dtSecretManager.getAllMasterKeys(), 
rmDTMasterKeyState);
-Set<DelegationKey> expiringKeys = new HashSet<DelegationKey>();
-expiringKeys.addAll(dtSecretManager.getAllMasterKeys());
-
 
 // request to generate a RMDelegationToken
 GetDelegationTokenRequest request = mock(GetDelegationTokenRequest.class);
@@ -131,13 +128,13 @@ public class TestRMDelegationTokens {
   @Test(timeout = 15000)
   public void testRemoveExpiredMasterKeyInRMStateStore() throws Exception {
 MemoryRMStateStore memStore = new MemoryRMStateStore();
-memStore.init(conf);
+memStore.init(testConf);
 RMState rmState = memStore.getState();
 
 Set<DelegationKey> rmDTMasterKeyState =
 rmState.getRMDTSecretManagerState().getMasterKeyState();
 
-MockRM rm1 = new MyMockRM(conf, memStore);
+MockRM rm1 = new MyMockRM(testConf, memStore);
 

hadoop git commit: YARN-5092. TestRMDelegationTokens fails intermittently. Contributed by Jason Lowe.

2016-07-21 Thread rohithsharmaks
Repository: hadoop
Updated Branches:
  refs/heads/trunk be34b2a8f -> 557a245d8


YARN-5092. TestRMDelegationTokens fails intermittently. Contributed by Jason 
Lowe.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/557a245d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/557a245d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/557a245d

Branch: refs/heads/trunk
Commit: 557a245d83c358211f51b3ab1950d707a70679d9
Parents: be34b2a
Author: Rohith Sharma K S 
Authored: Thu Jul 21 12:47:27 2016 +0530
Committer: Rohith Sharma K S 
Committed: Thu Jul 21 12:47:27 2016 +0530

--
 .../security/TestRMDelegationTokens.java| 36 
 1 file changed, 22 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/557a245d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestRMDelegationTokens.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestRMDelegationTokens.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestRMDelegationTokens.java
index 068d008..80310a5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestRMDelegationTokens.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestRMDelegationTokens.java
@@ -43,7 +43,6 @@ import org.apache.hadoop.yarn.server.resourcemanager.TestRMRestart.TestSecurityM
 import org.apache.hadoop.yarn.server.resourcemanager.recovery.MemoryRMStateStore;
 import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore;
 import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore.RMState;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.LogManager;
@@ -54,23 +53,24 @@ import org.junit.Test;
 
 public class TestRMDelegationTokens {
 
-  private YarnConfiguration conf;
+  private YarnConfiguration testConf;
 
   @Before
   public void setup() {
 Logger rootLogger = LogManager.getRootLogger();
 rootLogger.setLevel(Level.DEBUG);
 ExitUtil.disableSystemExit();
-conf = new YarnConfiguration();
-UserGroupInformation.setConfiguration(conf);
-conf.set(YarnConfiguration.RM_STORE, MemoryRMStateStore.class.getName());
-conf.set(YarnConfiguration.RM_SCHEDULER, FairScheduler.class.getName());
+testConf = new YarnConfiguration();
+UserGroupInformation.setLoginUser(null);
+UserGroupInformation.setConfiguration(testConf);
+testConf.set(YarnConfiguration.RM_STORE,
+MemoryRMStateStore.class.getName());
   }
 
   // Test the DT mast key in the state-store when the mast key is being rolled.
   @Test(timeout = 15000)
   public void testRMDTMasterKeyStateOnRollingMasterKey() throws Exception {
-Configuration conf = new Configuration();
+Configuration conf = new Configuration(testConf);
 conf.set("hadoop.security.authentication", "kerberos");
 UserGroupInformation.setConfiguration(conf);
 MemoryRMStateStore memStore = new MemoryRMStateStore();
@@ -93,9 +93,6 @@ public class TestRMDelegationTokens {
 rm1.getRMContext().getRMDelegationTokenSecretManager();
 // assert all master keys are saved
 Assert.assertEquals(dtSecretManager.getAllMasterKeys(), 
rmDTMasterKeyState);
-Set<DelegationKey> expiringKeys = new HashSet<DelegationKey>();
-expiringKeys.addAll(dtSecretManager.getAllMasterKeys());
-
 
 // request to generate a RMDelegationToken
 GetDelegationTokenRequest request = mock(GetDelegationTokenRequest.class);
@@ -131,13 +128,13 @@ public class TestRMDelegationTokens {
   @Test(timeout = 15000)
   public void testRemoveExpiredMasterKeyInRMStateStore() throws Exception {
 MemoryRMStateStore memStore = new MemoryRMStateStore();
-memStore.init(conf);
+memStore.init(testConf);
 RMState rmState = memStore.getState();
 
 Set<DelegationKey> rmDTMasterKeyState =
 rmState.getRMDTSecretManagerState().getMasterKeyState();
 
-MockRM rm1 = new MyMockRM(conf, memStore);
+MockRM rm1 = new MyMockRM(testConf, memStore);
 rm1.start();
 RMDelegationTokenSecretManager dtSecretManager =
 

hadoop git commit: YARN-1126. Add validation of users input nodes-states options to nodes CLI. Contributed by Wei Yan.

2016-07-21 Thread rohithsharmaks
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 66f305609 -> cdcd131d5


YARN-1126. Add validation of users input nodes-states options to nodes CLI. 
Contributed by Wei Yan.

(cherry picked from commit be34b2a8fd30a1a0e803ac8b2198c695600a9e46)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/cdcd131d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/cdcd131d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/cdcd131d

Branch: refs/heads/branch-2.8
Commit: cdcd131d5fa8c209d200124f92f0a46b07489504
Parents: 66f3056
Author: Rohith Sharma K S 
Authored: Thu Jul 21 12:20:47 2016 +0530
Committer: Rohith Sharma K S 
Committed: Thu Jul 21 12:25:14 2016 +0530

--
 .../apache/hadoop/yarn/client/cli/NodeCLI.java  | 32 ++--
 .../hadoop/yarn/client/cli/TestYarnCLI.java | 31 +++
 2 files changed, 48 insertions(+), 15 deletions(-)
--
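
The validation amounts to catching the IllegalArgumentException thrown by NodeState.valueOf and printing the list of legal values instead of crashing the CLI. A self-contained sketch; the enum below is a stand-in for YARN's NodeState, not the real class:

import java.util.Arrays;
import java.util.stream.Collectors;

public class NodeStateParseSketch {
  enum NodeState { NEW, RUNNING, UNHEALTHY, DECOMMISSIONED, LOST, REBOOTED }

  static NodeState parse(String type) {
    try {
      return NodeState.valueOf(type.trim().toUpperCase());
    } catch (IllegalArgumentException ex) {
      System.out.println("The node state " + type + " is invalid.");
      System.out.println("The valid node state can be one of the following: "
          + Arrays.stream(NodeState.values())
              .map(Enum::name)
              .collect(Collectors.joining(",")) + ".");
      return null;
    }
  }

  public static void main(String[] args) {
    System.out.println(parse("running"));  // RUNNING
    System.out.println(parse("bogus"));    // prints the error plus the valid list
  }
}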


http://git-wip-us.apache.org/repos/asf/hadoop/blob/cdcd131d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
index f51fee9..288a5d2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
@@ -76,7 +76,8 @@ public class NodeCLI extends YarnCLI {
 "based on node state, all -all to list all nodes, " +
 "-showDetails to display more details about each node.");
 Option nodeStateOpt = new Option(NODE_STATE_CMD, true,
-"Works with -list to filter nodes based on input comma-separated list 
of node states.");
+"Works with -list to filter nodes based on input comma-separated " +
+"list of node states. " + getAllValidNodeStates());
 nodeStateOpt.setValueSeparator(',');
 nodeStateOpt.setArgs(Option.UNLIMITED_VALUES);
 nodeStateOpt.setArgName("States");
@@ -89,6 +90,14 @@ public class NodeCLI extends YarnCLI {
 opts.addOption(showDetailsOpt);
 opts.getOption(STATUS_CMD).setArgName("NodeId");
 
+if (args != null && args.length > 0) {
+  for (int i = args.length - 1; i >= 0; i--) {
+if (args[i].equalsIgnoreCase("-" + NODE_ALL)) {
+  args[i] = "-" + NODE_ALL;
+}
+  }
+}
+
 int exitCode = -1;
 CommandLine cliParser = null;
 try {
@@ -116,8 +125,15 @@ public class NodeCLI extends YarnCLI {
 if (types != null) {
   for (String type : types) {
 if (!type.trim().isEmpty()) {
-  nodeStates.add(NodeState.valueOf(
-  org.apache.hadoop.util.StringUtils.toUpperCase(type.trim())));
+  try {
+nodeStates.add(NodeState.valueOf(
+org.apache.hadoop.util.StringUtils.toUpperCase(
+type.trim())));
+  } catch (IllegalArgumentException ex) {
+sysout.println("The node state " + type + " is invalid.");
+sysout.println(getAllValidNodeStates());
+return exitCode;
+  }
 }
   }
 }
@@ -320,4 +336,14 @@ public class NodeCLI extends YarnCLI {
 nodeReportStr.close();
 sysout.println(baos.toString("UTF-8"));
   }
+
+  private String getAllValidNodeStates() {
+StringBuilder sb = new StringBuilder();
+sb.append("The valid node state can be one of the following: ");
+for (NodeState state : NodeState.values()) {
+  sb.append(state).append(",");
+}
+String output = sb.toString();
+return output.substring(0, output.length() - 1) + ".";
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cdcd131d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
index 3783aac..380f3db 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java

hadoop git commit: YARN-1126. Add validation of users input nodes-states options to nodes CLI. Contributed by Wei Yan.

2016-07-21 Thread rohithsharmaks
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 2bf5c4eb1 -> d9ed29e8c


YARN-1126. Add validation of users input nodes-states options to nodes CLI. 
Contributed by Wei Yan.

(cherry picked from commit be34b2a8fd30a1a0e803ac8b2198c695600a9e46)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d9ed29e8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d9ed29e8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d9ed29e8

Branch: refs/heads/branch-2
Commit: d9ed29e8c600de1f6fb74634918ce7d949ea5e11
Parents: 2bf5c4e
Author: Rohith Sharma K S 
Authored: Thu Jul 21 12:20:47 2016 +0530
Committer: Rohith Sharma K S 
Committed: Thu Jul 21 12:22:34 2016 +0530

--
 .../apache/hadoop/yarn/client/cli/NodeCLI.java  | 32 ++--
 .../hadoop/yarn/client/cli/TestYarnCLI.java | 31 +++
 2 files changed, 48 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d9ed29e8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
index f51fee9..288a5d2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
@@ -76,7 +76,8 @@ public class NodeCLI extends YarnCLI {
 "based on node state, all -all to list all nodes, " +
 "-showDetails to display more details about each node.");
 Option nodeStateOpt = new Option(NODE_STATE_CMD, true,
-"Works with -list to filter nodes based on input comma-separated list 
of node states.");
+"Works with -list to filter nodes based on input comma-separated " +
+"list of node states. " + getAllValidNodeStates());
 nodeStateOpt.setValueSeparator(',');
 nodeStateOpt.setArgs(Option.UNLIMITED_VALUES);
 nodeStateOpt.setArgName("States");
@@ -89,6 +90,14 @@ public class NodeCLI extends YarnCLI {
 opts.addOption(showDetailsOpt);
 opts.getOption(STATUS_CMD).setArgName("NodeId");
 
+if (args != null && args.length > 0) {
+  for (int i = args.length - 1; i >= 0; i--) {
+if (args[i].equalsIgnoreCase("-" + NODE_ALL)) {
+  args[i] = "-" + NODE_ALL;
+}
+  }
+}
+
 int exitCode = -1;
 CommandLine cliParser = null;
 try {
@@ -116,8 +125,15 @@ public class NodeCLI extends YarnCLI {
 if (types != null) {
   for (String type : types) {
 if (!type.trim().isEmpty()) {
-  nodeStates.add(NodeState.valueOf(
-  org.apache.hadoop.util.StringUtils.toUpperCase(type.trim())));
+  try {
+nodeStates.add(NodeState.valueOf(
+org.apache.hadoop.util.StringUtils.toUpperCase(
+type.trim())));
+  } catch (IllegalArgumentException ex) {
+sysout.println("The node state " + type + " is invalid.");
+sysout.println(getAllValidNodeStates());
+return exitCode;
+  }
 }
   }
 }
@@ -320,4 +336,14 @@ public class NodeCLI extends YarnCLI {
 nodeReportStr.close();
 sysout.println(baos.toString("UTF-8"));
   }
+
+  private String getAllValidNodeStates() {
+StringBuilder sb = new StringBuilder();
+sb.append("The valid node state can be one of the following: ");
+for (NodeState state : NodeState.values()) {
+  sb.append(state).append(",");
+}
+String output = sb.toString();
+return output.substring(0, output.length() - 1) + ".";
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d9ed29e8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
index 5bef691..6d7aaa7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java

hadoop git commit: YARN-1126. Add validation of users input nodes-states options to nodes CLI. Contributed by Wei Yan.

2016-07-21 Thread rohithsharmaks
Repository: hadoop
Updated Branches:
  refs/heads/trunk 521f34317 -> be34b2a8f


YARN-1126. Add validation of users input nodes-states options to nodes CLI. 
Contributed by Wei Yan.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/be34b2a8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/be34b2a8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/be34b2a8

Branch: refs/heads/trunk
Commit: be34b2a8fd30a1a0e803ac8b2198c695600a9e46
Parents: 521f343
Author: Rohith Sharma K S 
Authored: Thu Jul 21 12:20:47 2016 +0530
Committer: Rohith Sharma K S 
Committed: Thu Jul 21 12:20:47 2016 +0530

--
 .../apache/hadoop/yarn/client/cli/NodeCLI.java  | 32 ++--
 .../hadoop/yarn/client/cli/TestYarnCLI.java | 31 +++
 2 files changed, 48 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/be34b2a8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
index f51fee9..288a5d2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java
@@ -76,7 +76,8 @@ public class NodeCLI extends YarnCLI {
 "based on node state, all -all to list all nodes, " +
 "-showDetails to display more details about each node.");
 Option nodeStateOpt = new Option(NODE_STATE_CMD, true,
-"Works with -list to filter nodes based on input comma-separated list 
of node states.");
+"Works with -list to filter nodes based on input comma-separated " +
+"list of node states. " + getAllValidNodeStates());
 nodeStateOpt.setValueSeparator(',');
 nodeStateOpt.setArgs(Option.UNLIMITED_VALUES);
 nodeStateOpt.setArgName("States");
@@ -89,6 +90,14 @@ public class NodeCLI extends YarnCLI {
 opts.addOption(showDetailsOpt);
 opts.getOption(STATUS_CMD).setArgName("NodeId");
 
+if (args != null && args.length > 0) {
+  for (int i = args.length - 1; i >= 0; i--) {
+if (args[i].equalsIgnoreCase("-" + NODE_ALL)) {
+  args[i] = "-" + NODE_ALL;
+}
+  }
+}
+
 int exitCode = -1;
 CommandLine cliParser = null;
 try {
@@ -116,8 +125,15 @@ public class NodeCLI extends YarnCLI {
 if (types != null) {
   for (String type : types) {
 if (!type.trim().isEmpty()) {
-  nodeStates.add(NodeState.valueOf(
-  org.apache.hadoop.util.StringUtils.toUpperCase(type.trim())));
+  try {
+nodeStates.add(NodeState.valueOf(
+org.apache.hadoop.util.StringUtils.toUpperCase(
+type.trim())));
+  } catch (IllegalArgumentException ex) {
+sysout.println("The node state " + type + " is invalid.");
+sysout.println(getAllValidNodeStates());
+return exitCode;
+  }
 }
   }
 }
@@ -320,4 +336,14 @@ public class NodeCLI extends YarnCLI {
 nodeReportStr.close();
 sysout.println(baos.toString("UTF-8"));
   }
+
+  private String getAllValidNodeStates() {
+StringBuilder sb = new StringBuilder();
+sb.append("The valid node state can be one of the following: ");
+for (NodeState state : NodeState.values()) {
+  sb.append(state).append(",");
+}
+String output = sb.toString();
+return output.substring(0, output.length() - 1) + ".";
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/be34b2a8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
index 5bef691..6d7aaa7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
@@ -1070,7 +1070,7 @@ public class TestYarnCLI {
 NodeState[]