hadoop git commit: HDFS-12301. NN File Browser UI: Navigate to a path when enter is pressed

2017-08-15 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8.2 77e9d9ff4 -> 155646c48


HDFS-12301. NN File Browser UI: Navigate to a path when enter is pressed

(cherry picked from commit f34646d652310442cb5339aa269f10dfa838)
(cherry picked from commit 895a35eac0422c1845ebe3c5bed1ac2bda1ac807)
(cherry picked from commit a611922de4e2fc82506872d68c4efcb963fb15e9)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/155646c4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/155646c4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/155646c4

Branch: refs/heads/branch-2.8.2
Commit: 155646c48c30fa2bba1cde0412080abd7e033b53
Parents: 77e9d9f
Author: Ravi Prakash 
Authored: Tue Aug 15 15:44:59 2017 -0700
Committer: Ravi Prakash 
Committed: Tue Aug 15 15:47:08 2017 -0700

--
 .../hadoop-hdfs/src/main/webapps/hdfs/explorer.js  | 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/155646c4/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
index 102da9d..a86f7e3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
@@ -337,6 +337,12 @@
 
 var b = function() { browse_directory($('#directory').val()); };
 $('#btn-nav-directory').click(b);
+//Also navigate to the directory when a user presses enter.
+$('#directory').on('keyup', function (e) {
+  if (e.which == 13) {
+    browse_directory($('#directory').val());
+  }
+});
 var dir = window.location.hash.slice(1);
 if(dir == "") {
   window.location.hash = "/";





hadoop git commit: HDFS-12301. NN File Browser UI: Navigate to a path when enter is pressed

2017-08-15 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 87c36d8b8 -> a611922de


HDFS-12301. NN File Browser UI: Navigate to a path when enter is pressed

(cherry picked from commit f34646d652310442cb5339aa269f10dfa838)
(cherry picked from commit 895a35eac0422c1845ebe3c5bed1ac2bda1ac807)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a611922d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a611922d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a611922d

Branch: refs/heads/branch-2.8
Commit: a611922de4e2fc82506872d68c4efcb963fb15e9
Parents: 87c36d8
Author: Ravi Prakash 
Authored: Tue Aug 15 15:44:59 2017 -0700
Committer: Ravi Prakash 
Committed: Tue Aug 15 15:46:22 2017 -0700

--
 .../hadoop-hdfs/src/main/webapps/hdfs/explorer.js  | 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a611922d/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
index 102da9d..a86f7e3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
@@ -337,6 +337,12 @@
 
 var b = function() { browse_directory($('#directory').val()); };
 $('#btn-nav-directory').click(b);
+//Also navigate to the directory when a user presses enter.
+$('#directory').on('keyup', function (e) {
+  if (e.which == 13) {
+    browse_directory($('#directory').val());
+  }
+});
 var dir = window.location.hash.slice(1);
 if(dir == "") {
   window.location.hash = "/";





hadoop git commit: HDFS-12301. NN File Browser UI: Navigate to a path when enter is pressed

2017-08-15 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 b30522c59 -> 895a35eac


HDFS-12301. NN File Browser UI: Navigate to a path when enter is pressed

(cherry picked from commit f34646d652310442cb5339aa269f10dfa838)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/895a35ea
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/895a35ea
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/895a35ea

Branch: refs/heads/branch-2
Commit: 895a35eac0422c1845ebe3c5bed1ac2bda1ac807
Parents: b30522c
Author: Ravi Prakash 
Authored: Tue Aug 15 15:44:59 2017 -0700
Committer: Ravi Prakash 
Committed: Tue Aug 15 15:45:37 2017 -0700

--
 .../hadoop-hdfs/src/main/webapps/hdfs/explorer.js  | 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/895a35ea/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
index 3e276a9..dae3519 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
@@ -370,6 +370,12 @@
 
 var b = function() { browse_directory($('#directory').val()); };
 $('#btn-nav-directory').click(b);
+//Also navigate to the directory when a user presses enter.
+$('#directory').on('keyup', function (e) {
+  if (e.which == 13) {
+    browse_directory($('#directory').val());
+  }
+});
 var dir = window.location.hash.slice(1);
 if(dir == "") {
   window.location.hash = "/";





hadoop git commit: HDFS-12301. NN File Browser UI: Navigate to a path when enter is pressed

2017-08-15 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk d26545902 -> f34646d65


HDFS-12301. NN File Browser UI: Navigate to a path when enter is pressed


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f34646d6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f34646d6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f34646d6

Branch: refs/heads/trunk
Commit: f34646d652310442cb5339aa269f10dfa838
Parents: d265459
Author: Ravi Prakash 
Authored: Tue Aug 15 15:44:59 2017 -0700
Committer: Ravi Prakash 
Committed: Tue Aug 15 15:44:59 2017 -0700

--
 .../hadoop-hdfs/src/main/webapps/hdfs/explorer.js  | 6 ++
 1 file changed, 6 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f34646d6/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
index 3e276a9..dae3519 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
@@ -370,6 +370,12 @@
 
 var b = function() { browse_directory($('#directory').val()); };
 $('#btn-nav-directory').click(b);
+//Also navigate to the directory when a user presses enter.
+$('#directory').on('keyup', function (e) {
+  if (e.which == 13) {
+    browse_directory($('#directory').val());
+  }
+});
 var dir = window.location.hash.slice(1);
 if(dir == "") {
   window.location.hash = "/";





hadoop git commit: MAPREDUCE-6923. Optimize MapReduce Shuffle I/O for small partitions. Contributed by Robert Schmidtke.

2017-08-09 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 306abf498 -> c556cdaaf


MAPREDUCE-6923. Optimize MapReduce Shuffle I/O for small partitions. 
Contributed by Robert Schmidtke.

(cherry picked from commit ac7d0604bc73c0925eff240ad9837e14719d57b7)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c556cdaa
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c556cdaa
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c556cdaa

Branch: refs/heads/branch-2
Commit: c556cdaaf4795aae4d3a314e26468f3e9b37ebe7
Parents: 306abf4
Author: Ravi Prakash 
Authored: Wed Aug 9 15:39:52 2017 -0700
Committer: Ravi Prakash 
Committed: Wed Aug 9 15:40:57 2017 -0700

--
 .../main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java  | 5 -
 1 file changed, 4 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c556cdaa/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java
index cb9b5e0..79045f9 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java
@@ -111,7 +111,10 @@ public class FadvisedFileRegion extends DefaultFileRegion {
 
 long trans = actualCount;
 int readSize;
-ByteBuffer byteBuffer = ByteBuffer.allocate(this.shuffleBufferSize);
+ByteBuffer byteBuffer = ByteBuffer.allocate(
+    Math.min(
+        this.shuffleBufferSize,
+        trans > Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) trans));
 
 while(trans > 0L &&
    (readSize = fileChannel.read(byteBuffer, this.position+position)) > 0) {


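The change above caps the shuffle copy buffer at the number of bytes actually being transferred, so a tiny partition no longer allocates a full shuffleBufferSize buffer on every transfer. A minimal standalone sketch of the same sizing logic (names here are illustrative, not the actual FadvisedFileRegion API):

// Sketch of the buffer-sizing idea in the patch above; plain JDK, no Hadoop.
public final class ShuffleBufferSizing {
  // Never larger than the configured maximum, never larger than the bytes left.
  static int bufferSize(int configuredMax, long bytesToTransfer) {
    // Clamp the long transfer count before narrowing to int.
    int remaining = bytesToTransfer > Integer.MAX_VALUE
        ? Integer.MAX_VALUE : (int) bytesToTransfer;
    return Math.min(configuredMax, remaining);
  }

  public static void main(String[] args) {
    System.out.println(bufferSize(128 * 1024, 37L));      // 37: tiny partition
    System.out.println(bufferSize(128 * 1024, 1L << 30)); // 131072: capped at max
  }
}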



hadoop git commit: MAPREDUCE-6923. Optimize MapReduce Shuffle I/O for small partitions. Contributed by Robert Schmidtke.

2017-08-09 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk b5c02f95b -> ac7d0604b


MAPREDUCE-6923. Optimize MapReduce Shuffle I/O for small partitions. 
Contributed by Robert Schmidtke.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ac7d0604
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ac7d0604
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ac7d0604

Branch: refs/heads/trunk
Commit: ac7d0604bc73c0925eff240ad9837e14719d57b7
Parents: b5c02f9
Author: Ravi Prakash 
Authored: Wed Aug 9 15:39:52 2017 -0700
Committer: Ravi Prakash 
Committed: Wed Aug 9 15:39:52 2017 -0700

--
 .../main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java  | 5 -
 1 file changed, 4 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ac7d0604/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java
index cb9b5e0..79045f9 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/main/java/org/apache/hadoop/mapred/FadvisedFileRegion.java
@@ -111,7 +111,10 @@ public class FadvisedFileRegion extends DefaultFileRegion {
 
 long trans = actualCount;
 int readSize;
-ByteBuffer byteBuffer = ByteBuffer.allocate(this.shuffleBufferSize);
+ByteBuffer byteBuffer = ByteBuffer.allocate(
+    Math.min(
+        this.shuffleBufferSize,
+        trans > Integer.MAX_VALUE ? Integer.MAX_VALUE : (int) trans));
 
 while(trans > 0L &&
    (readSize = fileChannel.read(byteBuffer, this.position+position)) > 0) {





hadoop git commit: HADOOP-14229. hadoop.security.auth_to_local example is incorrect in the documentation. Contributed by Andras Bokor.

2017-07-28 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 480c8db40 -> 746189ad8


HADOOP-14229. hadoop.security.auth_to_local example is incorrect in the 
documentation. Contributed by Andras Bokor.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/746189ad
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/746189ad
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/746189ad

Branch: refs/heads/trunk
Commit: 746189ad8cdf90ab35baec9364b2e02956a1e70c
Parents: 480c8db
Author: Ravi Prakash 
Authored: Fri Jul 28 11:43:36 2017 -0700
Committer: Ravi Prakash 
Committed: Fri Jul 28 11:43:36 2017 -0700

--
 .../hadoop-common/src/site/markdown/SecureMode.md   | 9 +++--
 1 file changed, 3 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/746189ad/hadoop-common-project/hadoop-common/src/site/markdown/SecureMode.md
--
diff --git 
a/hadoop-common-project/hadoop-common/src/site/markdown/SecureMode.md 
b/hadoop-common-project/hadoop-common/src/site/markdown/SecureMode.md
index e1aad5a..5a62c4f 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/SecureMode.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/SecureMode.md
@@ -142,12 +142,9 @@ In a typical cluster HDFS and YARN services will be launched as the system `hdfs
 
   <name>hadoop.security.auth_to_local</name>
   <value>
-RULE:[2:$1@$0](nn/.*@.*REALM.TLD)s/.*/hdfs/
-RULE:[2:$1@$0](jn/.*@.*REALM.TLD)s/.*/hdfs/
-RULE:[2:$1@$0](dn/.*@.*REALM.TLD)s/.*/hdfs/
-RULE:[2:$1@$0](nm/.*@.*REALM.TLD)s/.*/yarn/
-RULE:[2:$1@$0](rm/.*@.*REALM.TLD)s/.*/yarn/
-RULE:[2:$1@$0](jhs/.*@.*REALM.TLD)s/.*/mapred/
+RULE:[2:$1/$2@$0]([ndj]n/.*@REALM.TLD)s/.*/hdfs/
+RULE:[2:$1/$2@$0]([rn]m/.*@REALM.TLD)s/.*/yarn/
+RULE:[2:$1/$2@$0](jhs/.*@REALM.TLD)s/.*/mapred/
 DEFAULT
  </value>
 


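The fix matters because the old rules built the match string with [2:$1@$0], which renders a two-component principal like nn/host@REALM.TLD as nn@REALM.TLD, so a pattern of the form (nn/.*@...) could never match; [2:$1/$2@$0] keeps the host component. A hedged sketch that evaluates the corrected rules with the hadoop-auth KerberosName utility (assuming hadoop-auth is on the classpath; REALM.TLD is a placeholder realm):

import java.io.IOException;
import org.apache.hadoop.security.authentication.util.KerberosName;

// Sketch: check what the corrected auth_to_local rules resolve to.
public class AuthToLocalCheck {
  public static void main(String[] args) throws IOException {
    KerberosName.setRules(
        "RULE:[2:$1/$2@$0]([ndj]n/.*@REALM.TLD)s/.*/hdfs/\n" +
        "RULE:[2:$1/$2@$0]([rn]m/.*@REALM.TLD)s/.*/yarn/\n" +
        "RULE:[2:$1/$2@$0](jhs/.*@REALM.TLD)s/.*/mapred/\n" +
        "DEFAULT");
    // nn, dn and jn service principals all map to the hdfs system user, etc.
    System.out.println(new KerberosName("nn/host1@REALM.TLD").getShortName());  // hdfs
    System.out.println(new KerberosName("rm/host1@REALM.TLD").getShortName());  // yarn
    System.out.println(new KerberosName("jhs/host1@REALM.TLD").getShortName()); // mapred
  }
}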



hadoop git commit: HDFS-12193. Fix style issues in HttpFS tests. Contributed by Zoran Dimitrijevic

2017-07-24 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 94ca52ae9 -> c98201b5d


HDFS-12193. Fix style issues in HttpFS tests. Contributed by Zoran Dimitrijevic


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c98201b5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c98201b5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c98201b5

Branch: refs/heads/trunk
Commit: c98201b5d83a700b4d08165c6fd1a6ef2eed
Parents: 94ca52a
Author: Ravi Prakash 
Authored: Mon Jul 24 19:06:15 2017 -0700
Committer: Ravi Prakash 
Committed: Mon Jul 24 19:06:15 2017 -0700

--
 .../hadoop/fs/http/server/TestHttpFSServer.java | 106 +++
 .../fs/http/server/TestHttpFSServerNoACLs.java  |  15 +--
 .../http/server/TestHttpFSServerNoXAttrs.java   |  10 +-
 3 files changed, 77 insertions(+), 54 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c98201b5/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
 
b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
index 7cdb39c..0e1cc20 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServer.java
@@ -73,6 +73,9 @@ import com.google.common.collect.Maps;
 import java.util.Properties;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 
+/**
+ * Main test class for HttpFSServer.
+ */
 public class TestHttpFSServer extends HFSTestCase {
 
   @Test
@@ -82,15 +85,20 @@ public class TestHttpFSServer extends HFSTestCase {
 String dir = TestDirHelper.getTestDir().getAbsolutePath();
 
 Configuration httpfsConf = new Configuration(false);
-HttpFSServerWebApp server = new HttpFSServerWebApp(dir, dir, dir, dir, httpfsConf);
+HttpFSServerWebApp server = new HttpFSServerWebApp(dir, dir, dir, dir,
+                                                   httpfsConf);
 server.init();
 server.destroy();
   }
 
-  public static class MockGroups implements Service,Groups {
+  /**
+   * Mock groups.
+   */
+  public static class MockGroups implements Service, Groups {
 
 @Override
-public void init(org.apache.hadoop.lib.server.Server server) throws ServiceException {
+public void init(org.apache.hadoop.lib.server.Server server)
+    throws ServiceException {
 }
 
 @Override
@@ -112,8 +120,10 @@ public class TestHttpFSServer extends HFSTestCase {
 }
 
 @Override
-public void serverStatusChange(org.apache.hadoop.lib.server.Server.Status oldStatus,
-    org.apache.hadoop.lib.server.Server.Status newStatus) throws ServiceException {
+public void serverStatusChange(
+    org.apache.hadoop.lib.server.Server.Status oldStatus,
+    org.apache.hadoop.lib.server.Server.Status newStatus)
+    throws ServiceException {
 }
 
 @Override
@@ -300,25 +310,30 @@ public class TestHttpFSServer extends HFSTestCase {
 createHttpFSServer(false, false);
 
 URL url = new URL(TestJettyHelper.getJettyURL(),
-  MessageFormat.format("/webhdfs/v1?user.name={0}&op=instrumentation", "nobody"));
+    MessageFormat.format("/webhdfs/v1?user.name={0}&op=instrumentation",
+                         "nobody"));
 HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_UNAUTHORIZED);
+Assert.assertEquals(conn.getResponseCode(),
+    HttpURLConnection.HTTP_UNAUTHORIZED);
 
 url = new URL(TestJettyHelper.getJettyURL(),
-  MessageFormat.format("/webhdfs/v1?user.name={0}&op=instrumentation",
-      HadoopUsersConfTestHelper.getHadoopUsers()[0]));
+    MessageFormat.format("/webhdfs/v1?user.name={0}&op=instrumentation",
+                         HadoopUsersConfTestHelper.getHadoopUsers()[0]));
 conn = (HttpURLConnection) url.openConnection();
 Assert.assertEquals(conn.getResponseCode(), HttpURLConnection.HTTP_OK);
-BufferedReader reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
+BufferedReader reader = new BufferedReader(
+    new InputStreamReader(conn.getInputStream()));
 String line = reader.readLine();
 reader.close();
 Assert.assertTrue(line.contains("\"counters\":{"));
 
 url = new 

hadoop git commit: HADOOP-14597. Native compilation broken with OpenSSL-1.1.0. Contributed by Ravi Prakash.

2017-07-24 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 10583625c -> 94ca52ae9


HADOOP-14597. Native compilation broken with OpenSSL-1.1.0. Contributed by Ravi 
Prakash.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/94ca52ae
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/94ca52ae
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/94ca52ae

Branch: refs/heads/trunk
Commit: 94ca52ae9ec0ae04854d726bf2ac1bc457b96a9c
Parents: 1058362
Author: Ravi Prakash 
Authored: Mon Jul 24 16:01:45 2017 -0700
Committer: Ravi Prakash 
Committed: Mon Jul 24 16:01:45 2017 -0700

--
 .../org/apache/hadoop/crypto/OpensslCipher.c| 46 ++--
 .../src/main/native/pipes/impl/HadoopPipes.cc   | 12 -
 2 files changed, 53 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/94ca52ae/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c
 
b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c
index 5cb5bba..c7984a3 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c
+++ 
b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/crypto/OpensslCipher.c
@@ -30,6 +30,11 @@ static void (*dlsym_EVP_CIPHER_CTX_free)(EVP_CIPHER_CTX *);
 static int (*dlsym_EVP_CIPHER_CTX_cleanup)(EVP_CIPHER_CTX *);
 static void (*dlsym_EVP_CIPHER_CTX_init)(EVP_CIPHER_CTX *);
 static int (*dlsym_EVP_CIPHER_CTX_set_padding)(EVP_CIPHER_CTX *, int);
+static int (*dlsym_EVP_CIPHER_CTX_test_flags)(const EVP_CIPHER_CTX *, int);
+static int (*dlsym_EVP_CIPHER_CTX_block_size)(const EVP_CIPHER_CTX *);
+#if OPENSSL_VERSION_NUMBER >= 0x10100000L
+static int (*dlsym_EVP_CIPHER_CTX_encrypting)(const EVP_CIPHER_CTX *);
+#endif
 static int (*dlsym_EVP_CipherInit_ex)(EVP_CIPHER_CTX *, const EVP_CIPHER *,  \
ENGINE *, const unsigned char *, const unsigned char *, int);
 static int (*dlsym_EVP_CipherUpdate)(EVP_CIPHER_CTX *, unsigned char *,  \
@@ -46,6 +51,11 @@ typedef void (__cdecl *__dlsym_EVP_CIPHER_CTX_free)(EVP_CIPHER_CTX *);
 typedef int (__cdecl *__dlsym_EVP_CIPHER_CTX_cleanup)(EVP_CIPHER_CTX *);
 typedef void (__cdecl *__dlsym_EVP_CIPHER_CTX_init)(EVP_CIPHER_CTX *);
 typedef int (__cdecl *__dlsym_EVP_CIPHER_CTX_set_padding)(EVP_CIPHER_CTX *, int);
+typedef int (__cdecl *__dlsym_EVP_CIPHER_CTX_test_flags)(const EVP_CIPHER_CTX *, int);
+typedef int (__cdecl *__dlsym_EVP_CIPHER_CTX_block_size)(const EVP_CIPHER_CTX *);
+#if OPENSSL_VERSION_NUMBER >= 0x10100000L
+typedef int (__cdecl *__dlsym_EVP_CIPHER_CTX_encrypting)(const EVP_CIPHER_CTX *);
+#endif
 typedef int (__cdecl *__dlsym_EVP_CipherInit_ex)(EVP_CIPHER_CTX *,  \
  const EVP_CIPHER *, ENGINE *, const unsigned char *,  \
  const unsigned char *, int);
@@ -60,6 +70,11 @@ static __dlsym_EVP_CIPHER_CTX_free dlsym_EVP_CIPHER_CTX_free;
 static __dlsym_EVP_CIPHER_CTX_cleanup dlsym_EVP_CIPHER_CTX_cleanup;
 static __dlsym_EVP_CIPHER_CTX_init dlsym_EVP_CIPHER_CTX_init;
 static __dlsym_EVP_CIPHER_CTX_set_padding dlsym_EVP_CIPHER_CTX_set_padding;
+static __dlsym_EVP_CIPHER_CTX_test_flags dlsym_EVP_CIPHER_CTX_test_flags;
+static __dlsym_EVP_CIPHER_CTX_block_size dlsym_EVP_CIPHER_CTX_block_size;
+#if OPENSSL_VERSION_NUMBER >= 0x10100000L
+static __dlsym_EVP_CIPHER_CTX_encrypting dlsym_EVP_CIPHER_CTX_encrypting;
+#endif
 static __dlsym_EVP_CipherInit_ex dlsym_EVP_CipherInit_ex;
 static __dlsym_EVP_CipherUpdate dlsym_EVP_CipherUpdate;
 static __dlsym_EVP_CipherFinal_ex dlsym_EVP_CipherFinal_ex;
@@ -114,6 +129,14 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_crypto_OpensslCipher_initIDs
   "EVP_CIPHER_CTX_init");
   LOAD_DYNAMIC_SYMBOL(dlsym_EVP_CIPHER_CTX_set_padding, env, openssl,  \
   "EVP_CIPHER_CTX_set_padding");
+  LOAD_DYNAMIC_SYMBOL(dlsym_EVP_CIPHER_CTX_test_flags, env, openssl,  \
+  "EVP_CIPHER_CTX_test_flags");
+  LOAD_DYNAMIC_SYMBOL(dlsym_EVP_CIPHER_CTX_block_size, env, openssl,  \
+  "EVP_CIPHER_CTX_block_size");
+#if OPENSSL_VERSION_NUMBER >= 0x10100000L
+  LOAD_DYNAMIC_SYMBOL(dlsym_EVP_CIPHER_CTX_encrypting, env, openssl,  \
+  "EVP_CIPHER_CTX_encrypting");
+#endif
   LOAD_DYNAMIC_SYMBOL(dlsym_EVP_CipherInit_ex, env, openssl,  \
   "EVP_CipherInit_ex");
   LOAD_DYNAMIC_SYMBOL(dlsym_EVP_CipherUpdate, env, openssl,  \
@@ -135,6 +158,17 @@ JNIEXPORT void JNICALL 

hadoop git commit: MAPREDUCE-6910. MapReduceTrackingUriPlugin can not return the right URI of history server with HTTPS. Contributed by Lantao Jin

2017-07-13 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 8c69b040a -> d6228fbeb


MAPREDUCE-6910. MapReduceTrackingUriPlugin can not return the right URI of 
history server with HTTPS. Contributed by Lantao Jin

(cherry picked from commit 43f0503286eccbc6bb8ae77584b635bfd0c48e50)
(cherry picked from commit 756a06814355465c85b9d66f262ee875dd86dbb7)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d6228fbe
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d6228fbe
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d6228fbe

Branch: refs/heads/branch-2.8
Commit: d6228fbebd0f43fb3dae43d9736caf26cbd0d0ae
Parents: 8c69b04
Author: Ravi Prakash 
Authored: Thu Jul 13 16:16:45 2017 -0700
Committer: Ravi Prakash 
Committed: Thu Jul 13 22:57:44 2017 -0700

--
 .../hadoop/mapreduce/v2/util/MRWebAppUtil.java  |  9 ---
 .../webapp/TestMapReduceTrackingUriPlugin.java  | 26 ++--
 2 files changed, 29 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d6228fbe/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
index d367060..951c9d5 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.ipc.RPCUtil;
 
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
@@ -76,7 +75,9 @@ public class MRWebAppUtil {
    : "http://";
  }
 
-  public static String getJHSWebappScheme() {
+  public static String getJHSWebappScheme(Configuration conf) {
+    setHttpPolicyInJHS(conf.get(JHAdminConfig.MR_HS_HTTP_POLICY,
+        JHAdminConfig.DEFAULT_MR_HS_HTTP_POLICY));
 return httpPolicyInJHS == HttpConfig.Policy.HTTPS_ONLY ? "https://"
     : "http://";
   }
@@ -101,7 +102,7 @@ public class MRWebAppUtil {
   }
   
   public static String getJHSWebappURLWithScheme(Configuration conf) {
-return getJHSWebappScheme() + getJHSWebappURLWithoutScheme(conf);
+return getJHSWebappScheme(conf) + getJHSWebappURLWithoutScheme(conf);
   }
   
   public static InetSocketAddress getJHSWebBindAddress(Configuration conf) {
@@ -153,7 +154,7 @@ public class MRWebAppUtil {
   
   public static String getApplicationWebURLOnJHSWithScheme(Configuration conf,
   ApplicationId appId) throws UnknownHostException {
-return getJHSWebappScheme()
+return getJHSWebappScheme(conf)
 + getApplicationWebURLOnJHSWithoutScheme(conf, appId);
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d6228fbe/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
index 8c3be58..9291097 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertEquals;
 import java.net.URI;
 import java.net.URISyntaxException;
 
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import 
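
The substance of MAPREDUCE-6910: getJHSWebappScheme() previously relied on an HTTP policy cached once at class load, so a Configuration that enabled HTTPS for the JobHistoryServer was ignored and the tracking URL came back as http://. Passing the Configuration in lets the scheme follow mapreduce.jobhistory.http.policy. A hedged usage sketch (assuming hadoop-mapreduce-client-common is on the classpath):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;

// Sketch: with the patch, the JHS URL scheme follows the configured policy.
public class JhsSchemeCheck {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set(JHAdminConfig.MR_HS_HTTP_POLICY, HttpConfig.Policy.HTTPS_ONLY.name());
    // Before the fix this could print an http:// URL, because the policy was
    // read once statically instead of being derived from this conf.
    System.out.println(MRWebAppUtil.getJHSWebappURLWithScheme(conf));
  }
}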

hadoop git commit: MAPREDUCE-6910. MapReduceTrackingUriPlugin can not return the right URI of history server with HTTPS. Contributed by Lantao Jin

2017-07-13 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 a77fb561e -> 756a06814


MAPREDUCE-6910. MapReduceTrackingUriPlugin can not return the right URI of 
history server with HTTPS. Contributed by Lantao Jin

(cherry picked from commit 43f0503286eccbc6bb8ae77584b635bfd0c48e50)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/756a0681
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/756a0681
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/756a0681

Branch: refs/heads/branch-2
Commit: 756a06814355465c85b9d66f262ee875dd86dbb7
Parents: a77fb56
Author: Ravi Prakash 
Authored: Thu Jul 13 16:16:45 2017 -0700
Committer: Ravi Prakash 
Committed: Thu Jul 13 16:19:52 2017 -0700

--
 .../hadoop/mapreduce/v2/util/MRWebAppUtil.java  |  9 ---
 .../webapp/TestMapReduceTrackingUriPlugin.java  | 26 ++--
 2 files changed, 29 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/756a0681/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
index d367060..951c9d5 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.ipc.RPCUtil;
 
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
@@ -76,7 +75,9 @@ public class MRWebAppUtil {
    : "http://";
  }
 
-  public static String getJHSWebappScheme() {
+  public static String getJHSWebappScheme(Configuration conf) {
+    setHttpPolicyInJHS(conf.get(JHAdminConfig.MR_HS_HTTP_POLICY,
+        JHAdminConfig.DEFAULT_MR_HS_HTTP_POLICY));
 return httpPolicyInJHS == HttpConfig.Policy.HTTPS_ONLY ? "https://"
     : "http://";
   }
@@ -101,7 +102,7 @@ public class MRWebAppUtil {
   }
   
   public static String getJHSWebappURLWithScheme(Configuration conf) {
-return getJHSWebappScheme() + getJHSWebappURLWithoutScheme(conf);
+return getJHSWebappScheme(conf) + getJHSWebappURLWithoutScheme(conf);
   }
   
   public static InetSocketAddress getJHSWebBindAddress(Configuration conf) {
@@ -153,7 +154,7 @@ public class MRWebAppUtil {
   
   public static String getApplicationWebURLOnJHSWithScheme(Configuration conf,
   ApplicationId appId) throws UnknownHostException {
-return getJHSWebappScheme()
+return getJHSWebappScheme(conf)
 + getApplicationWebURLOnJHSWithoutScheme(conf, appId);
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/756a0681/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
index 8c3be58..9291097 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertEquals;
 import java.net.URI;
 import java.net.URISyntaxException;
 
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -30,17 +31,38 @@ import org.junit.Test;
 
 public 

hadoop git commit: MAPREDUCE-6910. MapReduceTrackingUriPlugin can not return the right URI of history server with HTTPS. Contributed by Lantao Jin

2017-07-13 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk ebc048cc0 -> 43f050328


MAPREDUCE-6910. MapReduceTrackingUriPlugin can not return the right URI of 
history server with HTTPS. Contributed by Lantao Jin


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/43f05032
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/43f05032
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/43f05032

Branch: refs/heads/trunk
Commit: 43f0503286eccbc6bb8ae77584b635bfd0c48e50
Parents: ebc048c
Author: Ravi Prakash 
Authored: Thu Jul 13 16:16:45 2017 -0700
Committer: Ravi Prakash 
Committed: Thu Jul 13 16:16:45 2017 -0700

--
 .../hadoop/mapreduce/v2/util/MRWebAppUtil.java  |  9 ---
 .../webapp/TestMapReduceTrackingUriPlugin.java  | 26 ++--
 2 files changed, 29 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/43f05032/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
index d367060..951c9d5 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRWebAppUtil.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.ipc.RPCUtil;
 
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
@@ -76,7 +75,9 @@ public class MRWebAppUtil {
    : "http://";
  }
 
-  public static String getJHSWebappScheme() {
+  public static String getJHSWebappScheme(Configuration conf) {
+    setHttpPolicyInJHS(conf.get(JHAdminConfig.MR_HS_HTTP_POLICY,
+        JHAdminConfig.DEFAULT_MR_HS_HTTP_POLICY));
 return httpPolicyInJHS == HttpConfig.Policy.HTTPS_ONLY ? "https://"
     : "http://";
   }
@@ -101,7 +102,7 @@ public class MRWebAppUtil {
   }
   
   public static String getJHSWebappURLWithScheme(Configuration conf) {
-return getJHSWebappScheme() + getJHSWebappURLWithoutScheme(conf);
+return getJHSWebappScheme(conf) + getJHSWebappURLWithoutScheme(conf);
   }
   
   public static InetSocketAddress getJHSWebBindAddress(Configuration conf) {
@@ -153,7 +154,7 @@ public class MRWebAppUtil {
   
   public static String getApplicationWebURLOnJHSWithScheme(Configuration conf,
   ApplicationId appId) throws UnknownHostException {
-return getJHSWebappScheme()
+return getJHSWebappScheme(conf)
 + getApplicationWebURLOnJHSWithoutScheme(conf, appId);
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43f05032/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
index 8c3be58..9291097 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs-plugins/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestMapReduceTrackingUriPlugin.java
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertEquals;
 import java.net.URI;
 import java.net.URISyntaxException;
 
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -30,17 +31,38 @@ import org.junit.Test;
 
 public class TestMapReduceTrackingUriPlugin {
   @Test
-  public void 

hadoop git commit: HDFS-11993. Add log info when connect to datanode socket address failed. Contributed by chencan

2017-06-26 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 b59b96dee -> ef491f463


HDFS-11993. Add log info when connect to datanode socket address failed. 
Contributed by chencan

(cherry picked from commit a9d3412b4ce40f5ab5a18756ede7e0606b653171)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ef491f46
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ef491f46
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ef491f46

Branch: refs/heads/branch-2
Commit: ef491f463799bdfd757d563bee145ce52765d3de
Parents: b59b96d
Author: Ravi Prakash 
Authored: Mon Jun 26 13:24:27 2017 -0700
Committer: Ravi Prakash 
Committed: Mon Jun 26 13:25:32 2017 -0700

--
 .../src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java| 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ef491f46/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
index 05b0b67..1cd420e 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
@@ -674,8 +674,9 @@ public class DFSInputStream extends FSInputStream
   fetchBlockAt(target);
 } else {
   connectFailedOnce = true;
-  DFSClient.LOG.warn("Failed to connect to " + targetAddr + " for block"
-      + ", add to deadNodes and continue. " + ex, ex);
+  DFSClient.LOG.warn("Failed to connect to {} for block {}, " +
+      "add to deadNodes and continue. ", targetAddr,
+      targetBlock.getBlock(), ex);
   // Put chosen node into dead list, continue
   addToDeadNodes(chosenNode);
 }


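Besides adding the block to the message, the patch switches the warning to SLF4J parameterized logging: the {} placeholders are rendered only when WARN is enabled, and a trailing Throwable with no matching placeholder is printed as a full stack trace. A small self-contained sketch of that idiom (values are made up; assumes an SLF4J binding on the classpath):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Sketch of the SLF4J idiom used in the patch above.
public class Slf4jWarnDemo {
  private static final Logger LOG = LoggerFactory.getLogger(Slf4jWarnDemo.class);

  public static void main(String[] args) {
    String targetAddr = "127.0.0.1:9866";
    String block = "blk_1073741825_1001";
    Exception ex = new java.net.ConnectException("Connection refused");
    // Two placeholders consume targetAddr and block; ex, having no
    // placeholder, is logged as a stack trace.
    LOG.warn("Failed to connect to {} for block {}, add to deadNodes and continue.",
        targetAddr, block, ex);
  }
}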



hadoop git commit: HDFS-11993. Add log info when connect to datanode socket address failed. Contributed by chencan

2017-06-26 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 2c367b464 -> a9d3412b4


HDFS-11993. Add log info when connect to datanode socket address failed. 
Contributed by chencan


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a9d3412b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a9d3412b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a9d3412b

Branch: refs/heads/trunk
Commit: a9d3412b4ce40f5ab5a18756ede7e0606b653171
Parents: 2c367b4
Author: Ravi Prakash 
Authored: Mon Jun 26 13:24:27 2017 -0700
Committer: Ravi Prakash 
Committed: Mon Jun 26 13:24:27 2017 -0700

--
 .../src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java| 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a9d3412b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
index 77f5a92..dcc997c 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
@@ -585,8 +585,9 @@ public class DFSInputStream extends FSInputStream
   fetchBlockAt(target);
 } else {
   connectFailedOnce = true;
-  DFSClient.LOG.warn("Failed to connect to " + targetAddr + " for block"
-      + ", add to deadNodes and continue. " + ex, ex);
+  DFSClient.LOG.warn("Failed to connect to {} for block {}, " +
+      "add to deadNodes and continue. ", targetAddr,
+      targetBlock.getBlock(), ex);
   // Put chosen node into dead list, continue
   addToDeadNodes(chosenNode);
 }





hadoop git commit: MAPREDUCE-6897. Add Unit Test to ensure Job end notification gets sent even when appMaster stop gets YarnRuntimeException. Contributed by Gergely Novák

2017-06-16 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 43fb79317 -> 182e98c70


MAPREDUCE-6897. Add Unit Test to ensure Job end notification gets sent even 
when appMaster stop gets YarnRuntimeException. Contributed by Gergely Novák

(cherry picked from commit 82bbcbf37f8137112a6270932b2ad7572785c387)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/182e98c7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/182e98c7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/182e98c7

Branch: refs/heads/branch-2
Commit: 182e98c703aee94ad58325f4b26fd238ec474abc
Parents: 43fb793
Author: Ravi Prakash 
Authored: Fri Jun 16 14:25:12 2017 -0700
Committer: Ravi Prakash 
Committed: Fri Jun 16 14:27:06 2017 -0700

--
 .../mapreduce/v2/app/TestJobEndNotifier.java| 25 +---
 1 file changed, 22 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/182e98c7/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java
index ecfa43c..d122a9b 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.mapreduce.v2.app;
 
 import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.when;
@@ -30,6 +31,7 @@ import java.io.PrintStream;
 import java.net.Proxy;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.nio.channels.ClosedChannelException;
 
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
@@ -53,6 +55,7 @@ import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
 import org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator;
 import org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler;
 import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -197,8 +200,8 @@ public class TestJobEndNotifier extends JobEndNotifier {
 
   }
 
-  @Test
-  public void testNotificationOnLastRetryNormalShutdown() throws Exception {
+  private void testNotificationOnLastRetry(boolean withRuntimeException)
+  throws Exception {
 HttpServer2 server = startHttpServer();
 // Act like it is the second attempt. Default max attempts is 2
 MRApp app = spy(new MRAppWithCustomContainerAllocator(
@@ -210,17 +213,33 @@ public class TestJobEndNotifier extends JobEndNotifier {
 JobImpl job = (JobImpl)app.submit(conf);
 app.waitForInternalState(job, JobStateInternal.SUCCEEDED);
 // Unregistration succeeds: successfullyUnregistered is set
+if (withRuntimeException) {
+  YarnRuntimeException runtimeException = new YarnRuntimeException(
+      new ClosedChannelException());
+  doThrow(runtimeException).when(app).stop();
+}
 app.shutDownJob();
 Assert.assertTrue(app.isLastAMRetry());
 Assert.assertEquals(1, JobEndServlet.calledTimes);
 Assert.assertEquals("jobid=" + job.getID() + "=SUCCEEDED",
 JobEndServlet.requestUri.getQuery());
 Assert.assertEquals(JobState.SUCCEEDED.toString(),
-  JobEndServlet.foundJobState);
+    JobEndServlet.foundJobState);
 server.stop();
   }
 
   @Test
+  public void testNotificationOnLastRetryNormalShutdown() throws Exception {
+    testNotificationOnLastRetry(false);
+  }
+
+  @Test
+  public void testNotificationOnLastRetryShutdownWithRuntimeException()
+  throws Exception {
+    testNotificationOnLastRetry(true);
+  }
+
+  @Test
   public void testAbsentNotificationOnNotLastRetryUnregistrationFailure()
   throws Exception {
 HttpServer2 server = startHttpServer();


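The refactored test drives both shutdown paths through one helper; the interesting Mockito piece is stubbing a spy's void method to throw, so the code under test must tolerate the failure. A minimal sketch of that pattern (illustrative classes, not the MRApp test harness; assumes Mockito on the classpath):

import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.spy;

// Sketch: make a spy's stop() throw, verify the caller still completes.
public class SpyThrowDemo {
  static class Service {
    void stop() { /* real shutdown work */ }
    boolean shutDown() {
      try {
        stop();
      } catch (RuntimeException e) {
        // Swallow, as the AM does, so the job-end notification still goes out.
      }
      return true;
    }
  }

  public static void main(String[] args) {
    Service svc = spy(new Service());
    doThrow(new IllegalStateException("channel closed")).when(svc).stop();
    System.out.println(svc.shutDown()); // true: the failure in stop() is tolerated
  }
}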



hadoop git commit: MAPREDUCE-6897. Add Unit Test to ensure Job end notification gets sent even when appMaster stop gets YarnRuntimeException. Contributed by Gergely Novák

2017-06-16 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 9ef2743fa -> 82bbcbf37


MAPREDUCE-6897. Add Unit Test to ensure Job end notification gets sent even 
when appMaster stop gets YarnRuntimeException. Contributed by Gergely Novák


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/82bbcbf3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/82bbcbf3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/82bbcbf3

Branch: refs/heads/trunk
Commit: 82bbcbf37f8137112a6270932b2ad7572785c387
Parents: 9ef2743
Author: Ravi Prakash 
Authored: Fri Jun 16 14:25:12 2017 -0700
Committer: Ravi Prakash 
Committed: Fri Jun 16 14:25:44 2017 -0700

--
 .../mapreduce/v2/app/TestJobEndNotifier.java| 25 +---
 1 file changed, 22 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/82bbcbf3/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java
index ecfa43c..d122a9b 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobEndNotifier.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.mapreduce.v2.app;
 
 import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.when;
@@ -30,6 +31,7 @@ import java.io.PrintStream;
 import java.net.Proxy;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.nio.channels.ClosedChannelException;
 
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
@@ -53,6 +55,7 @@ import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
 import org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator;
 import org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler;
 import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -197,8 +200,8 @@ public class TestJobEndNotifier extends JobEndNotifier {
 
   }
 
-  @Test
-  public void testNotificationOnLastRetryNormalShutdown() throws Exception {
+  private void testNotificationOnLastRetry(boolean withRuntimeException)
+  throws Exception {
 HttpServer2 server = startHttpServer();
 // Act like it is the second attempt. Default max attempts is 2
 MRApp app = spy(new MRAppWithCustomContainerAllocator(
@@ -210,17 +213,33 @@ public class TestJobEndNotifier extends JobEndNotifier {
 JobImpl job = (JobImpl)app.submit(conf);
 app.waitForInternalState(job, JobStateInternal.SUCCEEDED);
 // Unregistration succeeds: successfullyUnregistered is set
+if (withRuntimeException) {
+  YarnRuntimeException runtimeException = new YarnRuntimeException(
+      new ClosedChannelException());
+  doThrow(runtimeException).when(app).stop();
+}
 app.shutDownJob();
 Assert.assertTrue(app.isLastAMRetry());
 Assert.assertEquals(1, JobEndServlet.calledTimes);
 Assert.assertEquals("jobid=" + job.getID() + "=SUCCEEDED",
 JobEndServlet.requestUri.getQuery());
 Assert.assertEquals(JobState.SUCCEEDED.toString(),
-  JobEndServlet.foundJobState);
+    JobEndServlet.foundJobState);
 server.stop();
   }
 
   @Test
+  public void testNotificationOnLastRetryNormalShutdown() throws Exception {
+    testNotificationOnLastRetry(false);
+  }
+
+  @Test
+  public void testNotificationOnLastRetryShutdownWithRuntimeException()
+  throws Exception {
+    testNotificationOnLastRetry(true);
+  }
+
+  @Test
   public void testAbsentNotificationOnNotLastRetryUnregistrationFailure()
   throws Exception {
 HttpServer2 server = startHttpServer();





hadoop git commit: HDFS-11574. Spelling mistakes in the Java source. Contributed by Hu Xiaodong.

2017-03-24 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk ab759e91b -> d4f73e7e2


HDFS-11574. Spelling mistakes in the Java source. Contributed by Hu Xiaodong.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d4f73e7e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d4f73e7e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d4f73e7e

Branch: refs/heads/trunk
Commit: d4f73e7e27141ce0a88edb96fa304418a97a82a3
Parents: ab759e9
Author: Ravi Prakash 
Authored: Fri Mar 24 09:38:17 2017 -0700
Committer: Ravi Prakash 
Committed: Fri Mar 24 09:38:17 2017 -0700

--
 .../hdfs/server/namenode/ha/RequestHedgingProxyProvider.java | 2 +-
 .../java/org/apache/hadoop/hdfs/TestDataTransferProtocol.java| 2 +-
 .../hadoop/hdfs/server/namenode/ha/TestRetryCacheWithHA.java | 2 +-
 .../hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java| 4 ++--
 .../hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java| 2 +-
 .../apache/hadoop/yarn/server/resourcemanager/Application.java   | 2 +-
 6 files changed, 7 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4f73e7e/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
index a765e95..2f6c9bc 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ha/RequestHedgingProxyProvider.java
@@ -44,7 +44,7 @@ import org.slf4j.LoggerFactory;
  * per-se. It constructs a wrapper proxy that sends the request to ALL
  * underlying proxies simultaneously. It assumes the in an HA setup, there will
  * be only one Active, and the active should respond faster than any configured
- * standbys. Once it recieve a response from any one of the configred proxies,
+ * standbys. Once it receive a response from any one of the configred proxies,
  * outstanding requests to other proxies are immediately cancelled.
  */
 public class RequestHedgingProxyProvider extends

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4f73e7e/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferProtocol.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferProtocol.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferProtocol.java
index aeff16d..3f4fe28 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferProtocol.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferProtocol.java
@@ -133,7 +133,7 @@ public class TestDataTransferProtocol {
   LOG.info("Expected: " + expected);
   
   if (eofExpected) {
-throw new IOException("Did not recieve IOException when an exception " +
+throw new IOException("Did not receive IOException when an exception " +
    "is expected while reading from " + datanode);
   }
   assertEquals(expected, received);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4f73e7e/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRetryCacheWithHA.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRetryCacheWithHA.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRetryCacheWithHA.java
index 12fa211..e29d518 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRetryCacheWithHA.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRetryCacheWithHA.java
@@ -1282,7 +1282,7 @@ public class TestRetryCacheWithHA {
 
   /**
* When NN failover happens, if the client did not receive the response and
-   * send a retry request to the other NN, the same response should be recieved
+   * send a retry request to the other NN, the same response should be received
* based on the retry cache.
*/
   public void testClientRetryWithFailover(final AtMostOnceOp op)


hadoop git commit: HADOOP-14213. Move Configuration runtime check for hadoop-site.xml to initialization. Contributed by Jonathan Eagles

2017-03-23 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 20878d052 -> 87339d769


HADOOP-14213. Move Configuration runtime check for hadoop-site.xml to 
initialization. Contributed by Jonathan Eagles

(cherry picked from commit 595f62e362c08704d6fb692e21c97b512bc7ec49)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/87339d76
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/87339d76
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/87339d76

Branch: refs/heads/branch-2
Commit: 87339d769163edb88c798e8adfdd742c094e85c6
Parents: 20878d0
Author: Ravi Prakash 
Authored: Thu Mar 23 09:28:10 2017 -0700
Committer: Ravi Prakash 
Committed: Thu Mar 23 09:30:22 2017 -0700

--
 .../org/apache/hadoop/conf/Configuration.java | 18 --
 1 file changed, 8 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/87339d76/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 005984d..2528da9 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -655,21 +655,24 @@ public class Configuration implements 
Iterable<Map.Entry<String,String>>,
 }
   }
  
-  static{
-//print deprecation warning if hadoop-site.xml is found in classpath
+  static {
+// Add default resources
+addDefaultResource("core-default.xml");
+addDefaultResource("core-site.xml");
+
+// print deprecation warning if hadoop-site.xml is found in classpath
 ClassLoader cL = Thread.currentThread().getContextClassLoader();
 if (cL == null) {
   cL = Configuration.class.getClassLoader();
 }
-if(cL.getResource("hadoop-site.xml")!=null) {
+if (cL.getResource("hadoop-site.xml") != null) {
   LOG.warn("DEPRECATED: hadoop-site.xml found in the classpath. " +
   "Usage of hadoop-site.xml is deprecated. Instead use core-site.xml, "
   + "mapred-site.xml and hdfs-site.xml to override properties of " +
   "core-default.xml, mapred-default.xml and hdfs-default.xml " +
   "respectively");
+  addDefaultResource("hadoop-site.xml");
 }
-addDefaultResource("core-default.xml");
-addDefaultResource("core-site.xml");
   }
   
   private Properties properties;
@@ -2557,11 +2560,6 @@ public class Configuration implements 
Iterable<Map.Entry<String,String>>,
   for (String resource : defaultResources) {
 loadResource(properties, new Resource(resource), quiet);
   }
-
-  //support the hadoop-site.xml as a deprecated case
-  if(getResource("hadoop-site.xml")!=null) {
-loadResource(properties, new Resource("hadoop-site.xml"), quiet);
-  }
 }
 
 for (int i = 0; i < resources.size(); i++) {




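The reordering matters because Configuration loads its default resources in
registration order, with later resources overriding earlier ones; registering
hadoop-site.xml after core-default.xml and core-site.xml keeps its old
precedence while dropping the second classpath probe on every load. A small
sketch of that ordering (the resource name my-site.xml is hypothetical and
assumed to be on the classpath):

import org.apache.hadoop.conf.Configuration;

public class ResourceOrder {
  public static void main(String[] args) {
    // Default resources load in the order they were registered, so a key
    // defined in my-site.xml overrides the same key in core-site.xml.
    Configuration.addDefaultResource("my-site.xml");

    Configuration conf = new Configuration();
    // Resolved against core-default.xml, then core-site.xml, then my-site.xml.
    System.out.println(conf.get("fs.defaultFS"));
  }
}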

hadoop git commit: HADOOP-14213. Move Configuration runtime check for hadoop-site.xml to initialization. Contributed by Jonathan Eagles

2017-03-23 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk a5a4867f3 -> 595f62e36


HADOOP-14213. Move Configuration runtime check for hadoop-site.xml to 
initialization. Contributed by Jonathan Eagles


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/595f62e3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/595f62e3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/595f62e3

Branch: refs/heads/trunk
Commit: 595f62e362c08704d6fb692e21c97b512bc7ec49
Parents: a5a4867
Author: Ravi Prakash 
Authored: Thu Mar 23 09:28:10 2017 -0700
Committer: Ravi Prakash 
Committed: Thu Mar 23 09:28:10 2017 -0700

--
 .../org/apache/hadoop/conf/Configuration.java | 18 --
 1 file changed, 8 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/595f62e3/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index bade06e..a9c8d9c 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -668,21 +668,24 @@ public class Configuration implements 
Iterable<Map.Entry<String,String>>,
 }
   }
  
-  static{
-//print deprecation warning if hadoop-site.xml is found in classpath
+  static {
+// Add default resources
+addDefaultResource("core-default.xml");
+addDefaultResource("core-site.xml");
+
+// print deprecation warning if hadoop-site.xml is found in classpath
 ClassLoader cL = Thread.currentThread().getContextClassLoader();
 if (cL == null) {
   cL = Configuration.class.getClassLoader();
 }
-if(cL.getResource("hadoop-site.xml")!=null) {
+if (cL.getResource("hadoop-site.xml") != null) {
   LOG.warn("DEPRECATED: hadoop-site.xml found in the classpath. " +
   "Usage of hadoop-site.xml is deprecated. Instead use core-site.xml, "
   + "mapred-site.xml and hdfs-site.xml to override properties of " +
   "core-default.xml, mapred-default.xml and hdfs-default.xml " +
   "respectively");
+  addDefaultResource("hadoop-site.xml");
 }
-addDefaultResource("core-default.xml");
-addDefaultResource("core-site.xml");
   }
   
   private Properties properties;
@@ -2638,11 +2641,6 @@ public class Configuration implements 
Iterable<Map.Entry<String,String>>,
   for (String resource : defaultResources) {
 loadResource(properties, new Resource(resource), quiet);
   }
-
-  //support the hadoop-site.xml as a deprecated case
-  if(getResource("hadoop-site.xml")!=null) {
-loadResource(properties, new Resource("hadoop-site.xml"), quiet);
-  }
 }
 
 for (int i = 0; i < resources.size(); i++) {





hadoop git commit: HADOOP-13898. Should set HADOOP_JOB_HISTORYSERVER_HEAPSIZE only if it's empty. Contributed by Fei Hui.

2016-12-13 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 282a56243 -> d7d9006af


HADOOP-13898. Should set HADOOP_JOB_HISTORYSERVER_HEAPSIZE only if it's empty. 
Contributed by Fei Hui.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d7d9006a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d7d9006a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d7d9006a

Branch: refs/heads/branch-2
Commit: d7d9006afc60dd17884d939921a67d4d19f87f4f
Parents: 282a562
Author: Ravi Prakash 
Authored: Tue Dec 13 13:08:54 2016 -0800
Committer: Ravi Prakash 
Committed: Tue Dec 13 13:08:54 2016 -0800

--
 hadoop-mapreduce-project/conf/mapred-env.cmd | 5 -
 hadoop-mapreduce-project/conf/mapred-env.sh  | 5 -
 2 files changed, 8 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d7d9006a/hadoop-mapreduce-project/conf/mapred-env.cmd
--
diff --git a/hadoop-mapreduce-project/conf/mapred-env.cmd 
b/hadoop-mapreduce-project/conf/mapred-env.cmd
index f27943f..b50ff12 100644
--- a/hadoop-mapreduce-project/conf/mapred-env.cmd
+++ b/hadoop-mapreduce-project/conf/mapred-env.cmd
@@ -14,7 +14,10 @@
 @rem See the License for the specific language governing permissions and
 @rem limitations under the License.
 
-set HADOOP_JOB_HISTORYSERVER_HEAPSIZE=1000
+@rem when HADOOP_JOB_HISTORYSERVER_HEAPSIZE is not defined, set it
+if not defined HADOOP_JOB_HISTORYSERVER_HEAPSIZE (
+  set HADOOP_JOB_HISTORYSERVER_HEAPSIZE=1000
+)
 
 set HADOOP_MAPRED_ROOT_LOGGER=%HADOOP_LOGLEVEL%,RFA
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d7d9006a/hadoop-mapreduce-project/conf/mapred-env.sh
--
diff --git a/hadoop-mapreduce-project/conf/mapred-env.sh 
b/hadoop-mapreduce-project/conf/mapred-env.sh
index 6be1e27..3590882 100644
--- a/hadoop-mapreduce-project/conf/mapred-env.sh
+++ b/hadoop-mapreduce-project/conf/mapred-env.sh
@@ -15,7 +15,10 @@
 
 # export JAVA_HOME=/home/y/libexec/jdk1.6.0/
 
-export HADOOP_JOB_HISTORYSERVER_HEAPSIZE=1000
+# when HADOOP_JOB_HISTORYSERVER_HEAPSIZE is not defined, set it.
+if [ "$HADOOP_JOB_HISTORYSERVER_HEAPSIZE" = "" ];then
+  export HADOOP_JOB_HISTORYSERVER_HEAPSIZE=1000
+fi
 
 export HADOOP_MAPRED_ROOT_LOGGER=INFO,RFA
 





hadoop git commit: MAPREDUCE-6810. Fix hadoop-mapreduce-client-nativetask compilation with GCC-6.2.1. Contributed by Ravi Prakash.

2016-11-30 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk b3befc021 -> 7c848719d


MAPREDUCE-6810. Fix hadoop-mapreduce-client-nativetask compilation with 
GCC-6.2.1. Contributed by Ravi Prakash.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7c848719
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7c848719
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7c848719

Branch: refs/heads/trunk
Commit: 7c848719de778929258f1f9e2778e56f267c90ed
Parents: b3befc0
Author: Ravi Prakash 
Authored: Wed Nov 30 10:47:41 2016 -0800
Committer: Ravi Prakash 
Committed: Wed Nov 30 10:47:41 2016 -0800

--
 .../src/main/native/src/lib/Log.h  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7c848719/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Log.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Log.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Log.h
index a0c17f3..a84b055 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Log.h
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Log.h
@@ -32,7 +32,7 @@ extern FILE * LOG_DEVICE;
 #define LOG(_fmt_, args...)   if (LOG_DEVICE) { \
 time_t log_timer; struct tm log_tm; \
time(&log_timer); localtime_r(&log_timer, &log_tm); \
-fprintf(LOG_DEVICE, "%02d/%02d/%02d %02d:%02d:%02d INFO "_fmt_"\n", \
+fprintf(LOG_DEVICE, "%02d/%02d/%02d %02d:%02d:%02d INFO " _fmt_ "\n", \
 log_tm.tm_year%100, log_tm.tm_mon+1, log_tm.tm_mday, \
 log_tm.tm_hour, log_tm.tm_min, log_tm.tm_sec, \
 ##args);}




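For context on the one-character fix above: GCC 6 defaults to a newer C++
standard in which a string literal immediately followed by an identifier, as in
"INFO "_fmt_, is lexed as a single user-defined-literal token, so the _fmt_
macro parameter is never expanded and compilation fails; inserting the spaces
restores ordinary string-literal concatenation.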

hadoop git commit: HDFS-8678. Bring back the feature to view chunks of files in the HDFS file browser. Contributed by Ivo Udelsmann.

2016-11-30 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 51e6c1cc3 -> 625df87c7


HDFS-8678. Bring back the feature to view chunks of files in the HDFS file 
browser. Contributed by Ivo Udelsmann.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/625df87c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/625df87c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/625df87c

Branch: refs/heads/trunk
Commit: 625df87c7b8ec2787e743d845fadde5e73479dc1
Parents: 51e6c1c
Author: Ravi Prakash 
Authored: Wed Nov 30 09:11:19 2016 -0800
Committer: Ravi Prakash 
Committed: Wed Nov 30 09:12:15 2016 -0800

--
 .../src/main/webapps/hdfs/explorer.html | 13 +--
 .../src/main/webapps/hdfs/explorer.js   | 37 +---
 2 files changed, 43 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/625df87c/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
index ad8c374..3700a5e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
@@ -57,8 +57,17 @@
File information
  
  
-   Download
-
+   
+  
+Download
+  
+  
+Head the 
file (first 32K)
+  
+  
+Tail the 
file (last 32K)
+ 
+   


  

http://git-wip-us.apache.org/repos/asf/hadoop/blob/625df87c/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
index 1739db2..3e276a9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
@@ -192,13 +192,40 @@
   var download_url = '/webhdfs/v1' + abs_path + '?op=OPEN';
 
   $('#file-info-download').attr('href', download_url);
-  $('#file-info-preview').click(function() {
+
+  var processPreview = function(url) {
+url += "&noredirect=true";
+$.ajax({
+  type: 'GET',
+  url: url,
+  processData: false,
+  crossDomain: true
+}).done(function(data) {
+  url = data.Location;
+  $.ajax({
+type: 'GET',
+url: url,
+processData: false,
+crossDomain: true
+  }).complete(function(data) {
+$('#file-info-preview-body').val(data.responseText);
+$('#file-info-tail').show();
+  }).error(function(jqXHR, textStatus, errorThrown) {
+show_err_msg("Couldn't preview the file. " + errorThrown);
+  });
+}).error(function(jqXHR, textStatus, errorThrown) {
+  show_err_msg("Couldn't find datanode to read file from. " + 
errorThrown);
+});
+  }
+
+  $('#file-info-preview-tail').click(function() {
 var offset = d.fileLength - TAIL_CHUNK_SIZE;
var url = offset > 0 ? download_url + '&offset=' + offset : download_url;
-$.get(url, function(t) {
-  $('#file-info-preview-body').val(t);
-  $('#file-info-tail').show();
-}, "text").error(network_error_handler(url));
+processPreview(url);
+  });
+  $('#file-info-preview-head').click(function() {
+var url = d.fileLength > TAIL_CHUNK_SIZE ? download_url + '&length=' + TAIL_CHUNK_SIZE : download_url;
+processPreview(url);
   });
 
   if (d.fileLength > 0) {




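The new head and tail buttons map directly onto the length and offset
parameters of the WebHDFS OPEN operation, so the same two reads can be made
from any HTTP client. A hedged Java sketch follows; host, port, path and the
caller-supplied file length are illustrative (on these branch-2 lines the
NameNode HTTP port defaults to 50070):

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class WebHdfsPeek {
  static final int CHUNK = 32768; // 32K, matching TAIL_CHUNK_SIZE above

  static String fetch(String url) throws Exception {
    HttpURLConnection c = (HttpURLConnection) new URL(url).openConnection();
    c.setInstanceFollowRedirects(true); // follow the NN's redirect to a DN
    StringBuilder sb = new StringBuilder();
    try (BufferedReader r = new BufferedReader(
        new InputStreamReader(c.getInputStream()))) {
      for (String line; (line = r.readLine()) != null; ) {
        sb.append(line).append('\n');
      }
    }
    return sb.toString();
  }

  public static void main(String[] args) throws Exception {
    String base = "http://localhost:50070/webhdfs/v1/tmp/demo.txt?op=OPEN";
    long fileLength = Long.parseLong(args[0]); // caller supplies the size
    // "Head": cap the read at the first 32K with length.
    System.out.println(fetch(base + "&length=" + CHUNK));
    // "Tail": start the read 32K before the end with offset.
    long offset = Math.max(0, fileLength - CHUNK);
    System.out.println(fetch(base + "&offset=" + offset));
  }
}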

hadoop git commit: HADOOP-13773. Wrong HADOOP_CLIENT_OPTS in hadoop-env on branch-2. Contributed by Fei Hui

2016-11-01 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 db6dfeca1 -> 8504e20b2


HADOOP-13773. Wrong HADOOP_CLIENT_OPTS in hadoop-env on branch-2. Contributed 
by Fei Hui

(cherry picked from commit 045501a538800a48b77e688e46c55dce54473041)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8504e20b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8504e20b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8504e20b

Branch: refs/heads/branch-2.8
Commit: 8504e20b2e6b596c3297d6620346f8b1ab44297c
Parents: db6dfec
Author: Ravi Prakash 
Authored: Tue Nov 1 10:49:04 2016 -0700
Committer: Ravi Prakash 
Committed: Tue Nov 1 10:49:45 2016 -0700

--
 .../hadoop-common/src/main/conf/hadoop-env.cmd | 6 +-
 .../hadoop-common/src/main/conf/hadoop-env.sh  | 6 +-
 2 files changed, 10 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8504e20b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.cmd
--
diff --git a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.cmd 
b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.cmd
index 5dbd635..5aed07d 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.cmd
+++ b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.cmd
@@ -58,7 +58,11 @@ set HADOOP_DATANODE_OPTS=-Dhadoop.security.logger=ERROR,RFAS 
%HADOOP_DATANODE_OP
 set 
HADOOP_SECONDARYNAMENODE_OPTS=-Dhadoop.security.logger=%HADOOP_SECURITY_LOGGER% 
-Dhdfs.audit.logger=%HDFS_AUDIT_LOGGER% %HADOOP_SECONDARYNAMENODE_OPTS%
 
 @rem The following applies to multiple commands (fs, dfs, fsck, distcp etc)
-set HADOOP_CLIENT_OPTS=-Xmx512m %HADOOP_CLIENT_OPTS%
+set HADOOP_CLIENT_OPTS=%HADOOP_CLIENT_OPTS%
+@rem set heap args when HADOOP_HEAPSIZE is empty
+if not defined HADOOP_HEAPSIZE (
+  set HADOOP_CLIENT_OPTS=-Xmx512m %HADOOP_CLIENT_OPTS%
+)
 @rem set HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData 
%HADOOP_JAVA_PLATFORM_OPTS%"
 
 @rem On secure datanodes, user to run the datanode as after dropping privileges

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8504e20b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
--
diff --git a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh 
b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
index 6469e61..8d15f98 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
+++ b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
@@ -64,7 +64,11 @@ export HADOOP_NFS3_OPTS="$HADOOP_NFS3_OPTS"
 export HADOOP_PORTMAP_OPTS="-Xmx512m $HADOOP_PORTMAP_OPTS"
 
 # The following applies to multiple commands (fs, dfs, fsck, distcp etc)
-export HADOOP_CLIENT_OPTS="-Xmx512m $HADOOP_CLIENT_OPTS"
+export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS"
+# set heap args when HADOOP_HEAPSIZE is empty
+if [ "$HADOOP_HEAPSIZE" = "" ]; then
+  export HADOOP_CLIENT_OPTS="-Xmx512m $HADOOP_CLIENT_OPTS"
+fi
 #HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData $HADOOP_JAVA_PLATFORM_OPTS"
 
 # On secure datanodes, user to run the datanode as after dropping privileges.





hadoop git commit: HADOOP-13773. Wrong HADOOP_CLIENT_OPTS in hadoop-env on branch-2. Contributed by Fei Hui

2016-11-01 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 7c1a1834e -> 045501a53


HADOOP-13773. Wrong HADOOP_CLIENT_OPTS in hadoop-env on branch-2. Contributed 
by Fei Hui


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/045501a5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/045501a5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/045501a5

Branch: refs/heads/branch-2
Commit: 045501a538800a48b77e688e46c55dce54473041
Parents: 7c1a183
Author: Ravi Prakash 
Authored: Tue Nov 1 10:49:04 2016 -0700
Committer: Ravi Prakash 
Committed: Tue Nov 1 10:49:04 2016 -0700

--
 .../hadoop-common/src/main/conf/hadoop-env.cmd | 6 +-
 .../hadoop-common/src/main/conf/hadoop-env.sh  | 6 +-
 2 files changed, 10 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/045501a5/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.cmd
--
diff --git a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.cmd 
b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.cmd
index 5dbd635..5aed07d 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.cmd
+++ b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.cmd
@@ -58,7 +58,11 @@ set HADOOP_DATANODE_OPTS=-Dhadoop.security.logger=ERROR,RFAS 
%HADOOP_DATANODE_OP
 set 
HADOOP_SECONDARYNAMENODE_OPTS=-Dhadoop.security.logger=%HADOOP_SECURITY_LOGGER% 
-Dhdfs.audit.logger=%HDFS_AUDIT_LOGGER% %HADOOP_SECONDARYNAMENODE_OPTS%
 
 @rem The following applies to multiple commands (fs, dfs, fsck, distcp etc)
-set HADOOP_CLIENT_OPTS=-Xmx512m %HADOOP_CLIENT_OPTS%
+set HADOOP_CLIENT_OPTS=%HADOOP_CLIENT_OPTS%
+@rem set heap args when HADOOP_HEAPSIZE is empty
+if not defined HADOOP_HEAPSIZE (
+  set HADOOP_CLIENT_OPTS=-Xmx512m %HADOOP_CLIENT_OPTS%
+)
 @rem set HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData 
%HADOOP_JAVA_PLATFORM_OPTS%"
 
 @rem On secure datanodes, user to run the datanode as after dropping privileges

http://git-wip-us.apache.org/repos/asf/hadoop/blob/045501a5/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
--
diff --git a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh 
b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
index 6469e61..8d15f98 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
+++ b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
@@ -64,7 +64,11 @@ export HADOOP_NFS3_OPTS="$HADOOP_NFS3_OPTS"
 export HADOOP_PORTMAP_OPTS="-Xmx512m $HADOOP_PORTMAP_OPTS"
 
 # The following applies to multiple commands (fs, dfs, fsck, distcp etc)
-export HADOOP_CLIENT_OPTS="-Xmx512m $HADOOP_CLIENT_OPTS"
+export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS"
+# set heap args when HADOOP_HEAPSIZE is empty
+if [ "$HADOOP_HEAPSIZE" = "" ]; then
+  export HADOOP_CLIENT_OPTS="-Xmx512m $HADOOP_CLIENT_OPTS"
+fi
 #HADOOP_JAVA_PLATFORM_OPTS="-XX:-UsePerfData $HADOOP_JAVA_PLATFORM_OPTS"
 
 # On secure datanodes, user to run the datanode as after dropping privileges.


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



hadoop git commit: HADOOP-13587. distcp.map.bandwidth.mb is overwritten even when -bandwidth flag isn't set. Contributed by Zoran Dimitrijevic

2016-09-12 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk cc01ed702 -> 9faccd104


HADOOP-13587. distcp.map.bandwidth.mb is overwritten even when -bandwidth flag 
isn't set. Contributed by Zoran Dimitrijevic


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9faccd10
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9faccd10
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9faccd10

Branch: refs/heads/trunk
Commit: 9faccd104672dfef123735ca8ada178fc3a6196f
Parents: cc01ed70
Author: Ravi Prakash 
Authored: Mon Sep 12 08:26:08 2016 -0700
Committer: Ravi Prakash 
Committed: Mon Sep 12 08:26:08 2016 -0700

--
 .../java/org/apache/hadoop/tools/DistCp.java|  4 ++
 .../org/apache/hadoop/tools/DistCpOptions.java  |  8 ++-
 .../apache/hadoop/tools/TestOptionsParser.java  | 64 +++-
 3 files changed, 70 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9faccd10/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
index 36e5ee4..be58f13 100644
--- 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
+++ 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
@@ -71,6 +71,7 @@ public class DistCp extends Configured implements Tool {
   private static final String PREFIX = "_distcp";
   private static final String WIP_PREFIX = "._WIP_";
   private static final String DISTCP_DEFAULT_XML = "distcp-default.xml";
+  private static final String DISTCP_SITE_XML = "distcp-site.xml";
   static final Random rand = new Random();
 
   private boolean submitted;
@@ -86,6 +87,7 @@ public class DistCp extends Configured implements Tool {
   public DistCp(Configuration configuration, DistCpOptions inputOptions) 
throws Exception {
 Configuration config = new Configuration(configuration);
 config.addResource(DISTCP_DEFAULT_XML);
+config.addResource(DISTCP_SITE_XML);
 setConf(config);
 this.inputOptions = inputOptions;
 this.metaFolder   = createMetaFolderPath();
@@ -393,10 +395,12 @@ public class DistCp extends Configured implements Tool {
* Loads properties from distcp-default.xml into configuration
* object
* @return Configuration which includes properties from distcp-default.xml
+   * and distcp-site.xml
*/
   private static Configuration getDefaultConf() {
 Configuration config = new Configuration();
 config.addResource(DISTCP_DEFAULT_XML);
+config.addResource(DISTCP_SITE_XML);
 return config;
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9faccd10/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java
 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java
index e6f53f5..4c5518f 100644
--- 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java
+++ 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java
@@ -47,7 +47,7 @@ public class DistCpOptions {
   public static final int maxNumListstatusThreads = 40;
   private int numListstatusThreads = 0;  // Indicates that flag is not set.
   private int maxMaps = DistCpConstants.DEFAULT_MAPS;
-  private float mapBandwidth = DistCpConstants.DEFAULT_BANDWIDTH_MB;
+  private float mapBandwidth = 0;  // Indicates that we should use the default.
 
   private String copyStrategy = DistCpConstants.UNIFORMSIZE;
 
@@ -609,8 +609,10 @@ public class DistCpOptions {
 String.valueOf(useDiff));
 DistCpOptionSwitch.addToConf(conf, DistCpOptionSwitch.SKIP_CRC,
 String.valueOf(skipCRC));
-DistCpOptionSwitch.addToConf(conf, DistCpOptionSwitch.BANDWIDTH,
-String.valueOf(mapBandwidth));
+if (mapBandwidth > 0) {
+  DistCpOptionSwitch.addToConf(conf, DistCpOptionSwitch.BANDWIDTH,
+  String.valueOf(mapBandwidth));
+}
 DistCpOptionSwitch.addToConf(conf, DistCpOptionSwitch.PRESERVE_STATUS,
 DistCpUtils.packAttributes(preserveStatus));
 if (filtersFile != null) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9faccd10/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
--
diff --git 

[3/3] hadoop git commit: HDFS-9877. HDFS Namenode UI: Fix browsing directories that need to be encoded (Ravi Prakash via aw)

2016-09-01 Thread raviprak
HDFS-9877. HDFS Namenode UI: Fix browsing directories that need to be encoded 
(Ravi Prakash via aw)

(cherry picked from commit 15f018434c5b715729488fd0b03a11f1bc943470)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b1466b81
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b1466b81
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b1466b81

Branch: refs/heads/branch-2
Commit: b1466b81242f4bf0815d2760ca9da32ee6c3d871
Parents: b9eedc2
Author: Allen Wittenauer 
Authored: Fri Jun 3 17:06:29 2016 -0700
Committer: Ravi Prakash 
Committed: Thu Sep 1 14:24:13 2016 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1466b81/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
index c0ac057..1739db2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
@@ -33,7 +33,7 @@
   $(window).bind('hashchange', function () {
 $('#alert-panel').hide();
 
-var dir = window.location.hash.slice(1);
+var dir = decodeURIComponent(window.location.hash.slice(1));
 if(dir == "") {
   dir = "/";
 }



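The one-line fix works because the browser percent-encodes special characters
that land in location.hash, so the value must be decoded before it is used as
an HDFS path again. The same round trip in Java (the sample path is made up;
note that URLEncoder uses '+' for spaces where JavaScript's encodeURIComponent
would use %20):

import java.net.URLDecoder;
import java.net.URLEncoder;

public class PathRoundTrip {
  public static void main(String[] args) throws Exception {
    String dir = "/user/ravi/dir with %percent"; // hypothetical HDFS path
    String encoded = URLEncoder.encode(dir, "UTF-8");
    System.out.println(encoded); // what a URL fragment would carry
    // Decoding restores the original path, which is what explorer.js now
    // does with decodeURIComponent before browsing.
    System.out.println(URLDecoder.decode(encoded, "UTF-8"));
  }
}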


[2/3] hadoop git commit: HDFS-7987. Allow files / directories to be moved (Ravi Prakash via aw)

2016-09-01 Thread raviprak
HDFS-7987. Allow files / directories to be moved (Ravi Prakash via aw)

(cherry picked from commit d44f4745b4a186dd06dd6837a85ad90a237d7d97)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b9eedc24
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b9eedc24
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b9eedc24

Branch: refs/heads/branch-2
Commit: b9eedc24a644891fd8c530631560bee051563ca4
Parents: 743c0eb
Author: Allen Wittenauer 
Authored: Fri Jun 10 09:02:28 2016 -0700
Committer: Ravi Prakash 
Committed: Thu Sep 1 14:23:42 2016 -0700

--
 .../src/main/webapps/hdfs/explorer.html |  9 +++
 .../src/main/webapps/hdfs/explorer.js   | 83 
 .../src/main/webapps/static/hadoop.css  |  7 ++
 3 files changed, 83 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b9eedc24/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
index 51f72e5..ad8c374 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
@@ -179,6 +179,13 @@
   data-target="#modal-upload-file" title="Upload Files">
 
 
+
+
+
+  Cut
+  Paste
+
   
 
 
@@ -236,6 +243,7 @@
   
 
   
+
 Permission
 Owner
 Group
@@ -251,6 +259,7 @@
   {#FileStatus}
   
+ 
 
   {type|helper_to_directory}{permission|helper_to_permission}
   {aclBit|helper_to_acl_bit}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b9eedc24/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
index f835baf..c0ac057 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
@@ -310,22 +310,28 @@
   var absolute_file_path = append_path(current_directory, inode_name);
   delete_path(inode_name, absolute_file_path);
 });
-  
-  $('#table-explorer').dataTable( {
-  'lengthMenu': [ [25, 50, 100, -1], [25, 50, 100, "All"] ],
-  'columns': [
-  {'searchable': false }, //Permissions
-  null, //Owner
-  null, //Group
-  { 'searchable': false, 'render': func_size_render}, //Size
-  { 'searchable': false, 'render': func_time_render}, //Last 
Modified
-  { 'searchable': false }, //Replication
-  null, //Block Size
-  null, //Name
-  { 'sortable' : false } //Trash
-  ],
-  "deferRender": true
-  });
+
+$('#file-selector-all').click(function() {
+  $('.file_selector').prop('checked', 
$('#file-selector-all')[0].checked );
+});
+
+//This needs to be last because it repaints the table
+$('#table-explorer').dataTable( {
+  'lengthMenu': [ [25, 50, 100, -1], [25, 50, 100, "All"] ],
+  'columns': [
+{ 'orderable' : false }, //select
+{'searchable': false }, //Permissions
+null, //Owner
+null, //Group
+{ 'searchable': false, 'render': func_size_render}, //Size
+{ 'searchable': false, 'render': func_time_render}, //Last Modified
+{ 'searchable': false }, //Replication
+null, //Block Size
+null, //Name
+{ 'orderable' : false } //Trash
+  ],
+  "deferRender": true
+});
   });
 }).error(network_error_handler(url));
   }
@@ -417,5 +423,50 @@
 }
   });
 
+  //Store the list of files which have been checked into session storage
+  function store_selected_files(current_directory) {
+sessionStorage.setItem("source_directory", current_directory);
+var selected_files = $("input:checked.file_selector");
+var selected_file_names = new Array();
+selected_files.each(function(index) {
+  selected_file_names[index] = $(this).closest('tr').attr('inode-path');
+})
+sessionStorage.setItem("selected_file_names", 
JSON.stringify(selected_file_names));
+

[1/3] hadoop git commit: HDFS-7767. Use the noredirect flag in WebHDFS to allow web browsers to upload files via the NN UI (Ravi Prakash via aw)

2016-09-01 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 caef03d74 -> b1466b812


HDFS-7767. Use the noredirect flag in WebHDFS to allow web browsers to upload 
files via the NN UI (Ravi Prakash via aw)

(cherry picked from commit 99a771cd7a3f792a76ac89c406b82a983c059d28)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/743c0ebd
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/743c0ebd
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/743c0ebd

Branch: refs/heads/branch-2
Commit: 743c0ebd6729d6f0af79722cbe777b6eba5d7d5b
Parents: caef03d
Author: Allen Wittenauer 
Authored: Fri Jun 3 17:07:39 2016 -0700
Committer: Ravi Prakash 
Committed: Thu Sep 1 14:22:23 2016 -0700

--
 .../src/main/webapps/hdfs/explorer.html | 25 +-
 .../src/main/webapps/hdfs/explorer.js   | 51 
 2 files changed, 74 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/743c0ebd/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
index 5106006..51f72e5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.html
@@ -119,6 +119,23 @@
   
 
 
+
+  
+ 
+   
+ Upload File
+   
+   
+ 
+   
+   
+ Close
+ Upload
+   
+ 
+  
+
   
 
   
@@ -142,7 +159,7 @@
   
 
   
-  
+  
 
   
 
@@ -152,12 +169,16 @@
   
 
   
-  
+  
 
 
 
+
+
+
   
 
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/743c0ebd/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
index 102da9d..f835baf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
@@ -366,5 +366,56 @@
 });
   })
 
+  $('#modal-upload-file-button').click(function() {
+$(this).prop('disabled', true);
+$(this).button('complete');
+var files = []
+var numCompleted = 0
+
+for(var i = 0; i < $('#modal-upload-file-input').prop('files').length; 
i++) {
+  (function() {
+var file = $('#modal-upload-file-input').prop('files')[i];
+var url = '/webhdfs/v1' + current_directory;
+url = encode_path(append_path(url, file.name));
+url += '?op=CREATE&noredirect=true';
+files.push( { file: file } )
+files[i].request = $.ajax({
+  type: 'PUT',
+  url: url,
+  processData: false,
+  crossDomain: true
+});
+  })()
+ }
+for(var f in files) {
+  (function() {
+var file = files[f];
+file.request.done(function(data) {
+  var url = data['Location'];
+  $.ajax({
+type: 'PUT',
+url: url,
+data: file.file,
+processData: false,
+crossDomain: true
+  }).complete(function(data) {
+numCompleted++;
+if(numCompleted == files.length) {
+  $('#modal-upload-file').modal('hide');
+  $('#modal-upload-file-button').button('reset');
+  browse_directory(current_directory);
+}
+  }).error(function(jqXHR, textStatus, errorThrown) {
+numCompleted++;
+show_err_msg("Couldn't upload the file " + file.file.name + ". "+ 
errorThrown);
+  });
+}).error(function(jqXHR, textStatus, errorThrown) {
+  numCompleted++;
+  show_err_msg("Couldn't find datanode to write file. " + errorThrown);
+});
+  })();
+}
+  });
+
   init();
 })();



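The upload flow above is the standard two-step WebHDFS create: a PUT to the
NameNode with noredirect=true returns JSON whose Location field names the
DataNode URL, and a second PUT sends the bytes there. A hedged Java sketch of
the same handshake (host, port, path and the naive regex JSON handling are all
illustrative; a real client would use a proper JSON parser):

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Scanner;

public class WebHdfsUpload {
  public static void main(String[] args) throws Exception {
    // Step 1: ask the NameNode where to write, suppressing the usual 307.
    String nn = "http://localhost:50070/webhdfs/v1/tmp/hello.txt"
        + "?op=CREATE&noredirect=true&overwrite=true";
    HttpURLConnection c1 = (HttpURLConnection) new URL(nn).openConnection();
    c1.setRequestMethod("PUT");
    String json;
    try (Scanner s = new Scanner(c1.getInputStream(), "UTF-8")) {
      json = s.useDelimiter("\\A").next();
    }
    String dn = json.replaceAll("(?s).*\"Location\"\\s*:\\s*\"([^\"]+)\".*", "$1");

    // Step 2: PUT the actual bytes to the DataNode URL we were handed.
    HttpURLConnection c2 = (HttpURLConnection) new URL(dn).openConnection();
    c2.setRequestMethod("PUT");
    c2.setDoOutput(true);
    try (OutputStream out = c2.getOutputStream()) {
      out.write("hello, webhdfs\n".getBytes(StandardCharsets.UTF_8));
    }
    System.out.println("DataNode replied " + c2.getResponseCode()); // expect 201
  }
}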


hadoop git commit: HDFS-10220. A large number of expired leases can make namenode unresponsive and cause failover (Nicolas Fraison via raviprak)

2016-06-20 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 c5dafb4ad -> 588ea6d43


HDFS-10220. A large number of expired leases can make namenode unresponsive and 
cause failover (Nicolas Fraison via raviprak)

(cherry picked from commit ae047655f4355288406cd5396fb4e3ea7c307b14)
(cherry picked from commit 9c5f7f290eb05808fe89835a15bee0947b91d1a0)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/588ea6d4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/588ea6d4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/588ea6d4

Branch: refs/heads/branch-2.8
Commit: 588ea6d431397771c13952c5f10a1cf55ecac733
Parents: c5dafb4
Author: Ravi Prakash <ravip...@altiscale.com>
Authored: Wed Jun 8 13:44:22 2016 -0700
Committer: Ravi Prakash <ravip...@altiscale.com>
Committed: Mon Jun 20 14:38:05 2016 -0700

--
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   | 10 +
 .../hdfs/server/common/HdfsServerConstants.java |  1 -
 .../hdfs/server/namenode/FSNamesystem.java  | 42 
 .../hdfs/server/namenode/LeaseManager.java  | 21 --
 .../src/main/resources/hdfs-default.xml | 18 +
 .../hdfs/server/namenode/TestLeaseManager.java  | 24 ++-
 6 files changed, 94 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/588ea6d4/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index 36cb7f5..42f3ff7 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -365,6 +365,16 @@ public class DFSConfigKeys extends CommonConfigurationKeys 
{
   public static final int DFS_NAMENODE_MAX_XATTR_SIZE_DEFAULT = 16384;
   public static final int DFS_NAMENODE_MAX_XATTR_SIZE_HARD_LIMIT = 32768;
 
+  public static final String  DFS_NAMENODE_LEASE_RECHECK_INTERVAL_MS_KEY =
+  "dfs.namenode.lease-recheck-interval-ms";
+  public static final long    DFS_NAMENODE_LEASE_RECHECK_INTERVAL_MS_DEFAULT =
+  2000;
+  public static final String
+  DFS_NAMENODE_MAX_LOCK_HOLD_TO_RELEASE_LEASE_MS_KEY =
+  "dfs.namenode.max-lock-hold-to-release-lease-ms";
+  public static final long
+  DFS_NAMENODE_MAX_LOCK_HOLD_TO_RELEASE_LEASE_MS_DEFAULT = 25;
+
   public static final String  DFS_UPGRADE_DOMAIN_FACTOR = 
"dfs.namenode.upgrade.domain.factor";
   public static final int DFS_UPGRADE_DOMAIN_FACTOR_DEFAULT = 
DFS_REPLICATION_DEFAULT;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/588ea6d4/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
index e447d3a..01d40ea 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
@@ -355,7 +355,6 @@ public interface HdfsServerConstants {
   }
   
   String NAMENODE_LEASE_HOLDER = "HDFS_NameNode";
-  long NAMENODE_LEASE_RECHECK_INTERVAL = 2000;
 
   String CRYPTO_XATTR_ENCRYPTION_ZONE =
   "raw.hdfs.crypto.encryption.zone";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/588ea6d4/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index 9cd1720..d83b4ef 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -82,6 +82,10 @@ import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SAFEMODE_MIN_DAT
 import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_DEFAULT;
 import static 
org.apache.hadoop.hd

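The two settings introduced above are plain Configuration longs; a sketch that
simply reads them back, with the key names and defaults taken from this diff:

import org.apache.hadoop.conf.Configuration;

public class LeaseKnobs {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // How often the lease monitor wakes up to look for expired leases.
    long recheckMs = conf.getLong(
        "dfs.namenode.lease-recheck-interval-ms", 2000);
    // The longest the monitor may hold the namesystem write lock while
    // releasing leases before yielding to other operations.
    long maxHoldMs = conf.getLong(
        "dfs.namenode.max-lock-hold-to-release-lease-ms", 25);
    System.out.println(recheckMs + " ms recheck, " + maxHoldMs + " ms max hold");
  }
}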
hadoop git commit: HADOOP-3733. "s3x:" URLs break when Secret Key contains a slash, even if encoded. Contributed by Steve Loughran.

2016-06-16 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 7eb524dde -> b216dbfb5


HADOOP-3733. "s3x:" URLs break when Secret Key contains a slash, even if 
encoded. Contributed by Steve Loughran.

(cherry picked from commit 4aefe119a0203c03cdc893dcb3330fd37f26f0ee)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b216dbfb
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b216dbfb
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b216dbfb

Branch: refs/heads/branch-2
Commit: b216dbfb55f17e09e4a57bfdc90052275db55a6a
Parents: 7eb524d
Author: Ravi Prakash 
Authored: Thu Jun 16 11:13:35 2016 -0700
Committer: Ravi Prakash 
Committed: Thu Jun 16 11:14:38 2016 -0700

--
 .../hadoop-aws/dev-support/findbugs-exclude.xml |   7 +
 .../org/apache/hadoop/fs/s3/S3Credentials.java  |  31 +-
 .../org/apache/hadoop/fs/s3/S3FileSystem.java   |  20 +-
 .../org/apache/hadoop/fs/s3a/S3AFileSystem.java |  79 ++
 .../java/org/apache/hadoop/fs/s3a/S3AUtils.java |  46 +++
 .../hadoop/fs/s3native/NativeS3FileSystem.java  |  19 +-
 .../hadoop/fs/s3native/S3xLoginHelper.java  | 283 +++
 .../src/site/markdown/tools/hadoop-aws/index.md |   5 +-
 .../apache/hadoop/fs/s3/TestS3FileSystem.java   |  24 +-
 .../hadoop/fs/s3a/TestS3AConfiguration.java |  55 ++--
 .../hadoop/fs/s3a/TestS3ACredentialsInURL.java  | 153 ++
 .../hadoop/fs/s3native/TestS3xLoginHelper.java  | 197 +
 12 files changed, 801 insertions(+), 118 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b216dbfb/hadoop-tools/hadoop-aws/dev-support/findbugs-exclude.xml
--
diff --git a/hadoop-tools/hadoop-aws/dev-support/findbugs-exclude.xml 
b/hadoop-tools/hadoop-aws/dev-support/findbugs-exclude.xml
index 2b4160a..ffb0a79 100644
--- a/hadoop-tools/hadoop-aws/dev-support/findbugs-exclude.xml
+++ b/hadoop-tools/hadoop-aws/dev-support/findbugs-exclude.xml
@@ -15,6 +15,13 @@
limitations under the License.
 -->
 
+
+  
+  
+
+
+
+  
   
   
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b216dbfb/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3Credentials.java
--
diff --git 
a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3Credentials.java
 
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3Credentials.java
index 5ab352a..3951a08 100644
--- 
a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3Credentials.java
+++ 
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3Credentials.java
@@ -24,6 +24,7 @@ import java.net.URI;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.s3native.S3xLoginHelper;
 
 /**
  * 
@@ -49,18 +50,12 @@ public class S3Credentials {
 if (uri.getHost() == null) {
   throw new IllegalArgumentException("Invalid hostname in URI " + uri);
 }
-
-String userInfo = uri.getUserInfo();
-if (userInfo != null) {
-  int index = userInfo.indexOf(':');
-  if (index != -1) {
-accessKey = userInfo.substring(0, index);
-secretAccessKey = userInfo.substring(index + 1);
-  } else {
-accessKey = userInfo;
-  }
+S3xLoginHelper.Login login =
+S3xLoginHelper.extractLoginDetailsWithWarnings(uri);
+if (login.hasLogin()) {
+  accessKey = login.getUser();
+  secretAccessKey = login.getPassword();
 }
-
 String scheme = uri.getScheme();
 String accessKeyProperty = String.format("fs.%s.awsAccessKeyId", scheme);
 String secretAccessKeyProperty =
@@ -77,24 +72,20 @@ public class S3Credentials {
 if (accessKey == null && secretAccessKey == null) {
   throw new IllegalArgumentException("AWS " +
  "Access Key ID and Secret Access " +
- "Key must be specified as the " +
- "username or password " +
- "(respectively) of a " + scheme +
- " URL, or by setting the " +
- accessKeyProperty + " or " +
+ "Key must be specified " +
+ "by setting the " +
+ accessKeyProperty + " and " +
  secretAccessKeyProperty +
  " properties (respectively).");
 } 

hadoop git commit: HADOOP-3733. "s3x:" URLs break when Secret Key contains a slash, even if encoded. Contributed by Steve Loughran.

2016-06-16 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk e983eafb4 -> 4aefe119a


HADOOP-3733. "s3x:" URLs break when Secret Key contains a slash, even if 
encoded. Contributed by Steve Loughran.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4aefe119
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4aefe119
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4aefe119

Branch: refs/heads/trunk
Commit: 4aefe119a0203c03cdc893dcb3330fd37f26f0ee
Parents: e983eaf
Author: Ravi Prakash 
Authored: Thu Jun 16 11:13:35 2016 -0700
Committer: Ravi Prakash 
Committed: Thu Jun 16 11:13:35 2016 -0700

--
 .../hadoop-aws/dev-support/findbugs-exclude.xml |   7 +
 .../org/apache/hadoop/fs/s3/S3Credentials.java  |  31 +-
 .../org/apache/hadoop/fs/s3/S3FileSystem.java   |  20 +-
 .../org/apache/hadoop/fs/s3a/S3AFileSystem.java |  79 ++
 .../java/org/apache/hadoop/fs/s3a/S3AUtils.java |  46 +++
 .../hadoop/fs/s3native/NativeS3FileSystem.java  |  19 +-
 .../hadoop/fs/s3native/S3xLoginHelper.java  | 283 +++
 .../src/site/markdown/tools/hadoop-aws/index.md |   5 +-
 .../apache/hadoop/fs/s3/TestS3FileSystem.java   |  24 +-
 .../hadoop/fs/s3a/TestS3AConfiguration.java |  55 ++--
 .../hadoop/fs/s3a/TestS3ACredentialsInURL.java  | 153 ++
 .../hadoop/fs/s3native/TestS3xLoginHelper.java  | 197 +
 12 files changed, 801 insertions(+), 118 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4aefe119/hadoop-tools/hadoop-aws/dev-support/findbugs-exclude.xml
--
diff --git a/hadoop-tools/hadoop-aws/dev-support/findbugs-exclude.xml 
b/hadoop-tools/hadoop-aws/dev-support/findbugs-exclude.xml
index 2b4160a..ffb0a79 100644
--- a/hadoop-tools/hadoop-aws/dev-support/findbugs-exclude.xml
+++ b/hadoop-tools/hadoop-aws/dev-support/findbugs-exclude.xml
@@ -15,6 +15,13 @@
limitations under the License.
 -->
 
+
+  
+  
+
+
+
+  
   
   
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4aefe119/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3Credentials.java
--
diff --git 
a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3Credentials.java
 
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3Credentials.java
index 5ab352a..3951a08 100644
--- 
a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3Credentials.java
+++ 
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/S3Credentials.java
@@ -24,6 +24,7 @@ import java.net.URI;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.s3native.S3xLoginHelper;
 
 /**
  * 
@@ -49,18 +50,12 @@ public class S3Credentials {
 if (uri.getHost() == null) {
   throw new IllegalArgumentException("Invalid hostname in URI " + uri);
 }
-
-String userInfo = uri.getUserInfo();
-if (userInfo != null) {
-  int index = userInfo.indexOf(':');
-  if (index != -1) {
-accessKey = userInfo.substring(0, index);
-secretAccessKey = userInfo.substring(index + 1);
-  } else {
-accessKey = userInfo;
-  }
+S3xLoginHelper.Login login =
+S3xLoginHelper.extractLoginDetailsWithWarnings(uri);
+if (login.hasLogin()) {
+  accessKey = login.getUser();
+  secretAccessKey = login.getPassword();
 }
-
 String scheme = uri.getScheme();
 String accessKeyProperty = String.format("fs.%s.awsAccessKeyId", scheme);
 String secretAccessKeyProperty =
@@ -77,24 +72,20 @@ public class S3Credentials {
 if (accessKey == null && secretAccessKey == null) {
   throw new IllegalArgumentException("AWS " +
  "Access Key ID and Secret Access " +
- "Key must be specified as the " +
- "username or password " +
- "(respectively) of a " + scheme +
- " URL, or by setting the " +
- accessKeyProperty + " or " +
+ "Key must be specified " +
+ "by setting the " +
+ accessKeyProperty + " and " +
  secretAccessKeyProperty +
  " properties (respectively).");
 } else if (accessKey == null) {
   throw new IllegalArgumentException("AWS 

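The breakage being fixed shows up with plain java.net.URI: an unencoded slash
in the secret key ends the authority early and the login details are silently
lost, while an encoded slash survives but must be decoded before use, which is
what S3xLoginHelper centralizes. A sketch with obviously fake credentials:

import java.net.URI;

public class S3LoginParse {
  public static void main(String[] args) throws Exception {
    // Fake credentials; %2F is a percent-encoded slash inside the secret.
    URI ok = new URI("s3a://ACCESSKEY:se%2Fcret@mybucket/path");
    System.out.println(ok.getRawUserInfo()); // ACCESSKEY:se%2Fcret
    System.out.println(ok.getUserInfo());    // ACCESSKEY:se/cret (decoded)

    // Left unencoded, the slash terminates the authority and the
    // credentials disappear: exactly the failure mode of this JIRA.
    URI broken = new URI("s3a://ACCESSKEY:se/cret@mybucket/path");
    System.out.println(broken.getUserInfo()); // null
  }
}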
hadoop git commit: HDFS-10220. A large number of expired leases can make namenode unresponsive and cause failover (Nicolas Fraison via raviprak)

2016-06-08 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 931966546 -> 9c5f7f290


HDFS-10220. A large number of expired leases can make namenode unresponsive and 
cause failover (Nicolas Fraison via raviprak)

(cherry picked from commit ae047655f4355288406cd5396fb4e3ea7c307b14)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9c5f7f29
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9c5f7f29
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9c5f7f29

Branch: refs/heads/branch-2
Commit: 9c5f7f290eb05808fe89835a15bee0947b91d1a0
Parents: 9319665
Author: Ravi Prakash <ravip...@altiscale.com>
Authored: Wed Jun 8 13:44:22 2016 -0700
Committer: Ravi Prakash <ravip...@altiscale.com>
Committed: Wed Jun 8 13:46:41 2016 -0700

--
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   | 10 +
 .../hdfs/server/common/HdfsServerConstants.java |  1 -
 .../hdfs/server/namenode/FSNamesystem.java  | 42 
 .../hdfs/server/namenode/LeaseManager.java  | 21 --
 .../src/main/resources/hdfs-default.xml | 18 +
 .../hdfs/server/namenode/TestLeaseManager.java  | 24 ++-
 6 files changed, 94 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9c5f7f29/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index abacfaf..f3a4dcb 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -374,6 +374,16 @@ public class DFSConfigKeys extends CommonConfigurationKeys 
{
   public static final int DFS_NAMENODE_MAX_XATTR_SIZE_DEFAULT = 16384;
   public static final int DFS_NAMENODE_MAX_XATTR_SIZE_HARD_LIMIT = 32768;
 
+  public static final String  DFS_NAMENODE_LEASE_RECHECK_INTERVAL_MS_KEY =
+  "dfs.namenode.lease-recheck-interval-ms";
+  public static final long    DFS_NAMENODE_LEASE_RECHECK_INTERVAL_MS_DEFAULT =
+  2000;
+  public static final String
+  DFS_NAMENODE_MAX_LOCK_HOLD_TO_RELEASE_LEASE_MS_KEY =
+  "dfs.namenode.max-lock-hold-to-release-lease-ms";
+  public static final long
+  DFS_NAMENODE_MAX_LOCK_HOLD_TO_RELEASE_LEASE_MS_DEFAULT = 25;
+
   public static final String  DFS_UPGRADE_DOMAIN_FACTOR = 
"dfs.namenode.upgrade.domain.factor";
   public static final int DFS_UPGRADE_DOMAIN_FACTOR_DEFAULT = 
DFS_REPLICATION_DEFAULT;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9c5f7f29/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
index 2beb540..0b925f0 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
@@ -354,7 +354,6 @@ public interface HdfsServerConstants {
   }
   
   String NAMENODE_LEASE_HOLDER = "HDFS_NameNode";
-  long NAMENODE_LEASE_RECHECK_INTERVAL = 2000;
 
   String CRYPTO_XATTR_ENCRYPTION_ZONE =
   "raw.hdfs.crypto.encryption.zone";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9c5f7f29/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index 0147b48..bdf15bc 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -76,6 +76,10 @@ import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RETRY_CACHE_EXPI
 import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RETRY_CACHE_HEAP_PERCENT_DEFAULT;
 import static 
org.apache.hadoop.hdfs.DFSCon

hadoop git commit: HDFS-10220. A large number of expired leases can make namenode unresponsive and cause failover (Nicolas Fraison via raviprak)

2016-06-08 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 0af96a1c0 -> ae047655f


HDFS-10220. A large number of expired leases can make namenode unresponsive and 
cause failover (Nicolas Fraison via raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ae047655
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ae047655
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ae047655

Branch: refs/heads/trunk
Commit: ae047655f4355288406cd5396fb4e3ea7c307b14
Parents: 0af96a1
Author: Ravi Prakash <ravip...@altiscale.com>
Authored: Wed Jun 8 13:44:22 2016 -0700
Committer: Ravi Prakash <ravip...@altiscale.com>
Committed: Wed Jun 8 13:44:22 2016 -0700

--
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   | 10 +
 .../hdfs/server/common/HdfsServerConstants.java |  1 -
 .../hdfs/server/namenode/FSNamesystem.java  | 42 
 .../hdfs/server/namenode/LeaseManager.java  | 21 --
 .../src/main/resources/hdfs-default.xml | 18 +
 .../hdfs/server/namenode/TestLeaseManager.java  | 24 ++-
 6 files changed, 94 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ae047655/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index 19e1791..f18a6c6 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -397,6 +397,16 @@ public class DFSConfigKeys extends CommonConfigurationKeys 
{
   public static final int DFS_NAMENODE_MAX_XATTR_SIZE_DEFAULT = 16384;
   public static final int DFS_NAMENODE_MAX_XATTR_SIZE_HARD_LIMIT = 32768;
 
+  public static final String  DFS_NAMENODE_LEASE_RECHECK_INTERVAL_MS_KEY =
+  "dfs.namenode.lease-recheck-interval-ms";
+  public static final long    DFS_NAMENODE_LEASE_RECHECK_INTERVAL_MS_DEFAULT =
+  2000;
+  public static final String
+  DFS_NAMENODE_MAX_LOCK_HOLD_TO_RELEASE_LEASE_MS_KEY =
+  "dfs.namenode.max-lock-hold-to-release-lease-ms";
+  public static final long
+  DFS_NAMENODE_MAX_LOCK_HOLD_TO_RELEASE_LEASE_MS_DEFAULT = 25;
+
   public static final String  DFS_UPGRADE_DOMAIN_FACTOR = 
"dfs.namenode.upgrade.domain.factor";
   public static final int DFS_UPGRADE_DOMAIN_FACTOR_DEFAULT = 
DFS_REPLICATION_DEFAULT;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ae047655/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
index b2dda3c..3798394 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java
@@ -361,7 +361,6 @@ public interface HdfsServerConstants {
   }
   
   String NAMENODE_LEASE_HOLDER = "HDFS_NameNode";
-  long NAMENODE_LEASE_RECHECK_INTERVAL = 2000;
 
   String CRYPTO_XATTR_ENCRYPTION_ZONE =
   "raw.hdfs.crypto.encryption.zone";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ae047655/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index c9f2487..915ae97 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -76,6 +76,10 @@ import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RETRY_CACHE_EXPI
 import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RETRY_CACHE_HEAP_PERCENT_DEFAULT;
 import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RETRY_CACHE_HEAP_PERCENT_KEY;
 import static 
org.apache.hadoop.hdf

hadoop git commit: HADOOP-13051. Test for special characters in path being respected during globPaths. Contributed by Harsh J.

2016-05-05 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk bb62e0592 -> d8faf47f3


HADOOP-13051. Test for special characters in path being respected during 
globPaths. Contributed by Harsh J.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d8faf47f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d8faf47f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d8faf47f

Branch: refs/heads/trunk
Commit: d8faf47f32c7ace6ceeb55bbb584c2dbab38902f
Parents: bb62e05
Author: Ravi Prakash 
Authored: Thu May 5 14:17:38 2016 -0700
Committer: Ravi Prakash 
Committed: Thu May 5 14:18:39 2016 -0700

--
 .../java/org/apache/hadoop/fs/TestGlobPaths.java | 19 +++
 1 file changed, 19 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d8faf47f/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java
index 2803037..30778e6 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java
@@ -88,6 +88,25 @@ public class TestGlobPaths {
 }
   }
 
+  /**
+   * Test case to ensure that globs work on files with special characters.
+   * Tests with a file pair where one has a \r at end and other does not.
+   */
+  @Test
+  public void testCRInPathGlob() throws IOException {
+FileStatus[] statuses;
+Path d1 = new Path(USER_DIR, "dir1");
+Path fNormal = new Path(d1, "f1");
+Path fWithCR = new Path(d1, "f1\r");
+fs.mkdirs(d1);
+fs.createNewFile(fNormal);
+fs.createNewFile(fWithCR);
+statuses = fs.globStatus(new Path(d1, "f1*"));
+assertEquals("Expected both normal and CR-carrying files in result: ",
+2, statuses.length);
+cleanupDFS();
+  }
+
   @Test
   public void testMultiGlob() throws IOException {
 FileStatus[] status;


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



[1/2] hadoop git commit: HADOOP-12563. Updated utility (dtutil) to create/modify token files. Contributed by Matthew Paduano

2016-04-29 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 7da540d03 -> 2753185a0


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2753185a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tools/TestCommandShell.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tools/TestCommandShell.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tools/TestCommandShell.java
new file mode 100644
index 000..6067918
--- /dev/null
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/tools/TestCommandShell.java
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.tools;
+
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.tools.CommandShell;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestCommandShell {
+
+  public class Example extends CommandShell {
+public static final String EXAMPLE = "example";
+public static final String HELLO = "hello";
+public static final String HELLO_MSG = "hello is running";
+public static final String GOODBYE = "goodbye";
+public static final String GOODBYE_MSG = "goodbye is running";
+
+public String[] savedArgs = null;
+
+@Override
+protected int init(String[] args) throws Exception {
+  String command = args[0];
+  if (command.equals(HELLO)) {
+setSubCommand(new Hello());
+  } else if (command.equals(GOODBYE)) {
+setSubCommand(new Goodbye());
+  } else{
+return 1;
+  }
+  savedArgs = args;
+  return 0;
+}
+
+public String getCommandUsage() {
+  return EXAMPLE;
+}
+
+public class Hello extends SubCommand {
+  public static final String HELLO_USAGE = EXAMPLE + " hello";
+  @Override
+  public boolean validate() {
+return savedArgs.length == 1;
+  }
+  @Override
+  public void execute() throws Exception {
+System.out.println(HELLO_MSG);
+  }
+  @Override
+  public String getUsage() {
+return HELLO_USAGE;
+  }
+}
+
+public class Goodbye extends SubCommand {
+  public static final String GOODBYE_USAGE = EXAMPLE + " goodbye";
+  @Override
+  public void execute() throws Exception {
+System.out.println(GOODBYE_MSG);
+  }
+  @Override
+  public String getUsage() {
+return GOODBYE_USAGE;
+  }
+}
+  }
+
+  private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
+
+  private String outMsg(String message) {
+return "OUT:\n" + outContent.toString() + "\n" + message;
+  }
+
+  @Before
+  public void setup() throws Exception {
+System.setOut(new PrintStream(outContent));
+  }
+
+  @Test
+  public void testCommandShellExample() throws Exception {
+Example ex = new Example();
+ex.setConf(new Configuration());
+int rc = 0;
+
+outContent.reset();
+String[] args1 = {"hello"};
+rc = ex.run(args1);
+assertEquals(outMsg("test exit code - normal hello"), 0, rc);
+assertTrue(outMsg("test normal hello message"),
+   outContent.toString().contains(Example.HELLO_MSG));
+
+outContent.reset();
+String[] args2 = {"hello", "x"};
+rc = ex.run(args2);
+assertEquals(outMsg("test exit code - bad hello"), 1, rc);
+assertTrue(outMsg("test bad hello message"),
+   outContent.toString().contains(Example.Hello.HELLO_USAGE));
+
+outContent.reset();
+String[] args3 = {"goodbye"};
+rc = ex.run(args3);
+assertEquals(outMsg("test exit code - normal goodbye"), 0, rc);
+assertTrue(outMsg("test normal goodbye message"),
+   outContent.toString().contains(Example.GOODBYE_MSG));
+  }
+}
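
The Example class above exercises the whole CommandShell contract: init() picks a SubCommand, validate() gates it, and execute() runs it. A minimal sketch of wiring such a shell into an entry point, assuming the Tool-style run(String[]) and setConf() methods the test itself calls (the class name ExampleMain is hypothetical):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.tools.TestCommandShell;

public class ExampleMain {
  public static void main(String[] args) throws Exception {
    // Example is a non-static inner class of the test, hence the outer instance.
    TestCommandShell.Example shell = new TestCommandShell().new Example();
    shell.setConf(new Configuration());
    // run() returns 0 on success and non-zero for a rejected or invalid subcommand.
    System.exit(shell.run(args));
  }
}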

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2753185a/hadoop-common-project/hadoop-common/src/test/resources/META-INF/services/org.apache.hadoop.security.token.DtFetcher

[2/2] hadoop git commit: HADOOP-12563. Updated utility (dtutil) to create/modify token files. Contributed by Matthew Paduano

2016-04-29 Thread raviprak
HADOOP-12563. Updated utility (dtutil) to create/modify token files. 
Contributed by Matthew Paduano


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2753185a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2753185a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2753185a

Branch: refs/heads/trunk
Commit: 2753185a010e70f8d9539f42151c79177781122d
Parents: 7da540d
Author: Ravi Prakash 
Authored: Fri Apr 29 22:32:27 2016 -0700
Committer: Ravi Prakash 
Committed: Fri Apr 29 22:32:27 2016 -0700

--
 .../hadoop-common/src/main/bin/hadoop   |   4 +
 .../org/apache/hadoop/security/Credentials.java | 169 +++---
 .../apache/hadoop/security/token/DtFetcher.java |  41 +++
 .../hadoop/security/token/DtFileOperations.java | 271 +++
 .../hadoop/security/token/DtUtilShell.java  | 326 +++
 .../org/apache/hadoop/security/token/Token.java |  94 --
 .../org/apache/hadoop/tools/CommandShell.java   | 114 +++
 .../hadoop-common/src/main/proto/Security.proto |  11 +
 .../src/site/markdown/CommandsManual.md |  19 ++
 .../apache/hadoop/security/TestCredentials.java | 246 --
 .../hadoop/security/token/TestDtFetcher.java|  41 +++
 .../hadoop/security/token/TestDtUtilShell.java  | 264 +++
 .../apache/hadoop/tools/TestCommandShell.java   | 128 
 .../org.apache.hadoop.security.token.DtFetcher  |  14 +
 .../org/apache/hadoop/hdfs/HdfsDtFetcher.java   |  82 +
 .../apache/hadoop/hdfs/SWebHdfsDtFetcher.java   |  39 +++
 .../apache/hadoop/hdfs/WebHdfsDtFetcher.java|  39 +++
 .../hdfs/tools/DelegationTokenFetcher.java  |   3 +-
 .../org.apache.hadoop.security.token.DtFetcher  |  18 +
 19 files changed, 1820 insertions(+), 103 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2753185a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
--
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop 
b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
index 0756987..23fa9c7 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
@@ -33,6 +33,7 @@ function hadoop_usage
   hadoop_add_subcommand "daemonlog" "get/set the log level for each daemon"
   hadoop_add_subcommand "distch" "distributed metadata changer"
   hadoop_add_subcommand "distcp" "copy file or directories recursively"
+  hadoop_add_subcommand "dtutil" "operations related to delegation tokens"
   hadoop_add_subcommand "envvars" "display computed Hadoop environment 
variables"
   hadoop_add_subcommand "fs" "run a generic filesystem user client"
   hadoop_add_subcommand "jar " "run a jar file. NOTE: please use \"yarn 
jar\" to launch YARN applications, not this command."
@@ -139,6 +140,9 @@ case ${COMMAND} in
 CLASS=org.apache.hadoop.tools.DistCp
 hadoop_add_to_classpath_tools hadoop-distcp
   ;;
+  dtutil)
+CLASS=org.apache.hadoop.security.token.DtUtilShell
+  ;;
   envvars)
 echo "JAVA_HOME='${JAVA_HOME}'"
 echo "HADOOP_COMMON_HOME='${HADOOP_COMMON_HOME}'"

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2753185a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
index e6b8722..977ccb5 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.security;
 
+import com.google.protobuf.ByteString;
+
 import java.io.BufferedInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
@@ -25,6 +27,7 @@ import java.io.DataOutput;
 import java.io.DataOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
@@ -47,9 +50,11 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.security.proto.SecurityProtos.CredentialsKVProto;
+import org.apache.hadoop.security.proto.SecurityProtos.CredentialsProto;
 
 /**
- * A class that provides the facilities of reading and writing 
+ * A class that provides the facilities of 

hadoop git commit: HDFS-9525. hadoop utilities need to support provided delegation tokens. Contributed by HeeSoo Kim

2016-04-23 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 96111caca -> 88e1427a0


HDFS-9525. hadoop utilities need to support provided delegation tokens. 
Contributed by HeeSoo Kim


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/88e1427a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/88e1427a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/88e1427a

Branch: refs/heads/branch-2
Commit: 88e1427a036fb0d7b7dd9cd333fedcf1d63f3ad7
Parents: 96111ca
Author: Ravi Prakash 
Authored: Sat Apr 23 20:50:56 2016 -0700
Committer: Ravi Prakash 
Committed: Sat Apr 23 20:50:56 2016 -0700

--
 .../fs/CommonConfigurationKeysPublic.java   |  3 ++
 .../hadoop/security/UserGroupInformation.java   | 22 +
 .../src/main/resources/core-default.xml |  6 +++
 .../security/TestUserGroupInformation.java  | 48 +++-
 .../hadoop/hdfs/web/WebHdfsFileSystem.java  | 14 +++---
 .../hdfs/web/resources/DelegationParam.java |  5 +-
 .../apache/hadoop/hdfs/web/TestWebHdfsUrl.java  |  5 +-
 7 files changed, 89 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/88e1427a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index 0baca07..ca17f8d 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -316,6 +316,9 @@ public class CommonConfigurationKeysPublic {
   /** See core-default.xml */
   public static final String HADOOP_SECURITY_DNS_NAMESERVER_KEY =
 "hadoop.security.dns.nameserver";
+  /** See core-default.xml */
+  public static final String HADOOP_TOKEN_FILES =
+  "hadoop.token.files";
 
   @Deprecated
   /** Only used by HttpServer. */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/88e1427a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
index 90d396f..2ea80dd 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.security;
 
 import static 
org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS;
+import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_TOKEN_FILES;
 import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
 import java.io.File;
@@ -66,6 +67,7 @@ import 
org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -811,6 +813,26 @@ public class UserGroupInformation {
   }
   loginUser = proxyUser == null ? realUser : createProxyUser(proxyUser, 
realUser);
 
+  String tokenFileLocation = System.getProperty(HADOOP_TOKEN_FILES);
+  if (tokenFileLocation == null) {
+tokenFileLocation = conf.get(HADOOP_TOKEN_FILES);
+  }
+  if (tokenFileLocation != null) {
+for (String tokenFileName:
+ StringUtils.getTrimmedStrings(tokenFileLocation)) {
+  if (tokenFileName.length() > 0) {
+File tokenFile = new File(tokenFileName);
+if (tokenFile.exists() && tokenFile.isFile()) {
+  Credentials cred = Credentials.readTokenStorageFile(
+  tokenFile, conf);
+  loginUser.addCredentials(cred);
+} else {
+  LOG.info("tokenFile("+tokenFileName+") does not exist");
+}
+  }
+}
+  }
+
   String fileLocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
   if (fileLocation != null) {
 // Load the token storage file and put all of the tokens into the
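
The block added above reads hadoop.token.files first from a JVM system property and then from the configuration, splits it on commas, and merges each existing token file into the login user's credentials (missing files are only logged). A minimal sketch of exercising that path, with hypothetical file names:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class TokenFileLogin {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Comma-separated list; entries are trimmed by getTrimmedStrings() above.
    conf.set("hadoop.token.files", "/tmp/a.token,/tmp/b.token");
    UserGroupInformation.setConfiguration(conf);
    // getLoginUser() runs the patched login path and absorbs the token files;
    // files that do not exist are logged and skipped rather than failing login.
    UserGroupInformation ugi = UserGroupInformation.getLoginUser();
    System.out.println("tokens loaded: " + ugi.getCredentials().numberOfTokens());
  }
}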


[1/2] hadoop git commit: Revert "HADOOP-12563. Updated utility (dtutil) to create/modify token files. Contributed by Matthew Paduano"

2016-04-22 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 5d4255a80 -> d6402fade


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d6402fad/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.security.token.DtFetcher
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.security.token.DtFetcher
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.security.token.DtFetcher
deleted file mode 100644
index a4217fb..000
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.security.token.DtFetcher
+++ /dev/null
@@ -1,18 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-org.apache.hadoop.hdfs.HdfsDtFetcher
-org.apache.hadoop.hdfs.WebHdfsDtFetcher
-org.apache.hadoop.hdfs.SWebHdfsDtFetcher



[2/2] hadoop git commit: Revert "HADOOP-12563. Updated utility (dtutil) to create/modify token files. Contributed by Matthew Paduano"

2016-04-22 Thread raviprak
Revert "HADOOP-12563. Updated utility (dtutil) to create/modify token files. 
Contributed by Matthew Paduano"

This reverts commit 4838b735f0d472765f402fe6b1c8b6ce85b9fbf1.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d6402fad
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d6402fad
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d6402fad

Branch: refs/heads/trunk
Commit: d6402fadedade4289949ba9f70f7a0bfb9bca140
Parents: 5d4255a
Author: Ravi Prakash 
Authored: Fri Apr 22 10:17:20 2016 -0700
Committer: Ravi Prakash 
Committed: Fri Apr 22 10:17:20 2016 -0700

--
 .../hadoop-common/src/main/bin/hadoop   |   4 -
 .../org/apache/hadoop/security/Credentials.java | 169 +++---
 .../apache/hadoop/security/token/DtFetcher.java |  41 ---
 .../hadoop/security/token/DtFileOperations.java | 271 ---
 .../hadoop/security/token/DtUtilShell.java  | 326 ---
 .../org/apache/hadoop/security/token/Token.java |  94 ++
 .../org/apache/hadoop/tools/CommandShell.java   | 114 ---
 .../hadoop-common/src/main/proto/Security.proto |  11 -
 .../src/site/markdown/CommandsManual.md |  19 --
 .../hadoop/security/token/TestDtFetcher.java|  41 ---
 .../hadoop/security/token/TestDtUtilShell.java  | 264 ---
 .../apache/hadoop/tools/TestCommandShell.java   | 128 
 .../org.apache.hadoop.security.token.DtFetcher  |  14 -
 .../org/apache/hadoop/hdfs/HdfsDtFetcher.java   |  82 -
 .../apache/hadoop/hdfs/SWebHdfsDtFetcher.java   |  39 ---
 .../apache/hadoop/hdfs/WebHdfsDtFetcher.java|  39 ---
 .../hdfs/tools/DelegationTokenFetcher.java  |   3 +-
 .../org.apache.hadoop.security.token.DtFetcher  |  18 -
 18 files changed, 77 insertions(+), 1600 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d6402fad/hadoop-common-project/hadoop-common/src/main/bin/hadoop
--
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop 
b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
index 23fa9c7..0756987 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
@@ -33,7 +33,6 @@ function hadoop_usage
   hadoop_add_subcommand "daemonlog" "get/set the log level for each daemon"
   hadoop_add_subcommand "distch" "distributed metadata changer"
   hadoop_add_subcommand "distcp" "copy file or directories recursively"
-  hadoop_add_subcommand "dtutil" "operations related to delegation tokens"
   hadoop_add_subcommand "envvars" "display computed Hadoop environment 
variables"
   hadoop_add_subcommand "fs" "run a generic filesystem user client"
   hadoop_add_subcommand "jar " "run a jar file. NOTE: please use \"yarn 
jar\" to launch YARN applications, not this command."
@@ -140,9 +139,6 @@ case ${COMMAND} in
 CLASS=org.apache.hadoop.tools.DistCp
 hadoop_add_to_classpath_tools hadoop-distcp
   ;;
-  dtutil)
-CLASS=org.apache.hadoop.security.token.DtUtilShell
-  ;;
   envvars)
 echo "JAVA_HOME='${JAVA_HOME}'"
 echo "HADOOP_COMMON_HOME='${HADOOP_COMMON_HOME}'"

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d6402fad/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
index 662eb3e..e6b8722 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.security;
 
-import com.google.protobuf.ByteString;
-
 import java.io.BufferedInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
@@ -27,7 +25,6 @@ import java.io.DataOutput;
 import java.io.DataOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
@@ -50,11 +47,9 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.security.proto.SecurityProtos.CredentialsKVProto;
-import org.apache.hadoop.security.proto.SecurityProtos.CredentialsProto;
 
 /**
- * A class that provides the facilities of reading and writing
+ * A class that provides the facilities 

[2/2] hadoop git commit: HADOOP-12563. Updated utility (dtutil) to create/modify token files. Contributed by Matthew Paduano

2016-04-21 Thread raviprak
HADOOP-12563. Updated utility (dtutil) to create/modify token files. 
Contributed by Matthew Paduano


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4838b735
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4838b735
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4838b735

Branch: refs/heads/trunk
Commit: 4838b735f0d472765f402fe6b1c8b6ce85b9fbf1
Parents: 95a5046
Author: Ravi Prakash 
Authored: Thu Apr 21 11:32:39 2016 -0700
Committer: Ravi Prakash 
Committed: Thu Apr 21 11:32:39 2016 -0700

--
 .../hadoop-common/src/main/bin/hadoop   |   4 +
 .../org/apache/hadoop/security/Credentials.java | 169 +++---
 .../apache/hadoop/security/token/DtFetcher.java |  41 +++
 .../hadoop/security/token/DtFileOperations.java | 271 +++
 .../hadoop/security/token/DtUtilShell.java  | 326 +++
 .../org/apache/hadoop/security/token/Token.java |  94 --
 .../org/apache/hadoop/tools/CommandShell.java   | 114 +++
 .../hadoop-common/src/main/proto/Security.proto |  11 +
 .../src/site/markdown/CommandsManual.md |  19 ++
 .../hadoop/security/token/TestDtFetcher.java|  41 +++
 .../hadoop/security/token/TestDtUtilShell.java  | 264 +++
 .../apache/hadoop/tools/TestCommandShell.java   | 128 
 .../org.apache.hadoop.security.token.DtFetcher  |  14 +
 .../org/apache/hadoop/hdfs/HdfsDtFetcher.java   |  82 +
 .../apache/hadoop/hdfs/SWebHdfsDtFetcher.java   |  39 +++
 .../apache/hadoop/hdfs/WebHdfsDtFetcher.java|  39 +++
 .../hdfs/tools/DelegationTokenFetcher.java  |   3 +-
 .../org.apache.hadoop.security.token.DtFetcher  |  18 +
 18 files changed, 1600 insertions(+), 77 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4838b735/hadoop-common-project/hadoop-common/src/main/bin/hadoop
--
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop 
b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
index 0756987..23fa9c7 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
@@ -33,6 +33,7 @@ function hadoop_usage
   hadoop_add_subcommand "daemonlog" "get/set the log level for each daemon"
   hadoop_add_subcommand "distch" "distributed metadata changer"
   hadoop_add_subcommand "distcp" "copy file or directories recursively"
+  hadoop_add_subcommand "dtutil" "operations related to delegation tokens"
   hadoop_add_subcommand "envvars" "display computed Hadoop environment 
variables"
   hadoop_add_subcommand "fs" "run a generic filesystem user client"
   hadoop_add_subcommand "jar " "run a jar file. NOTE: please use \"yarn 
jar\" to launch YARN applications, not this command."
@@ -139,6 +140,9 @@ case ${COMMAND} in
 CLASS=org.apache.hadoop.tools.DistCp
 hadoop_add_to_classpath_tools hadoop-distcp
   ;;
+  dtutil)
+CLASS=org.apache.hadoop.security.token.DtUtilShell
+  ;;
   envvars)
 echo "JAVA_HOME='${JAVA_HOME}'"
 echo "HADOOP_COMMON_HOME='${HADOOP_COMMON_HOME}'"

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4838b735/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
index e6b8722..662eb3e 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.security;
 
+import com.google.protobuf.ByteString;
+
 import java.io.BufferedInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
@@ -25,6 +27,7 @@ import java.io.DataOutput;
 import java.io.DataOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
@@ -47,9 +50,11 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.security.proto.SecurityProtos.CredentialsKVProto;
+import org.apache.hadoop.security.proto.SecurityProtos.CredentialsProto;
 
 /**
- * A class that provides the facilities of reading and writing 
+ * A class that provides the facilities of reading and writing
  * secret keys and Tokens.
  */
 

[1/2] [hadoop] Git Push Summary

2016-04-21 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 95a504660 -> 4838b735f


hadoop git commit: HDFS-10235. Last contact for Live Nodes should be relative time. Contributed by Brahma Reddy Battula.

2016-04-05 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 deef54b26 -> 7286c435c


HDFS-10235. Last contact for Live Nodes should be relative time. Contributed by 
Brahma Reddy Battula.

(cherry picked from commit 0cd320a8463efe19a6228f9fe14693aa37ac8a10)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7286c435
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7286c435
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7286c435

Branch: refs/heads/branch-2.8
Commit: 7286c435c46fdc049a1a025bee931c3dbb2f8303
Parents: deef54b
Author: Ravi Prakash 
Authored: Tue Apr 5 13:41:19 2016 -0700
Committer: Ravi Prakash 
Committed: Tue Apr 5 13:43:08 2016 -0700

--
 .../hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7286c435/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html
index 3d9ca42..a9c3304 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html
@@ -310,7 +310,7 @@
   {#LiveNodes}
   
 {name} ({xferaddr})
-{#helper_relative_time 
value="{lastContact}"/}
+{lastContact}s
 
   
 {capacity|fmt_bytes}



hadoop git commit: HDFS-10235. Last contact for Live Nodes should be relative time. Contributed by Brahma Reddy Battula.

2016-04-05 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 e98bb0279 -> 2ca6251a6


HDFS-10235. Last contact for Live Nodes should be relative time. Contributed by 
Brahma Reddy Battula.

(cherry picked from commit 0cd320a8463efe19a6228f9fe14693aa37ac8a10)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2ca6251a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2ca6251a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2ca6251a

Branch: refs/heads/branch-2
Commit: 2ca6251a6ae1acfed1c639e32c6a7e849b4d47a9
Parents: e98bb02
Author: Ravi Prakash 
Authored: Tue Apr 5 13:41:19 2016 -0700
Committer: Ravi Prakash 
Committed: Tue Apr 5 13:43:42 2016 -0700

--
 .../hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2ca6251a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html
index 3d9ca42..a9c3304 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/dfshealth.html
@@ -310,7 +310,7 @@
   {#LiveNodes}
   
 {name} ({xferaddr})
-{#helper_relative_time 
value="{lastContact}"/}
+{lastContact}s
 
   
 {capacity|fmt_bytes}



hadoop git commit: HDFS-9953. Download File from UI broken after pagination. Contributed by Brahma Reddy Battula

2016-03-14 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 e6c6f4dda -> 9e804d14c


HDFS-9953. Download File from UI broken after pagination. Contributed by Brahma 
Reddy Battula

(cherry picked from commit 51b7d8935af7487547d46693946b386409edc6c8)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9e804d14
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9e804d14
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9e804d14

Branch: refs/heads/branch-2.8
Commit: 9e804d14c94ad40b3de84cac15c645af096e64b9
Parents: e6c6f4d
Author: Ravi Prakash 
Authored: Mon Mar 14 14:32:42 2016 -0700
Committer: Ravi Prakash 
Committed: Mon Mar 14 14:33:29 2016 -0700

--
 .../src/main/webapps/hdfs/explorer.js   | 33 ++--
 1 file changed, 17 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e804d14/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
index baca798..102da9d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
@@ -281,21 +281,6 @@
   dust.render('explorer', base.push(d), function(err, out) {
 $('#panel').html(out);
 
-$('#table-explorer').dataTable( {
-  'lengthMenu': [ [25, 50, 100, -1], [25, 50, 100, "All"] ],
-  'columns': [
-{'searchable': false }, //Permissions
-null, //Owner
-null, //Group
-{ 'searchable': false, 'render': func_size_render}, //Size
-{ 'searchable': false, 'render': func_time_render}, //Last Modified
-{ 'searchable': false }, //Replication
-null, //Block Size
-null, //Name
-{ 'sortable' : false } //Trash
-  ],
-  "deferRender": true
-});
 
 $('.explorer-browse-links').click(function() {
   var type = $(this).attr('inode-type');
@@ -324,7 +309,23 @@
   var inode_name = $(this).closest('tr').attr('inode-path');
   var absolute_file_path = append_path(current_directory, inode_name);
   delete_path(inode_name, absolute_file_path);
-})
+});
+  
+  $('#table-explorer').dataTable( {
+  'lengthMenu': [ [25, 50, 100, -1], [25, 50, 100, "All"] ],
+  'columns': [
+  {'searchable': false }, //Permissions
+  null, //Owner
+  null, //Group
+  { 'searchable': false, 'render': func_size_render}, //Size
+  { 'searchable': false, 'render': func_time_render}, //Last 
Modified
+  { 'searchable': false }, //Replication
+  null, //Block Size
+  null, //Name
+  { 'sortable' : false } //Trash
+  ],
+  "deferRender": true
+  });
   });
 }).error(network_error_handler(url));
   }



hadoop git commit: HDFS-9953. Download File from UI broken after pagination. Contributed by Brahma Reddy Battula

2016-03-14 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 b4c869309 -> 51b7d8935


HDFS-9953. Download File from UI broken after pagination. Contributed by Brahma 
Reddy Battula


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/51b7d893
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/51b7d893
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/51b7d893

Branch: refs/heads/branch-2
Commit: 51b7d8935af7487547d46693946b386409edc6c8
Parents: b4c8693
Author: Ravi Prakash 
Authored: Mon Mar 14 14:32:42 2016 -0700
Committer: Ravi Prakash 
Committed: Mon Mar 14 14:32:42 2016 -0700

--
 .../src/main/webapps/hdfs/explorer.js   | 33 ++--
 1 file changed, 17 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/51b7d893/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
index baca798..102da9d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
@@ -281,21 +281,6 @@
   dust.render('explorer', base.push(d), function(err, out) {
 $('#panel').html(out);
 
-$('#table-explorer').dataTable( {
-  'lengthMenu': [ [25, 50, 100, -1], [25, 50, 100, "All"] ],
-  'columns': [
-{'searchable': false }, //Permissions
-null, //Owner
-null, //Group
-{ 'searchable': false, 'render': func_size_render}, //Size
-{ 'searchable': false, 'render': func_time_render}, //Last Modified
-{ 'searchable': false }, //Replication
-null, //Block Size
-null, //Name
-{ 'sortable' : false } //Trash
-  ],
-  "deferRender": true
-});
 
 $('.explorer-browse-links').click(function() {
   var type = $(this).attr('inode-type');
@@ -324,7 +309,23 @@
   var inode_name = $(this).closest('tr').attr('inode-path');
   var absolute_file_path = append_path(current_directory, inode_name);
   delete_path(inode_name, absolute_file_path);
-})
+});
+  
+  $('#table-explorer').dataTable( {
+  'lengthMenu': [ [25, 50, 100, -1], [25, 50, 100, "All"] ],
+  'columns': [
+  {'searchable': false }, //Permissions
+  null, //Owner
+  null, //Group
+  { 'searchable': false, 'render': func_size_render}, //Size
+  { 'searchable': false, 'render': func_time_render}, //Last 
Modified
+  { 'searchable': false }, //Replication
+  null, //Block Size
+  null, //Name
+  { 'sortable' : false } //Trash
+  ],
+  "deferRender": true
+  });
   });
 }).error(network_error_handler(url));
   }



hadoop git commit: Download File from UI broken after pagination. Contributed by Brahma Reddy Battula

2016-03-14 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 20d389ce6 -> bd5556ba1


Download File from UI broken after pagination. Contributed by Brahma Reddy 
Battula


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bd5556ba
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bd5556ba
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bd5556ba

Branch: refs/heads/trunk
Commit: bd5556ba124a5d9ec3e3f2041d96a2c445d69369
Parents: 20d389c
Author: Ravi Prakash 
Authored: Mon Mar 14 14:30:13 2016 -0700
Committer: Ravi Prakash 
Committed: Mon Mar 14 14:30:13 2016 -0700

--
 .../src/main/webapps/hdfs/explorer.js   | 33 ++--
 1 file changed, 17 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/bd5556ba/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
index baca798..102da9d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/webapps/hdfs/explorer.js
@@ -281,21 +281,6 @@
   dust.render('explorer', base.push(d), function(err, out) {
 $('#panel').html(out);
 
-$('#table-explorer').dataTable( {
-  'lengthMenu': [ [25, 50, 100, -1], [25, 50, 100, "All"] ],
-  'columns': [
-{'searchable': false }, //Permissions
-null, //Owner
-null, //Group
-{ 'searchable': false, 'render': func_size_render}, //Size
-{ 'searchable': false, 'render': func_time_render}, //Last Modified
-{ 'searchable': false }, //Replication
-null, //Block Size
-null, //Name
-{ 'sortable' : false } //Trash
-  ],
-  "deferRender": true
-});
 
 $('.explorer-browse-links').click(function() {
   var type = $(this).attr('inode-type');
@@ -324,7 +309,23 @@
   var inode_name = $(this).closest('tr').attr('inode-path');
   var absolute_file_path = append_path(current_directory, inode_name);
   delete_path(inode_name, absolute_file_path);
-})
+});
+  
+  $('#table-explorer').dataTable( {
+  'lengthMenu': [ [25, 50, 100, -1], [25, 50, 100, "All"] ],
+  'columns': [
+  {'searchable': false }, //Permissions
+  null, //Owner
+  null, //Group
+  { 'searchable': false, 'render': func_size_render}, //Size
+  { 'searchable': false, 'render': func_time_render}, //Last 
Modified
+  { 'searchable': false }, //Replication
+  null, //Block Size
+  null, //Name
+  { 'sortable' : false } //Trash
+  ],
+  "deferRender": true
+  });
   });
 }).error(network_error_handler(url));
   }



hadoop git commit: HADOOP-12696. Add tests for S3FileSystem Contract. Contributed by Matt Paduano

2016-01-19 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 5dc2e78c9 -> a359dc87d


HADOOP-12696. Add tests for S3FileSystem Contract. Contributed by Matt Paduano

(cherry picked from commit 1acc509b45d58c0eb7e83ea1ba13169410be0dbe)
(cherry picked from commit 2cbb8bbd728aab6d438c3051c970e4b4475b8e68)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a359dc87
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a359dc87
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a359dc87

Branch: refs/heads/branch-2.8
Commit: a359dc87d431573b2783dbe4af25e00c836096aa
Parents: 5dc2e78
Author: Ravi Prakash <ravip...@altiscale.com>
Authored: Tue Jan 19 13:57:08 2016 -0800
Committer: Ravi Prakash <ravip...@altiscale.com>
Committed: Tue Jan 19 14:01:51 2016 -0800

--
 hadoop-common-project/hadoop-common/CHANGES.txt |   2 +
 .../src/site/markdown/filesystem/testing.md |   4 +-
 .../fs/contract/AbstractContractSeekTest.java   |   8 +-
 .../hadoop/fs/contract/ContractOptions.java |   6 ++
 .../org/apache/hadoop/fs/s3/S3FileSystem.java   |  58 +++
 .../org/apache/hadoop/fs/s3/S3InputStream.java  |  17 +--
 .../hadoop/fs/contract/s3/S3Contract.java   |  40 +++
 .../fs/contract/s3/TestS3ContractCreate.java|  32 ++
 .../fs/contract/s3/TestS3ContractDelete.java|  31 ++
 .../fs/contract/s3/TestS3ContractMkdir.java |  32 ++
 .../fs/contract/s3/TestS3ContractOpen.java  |  32 ++
 .../fs/contract/s3/TestS3ContractRename.java|  32 ++
 .../fs/contract/s3/TestS3ContractRootDir.java   |  34 ++
 .../fs/contract/s3/TestS3ContractSeek.java  |  32 ++
 .../src/test/resources/contract/s3.xml  | 104 +++
 15 files changed, 430 insertions(+), 34 deletions(-)
--
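
Each TestS3Contract* class in the listing binds one abstract contract suite from hadoop-common to the S3 filesystem. A minimal sketch of the presumed shape of one of them, following the established fs.contract pattern (AbstractContractOpenTest and createContract() are the real base hooks; the S3Contract constructor signature is an assumption):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractOpenTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
import org.apache.hadoop.fs.contract.s3.S3Contract;

public class TestS3ContractOpen extends AbstractContractOpenTest {
  @Override
  protected AbstractFSContract createContract(Configuration conf) {
    // S3Contract presumably points the suite at src/test/resources/contract/s3.xml
    // and at the fs.contract.test.fs.s3 URL from contract-test-options.xml.
    return new S3Contract(conf);
  }
}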


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a359dc87/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 0c2cf41..7406ee7 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -380,6 +380,8 @@ Release 2.8.0 - UNRELEASED
 HADOOP-12604. Exception may be swallowed in KMSClientProvider.
 (Yongjun Zhang)
 
+HADOOP-12696. Add tests for S3Filesystem Contract (Matt Paduano via 
raviprak)
+
   OPTIMIZATIONS
 
 HADOOP-11785. Reduce the number of listStatus operation in distcp

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a359dc87/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/testing.md
--
diff --git 
a/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/testing.md 
b/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/testing.md
index 444fb60..99561cd 100644
--- 
a/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/testing.md
+++ 
b/hadoop-common-project/hadoop-common/src/site/markdown/filesystem/testing.md
@@ -190,7 +190,7 @@ tests against remote FileSystems that require login details 
require usernames/ID
 
 All these details MUST be required to be placed in the file 
`src/test/resources/contract-test-options.xml`, and your SCM tools configured 
to never commit this file to subversion, git or
 equivalent. Furthermore, the build MUST be configured to never bundle this 
file in any `-test` artifacts generated. The Hadoop build does this, excluding 
`src/test/**/*.xml` from the JAR files.
-
+In addition, `src/test/resources/auth-keys.xml` will need to be created.  It 
can be a copy of `contract-test-options.xml`.
 The `AbstractFSContract` class automatically loads this resource file if 
present; specific keys for specific test cases can be added.
 
 As an example, here are what S3N test keys look like:
@@ -214,7 +214,7 @@ As an example, here are what S3N test keys look like:
 
 The `AbstractBondedFSContract` automatically skips a test suite if the 
FileSystem URL is not defined in the property `fs.contract.test.fs.%s`, where 
`%s` matches the schema name of the FileSystem.
 
-
+When running the tests `maven.test.skip` will need to be turned off since it 
is true by default on these tests.  This can be done with a command like `mvn 
test -Ptests-on`.
 
 ### Important: passing the tests does not guarantee compatibility
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a359dc87/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractSeekTest.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/AbstractContractSe

hadoop git commit: HADOOP-12689. S3 filesystem operations stopped working correctly

2016-01-15 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 4c8131b1b -> 5468c40fe


HADOOP-12689. S3 filesystem operations stopped working correctly

(cherry picked from commit 2d16f40dab291a29b3fc005221b12fd587615d4e)
(cherry picked from commit 6330683778d684d539ae756226f62728776cc1d7)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5468c40f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5468c40f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5468c40f

Branch: refs/heads/branch-2.8
Commit: 5468c40fe7553d1b5209ce90e8a553a3581d1d47
Parents: 4c8131b
Author: Ravi Prakash <ravip...@apache.org>
Authored: Tue Jan 5 23:26:03 2016 -0800
Committer: Ravi Prakash <ravip...@altiscale.com>
Committed: Fri Jan 15 15:19:15 2016 -0800

--
 hadoop-common-project/hadoop-common/CHANGES.txt  | 3 +++
 .../java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java  | 8 ++--
 2 files changed, 9 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5468c40f/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 4420b6e..bd709b2 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -931,6 +931,9 @@ Release 2.8.0 - UNRELEASED
 HADOOP-12700. Remove unused import in TestCompressorDecompressor.java.
 (John Zhuge via aajisaka)
 
+HADOOP-12689. S3 filesystem operations stopped working correctly
+(Matt Paduano via raviprak)
+
 Release 2.7.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5468c40f/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
--
diff --git 
a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
 
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
index 901f89b..e5387f3 100644
--- 
a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
+++ 
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
@@ -167,7 +167,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
   return object.getDataInputStream();
 } catch (S3ServiceException e) {
   if ("NoSuchKey".equals(e.getS3ErrorCode())) {
-throw new IOException(key + " doesn't exist");
+return null;
   }
   if (e.getCause() instanceof IOException) {
 throw (IOException) e.getCause();
@@ -229,7 +229,11 @@ class Jets3tFileSystemStore implements FileSystemStore {
 OutputStream out = null;
 try {
   fileBlock = newBackupFile();
-  in = get(blockToKey(block), byteRangeStart);
+  String blockId = blockToKey(block);
+  in = get(blockId, byteRangeStart);
+  if (in == null) {
+throw new IOException("Block missing from S3 store: " + blockId);
+  }
   out = new BufferedOutputStream(new FileOutputStream(fileBlock));
   byte[] buf = new byte[bufferSize];
   int numRead;



hadoop git commit: HADOOP-12689. S3 filesystem operations stopped working correctly

2016-01-05 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 745885488 -> 633068377


HADOOP-12689. S3 filesystem operations stopped working correctly

(cherry picked from commit 2d16f40dab291a29b3fc005221b12fd587615d4e)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/63306837
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/63306837
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/63306837

Branch: refs/heads/branch-2
Commit: 6330683778d684d539ae756226f62728776cc1d7
Parents: 7458854
Author: Ravi Prakash <ravip...@apache.org>
Authored: Tue Jan 5 23:26:03 2016 -0800
Committer: Ravi Prakash <ravip...@apache.org>
Committed: Tue Jan 5 23:43:17 2016 -0800

--
 hadoop-common-project/hadoop-common/CHANGES.txt  | 3 +++
 .../java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java  | 8 ++--
 2 files changed, 9 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/63306837/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 45ed2c4..9ae15f2 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -923,6 +923,9 @@ Release 2.8.0 - UNRELEASED
 HADOOP-12608. Fix exception message in WASB when connecting with anonymous
 credential. (Dushyanth via xyao)
 
+HADOOP-12689. S3 filesystem operations stopped working correctly
+(Matt Paduano via raviprak)
+
 Release 2.7.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/63306837/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
--
diff --git 
a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
 
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
index 901f89b..e5387f3 100644
--- 
a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
+++ 
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
@@ -167,7 +167,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
   return object.getDataInputStream();
 } catch (S3ServiceException e) {
   if ("NoSuchKey".equals(e.getS3ErrorCode())) {
-throw new IOException(key + " doesn't exist");
+return null;
   }
   if (e.getCause() instanceof IOException) {
 throw (IOException) e.getCause();
@@ -229,7 +229,11 @@ class Jets3tFileSystemStore implements FileSystemStore {
 OutputStream out = null;
 try {
   fileBlock = newBackupFile();
-  in = get(blockToKey(block), byteRangeStart);
+  String blockId = blockToKey(block);
+  in = get(blockId, byteRangeStart);
+  if (in == null) {
+throw new IOException("Block missing from S3 store: " + blockId);
+  }
   out = new BufferedOutputStream(new FileOutputStream(fileBlock));
   byte[] buf = new byte[bufferSize];
   int numRead;



hadoop git commit: HADOOP-12689. S3 filesystem operations stopped working correctly

2016-01-05 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk c52b407cb -> 2d16f40da


HADOOP-12689. S3 filesystem operations stopped working correctly


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2d16f40d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2d16f40d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2d16f40d

Branch: refs/heads/trunk
Commit: 2d16f40dab291a29b3fc005221b12fd587615d4e
Parents: c52b407
Author: Ravi Prakash <ravip...@apache.org>
Authored: Tue Jan 5 23:26:03 2016 -0800
Committer: Ravi Prakash <ravip...@apache.org>
Committed: Tue Jan 5 23:26:03 2016 -0800

--
 hadoop-common-project/hadoop-common/CHANGES.txt  | 3 +++
 .../java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java  | 8 ++--
 2 files changed, 9 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2d16f40d/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 863d047..1b867f0 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -1550,6 +1550,9 @@ Release 2.8.0 - UNRELEASED
 HADOOP-12682. Fix TestKMS#testKMSRestart* failure.
 (Wei-Chiu Chuang via xyao)
 
+HADOOP-12689. S3 filesystem operations stopped working correctly
+(Matt Paduano via raviprak)
+
 Release 2.7.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2d16f40d/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
--
diff --git 
a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
 
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
index 5f46aea..a186c14 100644
--- 
a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
+++ 
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3/Jets3tFileSystemStore.java
@@ -173,7 +173,7 @@ class Jets3tFileSystemStore implements FileSystemStore {
   return object.getDataInputStream();
 } catch (S3ServiceException e) {
   if ("NoSuchKey".equals(e.getS3ErrorCode())) {
-throw new IOException(key + " doesn't exist");
+return null;
   }
   if (e.getCause() instanceof IOException) {
 throw (IOException) e.getCause();
@@ -241,7 +241,11 @@ class Jets3tFileSystemStore implements FileSystemStore {
 OutputStream out = null;
 try {
   fileBlock = newBackupFile();
-  in = get(blockToKey(block), byteRangeStart);
+  String blockId = blockToKey(block);
+  in = get(blockId, byteRangeStart);
+  if (in == null) {
+throw new IOException("Block missing from S3 store: " + blockId);
+  }
   out = new BufferedOutputStream(new FileOutputStream(fileBlock));
   byte[] buf = new byte[bufferSize];
   int numRead;



hadoop git commit: Revert "HDFS-8344. NameNode doesn't recover lease for files with missing blocks (raviprak)"

2015-07-21 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 3dd113fa7 -> b728edce0


Revert "HDFS-8344. NameNode doesn't recover lease for files with missing blocks 
(raviprak)"

This reverts commit 3d58c7a7006991b46efe8a8f60b244f4f85b481a.

Conflicts:
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b728edce
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b728edce
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b728edce

Branch: refs/heads/branch-2
Commit: b728edce0f904c15132726587c8ee92f901347bf
Parents: 3dd113f
Author: Ravi Prakash <ravip...@altiscale.com>
Authored: Tue Jul 21 11:30:41 2015 -0700
Committer: Ravi Prakash <ravip...@altiscale.com>
Committed: Tue Jul 21 11:30:41 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  4 -
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   |  3 -
 .../BlockInfoUnderConstruction.java | 19 +
 .../server/blockmanagement/BlockManager.java| 14 +---
 .../hdfs/server/namenode/FSNamesystem.java  | 10 ---
 .../src/main/resources/hdfs-default.xml |  9 ---
 .../apache/hadoop/hdfs/TestLeaseRecovery.java   | 78 
 7 files changed, 4 insertions(+), 133 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b728edce/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 686ec30..1cb74ee 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -716,12 +716,8 @@ Release 2.8.0 - UNRELEASED
 HDFS-8778. TestBlockReportRateLimiting#testLeaseExpiration can deadlock.
 (Arpit Agarwal)
 
-HDFS-8344. NameNode doesn't recover lease for files with missing blocks
-(raviprak)
-
 HDFS-7582. Enforce maximum number of ACL entries separately per access
 and default. (vinayakumarb)
-
 Release 2.7.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b728edce/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index 37e2d3d..0fafade 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -440,9 +440,6 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final long    DFS_CACHEREPORT_INTERVAL_MSEC_DEFAULT = 10 * 1000;
   public static final String  DFS_BLOCK_INVALIDATE_LIMIT_KEY = "dfs.block.invalidate.limit";
   public static final int     DFS_BLOCK_INVALIDATE_LIMIT_DEFAULT = 1000;
-  public static final String  DFS_BLOCK_UC_MAX_RECOVERY_ATTEMPTS = "dfs.block.uc.max.recovery.attempts";
-  public static final int     DFS_BLOCK_UC_MAX_RECOVERY_ATTEMPTS_DEFAULT = 5;
-
   public static final String  DFS_DEFAULT_MAX_CORRUPT_FILES_RETURNED_KEY = "dfs.corruptfilesreturned.max";
   public static final int     DFS_DEFAULT_MAX_CORRUPT_FILES_RETURNED = 500;
   /* Maximum number of blocks to process for initializing replication queues */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b728edce/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
index 28f1633..9cd3987 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
@@ -60,11 +61,6 @@ public abstract class BlockInfoUnderConstruction extends 
BlockInfo {
*/
   protected Block truncateBlock;
 
-  /** The number of times all replicas will be used to attempt recovery before
-   * giving up and marking the block under construction missing.
-   */
-  private int recoveryAttemptsBeforeMarkingBlockMissing;

hadoop git commit: Revert HDFS-8344. NameNode doesn't recover lease for files with missing blocks (raviprak)

2015-07-21 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 68d1f4bfe -> 5137b388f


Revert HDFS-8344. NameNode doesn't recover lease for files with missing blocks 
(raviprak)

This reverts commit e4f756260f16156179ba4adad974ec92279c2fac.

Conflicts:
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5137b388
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5137b388
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5137b388

Branch: refs/heads/trunk
Commit: 5137b388fc9d4d716f780daf6d04292feeb9df96
Parents: 68d1f4b
Author: Ravi Prakash ravip...@altiscale.com
Authored: Tue Jul 21 11:29:35 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Tue Jul 21 11:29:35 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 -
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   |  3 -
 .../BlockInfoUnderConstruction.java | 19 +
 .../server/blockmanagement/BlockManager.java| 14 +---
 .../hdfs/server/namenode/FSNamesystem.java  | 10 ---
 .../src/main/resources/hdfs-default.xml |  9 ---
 .../apache/hadoop/hdfs/TestLeaseRecovery.java   | 78 
 7 files changed, 4 insertions(+), 132 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5137b388/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 223baaf..a29a090 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1056,9 +1056,6 @@ Release 2.8.0 - UNRELEASED
 HDFS-8778. TestBlockReportRateLimiting#testLeaseExpiration can deadlock.
 (Arpit Agarwal)
 
-HDFS-8344. NameNode doesn't recover lease for files with missing blocks
-(raviprak)
-
 HDFS-7582. Enforce maximum number of ACL entries separately per access
 and default. (vinayakumarb)
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5137b388/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index 210d1e5..0e569f0 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -440,9 +440,6 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final long    DFS_CACHEREPORT_INTERVAL_MSEC_DEFAULT = 10 * 1000;
   public static final String  DFS_BLOCK_INVALIDATE_LIMIT_KEY = "dfs.block.invalidate.limit";
   public static final int DFS_BLOCK_INVALIDATE_LIMIT_DEFAULT = 1000;
-  public static final String  DFS_BLOCK_UC_MAX_RECOVERY_ATTEMPTS = "dfs.block.uc.max.recovery.attempts";
-  public static final int DFS_BLOCK_UC_MAX_RECOVERY_ATTEMPTS_DEFAULT = 5;
-
   public static final String  DFS_DEFAULT_MAX_CORRUPT_FILES_RETURNED_KEY = "dfs.corruptfilesreturned.max";
   public static final int DFS_DEFAULT_MAX_CORRUPT_FILES_RETURNED = 500;
   /* Maximum number of blocks to process for initializing replication queues */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5137b388/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
index 28f1633..9cd3987 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
@@ -60,11 +61,6 @@ public abstract class BlockInfoUnderConstruction extends 
BlockInfo {
*/
   protected Block truncateBlock;
 
-  /** The number of times all replicas will be used to attempt recovery before
-   * giving up and marking the block under construction missing.
-   */
-  private int recoveryAttemptsBeforeMarkingBlockMissing;

hadoop git commit: HDFS-8344. NameNode doesn't recover lease for files with missing blocks (raviprak)

2015-07-20 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 d3c23c7b1 -> 3d58c7a70


HDFS-8344. NameNode doesn't recover lease for files with missing blocks 
(raviprak)

(cherry picked from commit e4f756260f16156179ba4adad974ec92279c2fac)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3d58c7a7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3d58c7a7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3d58c7a7

Branch: refs/heads/branch-2
Commit: 3d58c7a7006991b46efe8a8f60b244f4f85b481a
Parents: d3c23c7
Author: Ravi Prakash ravip...@altiscale.com
Authored: Mon Jul 20 14:03:34 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Mon Jul 20 14:04:28 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   |  3 +
 .../BlockInfoUnderConstruction.java | 19 -
 .../server/blockmanagement/BlockManager.java| 14 +++-
 .../hdfs/server/namenode/FSNamesystem.java  | 10 +++
 .../src/main/resources/hdfs-default.xml |  9 +++
 .../apache/hadoop/hdfs/TestLeaseRecovery.java   | 78 
 7 files changed, 132 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3d58c7a7/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index bc01dde..c4ce009 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -714,6 +714,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-8778. TestBlockReportRateLimiting#testLeaseExpiration can deadlock.
 (Arpit Agarwal)
 
+HDFS-8344. NameNode doesn't recover lease for files with missing blocks
+(raviprak)
+
 Release 2.7.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3d58c7a7/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index 0fafade..37e2d3d 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -440,6 +440,9 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final long    DFS_CACHEREPORT_INTERVAL_MSEC_DEFAULT = 10 * 1000;
   public static final String  DFS_BLOCK_INVALIDATE_LIMIT_KEY = "dfs.block.invalidate.limit";
   public static final int DFS_BLOCK_INVALIDATE_LIMIT_DEFAULT = 1000;
+  public static final String  DFS_BLOCK_UC_MAX_RECOVERY_ATTEMPTS = "dfs.block.uc.max.recovery.attempts";
+  public static final int DFS_BLOCK_UC_MAX_RECOVERY_ATTEMPTS_DEFAULT = 5;
+
   public static final String  DFS_DEFAULT_MAX_CORRUPT_FILES_RETURNED_KEY = "dfs.corruptfilesreturned.max";
   public static final int DFS_DEFAULT_MAX_CORRUPT_FILES_RETURNED = 500;
   /* Maximum number of blocks to process for initializing replication queues */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3d58c7a7/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
index 9cd3987..28f1633 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
@@ -61,6 +60,11 @@ public abstract class BlockInfoUnderConstruction extends 
BlockInfo {
*/
   protected Block truncateBlock;
 
+  /** The number of times all replicas will be used to attempt recovery before
+   * giving up and marking the block under construction missing.
+   */
+  private int recoveryAttemptsBeforeMarkingBlockMissing;
+
   /**
* ReplicaUnderConstruction contains information about replicas while
* they are under construction.
@@ -174,6 +178,8 @@ public abstract class BlockInfoUnderConstruction extends BlockInfo

hadoop git commit: HDFS-8344. NameNode doesn't recover lease for files with missing blocks (raviprak)

2015-07-20 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 98c2bc87b -> e4f756260


HDFS-8344. NameNode doesn't recover lease for files with missing blocks 
(raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e4f75626
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e4f75626
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e4f75626

Branch: refs/heads/trunk
Commit: e4f756260f16156179ba4adad974ec92279c2fac
Parents: 98c2bc8
Author: Ravi Prakash ravip...@altiscale.com
Authored: Mon Jul 20 14:03:34 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Mon Jul 20 14:03:34 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   |  3 +
 .../BlockInfoUnderConstruction.java | 19 -
 .../server/blockmanagement/BlockManager.java| 14 +++-
 .../hdfs/server/namenode/FSNamesystem.java  | 10 +++
 .../src/main/resources/hdfs-default.xml |  9 +++
 .../apache/hadoop/hdfs/TestLeaseRecovery.java   | 78 
 7 files changed, 132 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e4f75626/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 58491a6..13d9969 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1050,6 +1050,9 @@ Release 2.8.0 - UNRELEASED
 HDFS-8778. TestBlockReportRateLimiting#testLeaseExpiration can deadlock.
 (Arpit Agarwal)
 
+HDFS-8344. NameNode doesn't recover lease for files with missing blocks
+(raviprak)
+
 Release 2.7.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e4f75626/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index 0e569f0..210d1e5 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -440,6 +440,9 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final long    DFS_CACHEREPORT_INTERVAL_MSEC_DEFAULT = 10 * 1000;
   public static final String  DFS_BLOCK_INVALIDATE_LIMIT_KEY = "dfs.block.invalidate.limit";
   public static final int DFS_BLOCK_INVALIDATE_LIMIT_DEFAULT = 1000;
+  public static final String  DFS_BLOCK_UC_MAX_RECOVERY_ATTEMPTS = "dfs.block.uc.max.recovery.attempts";
+  public static final int DFS_BLOCK_UC_MAX_RECOVERY_ATTEMPTS_DEFAULT = 5;
+
   public static final String  DFS_DEFAULT_MAX_CORRUPT_FILES_RETURNED_KEY = "dfs.corruptfilesreturned.max";
   public static final int DFS_DEFAULT_MAX_CORRUPT_FILES_RETURNED = 500;
   /* Maximum number of blocks to process for initializing replication queues */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e4f75626/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
index 9cd3987..28f1633 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfoUnderConstruction.java
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
@@ -61,6 +60,11 @@ public abstract class BlockInfoUnderConstruction extends 
BlockInfo {
*/
   protected Block truncateBlock;
 
+  /** The number of times all replicas will be used to attempt recovery before
+   * giving up and marking the block under construction missing.
+   */
+  private int recoveryAttemptsBeforeMarkingBlockMissing;
+
   /**
* ReplicaUnderConstruction contains information about replicas while
* they are under construction.
@@ -174,6 +178,8 @@ public abstract class BlockInfoUnderConstruction extends 
BlockInfo
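
For reference, a hedged sketch of how the dfs.block.uc.max.recovery.attempts key added above is typically read through the standard Configuration API (the class and variable names below are assumptions for illustration):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;

    public class RecoveryAttemptsExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Falls back to the compiled-in default (5) when the key is unset.
        int maxAttempts = conf.getInt(
            DFSConfigKeys.DFS_BLOCK_UC_MAX_RECOVERY_ATTEMPTS,
            DFSConfigKeys.DFS_BLOCK_UC_MAX_RECOVERY_ATTEMPTS_DEFAULT);
        System.out.println("Max lease-recovery attempts per block: " + maxAttempts);
      }
    }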

hadoop git commit: YARN-3302. TestDockerContainerExecutor should run automatically if it can detect docker in the usual place (Ravindra Kumar Naik via raviprak)

2015-05-19 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 5009ad4a7 -> c97f32e7b


YARN-3302. TestDockerContainerExecutor should run automatically if it can 
detect docker in the usual place (Ravindra Kumar Naik via raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c97f32e7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c97f32e7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c97f32e7

Branch: refs/heads/trunk
Commit: c97f32e7b9d9e1d4c80682cc01741579166174d1
Parents: 5009ad4
Author: Ravi Prakash ravip...@altiscale.com
Authored: Tue May 19 10:28:11 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Tue May 19 10:28:11 2015 -0700

--
 hadoop-yarn-project/CHANGES.txt |  3 +++
 .../TestDockerContainerExecutor.java| 27 +++-
 2 files changed, 24 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c97f32e7/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index e17e9c7..34cd051 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -417,6 +417,9 @@ Release 2.8.0 - UNRELEASED
 YARN-2421. RM still allocates containers to an app in the FINISHING
 state (Chang Li via jlowe)
 
+YARN-3302. TestDockerContainerExecutor should run automatically if it can
+detect docker in the usual place (Ravindra Kumar Naik via raviprak)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c97f32e7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
index 65e381c..9386897 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
@@ -51,10 +51,11 @@ import com.google.common.base.Strings;
  * This is intended to test the DockerContainerExecutor code, but it requires
  * docker to be installed.
  * <br><ol>
- * <li>Install docker, and Compile the code with docker-service-url set to the
- * host and port where docker service is running.
+ * <li>To run the tests, set the docker-service-url to the host and port where
+ * docker service is running (If docker-service-url is not specified then the
+ * local daemon will be used).
  * <br><pre><code>
- *  mvn clean install -Ddocker-service-url=tcp://0.0.0.0:4243 -DskipTests
+ * mvn test -Ddocker-service-url=tcp://0.0.0.0:4243 -Dtest=TestDockerContainerExecutor
  * </code></pre>
  */
 public class TestDockerContainerExecutor {
@@ -98,10 +99,13 @@ public class TestDockerContainerExecutor {
 
 dockerUrl = System.getProperty("docker-service-url");
 LOG.info("dockerUrl: " + dockerUrl);
-if (Strings.isNullOrEmpty(dockerUrl)) {
+if (!Strings.isNullOrEmpty(dockerUrl)) {
+  dockerUrl = " -H " + dockerUrl;
+} else if(isDockerDaemonRunningLocally()) {
+  dockerUrl = "";
+} else {
   return;
 }
-dockerUrl = " -H " + dockerUrl;
 dockerExec = "docker " + dockerUrl;
 conf.set(
   YarnConfiguration.NM_DOCKER_CONTAINER_EXECUTOR_IMAGE_NAME, yarnImage);
@@ -136,6 +140,17 @@ public class TestDockerContainerExecutor {
 return exec != null;
   }
 
+  private boolean isDockerDaemonRunningLocally() {
+boolean dockerDaemonRunningLocally = true;
+  try {
+shellExec("docker info");
+  } catch (Exception e) {
+LOG.info("docker daemon is not running on local machine.");
+dockerDaemonRunningLocally = false;
+  }
+  return dockerDaemonRunningLocally;
+  }
+
   /**
* Test that a docker container can be launched to run a command
* @param cId a fake ContainerID
@@ -200,7 +215,7 @@ public class TestDockerContainerExecutor {
* Test that a touch command can be launched successfully in a docker
* container
*/
-  @Test
+  @Test(timeout=100)
   public void testLaunchContainer() throws IOException {
 if (!shouldRun()) {
   LOG.warn(Docker
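
A common alternative to the early-return guard in shouldRun() is JUnit's assumption mechanism, which reports the test as skipped rather than letting it pass vacuously. A sketch, assuming JUnit 4 and the shouldRun() guard from the test class above (not part of this commit):

    import org.junit.Assume;
    import org.junit.Before;

    // Marks every test in the class as skipped when docker is unavailable,
    // instead of silently returning from each test body.
    @Before
    public void checkDockerAvailable() {
      Assume.assumeTrue(shouldRun());
    }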

hadoop git commit: YARN-3302. TestDockerContainerExecutor should run automatically if it can detect docker in the usual place (Ravindra Kumar Naik via raviprak)

2015-05-19 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 d39039d54 -> 084e45362


YARN-3302. TestDockerContainerExecutor should run automatically if it can 
detect docker in the usual place (Ravindra Kumar Naik via raviprak)

(cherry picked from commit c97f32e7b9d9e1d4c80682cc01741579166174d1)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/084e4536
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/084e4536
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/084e4536

Branch: refs/heads/branch-2
Commit: 084e453629865ced18ec72721d67291623ed21d3
Parents: d39039d
Author: Ravi Prakash ravip...@altiscale.com
Authored: Tue May 19 10:28:11 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Tue May 19 10:30:24 2015 -0700

--
 hadoop-yarn-project/CHANGES.txt |  3 +++
 .../TestDockerContainerExecutor.java| 27 +++-
 2 files changed, 24 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/084e4536/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index eb5c183..c97df93 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -372,6 +372,9 @@ Release 2.8.0 - UNRELEASED
 YARN-2421. RM still allocates containers to an app in the FINISHING
 state (Chang Li via jlowe)
 
+YARN-3302. TestDockerContainerExecutor should run automatically if it can
+detect docker in the usual place (Ravindra Kumar Naik via raviprak)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/084e4536/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
index 65e381c..9386897 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
@@ -51,10 +51,11 @@ import com.google.common.base.Strings;
  * This is intended to test the DockerContainerExecutor code, but it requires
  * docker to be installed.
  * <br><ol>
- * <li>Install docker, and Compile the code with docker-service-url set to the
- * host and port where docker service is running.
+ * <li>To run the tests, set the docker-service-url to the host and port where
+ * docker service is running (If docker-service-url is not specified then the
+ * local daemon will be used).
  * <br><pre><code>
- *  mvn clean install -Ddocker-service-url=tcp://0.0.0.0:4243 -DskipTests
+ * mvn test -Ddocker-service-url=tcp://0.0.0.0:4243 -Dtest=TestDockerContainerExecutor
  * </code></pre>
  */
 public class TestDockerContainerExecutor {
@@ -98,10 +99,13 @@ public class TestDockerContainerExecutor {
 
 dockerUrl = System.getProperty("docker-service-url");
 LOG.info("dockerUrl: " + dockerUrl);
-if (Strings.isNullOrEmpty(dockerUrl)) {
+if (!Strings.isNullOrEmpty(dockerUrl)) {
+  dockerUrl = " -H " + dockerUrl;
+} else if(isDockerDaemonRunningLocally()) {
+  dockerUrl = "";
+} else {
   return;
 }
-dockerUrl = " -H " + dockerUrl;
 dockerExec = "docker " + dockerUrl;
 conf.set(
   YarnConfiguration.NM_DOCKER_CONTAINER_EXECUTOR_IMAGE_NAME, yarnImage);
@@ -136,6 +140,17 @@ public class TestDockerContainerExecutor {
 return exec != null;
   }
 
+  private boolean isDockerDaemonRunningLocally() {
+boolean dockerDaemonRunningLocally = true;
+  try {
+shellExec("docker info");
+  } catch (Exception e) {
+LOG.info("docker daemon is not running on local machine.");
+dockerDaemonRunningLocally = false;
+  }
+  return dockerDaemonRunningLocally;
+  }
+
   /**
* Test that a docker container can be launched to run a command
* @param cId a fake ContainerID
@@ -200,7 +215,7 @@ public class TestDockerContainerExecutor {
* Test that a touch command can be launched successfully in a docker
* container
*/
-  @Test
+  @Test(timeout=100)
   public void testLaunchContainer() throws IOException {

hadoop git commit: HDFS-4185. Add a metric for number of active leases (Rakesh R via raviprak)

2015-05-18 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 0c590e1c0 -> cdfae446a


HDFS-4185. Add a metric for number of active leases (Rakesh R via raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/cdfae446
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/cdfae446
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/cdfae446

Branch: refs/heads/trunk
Commit: cdfae446ad285db979a79bf55665363fd943702c
Parents: 0c590e1
Author: Ravi Prakash ravip...@altiscale.com
Authored: Mon May 18 12:37:21 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Mon May 18 12:37:21 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  2 +
 .../hdfs/server/namenode/FSNamesystem.java  | 17 ++
 .../hdfs/server/namenode/LeaseManager.java  |  9 +++
 .../namenode/metrics/TestNameNodeMetrics.java   | 59 ++--
 4 files changed, 83 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/cdfae446/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 7fd3495..35c3b5a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -570,6 +570,8 @@ Release 2.8.0 - UNRELEASED
 HDFS-8345. Storage policy APIs must be exposed via the FileSystem
 interface. (Arpit Agarwal)
 
+HDFS-4185. Add a metric for number of active leases (Rakesh R via raviprak)
+
   OPTIMIZATIONS
 
 HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cdfae446/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index 0fec5ee..7e5b981 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -5347,6 +5347,23 @@ public class FSNamesystem implements Namesystem, 
FSNamesystemMBean,
   }
 
   /**
+   * Get the number of files under construction in the system.
+   */
+  @Metric({ "NumFilesUnderConstruction",
+  "Number of files under construction" })
+  public long getNumFilesUnderConstruction() {
+return leaseManager.countPath();
+  }
+
+  /**
+   * Get the total number of active clients holding lease in the system.
+   */
+  @Metric({ "NumActiveClients", "Number of active clients holding lease" })
+  public long getNumActiveClients() {
+return leaseManager.countLease();
+  }
+
+  /**
* Get the total number of COMPLETE blocks in the system.
* For safe mode only complete blocks are counted.
*/

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cdfae446/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java
index ade2312..0806f82 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java
@@ -130,6 +130,15 @@ public class LeaseManager {
   @VisibleForTesting
   public synchronized int countLease() {return sortedLeases.size();}
 
+  /** @return the number of paths contained in all leases */
+  synchronized int countPath() {
+int count = 0;
+for (Lease lease : sortedLeases) {
+  count += lease.getFiles().size();
+}
+return count;
+  }
+
   /**
* Adds (or re-adds) the lease for the specified file.
*/

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cdfae446/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
index b390391..3120f85 100644
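
As a self-contained illustration of what the two counters above measure, a toy lease table (hypothetical types; the real LeaseManager keeps Lease objects in a sorted set):

    import java.util.List;
    import java.util.Map;

    class ToyLeaseTable {
      // One entry per client holding a lease; the value lists its open paths.
      private final Map<String, List<String>> leasesByClient;

      ToyLeaseTable(Map<String, List<String>> leasesByClient) {
        this.leasesByClient = leasesByClient;
      }

      // Backs the NumActiveClients metric: one lease per active client.
      synchronized int countLease() {
        return leasesByClient.size();
      }

      // Backs the NumFilesUnderConstruction metric: paths across all leases.
      synchronized int countPath() {
        int count = 0;
        for (List<String> paths : leasesByClient.values()) {
          count += paths.size();
        }
        return count;
      }
    }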

hadoop git commit: HDFS-4185. Add a metric for number of active leases (Rakesh R via raviprak)

2015-05-18 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 7bd4db968 -> e5b805d36


HDFS-4185. Add a metric for number of active leases (Rakesh R via raviprak)

(cherry picked from commit cdfae446ad285db979a79bf55665363fd943702c)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e5b805d3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e5b805d3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e5b805d3

Branch: refs/heads/branch-2
Commit: e5b805d361dd1735c3ab615347e0bf5739759a07
Parents: 7bd4db9
Author: Ravi Prakash ravip...@altiscale.com
Authored: Mon May 18 12:37:21 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Mon May 18 12:38:32 2015 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  2 +
 .../hdfs/server/namenode/FSNamesystem.java  | 17 ++
 .../hdfs/server/namenode/LeaseManager.java  |  9 +++
 .../namenode/metrics/TestNameNodeMetrics.java   | 59 ++--
 4 files changed, 83 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e5b805d3/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 56eb913..36c3fe0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -233,6 +233,8 @@ Release 2.8.0 - UNRELEASED
 HDFS-8345. Storage policy APIs must be exposed via the FileSystem
 interface. (Arpit Agarwal)
 
+HDFS-4185. Add a metric for number of active leases (Rakesh R via raviprak)
+
   OPTIMIZATIONS
 
 HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e5b805d3/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index 13692a0..4974b92 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -5341,6 +5341,23 @@ public class FSNamesystem implements Namesystem, 
FSNamesystemMBean,
   }
 
   /**
+   * Get the number of files under construction in the system.
+   */
+  @Metric({ "NumFilesUnderConstruction",
+  "Number of files under construction" })
+  public long getNumFilesUnderConstruction() {
+return leaseManager.countPath();
+  }
+
+  /**
+   * Get the total number of active clients holding lease in the system.
+   */
+  @Metric({ "NumActiveClients", "Number of active clients holding lease" })
+  public long getNumActiveClients() {
+return leaseManager.countLease();
+  }
+
+  /**
* Get the total number of COMPLETE blocks in the system.
* For safe mode only complete blocks are counted.
*/

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e5b805d3/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java
index ade2312..0806f82 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/LeaseManager.java
@@ -130,6 +130,15 @@ public class LeaseManager {
   @VisibleForTesting
   public synchronized int countLease() {return sortedLeases.size();}
 
+  /** @return the number of paths contained in all leases */
+  synchronized int countPath() {
+int count = 0;
+for (Lease lease : sortedLeases) {
+  count += lease.getFiles().size();
+}
+return count;
+  }
+
   /**
* Adds (or re-adds) the lease for the specified file.
*/

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e5b805d3/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/metrics/TestNameNodeMetrics.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server

hadoop git commit: YARN-1519. Check in container-executor if sysconf is implemented before using it (Radim Kolar and Eric Payne via raviprak)

2015-05-14 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 fa082e1a0 -> 4caadaa8b


YARN-1519. Check in container-executor if sysconf is implemented before using 
it (Radim Kolar and Eric Payne via raviprak)

(cherry picked from commit 53fe4eff09fdaeed75a8cad3a26156bf963a8d37)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4caadaa8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4caadaa8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4caadaa8

Branch: refs/heads/branch-2
Commit: 4caadaa8bb61e6514c002a81b31bf4a649a685b5
Parents: fa082e1
Author: Ravi Prakash ravip...@apache.org
Authored: Thu May 14 15:55:37 2015 -0700
Committer: Ravi Prakash ravip...@apache.org
Committed: Thu May 14 15:56:36 2015 -0700

--
 hadoop-yarn-project/CHANGES.txt   | 3 +++
 .../main/native/container-executor/impl/container-executor.c  | 7 +--
 2 files changed, 8 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4caadaa8/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index c642b12..3ce0ab5 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -363,6 +363,9 @@ Release 2.8.0 - UNRELEASED
 YARN-2921. Fix MockRM/MockAM#waitForState sleep too long. 
 (Tsuyoshi Ozawa via wangda)
 
+YARN-1519. Check in container-executor if sysconf is implemented before
+using it (Radim Kolar and Eric Payne via raviprak)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4caadaa8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c
index 6727838..962d52a 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c
@@ -631,8 +631,11 @@ static int create_container_directories(const char* user, 
const char *app_id,
  */
 static struct passwd* get_user_info(const char* user) {
   int string_size = sysconf(_SC_GETPW_R_SIZE_MAX);
-  void* buffer = malloc(string_size + sizeof(struct passwd));
   struct passwd *result = NULL;
+  if(string_size < 1024) {
+string_size = 1024;
+  }
+  void* buffer = malloc(string_size + sizeof(struct passwd));
   if (getpwnam_r(user, buffer, buffer + sizeof(struct passwd), string_size,
 &result) != 0) {
 free(buffer);
@@ -1425,7 +1428,7 @@ void chown_dir_contents(const char *dir_path, uid_t uid, 
gid_t gid) {
  
   dp = opendir(dir_path);
   if (dp != NULL) {
-while (ep = readdir(dp)) {
+while ((ep = readdir(dp)) != NULL) {
   stpncpy(buf, ep->d_name, strlen(ep->d_name));
   buf[strlen(ep->d_name)] = '\0';
   change_owner(path_tmp, uid, gid);



hadoop git commit: YARN-1519. Check in container-executor if sysconf is implemented before using it (Radim Kolar and Eric Payne via raviprak)

2015-05-14 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 09fe16f16 -> 53fe4eff0


YARN-1519. Check in container-executor if sysconf is implemented before using 
it (Radim Kolar and Eric Payne via raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/53fe4eff
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/53fe4eff
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/53fe4eff

Branch: refs/heads/trunk
Commit: 53fe4eff09fdaeed75a8cad3a26156bf963a8d37
Parents: 09fe16f
Author: Ravi Prakash ravip...@apache.org
Authored: Thu May 14 15:55:37 2015 -0700
Committer: Ravi Prakash ravip...@apache.org
Committed: Thu May 14 15:55:37 2015 -0700

--
 hadoop-yarn-project/CHANGES.txt | 3 +++
 .../native/container-executor/impl/container-executor.c | 9 ++---
 2 files changed, 9 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/53fe4eff/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index e0f2c52..f2a518e 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -408,6 +408,9 @@ Release 2.8.0 - UNRELEASED
 YARN-2921. Fix MockRM/MockAM#waitForState sleep too long. 
 (Tsuyoshi Ozawa via wangda)
 
+YARN-1519. Check in container-executor if sysconf is implemented before
+using it (Radim Kolar and Eric Payne via raviprak)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/53fe4eff/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c
index 485399a..ff28d30 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/impl/container-executor.c
@@ -631,8 +631,11 @@ static int create_container_directories(const char* user, 
const char *app_id,
  */
 static struct passwd* get_user_info(const char* user) {
   int string_size = sysconf(_SC_GETPW_R_SIZE_MAX);
-  void* buffer = malloc(string_size + sizeof(struct passwd));
   struct passwd *result = NULL;
+  if(string_size < 1024) {
+string_size = 1024;
+  }
+  void* buffer = malloc(string_size + sizeof(struct passwd));
   if (getpwnam_r(user, buffer, buffer + sizeof(struct passwd), string_size,
 &result) != 0) {
 free(buffer);
@@ -1425,7 +1428,7 @@ void chown_dir_contents(const char *dir_path, uid_t uid, 
gid_t gid) {
  
   dp = opendir(dir_path);
   if (dp != NULL) {
-while (ep = readdir(dp)) {
+while ((ep = readdir(dp)) != NULL) {
   stpncpy(buf, ep->d_name, strlen(ep->d_name));
   buf[strlen(ep->d_name)] = '\0';
   change_owner(path_tmp, uid, gid);
@@ -1545,4 +1548,4 @@ int traffic_control_read_state(char *command_file) {
  */
 int traffic_control_read_stats(char *command_file) {
   return run_traffic_control(TC_READ_STATS_OPTS, command_file);
-}
\ No newline at end of file
+}



hadoop git commit: MAPREDUCE-4750. Enable NNBenchWithoutMR in MapredTestDriver (Liang Xie and Jason Lowe via raviprak)

2015-05-08 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 25e2b0212 -> 5aab01434


MAPREDUCE-4750. Enable NNBenchWithoutMR in MapredTestDriver (Liang Xie and 
Jason Lowe via raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5aab0143
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5aab0143
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5aab0143

Branch: refs/heads/trunk
Commit: 5aab014340b53ebc9363ee244b2cbea7a4c1f573
Parents: 25e2b02
Author: Ravi Prakash ravip...@altiscale.com
Authored: Fri May 8 15:56:29 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Fri May 8 15:56:29 2015 -0700

--
 hadoop-mapreduce-project/CHANGES.txt| 4 +++-
 .../src/test/java/org/apache/hadoop/test/MapredTestDriver.java  | 5 -
 2 files changed, 7 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5aab0143/hadoop-mapreduce-project/CHANGES.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.txt 
b/hadoop-mapreduce-project/CHANGES.txt
index 4507545..a09d25e 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -328,7 +328,6 @@ Release 2.8.0 - UNRELEASED
   OPTIMIZATIONS
 
   BUG FIXES
-
 MAPREDUCE-6314. TestPipeApplication fails on trunk.
 (Varun Vasudev via harsh)
 
@@ -400,6 +399,9 @@ Release 2.8.0 - UNRELEASED
 MAPREDUCE-3383. Duplicate job.getOutputValueGroupingComparator() in
 ReduceTask (Binglin Chang via jlowe)
 
+MAPREDUCE-4750. Enable NNBenchWithoutMR in MapredTestDriver (Liang Xie
+and Jason Lowe via raviprak)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5aab0143/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java
index b1dfe56..8fa82aa 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.mapreduce.SleepJob;
 import org.apache.hadoop.util.ProgramDriver;
 
 import org.apache.hadoop.hdfs.NNBench;
+import org.apache.hadoop.hdfs.NNBenchWithoutMR;
 import org.apache.hadoop.fs.TestFileSystem;
 import org.apache.hadoop.fs.TestDFSIO;
 import org.apache.hadoop.fs.DFSCIOTest;
@@ -90,7 +91,9 @@ public class MapredTestDriver {
   pgd.addClass("sleep", SleepJob.class,
       "A job that sleeps at each map and reduce task.");
   pgd.addClass("nnbench", NNBench.class,
-      "A benchmark that stresses the namenode.");
+      "A benchmark that stresses the namenode w/ MR.");
+  pgd.addClass("nnbenchWithoutMR", NNBenchWithoutMR.class,
+      "A benchmark that stresses the namenode w/o MR.");
   pgd.addClass("testfilesystem", TestFileSystem.class,
       "A test for FileSystem read/write.");
   pgd.addClass(TestDFSIO.class.getSimpleName(), TestDFSIO.class,
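
For orientation, a minimal sketch of the ProgramDriver dispatch pattern this file uses; MiniTestDriver is a hypothetical name, and the registrations mirror the lines above:

    import org.apache.hadoop.util.ProgramDriver;

    public class MiniTestDriver {
      public static void main(String[] args) throws Throwable {
        ProgramDriver pgd = new ProgramDriver();
        // Each entry maps a command-line name to a class with its own main().
        pgd.addClass("nnbench", org.apache.hadoop.hdfs.NNBench.class,
            "A benchmark that stresses the namenode w/ MR.");
        pgd.addClass("nnbenchWithoutMR",
            org.apache.hadoop.hdfs.NNBenchWithoutMR.class,
            "A benchmark that stresses the namenode w/o MR.");
        pgd.driver(args);  // dispatches on args[0], printing usage if unknown
      }
    }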



hadoop git commit: MAPREDUCE-4750. Enable NNBenchWithoutMR in MapredTestDriver (Liang Xie and Jason Lowe via raviprak)

2015-05-08 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 743ca1a05 - 23065e605


MAPREDUCE-4750. Enable NNBenchWithoutMR in MapredTestDriver (Liang Xie and 
Jason Lowe via raviprak)

(cherry picked from commit 5aab014340b53ebc9363ee244b2cbea7a4c1f573)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/23065e60
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/23065e60
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/23065e60

Branch: refs/heads/branch-2
Commit: 23065e60514ca4a9336be246566959da89c23ee1
Parents: 743ca1a
Author: Ravi Prakash ravip...@altiscale.com
Authored: Fri May 8 15:56:29 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Fri May 8 15:58:29 2015 -0700

--
 hadoop-mapreduce-project/CHANGES.txt| 4 +++-
 .../src/test/java/org/apache/hadoop/test/MapredTestDriver.java  | 5 -
 2 files changed, 7 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/23065e60/hadoop-mapreduce-project/CHANGES.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.txt 
b/hadoop-mapreduce-project/CHANGES.txt
index c7ad3c6..35ff978 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -73,7 +73,6 @@ Release 2.8.0 - UNRELEASED
   OPTIMIZATIONS
 
   BUG FIXES
-
 MAPREDUCE-6314. TestPipeApplication fails on trunk.
 (Varun Vasudev via harsh)
 
@@ -145,6 +144,9 @@ Release 2.8.0 - UNRELEASED
 MAPREDUCE-3383. Duplicate job.getOutputValueGroupingComparator() in
 ReduceTask (Binglin Chang via jlowe)
 
+MAPREDUCE-4750. Enable NNBenchWithoutMR in MapredTestDriver (Liang Xie
+and Jason Lowe via raviprak)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/23065e60/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java
index b1dfe56..8fa82aa 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.mapreduce.SleepJob;
 import org.apache.hadoop.util.ProgramDriver;
 
 import org.apache.hadoop.hdfs.NNBench;
+import org.apache.hadoop.hdfs.NNBenchWithoutMR;
 import org.apache.hadoop.fs.TestFileSystem;
 import org.apache.hadoop.fs.TestDFSIO;
 import org.apache.hadoop.fs.DFSCIOTest;
@@ -90,7 +91,9 @@ public class MapredTestDriver {
   pgd.addClass("sleep", SleepJob.class,
       "A job that sleeps at each map and reduce task.");
   pgd.addClass("nnbench", NNBench.class,
-      "A benchmark that stresses the namenode.");
+      "A benchmark that stresses the namenode w/ MR.");
+  pgd.addClass("nnbenchWithoutMR", NNBenchWithoutMR.class,
+      "A benchmark that stresses the namenode w/o MR.");
   pgd.addClass("testfilesystem", TestFileSystem.class,
       "A test for FileSystem read/write.");
   pgd.addClass(TestDFSIO.class.getSimpleName(), TestDFSIO.class,



hadoop git commit: HADOOP-11876. Refactor code to make it more readable, minor maybePrintStats bug (Zoran Dimitrijevic via raviprak)

2015-04-24 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 80935268f -> a287d2fb7


HADOOP-11876. Refactor code to make it more readable, minor maybePrintStats bug 
(Zoran Dimitrijevic via raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a287d2fb
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a287d2fb
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a287d2fb

Branch: refs/heads/trunk
Commit: a287d2fb77d9873b61c6ab24134993d784ae8475
Parents: 8093526
Author: Ravi Prakash ravip...@altiscale.com
Authored: Fri Apr 24 13:39:07 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Fri Apr 24 13:39:07 2015 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt   |  3 +++
 .../java/org/apache/hadoop/tools/SimpleCopyListing.java   | 10 +-
 2 files changed, 8 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a287d2fb/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 80c8a54..826c77e 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -561,6 +561,9 @@ Release 2.8.0 - UNRELEASED
 HADOOP-11864. JWTRedirectAuthenticationHandler breaks java8 javadocs.
 (Larry McCay via stevel)
 
+HADOOP-11876. Refactor code to make it more readable, minor
+maybePrintStats bug (Zoran Dimitrijevic via raviprak)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a287d2fb/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
index b9ba099..4ea1dc9 100644
--- 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
+++ 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
@@ -343,11 +343,12 @@ public class SimpleCopyListing extends CopyListing {
   }
 }
 result = new WorkReport<FileStatus[]>(
-fileSystem.listStatus(parent.getPath()), 0, true);
+fileSystem.listStatus(parent.getPath()), retry, true);
   } catch (FileNotFoundException fnf) {
 LOG.error("FileNotFoundException exception in listStatus: " +
   fnf.getMessage());
-result = new WorkReport<FileStatus[]>(new FileStatus[0], 0, true, fnf);
+result = new WorkReport<FileStatus[]>(new FileStatus[0], retry, true,
+  fnf);
   } catch (Exception e) {
 LOG.error("Exception in listStatus. Will send for retry.");
 FileStatus[] parentList = new FileStatus[1];
@@ -391,7 +392,6 @@ public class SimpleCopyListing extends CopyListing {

 for (FileStatus status : sourceDirs) {
   workers.put(new WorkRequest<FileStatus>(status, 0));
-  maybePrintStats();
 }
 
 while (workers.hasWork()) {
@@ -402,7 +402,7 @@ public class SimpleCopyListing extends CopyListing {
   if (LOG.isDebugEnabled()) {
 LOG.debug("Recording source-path: " + child.getPath() + " for copy.");
   }
-  if (retry == 0) {
+  if (workResult.getSuccess()) {
 CopyListingFileStatus childCopyListingStatus =
   DistCpUtils.toCopyListingFileStatus(sourceFS, child,
 preserveAcls && child.isDirectory(),
@@ -417,7 +417,6 @@ public class SimpleCopyListing extends CopyListing {
 LOG.debug("Traversing into source dir: " + child.getPath());
   }
   workers.put(new WorkRequest<FileStatus>(child, retry));
-  maybePrintStats();
 }
   } else {
 LOG.error("Giving up on " + child.getPath() +
@@ -472,5 +471,6 @@ public class SimpleCopyListing extends CopyListing {
   totalDirs++;
 }
 totalPaths++;
+maybePrintStats();
   }
 }
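
To make the retry handshake concrete, a toy sketch of the decision points the loop above relies on; ToyWorkReport is a stand-in with only the accessors used here, not the real org.apache.hadoop.tools.util.WorkReport:

    class ToyWorkReport<T> {
      private final T item;
      private final int retry;
      private final boolean success;

      ToyWorkReport(T item, int retry, boolean success) {
        this.item = item;
        this.retry = retry;
        this.success = success;
      }

      T getItem() { return item; }
      int getRetry() { return retry; }
      boolean getSuccess() { return success; }

      // The three-way outcome used by the traversal: record a success,
      // re-queue a failure that still has retry budget, or give up on it.
      static <T> String classify(ToyWorkReport<T> report, int maxRetries) {
        if (report.getSuccess()) {
          return "record";
        } else if (report.getRetry() < maxRetries) {
          return "requeue";
        } else {
          return "give up";
        }
      }
    }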



hadoop git commit: HADOOP-11876. Refactor code to make it more readable, minor maybePrintStats bug (Zoran Dimitrijevic via raviprak)

2015-04-24 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 524593ee8 -> 122262a1f


HADOOP-11876. Refactor code to make it more readable, minor maybePrintStats bug 
(Zoran Dimitrijevic via raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/122262a1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/122262a1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/122262a1

Branch: refs/heads/branch-2
Commit: 122262a1fb2225b487ed34a970c23e95cee3528c
Parents: 524593e
Author: Ravi Prakash ravip...@altiscale.com
Authored: Fri Apr 24 13:39:07 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Fri Apr 24 13:39:48 2015 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt   |  3 +++
 .../java/org/apache/hadoop/tools/SimpleCopyListing.java   | 10 +-
 2 files changed, 8 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/122262a1/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 8a55411..e018bc9 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -112,6 +112,9 @@ Release 2.8.0 - UNRELEASED
 HADOOP-11864. JWTRedirectAuthenticationHandler breaks java8 javadocs.
 (Larry McCay via stevel)
 
+HADOOP-11876. Refactor code to make it more readable, minor
+maybePrintStats bug (Zoran Dimitrijevic via raviprak)
+
 Release 2.7.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/122262a1/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
index b9ba099..4ea1dc9 100644
--- 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
+++ 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
@@ -343,11 +343,12 @@ public class SimpleCopyListing extends CopyListing {
   }
 }
 result = new WorkReport<FileStatus[]>(
-fileSystem.listStatus(parent.getPath()), 0, true);
+fileSystem.listStatus(parent.getPath()), retry, true);
   } catch (FileNotFoundException fnf) {
 LOG.error("FileNotFoundException exception in listStatus: " +
   fnf.getMessage());
-result = new WorkReport<FileStatus[]>(new FileStatus[0], 0, true, fnf);
+result = new WorkReport<FileStatus[]>(new FileStatus[0], retry, true,
+  fnf);
   } catch (Exception e) {
 LOG.error("Exception in listStatus. Will send for retry.");
 FileStatus[] parentList = new FileStatus[1];
@@ -391,7 +392,6 @@ public class SimpleCopyListing extends CopyListing {

 for (FileStatus status : sourceDirs) {
   workers.put(new WorkRequest<FileStatus>(status, 0));
-  maybePrintStats();
 }
 
 while (workers.hasWork()) {
@@ -402,7 +402,7 @@ public class SimpleCopyListing extends CopyListing {
   if (LOG.isDebugEnabled()) {
 LOG.debug("Recording source-path: " + child.getPath() + " for copy.");
   }
-  if (retry == 0) {
+  if (workResult.getSuccess()) {
 CopyListingFileStatus childCopyListingStatus =
   DistCpUtils.toCopyListingFileStatus(sourceFS, child,
 preserveAcls && child.isDirectory(),
@@ -417,7 +417,6 @@ public class SimpleCopyListing extends CopyListing {
 LOG.debug("Traversing into source dir: " + child.getPath());
   }
   workers.put(new WorkRequest<FileStatus>(child, retry));
-  maybePrintStats();
 }
   } else {
 LOG.error("Giving up on " + child.getPath() +
@@ -472,5 +471,6 @@ public class SimpleCopyListing extends CopyListing {
   totalDirs++;
 }
 totalPaths++;
+maybePrintStats();
   }
 }



hadoop git commit: HADOOP-11827. Speed-up distcp buildListing() using threadpool (Zoran Dimitrijevic via raviprak)

2015-04-21 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 6d8a49dd6 -> aa34aa5ca


HADOOP-11827. Speed-up distcp buildListing() using threadpool (Zoran 
Dimitrijevic via raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/aa34aa5c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/aa34aa5c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/aa34aa5c

Branch: refs/heads/branch-2
Commit: aa34aa5caae8b399b2f333b2f1c92fafe3616622
Parents: 6d8a49d
Author: Ravi Prakash ravip...@altiscale.com
Authored: Tue Apr 21 16:43:02 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Tue Apr 21 16:50:40 2015 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 .../apache/hadoop/tools/DistCpConstants.java|   4 +
 .../apache/hadoop/tools/DistCpOptionSwitch.java |   9 +-
 .../org/apache/hadoop/tools/DistCpOptions.java  |  27 +++
 .../org/apache/hadoop/tools/OptionsParser.java  |  12 ++
 .../apache/hadoop/tools/SimpleCopyListing.java  | 169 +++---
 .../hadoop/tools/util/ProducerConsumer.java | 177 +++
 .../apache/hadoop/tools/util/WorkReport.java|  78 
 .../apache/hadoop/tools/util/WorkRequest.java   |  53 ++
 .../hadoop/tools/util/WorkRequestProcessor.java |  38 
 .../apache/hadoop/tools/TestCopyListing.java|  20 ++-
 .../apache/hadoop/tools/TestIntegration.java|  17 ++
 .../apache/hadoop/tools/TestOptionsParser.java  |  42 +
 .../hadoop/tools/util/TestProducerConsumer.java | 109 
 14 files changed, 728 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/aa34aa5c/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 9960a16..f6580f5 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -55,6 +55,9 @@ Release 2.8.0 - UNRELEASED
 HADOOP-11819. HttpServerFunctionalTest#prepareTestWebapp should create web
 app directory if it does not exist. (Rohith via vinayakumarb)
 
+HADOOP-11827. Speed-up distcp buildListing() using threadpool
+(Zoran Dimitrijevic via raviprak)
+
   OPTIMIZATIONS
 
 HADOOP-11785. Reduce the number of listStatus operation in distcp

http://git-wip-us.apache.org/repos/asf/hadoop/blob/aa34aa5c/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
index a1af2af..7ecb6ce 100644
--- 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
+++ 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
@@ -23,6 +23,9 @@ package org.apache.hadoop.tools;
  */
 public class DistCpConstants {
 
+  /* Default number of threads to use for building file listing */
+  public static final int DEFAULT_LISTSTATUS_THREADS = 1;
+
   /* Default number of maps to use for DistCp */
   public static final int DEFAULT_MAPS = 20;
 
@@ -47,6 +50,7 @@ public class DistCpConstants {
   public static final String CONF_LABEL_SYNC_FOLDERS = "distcp.sync.folders";
   public static final String CONF_LABEL_DELETE_MISSING = "distcp.delete.missing.source";
   public static final String CONF_LABEL_SSL_CONF = "distcp.keystore.resource";
+  public static final String CONF_LABEL_LISTSTATUS_THREADS = "distcp.liststatus.threads";
   public static final String CONF_LABEL_MAX_MAPS = "distcp.max.maps";
   public static final String CONF_LABEL_SOURCE_LISTING = "distcp.source.listing";
   public static final String CONF_LABEL_COPY_STRATEGY = "distcp.copy.strategy";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/aa34aa5c/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
index e9c7d46..f90319d 100644
--- 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
+++ 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
@@ -83,7 +83,14 @@ public enum DistCpOptionSwitch {
   SSL_CONF(DistCpConstants.CONF_LABEL_SSL_CONF,
  new Option("mapredSslConf", true, "Configuration for ssl config file" +
  ", to use
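
The heart of this change is the new ProducerConsumer/WorkRequest/WorkReport utilities, which let buildListing() fan its listStatus() calls out across a thread pool. The excerpt above cuts off before those classes, so here is a minimal, self-contained sketch of the same producer-consumer shape; names and details are illustrative, not the API added by the patch:

    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.LinkedBlockingQueue;
    import java.util.concurrent.atomic.AtomicInteger;

    // Illustrative producer-consumer: callers put() requests, a fixed pool of
    // workers processes them, and callers take() the reports as they finish.
    public class MiniProducerConsumer<R, P> {

      public interface Processor<R, P> {
        P process(R request);
      }

      private final BlockingQueue<R> requests = new LinkedBlockingQueue<R>();
      private final BlockingQueue<P> reports = new LinkedBlockingQueue<P>();
      private final AtomicInteger outstanding = new AtomicInteger(0);
      private final ExecutorService pool;

      public MiniProducerConsumer(int numThreads, final Processor<R, P> processor) {
        pool = Executors.newFixedThreadPool(numThreads);
        for (int i = 0; i < numThreads; i++) {
          pool.execute(new Runnable() {
            @Override
            public void run() {
              try {
                while (true) {
                  // Block for the next request, process it, publish the report.
                  reports.put(processor.process(requests.take()));
                }
              } catch (InterruptedException ie) {
                Thread.currentThread().interrupt();
              }
            }
          });
        }
      }

      public void put(R request) throws InterruptedException {
        outstanding.incrementAndGet();
        requests.put(request);
      }

      public P take() throws InterruptedException {
        P report = reports.take();
        outstanding.decrementAndGet();
        return report;
      }

      // True while some put() has not yet been matched by a take(),
      // mirroring the workers.hasWork() loop in SimpleCopyListing above.
      public boolean hasWork() {
        return outstanding.get() > 0;
      }

      public void shutdown() {
        pool.shutdownNow();
      }
    }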

hadoop git commit: HADOOP-11827. Speed-up distcp buildListing() using threadpool (Zoran Dimitrijevic via raviprak)

2015-04-21 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 2c1469036 - cfba35505


HADOOP-11827. Speed-up distcp buildListing() using threadpool (Zoran 
Dimitrijevic via raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/cfba3550
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/cfba3550
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/cfba3550

Branch: refs/heads/trunk
Commit: cfba355052df15f8eb6cc9b8e90e2d8492bec7d7
Parents: 2c14690
Author: Ravi Prakash ravip...@altiscale.com
Authored: Tue Apr 21 16:43:02 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Tue Apr 21 16:49:37 2015 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 .../apache/hadoop/tools/DistCpConstants.java|   4 +
 .../apache/hadoop/tools/DistCpOptionSwitch.java |   9 +-
 .../org/apache/hadoop/tools/DistCpOptions.java  |  27 +++
 .../org/apache/hadoop/tools/OptionsParser.java  |  12 ++
 .../apache/hadoop/tools/SimpleCopyListing.java  | 169 +++---
 .../hadoop/tools/util/ProducerConsumer.java | 177 +++
 .../apache/hadoop/tools/util/WorkReport.java|  78 
 .../apache/hadoop/tools/util/WorkRequest.java   |  53 ++
 .../hadoop/tools/util/WorkRequestProcessor.java |  38 
 .../apache/hadoop/tools/TestCopyListing.java|  20 ++-
 .../apache/hadoop/tools/TestIntegration.java|  17 ++
 .../apache/hadoop/tools/TestOptionsParser.java  |  42 +
 .../hadoop/tools/util/TestProducerConsumer.java | 109 
 14 files changed, 728 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/cfba3550/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 02066b6..a6814f8 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -499,6 +499,9 @@ Release 2.8.0 - UNRELEASED
 HADOOP-11819. HttpServerFunctionalTest#prepareTestWebapp should create web
 app directory if it does not exist. (Rohith via vinayakumarb)
 
+HADOOP-11827. Speed-up distcp buildListing() using threadpool
+(Zoran Dimitrijevic via raviprak)
+
   OPTIMIZATIONS
 
 HADOOP-11785. Reduce the number of listStatus operation in distcp

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cfba3550/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
index a1af2af..7ecb6ce 100644
--- 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
+++ 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
@@ -23,6 +23,9 @@ package org.apache.hadoop.tools;
  */
 public class DistCpConstants {
 
+  /* Default number of threads to use for building file listing */
+  public static final int DEFAULT_LISTSTATUS_THREADS = 1;
+
   /* Default number of maps to use for DistCp */
   public static final int DEFAULT_MAPS = 20;
 
@@ -47,6 +50,7 @@ public class DistCpConstants {
   public static final String CONF_LABEL_SYNC_FOLDERS = "distcp.sync.folders";
   public static final String CONF_LABEL_DELETE_MISSING = "distcp.delete.missing.source";
   public static final String CONF_LABEL_SSL_CONF = "distcp.keystore.resource";
+  public static final String CONF_LABEL_LISTSTATUS_THREADS = "distcp.liststatus.threads";
   public static final String CONF_LABEL_MAX_MAPS = "distcp.max.maps";
   public static final String CONF_LABEL_SOURCE_LISTING = "distcp.source.listing";
   public static final String CONF_LABEL_COPY_STRATEGY = "distcp.copy.strategy";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cfba3550/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
index e9c7d46..f90319d 100644
--- 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
+++ 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
@@ -83,7 +83,14 @@ public enum DistCpOptionSwitch {
   SSL_CONF(DistCpConstants.CONF_LABEL_SSL_CONF,
  new Option("mapredSslConf", true, "Configuration for ssl config file" +
  ", to use with hftps
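
On the configuration side, the new knob shown in the DistCpConstants hunk above is read like any other Hadoop setting. A hedged sketch of resolving it (the surrounding class is illustrative; only the two constants come from the patch):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.tools.DistCpConstants;

    public class ListingThreadsExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Falls back to DEFAULT_LISTSTATUS_THREADS (1) when the key is unset,
        // i.e. the old single-threaded listing behaviour.
        int threads = conf.getInt(DistCpConstants.CONF_LABEL_LISTSTATUS_THREADS,
            DistCpConstants.DEFAULT_LISTSTATUS_THREADS);
        System.out.println("building copy listing with " + threads + " thread(s)");
      }
    }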

hadoop git commit: YARN-3288. Document and fix indentation in the DockerContainerExecutor code

2015-03-28 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 274db918c - b1b495145


YARN-3288. Document and fix indentation in the DockerContainerExecutor code

(cherry picked from commit e0ccea33c9e12f6930b2867e14b1b37569fed659)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b1b49514
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b1b49514
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b1b49514

Branch: refs/heads/branch-2
Commit: b1b4951452ff9944f3d1deb4e039d7e9e578d8d7
Parents: 274db91
Author: Ravi Prakash ravip...@altiscale.com
Authored: Sat Mar 28 08:00:41 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Sat Mar 28 08:01:26 2015 -0700

--
 hadoop-yarn-project/CHANGES.txt |   2 +
 .../server/nodemanager/ContainerExecutor.java   |  18 +-
 .../nodemanager/DockerContainerExecutor.java| 229 +++
 .../launcher/ContainerLaunch.java   |   8 +-
 .../TestDockerContainerExecutor.java|  98 
 .../TestDockerContainerExecutorWithMocks.java   | 110 +
 6 files changed, 277 insertions(+), 188 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1b49514/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 0597518..0d1bef1 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -33,6 +33,8 @@ Release 2.8.0 - UNRELEASED
 
 YARN-3397. yarn rmadmin should skip -failover. (J.Andreina via kasha)
 
+YARN-3288. Document and fix indentation in the DockerContainerExecutor code
+
   OPTIMIZATIONS
 
 YARN-3339. TestDockerContainerExecutor should pull a single image and not

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1b49514/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
index 377fd1d..1c670a1 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
@@ -210,8 +210,22 @@ public abstract class ContainerExecutor implements 
Configurable {
 }
   }
 
-  public void writeLaunchEnv(OutputStream out, Map<String, String> environment, Map<Path, List<String>> resources, List<String> command) throws IOException{
-ContainerLaunch.ShellScriptBuilder sb = ContainerLaunch.ShellScriptBuilder.create();
+  /**
+   * This method writes out the launch environment of a container. This can be
+   * overridden by extending ContainerExecutors to provide different behaviors
+   * @param out the output stream to which the environment is written (usually
+   * a script file which will be executed by the Launcher)
+   * @param environment The environment variables and their values
+   * @param resources The resources which have been localized for this container
+   * Symlinks will be created to these localized resources
+   * @param command The command that will be run.
+   * @throws IOException if any errors happened writing to the OutputStream,
+   * while creating symlinks
+   */
+  public void writeLaunchEnv(OutputStream out, Map<String, String> environment,
+    Map<Path, List<String>> resources, List<String> command) throws IOException{
+ContainerLaunch.ShellScriptBuilder sb =
+  ContainerLaunch.ShellScriptBuilder.create();
 if (environment != null) {
 for (Map.Entry<String,String> env : environment.entrySet()) {
 sb.env(env.getKey().toString(), env.getValue().toString());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1b49514/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java
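
The javadoc added above exists precisely because writeLaunchEnv is a deliberate extension point: DockerContainerExecutor overrides it to emit its own launch script. A hedged sketch of what such an override can look like; the subclass and the injected variable are invented for illustration, and the assumption that the environment map is mutable at this point is just that, an assumption:

    import java.io.IOException;
    import java.io.OutputStream;
    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor;

    // Illustrative subclass: tweak the environment, then reuse the default
    // launch-script generation from ContainerExecutor.
    public abstract class AuditingContainerExecutor extends ContainerExecutor {
      @Override
      public void writeLaunchEnv(OutputStream out, Map<String, String> environment,
          Map<Path, List<String>> resources, List<String> command)
          throws IOException {
        // Hypothetical variable, assuming the map is mutable here.
        environment.put("LAUNCHED_BY", getClass().getSimpleName());
        super.writeLaunchEnv(out, environment, resources, command);
      }
    }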
 

hadoop git commit: YARN-3288. Document and fix indentation in the DockerContainerExecutor code

2015-03-28 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 27d49e671 - e0ccea33c


YARN-3288. Document and fix indentation in the DockerContainerExecutor code


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e0ccea33
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e0ccea33
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e0ccea33

Branch: refs/heads/trunk
Commit: e0ccea33c9e12f6930b2867e14b1b37569fed659
Parents: 27d49e6
Author: Ravi Prakash ravip...@altiscale.com
Authored: Sat Mar 28 08:00:41 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Sat Mar 28 08:00:41 2015 -0700

--
 hadoop-yarn-project/CHANGES.txt |   2 +
 .../server/nodemanager/ContainerExecutor.java   |  18 +-
 .../nodemanager/DockerContainerExecutor.java| 229 +++
 .../launcher/ContainerLaunch.java   |   8 +-
 .../TestDockerContainerExecutor.java|  98 
 .../TestDockerContainerExecutorWithMocks.java   | 110 +
 6 files changed, 277 insertions(+), 188 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e0ccea33/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index d6ded77..fb233e3 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -81,6 +81,8 @@ Release 2.8.0 - UNRELEASED
 
 YARN-3397. yarn rmadmin should skip -failover. (J.Andreina via kasha)
 
+YARN-3288. Document and fix indentation in the DockerContainerExecutor code
+
   OPTIMIZATIONS
 
 YARN-3339. TestDockerContainerExecutor should pull a single image and not

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e0ccea33/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
index 377fd1d..1c670a1 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/ContainerExecutor.java
@@ -210,8 +210,22 @@ public abstract class ContainerExecutor implements 
Configurable {
 }
   }
 
-  public void writeLaunchEnv(OutputStream out, Map<String, String> environment, Map<Path, List<String>> resources, List<String> command) throws IOException{
-ContainerLaunch.ShellScriptBuilder sb = ContainerLaunch.ShellScriptBuilder.create();
+  /**
+   * This method writes out the launch environment of a container. This can be
+   * overridden by extending ContainerExecutors to provide different behaviors
+   * @param out the output stream to which the environment is written (usually
+   * a script file which will be executed by the Launcher)
+   * @param environment The environment variables and their values
+   * @param resources The resources which have been localized for this container
+   * Symlinks will be created to these localized resources
+   * @param command The command that will be run.
+   * @throws IOException if any errors happened writing to the OutputStream,
+   * while creating symlinks
+   */
+  public void writeLaunchEnv(OutputStream out, Map<String, String> environment,
+    Map<Path, List<String>> resources, List<String> command) throws IOException{
+ContainerLaunch.ShellScriptBuilder sb =
+  ContainerLaunch.ShellScriptBuilder.create();
 if (environment != null) {
 for (Map.Entry<String,String> env : environment.entrySet()) {
 sb.env(env.getKey().toString(), env.getValue().toString());

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e0ccea33/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java
 

hadoop git commit: YARN-3339. TestDockerContainerExecutor should pull a single image and not the entire centos repository. (Ravindra Kumar Naik via raviprak)

2015-03-16 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 7522a643f - 56085203c


YARN-3339. TestDockerContainerExecutor should pull a single image and not the 
entire centos repository. (Ravindra Kumar Naik via raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/56085203
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/56085203
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/56085203

Branch: refs/heads/trunk
Commit: 56085203c43b8f2561bf3745910e03f8ac176a67
Parents: 7522a64
Author: Ravi Prakash ravip...@altiscale.com
Authored: Mon Mar 16 16:17:58 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Mon Mar 16 16:17:58 2015 -0700

--
 hadoop-yarn-project/CHANGES.txt   | 3 +++
 .../yarn/server/nodemanager/TestDockerContainerExecutor.java  | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/56085203/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index b8e07a0..cb68480 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -58,6 +58,9 @@ Release 2.8.0 - UNRELEASED
 
   OPTIMIZATIONS
 
+YARN-3339. TestDockerContainerExecutor should pull a single image and not
+the entire centos repository. (Ravindra Kumar Naik via raviprak)
+
   BUG FIXES
 
 Release 2.7.0 - UNRELEASED

http://git-wip-us.apache.org/repos/asf/hadoop/blob/56085203/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
index e43ac2e..ac02542 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
@@ -78,7 +78,7 @@ public class TestDockerContainerExecutor {
   private int id = 0;
   private String appSubmitter;
   private String dockerUrl;
-  private String testImage = "centos";
+  private String testImage = "centos:latest";
   private String dockerExec;
   private String containerIdStr;
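
The one-line change above matters because, with the Docker client of that era, pulling a bare repository name fetched every tag in the repository, while an explicit tag pins a single image. A tiny illustrative sketch of the command the test ends up issuing (the executable path is a placeholder):

    public class PullSingleImage {
      public static void main(String[] args) {
        String dockerExec = "/usr/bin/docker";  // placeholder path
        String testImage = "centos:latest";     // a bare "centos" pulled every tag
        // The test shells out to something of this shape before running containers.
        System.out.println(dockerExec + " pull " + testImage);
      }
    }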
 



hadoop git commit: YARN-3339. TestDockerContainerExecutor should pull a single image and not the entire centos repository. (Ravindra Kumar Naik via raviprak)

2015-03-16 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 a20427d33 - 9f227ad69


YARN-3339. TestDockerContainerExecutor should pull a single image and not the 
entire centos repository. (Ravindra Kumar Naik via raviprak)

(cherry picked from commit 56085203c43b8f2561bf3745910e03f8ac176a67)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9f227ad6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9f227ad6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9f227ad6

Branch: refs/heads/branch-2
Commit: 9f227ad69684308b5c7a5d86e69b48f11707c6e9
Parents: a20427d
Author: Ravi Prakash ravip...@altiscale.com
Authored: Mon Mar 16 16:17:58 2015 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Mon Mar 16 16:18:42 2015 -0700

--
 hadoop-yarn-project/CHANGES.txt   | 3 +++
 .../yarn/server/nodemanager/TestDockerContainerExecutor.java  | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9f227ad6/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 9d3be99..538b4dd 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -10,6 +10,9 @@ Release 2.8.0 - UNRELEASED
 
   OPTIMIZATIONS
 
+YARN-3339. TestDockerContainerExecutor should pull a single image and not
+the entire centos repository. (Ravindra Kumar Naik via raviprak)
+
   BUG FIXES
 
 Release 2.7.0 - UNRELEASED

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9f227ad6/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
index e43ac2e..ac02542 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestDockerContainerExecutor.java
@@ -78,7 +78,7 @@ public class TestDockerContainerExecutor {
   private int id = 0;
   private String appSubmitter;
   private String dockerUrl;
-  private String testImage = "centos";
+  private String testImage = "centos:latest";
   private String dockerExec;
   private String containerIdStr;
 



hadoop git commit: HADOOP-11596. Allow smart-apply-patch.sh to add new files in binary git patches (raviprak)

2015-02-17 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk db6606223 - 13d1ba996


HADOOP-11596. Allow smart-apply-patch.sh to add new files in binary git patches 
(raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/13d1ba99
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/13d1ba99
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/13d1ba99

Branch: refs/heads/trunk
Commit: 13d1ba9965236381e9014fce22120b999c36189a
Parents: db66062
Author: Ravi Prakash ravip...@altiscale.com
Authored: Tue Feb 17 11:13:47 2015 -0800
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Tue Feb 17 11:13:47 2015 -0800

--
 dev-support/smart-apply-patch.sh| 4 ++--
 hadoop-common-project/hadoop-common/CHANGES.txt | 3 +++
 2 files changed, 5 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/13d1ba99/dev-support/smart-apply-patch.sh
--
diff --git a/dev-support/smart-apply-patch.sh b/dev-support/smart-apply-patch.sh
index 3542fb4..03bc4f8 100755
--- a/dev-support/smart-apply-patch.sh
+++ b/dev-support/smart-apply-patch.sh
@@ -38,8 +38,8 @@ is_git_diff_with_prefix() {
 fi
 if [[ $line =~ ^\+\+\+\  ]] ||
[[ $line =~ ^\-\-\-\  ]]; then
-  if ! [[ $line =~ ^[ab]/ ]]; then
-return 1 # All +++ and --- lines must start with a/ or b/.
+  if ! [[ $line =~ ^[ab]/ || $line =~ ^/dev/null ]]; then
+return 1 # All +++ and --- lines must start with a/ or b/ or be /dev/null.
   fi
 fi
   done < $1

http://git-wip-us.apache.org/repos/asf/hadoop/blob/13d1ba99/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 6e43872..0de835a 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -612,6 +612,9 @@ Release 2.7.0 - UNRELEASED
 
 HADOOP-11522. Update S3A Documentation. (Thomas Demoor via stevel)
 
+HADOOP-11596. Allow smart-apply-patch.sh to add new files in binary git
+patches (raviprak)
+
   OPTIMIZATIONS
 
 HADOOP-11323. WritableComparator#compare keeps reference to byte array.
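
For readers who want the relaxed rule outside the shell, a hedged Java rendering of the same check (the shell's "+++ "/"--- " prefix handling is folded into the pattern; this class is illustrative, not part of the patch):

    import java.util.regex.Pattern;

    public class GitDiffPathCheck {
      // A +++/--- target in a prefixed git diff must start with a/ or b/,
      // or be /dev/null (the marker git uses for added and deleted files).
      private static final Pattern ACCEPTABLE =
          Pattern.compile("^(?:[ab]/|/dev/null)");

      static boolean isAcceptable(String pathToken) {
        return ACCEPTABLE.matcher(pathToken).find();
      }

      public static void main(String[] args) {
        System.out.println(isAcceptable("a/dev-support/foo.sh")); // true
        System.out.println(isAcceptable("/dev/null"));            // true
        System.out.println(isAcceptable("dev-support/foo.sh"));   // false
      }
    }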



hadoop git commit: HADOOP-11473. test-patch says -1 overall even when all checks are +1 (Jason Lowe via raviprak)

2015-01-12 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 6bbf9fdd0 - f3507fa35


HADOOP-11473. test-patch says -1 overall even when all checks are +1
(Jason Lowe via raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f3507fa3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f3507fa3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f3507fa3

Branch: refs/heads/trunk
Commit: f3507fa35cd5edf7be8945e84beb29965276d80b
Parents: 6bbf9fd
Author: Ravi Prakash ravip...@altiscale.com
Authored: Mon Jan 12 14:20:11 2015 -0800
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Mon Jan 12 14:20:11 2015 -0800

--
 dev-support/test-patch.sh   | 1 -
 hadoop-common-project/hadoop-common/CHANGES.txt | 3 +++
 2 files changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f3507fa3/dev-support/test-patch.sh
--
diff --git a/dev-support/test-patch.sh b/dev-support/test-patch.sh
index ece3ddf..b0fbb80 100755
--- a/dev-support/test-patch.sh
+++ b/dev-support/test-patch.sh
@@ -992,7 +992,6 @@ if [[ $JENKINS == true || $RUN_TESTS == true ]] ; then
   runTests
   (( RESULT = RESULT + $? ))
 fi
-(( RESULT = RESULT + $? ))
 JIRA_COMMENT_FOOTER="Test results: $BUILD_URL/testReport/
 $JIRA_COMMENT_FOOTER"
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f3507fa3/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 679989a..a9300c2 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -148,6 +148,9 @@ Trunk (Unreleased)
 
   BUG FIXES
 
+HADOOP-11473. test-patch says -1 overall even when all checks are +1
+(Jason Lowe via raviprak)
+
 HADOOP-9451. Fault single-layer config if node group topology is enabled.
 (Junping Du via llu)
 



hadoop git commit: HADOOP-11400. GraphiteSink does not reconnect to Graphite after 'broken pipe' (Kamil Gorlo via raviprak)

2015-01-10 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk a26040626 - 4d2914210


HADOOP-11400. GraphiteSink does not reconnect to Graphite after 'broken pipe'
(Kamil Gorlo via raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4d291421
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4d291421
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4d291421

Branch: refs/heads/trunk
Commit: 4d2914210053f28c95094aa59e48f8e84c55a2c7
Parents: a260406
Author: Ravi Prakash ravip...@apache.org
Authored: Sat Jan 10 08:35:40 2015 -0800
Committer: Ravi Prakash ravip...@apache.org
Committed: Sat Jan 10 08:35:40 2015 -0800

--
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 .../hadoop/metrics2/sink/GraphiteSink.java  | 141 ++-
 .../metrics2/impl/TestGraphiteMetrics.java  | 111 +--
 3 files changed, 175 insertions(+), 80 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4d291421/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 90bfe3e..679989a 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -488,6 +488,9 @@ Release 2.7.0 - UNRELEASED
 
   BUG FIXES
 
+HADOOP-11400. GraphiteSink does not reconnect to Graphite after 'broken pipe'
+(Kamil Gorlo via raviprak)
+
 HADOOP-11236. NFS: Fix javadoc warning in RpcProgram.java (Abhiraj Butala 
via harsh)
 
 HADOOP-11166. Remove ulimit from test-patch.sh. (wang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4d291421/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
index e72fe24..e46a654 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
@@ -18,25 +18,24 @@
 
 package org.apache.hadoop.metrics2.sink;
 
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.io.Closeable;
-import java.net.Socket;
-
 import org.apache.commons.configuration.SubsetConfiguration;
 import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.metrics2.AbstractMetric;
 import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsSink;
 import org.apache.hadoop.metrics2.MetricsTag;
 
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.net.Socket;
+
 /**
  * A metrics sink that writes to a Graphite server
  */
@@ -47,30 +46,22 @@ public class GraphiteSink implements MetricsSink, Closeable 
{
 private static final String SERVER_HOST_KEY = "server_host";
 private static final String SERVER_PORT_KEY = "server_port";
 private static final String METRICS_PREFIX = "metrics_prefix";
-private Writer writer = null;
 private String metricsPrefix = null;
-private Socket socket = null;
+private Graphite graphite = null;
 
 @Override
 public void init(SubsetConfiguration conf) {
 // Get Graphite host configurations.
-String serverHost = conf.getString(SERVER_HOST_KEY);
-Integer serverPort = Integer.parseInt(conf.getString(SERVER_PORT_KEY));
+final String serverHost = conf.getString(SERVER_HOST_KEY);
+final int serverPort = 
Integer.parseInt(conf.getString(SERVER_PORT_KEY));
 
 // Get Graphite metrics graph prefix.
 metricsPrefix = conf.getString(METRICS_PREFIX);
 if (metricsPrefix == null)
 metricsPrefix = "";
 
-try {
-// Open an connection to Graphite server.
-socket = new Socket(serverHost, serverPort);
-writer = new OutputStreamWriter(
-socket.getOutputStream(), Charsets.UTF_8);
-} catch (Exception e) {
-throw new MetricsException("Error creating connection, "
-+ serverHost + ":" + serverPort, e
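
The truncated hunk above shows the old pattern being removed: the socket and writer were opened once in init() and never re-established, so the first 'broken pipe' poisoned the sink permanently. The fix funnels all socket handling through an inner Graphite helper; a hedged sketch of that connect/write/close shape (details differ from the real inner class):

    import java.io.IOException;
    import java.io.OutputStreamWriter;
    import java.io.Writer;
    import java.net.Socket;
    import java.nio.charset.StandardCharsets;

    // Illustrative connect/write/close helper: a failed write tears the
    // connection down, and the next write() reconnects instead of reusing
    // a socket that is already broken.
    public class MiniGraphite {
      private final String host;
      private final int port;
      private Socket socket;
      private Writer writer;

      public MiniGraphite(String host, int port) {
        this.host = host;
        this.port = port;
      }

      public void connect() throws IOException {
        close();  // drop any stale connection before opening a fresh one
        socket = new Socket(host, port);
        writer = new OutputStreamWriter(socket.getOutputStream(),
            StandardCharsets.UTF_8);
      }

      public void write(String msg) throws IOException {
        if (writer == null) {
          connect();
        }
        try {
          writer.write(msg);
          writer.flush();
        } catch (IOException e) {
          close();  // so the next write() reconnects rather than failing forever
          throw e;
        }
      }

      public void close() {
        try { if (writer != null) writer.close(); } catch (IOException ignored) { }
        try { if (socket != null) socket.close(); } catch (IOException ignored) { }
        writer = null;
        socket = null;
      }
    }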

hadoop git commit: HADOOP-11400. GraphiteSink does not reconnect to Graphite after 'broken pipe' (Kamil Gorlo via raviprak)

2015-01-10 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 36b3dcaab - 619fbf1fa


HADOOP-11400. GraphiteSink does not reconnect to Graphite after 'broken pipe'
(Kamil Gorlo via raviprak)

(cherry picked from commit 4d2914210053f28c95094aa59e48f8e84c55a2c7)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/619fbf1f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/619fbf1f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/619fbf1f

Branch: refs/heads/branch-2
Commit: 619fbf1fa4b1c5e142a48fc9746d8527d0096da3
Parents: 36b3dca
Author: Ravi Prakash ravip...@apache.org
Authored: Sat Jan 10 08:35:40 2015 -0800
Committer: Ravi Prakash ravip...@apache.org
Committed: Sat Jan 10 08:37:35 2015 -0800

--
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 .../hadoop/metrics2/sink/GraphiteSink.java  | 141 ++-
 .../metrics2/impl/TestGraphiteMetrics.java  | 111 +--
 3 files changed, 175 insertions(+), 80 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/619fbf1f/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 71283a5..7c7596a 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -128,6 +128,9 @@ Release 2.7.0 - UNRELEASED
 
   BUG FIXES
 
+HADOOP-11400. GraphiteSink does not reconnect to Graphite after 'broken pipe'
+(Kamil Gorlo via raviprak)
+
 HADOOP-11236. NFS: Fix javadoc warning in RpcProgram.java (Abhiraj Butala 
via harsh)
 
 HADOOP-11166. Remove ulimit from test-patch.sh. (wang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/619fbf1f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
index e72fe24..e46a654 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
@@ -18,25 +18,24 @@
 
 package org.apache.hadoop.metrics2.sink;
 
-import java.io.IOException;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.io.Closeable;
-import java.net.Socket;
-
 import org.apache.commons.configuration.SubsetConfiguration;
 import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.metrics2.AbstractMetric;
 import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsSink;
 import org.apache.hadoop.metrics2.MetricsTag;
 
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.net.Socket;
+
 /**
  * A metrics sink that writes to a Graphite server
  */
@@ -47,30 +46,22 @@ public class GraphiteSink implements MetricsSink, Closeable 
{
 private static final String SERVER_HOST_KEY = "server_host";
 private static final String SERVER_PORT_KEY = "server_port";
 private static final String METRICS_PREFIX = "metrics_prefix";
-private Writer writer = null;
 private String metricsPrefix = null;
-private Socket socket = null;
+private Graphite graphite = null;
 
 @Override
 public void init(SubsetConfiguration conf) {
 // Get Graphite host configurations.
-String serverHost = conf.getString(SERVER_HOST_KEY);
-Integer serverPort = Integer.parseInt(conf.getString(SERVER_PORT_KEY));
+final String serverHost = conf.getString(SERVER_HOST_KEY);
+final int serverPort = 
Integer.parseInt(conf.getString(SERVER_PORT_KEY));
 
 // Get Graphite metrics graph prefix.
 metricsPrefix = conf.getString(METRICS_PREFIX);
 if (metricsPrefix == null)
 metricsPrefix = "";
 
-try {
-// Open an connection to Graphite server.
-socket = new Socket(serverHost, serverPort);
-writer = new OutputStreamWriter(
-socket.getOutputStream(), Charsets.UTF_8);
-} catch (Exception e) {
-throw new MetricsException("Error creating

hadoop git commit: MAPREDUCE-5918. LineRecordReader can return the same decompressor to CodecPool multiple times (Sergey Murylev via raviprak)

2014-11-14 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk d005404ef - 1a1dcce82


MAPREDUCE-5918. LineRecordReader can return the same decompressor to CodecPool 
multiple times (Sergey Murylev via raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1a1dcce8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1a1dcce8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1a1dcce8

Branch: refs/heads/trunk
Commit: 1a1dcce827d8747a5629151afa335598fbc94f9e
Parents: d005404
Author: Ravi Prakash ravip...@apache.org
Authored: Fri Nov 14 03:45:53 2014 -0800
Committer: Ravi Prakash ravip...@apache.org
Committed: Fri Nov 14 03:45:53 2014 -0800

--
 .../apache/hadoop/io/compress/CodecPool.java| 54 +++-
 .../hadoop/io/compress/TestCodecPool.java   | 47 ++---
 hadoop-mapreduce-project/CHANGES.txt|  3 ++
 .../apache/hadoop/mapred/LineRecordReader.java  |  1 +
 .../mapreduce/lib/input/LineRecordReader.java   |  1 +
 .../hadoop/mapred/TestLineRecordReader.java | 37 ++
 .../lib/input/TestLineRecordReader.java | 38 ++
 7 files changed, 150 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a1dcce8/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
index 11d88f1..bb566de 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.io.compress;
 
-import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.HashMap;
-import java.util.List;
+import java.util.Set;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -47,15 +47,15 @@ public class CodecPool {
* A global compressor pool used to save the expensive 
* construction/destruction of (possibly native) decompression codecs.
*/
-  private static final Map<Class<Compressor>, List<Compressor>> compressorPool = 
-new HashMap<Class<Compressor>, List<Compressor>>();
+  private static final Map<Class<Compressor>, Set<Compressor>> compressorPool =
+new HashMap<Class<Compressor>, Set<Compressor>>();
   
   /**
* A global decompressor pool used to save the expensive 
* construction/destruction of (possibly native) decompression codecs.
*/
-  private static final Map<Class<Decompressor>, List<Decompressor>> decompressorPool = 
-new HashMap<Class<Decompressor>, List<Decompressor>>();
+  private static final Map<Class<Decompressor>, Set<Decompressor>> decompressorPool =
+new HashMap<Class<Decompressor>, Set<Decompressor>>();
 
   private static <T> LoadingCache<Class<T>, AtomicInteger> createCache(
   Class<T> klass) {
@@ -80,20 +80,21 @@ public class CodecPool {
   private static final LoadingCache<Class<Decompressor>, AtomicInteger> decompressorCounts =
   createCache(Decompressor.class);
 
-  private static <T> T borrow(Map<Class<T>, List<T>> pool,
+  private static <T> T borrow(Map<Class<T>, Set<T>> pool,
  Class<? extends T> codecClass) {
 T codec = null;
 
 // Check if an appropriate codec is available
-List<T> codecList;
+Set<T> codecSet;
 synchronized (pool) {
-  codecList = pool.get(codecClass);
+  codecSet = pool.get(codecClass);
 }
 
-if (codecList != null) {
-  synchronized (codecList) {
-if (!codecList.isEmpty()) {
-  codec = codecList.remove(codecList.size() - 1);
+if (codecSet != null) {
+  synchronized (codecSet) {
+if (!codecSet.isEmpty()) {
+  codec = codecSet.iterator().next();
+  codecSet.remove(codec);
 }
   }
 }
@@ -101,22 +102,23 @@ public class CodecPool {
 return codec;
   }
 
-  private static <T> void payback(Map<Class<T>, List<T>> pool, T codec) {
+  private static <T> boolean payback(Map<Class<T>, Set<T>> pool, T codec) {
 if (codec != null) {
   Class<T> codecClass = ReflectionUtils.getClass(codec);
-  List<T> codecList;
+  Set<T> codecSet;
   synchronized (pool) {
-codecList = pool.get(codecClass);
-if (codecList == null) {
-  codecList = new ArrayList<T>();
-  pool.put(codecClass, codecList);
+codecSet = pool.get(codecClass);
+if (codecSet == null) {
+  codecSet = new HashSet<T>();
+  pool.put(codecClass, codecSet);
 }
   }
 
-  synchronized (codecList) {
-codecList.add(codec
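
The List-to-Set switch above is the whole fix in miniature: a List happily stores two references to the same codec after a double return, so two later borrowers can receive the same instance, while a Set makes the second return a no-op whose boolean result the new payback() can surface. A small self-contained demonstration:

    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class DoubleReturnDemo {
      public static void main(String[] args) {
        Object codec = new Object();

        // List-backed pool: a double return leaves two entries for one codec,
        // so two callers can later "borrow" the same instance concurrently.
        List<Object> listPool = new ArrayList<Object>();
        listPool.add(codec);
        listPool.add(codec);
        System.out.println("list pool size after double return: " + listPool.size()); // 2

        // Set-backed pool: the second add is a no-op, and its false return value
        // is how a boolean payback() can flag the buggy caller.
        Set<Object> setPool = new HashSet<Object>();
        System.out.println("first payback ok: " + setPool.add(codec));   // true
        System.out.println("second payback ok: " + setPool.add(codec));  // false
      }
    }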

hadoop git commit: MAPREDUCE-5918. LineRecordReader can return the same decompressor to CodecPool multiple times (Sergey Murylev via raviprak)

2014-11-14 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 beb184ac5 - 45692effe


MAPREDUCE-5918. LineRecordReader can return the same decompressor to CodecPool 
multiple times (Sergey Murylev via raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/45692eff
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/45692eff
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/45692eff

Branch: refs/heads/branch-2
Commit: 45692effe4a4ae3374442522e3fa6013a1ad0b0b
Parents: beb184a
Author: Ravi Prakash ravip...@apache.org
Authored: Fri Nov 14 03:45:53 2014 -0800
Committer: Ravi Prakash ravip...@apache.org
Committed: Fri Nov 14 03:48:28 2014 -0800

--
 .../apache/hadoop/io/compress/CodecPool.java| 54 +++-
 .../hadoop/io/compress/TestCodecPool.java   | 47 ++---
 hadoop-mapreduce-project/CHANGES.txt|  3 ++
 .../apache/hadoop/mapred/LineRecordReader.java  |  1 +
 .../mapreduce/lib/input/LineRecordReader.java   |  1 +
 .../hadoop/mapred/TestLineRecordReader.java | 37 ++
 .../lib/input/TestLineRecordReader.java | 38 ++
 7 files changed, 150 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/45692eff/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
index 11d88f1..bb566de 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.io.compress;
 
-import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.HashMap;
-import java.util.List;
+import java.util.Set;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -47,15 +47,15 @@ public class CodecPool {
* A global compressor pool used to save the expensive 
* construction/destruction of (possibly native) decompression codecs.
*/
-  private static final Map<Class<Compressor>, List<Compressor>> compressorPool = 
-new HashMap<Class<Compressor>, List<Compressor>>();
+  private static final Map<Class<Compressor>, Set<Compressor>> compressorPool =
+new HashMap<Class<Compressor>, Set<Compressor>>();
   
   /**
* A global decompressor pool used to save the expensive 
* construction/destruction of (possibly native) decompression codecs.
*/
-  private static final Map<Class<Decompressor>, List<Decompressor>> decompressorPool = 
-new HashMap<Class<Decompressor>, List<Decompressor>>();
+  private static final Map<Class<Decompressor>, Set<Decompressor>> decompressorPool =
+new HashMap<Class<Decompressor>, Set<Decompressor>>();
 
   private static <T> LoadingCache<Class<T>, AtomicInteger> createCache(
   Class<T> klass) {
@@ -80,20 +80,21 @@ public class CodecPool {
   private static final LoadingCache<Class<Decompressor>, AtomicInteger> decompressorCounts =
   createCache(Decompressor.class);
 
-  private static <T> T borrow(Map<Class<T>, List<T>> pool,
+  private static <T> T borrow(Map<Class<T>, Set<T>> pool,
  Class<? extends T> codecClass) {
 T codec = null;
 
 // Check if an appropriate codec is available
-List<T> codecList;
+Set<T> codecSet;
 synchronized (pool) {
-  codecList = pool.get(codecClass);
+  codecSet = pool.get(codecClass);
 }
 
-if (codecList != null) {
-  synchronized (codecList) {
-if (!codecList.isEmpty()) {
-  codec = codecList.remove(codecList.size() - 1);
+if (codecSet != null) {
+  synchronized (codecSet) {
+if (!codecSet.isEmpty()) {
+  codec = codecSet.iterator().next();
+  codecSet.remove(codec);
 }
   }
 }
@@ -101,22 +102,23 @@ public class CodecPool {
 return codec;
   }
 
-  private static <T> void payback(Map<Class<T>, List<T>> pool, T codec) {
+  private static <T> boolean payback(Map<Class<T>, Set<T>> pool, T codec) {
 if (codec != null) {
   Class<T> codecClass = ReflectionUtils.getClass(codec);
-  List<T> codecList;
+  Set<T> codecSet;
   synchronized (pool) {
-codecList = pool.get(codecClass);
-if (codecList == null) {
-  codecList = new ArrayList<T>();
-  pool.put(codecClass, codecList);
+codecSet = pool.get(codecClass);
+if (codecSet == null) {
+  codecSet = new HashSet<T>();
+  pool.put(codecClass, codecSet);
 }
   }
 
-  synchronized (codecList) {
-codecList.add

hadoop git commit: YARN-1964. Create Docker analog of the LinuxContainerExecutor in YARN

2014-11-12 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.6.0 fcd3067ab - 28702a909


YARN-1964. Create Docker analog of the LinuxContainerExecutor in YARN


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/28702a90
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/28702a90
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/28702a90

Branch: refs/heads/branch-2.6.0
Commit: 28702a909e6b011a4343db46bdb84fc417b397a5
Parents: fcd3067
Author: Ravi Prakash ravip...@apache.org
Authored: Tue Nov 11 21:28:11 2014 -0800
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Wed Nov 12 10:24:54 2014 -0800

--
 hadoop-project/src/site/site.xml|   1 +
 hadoop-yarn-project/CHANGES.txt |   3 +
 .../hadoop/yarn/conf/YarnConfiguration.java |  14 +-
 .../src/main/resources/yarn-default.xml |  12 +-
 .../server/nodemanager/ContainerExecutor.java   |  31 +
 .../nodemanager/DockerContainerExecutor.java| 794 +++
 .../launcher/ContainerLaunch.java   |  36 +-
 .../TestDockerContainerExecutor.java| 213 +
 .../TestDockerContainerExecutorWithMocks.java   | 259 ++
 .../launcher/TestContainerLaunch.java   |  10 +-
 .../src/site/apt/DockerContainerExecutor.apt.vm | 200 +
 11 files changed, 1533 insertions(+), 40 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/28702a90/hadoop-project/src/site/site.xml
--
diff --git a/hadoop-project/src/site/site.xml b/hadoop-project/src/site/site.xml
index 4a2c221..1b649fc 100644
--- a/hadoop-project/src/site/site.xml
+++ b/hadoop-project/src/site/site.xml
@@ -125,6 +125,7 @@
   <item name="YARN Commands" href="hadoop-yarn/hadoop-yarn-site/YarnCommands.html"/>
   <item name="Scheduler Load Simulator" href="hadoop-sls/SchedulerLoadSimulator.html"/>
   <item name="NodeManager Restart" href="hadoop-yarn/hadoop-yarn-site/NodeManagerRestart.html"/>
+  <item name="DockerContainerExecutor" href="hadoop-yarn/hadoop-yarn-site/DockerContainerExecutor.html"/>
 </menu>
 
 <menu name="YARN REST APIs" inherit="top">

http://git-wip-us.apache.org/repos/asf/hadoop/blob/28702a90/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index d206553..9fd6e31 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -6,6 +6,9 @@ Release 2.6.0 - 2014-11-15
 
   NEW FEATURES
 
+YARN-1964. Create Docker analog of the LinuxContainerExecutor in YARN. 
(Abin 
+Shahab via raviprak)
+
 YARN-2131. Add a way to format the RMStateStore. (Robert Kanter via kasha)
 
 YARN-1367. Changed NM to not kill containers on NM resync if RM 
work-preserving

http://git-wip-us.apache.org/repos/asf/hadoop/blob/28702a90/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 20edbdc..83cbfc5 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -892,7 +892,19 @@ public class YarnConfiguration extends Configuration {
   /** The arguments to pass to the health check script.*/
   public static final String NM_HEALTH_CHECK_SCRIPT_OPTS = 
 NM_PREFIX + "health-checker.script.opts";
-  
+
+  /** The Docker image name(For DockerContainerExecutor).*/
+  public static final String NM_DOCKER_CONTAINER_EXECUTOR_IMAGE_NAME =
+NM_PREFIX + "docker-container-executor.image-name";
+
+  /** The name of the docker executor (For DockerContainerExecutor).*/
+  public static final String NM_DOCKER_CONTAINER_EXECUTOR_EXEC_NAME =
+NM_PREFIX + "docker-container-executor.exec-name";
+
+  /** The default docker executor (For DockerContainerExecutor).*/
+  public static final String NM_DEFAULT_DOCKER_CONTAINER_EXECUTOR_EXEC_NAME =
+  "/usr/bin/docker";
+
   /** The path to the Linux container executor.*/
   public static final String NM_LINUX_CONTAINER_EXECUTOR_PATH =
 NM_PREFIX + "linux-container-executor.path";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/28702a90/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
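
Operationally, the new keys above are ordinary NodeManager configuration. A hedged sketch of setting them programmatically; the image name is a placeholder, and only the constants come from the YarnConfiguration hunk:

    import org.apache.hadoop.yarn.conf.YarnConfiguration;

    public class DockerExecutorConfigExample {
      public static void main(String[] args) {
        YarnConfiguration conf = new YarnConfiguration();
        // Placeholder image; any image the NodeManager hosts can run would do.
        conf.set(YarnConfiguration.NM_DOCKER_CONTAINER_EXECUTOR_IMAGE_NAME,
            "example/hadoop-base:latest");
        conf.set(YarnConfiguration.NM_DOCKER_CONTAINER_EXECUTOR_EXEC_NAME,
            YarnConfiguration.NM_DEFAULT_DOCKER_CONTAINER_EXECUTOR_EXEC_NAME);
        System.out.println(conf.get(
            YarnConfiguration.NM_DOCKER_CONTAINER_EXECUTOR_IMAGE_NAME));
      }
    }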

hadoop git commit: YARN-1964. Create Docker analog of the LinuxContainerExecutor in YARN

2014-11-11 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 46f6f9d60 - 53f64ee51


YARN-1964. Create Docker analog of the LinuxContainerExecutor in YARN


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/53f64ee5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/53f64ee5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/53f64ee5

Branch: refs/heads/trunk
Commit: 53f64ee516d03f6ec87b41d77c214aa2fe4fa0ed
Parents: 46f6f9d
Author: Ravi Prakash ravip...@apache.org
Authored: Tue Nov 11 21:28:11 2014 -0800
Committer: Ravi Prakash ravip...@apache.org
Committed: Tue Nov 11 21:28:11 2014 -0800

--
 hadoop-project/src/site/site.xml|   1 +
 hadoop-yarn-project/CHANGES.txt |   3 +
 .../hadoop/yarn/conf/YarnConfiguration.java |  14 +-
 .../src/main/resources/yarn-default.xml |  12 +-
 .../server/nodemanager/ContainerExecutor.java   |  31 +
 .../nodemanager/DockerContainerExecutor.java| 794 +++
 .../launcher/ContainerLaunch.java   |  36 +-
 .../TestDockerContainerExecutor.java| 213 +
 .../TestDockerContainerExecutorWithMocks.java   | 259 ++
 .../launcher/TestContainerLaunch.java   |  10 +-
 .../src/site/apt/DockerContainerExecutor.apt.vm | 200 +
 11 files changed, 1533 insertions(+), 40 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/53f64ee5/hadoop-project/src/site/site.xml
--
diff --git a/hadoop-project/src/site/site.xml b/hadoop-project/src/site/site.xml
index 6a61a83..4a2c2f8 100644
--- a/hadoop-project/src/site/site.xml
+++ b/hadoop-project/src/site/site.xml
@@ -124,6 +124,7 @@
   <item name="YARN Commands" href="hadoop-yarn/hadoop-yarn-site/YarnCommands.html"/>
   <item name="Scheduler Load Simulator" href="hadoop-sls/SchedulerLoadSimulator.html"/>
   <item name="NodeManager Restart" href="hadoop-yarn/hadoop-yarn-site/NodeManagerRestart.html"/>
+  <item name="DockerContainerExecutor" href="hadoop-yarn/hadoop-yarn-site/DockerContainerExecutor.html"/>
 </menu>
 
 <menu name="YARN REST APIs" inherit="top">

http://git-wip-us.apache.org/repos/asf/hadoop/blob/53f64ee5/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index b40d9b7..c3bdf3d 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -89,6 +89,9 @@ Release 2.6.0 - 2014-11-15
 
   NEW FEATURES
 
+YARN-1964. Create Docker analog of the LinuxContainerExecutor in YARN. 
(Abin 
+Shahab via raviprak)
+
 YARN-2131. Add a way to format the RMStateStore. (Robert Kanter via kasha)
 
 YARN-1367. Changed NM to not kill containers on NM resync if RM 
work-preserving

http://git-wip-us.apache.org/repos/asf/hadoop/blob/53f64ee5/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index b459ee3..9b57a42 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -891,7 +891,19 @@ public class YarnConfiguration extends Configuration {
   /** The arguments to pass to the health check script.*/
   public static final String NM_HEALTH_CHECK_SCRIPT_OPTS = 
 NM_PREFIX + "health-checker.script.opts";
-  
+
+  /** The Docker image name(For DockerContainerExecutor).*/
+  public static final String NM_DOCKER_CONTAINER_EXECUTOR_IMAGE_NAME =
+NM_PREFIX + "docker-container-executor.image-name";
+
+  /** The name of the docker executor (For DockerContainerExecutor).*/
+  public static final String NM_DOCKER_CONTAINER_EXECUTOR_EXEC_NAME =
+NM_PREFIX + "docker-container-executor.exec-name";
+
+  /** The default docker executor (For DockerContainerExecutor).*/
+  public static final String NM_DEFAULT_DOCKER_CONTAINER_EXECUTOR_EXEC_NAME =
+  "/usr/bin/docker";
+
   /** The path to the Linux container executor.*/
   public static final String NM_LINUX_CONTAINER_EXECUTOR_PATH =
 NM_PREFIX + "linux-container-executor.path";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/53f64ee5/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
--
diff

hadoop git commit: YARN-1964. Create Docker analog of the LinuxContainerExecutor in YARN

2014-11-11 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 3cb426be4 - d863f54f5


YARN-1964. Create Docker analog of the LinuxContainerExecutor in YARN


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d863f54f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d863f54f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d863f54f

Branch: refs/heads/branch-2
Commit: d863f54f57b606a4eb7d04c2bf44f7a5daadee26
Parents: 3cb426b
Author: Ravi Prakash ravip...@apache.org
Authored: Tue Nov 11 21:28:11 2014 -0800
Committer: Ravi Prakash ravip...@apache.org
Committed: Tue Nov 11 21:29:27 2014 -0800

--
 hadoop-project/src/site/site.xml|   1 +
 hadoop-yarn-project/CHANGES.txt |   3 +
 .../hadoop/yarn/conf/YarnConfiguration.java |  14 +-
 .../src/main/resources/yarn-default.xml |  12 +-
 .../server/nodemanager/ContainerExecutor.java   |  31 +
 .../nodemanager/DockerContainerExecutor.java| 794 +++
 .../launcher/ContainerLaunch.java   |  36 +-
 .../TestDockerContainerExecutor.java| 213 +
 .../TestDockerContainerExecutorWithMocks.java   | 259 ++
 .../launcher/TestContainerLaunch.java   |  10 +-
 .../src/site/apt/DockerContainerExecutor.apt.vm | 200 +
 11 files changed, 1533 insertions(+), 40 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d863f54f/hadoop-project/src/site/site.xml
--
diff --git a/hadoop-project/src/site/site.xml b/hadoop-project/src/site/site.xml
index 4a2c221..1b649fc 100644
--- a/hadoop-project/src/site/site.xml
+++ b/hadoop-project/src/site/site.xml
@@ -125,6 +125,7 @@
  <item name="YARN Commands"
href="hadoop-yarn/hadoop-yarn-site/YarnCommands.html"/>
  <item name="Scheduler Load Simulator"
href="hadoop-sls/SchedulerLoadSimulator.html"/>
  <item name="NodeManager Restart"
href="hadoop-yarn/hadoop-yarn-site/NodeManagerRestart.html"/>
+  <item name="DockerContainerExecutor"
href="hadoop-yarn/hadoop-yarn-site/DockerContainerExecutor.html"/>
 </menu>
 
 <menu name="YARN REST APIs" inherit="top">

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d863f54f/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 2ca9db2..51413ca 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -62,6 +62,9 @@ Release 2.6.0 - 2014-11-15
 
   NEW FEATURES
 
+YARN-1964. Create Docker analog of the LinuxContainerExecutor in YARN. (Abin
+Shahab via raviprak)
+
 YARN-2131. Add a way to format the RMStateStore. (Robert Kanter via kasha)
 
 YARN-1367. Changed NM to not kill containers on NM resync if RM 
work-preserving

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d863f54f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 301631b..7168068 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -891,7 +891,19 @@ public class YarnConfiguration extends Configuration {
  /** The arguments to pass to the health check script.*/
  public static final String NM_HEALTH_CHECK_SCRIPT_OPTS = 
 NM_PREFIX + "health-checker.script.opts";
-  
+
+  /** The Docker image name(For DockerContainerExecutor).*/
+  public static final String NM_DOCKER_CONTAINER_EXECUTOR_IMAGE_NAME =
+    NM_PREFIX + "docker-container-executor.image-name";
+
+  /** The name of the docker executor (For DockerContainerExecutor).*/
+  public static final String NM_DOCKER_CONTAINER_EXECUTOR_EXEC_NAME =
+    NM_PREFIX + "docker-container-executor.exec-name";
+
+  /** The default docker executor (For DockerContainerExecutor).*/
+  public static final String NM_DEFAULT_DOCKER_CONTAINER_EXECUTOR_EXEC_NAME =
+      "/usr/bin/docker";
+
   /** The path to the Linux container executor.*/
   public static final String NM_LINUX_CONTAINER_EXECUTOR_PATH =
 NM_PREFIX + "linux-container-executor.path";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d863f54f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml

git commit: HDFS-7309. XMLUtils.mangleXmlString doesn't seem to handle less than sign. (Colin Patrick McCabe via raviprak)

2014-10-31 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk b6c1188b8 -> c7f81dad3


HDFS-7309. XMLUtils.mangleXmlString doesn't seem to handle less than sign. 
(Colin Patrick McCabe via raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c7f81dad
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c7f81dad
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c7f81dad

Branch: refs/heads/trunk
Commit: c7f81dad30c391822eed7273278cf5885fa59264
Parents: b6c1188
Author: Ravi Prakash ravip...@altiscale.com
Authored: Fri Oct 31 11:22:25 2014 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Fri Oct 31 11:22:25 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +
 .../OfflineEditsXmlLoader.java  |  4 +-
 .../offlineImageViewer/PBImageXmlWriter.java|  3 +-
 .../offlineImageViewer/XmlImageVisitor.java |  3 +-
 .../org/apache/hadoop/hdfs/util/XMLUtils.java   | 79 +---
 .../apache/hadoop/hdfs/util/TestXMLUtils.java   | 31 ++--
 6 files changed, 102 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c7f81dad/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 7010c4a..b1ea79c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -694,6 +694,9 @@ Release 2.6.0 - UNRELEASED
 
   BUG FIXES
 
+HDFS-7309. XMLUtils.mangleXmlString doesn't seem to handle less than sign
+(Colin Patrick McCabe via raviprak)
+
 HDFS-6823. dfs.web.authentication.kerberos.principal shows up in logs for 
 insecure HDFS (Allen Wittenauer via raviprak)
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c7f81dad/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
index cf761cc..1882e58 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
@@ -177,7 +177,7 @@ class OfflineEditsXmlLoader
   
   @Override
   public void endElement (String uri, String name, String qName) {
-String str = XMLUtils.unmangleXmlString(cbuf.toString()).trim();
+String str = XMLUtils.unmangleXmlString(cbuf.toString(), false).trim();
 cbuf = new StringBuffer();
 switch (state) {
 case EXPECT_EDITS_TAG:
@@ -260,4 +260,4 @@ class OfflineEditsXmlLoader
   public void characters (char ch[], int start, int length) {
 cbuf.append(ch, start, length);
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c7f81dad/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
index df00499..fa8c59d 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
@@ -411,7 +411,8 @@ public final class PBImageXmlWriter {
   }
 
   private PBImageXmlWriter o(final String e, final Object v) {
-out.print("<" + e + ">" + XMLUtils.mangleXmlString(v.toString()) + "</" + 
e + ">");
+out.print("<" + e + ">" +
+XMLUtils.mangleXmlString(v.toString(), true) + "</" + e + ">");
 return this;
   }
 }
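The new boolean argument is the heart of the change: PBImageXmlWriter mangles with the flag enabled so a '<' in a file name can no longer produce malformed XML, while OfflineEditsXmlLoader unmangles with 'false', presumably because the SAX parser has already decoded entity references by that point. A minimal sketch of the writer-side call (assumption: the flag toggles entity-reference handling and the method is otherwise a pure string transform):

import org.apache.hadoop.hdfs.util.XMLUtils;

public class MangleSketch {
  public static void main(String[] args) {
    // A value containing '<' previously slipped through unescaped.
    String raw = "dir<name";
    // 'true' mirrors the PBImageXmlWriter call site above.
    System.out.println(XMLUtils.mangleXmlString(raw, true));
  }
}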

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c7f81dad/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java
index 2719109..44593a3 100644
--- 
a/hadoop

git commit: HDFS-7309. XMLUtils.mangleXmlString doesn't seem to handle less than sign. (Colin Patrick McCabe via raviprak)

2014-10-31 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 9b879d713 -> fe8cf3249


HDFS-7309. XMLUtils.mangleXmlString doesn't seem to handle less than sign. 
(Colin Patrick McCabe via raviprak)

(cherry picked from commit c7f81dad30c391822eed7273278cf5885fa59264)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fe8cf324
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fe8cf324
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fe8cf324

Branch: refs/heads/branch-2
Commit: fe8cf32493c66e88b7cc26adb002950531b9a870
Parents: 9b879d7
Author: Ravi Prakash ravip...@altiscale.com
Authored: Fri Oct 31 11:22:25 2014 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Fri Oct 31 11:24:00 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +
 .../OfflineEditsXmlLoader.java  |  4 +-
 .../offlineImageViewer/PBImageXmlWriter.java|  3 +-
 .../offlineImageViewer/XmlImageVisitor.java |  3 +-
 .../org/apache/hadoop/hdfs/util/XMLUtils.java   | 79 +---
 .../apache/hadoop/hdfs/util/TestXMLUtils.java   | 31 ++--
 6 files changed, 102 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fe8cf324/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index f32ebf9..b1d5ec4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -446,6 +446,9 @@ Release 2.6.0 - UNRELEASED
 
   BUG FIXES
 
+HDFS-7309. XMLUtils.mangleXmlString doesn't seem to handle less than sign
+(Colin Patrick McCabe via raviprak)
+
 HDFS-6823. dfs.web.authentication.kerberos.principal shows up in logs for 
 insecure HDFS (Allen Wittenauer via raviprak)
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fe8cf324/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
index cf761cc..1882e58 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
@@ -177,7 +177,7 @@ class OfflineEditsXmlLoader
   
   @Override
   public void endElement (String uri, String name, String qName) {
-String str = XMLUtils.unmangleXmlString(cbuf.toString()).trim();
+String str = XMLUtils.unmangleXmlString(cbuf.toString(), false).trim();
 cbuf = new StringBuffer();
 switch (state) {
 case EXPECT_EDITS_TAG:
@@ -260,4 +260,4 @@ class OfflineEditsXmlLoader
   public void characters (char ch[], int start, int length) {
 cbuf.append(ch, start, length);
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fe8cf324/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
index df00499..fa8c59d 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
@@ -411,7 +411,8 @@ public final class PBImageXmlWriter {
   }
 
   private PBImageXmlWriter o(final String e, final Object v) {
-out.print("<" + e + ">" + XMLUtils.mangleXmlString(v.toString()) + "</" + 
e + ">");
+out.print("<" + e + ">" +
+XMLUtils.mangleXmlString(v.toString(), true) + "</" + e + ">");
 return this;
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fe8cf324/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools

git commit: HDFS-7309. XMLUtils.mangleXmlString doesn't seem to handle less than sign. (Colin Patrick McCabe via raviprak)

2014-10-31 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.6 16511ef40 -> b7296ee84


HDFS-7309. XMLUtils.mangleXmlString doesn't seem to handle less than sign. 
(Colin Patrick McCabe via raviprak)

(cherry picked from commit c7f81dad30c391822eed7273278cf5885fa59264)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b7296ee8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b7296ee8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b7296ee8

Branch: refs/heads/branch-2.6
Commit: b7296ee84f19ebd2ab45f36eea1648279986abcb
Parents: 16511ef
Author: Ravi Prakash ravip...@altiscale.com
Authored: Fri Oct 31 11:22:25 2014 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Fri Oct 31 11:24:38 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +
 .../OfflineEditsXmlLoader.java  |  4 +-
 .../offlineImageViewer/PBImageXmlWriter.java|  3 +-
 .../offlineImageViewer/XmlImageVisitor.java |  3 +-
 .../org/apache/hadoop/hdfs/util/XMLUtils.java   | 79 +---
 .../apache/hadoop/hdfs/util/TestXMLUtils.java   | 31 ++--
 6 files changed, 102 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b7296ee8/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 123dcb2..af892d9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -317,6 +317,9 @@ Release 2.6.0 - UNRELEASED
 
   BUG FIXES
 
+HDFS-7309. XMLUtils.mangleXmlString doesn't seem to handle less than sign
+(Colin Patrick McCabe via raviprak)
+
 HDFS-6823. dfs.web.authentication.kerberos.principal shows up in logs for 
 insecure HDFS (Allen Wittenauer via raviprak)
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b7296ee8/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
index cf761cc..1882e58 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
@@ -177,7 +177,7 @@ class OfflineEditsXmlLoader
   
   @Override
   public void endElement (String uri, String name, String qName) {
-String str = XMLUtils.unmangleXmlString(cbuf.toString()).trim();
+String str = XMLUtils.unmangleXmlString(cbuf.toString(), false).trim();
 cbuf = new StringBuffer();
 switch (state) {
 case EXPECT_EDITS_TAG:
@@ -260,4 +260,4 @@ class OfflineEditsXmlLoader
   public void characters (char ch[], int start, int length) {
 cbuf.append(ch, start, length);
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b7296ee8/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
index df00499..fa8c59d 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
@@ -411,7 +411,8 @@ public final class PBImageXmlWriter {
   }
 
   private PBImageXmlWriter o(final String e, final Object v) {
-out.print("<" + e + ">" + XMLUtils.mangleXmlString(v.toString()) + "</" + 
e + ">");
+out.print("<" + e + ">" +
+XMLUtils.mangleXmlString(v.toString(), true) + "</" + e + ">");
 return this;
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b7296ee8/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools

git commit: HADOOP-11182. GraphiteSink emits wrong timestamps (Sascha Coenen via raviprak) Fix CHANGES.txt

2014-10-16 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 233d446be -> b0d6ac92f


HADOOP-11182. GraphiteSink emits wrong timestamps (Sascha Coenen via raviprak) 
Fix CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b0d6ac92
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b0d6ac92
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b0d6ac92

Branch: refs/heads/trunk
Commit: b0d6ac92fe1f51cf9742abab778200d2d0eb99fa
Parents: 233d446
Author: Ravi Prakash ravip...@altiscale.com
Authored: Thu Oct 16 16:49:48 2014 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Thu Oct 16 16:49:48 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b0d6ac92/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index f617137..d113c4a 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -661,7 +661,7 @@ Release 2.6.0 - UNRELEASED
 
   BUG FIXES
 
-HADOOP-11181. GraphiteSink emits wrong timestamps (Sascha Coenen via 
raviprak)
+HADOOP-11182. GraphiteSink emits wrong timestamps (Sascha Coenen via 
raviprak)
 
 HADOOP-10781. Unportable getgrouplist() usage breaks FreeBSD (Dmitry
 Sivachenko via Colin Patrick McCabe)



git commit: HADOOP-11182. GraphiteSink emits wrong timestamps (Sascha Coenen via raviprak) Fix CHANGES.txt

2014-10-16 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 caba212f1 -> a85767f37


HADOOP-11182. GraphiteSink emits wrong timestamps (Sascha Coenen via raviprak) 
Fix CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a85767f3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a85767f3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a85767f3

Branch: refs/heads/branch-2
Commit: a85767f376cb8c86411bf02207f582aa9a9c5240
Parents: caba212
Author: Ravi Prakash ravip...@altiscale.com
Authored: Thu Oct 16 16:49:48 2014 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Thu Oct 16 16:53:38 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a85767f3/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index a5634b4..a736e63 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -316,7 +316,7 @@ Release 2.6.0 - UNRELEASED
 
   BUG FIXES
 
-HADOOP-11181. GraphiteSink emits wrong timestamps (Sascha Coenen via 
raviprak)
+HADOOP-11182. GraphiteSink emits wrong timestamps (Sascha Coenen via 
raviprak)
 
 HADOOP-10781. Unportable getgrouplist() usage breaks FreeBSD (Dmitry
 Sivachenko via Colin Patrick McCabe)



git commit: HADOOP-11182. GraphiteSink emits wrong timestamps (Sascha Coenen via raviprak) Fix CHANGES.txt

2014-10-16 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.6 6be833060 -> d4606bff3


HADOOP-11182. GraphiteSink emits wrong timestamps (Sascha Coenen via raviprak) 
Fix CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d4606bff
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d4606bff
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d4606bff

Branch: refs/heads/branch-2.6
Commit: d4606bff3ccb9bc42121ca66a7720f970c00b70e
Parents: 6be8330
Author: Ravi Prakash ravip...@altiscale.com
Authored: Thu Oct 16 16:49:48 2014 -0700
Committer: Ravi Prakash ravip...@altiscale.com
Committed: Thu Oct 16 16:54:21 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4606bff/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 4b70008..d4262f5 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -296,7 +296,7 @@ Release 2.6.0 - UNRELEASED
 
   BUG FIXES
 
-HADOOP-11181. GraphiteSink emits wrong timestamps (Sascha Coenen via 
raviprak)
+HADOOP-11182. GraphiteSink emits wrong timestamps (Sascha Coenen via 
raviprak)
 
 HADOOP-10781. Unportable getgrouplist() usage breaks FreeBSD (Dmitry
 Sivachenko via Colin Patrick McCabe)



git commit: HADOOP-11181. GraphiteSink emits wrong timestamps (Sascha Coenen via raviprak)

2014-10-15 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/trunk 0af1a2b5b -> 466f08792


HADOOP-11181. GraphiteSink emits wrong timestamps (Sascha Coenen via raviprak)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/466f0879
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/466f0879
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/466f0879

Branch: refs/heads/trunk
Commit: 466f08792f11c2f95bf293ac9b6facd7018a5bb8
Parents: 0af1a2b
Author: Ravi Prakash ravip...@apache.org
Authored: Wed Oct 15 15:49:46 2014 -0700
Committer: Ravi Prakash ravip...@apache.org
Committed: Wed Oct 15 15:49:46 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  2 +
 .../hadoop/metrics2/sink/GraphiteSink.java  |  4 +-
 .../metrics2/impl/TestGraphiteMetrics.java  | 44 +++-
 3 files changed, 46 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/466f0879/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 9f43937..f617137 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -661,6 +661,8 @@ Release 2.6.0 - UNRELEASED
 
   BUG FIXES
 
+HADOOP-11181. GraphiteSink emits wrong timestamps (Sascha Coenen via 
raviprak)
+
 HADOOP-10781. Unportable getgrouplist() usage breaks FreeBSD (Dmitry
 Sivachenko via Colin Patrick McCabe)
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/466f0879/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
index f474d82..9bc3f15 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
@@ -89,8 +89,8 @@ public class GraphiteSink implements MetricsSink, Closeable {
 }
 }
 
-// Round the timestamp to second as Graphite accepts it in such format.
-int timestamp = Math.round(record.timestamp() / 1000.0f);
+// The record timestamp is in milliseconds while Graphite expects an
+// epoch time in seconds.
+long timestamp = record.timestamp() / 1000L;
 
 // Collect datapoints.
 for (AbstractMetric metric : record.metrics()) {
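Why the one-line fix matters: Math.round(record.timestamp() / 1000.0f) funnels an epoch-millisecond value through a 32-bit float, whose 24-bit mantissa cannot represent current epoch values exactly, so the emitted timestamp could drift by many seconds (and the int result would also overflow in 2038). Plain long division is exact. A small standalone sketch; the sample millisecond value is arbitrary:

public class GraphiteTimestampSketch {
  public static void main(String[] args) {
    long ms = 1413413386000L;            // arbitrary 2014 epoch-millis value
    int bad = Math.round(ms / 1000.0f);  // old path: float precision loss
    long good = ms / 1000L;              // fixed path: exact integer division
    System.out.println(bad + " vs " + good);  // 'bad' is off by many seconds
  }
}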

http://git-wip-us.apache.org/repos/asf/hadoop/blob/466f0879/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
index f54c27d..09f0081 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
@@ -19,11 +19,16 @@
 package org.apache.hadoop.metrics2.impl;
 
 import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -34,8 +39,6 @@ import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.sink.GraphiteSink;
 import org.junit.Test;
-
-import static org.mockito.Mockito.*;
 import org.mockito.ArgumentCaptor;
 import org.mockito.internal.util.reflection.Whitebox;
 
@@ -108,6 +111,43 @@ public class TestGraphiteMetrics {
 result.equals("null.all.Context.Context=all.foo2 2 10\n" + 
 "null.all.Context.Context=all.foo1 1 10\n"));
 }
+
+/**
+ * Assert that timestamps are converted correctly, ticket HADOOP-11182
+ */
+@Test
+public void testPutMetrics3() {
+
+  // setup GraphiteSink
+  GraphiteSink sink = new GraphiteSink();
+  ByteArrayOutputStream out = new ByteArrayOutputStream

git commit: HADOOP-11181. GraphiteSink emits wrong timestamps (Sascha Coenen via raviprak)

2014-10-15 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 f93d2ea27 -> 08eeb3e5b


HADOOP-11181. GraphiteSink emits wrong timestamps (Sascha Coenen via raviprak)

(cherry picked from commit 466f08792f11c2f95bf293ac9b6facd7018a5bb8)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/08eeb3e5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/08eeb3e5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/08eeb3e5

Branch: refs/heads/branch-2
Commit: 08eeb3e5b6516075e7b7bd2a9a9575538dfcfbf2
Parents: f93d2ea
Author: Ravi Prakash ravip...@apache.org
Authored: Wed Oct 15 15:49:46 2014 -0700
Committer: Ravi Prakash ravip...@apache.org
Committed: Wed Oct 15 15:52:46 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  2 +
 .../hadoop/metrics2/sink/GraphiteSink.java  |  4 +-
 .../metrics2/impl/TestGraphiteMetrics.java  | 44 +++-
 3 files changed, 46 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/08eeb3e5/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index c42d4fa..a5634b4 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -316,6 +316,8 @@ Release 2.6.0 - UNRELEASED
 
   BUG FIXES
 
+HADOOP-11181. GraphiteSink emits wrong timestamps (Sascha Coenen via 
raviprak)
+
 HADOOP-10781. Unportable getgrouplist() usage breaks FreeBSD (Dmitry
 Sivachenko via Colin Patrick McCabe)
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/08eeb3e5/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
index f474d82..9bc3f15 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
@@ -89,8 +89,8 @@ public class GraphiteSink implements MetricsSink, Closeable {
 }
 }
 
-// Round the timestamp to second as Graphite accepts it in such format.
-int timestamp = Math.round(record.timestamp() / 1000.0f);
+// The record timestamp is in milliseconds while Graphite expects an
+// epoch time in seconds.
+long timestamp = record.timestamp() / 1000L;
 
 // Collect datapoints.
 for (AbstractMetric metric : record.metrics()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/08eeb3e5/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
index f54c27d..09f0081 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
@@ -19,11 +19,16 @@
 package org.apache.hadoop.metrics2.impl;
 
 import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -34,8 +39,6 @@ import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.sink.GraphiteSink;
 import org.junit.Test;
-
-import static org.mockito.Mockito.*;
 import org.mockito.ArgumentCaptor;
 import org.mockito.internal.util.reflection.Whitebox;
 
@@ -108,6 +111,43 @@ public class TestGraphiteMetrics {
 result.equals("null.all.Context.Context=all.foo2 2 10\n" + 
 "null.all.Context.Context=all.foo1 1 10\n"));
 }
+
+/**
+ * Assert that timestamps are converted correctly, ticket HADOOP-11182
+ */
+@Test
+public void testPutMetrics3() {
+
+  // setup GraphiteSink
+  GraphiteSink sink = new

git commit: HADOOP-11181. GraphiteSink emits wrong timestamps (Sascha Coenen via raviprak)

2014-10-15 Thread raviprak
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.6 bd45dba80 -> 7b53ab7b1


HADOOP-11181. GraphiteSink emits wrong timestamps (Sascha Coenen via raviprak)

(cherry picked from commit 466f08792f11c2f95bf293ac9b6facd7018a5bb8)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7b53ab7b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7b53ab7b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7b53ab7b

Branch: refs/heads/branch-2.6
Commit: 7b53ab7b145509e9057f800a8cd88d502296d58c
Parents: bd45dba
Author: Ravi Prakash ravip...@apache.org
Authored: Wed Oct 15 15:49:46 2014 -0700
Committer: Ravi Prakash ravip...@apache.org
Committed: Wed Oct 15 15:53:47 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  2 +
 .../hadoop/metrics2/sink/GraphiteSink.java  |  4 +-
 .../metrics2/impl/TestGraphiteMetrics.java  | 44 +++-
 3 files changed, 46 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b53ab7b/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 77a2677..4b70008 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -296,6 +296,8 @@ Release 2.6.0 - UNRELEASED
 
   BUG FIXES
 
+HADOOP-11181. GraphiteSink emits wrong timestamps (Sascha Coenen via 
raviprak)
+
 HADOOP-10781. Unportable getgrouplist() usage breaks FreeBSD (Dmitry
 Sivachenko via Colin Patrick McCabe)
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b53ab7b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
index f474d82..9bc3f15 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
@@ -89,8 +89,8 @@ public class GraphiteSink implements MetricsSink, Closeable {
 }
 }
 
-// Round the timestamp to second as Graphite accepts it in such format.
-int timestamp = Math.round(record.timestamp() / 1000.0f);
+// The record timestamp is in milliseconds while Graphite expects an
+// epoch time in seconds.
+long timestamp = record.timestamp() / 1000L;
 
 // Collect datapoints.
 for (AbstractMetric metric : record.metrics()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7b53ab7b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
index f54c27d..09f0081 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
@@ -19,11 +19,16 @@
 package org.apache.hadoop.metrics2.impl;
 
 import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
+import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -34,8 +39,6 @@ import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.sink.GraphiteSink;
 import org.junit.Test;
-
-import static org.mockito.Mockito.*;
 import org.mockito.ArgumentCaptor;
 import org.mockito.internal.util.reflection.Whitebox;
 
@@ -108,6 +111,43 @@ public class TestGraphiteMetrics {
 result.equals("null.all.Context.Context=all.foo2 2 10\n" + 
 "null.all.Context.Context=all.foo1 1 10\n"));
 }
+
+/**
+ * Assert that timestamps are converted correctly, ticket HADOOP-11182
+ */
+@Test
+public void testPutMetrics3() {
+
+  // setup GraphiteSink
+  GraphiteSink sink = new

svn commit: r1605646 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java src/test/java/org/apache/hadoop/metrics2

2014-06-25 Thread raviprak
Author: raviprak
Date: Thu Jun 26 01:50:58 2014
New Revision: 1605646

URL: http://svn.apache.org/r1605646
Log:
HADOOP-10715. Remove public GraphiteSink#setWriter()

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1605646r1=1605645r2=1605646view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Thu Jun 
26 01:50:58 2014
@@ -646,6 +646,8 @@ Release 2.5.0 - UNRELEASED
 HADOOP-10479. Fix new findbugs warnings in hadoop-minikdc.
 (Swarnim Kulkarni via wheat9)
 
+HADOOP-10715. Remove public GraphiteSink#setWriter (Babak Behzad via 
raviprak)
+
 Release 2.4.1 - 2014-06-23 
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java?rev=1605646r1=1605645r2=1605646view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
 Thu Jun 26 01:50:58 2014
@@ -50,10 +50,6 @@ public class GraphiteSink implements Met
 private String metricsPrefix = null;
 private Socket socket = null;
 
-public void setWriter(Writer writer) {
-this.writer = writer;
-}
-
 @Override
 public void init(SubsetConfiguration conf) {
 // Get Graphite host configurations.
@@ -68,7 +64,7 @@ public class GraphiteSink implements Met
 try {
 // Open an connection to Graphite server.
 socket = new Socket(serverHost, serverPort);
-setWriter(new OutputStreamWriter(socket.getOutputStream()));
+writer = new OutputStreamWriter(socket.getOutputStream());
 } catch (Exception e) {
 throw new MetricsException("Error creating connection, "
 + serverHost + ":" + serverPort, e);

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java?rev=1605646r1=1605645r2=1605646view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
 Thu Jun 26 01:50:58 2014
@@ -28,15 +28,16 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
-import static org.mockito.Mockito.*;
-
 import org.apache.hadoop.metrics2.AbstractMetric;
 import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.sink.GraphiteSink;
 import org.junit.Test;
+
+import static org.mockito.Mockito.*;
 import org.mockito.ArgumentCaptor;
+import org.mockito.internal.util.reflection.Whitebox;
 
 public class TestGraphiteMetrics {
 private AbstractMetric makeMetric(String name, Number value) {
@@ -57,14 +58,13 @@ public class TestGraphiteMetrics {
 metrics.add(makeMetric("foo2", 2.25));
 MetricsRecord record = new MetricsRecordImpl(MsInfo.Context, (long) 
1, tags, metrics);
 
-OutputStreamWriter writer = mock(OutputStreamWriter.class);
+OutputStreamWriter mockWriter = mock(OutputStreamWriter.class);
 ArgumentCaptor<String> argument = 
ArgumentCaptor.forClass(String.class);
-
-sink.setWriter(writer);
+Whitebox.setInternalState(sink, "writer", mockWriter);
 sink.putMetrics(record);
 
 try {
-verify(writer).write(argument.capture());
+verify(mockWriter).write(argument.capture());
 } catch (IOException e) {
 e.printStackTrace();
 }
@@ -89,14 +89,13 @@ public class TestGraphiteMetrics
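Dropping the public setter shrinks GraphiteSink's public API surface; the test now injects its mock through Whitebox.setInternalState, Mockito's internal reflection helper for private fields. For readers who prefer to avoid Mockito internals, a plain-reflection equivalent (the helper below is hypothetical, not part of the patch):

import java.lang.reflect.Field;

final class TestFieldInjector {
  // Roughly what Whitebox.setInternalState does: set a private field in a test.
  static void setField(Object target, String name, Object value)
      throws ReflectiveOperationException {
    Field f = target.getClass().getDeclaredField(name);
    f.setAccessible(true);  // bypass 'private' for test-only injection
    f.set(target, value);
  }
}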

svn commit: r1605647 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java src/test/java/org/apache/had

2014-06-25 Thread raviprak
Author: raviprak
Date: Thu Jun 26 01:53:43 2014
New Revision: 1605647

URL: http://svn.apache.org/r1605647
Log:
HADOOP-10715. Remove public GraphiteSink#setWriter()

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1605647r1=1605646r2=1605647view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Thu Jun 26 01:53:43 2014
@@ -288,6 +288,8 @@ Release 2.5.0 - UNRELEASED
 HADOOP-10479. Fix new findbugs warnings in hadoop-minikdc.
 (Swarnim Kulkarni via wheat9)
 
+HADOOP-10715. Remove public GraphiteSink#setWriter (Babak Behzad via 
raviprak)
+
 Release 2.4.1 - 2014-06-23 
 
   INCOMPATIBLE CHANGES

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java?rev=1605647r1=1605646r2=1605647view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
 Thu Jun 26 01:53:43 2014
@@ -50,10 +50,6 @@ public class GraphiteSink implements Met
 private String metricsPrefix = null;
 private Socket socket = null;
 
-public void setWriter(Writer writer) {
-this.writer = writer;
-}
-
 @Override
 public void init(SubsetConfiguration conf) {
 // Get Graphite host configurations.
@@ -68,7 +64,7 @@ public class GraphiteSink implements Met
 try {
 // Open an connection to Graphite server.
 socket = new Socket(serverHost, serverPort);
-setWriter(new OutputStreamWriter(socket.getOutputStream()));
+writer = new OutputStreamWriter(socket.getOutputStream());
 } catch (Exception e) {
 throw new MetricsException("Error creating connection, "
 + serverHost + ":" + serverPort, e);

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java?rev=1605647r1=1605646r2=1605647view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
 Thu Jun 26 01:53:43 2014
@@ -28,15 +28,16 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
-import static org.mockito.Mockito.*;
-
 import org.apache.hadoop.metrics2.AbstractMetric;
 import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsRecord;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.sink.GraphiteSink;
 import org.junit.Test;
+
+import static org.mockito.Mockito.*;
 import org.mockito.ArgumentCaptor;
+import org.mockito.internal.util.reflection.Whitebox;
 
 public class TestGraphiteMetrics {
 private AbstractMetric makeMetric(String name, Number value) {
@@ -57,14 +58,13 @@ public class TestGraphiteMetrics {
 metrics.add(makeMetric("foo2", 2.25));
 MetricsRecord record = new MetricsRecordImpl(MsInfo.Context, (long) 
1, tags, metrics);
 
-OutputStreamWriter writer = mock(OutputStreamWriter.class);
+OutputStreamWriter mockWriter = mock(OutputStreamWriter.class);
 ArgumentCaptor<String> argument = 
ArgumentCaptor.forClass(String.class);
-
-sink.setWriter(writer);
+Whitebox.setInternalState(sink, "writer", mockWriter);
 sink.putMetrics(record);
 
 try {
-verify(writer).write(argument.capture

svn commit: r1603379 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java src/test/java/org/apache/hadoop/metrics2

2014-06-18 Thread raviprak
Author: raviprak
Date: Wed Jun 18 09:27:03 2014
New Revision: 1603379

URL: http://svn.apache.org/r1603379
Log:
HADOOP-10660. GraphiteSink should implement Closeable (Chen He and Ted Yu via 
raviprak)

Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1603379r1=1603378r2=1603379view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Wed Jun 
18 09:27:03 2014
@@ -568,6 +568,8 @@ Release 2.5.0 - UNRELEASED
 HADOOP-10699. Fix build native library on mac osx (Binglin Chang via
 jlowe)
 
+HADOOP-10660. GraphiteSink should implement Closeable (Chen He and Ted Yu 
via raviprak)
+
   BREAKDOWN OF HADOOP-10514 SUBTASKS AND RELATED JIRAS
 
 HADOOP-10520. Extended attributes definition and FileSystem APIs for

Modified: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java?rev=1603379r1=1603378r2=1603379view=diff
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
 (original)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
 Wed Jun 18 09:27:03 2014
@@ -18,13 +18,18 @@
 
 package org.apache.hadoop.metrics2.sink;
 
+import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
+import java.io.Closeable;
 import java.net.Socket;
 
 import org.apache.commons.configuration.SubsetConfiguration;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.metrics2.AbstractMetric;
 import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsRecord;
@@ -36,12 +41,14 @@ import org.apache.hadoop.metrics2.Metric
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class GraphiteSink implements MetricsSink {
+public class GraphiteSink implements MetricsSink, Closeable {
+private static final Log LOG = LogFactory.getLog(GraphiteSink.class);
 private static final String SERVER_HOST_KEY = "server_host";
 private static final String SERVER_PORT_KEY = "server_port";
 private static final String METRICS_PREFIX = "metrics_prefix";
 private Writer writer = null;
 private String metricsPrefix = null;
+private Socket socket = null;
 
 public void setWriter(Writer writer) {
 this.writer = writer;
@@ -60,7 +67,7 @@ public class GraphiteSink implements Met
 
 try {
 // Open an connection to Graphite server.
-Socket socket = new Socket(serverHost, serverPort);
+socket = new Socket(serverHost, serverPort);
 setWriter(new OutputStreamWriter(socket.getOutputStream()));
 } catch (Exception e) {
throw new MetricsException("Error creating connection, "
@@ -99,7 +106,11 @@ public class GraphiteSink implements Met
 }
 
 try {
-writer.write(lines.toString());
+if(writer != null){
+  writer.write(lines.toString());
+} else {
+  throw new MetricsException("Writer in GraphiteSink is null!");
+}
 } catch (Exception e) {
 throw new MetricsException("Error sending metrics", e);
 }
@@ -113,4 +124,21 @@ public class GraphiteSink implements Met
 throw new MetricsException("Error flushing metrics", e);
 }
 }
+
+@Override
+public void close() throws IOException {
+  try {
+IOUtils.closeStream(writer);
+writer = null;
+LOG.info("writer in GraphiteSink is closed!");
+  } catch (Throwable e){
+throw new MetricsException("Error closing writer", e);
+  } finally {
+if (socket != null && !socket.isClosed()) {
+  socket.close();
+  socket = null;
+  LOG.info("socket in GraphiteSink is closed!");
+}
+  }
+}
 }
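With GraphiteSink now Closeable, the metrics system (or a test) can release the socket deterministically instead of leaking it. A minimal usage sketch, assuming only what the diff shows; without init() there is no socket, so close() merely clears the null writer:

import java.io.IOException;
import org.apache.hadoop.metrics2.sink.GraphiteSink;

public class CloseableSinkSketch {
  public static void main(String[] args) {
    // try-with-resources works because GraphiteSink now implements Closeable.
    try (GraphiteSink sink = new GraphiteSink()) {
      // sink.init(conf) and sink.putMetrics(record) elided in this sketch.
    } catch (IOException e) {
      e.printStackTrace();  // close() surfaces socket shutdown failures
    }
  }
}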

Modified: 
hadoop/common/trunk/hadoop

svn commit: r1603380 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java src/test/java/org/apache/had

2014-06-18 Thread raviprak
Author: raviprak
Date: Wed Jun 18 09:28:26 2014
New Revision: 1603380

URL: http://svn.apache.org/r1603380
Log:
HADOOP-10660. GraphiteSink should implement Closeable (Chen He and Ted Yu via 
raviprak)

Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1603380r1=1603379r2=1603380view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Wed Jun 18 09:28:26 2014
@@ -231,6 +231,8 @@ Release 2.5.0 - UNRELEASED
 HADOOP-10699. Fix build native library on mac osx (Binglin Chang via
 jlowe)
 
+HADOOP-10660. GraphiteSink should implement Closeable (Chen He and Ted Yu 
via raviprak)
+
   BREAKDOWN OF HADOOP-10514 SUBTASKS AND RELATED JIRAS
 
 HADOOP-10520. Extended attributes definition and FileSystem APIs for

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java?rev=1603380r1=1603379r2=1603380view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
 Wed Jun 18 09:28:26 2014
@@ -18,13 +18,18 @@
 
 package org.apache.hadoop.metrics2.sink;
 
+import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
+import java.io.Closeable;
 import java.net.Socket;
 
 import org.apache.commons.configuration.SubsetConfiguration;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.metrics2.AbstractMetric;
 import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsRecord;
@@ -36,12 +41,14 @@ import org.apache.hadoop.metrics2.Metric
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
-public class GraphiteSink implements MetricsSink {
+public class GraphiteSink implements MetricsSink, Closeable {
+private static final Log LOG = LogFactory.getLog(GraphiteSink.class);
 private static final String SERVER_HOST_KEY = "server_host";
 private static final String SERVER_PORT_KEY = "server_port";
 private static final String METRICS_PREFIX = "metrics_prefix";
 private Writer writer = null;
 private String metricsPrefix = null;
+private Socket socket = null;
 
 public void setWriter(Writer writer) {
 this.writer = writer;
@@ -60,7 +67,7 @@ public class GraphiteSink implements Met
 
 try {
 // Open an connection to Graphite server.
-Socket socket = new Socket(serverHost, serverPort);
+socket = new Socket(serverHost, serverPort);
 setWriter(new OutputStreamWriter(socket.getOutputStream()));
 } catch (Exception e) {
throw new MetricsException("Error creating connection, "
@@ -99,7 +106,11 @@ public class GraphiteSink implements Met
 }
 
 try {
-writer.write(lines.toString());
+if(writer != null){
+  writer.write(lines.toString());
+} else {
+  throw new MetricsException("Writer in GraphiteSink is null!");
+}
 } catch (Exception e) {
 throw new MetricsException("Error sending metrics", e);
 }
@@ -113,4 +124,21 @@ public class GraphiteSink implements Met
 throw new MetricsException("Error flushing metrics", e);
 }
 }
+
+@Override
+public void close() throws IOException {
+  try {
+IOUtils.closeStream(writer);
+writer = null;
+LOG.info("writer in GraphiteSink is closed!");
+  } catch (Throwable e){
+throw new MetricsException("Error closing writer", e);
+  } finally {
+if (socket != null && !socket.isClosed()) {
+  socket.close

svn commit: r1599413 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java src/test/java/org/apache/hadoop/metrics2

2014-06-02 Thread raviprak
Author: raviprak
Date: Tue Jun  3 05:42:50 2014
New Revision: 1599413

URL: http://svn.apache.org/r1599413
Log:
HADOOP-9704. Write metrics sink plugin for Hadoop/Graphite (Chu Tong, Alex 
Newman and Babak Behzad via raviprak)

Added:

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java

hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
Modified:
hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1599413r1=1599412r2=1599413view=diff
==
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Jun 
 3 05:42:50 2014
@@ -371,6 +371,8 @@ Release 2.5.0 - UNRELEASED
 
 HADOOP-10498. Add support for proxy server. (daryn)
 
+HADOOP-9704. Write metrics sink plugin for Hadoop/Graphite (Chu Tong, Alex 
Newman and Babak Behzad via raviprak)
+
   IMPROVEMENTS
 
 HADOOP-10451. Remove unused field and imports from SaslRpcServer.

Added: 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java?rev=1599413view=auto
==
--- 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
 (added)
+++ 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
 Tue Jun  3 05:42:50 2014
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.sink;
+
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.net.Socket;
+
+import org.apache.commons.configuration.SubsetConfiguration;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.metrics2.AbstractMetric;
+import org.apache.hadoop.metrics2.MetricsException;
+import org.apache.hadoop.metrics2.MetricsRecord;
+import org.apache.hadoop.metrics2.MetricsSink;
+import org.apache.hadoop.metrics2.MetricsTag;
+
+/**
+ * A metrics sink that writes to a Graphite server
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class GraphiteSink implements MetricsSink {
+private static final String SERVER_HOST_KEY = "server_host";
+private static final String SERVER_PORT_KEY = "server_port";
+private static final String METRICS_PREFIX = "metrics_prefix";
+private Writer writer = null;
+private String metricsPrefix = null;
+
+public void setWriter(Writer writer) {
+this.writer = writer;
+}
+
+@Override
+public void init(SubsetConfiguration conf) {
+// Get Graphite host configurations.
+String serverHost = conf.getString(SERVER_HOST_KEY);
+Integer serverPort = Integer.parseInt(conf.getString(SERVER_PORT_KEY));
+
+// Get Graphite metrics graph prefix.
+metricsPrefix = conf.getString(METRICS_PREFIX);
+if (metricsPrefix == null)
+metricsPrefix = "";
+
+try {
+// Open a connection to the Graphite server.
+Socket socket = new Socket(serverHost, serverPort);
+setWriter(new OutputStreamWriter(socket.getOutputStream()));
+} catch (Exception e) {
+throw new MetricsException("Error creating connection, "
++ serverHost + ":" + serverPort, e);
+}
+}
+
+@Override
+public void putMetrics(MetricsRecord record) {
+StringBuilder lines = new StringBuilder();
+StringBuilder metricsPathPrefix = new StringBuilder();
+
+// Configure
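
The message truncates here, but the rest of putMetrics() assembles lines in
Graphite's plaintext protocol: a dotted metric path, a space-separated value,
and a Unix timestamp in seconds, each line newline-terminated. A hedged
sketch of that line format follows; it is a standalone helper for
illustration, not the sink's actual code, and tag handling is omitted.

public final class GraphiteLineSketch {
  // Graphite's plaintext protocol: "metric.path value unix_seconds\n".
  static String format(String prefix, String context, String recordName,
      String metricName, double value, long timestampMillis) {
    return new StringBuilder()
        .append(prefix).append('.')
        .append(context).append('.')
        .append(recordName).append('.')
        .append(metricName).append(' ')
        .append(value).append(' ')
        .append(timestampMillis / 1000L)  // Graphite expects seconds
        .append('\n')
        .toString();
  }

  public static void main(String[] args) {
    // Prints something like: hadoop.dfs.namenode.FilesTotal 42.0 1401771770
    System.out.print(format("hadoop", "dfs", "namenode", "FilesTotal",
        42.0, System.currentTimeMillis()));
  }
}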

svn commit: r1599415 - in /hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common: CHANGES.txt src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java src/test/java/org/apache/had

2014-06-02 Thread raviprak
Author: raviprak
Date: Tue Jun  3 05:51:52 2014
New Revision: 1599415

URL: http://svn.apache.org/r1599415
Log:
HADOOP-9704. Write metrics sink plugin for Hadoop/Graphite (Chu Tong, Alex 
Newman and Babak Behzad via raviprak)

Added:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java
  - copied unchanged from r1599413, 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
  - copied unchanged from r1599413, 
hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGraphiteMetrics.java
Modified:

hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt

Modified: 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1599415&r1=1599414&r2=1599415&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
(original)
+++ 
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt 
Tue Jun  3 05:51:52 2014
@@ -8,6 +8,8 @@ Release 2.5.0 - UNRELEASED
 
 HADOOP-10498. Add support for proxy server. (daryn)
 
+HADOOP-9704. Write metrics sink plugin for Hadoop/Graphite (Chu Tong, Alex 
Newman and Babak Behzad via raviprak)
+
   IMPROVEMENTS
 
 HADOOP-10451. Remove unused field and imports from SaslRpcServer.




svn commit: r1595265 - /hadoop/common/trunk/hadoop-project/src/site/site.xml

2014-05-16 Thread raviprak
Author: raviprak
Date: Fri May 16 16:50:58 2014
New Revision: 1595265

URL: http://svn.apache.org/r1595265
Log:
MAPREDUCE-5874. Creating MapReduce REST API section. Contributed by Tsuyoshi 
Ozawa

Modified:
hadoop/common/trunk/hadoop-project/src/site/site.xml

Modified: hadoop/common/trunk/hadoop-project/src/site/site.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-project/src/site/site.xml?rev=1595265&r1=1595264&r2=1595265&view=diff
==
--- hadoop/common/trunk/hadoop-project/src/site/site.xml (original)
+++ hadoop/common/trunk/hadoop-project/src/site/site.xml Fri May 16 16:50:58 
2014
@@ -99,6 +99,11 @@
      <item name="DistCp" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/DistCp.html"/>
    </menu>

+    <menu name="MapReduce REST APIs" inherit="top">
+      <item name="MR Application Master" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapredAppMasterRest.html"/>
+      <item name="MR History Server" href="hadoop-mapreduce-client/hadoop-mapreduce-client-hs/HistoryServerRest.html"/>
+    </menu>
+
    <menu name="YARN" inherit="top">
      <item name="YARN Architecture" href="hadoop-yarn/hadoop-yarn-site/YARN.html"/>
      <item name="Capacity Scheduler" href="hadoop-yarn/hadoop-yarn-site/CapacityScheduler.html"/>
@@ -116,8 +121,6 @@
      <item name="Introduction" href="hadoop-yarn/hadoop-yarn-site/WebServicesIntro.html"/>
      <item name="Resource Manager" href="hadoop-yarn/hadoop-yarn-site/ResourceManagerRest.html"/>
      <item name="Node Manager" href="hadoop-yarn/hadoop-yarn-site/NodeManagerRest.html"/>
-      <item name="MR Application Master" href="hadoop-yarn/hadoop-yarn-site/MapredAppMasterRest.html"/>
-      <item name="History Server" href="hadoop-yarn/hadoop-yarn-site/HistoryServerRest.html"/>
    </menu>

    <menu name="Auth" inherit="top">
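
For a sense of what the newly linked pages document, both REST APIs serve
JSON (or XML) over the daemons' web ports. A quick illustrative client for
the history server's info resource follows; localhost and 19888 (the default
mapreduce.jobhistory.webapp.address port) are assumptions for a
default-configured cluster.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class HistoryRestSketch {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://localhost:19888/ws/v1/history/info");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("Accept", "application/json");
    try (BufferedReader in = new BufferedReader(new InputStreamReader(
        conn.getInputStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = in.readLine()) != null) {
        System.out.println(line);  // e.g. {"historyInfo":{...}}
      }
    } finally {
      conn.disconnect();
    }
  }
}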




svn commit: r1595268 - /hadoop/common/branches/branch-2/hadoop-project/src/site/site.xml

2014-05-16 Thread raviprak
Author: raviprak
Date: Fri May 16 17:11:01 2014
New Revision: 1595268

URL: http://svn.apache.org/r1595268
Log:
MAPREDUCE-5874. Creating MapReduce REST API section. Contributed by Tsuyoshi 
Ozawa

Modified:
hadoop/common/branches/branch-2/hadoop-project/src/site/site.xml

Modified: hadoop/common/branches/branch-2/hadoop-project/src/site/site.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-project/src/site/site.xml?rev=1595268&r1=1595267&r2=1595268&view=diff
==
--- hadoop/common/branches/branch-2/hadoop-project/src/site/site.xml (original)
+++ hadoop/common/branches/branch-2/hadoop-project/src/site/site.xml Fri May 16 
17:11:01 2014
@@ -97,7 +97,12 @@
      <item name="Hadoop Archives" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/HadoopArchives.html"/>
      <item name="DistCp" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/DistCp.html"/>
    </menu>
-
+
+    <menu name="MapReduce REST APIs" inherit="top">
+      <item name="MR Application Master" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapredAppMasterRest.html"/>
+      <item name="MR History Server" href="hadoop-mapreduce-client/hadoop-mapreduce-client-hs/HistoryServerRest.html"/>
+    </menu>
+
    <menu name="YARN" inherit="top">
      <item name="YARN Architecture" href="hadoop-yarn/hadoop-yarn-site/YARN.html"/>
      <item name="Capacity Scheduler" href="hadoop-yarn/hadoop-yarn-site/CapacityScheduler.html"/>
@@ -115,8 +120,6 @@
      <item name="Introduction" href="hadoop-yarn/hadoop-yarn-site/WebServicesIntro.html"/>
      <item name="Resource Manager" href="hadoop-yarn/hadoop-yarn-site/ResourceManagerRest.html"/>
      <item name="Node Manager" href="hadoop-yarn/hadoop-yarn-site/NodeManagerRest.html"/>
-      <item name="MR Application Master" href="hadoop-yarn/hadoop-yarn-site/MapredAppMasterRest.html"/>
-      <item name="History Server" href="hadoop-yarn/hadoop-yarn-site/HistoryServerRest.html"/>
    </menu>

    <menu name="Auth" inherit="top">




svn commit: r1589595 - /hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java

2014-04-24 Thread raviprak
Author: raviprak
Date: Thu Apr 24 05:59:28 2014
New Revision: 1589595

URL: http://svn.apache.org/r1589595
Log:
MAPREDUCE-5846. Rumen doesn't understand JobQueueChangedEvent (Nathan Roberts 
via raviprak)

Modified:

hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java

Modified: 
hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java?rev=1589595&r1=1589594&r2=1589595&view=diff
==
--- 
hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
 (original)
+++ 
hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
 Thu Apr 24 05:59:28 2014
@@ -38,6 +38,7 @@ import org.apache.hadoop.mapreduce.jobhi
 import org.apache.hadoop.mapreduce.jobhistory.JobPriorityChangeEvent;
 import org.apache.hadoop.mapreduce.jobhistory.JobStatusChangedEvent;
 import org.apache.hadoop.mapreduce.jobhistory.JobSubmittedEvent;
+import org.apache.hadoop.mapreduce.jobhistory.JobQueueChangeEvent;
 import org.apache.hadoop.mapreduce.jobhistory.JobUnsuccessfulCompletionEvent;
 import org.apache.hadoop.mapreduce.jobhistory.MapAttemptFinished;
 import org.apache.hadoop.mapreduce.jobhistory.MapAttemptFinishedEvent;
@@ -144,6 +145,8 @@ public class JobBuilder {
   processJobInitedEvent((JobInitedEvent) event);
 } else if (event instanceof JobPriorityChangeEvent) {
   processJobPriorityChangeEvent((JobPriorityChangeEvent) event);
+} else if (event instanceof JobQueueChangeEvent) {
+  processJobQueueChangeEvent((JobQueueChangeEvent) event);
 } else if (event instanceof JobStatusChangedEvent) {
   processJobStatusChangedEvent((JobStatusChangedEvent) event);
 } else if (event instanceof JobSubmittedEvent) {
@@ -611,6 +614,14 @@ public class JobBuilder {
 }
   }
 
+  private void processJobQueueChangeEvent(JobQueueChangeEvent event) {
+// set the queue name if existing
+String queue = event.getJobQueueName();
+if (queue != null) {
+  result.setQueue(queue);
+}
+  }
+
   private void processJobStatusChangedEvent(JobStatusChangedEvent event) {
 result.setOutcome(Pre21JobHistoryConstants.Values
 .valueOf(event.getStatus()));
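
JobBuilder routes each history event through an instanceof chain, so
supporting JobQueueChangeEvent is one new branch plus a handler that copies
the queue name onto the job being built. A self-contained sketch of that
dispatch pattern follows; the event and job types here are stand-ins, not
the real Rumen classes.

final class QueueDispatchSketch {
  interface HistoryEvent { }

  static final class JobQueueChangeEvent implements HistoryEvent {
    private final String queue;
    JobQueueChangeEvent(String queue) { this.queue = queue; }
    String getJobQueueName() { return queue; }
  }

  static final class LoggedJob {
    String queue;
    void setQueue(String queue) { this.queue = queue; }
  }

  private final LoggedJob result = new LoggedJob();

  void process(HistoryEvent event) {
    if (event instanceof JobQueueChangeEvent) {
      // Mirror the patch: only set the queue if the event carries one.
      String queue = ((JobQueueChangeEvent) event).getJobQueueName();
      if (queue != null) {
        result.setQueue(queue);
      }
    }
    // ...dispatch branches for the other event types elided...
  }

  public static void main(String[] args) {
    QueueDispatchSketch b = new QueueDispatchSketch();
    b.process(new JobQueueChangeEvent("research"));
    System.out.println(b.result.queue);  // prints "research"
  }
}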




svn commit: r1589597 - /hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java

2014-04-24 Thread raviprak
Author: raviprak
Date: Thu Apr 24 06:05:31 2014
New Revision: 1589597

URL: http://svn.apache.org/r1589597
Log:
MAPREDUCE-5846. Rumen doesn't understand JobQueueChangedEvent (Nathan Roberts 
via raviprak)

Modified:

hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java

Modified: 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
URL: 
http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java?rev=1589597&r1=1589596&r2=1589597&view=diff
==
--- 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
 (original)
+++ 
hadoop/common/branches/branch-2/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
 Thu Apr 24 06:05:31 2014
@@ -38,6 +38,7 @@ import org.apache.hadoop.mapreduce.jobhi
 import org.apache.hadoop.mapreduce.jobhistory.JobPriorityChangeEvent;
 import org.apache.hadoop.mapreduce.jobhistory.JobStatusChangedEvent;
 import org.apache.hadoop.mapreduce.jobhistory.JobSubmittedEvent;
+import org.apache.hadoop.mapreduce.jobhistory.JobQueueChangeEvent;
 import org.apache.hadoop.mapreduce.jobhistory.JobUnsuccessfulCompletionEvent;
 import org.apache.hadoop.mapreduce.jobhistory.MapAttemptFinished;
 import org.apache.hadoop.mapreduce.jobhistory.MapAttemptFinishedEvent;
@@ -144,6 +145,8 @@ public class JobBuilder {
   processJobInitedEvent((JobInitedEvent) event);
 } else if (event instanceof JobPriorityChangeEvent) {
   processJobPriorityChangeEvent((JobPriorityChangeEvent) event);
+} else if (event instanceof JobQueueChangeEvent) {
+  processJobQueueChangeEvent((JobQueueChangeEvent) event);
 } else if (event instanceof JobStatusChangedEvent) {
   processJobStatusChangedEvent((JobStatusChangedEvent) event);
 } else if (event instanceof JobSubmittedEvent) {
@@ -611,6 +614,14 @@ public class JobBuilder {
 }
   }
 
+  private void processJobQueueChangeEvent(JobQueueChangeEvent event) {
+// set the queue name if existing
+String queue = event.getJobQueueName();
+if (queue != null) {
+  result.setQueue(queue);
+}
+  }
+
   private void processJobStatusChangedEvent(JobStatusChangedEvent event) {
 result.setOutcome(Pre21JobHistoryConstants.Values
 .valueOf(event.getStatus()));




svn commit: r1568104 - /hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml

2014-02-13 Thread raviprak
Author: raviprak
Date: Thu Feb 13 23:03:41 2014
New Revision: 1568104

URL: http://svn.apache.org/r1568104
Log:
Updating affiliations page

Modified:
hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml

Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml?rev=1568104&r1=1568103&r2=1568104&view=diff
==
--- hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml 
(original)
+++ hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml Thu 
Feb 13 23:03:41 2014
@@ -839,7 +839,7 @@
    <tr>
      <td>raviprak</td>
      <td><a href="http://people.apache.org/~raviprak">Ravi Prakash</a></td>
-     <td>Yahoo!</td>
+     <td>Altiscale, Inc.</td>
      <td></td>
      <td>-8</td>
    </tr>




svn commit: r1553987 - /hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml

2013-12-28 Thread raviprak
Author: raviprak
Date: Sun Dec 29 02:03:52 2013
New Revision: 1553987

URL: http://svn.apache.org/r1553987
Log:
Added my name to the committer list

Modified:
hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml

Modified: hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml
URL: 
http://svn.apache.org/viewvc/hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml?rev=1553987&r1=1553986&r2=1553987&view=diff
==
--- hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml 
(original)
+++ hadoop/common/site/main/author/src/documentation/content/xdocs/who.xml Sun 
Dec 29 02:03:52 2013
@@ -829,6 +829,14 @@
    </tr>

    <tr>
+     <td>raviprak</td>
+     <td><a href="http://people.apache.org/~raviprak">Ravi Prakash</a></td>
+     <td>Yahoo!</td>
+     <td></td>
+     <td>-8</td>
+   </tr>
+
+   <tr>
      <td>rvs</td>
      <td><a href="http://people.apache.org/~rvs">Roman Shaposhnik</a></td>
      <td>Pivotal</td>



