git commit: HDFS-7061. Add test to verify encryption zone creation after NameNode restart without saving namespace. Contributed by Stephen Chu.

2014-09-15 Thread wang
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 5cdb24d4b -> dd3e28d43


HDFS-7061. Add test to verify encryption zone creation after NameNode restart 
without saving namespace. Contributed by Stephen Chu.

(cherry picked from commit fc741b5d78e7e006355e17b1b5839f502e37261b)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/dd3e28d4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/dd3e28d4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/dd3e28d4

Branch: refs/heads/branch-2
Commit: dd3e28d434e32c2c312856a987883b056c28b8ae
Parents: 5cdb24d
Author: Andrew Wang w...@apache.org
Authored: Sun Sep 14 23:48:24 2014 -0700
Committer: Andrew Wang w...@apache.org
Committed: Sun Sep 14 23:49:26 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt   |  3 +++
 .../java/org/apache/hadoop/hdfs/TestEncryptionZones.java  | 10 ++
 2 files changed, 13 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/dd3e28d4/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 8ac8dc3..7a42ea9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -197,6 +197,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-6482. Use block ID-based block layout on datanodes (James Thomas via
 Colin Patrick McCabe)
 
+HDFS-7061. Add test to verify encryption zone creation after NameNode
+restart without saving namespace. (Stephen Chu via wang)
+
   OPTIMIZATIONS
 
 HDFS-6690. Deduplicate xattr names in memory. (wang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dd3e28d4/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
index db3c085..b3bf5d9 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
@@ -338,6 +338,16 @@ public class TestEncryptionZones {
 cluster.restartNameNode(true);
 assertNumZones(numZones);
 assertZonePresent(null, zone1.toString());
+
+// Verify newly added ez is present after restarting the NameNode
+// without persisting the namespace.
+Path nonpersistZone = new Path("/nonpersistZone");
+fsWrapper.mkdir(nonpersistZone, FsPermission.getDirDefault(), false);
+dfsAdmin.createEncryptionZone(nonpersistZone, TEST_KEY);
+numZones++;
+cluster.restartNameNode(true);
+assertNumZones(numZones);
+assertZonePresent(null, nonpersistZone.toString());
   }
 
   /**



[2/2] git commit: HADOOP-10400. Incorporate new S3A FileSystem implementation. Contributed by Jordan Mendelson and Dave Wang.

2014-09-15 Thread atm
HADOOP-10400. Incorporate new S3A FileSystem implementation. Contributed by 
Jordan Mendelson and Dave Wang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/24d920b8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/24d920b8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/24d920b8

Branch: refs/heads/trunk
Commit: 24d920b80eb3626073925a1d0b6dcf148add8cc0
Parents: fc741b5
Author: Aaron T. Myers a...@apache.org
Authored: Mon Sep 15 08:27:07 2014 -0700
Committer: Aaron T. Myers a...@apache.org
Committed: Mon Sep 15 08:27:07 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |3 +
 .../src/main/conf/log4j.properties  |5 +
 .../src/main/resources/core-default.xml |   86 ++
 hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml  |8 +
 hadoop-project/pom.xml  |   26 +-
 hadoop-tools/hadoop-aws/pom.xml |   10 +
 .../fs/s3a/AnonymousAWSCredentialsProvider.java |   37 +
 .../fs/s3a/BasicAWSCredentialsProvider.java |   51 +
 .../org/apache/hadoop/fs/s3a/Constants.java |   90 ++
 .../org/apache/hadoop/fs/s3a/S3AFileStatus.java |   62 ++
 .../org/apache/hadoop/fs/s3a/S3AFileSystem.java | 1019 ++
 .../apache/hadoop/fs/s3a/S3AInputStream.java|  207 
 .../apache/hadoop/fs/s3a/S3AOutputStream.java   |  208 
 .../services/org.apache.hadoop.fs.FileSystem|1 +
 .../hadoop/fs/contract/s3a/S3AContract.java |   43 +
 .../fs/contract/s3a/TestS3AContractCreate.java  |   38 +
 .../fs/contract/s3a/TestS3AContractDelete.java  |   31 +
 .../fs/contract/s3a/TestS3AContractMkdir.java   |   34 +
 .../fs/contract/s3a/TestS3AContractOpen.java|   31 +
 .../fs/contract/s3a/TestS3AContractRename.java  |   64 ++
 .../fs/contract/s3a/TestS3AContractRootDir.java |   35 +
 .../fs/contract/s3a/TestS3AContractSeek.java|   31 +
 .../fs/s3a/S3AFileSystemContractBaseTest.java   |  327 ++
 .../src/test/resources/contract/s3a.xml |  105 ++
 .../src/test/resources/contract/s3n.xml |7 +-
 hadoop-tools/hadoop-azure/pom.xml   |   10 +-
 26 files changed, 2552 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d920b8/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 051eac1..c2ae5ed 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -342,6 +342,9 @@ Release 2.6.0 - UNRELEASED
 HADOOP-10893. isolated classloader on the client side (Sangjin Lee via
 jlowe)
 
+HADOOP-10400. Incorporate new S3A FileSystem implementation. (Jordan
+Mendelson and Dave Wang via atm)
+
   IMPROVEMENTS
 
 HADOOP-10808. Remove unused native code for munlock. (cnauroth)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d920b8/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
--
diff --git a/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties 
b/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
index ef9acbf..5fa21fa 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
+++ b/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
@@ -174,6 +174,11 @@ 
log4j.appender.MRAUDIT.MaxBackupIndex=${mapred.audit.log.maxbackupindex}
 # Jets3t library
 log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
 
+# AWS SDK & S3A FileSystem
+log4j.logger.com.amazonaws=ERROR
+log4j.logger.com.amazonaws.http.AmazonHttpClient=ERROR
+log4j.logger.org.apache.hadoop.fs.s3a.S3AFileSystem=WARN
+
 #
 # Event Counter Appender
 # Sends counts of logging messages at different severity levels to Hadoop 
Metrics.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d920b8/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml 
b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index 3cc7545..828dec2 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -690,6 +690,92 @@ for ldap providers in the same way as above does.
 </property>
 
 <property>
+  <name>fs.s3a.access.key</name>
+  <description>AWS access key ID. Omit for Role-based 
authentication.</description>
+</property>
+
+<property>
+  <name>fs.s3a.secret.key</name>
+  <description>AWS secret key. Omit for 

[1/2] HADOOP-10400. Incorporate new S3A FileSystem implementation. Contributed by Jordan Mendelson and Dave Wang.

2014-09-15 Thread atm
Repository: hadoop
Updated Branches:
  refs/heads/trunk fc741b5d7 -> 24d920b80


http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d920b8/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java
--
diff --git 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java
 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java
new file mode 100644
index 000..d677ec4
--- /dev/null
+++ 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.contract.s3a;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractSeekTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+
+public class TestS3AContractSeek extends AbstractContractSeekTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+return new S3AContract(conf);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d920b8/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3AFileSystemContractBaseTest.java
--
diff --git 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3AFileSystemContractBaseTest.java
 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3AFileSystemContractBaseTest.java
new file mode 100644
index 000..8455233
--- /dev/null
+++ 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3AFileSystemContractBaseTest.java
@@ -0,0 +1,327 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import static org.junit.Assume.*;
+
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystemContractBaseTest;
+import org.apache.hadoop.fs.Path;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.URI;
+import java.util.UUID;
+
+/**
+ *  Tests a live S3 system. If your keys and bucket aren't specified, all tests 
+ *  are marked as passed 
+ *  
+ *  This uses BlockJUnit4ClassRunner because FileSystemContractBaseTest from 
+ *  TestCase which uses the old Junit3 runner that doesn't ignore assumptions 
+ *  properly making it impossible to skip the tests if we don't have a valid
+ *  bucket.
+ **/
+public class S3AFileSystemContractBaseTest extends FileSystemContractBaseTest {
+  private static final int TEST_BUFFER_SIZE = 128;
+  private static final int MODULUS = 128;
+
+  protected static final Logger LOG = 
LoggerFactory.getLogger(S3AFileSystemContractBaseTest.class);
+
+  @Override
+  public void setUp() throws Exception {
+Configuration conf = new Configuration();
+
+URI testURI = URI.create(conf.get("test.fs.s3a.name"));
+
+boolean liveTest = testURI != null && !testURI.equals("s3a:///");
+
+// This doesn't work with our JUnit 3 style test cases, so instead we'll 
+// make this whole class not run by default
+assumeTrue(liveTest);
+
+fs = new S3AFileSystem();
+fs.initialize(testURI, conf);

[1/2] HADOOP-10400. Incorporate new S3A FileSystem implementation. Contributed by Jordan Mendelson and Dave Wang.

2014-09-15 Thread atm
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 dd3e28d43 -> a0c54aeb0


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a0c54aeb/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java
--
diff --git 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java
 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java
new file mode 100644
index 000..d677ec4
--- /dev/null
+++ 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.contract.s3a;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractSeekTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+
+public class TestS3AContractSeek extends AbstractContractSeekTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+return new S3AContract(conf);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a0c54aeb/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3AFileSystemContractBaseTest.java
--
diff --git 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3AFileSystemContractBaseTest.java
 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3AFileSystemContractBaseTest.java
new file mode 100644
index 000..8455233
--- /dev/null
+++ 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3AFileSystemContractBaseTest.java
@@ -0,0 +1,327 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import static org.junit.Assume.*;
+
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystemContractBaseTest;
+import org.apache.hadoop.fs.Path;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.URI;
+import java.util.UUID;
+
+/**
+ *  Tests a live S3 system. If your keys and bucket aren't specified, all tests 
+ *  are marked as passed 
+ *  
+ *  This uses BlockJUnit4ClassRunner because FileSystemContractBaseTest from 
+ *  TestCase which uses the old Junit3 runner that doesn't ignore assumptions 
+ *  properly making it impossible to skip the tests if we don't have a valid
+ *  bucket.
+ **/
+public class S3AFileSystemContractBaseTest extends FileSystemContractBaseTest {
+  private static final int TEST_BUFFER_SIZE = 128;
+  private static final int MODULUS = 128;
+
+  protected static final Logger LOG = 
LoggerFactory.getLogger(S3AFileSystemContractBaseTest.class);
+
+  @Override
+  public void setUp() throws Exception {
+Configuration conf = new Configuration();
+
+URI testURI = URI.create(conf.get("test.fs.s3a.name"));
+
+boolean liveTest = testURI != null && !testURI.equals("s3a:///");
+
+// This doesn't work with our JUnit 3 style test cases, so instead we'll 
+// make this whole class not run by default
+assumeTrue(liveTest);
+
+fs = new S3AFileSystem();
+fs.initialize(testURI, 

[2/2] git commit: HADOOP-10400. Incorporate new S3A FileSystem implementation. Contributed by Jordan Mendelson and Dave Wang.

2014-09-15 Thread atm
HADOOP-10400. Incorporate new S3A FileSystem implementation. Contributed by 
Jordan Mendelson and Dave Wang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a0c54aeb
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a0c54aeb
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a0c54aeb

Branch: refs/heads/branch-2
Commit: a0c54aeb00c0bc38f7dfa3615ce6866023d1ef74
Parents: dd3e28d
Author: Aaron T. Myers a...@apache.org
Authored: Mon Sep 15 08:30:42 2014 -0700
Committer: Aaron T. Myers a...@apache.org
Committed: Mon Sep 15 08:30:42 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |3 +
 .../src/main/conf/log4j.properties  |5 +
 .../src/main/resources/core-default.xml |   86 ++
 hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml  |8 +
 hadoop-project/pom.xml  |   26 +-
 hadoop-tools/hadoop-aws/pom.xml |   10 +
 .../fs/s3a/AnonymousAWSCredentialsProvider.java |   37 +
 .../fs/s3a/BasicAWSCredentialsProvider.java |   51 +
 .../org/apache/hadoop/fs/s3a/Constants.java |   90 ++
 .../org/apache/hadoop/fs/s3a/S3AFileStatus.java |   62 ++
 .../org/apache/hadoop/fs/s3a/S3AFileSystem.java | 1019 ++
 .../apache/hadoop/fs/s3a/S3AInputStream.java|  207 
 .../apache/hadoop/fs/s3a/S3AOutputStream.java   |  208 
 .../services/org.apache.hadoop.fs.FileSystem|1 +
 .../hadoop/fs/contract/s3a/S3AContract.java |   43 +
 .../fs/contract/s3a/TestS3AContractCreate.java  |   38 +
 .../fs/contract/s3a/TestS3AContractDelete.java  |   31 +
 .../fs/contract/s3a/TestS3AContractMkdir.java   |   34 +
 .../fs/contract/s3a/TestS3AContractOpen.java|   31 +
 .../fs/contract/s3a/TestS3AContractRename.java  |   64 ++
 .../fs/contract/s3a/TestS3AContractRootDir.java |   35 +
 .../fs/contract/s3a/TestS3AContractSeek.java|   31 +
 .../fs/s3a/S3AFileSystemContractBaseTest.java   |  327 ++
 .../src/test/resources/contract/s3a.xml |  105 ++
 .../src/test/resources/contract/s3n.xml |7 +-
 25 files changed, 2550 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a0c54aeb/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 2dababb..e3dd7d1 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -11,6 +11,9 @@ Release 2.6.0 - UNRELEASED
 HADOOP-10893. isolated classloader on the client side (Sangjin Lee via
 jlowe)
 
+HADOOP-10400. Incorporate new S3A FileSystem implementation. (Jordan
+Mendelson and Dave Wang via atm)
+
   IMPROVEMENTS
 
 HADOOP-10808. Remove unused native code for munlock. (cnauroth)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a0c54aeb/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
--
diff --git a/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties 
b/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
index ef9acbf..5fa21fa 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
+++ b/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
@@ -174,6 +174,11 @@ 
log4j.appender.MRAUDIT.MaxBackupIndex=${mapred.audit.log.maxbackupindex}
 # Jets3t library
 log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
 
+# AWS SDK & S3A FileSystem
+log4j.logger.com.amazonaws=ERROR
+log4j.logger.com.amazonaws.http.AmazonHttpClient=ERROR
+log4j.logger.org.apache.hadoop.fs.s3a.S3AFileSystem=WARN
+
 #
 # Event Counter Appender
 # Sends counts of logging messages at different severity levels to Hadoop 
Metrics.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a0c54aeb/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml 
b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index ee3cbf0..cd953e3 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -682,6 +682,92 @@ for ldap providers in the same way as above does.
 </property>
 
 <property>
+  <name>fs.s3a.access.key</name>
+  <description>AWS access key ID. Omit for Role-based 
authentication.</description>
+</property>
+
+<property>
+  <name>fs.s3a.secret.key</name>
+  <description>AWS secret key. Omit for Role-based 
authentication.</description>
+</property>
+
+<property>

git commit: HDFS-7062. Archival Storage: skip under construction block for migration. Contributed by Jing Zhao.

2014-09-15 Thread jing9
Repository: hadoop
Updated Branches:
  refs/heads/HDFS-6584 dba52ce0b -> 2689b6ca7


HDFS-7062. Archival Storage: skip under construction block for migration. 
Contributed by Jing Zhao.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2689b6ca
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2689b6ca
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2689b6ca

Branch: refs/heads/HDFS-6584
Commit: 2689b6ca727fff8a13347b811eb4cf79b9d30f48
Parents: dba52ce
Author: Jing Zhao ji...@apache.org
Authored: Mon Sep 15 10:16:56 2014 -0700
Committer: Jing Zhao ji...@apache.org
Committed: Mon Sep 15 10:16:56 2014 -0700

--
 .../apache/hadoop/hdfs/server/mover/Mover.java  | 10 ++-
 .../org/apache/hadoop/hdfs/tools/DFSAdmin.java  |  4 +-
 .../hdfs/server/mover/TestStorageMover.java | 77 
 3 files changed, 88 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2689b6ca/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java
index 96588ff..e336ebc 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/mover/Mover.java
@@ -321,7 +321,14 @@ public class Mover {
 
   final LocatedBlocks locatedBlocks = status.getBlockLocations();
   boolean hasRemaining = false;
-  for(LocatedBlock lb : locatedBlocks.getLocatedBlocks()) {
+  final boolean lastBlkComplete = locatedBlocks.isLastBlockComplete();
+  List<LocatedBlock> lbs = locatedBlocks.getLocatedBlocks();
+  for(int i = 0; i < lbs.size(); i++) {
+if (i == lbs.size() - 1 && !lastBlkComplete) {
+  // last block is incomplete, skip it
+  continue;
+}
+LocatedBlock lb = lbs.get(i);
 final StorageTypeDiff diff = new StorageTypeDiff(types,
 lb.getStorageTypes());
 if (!diff.removeOverlap()) {
@@ -472,6 +479,7 @@ public class Mover {
   final ExitStatus r = m.run();
 
   if (r == ExitStatus.SUCCESS) {
+IOUtils.cleanup(LOG, nnc);
 iter.remove();
   } else if (r != ExitStatus.IN_PROGRESS) {
 // must be an error statue, return

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2689b6ca/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
index 10012c6..556eca6 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
@@ -387,8 +387,8 @@ public class DFSAdmin extends FsShell {
 \t[-shutdownDatanode datanode_host:ipc_port [upgrade]]\n +
 \t[-getDatanodeInfo datanode_host:ipc_port]\n +
 \t[-metasave filename]\n +
-\t[-setStoragePolicy path policyName\n +
-\t[-getStoragePolicy path\n +
+\t[-setStoragePolicy path policyName]\n +
+\t[-getStoragePolicy path]\n +
 \t[-help [cmd]]\n;
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2689b6ca/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/mover/TestStorageMover.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/mover/TestStorageMover.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/mover/TestStorageMover.java
index d5d5cab..ceedfc2 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/mover/TestStorageMover.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/mover/TestStorageMover.java
@@ -30,9 +30,13 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.BlockLocation;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import 

git commit: HDFS-7032. Add WebHDFS support for reading and writing to encryption zones. Contributed by Charles Lamb.

2014-09-15 Thread wang
Repository: hadoop
Updated Branches:
  refs/heads/trunk 24d920b80 -> 43b030300


HDFS-7032. Add WebHDFS support for reading and writing to encryption zones. 
Contributed by Charles Lamb.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/43b03030
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/43b03030
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/43b03030

Branch: refs/heads/trunk
Commit: 43b03030084839db041d0337013806aaeef12aaa
Parents: 24d920b
Author: Andrew Wang w...@apache.org
Authored: Mon Sep 15 10:23:57 2014 -0700
Committer: Andrew Wang w...@apache.org
Committed: Mon Sep 15 10:23:57 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../web/resources/DatanodeWebHdfsMethods.java   | 13 +++--
 .../apache/hadoop/hdfs/TestEncryptionZones.java | 52 
 3 files changed, 63 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/43b03030/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 57a4a0f..209c7c1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -647,6 +647,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-7045. Fix NameNode deadlock when opening file under /.reserved path.
 (Yi Liu via wang)
 
+HDFS-7032. Add WebHDFS support for reading and writing to encryption zones.
+(clamb via wang)
+
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
  HDFS-6387. HDFS CLI admin tool for creating & deleting an

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43b03030/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
index 51731c8..0f0f3be 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
@@ -231,11 +231,13 @@ public class DatanodeWebHdfsMethods {
   DFSClient dfsclient = newDfsClient(nnId, conf);
   FSDataOutputStream out = null;
   try {
-out = new FSDataOutputStream(dfsclient.create(
+out = dfsclient.createWrappedOutputStream(dfsclient.create(
 fullpath, permission.getFsPermission(), 
-overwrite.getValue() ? EnumSet.of(CreateFlag.CREATE, 
CreateFlag.OVERWRITE)
-: EnumSet.of(CreateFlag.CREATE),
-replication.getValue(conf), blockSize.getValue(conf), null, b, 
null), null);
+overwrite.getValue() ?
+EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE) :
+EnumSet.of(CreateFlag.CREATE),
+replication.getValue(conf), blockSize.getValue(conf), null,
+b, null), null);
 IOUtils.copyBytes(in, out, b);
 out.close();
 out = null;
@@ -418,7 +420,8 @@ public class DatanodeWebHdfsMethods {
   final DFSClient dfsclient = newDfsClient(nnId, conf);
   HdfsDataInputStream in = null;
   try {
-in = new HdfsDataInputStream(dfsclient.open(fullpath, b, true));
+in = dfsclient.createWrappedInputStream(
+dfsclient.open(fullpath, b, true));
 in.seek(offset.getValue());
   } catch(IOException ioe) {
 IOUtils.cleanup(LOG, in);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43b03030/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
index b3bf5d9..68fc850 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
 import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
 import org.apache.hadoop.crypto.key.KeyProviderFactory;

git commit: HDFS-7032. Add WebHDFS support for reading and writing to encryption zones. Contributed by Charles Lamb.

2014-09-15 Thread wang
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 a0c54aeb0 -> d59555785


HDFS-7032. Add WebHDFS support for reading and writing to encryption zones. 
Contributed by Charles Lamb.

(cherry picked from commit 43b03030084839db041d0337013806aaeef12aaa)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d5955578
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d5955578
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d5955578

Branch: refs/heads/branch-2
Commit: d59555785935a25526f0b294038294ac6293fdc6
Parents: a0c54ae
Author: Andrew Wang w...@apache.org
Authored: Mon Sep 15 10:23:57 2014 -0700
Committer: Andrew Wang w...@apache.org
Committed: Mon Sep 15 10:24:38 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../web/resources/DatanodeWebHdfsMethods.java   | 13 +++--
 .../apache/hadoop/hdfs/TestEncryptionZones.java | 52 
 3 files changed, 63 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d5955578/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 7a42ea9..9afe524 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -389,6 +389,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-7045. Fix NameNode deadlock when opening file under /.reserved path.
 (Yi Liu via wang)
 
+HDFS-7032. Add WebHDFS support for reading and writing to encryption zones.
+(clamb via wang)
+
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
  HDFS-6387. HDFS CLI admin tool for creating & deleting an

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d5955578/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
index 51731c8..0f0f3be 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
@@ -231,11 +231,13 @@ public class DatanodeWebHdfsMethods {
   DFSClient dfsclient = newDfsClient(nnId, conf);
   FSDataOutputStream out = null;
   try {
-out = new FSDataOutputStream(dfsclient.create(
+out = dfsclient.createWrappedOutputStream(dfsclient.create(
 fullpath, permission.getFsPermission(), 
-overwrite.getValue() ? EnumSet.of(CreateFlag.CREATE, 
CreateFlag.OVERWRITE)
-: EnumSet.of(CreateFlag.CREATE),
-replication.getValue(conf), blockSize.getValue(conf), null, b, 
null), null);
+overwrite.getValue() ?
+EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE) :
+EnumSet.of(CreateFlag.CREATE),
+replication.getValue(conf), blockSize.getValue(conf), null,
+b, null), null);
 IOUtils.copyBytes(in, out, b);
 out.close();
 out = null;
@@ -418,7 +420,8 @@ public class DatanodeWebHdfsMethods {
   final DFSClient dfsclient = newDfsClient(nnId, conf);
   HdfsDataInputStream in = null;
   try {
-in = new HdfsDataInputStream(dfsclient.open(fullpath, b, true));
+in = dfsclient.createWrappedInputStream(
+dfsclient.open(fullpath, b, true));
 in.seek(offset.getValue());
   } catch(IOException ioe) {
 IOUtils.cleanup(LOG, in);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d5955578/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
index b3bf5d9..68fc850 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
 import org.apache.hadoop.crypto.key.KeyProvider;
 import 

[26/52] [abbrv] MAPREDUCE-6056. native-task: move system test working dir to target dir and cleanup test config xml files (Manu Zhang via bchang)

2014-09-15 Thread jing9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7ade9b04/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/resources/test-snappy-compress-conf.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/resources/test-snappy-compress-conf.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/resources/test-snappy-compress-conf.xml
deleted file mode 100644
index 46b3ea7..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/resources/test-snappy-compress-conf.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<!-- Put site-specific property overrides in this file. -->
-
-<configuration>
-<property>
-<name>mapred.output.compress</name>
-<value>false</value>
-</property>
-<property>
-<name>mapreduce.reduce.class</name>
-<value>org.apache.hadoop.mapred.nativetask.kvtest.HashSumReducer</value>
-</property>
-
-<property>
-<name>mapred.output.value.class</name>
-<value>org.apache.hadoop.io.IntWritable</value>
-</property>
-<property>
-<name>nativetask.compress.filesize</name>
-<value>10</value>
-</property>
-
-<property>
-<name>mapred.compress.map.output</name>
-<value>true</value>
-</property>
-<property>
-<name>mapred.output.compression.type</name>
-<value>BLOCK</value>
-</property>
-<property>
-<name>mapred.map.output.compression.codec</name>
-<value>org.apache.hadoop.io.compress.SnappyCodec</value>
-</property>
-<property>
-<name>hadoop.native.lib</name>
-<value>true</value>
-</property>
-</configuration>



[25/52] [abbrv] git commit: MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is sucessful (Contributed by Binglin Chang)

2014-09-15 Thread jing9
MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is 
successful (Contributed by Binglin Chang)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/17cd0faa
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/17cd0faa
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/17cd0faa

Branch: refs/heads/HDFS-6584
Commit: 17cd0faaceb8f9ce00b8c2c200e810f78f36c671
Parents: bfd1d75
Author: Binglin Chang bch...@apache.org
Authored: Tue Sep 2 11:00:48 2014 +0800
Committer: Binglin Chang bch...@apache.org
Committed: Tue Sep 2 11:00:48 2014 +0800

--
 .../CHANGES.MAPREDUCE-2841.txt  |  1 +
 .../hadoop-mapreduce-client-nativetask/pom.xml  | 28 ++--
 .../nativetask/combinertest/CombinerTest.java   | 10 +--
 .../combinertest/LargeKVCombinerTest.java   | 15 +--
 .../combinertest/OldAPICombinerTest.java|  5 
 .../nativetask/compresstest/CompressTest.java   | 18 -
 .../hadoop/mapred/nativetask/kvtest/KVJob.java  |  5 ++--
 .../hadoop/mapred/nativetask/kvtest/KVTest.java | 15 +--
 .../mapred/nativetask/kvtest/LargeKVTest.java   | 15 +--
 .../nativetask/nonsorttest/NonSortTest.java |  5 
 10 files changed, 74 insertions(+), 43 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/17cd0faa/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 6384757..baa88c1 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -17,3 +17,4 @@ MAPREDUCE-6026. native-task: fix logging (Manu Zhang via todd)
 MAPREDUCE-6035. native-task: sources/test-sources jar distribution (Manu Zhang 
via todd)
 MAPREDUCE-5977. Fix or suppress native-task gcc warnings (Manu Zhang via todd)
 MAPREDUCE-6054. native-task: Speed up tests (todd)
+MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is 
successful (Binglin Chang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/17cd0faa/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
index bb7d7bb..f62743e 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
@@ -83,10 +83,8 @@
 </property>
   </properties>
   <includes>
-<include>**/TestTaskContext.java</include>
-<include>**/buffer/Test*.java</include>
-<include>**/handlers/Test*.java</include>
-<include>**/serde/Test*.java</include>
+<include>**/*Test.java</include>
+<include>**/Test*.java</include>
   </includes>
 </configuration>
   </plugin>
@@ -201,28 +199,6 @@
   /execution
 /executions
   /plugin
-  <plugin>
-<groupId>org.apache.maven.plugins</groupId>
-<artifactId>maven-surefire-plugin</artifactId>
-<configuration>
-  <properties>
-<property>
-  <name>listener</name>
-  <value>org.apache.hadoop.test.TimedOutTestsListener</value>
-</property>
-  </properties>
-  <includes>
-<include>**/TestTaskContext.java</include>
-<include>**/buffer/Test*.java</include>
-<include>**/handlers/Test*.java</include>
-<include>**/serde/Test*.java</include>
-<include>**/combinertest/*Test.java</include>
-<include>**/compresstest/*Test.java</include>
-<include>**/nonsorttest/*Test.java</include>
-<include>**/kvtest/*Test.java</include>
-  </includes>
-</configuration>
-  </plugin>
 /plugins
   /build
 /profile

http://git-wip-us.apache.org/repos/asf/hadoop/blob/17cd0faa/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
 

[41/52] [abbrv] git commit: HADOOP-11088. Unittest TestKeyShell, TestCredShell and TestKMS assume UNIX path separator for JECKS key store path. Contributed by Xiaoyu Yao.

2014-09-15 Thread jing9
HADOOP-11088. Unittest TestKeyShell, TestCredShell and TestKMS assume UNIX path 
separator for JECKS key store path. Contributed by Xiaoyu Yao.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/957414d4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/957414d4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/957414d4

Branch: refs/heads/HDFS-6584
Commit: 957414d4cb57cb8172070cc53530b7da78b8c9ca
Parents: 54e5794
Author: cnauroth cnaur...@apache.org
Authored: Fri Sep 12 14:50:07 2014 -0700
Committer: cnauroth cnaur...@apache.org
Committed: Fri Sep 12 14:50:07 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +++
 .../apache/hadoop/crypto/key/TestKeyShell.java  |  4 +++-
 .../hadoop/security/alias/TestCredShell.java| 22 +---
 .../crypto/key/kms/server/KMSConfiguration.java | 15 ++---
 .../hadoop/crypto/key/kms/server/MiniKMS.java   |  3 ++-
 .../hadoop/crypto/key/kms/server/TestKMS.java   |  3 ++-
 6 files changed, 30 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/957414d4/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 1176a21..051eac1 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -793,6 +793,9 @@ Release 2.6.0 - UNRELEASED
 HADOOP-11083. After refactoring of HTTP proxyuser to common, doAs param is 
 case sensitive. (tucu)
 
+HADOOP-11088. Unittest TestKeyShell, TestCredShell and TestKMS assume UNIX
+path separator for JECKS key store path. (Xiaoyu Yao via cnauroth)
+
 Release 2.5.1 - 2014-09-05
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/957414d4/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
index d65d9b2..3407eb7 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
@@ -24,6 +24,7 @@ import java.io.PrintStream;
 import java.util.UUID;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -51,7 +52,8 @@ public class TestKeyShell {
 if (!tmpDir.mkdirs()) {
  throw new IOException("Unable to create " + tmpDir);
}
-jceksProvider = "jceks://file" + tmpDir + "/keystore.jceks";
+final Path jksPath = new Path(tmpDir.toString(), "keystore.jceks");
+jceksProvider = "jceks://file" + jksPath.toUri();
 initialStdOut = System.out;
 initialStdErr = System.err;
 System.setOut(new PrintStream(outContent));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/957414d4/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java
index b9f0dc9..c890362 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredShell.java
@@ -29,6 +29,7 @@ import java.util.Arrays;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -37,18 +38,23 @@ public class TestCredShell {
   private final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
   private static final File tmpDir =
  new File(System.getProperty("test.build.data", "/tmp"), "creds");
-  
+
+  /* The default JCEKS provider - for testing purposes */
+  private String jceksProvider;
+
   @Before
   public void setup() throws Exception {
 System.setOut(new PrintStream(outContent));
 System.setErr(new PrintStream(errContent));
+final Path jksPath = new Path(tmpDir.toString(), "keystore.jceks");
+jceksProvider = "jceks://file" + jksPath.toUri();
   }
   
   @Test
   public void testCredentialSuccessfulLifecycle() throws Exception {
 

[11/52] [abbrv] git commit: MAPREDUCE-5995. native-task: Revert changes to Text internals. Contributed by Todd Lipcon.

2014-09-15 Thread jing9
MAPREDUCE-5995. native-task: Revert changes to Text internals. Contributed by 
Todd Lipcon.

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1613828 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/78d86a98
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/78d86a98
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/78d86a98

Branch: refs/heads/HDFS-6584
Commit: 78d86a983960f231f51e47579c0b1ae73f9645fd
Parents: 43917e5
Author: Todd Lipcon t...@apache.org
Authored: Sun Jul 27 19:04:14 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Sun Jul 27 19:04:14 2014 +

--
 .../src/main/java/org/apache/hadoop/io/Text.java| 6 +-
 hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt | 1 +
 .../hadoop/mapred/nativetask/serde/TextSerializer.java  | 9 +
 3 files changed, 3 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/78d86a98/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
index 73cd1e6..3dc5076 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
@@ -226,10 +226,6 @@ public class Text extends BinaryComparable
 this.length = len;
   }
 
-  public void setLength(int len) {
-this.length = len;
-  }
-
   /**
* Append a range of bytes to the end of the given text
* @param utf8 the data to copy from
@@ -264,7 +260,7 @@ public class Text extends BinaryComparable
* @param len the number of bytes we need
* @param keepData should the old data be kept
*/
-  public void setCapacity(int len, boolean keepData) {
+  private void setCapacity(int len, boolean keepData) {
 if (bytes == null || bytes.length < len) {
   if (bytes != null && keepData) {
 bytes = Arrays.copyOf(bytes, Math.max(len,length << 1));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/78d86a98/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 92c94a8..42e0af3 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -7,3 +7,4 @@ MAPREDUCE-5996. native-task: Rename system tests into standard 
directory layout
 MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop Common (todd)
 MAPREDUCE-6000. native-task: Simplify ByteBufferDataReader/Writer (todd)
 MAPREDUCE-5991. native-task should not run unit tests if native profile is not 
enabled. (Binglin Chang)
+MAPREDUCE-5995. native-task: Revert changes to Text internals (todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/78d86a98/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/TextSerializer.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/TextSerializer.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/TextSerializer.java
index 63a64de..cde4c7b 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/TextSerializer.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/TextSerializer.java
@@ -42,13 +42,6 @@ public class TextSerializer implements 
INativeSerializer<Text>, INativeComparable
 
   @Override
   public void deserialize(DataInput in, int length, Text w) throws IOException 
{
-try {
-  w.setCapacity(length, true);
-  w.setLength(length);
-} catch (final Exception e) {
-  throw new IOException(e);
-}
-final byte[] bytes = w.getBytes();
-in.readFully(bytes, 0, length);
+w.readWithKnownLength(in, length);
   }
 }



[30/52] [abbrv] git commit: MAPREDUCE-6055. native-task: findbugs, interface annotations, and other misc cleanup

2014-09-15 Thread jing9
MAPREDUCE-6055. native-task: findbugs, interface annotations, and other misc 
cleanup


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1081d9ce
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1081d9ce
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1081d9ce

Branch: refs/heads/HDFS-6584
Commit: 1081d9cee23aa661d7c9165bc9855865a38b528e
Parents: cce7d1e
Author: Todd Lipcon t...@apache.org
Authored: Wed Sep 3 12:02:47 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Wed Sep 3 12:22:38 2014 -0700

--
 .../CHANGES.MAPREDUCE-2841.txt  |  1 +
 .../hadoop/mapred/nativetask/Command.java   |  8 +
 .../mapred/nativetask/CommandDispatcher.java| 11 ++-
 .../hadoop/mapred/nativetask/Constants.java | 18 ++-
 .../hadoop/mapred/nativetask/DataChannel.java   |  3 ++
 .../hadoop/mapred/nativetask/DataReceiver.java  |  3 ++
 .../mapred/nativetask/HadoopPlatform.java   |  6 ++--
 .../mapred/nativetask/ICombineHandler.java  |  3 ++
 .../mapred/nativetask/INativeComparable.java|  5 +++
 .../mapred/nativetask/INativeHandler.java   |  2 ++
 .../mapred/nativetask/NativeBatchProcessor.java |  3 +-
 .../mapred/nativetask/NativeDataSource.java | 10 ++
 .../mapred/nativetask/NativeDataTarget.java | 14 +++--
 .../NativeMapOutputCollectorDelegator.java  | 11 ---
 .../hadoop/mapred/nativetask/NativeRuntime.java | 10 +++---
 .../hadoop/mapred/nativetask/Platform.java  | 15 -
 .../hadoop/mapred/nativetask/Platforms.java |  7 +++--
 .../mapred/nativetask/StatusReportChecker.java  | 33 ++--
 .../hadoop/mapred/nativetask/TaskContext.java   | 29 +
 .../mapred/nativetask/buffer/BufferType.java|  5 +--
 .../nativetask/buffer/ByteBufferDataReader.java | 10 ++
 .../nativetask/buffer/ByteBufferDataWriter.java | 10 +++---
 .../nativetask/buffer/DataInputStream.java  |  2 ++
 .../nativetask/buffer/DataOutputStream.java | 11 +++
 .../mapred/nativetask/buffer/InputBuffer.java   |  2 ++
 .../mapred/nativetask/buffer/OutputBuffer.java  |  2 ++
 .../nativetask/handlers/BufferPullee.java   |  4 ++-
 .../nativetask/handlers/BufferPuller.java   | 26 ---
 .../nativetask/handlers/BufferPushee.java   |  2 ++
 .../nativetask/handlers/BufferPusher.java   |  3 +-
 .../nativetask/handlers/CombinerHandler.java| 19 +--
 .../mapred/nativetask/handlers/IDataLoader.java |  3 ++
 .../handlers/NativeCollectorOnlyHandler.java| 22 -
 .../serde/BoolWritableSerializer.java   |  2 ++
 .../serde/ByteWritableSerializer.java   |  2 ++
 .../serde/BytesWritableSerializer.java  |  2 ++
 .../nativetask/serde/DefaultSerializer.java |  4 +++
 .../serde/DoubleWritableSerializer.java |  2 ++
 .../serde/FloatWritableSerializer.java  |  3 ++
 .../mapred/nativetask/serde/IKVSerializer.java  | 12 ---
 .../nativetask/serde/INativeSerializer.java |  4 +++
 .../nativetask/serde/IntWritableSerializer.java |  2 ++
 .../mapred/nativetask/serde/KVSerializer.java   | 17 ++
 .../serde/LongWritableSerializer.java   |  2 ++
 .../nativetask/serde/NativeSerialization.java   |  2 ++
 .../serde/NullWritableSerializer.java   |  2 ++
 .../serde/SerializationFramework.java   |  3 ++
 .../mapred/nativetask/serde/TextSerializer.java |  2 ++
 .../serde/VIntWritableSerializer.java   |  3 +-
 .../serde/VLongWritableSerializer.java  |  3 +-
 .../mapred/nativetask/util/BytesUtil.java   |  2 ++
 .../mapred/nativetask/util/ConfigUtil.java  |  5 ++-
 .../nativetask/util/LocalJobOutputFiles.java|  2 ++
 .../nativetask/util/NativeTaskOutput.java   |  2 ++
 .../nativetask/util/NativeTaskOutputFiles.java  |  3 +-
 .../mapred/nativetask/util/OutputUtil.java  |  5 +--
 .../mapred/nativetask/util/ReadWriteBuffer.java |  5 +--
 .../mapred/nativetask/util/SizedWritable.java   |  4 ++-
 .../mapred/nativetask/TestTaskContext.java  |  4 +--
 59 files changed, 264 insertions(+), 148 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1081d9ce/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 7c9558e..269a2f6 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -19,3 +19,4 @@ MAPREDUCE-5977. Fix or suppress native-task gcc warnings 
(Manu Zhang via todd)
 MAPREDUCE-6054. native-task: Speed up tests (todd)
 MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is 

[07/52] [abbrv] git commit: MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop Common. Contributed by Todd Lipcon.

2014-09-15 Thread jing9
MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop Common. 
Contributed by Todd Lipcon.

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1613034 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/77acc70d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/77acc70d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/77acc70d

Branch: refs/heads/HDFS-6584
Commit: 77acc70df5d64055ea809222f3d2f0d66c611196
Parents: 5149a8a
Author: Todd Lipcon t...@apache.org
Authored: Thu Jul 24 08:20:25 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Thu Jul 24 08:20:25 2014 +

--
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../mapred/nativetask/NativeBatchProcessor.java |   8 +-
 .../nativetask/buffer/DirectBufferPool.java |  93 -
 .../mapred/nativetask/buffer/InputBuffer.java   |  17 +-
 .../nativetask/buffer/TestDirectBufferPool.java | 201 ---
 5 files changed, 20 insertions(+), 300 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/77acc70d/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index cea5a76..e12f743 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -4,3 +4,4 @@ Changes for Hadoop Native Map Output Collector
 MAPREDUCE-5985. native-task: Fix build on macosx. Contributed by Binglin Chang
 MAPREDUCE-5994. Simplify ByteUtils and fix failing test. (todd)
 MAPREDUCE-5996. native-task: Rename system tests into standard directory 
layout (todd)
+MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop Common (todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77acc70d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeBatchProcessor.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeBatchProcessor.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeBatchProcessor.java
index fd68ea6..837da0e 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeBatchProcessor.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeBatchProcessor.java
@@ -24,12 +24,13 @@ import java.nio.ByteBuffer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapred.nativetask.buffer.BufferType;
-import org.apache.hadoop.mapred.nativetask.buffer.DirectBufferPool;
 import org.apache.hadoop.mapred.nativetask.buffer.InputBuffer;
 import org.apache.hadoop.mapred.nativetask.buffer.OutputBuffer;
 import org.apache.hadoop.mapred.nativetask.util.ReadWriteBuffer;
 import org.apache.hadoop.mapred.nativetask.util.ConfigUtil;
+import org.apache.hadoop.util.DirectBufferPool;
 
 /**
  * used to create channel, transfer data and command between Java and native
@@ -126,9 +127,8 @@ public class NativeBatchProcessor implements INativeHandler 
{
   NativeRuntime.releaseNativeObject(nativeHandlerAddr);
   nativeHandlerAddr = 0;
 }
-if (null != in && null != in.getByteBuffer() && 
in.getByteBuffer().isDirect()) {
-  DirectBufferPool.getInstance().returnBuffer(in.getByteBuffer());
-}
+IOUtils.cleanup(LOG, in);
+in = null;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77acc70d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/DirectBufferPool.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/DirectBufferPool.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/DirectBufferPool.java
deleted file mode 100644
index bd3c6bb..000
--- 

[12/52] [abbrv] git commit: MAPREDUCE-6005. native-task: Fix some valgrind errors (Binglin Chang)

2014-09-15 Thread jing9
MAPREDUCE-6005. native-task: Fix some valgrind errors (Binglin Chang)

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1615489 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/83a39673
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/83a39673
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/83a39673

Branch: refs/heads/HDFS-6584
Commit: 83a396733ea43564beee928f41041a341e26db05
Parents: 78d86a9
Author: Binglin Chang bch...@apache.org
Authored: Mon Aug 4 06:19:51 2014 +
Committer: Binglin Chang bch...@apache.org
Committed: Mon Aug 4 06:19:51 2014 +

--
 .../CHANGES.MAPREDUCE-2841.txt  |  1 +
 .../src/main/native/src/codec/GzipCodec.cc  | 11 +---
 .../src/main/native/src/lib/Buffers.cc  |  2 +-
 .../src/main/native/src/lib/FileSystem.cc   | 54 +++-
 .../src/main/native/src/lib/FileSystem.h|  5 --
 .../src/main/native/src/lib/MemoryBlock.h   |  4 ++
 .../main/native/src/lib/NativeObjectFactory.cc  |  2 +-
 .../main/native/src/lib/NativeObjectFactory.h   | 15 +-
 .../native/src/lib/PartitionBucketIterator.cc   |  7 ++-
 .../src/main/native/src/util/StringUtil.cc  | 15 ++
 .../src/main/native/src/util/StringUtil.h   |  2 +-
 .../src/main/native/src/util/WritableUtils.cc   |  2 +-
 .../src/main/native/test/TestCommand.cc | 12 ++---
 .../src/main/native/test/TestCounter.cc | 15 +-
 .../src/main/native/test/TestFileSystem.cc  | 31 ---
 .../src/main/native/test/TestIFile.cc   |  9 ++--
 .../src/main/native/test/TestMain.cc|  8 ++-
 .../src/main/native/test/lib/TestByteBuffer.cc  |  1 +
 .../lib/TestComparatorForDualPivotQuickSort.cc  |  1 +
 .../native/test/lib/TestComparatorForStdSort.cc |  1 +
 .../native/test/lib/TestFixSizeContainer.cc |  3 ++
 .../src/main/native/test/lib/TestIterator.cc| 40 +++
 .../src/main/native/test/lib/TestKVBuffer.cc|  1 +
 .../native/test/lib/TestMemBlockIterator.cc |  8 +++
 .../src/main/native/test/lib/TestMemoryBlock.cc |  3 ++
 .../main/native/test/lib/TestPartitionBucket.cc | 16 +-
 .../src/main/native/test/lib/TestReadBuffer.cc  |  4 +-
 .../main/native/test/lib/TestReadWriteBuffer.cc |  4 +-
 .../src/main/native/test/util/TestStringUtil.cc |  5 ++
 29 files changed, 146 insertions(+), 136 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/83a39673/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 42e0af3..c70df12 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -8,3 +8,4 @@ MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop 
Common (todd)
 MAPREDUCE-6000. native-task: Simplify ByteBufferDataReader/Writer (todd)
 MAPREDUCE-5991. native-task should not run unit tests if native profile is not 
enabled. (Binglin Chang)
 MAPREDUCE-5995. native-task: Revert changes to Text internals (todd)
+MAPREDUCE-6005. native-task: Fix some valgrind errors (Binglin Chang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/83a39673/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/GzipCodec.cc
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/GzipCodec.cc
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/GzipCodec.cc
index 79eac8b..bb699e9 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/GzipCodec.cc
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/GzipCodec.cc
@@ -28,7 +28,6 @@ GzipCompressStream::GzipCompressStream(OutputStream * stream, 
uint32_t bufferSiz
 : CompressStream(stream), _compressedBytesWritten(0), _zstream(NULL), 
_finished(false) {
   _buffer = new char[bufferSizeHint];
   _capacity = bufferSizeHint;
-  std::cout << "gzip capacity " << _capacity << std::endl;
   _zstream = malloc(sizeof(z_stream));
   z_stream * zstream = (z_stream*)_zstream;
   memset(zstream, 0, sizeof(z_stream));
@@ -44,6 +43,7 @@ GzipCompressStream::GzipCompressStream(OutputStream * stream, 
uint32_t bufferSiz
 
 GzipCompressStream::~GzipCompressStream() {
   if (_zstream != NULL) {
+deflateEnd((z_stream*)_zstream);
 free(_zstream);
 _zstream = 

[49/52] [abbrv] git commit: HDFS-7061. Add test to verify encryption zone creation after NameNode restart without saving namespace. Contributed by Stephen Chu.

2014-09-15 Thread jing9
HDFS-7061. Add test to verify encryption zone creation after NameNode restart 
without saving namespace. Contributed by Stephen Chu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fc741b5d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fc741b5d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fc741b5d

Branch: refs/heads/HDFS-6584
Commit: fc741b5d78e7e006355e17b1b5839f502e37261b
Parents: 14e2639
Author: Andrew Wang w...@apache.org
Authored: Sun Sep 14 23:48:24 2014 -0700
Committer: Andrew Wang w...@apache.org
Committed: Sun Sep 14 23:49:01 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt   |  3 +++
 .../java/org/apache/hadoop/hdfs/TestEncryptionZones.java  | 10 ++
 2 files changed, 13 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fc741b5d/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index bddf303..57a4a0f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -455,6 +455,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-6482. Use block ID-based block layout on datanodes (James Thomas via
 Colin Patrick McCabe)
 
+HDFS-7061. Add test to verify encryption zone creation after NameNode
+restart without saving namespace. (Stephen Chu via wang)
+
   OPTIMIZATIONS
 
 HDFS-6690. Deduplicate xattr names in memory. (wang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fc741b5d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
index db3c085..b3bf5d9 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
@@ -338,6 +338,16 @@ public class TestEncryptionZones {
 cluster.restartNameNode(true);
 assertNumZones(numZones);
 assertZonePresent(null, zone1.toString());
+
+// Verify newly added ez is present after restarting the NameNode
+// without persisting the namespace.
+Path nonpersistZone = new Path(/nonpersistZone);
+fsWrapper.mkdir(nonpersistZone, FsPermission.getDirDefault(), false);
+dfsAdmin.createEncryptionZone(nonpersistZone, TEST_KEY);
+numZones++;
+cluster.restartNameNode(true);
+assertNumZones(numZones);
+assertZonePresent(null, nonpersistZone.toString());
   }
 
   /**



[14/52] [abbrv] git commit: MAPREDUCE-5976. native-task: should not fail to build if snappy is missing (Manu Zhang)

2014-09-15 Thread jing9
MAPREDUCE-5976. native-task: should not fail to build if snappy is missing 
(Manu Zhang)

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1616115 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/432f641b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/432f641b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/432f641b

Branch: refs/heads/HDFS-6584
Commit: 432f641bc2f6c7fdb67da578233cbe07fdfbf1ea
Parents: 7ecaa81
Author: Sean Zhong seanzh...@apache.org
Authored: Wed Aug 6 07:32:49 2014 +
Committer: Sean Zhong seanzh...@apache.org
Committed: Wed Aug 6 07:32:49 2014 +

--
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../src/CMakeLists.txt  |   8 +-
 .../NativeMapOutputCollectorDelegator.java  |  34 ++--
 .../hadoop/mapred/nativetask/NativeRuntime.java |  11 +-
 .../mapred/nativetask/util/SnappyUtil.java  |  32 
 .../src/main/native/src/codec/SnappyCodec.cc|   9 +-
 .../src/main/native/src/codec/snappy-c.h| 138 
 .../src/main/native/src/codec/snappy.h  | 163 ---
 .../src/main/native/src/lib/Compressions.cc |   9 +
 .../main/native/src/lib/NativeRuntimeJniImpl.cc |  25 +++
 ...che_hadoop_mapred_nativetask_NativeRuntime.h |   8 +
 .../src/main/native/test/TestCompressions.cc|  69 
 .../src/main/native/test/TestIFile.cc   |   3 +
 .../src/main/native/test/lib/TestReadBuffer.cc  |   4 +-
 14 files changed, 117 insertions(+), 397 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/432f641b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 9dddcd5..4b77262 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -10,3 +10,4 @@ MAPREDUCE-5991. native-task should not run unit tests if 
native profile is not e
 MAPREDUCE-5995. native-task: Revert changes to Text internals (todd)
 MAPREDUCE-6005. native-task: Fix some valgrind errors (Binglin Chang)
 MAPREDUCE-5984. native-task: Reuse lz4 sources in hadoop-common (Binglin Chang)
+MAPREDUCE-5976. native-task: should not fail to build if snappy is missing 
(Manu Zhang)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/432f641b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
index 36dbd9c..f38021d 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
@@ -122,7 +122,6 @@ CHECK_FUNCTION_EXISTS(memset HAVE_MEMSET)
 CHECK_FUNCTION_EXISTS(strchr HAVE_STRCHR)
 CHECK_FUNCTION_EXISTS(strtoul HAVE_STRTOUL)
 
-
 SET(STORED_CMAKE_FIND_LIBRARY_SUFFIXES CMAKE_FIND_LIBRARY_SUFFIXES)
 set_find_shared_library_version(1)
 find_library(SNAPPY_LIBRARY
@@ -139,6 +138,7 @@ if (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
 set(SNAPPY_SOURCE_FILES
 ${D}/src/codec/SnappyCodec.cc)
 else (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
+set(SNAPPY_LIBRARY )
 set(SNAPPY_INCLUDE_DIR )
 set(SNAPPY_SOURCE_FILES )
 IF(REQUIRE_SNAPPY)
@@ -146,6 +146,8 @@ else (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
 ENDIF(REQUIRE_SNAPPY)
 endif (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
 
+CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
+
 include_directories(
 ${GENERATED_JAVAH}
 ${D}
@@ -154,15 +156,11 @@ include_directories(
 ${D}/src/lib
 ${D}/test
 ${CMAKE_CURRENT_SOURCE_DIR}
-#${CMAKE_CURRENT_SOURCE_DIR}/src
 ${CMAKE_BINARY_DIR}
 ${JNI_INCLUDE_DIRS}
 ${SNAPPY_INCLUDE_DIR}
 )
 
-#SET(CMAKE_SOURCE_DIR 
/cygdrive/c/Users/tianlunz/repo/hadoop-2.2.0-src/hadoop-common-project/hadoop-common/src)
-CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
-
 
 SET(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/432f641b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeMapOutputCollectorDelegator.java

[04/52] [abbrv] MAPREDUCE-5996. native-task: Rename system tests into standard directory layout. Contributed by Todd Lipcon.

2014-09-15 Thread jing9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2cba48f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/MockValueClass.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/MockValueClass.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/MockValueClass.java
new file mode 100644
index 000..b27f00f
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/MockValueClass.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred.nativetask.testutil;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Random;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.mapred.nativetask.util.BytesUtil;
+
+public class MockValueClass implements Writable {
+  private final static int DEFAULT_ARRAY_LENGTH = 16;
+  private int a = 0;
+  private byte[] array;
+  private final LongWritable longWritable;
+  private final Text txt;
+  private final Random rand = new Random();
+
+  public MockValueClass() {
+a = rand.nextInt();
+array = new byte[DEFAULT_ARRAY_LENGTH];
+rand.nextBytes(array);
+longWritable = new LongWritable(rand.nextLong());
+txt = new Text(BytesUtil.toStringBinary(array));
+  }
+
+  public MockValueClass(byte[] seed) {
+a = seed.length;
+array = new byte[seed.length];
+System.arraycopy(seed, 0, array, 0, seed.length);
+longWritable = new LongWritable(a);
+txt = new Text(BytesUtil.toStringBinary(array));
+  }
+
+  @Override
+  public void write(DataOutput out) throws IOException {
+out.writeInt(a);
+out.writeInt(array.length);
+out.write(array);
+longWritable.write(out);
+txt.write(out);
+  }
+
+  @Override
+  public void readFields(DataInput in) throws IOException {
+a = in.readInt();
+final int length = in.readInt();
+array = new byte[length];
+in.readFully(array);
+longWritable.readFields(in);
+txt.readFields(in);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2cba48f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
new file mode 100644
index 000..b665971
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */

[24/52] [abbrv] git commit: MAPREDUCE-6054. native-task: Speed up tests. Contributed by Todd Lipcon.

2014-09-15 Thread jing9
MAPREDUCE-6054. native-task: Speed up tests. Contributed by Todd Lipcon.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bfd1d75d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bfd1d75d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bfd1d75d

Branch: refs/heads/HDFS-6584
Commit: bfd1d75d875b6ba261fdb1825d0f151b026c2d24
Parents: fad4524
Author: Todd Lipcon t...@apache.org
Authored: Wed Aug 27 12:25:07 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Wed Aug 27 12:25:49 2014 -0700

--
 .../CHANGES.MAPREDUCE-2841.txt  |  2 +-
 .../mapred/nativetask/util/BytesUtil.java   |  7 +-
 .../hadoop/mapred/nativetask/kvtest/KVJob.java  |  8 +-
 .../hadoop/mapred/nativetask/kvtest/KVTest.java | 94 +---
 .../mapred/nativetask/kvtest/TestInputFile.java | 13 ++-
 .../nativetask/testutil/BytesFactory.java   | 71 +--
 .../nativetask/testutil/MockValueClass.java |  8 ++
 .../mapred/nativetask/utils/TestBytesUtil.java  |  7 ++
 8 files changed, 123 insertions(+), 87 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/bfd1d75d/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 4dc08cb..6384757 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -16,4 +16,4 @@ MAPREDUCE-6006. native-task: add native tests to maven and 
fix bug in pom.xml (B
 MAPREDUCE-6026. native-task: fix logging (Manu Zhang via todd)
 MAPREDUCE-6035. native-task: sources/test-sources jar distribution (Manu Zhang 
via todd)
 MAPREDUCE-5977. Fix or suppress native-task gcc warnings (Manu Zhang via todd)
-
+MAPREDUCE-6054. native-task: Speed up tests (todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bfd1d75d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/BytesUtil.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/BytesUtil.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/BytesUtil.java
index d90ae8d..e33b23e 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/BytesUtil.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/BytesUtil.java
@@ -23,6 +23,9 @@ import com.google.common.primitives.Longs;
 
 public class BytesUtil {
 
+  private static final char[] HEX_CHARS =
+      "0123456789abcdef".toCharArray();
+
   /**
* Converts a big-endian byte array to a long value.
*
@@ -124,7 +127,9 @@ public class BytesUtil {
         || " `~!@#$%^&*()-_=+[]{}|;:'\",./<>?".indexOf(ch) >= 0 ) {
 result.append((char)ch);
   } else {
-        result.append(String.format("\\x%02X", ch));
+        result.append("\\x");
+        result.append(HEX_CHARS[(ch >> 4) & 0x0F]);
+        result.append(HEX_CHARS[ch & 0x0F]);
   }
 }
 return result.toString();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bfd1d75d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVJob.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVJob.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVJob.java
index 6d683f8..3215d0b 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVJob.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVJob.java
@@ -20,7 +20,11 @@ package org.apache.hadoop.mapred.nativetask.kvtest;
 import java.io.IOException;
 import java.util.zip.CRC32;
 
+import com.google.common.base.Stopwatch;
 import com.google.common.primitives.Longs;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import 

[36/52] [abbrv] git commit: Merge remote-tracking branch 'apache/trunk' into MR-2841

2014-09-15 Thread jing9
Merge remote-tracking branch 'apache/trunk' into MR-2841


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4b3f1e2c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4b3f1e2c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4b3f1e2c

Branch: refs/heads/HDFS-6584
Commit: 4b3f1e2ce48124b7406d77cc2ae1d0914311b0d4
Parents: 683987b c6107f5
Author: Todd Lipcon t...@apache.org
Authored: Fri Sep 5 10:47:27 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Fri Sep 5 10:47:27 2014 -0700

--
 LICENSE.txt | 290 
 NOTICE.txt  |   2 +
 README.txt  |  31 ++
 dev-support/create-release.sh   |  24 +-
 .../main/resources/assemblies/hadoop-src.xml|   8 +
 hadoop-common-project/hadoop-common/CHANGES.txt |  20 ++
 hadoop-common-project/hadoop-common/LICENSE.txt | 290 
 hadoop-common-project/hadoop-common/NOTICE.txt  |   2 -
 hadoop-common-project/hadoop-common/README.txt  |  31 --
 .../dev-support/findbugsExcludeFile.xml |   2 +-
 .../hadoop/crypto/key/KeyProviderFactory.java   |  36 +-
 .../crypto/key/kms/KMSClientProvider.java   |  57 +---
 .../hadoop/crypto/random/OsSecureRandom.java|  21 +-
 .../security/authorize/AccessControlList.java   |  12 +-
 .../DelegationTokenAuthenticationFilter.java|  15 +-
 .../DelegationTokenAuthenticationHandler.java   |   6 +-
 .../web/DelegationTokenAuthenticator.java   |  20 +-
 .../apache/hadoop/util/HttpExceptionUtils.java  | 185 ++
 .../apache/hadoop/crypto/TestCryptoCodec.java   |  69 +++-
 .../apache/hadoop/crypto/TestCryptoStreams.java |   2 +-
 .../crypto/key/TestKeyProviderFactory.java  |  13 +
 .../crypto/random/TestOsSecureRandom.java   |  15 +
 ...tionTokenAuthenticationHandlerWithMocks.java |  35 +-
 .../hadoop/util/TestHttpExceptionUtils.java | 167 +
 hadoop-common-project/hadoop-kms/pom.xml|   1 +
 .../hadoop/crypto/key/kms/server/KMS.java   |  27 +-
 .../hadoop/crypto/key/kms/server/KMSACLs.java   |  55 ++-
 .../key/kms/server/KMSExceptionsProvider.java   |  12 +-
 .../hadoop-kms/src/site/apt/index.apt.vm|  88 -
 .../hadoop/crypto/key/kms/server/TestKMS.java   | 100 +-
 .../crypto/key/kms/server/TestKMSACLs.java  |   2 +-
 hadoop-dist/pom.xml |   3 +
 .../hadoop/fs/http/client/HttpFSFileSystem.java |  70 ++--
 .../hadoop/fs/http/client/HttpFSUtils.java  |  50 ---
 .../hadoop/lib/wsrs/ExceptionProvider.java  |  14 +-
 .../fs/http/client/BaseTestHttpFSWith.java  |   4 +-
 .../fs/http/server/TestHttpFSServerNoACLs.java  |  10 +-
 .../hadoop/hdfs/nfs/nfs3/OpenFileCtx.java   |   6 +-
 .../hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java|   2 +-
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  25 ++
 hadoop-hdfs-project/hadoop-hdfs/LICENSE.txt | 271 ---
 hadoop-hdfs-project/hadoop-hdfs/NOTICE.txt  |   2 -
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   |   3 +
 .../java/org/apache/hadoop/hdfs/DFSUtil.java|  80 -
 .../hadoop/hdfs/DistributedFileSystem.java  |  11 +-
 .../org/apache/hadoop/hdfs/inotify/Event.java   |  12 +
 .../apache/hadoop/hdfs/protocolPB/PBHelper.java |   6 +-
 .../hdfs/server/datanode/BlockPoolManager.java  |  12 +-
 .../hadoop/hdfs/server/datanode/DataNode.java   |   2 +-
 .../hdfs/server/datanode/DataStorage.java   |   2 +-
 .../hdfs/server/namenode/FSDirectory.java   |  44 ++-
 .../hadoop/hdfs/server/namenode/FSEditLog.java  |   6 +-
 .../hdfs/server/namenode/FSEditLogLoader.java   |   8 +-
 .../hdfs/server/namenode/FSEditLogOp.java   |  18 +
 .../server/namenode/FSImageSerialization.java   |  17 +
 .../hdfs/server/namenode/FSNamesystem.java  |  60 ++--
 .../hdfs/server/namenode/INodeReference.java|   4 +-
 .../namenode/InotifyFSEditLogOpTranslator.java  |   1 +
 .../server/namenode/NameNodeLayoutVersion.java  |   4 +-
 .../snapshot/DirectorySnapshottableFeature.java |   2 +-
 .../org/apache/hadoop/hdfs/tools/GetConf.java   |   4 +-
 .../hadoop-hdfs/src/main/proto/inotify.proto|   1 +
 .../src/main/resources/hdfs-default.xml |  18 +
 .../hadoop/hdfs/TestBlocksScheduledCounter.java |  18 +-
 .../hdfs/TestDFSInotifyEventInputStream.java|   8 +-
 .../org/apache/hadoop/hdfs/TestDFSRename.java   |   6 +
 .../org/apache/hadoop/hdfs/TestDFSUtil.java |  26 ++
 .../org/apache/hadoop/hdfs/TestFileAppend4.java |   2 +-
 .../apache/hadoop/hdfs/TestFileCreation.java| 119 +++
 .../org/apache/hadoop/hdfs/TestLocalDFS.java|  29 +-
 .../server/datanode/TestBlockPoolManager.java   |  22 ++
 .../hdfs/server/namenode/CreateEditsLog.java|   2 +-
 .../hdfs/server/namenode/TestEditLog.java   |   2 +-
 .../hdfs/server/namenode/TestStartup.java

[03/52] [abbrv] MAPREDUCE-5996. native-task: Rename system tests into standard directory layout. Contributed by Todd Lipcon.

2014-09-15 Thread jing9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2cba48f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/data/testGlibcBugSpill.out
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/data/testGlibcBugSpill.out
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/data/testGlibcBugSpill.out
deleted file mode 100644
index 168a65d..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/data/testGlibcBugSpill.out
+++ /dev/null
@@ -1,2 +0,0 @@
-��*�PKM��:\xBD\xC8\xCA\xCB\xAA\xB4c`\xD5|A1+\xCB\xDC\xAA\x7F\x1D\xA4\x1Bx\x1B\xD1k\xE1\x01\x0E\xEA\xC82\xB7\xF8
 
\xBD.z\xEFH\xFB*\xE8\xBD\xD4\x15o\x82\xA1\x0F}\x03\x00\x17\xC7\xF7Ce*T\x1D\xC4tk\xE7\x0CS\xBF[\xADW{\xB9\xFFLM\xB2\x87\x03\xFC\xC2\xE2\x9C%\xCB\xFF\x9A\x97]\x15_N\xA3\x8FDb\xA6\x172\x99\xF8+\xA4\xA5\xAC4n\x0D}\xA0/[\xBD\xFD\xC4\xA7\xBCj\xF8\x85\xC6V\x87B\xBA\x85\x8A\xCDZ\xFBU\x84\xD7\xBC\xAB\x84H}_v\xC4\xB2\x11\x19\xDB\x06\x93\xB5\xBE\x92E\xCE\xFA\x02RG\xE4\xA3HcI4\xAB.X\xA5\xB8a\x06\x1E.\xADS\xFB\x8AW\xDA\xCA2^\\x90\x87\x95l]r\x99\x88b\x1EU\xC8\xE0l\x1F#h\x08\xA8\x80I\x9B\xC0E\x19%\xDE\xE5\xA6?\xC2\x83\xA0\xD6O1\xFB_\x94\xF8\x8F\xD6\xD9\x16\xE2\x91\x1E\x8EL\x1F\xA2Y\x05\x9CoMN\x0C\x86\xE2\xE9\x9A\xB16o\xF6\xF3Q\xA2\xD8\xB8\xD0\xDB\x86\x93\xAFesc\xA0h\x12\xB9E\x1BU\x12\x02OXS\x8BXn\x0EQ\xABJ\xCE\xB2k\xB1\xD7Z'\xE4\xB7[`~4\x11\xB4\xCD`\xF5%\x1F\xF9\xEEW\x88\xC5\x14+O\x1B9B\x11\x95\xDE\xD5\xA4Co\x8E\xDB\xEAz\x9FGi\xAD\xFE\xF8\xB63\xED\x04\xA1\xE2v\x86G\
 
xAE\x09\x91\xE4\x03JS=\\xD1\x81}\xEE\xA3?\x87\xDB\xC1\x8B\xFF)\xE8L\xC3\xC7\x8B\x05\x1B'\xCC)\xD4\x0D\x90{\xB9\xB7C6\xA9\x16\xE2\xF9D%\xE9!Bt]N\xFB\xF3\xCE\x7F\xB7d8\xBCN\x191,Nm\xABb\x92\x00\xEA\xBC\xD0\xD5!\x956\xF9\x9D\x98\x86\x81QDnL\xB1\xEFxX\xA4\x1FZ\xE9uf\x7F\xF7\x8F\xCD\xC5P\x81\x17\x17\xB0\xCB\xAF\x9C\x05\x8E\xC6\xDB\x09\x1Ak\xA6\xF822\xF4\xA4t\xA1;2\xCC\xEA\xFD?0k]\xF8\xE4\x13\xD2'Z\x9F~5\x9C\xFC+k\x07\xE4k\xB9\xEDx\x9B\x17\xA2\xA8\x85R\x90\x1CV\xD3T\xB7b[\x81)8\xEA\xE6\x12\xE0\x88~\xB8\x87\xA9X\xB0\x88\x19\xA5,\x88cm@\xEE\xF9.\x8A?\xF8!\xD8oR\xAB\x05\x93h3\x13\x0A\x98_E\x11\x81\xD6\xB8;P\xD8u\x9DTv]\\xF1\x0C\xD5\xF4\x0E#\x87}\xE3\x89\xA2\xC2\xEA\x86\x9D\xE7\xAF\xA1\xC3;\xD2\xFF\xA6\xB2!\xAB\\x90i|n\xDE\xBB:\xC6\x08\x1D,Q\xC1;\x15\x9DUV\x8F\xD3;\xFA\x12\xD0U\xA9\xD7\xC6\xFDX\x8F\x10vX\xF1\xF9H7\x12Z\x1FoB\x8B\xD6\xF0\x04\xEB\xA2\xC1\xACH\xFE\xC9_R\xF5\x1D\xA8\x82\xBD\xC6\x11\x19#\x9A\xA8\x09\x95\x06\x98\xA9f\xD9\x12\xA0i\xFA\xD3\xB2\x01\xE9\x06\x14~.z\xDF\xF2YV
 
#z\xEB-\xFA$[U\xA6\xE2.\xD6\xD0kf\xA3\x83\xE0\x86\xD9\xC1\x7FO\xE9\xF4\xEF\x81\x06\xA3\xDE\xC8\xACt\x8B\xCAr\xE6\x82$8\x0F\xE1\x06\xF9\xBB\x0C\x9F\xAA8\x94\xBBU\x8DQ\xC3\xE9@'\xF9\xBF\xCF\x8B\xD4\x995\xEB\xE9t3Q\x10\xD7\x0D\x9D\x94\xCA\x84\xC2\xE2{\x0B\x89r\xA6\xC6\xAA\xE5C\xC6U\xA6\xD1tqa\xA0\xD7RO\x92\xC9\xBE\xF9\xD1\xDE\x93b\x06\xD3ae:\xB7\x8C\x99\xD6\xFFI\x86\x8CvtX@k\xE4m\x93wz\x8A\xC2U\xFBb\xA2\x9Ao\xAF\x8D\x19k\xA2pP\x83s\xFE\x0E\x0FY\xA0\xA7E'\xC0\x02\xF4\x98A5\xF2\x8A?\x04$\x89\xC7]\x0A\xFBX\x97*\xAEN\x13#\xB3a\xD2y\xD3#_B\xAC\x05:\xAC\x92\xEAO\x08H\x88N\x1A\xB9\xDC\xFA\x11ikJ\x12!\xE8u\xCD+\x88\x98\xE3c\xCB\xD91%\x98KDN\xC6\xF2\xB7\x86o6\x91P`\x9B\xA1\x0B\x82\xEB\x15H\xA0\xC4\x85\xDF\xAC\xA1b\xD9\xA3b\xB8E\xB59_\xF4\xCCWH\x095\xE6\xBE\xF2\x19FC\x0E\xAB\xEA6\x0C\xAD5\x90/$D\xB3\x9E\x81[9j\x8A\xC4\x85\xAAA\x7Fe\xDCG8\x00\xDA\xCFi\xBDp\x18?\xF5\xA8~@\xC1\x08\xDF\xE5\xAE,\xDF0t\xCB\x92W8V\x01F\x1A./\x8D\xAF\xD8\x87\xCE\x80w*\x18Is\x17\x15\x17DI\xB4a_\N\xB77\xA7n\x16\xDF
 
IE\xEF\x9E\x8Cd7\x1B\xF9\x97\xF9E\x86\x98\x9F\x1D\xB6\x9F\x94\xF7\x8AJ\x1A\xCD\x88\xD3\xD3\xDEw\x92Q\\xF5\xC6\xD6\x11c\x81\x00\xE8\xD9'\xE1\x9D5\xFC\x11},\xB8\xB2V\xE6\xC0\xB7a/\x18~=G\xAC\x9EGxR,\x9B\x91\xA0\xE9\x85\x14J\xB3\xB2O\xEA3\xB2F\xA7vo\x88\xFEm\x18*g%\xA4l\x9B\xF0\xA5`$\xEBo\xFC?\x13s\x0D\x91y\x92\xE0u\xFA\xD1p+)\xACpi\xE3\xB4L-\x0A\xF1#\xCF\x1A\x82\x8A\xE5\xEF80\xC9\x17z@\xD1\x9AoK\xCE\xE42\x92M\xEF\x85\xBE(z\x860\xC6\x03t\x02\xA1\xD2\x09\x1E\xB3\x80t\x86|\x8E~F\xFD1i|\x84^\x07\xC9Z\xBE\x91\xA7\x06\x9B\xC7\x8F\xFB\xD4\xB84\xED\xA2\x108/X\x89\xF4W\xF6\xE3\xEE\x94Q,H\xFFo3E\xA7q\xE5\x15\x86\xCF\x0F_\xF9\xE8N\xCD}\xEB\xFD\x0E\x03EZi\x83
 
\xA7D|{]\xEE\xBA\xE4\x00RR\x1C\xFBj\x81\xF0{w\x9F\xA6F\xBB\x00\x0Cw\x01\xE1\xFE\xC8\xE8\xAC\xD18\x19,\xE9\x9E\xFE\xF0\xA45ov[K\x86UT\x00\xC0*\xEF\x9De\xE7pN[\xA5~\xF3\xDA\xAD\xE0\x85\xB0Nb\x09I_\xA8B:b\x9A\x10\xC2\xCF\xE83|\xB1\xCD\x17\xE8\x95\xA6!\xD72DR\x03\xB7\xF4\xC2\x88\xF1Rl+t\xA7x\x04\x10\xC1@\\xC3\xE5}\xDD`\xA2\x91W\x7F%S\xB7\x1
 

[13/52] [abbrv] git commit: MAPREDUCE-5984. native-task: Reuse lz4 sources in hadoop-common. Contributed by Binglin Chang

2014-09-15 Thread jing9
MAPREDUCE-5984. native-task: Reuse lz4 sources in hadoop-common. Contributed by 
Binglin Chang

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1616105 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7ecaa81d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7ecaa81d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7ecaa81d

Branch: refs/heads/HDFS-6584
Commit: 7ecaa81d2746a805b9937fcd134ceaa8607ed7b5
Parents: 83a3967
Author: Binglin Chang bch...@apache.org
Authored: Wed Aug 6 06:01:12 2014 +
Committer: Binglin Chang bch...@apache.org
Committed: Wed Aug 6 06:01:12 2014 +

--
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../hadoop-mapreduce-client-nativetask/pom.xml  |   6 +
 .../src/CMakeLists.txt  |   4 +-
 .../src/main/native/COPYING |  12 +-
 .../src/main/native/lz4/lz4.c   | 740 ---
 .../src/main/native/lz4/lz4.h   |  96 ---
 .../src/main/native/src/codec/Lz4Codec.cc   |  22 +-
 .../src/main/native/test/TestCompressions.cc|   6 +-
 .../src/main/native/test/TestIFile.cc   |   2 +-
 9 files changed, 21 insertions(+), 868 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7ecaa81d/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index c70df12..9dddcd5 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -9,3 +9,4 @@ MAPREDUCE-6000. native-task: Simplify 
ByteBufferDataReader/Writer (todd)
 MAPREDUCE-5991. native-task should not run unit tests if native profile is not 
enabled. (Binglin Chang)
 MAPREDUCE-5995. native-task: Revert changes to Text internals (todd)
 MAPREDUCE-6005. native-task: Fix some valgrind errors (Binglin Chang)
+MAPREDUCE-5984. native-task: Reuse lz4 sources in hadoop-common (Binglin Chang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7ecaa81d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
index 2cb483e..9727800 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
@@ -163,6 +163,12 @@
                 </goals>
                 <configuration>
                   <target>
+                    <copy file="${basedir}/../../../hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.h"
+                          todir="${project.build.directory}/native/" />
+                    <copy file="${basedir}/../../../hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4_encoder.h"
+                          todir="${project.build.directory}/native/" />
+                    <copy file="${basedir}/../../../hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.c"
+                          todir="${project.build.directory}/native/" />
                     <copy todir="${project.build.directory}/native/test/testData"
                           overwrite="true">
                       <fileset dir="${basedir}/src/main/native/testData" />

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7ecaa81d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
index 3094162..36dbd9c 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
@@ -155,7 +155,7 @@ include_directories(
 ${D}/test
 ${CMAKE_CURRENT_SOURCE_DIR}
 #${CMAKE_CURRENT_SOURCE_DIR}/src
-#${CMAKE_BINARY_DIR}
+${CMAKE_BINARY_DIR}
 ${JNI_INCLUDE_DIRS}
 ${SNAPPY_INCLUDE_DIR}
 )
@@ -174,7 +174,7 @@ else (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
 endif (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
 
 

[44/52] [abbrv] git commit: YARN-2542. Fixed NPE when retrieving ApplicationReport from TimeLineServer. Contributed by Zhijie Shen

2014-09-15 Thread jing9
YARN-2542. Fixed NPE when retrieving ApplicationReport from TimeLineServer. 
Contributed by Zhijie Shen


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a0ad975e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a0ad975e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a0ad975e

Branch: refs/heads/HDFS-6584
Commit: a0ad975ea1e70f9532cf6cb6c1d9d92736ca0ebc
Parents: e65ae57
Author: Jian He jia...@apache.org
Authored: Fri Sep 12 15:24:17 2014 -0700
Committer: Jian He jia...@apache.org
Committed: Fri Sep 12 15:27:13 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt |  3 +
 .../hadoop/yarn/client/cli/ApplicationCLI.java  | 12 ++-
 .../hadoop/yarn/client/cli/TestYarnCLI.java | 80 ++--
 3 files changed, 54 insertions(+), 41 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a0ad975e/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index efc3e09..06d94ca 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -348,6 +348,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2456. Possible livelock in CapacityScheduler when RM is recovering 
apps.
 (Jian He via xgong)
 
+YARN-2542. Fixed NPE when retrieving ApplicationReport from TimeLineServer.
+(Zhijie Shen via jianhe)
+
 Release 2.5.1 - 2014-09-05
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a0ad975e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
index 54cfe91..a847cd5 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java
@@ -463,9 +463,15 @@ public class ApplicationCLI extends YarnCLI {
   appReportStr.println(appReport.getHost());
   appReportStr.print(\tAggregate Resource Allocation : );
 
-  ApplicationResourceUsageReport usageReport = 
appReport.getApplicationResourceUsageReport();
-  appReportStr.print(usageReport.getMemorySeconds() +  MB-seconds, );
-  appReportStr.println(usageReport.getVcoreSeconds() +  vcore-seconds);
+  ApplicationResourceUsageReport usageReport =
+  appReport.getApplicationResourceUsageReport();
+      if (usageReport != null) {
+        //completed app report in the timeline server doesn't have usage report
+        appReportStr.print(usageReport.getMemorySeconds() + " MB-seconds, ");
+        appReportStr.println(usageReport.getVcoreSeconds() + " vcore-seconds");
+      } else {
+        appReportStr.println("N/A");
+      }
   appReportStr.print(\tDiagnostics : );
   appReportStr.print(appReport.getDiagnostics());
 } else {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a0ad975e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
index 1a593d2..980517f 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
@@ -86,44 +86,48 @@ public class TestYarnCLI {
   
   @Test
   public void testGetApplicationReport() throws Exception {
-ApplicationCLI cli = createAndGetAppCLI();
-ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
-ApplicationResourceUsageReport usageReport = 
-ApplicationResourceUsageReport.newInstance(
-2, 0, null, null, null, 123456, 4567);
-ApplicationReport newApplicationReport = ApplicationReport.newInstance(
-applicationId, ApplicationAttemptId.newInstance(applicationId, 1),
-user, queue, appname, host, 124, null,
-YarnApplicationState.FINISHED, diagnostics, url, 0, 0,
-FinalApplicationStatus.SUCCEEDED, usageReport, N/A, 0.53789f, YARN,
-null);
-

[38/52] [abbrv] git commit: MAPREDUCE-6077. native-task: Remove CustomModule examples in nativetask (seanzhong)

2014-09-15 Thread jing9
MAPREDUCE-6077. native-task: Remove CustomModule examples in nativetask 
(seanzhong)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/52a8b4db
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/52a8b4db
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/52a8b4db

Branch: refs/heads/HDFS-6584
Commit: 52a8b4db92a35598006103c59a2ff93afc5312ee
Parents: 7c91f9b
Author: Sean Zhong clock...@gmail.com
Authored: Sat Sep 6 11:46:07 2014 +0800
Committer: Sean Zhong clock...@gmail.com
Committed: Sat Sep 6 11:46:07 2014 +0800

--
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../sdk/example/CustomModule/README.txt |   1 -
 .../sdk/example/CustomModule/pom.xml| 131 ---
 .../platform/custom/CustomPlatform.java |  39 --
 .../nativetask/serde/custom/CustomWritable.java |  75 ---
 .../serde/custom/CustomWritableSerializer.java  |  33 -
 .../src/main/native/src/CustomComparator.cpp|  88 -
 7 files changed, 1 insertion(+), 367 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/52a8b4db/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 539e7be..cfc9412 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -23,3 +23,4 @@ MAPREDUCE-6055. native-task: findbugs, interface annotations, 
and other misc cle
 MAPREDUCE-6067. native-task: fix some counter issues (Binglin Chang)
 MAPREDUCE-6069. native-task: Lint/style fixes and removal of unused code (todd)
 MAPREDUCE-6074. native-task: fix release audit, javadoc, javac warnings (todd)
+MAPREDUCE-6077. native-task: Remove CustomModule examples in nativetask 
(seanzhong)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52a8b4db/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/sdk/example/CustomModule/README.txt
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/sdk/example/CustomModule/README.txt
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/sdk/example/CustomModule/README.txt
deleted file mode 100644
index 0ad6f1e..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/sdk/example/CustomModule/README.txt
+++ /dev/null
@@ -1 +0,0 @@
-This project is depend on hadoop and hadoop-nativetask, so in order to 
complete the build, you have to specify the hadoop dir first.
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52a8b4db/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/sdk/example/CustomModule/pom.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/sdk/example/CustomModule/pom.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/sdk/example/CustomModule/pom.xml
deleted file mode 100644
index 5bf67c7..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/sdk/example/CustomModule/pom.xml
+++ /dev/null
@@ -1,131 +0,0 @@
-?xml version=1.0?
-!-- Licensed under the Apache License, Version 2.0 (the License); you 
-  may not use this file except in compliance with the License. You may obtain 
-  a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless 
-  required by applicable law or agreed to in writing, software distributed 
-  under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES 
-  OR CONDITIONS OF ANY KIND, either express or implied. See the License for 
-  the specific language governing permissions and limitations under the 
License. 
-  See accompanying LICENSE file. --
-project xmlns=http://maven.apache.org/POM/4.0.0; 
xmlns:xsi=http://www.w3.org/2001/XMLSchema-instance;
-  xsi:schemaLocation=http://maven.apache.org/POM/4.0.0
-  http://maven.apache.org/xsd/maven-4.0.0.xsd;
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.hadoop</groupId>
-  <artifactId>nativetask-sdk</artifactId>
-  <version>2.2.0</version>
-  <name>nativetask-sdk</name>
-
-  <properties>
-  </properties>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-mapreduce-client-nativetask</artifactId>
-      <version>2.2.0</version>
-    </dependency>
-  </dependencies>
-
-  build
-plugins
-  plugin
-

[10/52] [abbrv] git commit: Add entry to branch's CHANGES.txt for previous commit

2014-09-15 Thread jing9
Add entry to branch's CHANGES.txt for previous commit


git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1613827 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/43917e56
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/43917e56
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/43917e56

Branch: refs/heads/HDFS-6584
Commit: 43917e564d266b67c323f4657d11a8b70ae1d706
Parents: ea9e5b7
Author: Todd Lipcon t...@apache.org
Authored: Sun Jul 27 18:54:02 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Sun Jul 27 18:54:02 2014 +

--
 hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/43917e56/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index aa695cf..92c94a8 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -6,3 +6,4 @@ MAPREDUCE-5994. Simplify ByteUtils and fix failing test. (todd)
 MAPREDUCE-5996. native-task: Rename system tests into standard directory 
layout (todd)
 MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop Common (todd)
 MAPREDUCE-6000. native-task: Simplify ByteBufferDataReader/Writer (todd)
+MAPREDUCE-5991. native-task should not run unit tests if native profile is not 
enabled. (Binglin Chang)



[29/52] [abbrv] MAPREDUCE-6055. native-task: findbugs, interface annotations, and other misc cleanup

2014-09-15 Thread jing9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1081d9ce/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/LongWritableSerializer.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/LongWritableSerializer.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/LongWritableSerializer.java
index ec326ca..d8a6595 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/LongWritableSerializer.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/LongWritableSerializer.java
@@ -20,9 +20,11 @@ package org.apache.hadoop.mapred.nativetask.serde;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.nativetask.INativeComparable;
 
+@InterfaceAudience.Private
 public class LongWritableSerializer extends DefaultSerializer implements
 INativeComparable {
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1081d9ce/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NativeSerialization.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NativeSerialization.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NativeSerialization.java
index f5a033d..5881a46 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NativeSerialization.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NativeSerialization.java
@@ -21,8 +21,10 @@ package org.apache.hadoop.mapred.nativetask.serde;
 import java.io.IOException;
 import java.util.concurrent.ConcurrentHashMap;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Writable;
 
+@InterfaceAudience.Private
 public class NativeSerialization {
 
   private final ConcurrentHashMapString, Class? map = new 
ConcurrentHashMapString, Class?();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1081d9ce/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NullWritableSerializer.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NullWritableSerializer.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NullWritableSerializer.java
index afa4e8e..f6e7cf5 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NullWritableSerializer.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NullWritableSerializer.java
@@ -20,9 +20,11 @@ package org.apache.hadoop.mapred.nativetask.serde;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.nativetask.INativeComparable;
 
+@InterfaceAudience.Private
 public class NullWritableSerializer extends DefaultSerializer implements
 INativeComparable {
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1081d9ce/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/SerializationFramework.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/SerializationFramework.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/SerializationFramework.java
index e95a0c4..8dee58f 100644
--- 

[09/52] [abbrv] git commit: MAPREDUCE-5991. native-task should not run unit tests if native profile is not enabled. Contributed by Binglin Chang.

2014-09-15 Thread jing9
MAPREDUCE-5991. native-task should not run unit tests if native profile is not 
enabled. Contributed by Binglin Chang.

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1613072 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ea9e5b7b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ea9e5b7b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ea9e5b7b

Branch: refs/heads/HDFS-6584
Commit: ea9e5b7baae34c7ba660681d1e5aceb9c1047e6f
Parents: 6635180
Author: Binglin Chang bch...@apache.org
Authored: Thu Jul 24 11:44:09 2014 +
Committer: Binglin Chang bch...@apache.org
Committed: Thu Jul 24 11:44:09 2014 +

--
 .../hadoop-mapreduce-client-common/pom.xml  | 11 ++
 .../hadoop-mapreduce-client-nativetask/pom.xml  | 35 
 hadoop-project/pom.xml  |  6 
 3 files changed, 45 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ea9e5b7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
index 7ef7d3b..48db5c8 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
@@ -84,6 +84,17 @@
   /execution
 /executions
   /plugin
+      <plugin>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <goals>
+              <goal>test-jar</goal>
+            </goals>
+            <phase>test-compile</phase>
+          </execution>
+        </executions>
+      </plugin>
 /plugins
   /build
 /project

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ea9e5b7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
index 40b6520..2cb483e 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
@@ -63,6 +63,12 @@
   typetest-jar/type
   scopetest/scope
 /dependency
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-common</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
   /dependencies
 
   build
@@ -82,14 +88,7 @@
 include**/buffer/Test*.java/include
 include**/handlers/Test*.java/include
 include**/serde/Test*.java/include
-include**/combinertest/*Test.java/include
-include**/compresstest/*Test.java/include
-include**/nonsorttest/*Test.java/include
-include**/kvtest/*Test.java/include
   /includes
-  additionalClasspathElements
-
additionalClasspathElement${basedir}/../hadoop-mapreduce-client-common/target/classes/additionalClasspathElement
-  /additionalClasspathElements
 /configuration
   /plugin
 /plugins
@@ -184,6 +183,28 @@
   /execution
 /executions
   /plugin
+  plugin
+groupIdorg.apache.maven.plugins/groupId
+artifactIdmaven-surefire-plugin/artifactId
+configuration
+  properties
+property
+  namelistener/name
+  valueorg.apache.hadoop.test.TimedOutTestsListener/value
+/property
+  /properties
+  includes
+include**/TestTaskContext.java/include
+include**/buffer/Test*.java/include
+include**/handlers/Test*.java/include
+include**/serde/Test*.java/include
+include**/combinertest/*Test.java/include
+include**/compresstest/*Test.java/include
+include**/nonsorttest/*Test.java/include
+include**/kvtest/*Test.java/include
+  /includes
+/configuration
+  /plugin
 /plugins
   /build
 /profile

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ea9e5b7b/hadoop-project/pom.xml
--
diff --git 

[27/52] [abbrv] git commit: MAPREDUCE-6056. native-task: move system test working dir to target dir and cleanup test config xml files (Manu Zhang via bchang)

2014-09-15 Thread jing9
MAPREDUCE-6056. native-task: move system test working dir to target dir and 
cleanup test config xml files (Manu Zhang via bchang)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7ade9b04
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7ade9b04
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7ade9b04

Branch: refs/heads/HDFS-6584
Commit: 7ade9b04ea0a46a51564d23c8d715ddb1b9ca6d0
Parents: 17cd0fa
Author: Binglin Chang bch...@apache.org
Authored: Tue Sep 2 16:26:09 2014 +0800
Committer: Binglin Chang bch...@apache.org
Committed: Tue Sep 2 16:26:09 2014 +0800

--
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../nativetask/combinertest/CombinerTest.java   |  65 
 .../combinertest/LargeKVCombinerTest.java   |  94 +--
 .../combinertest/OldAPICombinerTest.java|  27 ++--
 .../nativetask/compresstest/CompressMapper.java |  38 ++---
 .../nativetask/compresstest/CompressTest.java   |  90 +++
 .../TestNativeCollectorOnlyHandler.java |  20 ++-
 .../hadoop/mapred/nativetask/kvtest/KVTest.java |  83 +-
 .../mapred/nativetask/kvtest/LargeKVTest.java   | 104 ++--
 .../nativetask/nonsorttest/NonSortTest.java |  30 ++--
 .../nativetask/testutil/TestConstants.java  |  56 ---
 .../src/test/resources/common_conf.xml  |  71 +
 .../src/test/resources/kvtest-conf.xml  | 158 +--
 .../src/test/resources/native_conf.xml  |  33 ++--
 .../src/test/resources/normal_conf.xml  |  26 ++-
 .../src/test/resources/test-combiner-conf.xml   |  54 +++
 .../src/test/resources/test-compress-conf.xml   |  50 ++
 .../test/resources/test-gzip-compress-conf.xml  |  39 -
 .../test/resources/test-lz4-compress-conf.xml   |  39 -
 .../src/test/resources/test-nonsort-conf.xml|  49 +++---
 .../resources/test-snappy-compress-conf.xml |  41 -
 21 files changed, 588 insertions(+), 580 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7ade9b04/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index baa88c1..7c9558e 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -18,3 +18,4 @@ MAPREDUCE-6035. native-task: sources/test-sources jar 
distribution (Manu Zhang v
 MAPREDUCE-5977. Fix or suppress native-task gcc warnings (Manu Zhang via todd)
 MAPREDUCE-6054. native-task: Speed up tests (todd)
 MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is 
sucessful (Binglin Chang)
+MAPREDUCE-6056. native-task: move system test working dir to target dir and 
cleanup test config xml files (Manu Zhang via bchang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7ade9b04/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
index abbe28e..d7f05be 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
@@ -38,11 +38,14 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.junit.AfterClass;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.io.IOException;
+
 public class CombinerTest {
   private FileSystem fs;
   private String inputpath;
@@ -50,33 +53,25 @@ public class CombinerTest {
   private String hadoopoutputpath;
 
   @Test
-  public void testWordCountCombiner() {
-try {
-
-  final Configuration nativeConf = 
ScenarioConfiguration.getNativeConfiguration();
-  nativeConf.addResource(TestConstants.COMBINER_CONF_PATH);
-  final Job nativejob = getJob(nativewordcount, 

[23/52] [abbrv] git commit: MAPREDUCE-5977. Fix or suppress native-task gcc warnings. Contributed by Manu Zhang.

2014-09-15 Thread jing9
MAPREDUCE-5977. Fix or suppress native-task gcc warnings. Contributed by Manu 
Zhang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fad4524c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fad4524c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fad4524c

Branch: refs/heads/HDFS-6584
Commit: fad4524c85848b8efe27dcf4a3e3a9ac1725bd2a
Parents: 6d39367
Author: Todd Lipcon t...@apache.org
Authored: Wed Aug 27 12:23:03 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Wed Aug 27 12:25:42 2014 -0700

--
 .../CHANGES.MAPREDUCE-2841.txt  | 2 +
 .../src/CMakeLists.txt  | 9 +-
 .../src/main/native/gtest/gtest.h   | 19541 -
 .../src/main/native/gtest/include/gtest/gtest.h | 19541 +
 .../src/main/native/src/NativeTask.h| 2 -
 .../src/main/native/src/codec/BlockCodec.cc | 4 +-
 .../src/main/native/src/handler/BatchHandler.h  | 4 +-
 .../main/native/src/handler/CombineHandler.cc   |10 +-
 .../src/main/native/src/lib/Buffers.h   | 4 +-
 .../src/main/native/src/lib/IFile.cc|11 +-
 .../main/native/src/lib/MapOutputCollector.cc   |14 +-
 .../main/native/src/lib/MapOutputCollector.h| 4 +-
 .../src/main/native/src/lib/Merge.cc| 4 +-
 .../src/main/native/src/lib/PartitionBucket.h   |12 +-
 .../src/main/native/src/lib/commons.h   | 3 +
 .../src/main/native/src/util/StringUtil.cc  | 6 +-
 .../src/main/native/test/TestCompressions.cc| 7 +-
 .../src/main/native/test/TestIFile.cc   | 4 +
 .../src/main/native/test/lib/TestByteBuffer.cc  | 2 +-
 .../native/test/lib/TestFixSizeContainer.cc | 2 +-
 .../src/main/native/test/lib/TestIterator.cc| 2 -
 .../native/test/lib/TestMemBlockIterator.cc | 6 +-
 .../src/main/native/test/lib/TestMemoryBlock.cc | 1 +
 .../src/main/native/test/util/TestHash.cc   | 2 +-
 24 files changed, 19600 insertions(+), 19597 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad4524c/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 95f7858..4dc08cb 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -15,3 +15,5 @@ MAPREDUCE-5978. native-task: remove test case for not 
supported codec Bzip2Codec
 MAPREDUCE-6006. native-task: add native tests to maven and fix bug in pom.xml 
(Binglin Chang via todd)
 MAPREDUCE-6026. native-task: fix logging (Manu Zhang via todd)
 MAPREDUCE-6035. native-task: sources/test-sources jar distribution (Manu Zhang 
via todd)
+MAPREDUCE-5977. Fix or suppress native-task gcc warnings (Manu Zhang via todd)
+

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad4524c/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
index 77b6109..0ab99db 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
@@ -90,7 +90,7 @@ SET(CMAKE_FIND_LIBRARY_SUFFIXES 
STORED_CMAKE_FIND_LIBRARY_SUFFIXES)
 
 # primitive configs
 set(PRFLAGS -DSIMPLE_MEMCPY)
-set(CMAKE_C_FLAGS ${CMAKE_C_FLAGS} ${PRFLAGS} -Wall)
+set(CMAKE_C_FLAGS ${CMAKE_C_FLAGS} ${PRFLAGS} -fno-strict-aliasing -Wall 
-Wno-sign-compare)
 set(CMAKE_LD_FLAGS ${CMAKE_LD_FLAGS} -no-undefined -version-info 0:1:0
 -L${_JAVA_HOME}/jre/lib/amd64/server -ljvm)
 set(CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS} ${CMAKE_C_FLAGS} -g -O2 -DNDEBUG 
-fPIC)
@@ -150,7 +150,6 @@ CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake 
${CMAKE_BINARY_DIR}/config.h)
 
 include_directories(
 ${GENERATED_JAVAH}
-${D}
 ${D}/src
 ${D}/src/util
 ${D}/src/lib
@@ -160,6 +159,8 @@ include_directories(
 ${JNI_INCLUDE_DIRS}
 ${SNAPPY_INCLUDE_DIR}
 )
+# add gtest as system library to suppress gcc warnings
+include_directories(SYSTEM ${D}/gtest/include)
 
 
 SET(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)
@@ -220,8 +221,9 @@ add_dual_library(nativetask
 
 target_link_libraries(nativetask ${NT_DEPEND_LIBRARY})
 
+add_library(gtest ${D}/gtest/gtest-all.cc)
+set_target_properties(gtest 

[47/52] [abbrv] git commit: YARN-2528. Relaxed http response split vulnerability protection for the origins header and made it accept multiple origins in CrossOriginFilter. Contributed by Jonathan Eag

2014-09-15 Thread jing9
YARN-2528. Relaxed http response split vulnerability protection for the origins 
header and made it accept multiple origins in CrossOriginFilter. Contributed by 
Jonathan Eagles.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/98588cf0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/98588cf0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/98588cf0

Branch: refs/heads/HDFS-6584
Commit: 98588cf044d9908ecf767257c09a52cf17aa2ec2
Parents: 3c91817
Author: Zhijie Shen zjs...@apache.org
Authored: Fri Sep 12 21:33:01 2014 -0700
Committer: Zhijie Shen zjs...@apache.org
Committed: Fri Sep 12 21:33:01 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt |  4 ++
 .../timeline/webapp/CrossOriginFilter.java  | 47 ++--
 .../timeline/webapp/TestCrossOriginFilter.java  | 41 ++---
 3 files changed, 61 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/98588cf0/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 06d94ca..065869a 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -351,6 +351,10 @@ Release 2.6.0 - UNRELEASED
 YARN-2542. Fixed NPE when retrieving ApplicationReport from TimeLineServer.
 (Zhijie Shen via jianhe)
 
+YARN-2528. Relaxed http response split vulnerability protection for the 
origins
+header and made it accept multiple origins in CrossOriginFilter. (Jonathan
+Eagles via zjshen)
+
 Release 2.5.1 - 2014-09-05
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/98588cf0/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
index cceee54..d5fab7a 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
@@ -19,8 +19,6 @@
 package org.apache.hadoop.yarn.server.timeline.webapp;
 
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-import java.net.URLEncoder;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -106,12 +104,12 @@ public class CrossOriginFilter implements Filter {
 
   private void doCrossFilter(HttpServletRequest req, HttpServletResponse res) {
 
-String origin = encodeHeader(req.getHeader(ORIGIN));
-if (!isCrossOrigin(origin)) {
+String originsList = encodeHeader(req.getHeader(ORIGIN));
+if (!isCrossOrigin(originsList)) {
   return;
 }
 
-if (!isOriginAllowed(origin)) {
+if (!areOriginsAllowed(originsList)) {
   return;
 }
 
@@ -127,7 +125,7 @@ public class CrossOriginFilter implements Filter {
   return;
 }
 
-res.setHeader(ACCESS_CONTROL_ALLOW_ORIGIN, origin);
+res.setHeader(ACCESS_CONTROL_ALLOW_ORIGIN, originsList);
 res.setHeader(ACCESS_CONTROL_ALLOW_CREDENTIALS, Boolean.TRUE.toString());
 res.setHeader(ACCESS_CONTROL_ALLOW_METHODS, getAllowedMethodsHeader());
 res.setHeader(ACCESS_CONTROL_ALLOW_HEADERS, getAllowedHeadersHeader());
@@ -191,35 +189,36 @@ public class CrossOriginFilter implements Filter {
 if (header == null) {
   return null;
 }
-try {
-  // Protect against HTTP response splitting vulnerability
-  // since value is written as part of the response header
-  return URLEncoder.encode(header, ASCII);
-} catch (UnsupportedEncodingException e) {
-  return null;
-}
+// Protect against HTTP response splitting vulnerability
+// since value is written as part of the response header
+// Ensure this header only has one header by removing
+// CRs and LFs
+return header.split("\n|\r")[0].trim();
   }
 
-  static boolean isCrossOrigin(String origin) {
-return origin != null;
+  static boolean isCrossOrigin(String originsList) {
+return originsList != null;
   }
 
   @VisibleForTesting
-  boolean 

[50/52] [abbrv] HADOOP-10400. Incorporate new S3A FileSystem implementation. Contributed by Jordan Mendelson and Dave Wang.

2014-09-15 Thread jing9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d920b8/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java
--
diff --git 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java
 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java
new file mode 100644
index 000..d677ec4
--- /dev/null
+++ 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  License); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an AS IS BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.contract.s3a;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractSeekTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+
+public class TestS3AContractSeek extends AbstractContractSeekTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+return new S3AContract(conf);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d920b8/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3AFileSystemContractBaseTest.java
--
diff --git 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3AFileSystemContractBaseTest.java
 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3AFileSystemContractBaseTest.java
new file mode 100644
index 000..8455233
--- /dev/null
+++ 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3AFileSystemContractBaseTest.java
@@ -0,0 +1,327 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import static org.junit.Assume.*;
+
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystemContractBaseTest;
+import org.apache.hadoop.fs.Path;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.URI;
+import java.util.UUID;
+
+/**
+ *  Tests a live S3 system. If your keys and bucket aren't specified, all tests 
+ *  are marked as passed 
+ *  
+ *  This uses BlockJUnit4ClassRunner because FileSystemContractBaseTest from 
+ *  TestCase which uses the old Junit3 runner that doesn't ignore assumptions 
+ *  properly making it impossible to skip the tests if we don't have a valid
+ *  bucket.
+ **/
+public class S3AFileSystemContractBaseTest extends FileSystemContractBaseTest {
+  private static final int TEST_BUFFER_SIZE = 128;
+  private static final int MODULUS = 128;
+
+  protected static final Logger LOG = 
LoggerFactory.getLogger(S3AFileSystemContractBaseTest.class);
+
+  @Override
+  public void setUp() throws Exception {
+Configuration conf = new Configuration();
+
+URI testURI = URI.create(conf.get("test.fs.s3a.name"));
+
+boolean liveTest = testURI != null && !testURI.equals("s3a:///");
+
+// This doesn't work with our JUnit 3 style test cases, so instead we'll 
+// make this whole class not run by default
+assumeTrue(liveTest);
+
+fs = new S3AFileSystem();
+fs.initialize(testURI, conf);
+super.setUp();
+  }
+
+  @Override
+  protected void tearDown() throws 

[33/52] [abbrv] MAPREDUCE-6069. Lint/style fixes and removal of unused code. Contributed by Todd Lipcon.

2014-09-15 Thread jing9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/683987be/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/Hash.cc
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/Hash.cc
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/Hash.cc
deleted file mode 100644
index eec8091..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/Hash.cc
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * License); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an AS IS BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Hash.h"
-
-namespace NativeTask {
-
-} // namespace NativeTask
-

http://git-wip-us.apache.org/repos/asf/hadoop/blob/683987be/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/Hash.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/Hash.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/Hash.h
deleted file mode 100644
index 4f99d26..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/Hash.h
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * License); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an AS IS BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef HASH_H_
-#define HASH_H_
-
-#include <stdint.h>
-#include <stdlib.h>
-
-// Hash function for a byte array.
-extern uint64_t CityHash64(const char *buf, size_t len);
-
-// Hash function for a byte array.  For convenience, a 64-bit seed is also
-// hashed into the result.
-extern uint64_t CityHash64WithSeed(const char *buf, size_t len, uint64_t seed);
-
-namespace NativeTask {
-
-class Hash {
-public:
-  /**
-   * Compatible with hadoop Text  BytesWritable hash
-   */
-  inline static int32_t BytesHash(const char * bytes, uint32_t length) {
-int32_t hash = 1;
-for (uint32_t i = 0; i < length; i++)
-  hash = (31 * hash) + (int32_t)bytes[i];
-return hash;
-  }
-
-  /**
-   * Unsigned version of BytesHash
-   */
-  inline static uint32_t BytesHashU(const char * bytes, uint32_t length) {
-uint32_t hash = 1;
-for (uint32_t i = 0; i < length; i++)
-  hash = (31U * hash) + (uint32_t)bytes[i];
-return hash;
-  }
-
-  /**
-   * City hash, faster for longer input
-   */
-  inline static uint64_t CityHash(const char * bytes, uint32_t length) {
-return CityHash64(bytes, length);
-  }
-
-  /**
-   * City hash, faster for longer input
-   */
-  inline static uint64_t CityHashWithSeed(const char * bytes, uint32_t length, 
uint64_t seed) {
-return CityHash64WithSeed(bytes, length, seed);
-  }
-};
-
-} // namespace NativeTask
-
-#endif /* HASH_H_ */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/683987be/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/Random.cc
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/Random.cc
 

[02/52] [abbrv] MAPREDUCE-5996. native-task: Rename system tests into standard directory layout. Contributed by Todd Lipcon.

2014-09-15 Thread jing9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2cba48f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
deleted file mode 100644
index 8a4aa6f..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * License); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an AS IS BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.mapred.nativetask.combinertest;
-
-import static org.junit.Assert.assertEquals;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.Task;
-import 
org.apache.hadoop.mapred.nativetask.combinertest.WordCount.IntSumReducer;
-import 
org.apache.hadoop.mapred.nativetask.combinertest.WordCount.TokenizerMapper;
-import org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile;
-import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
-import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
-import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
-import org.apache.hadoop.mapreduce.Counter;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.junit.Before;
-import org.junit.Test;
-
-public class CombinerTest {
-  private FileSystem fs;
-  private String inputpath;
-  private String nativeoutputpath;
-  private String hadoopoutputpath;
-
-  @Test
-  public void testWordCountCombiner() {
-try {
-
-  final Configuration nativeConf = 
ScenarioConfiguration.getNativeConfiguration();
-  nativeConf.addResource(TestConstants.COMBINER_CONF_PATH);
-  final Job nativejob = getJob(nativewordcount, nativeConf, inputpath, 
nativeoutputpath);
-
-  final Configuration commonConf = 
ScenarioConfiguration.getNormalConfiguration();
-  commonConf.addResource(TestConstants.COMBINER_CONF_PATH);
-
-  final Job normaljob = getJob(normalwordcount, commonConf, inputpath, 
hadoopoutputpath);
-
-  nativejob.waitForCompletion(true);
-
-  Counter nativeReduceGroups = 
nativejob.getCounters().findCounter(Task.Counter.REDUCE_INPUT_RECORDS);
-  
-  normaljob.waitForCompletion(true);
-  Counter normalReduceGroups = 
normaljob.getCounters().findCounter(Task.Counter.REDUCE_INPUT_RECORDS);
-   
-  assertEquals(true, ResultVerifier.verify(nativeoutputpath, 
hadoopoutputpath));
-  assertEquals(Native Reduce reduce group counter should equal orignal 
reduce group counter, 
-  nativeReduceGroups.getValue(), normalReduceGroups.getValue());
-  
-} catch (final Exception e) {
-  e.printStackTrace();
-  assertEquals(run exception, true, false);
-}
-  }
-
-  @Before
-  public void startUp() throws Exception {
-final ScenarioConfiguration conf = new ScenarioConfiguration();
-conf.addcombinerConf();
-
-this.fs = FileSystem.get(conf);
-
-this.inputpath = 
conf.get(TestConstants.NATIVETASK_TEST_COMBINER_INPUTPATH_KEY,
-TestConstants.NATIVETASK_TEST_COMBINER_INPUTPATH_DEFAULTV) + 
/wordcount;
-
-if (!fs.exists(new Path(inputpath))) {
-  new TestInputFile(
-  conf.getInt(TestConstants.NATIVETASK_COMBINER_WORDCOUNT_FILESIZE, 
100),
-  Text.class.getName(),
-  Text.class.getName(), conf).createSequenceTestFile(inputpath, 1, 

[21/52] [abbrv] MAPREDUCE-5977. Fix or suppress native-task gcc warnings. Contributed by Manu Zhang.

2014-09-15 Thread jing9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad4524c/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/include/gtest/gtest.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/include/gtest/gtest.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/include/gtest/gtest.h
new file mode 100644
index 000..b467822
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/include/gtest/gtest.h
@@ -0,0 +1,19541 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: w...@google.com (Zhanyong Wan)
+//
+// The Google C++ Testing Framework (Google Test)
+//
+// This header file defines the public API for Google Test.  It should be
+// included by any test program that uses Google Test.
+//
+// IMPORTANT NOTE: Due to limitation of the C++ language, we have to
+// leave some internal implementation details in this header file.
+// They are clearly marked by comments like this:
+//
+//   // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+//
+// Such code is NOT meant to be used by a user directly, and is subject
+// to CHANGE WITHOUT NOTICE.  Therefore DO NOT DEPEND ON IT in a user
+// program!
+//
+// Acknowledgment: Google Test borrowed the idea of automatic test
+// registration from Barthelemy Dagenais' (barthel...@prologique.com)
+// easyUnit framework.
+
+#ifndef GTEST_INCLUDE_GTEST_GTEST_H_
+#define GTEST_INCLUDE_GTEST_GTEST_H_
+
+#include <limits>
+#include <vector>
+
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: w...@google.com (Zhanyong Wan), eef...@gmail.com (Sean Mcafee)
+//
+// The Google C++ Testing Framework (Google Test)
+//
+// 

[08/52] [abbrv] git commit: MAPREDUCE-6000. native-task: Simplify ByteBufferDataReader/Writer. Contributed by Todd Lipcon.

2014-09-15 Thread jing9
MAPREDUCE-6000. native-task: Simplify ByteBufferDataReader/Writer. Contributed 
by Todd Lipcon.

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1613036 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/66351803
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/66351803
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/66351803

Branch: refs/heads/HDFS-6584
Commit: 663518032449940f136ae8a9f149ed453837a8d8
Parents: 77acc70
Author: Todd Lipcon t...@apache.org
Authored: Thu Jul 24 08:24:05 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Thu Jul 24 08:24:05 2014 +

--
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../nativetask/buffer/ByteBufferDataReader.java | 126 +
 .../nativetask/buffer/ByteBufferDataWriter.java | 141 ---
 .../buffer/TestByteBufferReadWrite.java |  93 +---
 4 files changed, 75 insertions(+), 286 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/66351803/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index e12f743..aa695cf 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -5,3 +5,4 @@ MAPREDUCE-5985. native-task: Fix build on macosx. Contributed 
by Binglin Chang
 MAPREDUCE-5994. Simplify ByteUtils and fix failing test. (todd)
 MAPREDUCE-5996. native-task: Rename system tests into standard directory 
layout (todd)
 MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop Common (todd)
+MAPREDUCE-6000. native-task: Simplify ByteBufferDataReader/Writer (todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/66351803/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/ByteBufferDataReader.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/ByteBufferDataReader.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/ByteBufferDataReader.java
index 5af7180..24f402d 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/ByteBufferDataReader.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/ByteBufferDataReader.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.mapred.nativetask.buffer;
 
+import com.google.common.base.Charsets;
+
 import java.io.DataInput;
 import java.io.EOFException;
 import java.io.IOException;
@@ -31,11 +33,13 @@ import java.nio.ByteBuffer;
 public class ByteBufferDataReader extends DataInputStream {
   private ByteBuffer byteBuffer;
   private char lineCache[];
+  private java.io.DataInputStream javaReader;
 
   public ByteBufferDataReader(InputBuffer buffer) {
 if (buffer != null) {
-  this.byteBuffer = buffer.getByteBuffer();
+  reset(buffer);
 }
+javaReader = new java.io.DataInputStream(this);
   }
 
   public void reset(InputBuffer buffer) {
@@ -128,128 +132,12 @@ public class ByteBufferDataReader extends 
DataInputStream {
 
   @Override
   public String readLine() throws IOException {
-
-InputStream in = this;
-
-char buf[] = lineCache;
-
-if (buf == null) {
-  buf = lineCache = new char[128];
-}
-
-int room = buf.length;
-int offset = 0;
-int c;
-
-loop: while (true) {
-  switch (c = in.read()) {
-  case -1:
-  case '\n':
-break loop;
-
-  case '\r':
-final int c2 = in.read();
-if ((c2 != '\n')  (c2 != -1)) {
-  if (!(in instanceof PushbackInputStream)) {
-in = new PushbackInputStream(in);
-  }
-  ((PushbackInputStream) in).unread(c2);
-}
-break loop;
-
-  default:
-if (--room  0) {
-  buf = new char[offset + 128];
-  room = buf.length - offset - 1;
-  System.arraycopy(lineCache, 0, buf, 0, offset);
-  lineCache = buf;
-}
-buf[offset++] = (char) c;
-break;
-  }
-}
-if ((c == -1)  (offset == 0)) {
-  return null;
-}
-return String.copyValueOf(buf, 0, offset);
+return javaReader.readLine();
   }
 
   @Override
   

[46/52] [abbrv] git commit: Merge MR-2841 changes into main MapReduce CHANGES.txt file

2014-09-15 Thread jing9
Merge MR-2841 changes into main MapReduce CHANGES.txt file


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3c918172
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3c918172
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3c918172

Branch: refs/heads/HDFS-6584
Commit: 3c9181722b05a9192f5440ea8f3f77231f84eac6
Parents: 8a3a327
Author: Todd Lipcon t...@apache.org
Authored: Fri Sep 12 18:35:13 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Fri Sep 12 18:35:13 2014 -0700

--
 .../CHANGES.MAPREDUCE-2841.txt  | 26 
 hadoop-mapreduce-project/CHANGES.txt| 65 
 2 files changed, 65 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3c918172/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
deleted file mode 100644
index cfc9412..000
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ /dev/null
@@ -1,26 +0,0 @@
-Changes for Hadoop Native Map Output Collector
-
-
-MAPREDUCE-5985. native-task: Fix build on macosx. Contributed by Binglin Chang
-MAPREDUCE-5994. Simplify ByteUtils and fix failing test. (todd)
-MAPREDUCE-5996. native-task: Rename system tests into standard directory 
layout (todd)
-MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop Common (todd)
-MAPREDUCE-6000. native-task: Simplify ByteBufferDataReader/Writer (todd)
-MAPREDUCE-5991. native-task should not run unit tests if native profile is not 
enabled. (Binglin Chang)
-MAPREDUCE-5995. native-task: Revert changes to Text internals (todd)
-MAPREDUCE-6005. native-task: Fix some valgrind errors (Binglin Chang)
-MAPREDUCE-5984. native-task: Reuse lz4 sources in hadoop-common (Binglin Chang)
-MAPREDUCE-5976. native-task: should not fail to build if snappy is missing 
(Manu Zhang)
-MAPREDUCE-5978. native-task: remove test case for not supported codec 
Bzip2Codec and DefaultCodec (Manu Zhang)
-MAPREDUCE-6006. native-task: add native tests to maven and fix bug in pom.xml 
(Binglin Chang via todd)
-MAPREDUCE-6026. native-task: fix logging (Manu Zhang via todd)
-MAPREDUCE-6035. native-task: sources/test-sources jar distribution (Manu Zhang 
via todd)
-MAPREDUCE-5977. Fix or suppress native-task gcc warnings (Manu Zhang via todd)
-MAPREDUCE-6054. native-task: Speed up tests (todd)
-MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is 
sucessful (Binglin Chang)
-MAPREDUCE-6056. native-task: move system test working dir to target dir and 
cleanup test config xml files (Manu Zhang via bchang)
-MAPREDUCE-6055. native-task: findbugs, interface annotations, and other misc 
cleanup (todd)
-MAPREDUCE-6067. native-task: fix some counter issues (Binglin Chang)
-MAPREDUCE-6069. native-task: Lint/style fixes and removal of unused code (todd)
-MAPREDUCE-6074. native-task: fix release audit, javadoc, javac warnings (todd)
-MAPREDUCE-6077. native-task: Remove CustomModule examples in nativetask 
(seanzhong)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3c918172/hadoop-mapreduce-project/CHANGES.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.txt 
b/hadoop-mapreduce-project/CHANGES.txt
index f1435d2..5d1e5f5 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -20,6 +20,9 @@ Trunk (Unreleased)
 MAPREDUCE-5910. Make MR AM resync with RM in case of work-preserving
 RM-restart. (Rohith via jianhe)
 
+MAPREDUCE-2841. Add a native implementation of MapOutputCollector.
+(see section below for detailed breakdown)
+
   IMPROVEMENTS
 
 MAPREDUCE-3481. [Gridmix] Improve Gridmix STRESS mode. (amarrk)
@@ -157,6 +160,68 @@ Trunk (Unreleased)
 MAPREDUCE-5972. Fix typo 'programatically' in job.xml (and a few other
 places) (Akira AJISAKA via aw)
 
+  BREAKDOWN OF MAPREDUCE-2841 (NATIVE TASK) SUBTASKS
+
+MAPREDUCE-5985. native-task: Fix build on macosx. Contributed by
+Binglin Chang
+
+MAPREDUCE-5994. Simplify ByteUtils and fix failing test. (todd)
+
+MAPREDUCE-5996. native-task: Rename system tests into standard directory
+layout (todd)
+
+MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop Common (todd)
+
+MAPREDUCE-6000. native-task: Simplify ByteBufferDataReader/Writer (todd)
+
+MAPREDUCE-5991. native-task should not run unit tests if native profile is
+not enabled. (Binglin Chang)
+
+MAPREDUCE-5995. native-task: Revert changes to Text internals (todd)
+
+MAPREDUCE-6005. 

[40/52] [abbrv] git commit: YARN-2547. Cross Origin Filter throws UnsupportedOperationException upon destroy (Mit Desai via jeagles)

2014-09-15 Thread jing9
YARN-2547. Cross Origin Filter throws UnsupportedOperationException upon 
destroy (Mit Desai via jeagles)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/54e57948
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/54e57948
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/54e57948

Branch: refs/heads/HDFS-6584
Commit: 54e5794806bd856da0277510efe63656eed23146
Parents: 3122daa
Author: Jonathan Eagles jeag...@gmail.com
Authored: Fri Sep 12 15:35:17 2014 -0500
Committer: Jonathan Eagles jeag...@gmail.com
Committed: Fri Sep 12 15:35:17 2014 -0500

--
 hadoop-yarn-project/CHANGES.txt |  3 ++
 .../timeline/webapp/CrossOriginFilter.java  | 12 ++---
 .../timeline/webapp/TestCrossOriginFilter.java  | 49 
 3 files changed, 58 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/54e57948/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index bb73dc7..3993f5e 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -214,6 +214,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2229. Changed the integer field of ContainerId to be long type.
 (Tsuyoshi OZAWA via jianhe)
 
+YARN-2547. Cross Origin Filter throws UnsupportedOperationException upon
+destroy (Mit Desai via jeagles)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54e57948/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
index 5a0703d..cceee54 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
@@ -150,8 +150,8 @@ public class CrossOriginFilter implements Filter {
 if (allowedMethodsConfig == null) {
   allowedMethodsConfig = ALLOWED_METHODS_DEFAULT;
 }
-allowedMethods =
-Arrays.asList(allowedMethodsConfig.trim().split("\\s*,\\s*"));
+allowedMethods.addAll(
+Arrays.asList(allowedMethodsConfig.trim().split("\\s*,\\s*")));
 LOG.info("Allowed Methods: " + getAllowedMethodsHeader());
   }
 
@@ -161,8 +161,8 @@ public class CrossOriginFilter implements Filter {
 if (allowedHeadersConfig == null) {
   allowedHeadersConfig = ALLOWED_HEADERS_DEFAULT;
 }
-allowedHeaders =
-Arrays.asList(allowedHeadersConfig.trim().split("\\s*,\\s*"));
+allowedHeaders.addAll(
+Arrays.asList(allowedHeadersConfig.trim().split("\\s*,\\s*")));
 LOG.info("Allowed Headers: " + getAllowedHeadersHeader());
   }
 
@@ -172,8 +172,8 @@ public class CrossOriginFilter implements Filter {
 if (allowedOriginsConfig == null) {
   allowedOriginsConfig = ALLOWED_ORIGINS_DEFAULT;
 }
-allowedOrigins =
-Arrays.asList(allowedOriginsConfig.trim().split("\\s*,\\s*"));
+allowedOrigins.addAll(
+Arrays.asList(allowedOriginsConfig.trim().split("\\s*,\\s*")));
 allowAllOrigins = allowedOrigins.contains("*");
 LOG.info("Allowed Origins: " + StringUtils.join(allowedOrigins, ','));
 LOG.info("Allow All Origins: " + allowAllOrigins);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/54e57948/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
index ccc9bbf..e0990f9 100644
--- 

[35/52] [abbrv] git commit: MAPREDUCE-6069. Lint/style fixes and removal of unused code. Contributed by Todd Lipcon.

2014-09-15 Thread jing9
MAPREDUCE-6069. Lint/style fixes and removal of unused code. Contributed by 
Todd Lipcon.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/683987be
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/683987be
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/683987be

Branch: refs/heads/HDFS-6584
Commit: 683987be7c160e67ddb8534eeb3c464bbe2796dd
Parents: 0032216
Author: Todd Lipcon t...@apache.org
Authored: Wed Sep 3 13:07:24 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Fri Sep 5 10:44:38 2014 -0700

--
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../hadoop-mapreduce-client-nativetask/LICENSE  |  22 --
 .../src/CMakeLists.txt  |   6 -
 .../hadoop/mapred/nativetask/Constants.java |  13 -
 .../hadoop/mapred/nativetask/DataReceiver.java  |   3 -
 .../mapred/nativetask/ICombineHandler.java  |   2 -
 .../mapred/nativetask/INativeHandler.java   |   8 -
 .../mapred/nativetask/NativeBatchProcessor.java |  11 -
 .../NativeMapOutputCollectorDelegator.java  |  10 +-
 .../hadoop/mapred/nativetask/NativeRuntime.java |  32 +-
 .../hadoop/mapred/nativetask/Platform.java  |  11 +-
 .../nativetask/handlers/BufferPullee.java   |   3 +-
 .../nativetask/handlers/BufferPuller.java   |   2 -
 .../nativetask/handlers/BufferPushee.java   |   6 +-
 .../nativetask/handlers/BufferPusher.java   |   3 +-
 .../nativetask/handlers/CombinerHandler.java|  16 +-
 .../mapred/nativetask/handlers/IDataLoader.java |   1 -
 .../handlers/NativeCollectorOnlyHandler.java|   6 +-
 .../serde/BytesWritableSerializer.java  |   3 +-
 .../mapred/nativetask/serde/IKVSerializer.java  |  31 +-
 .../mapred/nativetask/serde/KVSerializer.java   |   6 +-
 .../nativetask/serde/NativeSerialization.java   |  11 +-
 .../mapred/nativetask/util/BytesUtil.java   |   2 +-
 .../nativetask/util/LocalJobOutputFiles.java|  58 +---
 .../nativetask/util/NativeTaskOutput.java   |  55 +---
 .../nativetask/util/NativeTaskOutputFiles.java  |  72 ++---
 .../mapred/nativetask/util/ReadWriteBuffer.java |  16 +-
 .../src/main/native/cityhash/city.cc| 307 ---
 .../src/main/native/cityhash/city.h |  90 --
 .../src/main/native/src/NativeTask.h| 137 -
 .../src/main/native/src/codec/BlockCodec.cc |   5 +-
 .../src/main/native/src/codec/BlockCodec.h  |   2 +-
 .../src/main/native/src/codec/GzipCodec.cc  |   2 +-
 .../src/main/native/src/codec/GzipCodec.h   |   2 +-
 .../src/main/native/src/codec/Lz4Codec.cc   |   2 +-
 .../src/main/native/src/codec/Lz4Codec.h|   2 +-
 .../src/main/native/src/codec/SnappyCodec.cc|   2 +-
 .../src/main/native/src/codec/SnappyCodec.h |   2 +-
 .../native/src/handler/AbstractMapHandler.cc|   9 +-
 .../src/main/native/src/handler/BatchHandler.cc |   9 +-
 .../main/native/src/handler/CombineHandler.cc   |   4 +-
 .../main/native/src/handler/CombineHandler.h|   2 +-
 .../src/handler/MCollectorOutputHandler.cc  |   6 +-
 .../src/main/native/src/lib/BufferStream.cc | 116 +--
 .../src/main/native/src/lib/BufferStream.h  |  39 +--
 .../src/main/native/src/lib/Buffers.cc  |  77 +
 .../src/main/native/src/lib/Buffers.h   |  63 +---
 .../src/main/native/src/lib/Combiner.cc |  73 -
 .../src/main/native/src/lib/Combiner.h  |  18 +-
 .../src/main/native/src/lib/Compressions.cc |   6 +-
 .../src/main/native/src/lib/Compressions.h  |   2 +-
 .../src/main/native/src/lib/FileSystem.cc   |  23 +-
 .../src/main/native/src/lib/FileSystem.h|   2 +-
 .../src/main/native/src/lib/IFile.cc|   6 +-
 .../src/main/native/src/lib/IFile.h |  10 +-
 .../src/main/native/src/lib/Iterator.cc |   6 +-
 .../src/main/native/src/lib/Log.cc  |   2 +-
 .../main/native/src/lib/MapOutputCollector.cc   |  58 ++--
 .../main/native/src/lib/MapOutputCollector.h|  16 +-
 .../src/main/native/src/lib/MapOutputSpec.cc|   4 +-
 .../src/main/native/src/lib/MapOutputSpec.h |   4 +-
 .../src/main/native/src/lib/MemoryBlock.cc  |  24 +-
 .../src/main/native/src/lib/MemoryPool.h|   4 +-
 .../src/main/native/src/lib/Merge.cc|   5 +-
 .../src/main/native/src/lib/Merge.h |   8 +-
 .../src/main/native/src/lib/MinHeap.h   |   2 +-
 .../src/main/native/src/lib/NativeLibrary.cc|   7 +-
 .../main/native/src/lib/NativeObjectFactory.cc  |  19 +-
 .../main/native/src/lib/NativeRuntimeJniImpl.cc |  27 +-
 .../src/main/native/src/lib/NativeTask.cc   |  44 +--
 .../src/main/native/src/lib/PartitionBucket.cc  |  25 +-
 .../src/main/native/src/lib/PartitionBucket.h   |  16 +-
 .../native/src/lib/PartitionBucketIterator.cc   |  22 +-
 

[31/52] [abbrv] git commit: MAPREDUCE-6067. native-task: fix some counter issues (Binglin Chang)

2014-09-15 Thread jing9
MAPREDUCE-6067. native-task: fix some counter issues (Binglin Chang)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/00322161
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/00322161
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/00322161

Branch: refs/heads/HDFS-6584
Commit: 00322161b5d4d54770c2f0823e036537edecf5bf
Parents: 1081d9c
Author: Binglin Chang bch...@apache.org
Authored: Fri Sep 5 14:20:39 2014 +0800
Committer: Binglin Chang bch...@apache.org
Committed: Fri Sep 5 14:20:39 2014 +0800

--
 .../CHANGES.MAPREDUCE-2841.txt  |  1 +
 .../NativeMapOutputCollectorDelegator.java  |  4 +
 .../mapred/nativetask/StatusReportChecker.java  |  5 --
 .../src/handler/MCollectorOutputHandler.cc  |  3 +-
 .../src/main/native/src/lib/IFile.cc|  6 +-
 .../src/main/native/src/lib/IFile.h |  3 +-
 .../main/native/src/lib/MapOutputCollector.cc   | 85 ++--
 .../main/native/src/lib/MapOutputCollector.h|  8 +-
 .../src/main/native/src/lib/Merge.cc| 24 --
 .../src/main/native/src/lib/PartitionBucket.h   |  2 -
 .../src/main/native/src/lib/TaskCounters.cc | 10 +--
 .../src/main/native/src/lib/TaskCounters.h  |  8 --
 .../nativetask/combinertest/CombinerTest.java   |  8 +-
 .../combinertest/LargeKVCombinerTest.java   |  6 +-
 .../nativetask/compresstest/CompressTest.java   |  3 +
 .../hadoop/mapred/nativetask/kvtest/KVTest.java | 74 +++--
 .../mapred/nativetask/kvtest/LargeKVTest.java   | 82 +--
 .../nativetask/nonsorttest/NonSortTest.java |  6 +-
 .../nativetask/testutil/ResultVerifier.java | 24 +-
 19 files changed, 179 insertions(+), 183 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/00322161/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 269a2f6..279b960 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -20,3 +20,4 @@ MAPREDUCE-6054. native-task: Speed up tests (todd)
 MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is 
sucessful (Binglin Chang)
 MAPREDUCE-6056. native-task: move system test working dir to target dir and 
cleanup test config xml files (Manu Zhang via bchang)
 MAPREDUCE-6055. native-task: findbugs, interface annotations, and other misc 
cleanup (todd)
+MAPREDUCE-6067. native-task: fix some counter issues (Binglin Chang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/00322161/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeMapOutputCollectorDelegator.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeMapOutputCollectorDelegator.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeMapOutputCollectorDelegator.java
index 224b95b..828d7df 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeMapOutputCollectorDelegator.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeMapOutputCollectorDelegator.java
@@ -34,6 +34,7 @@ import 
org.apache.hadoop.mapred.nativetask.serde.INativeSerializer;
 import org.apache.hadoop.mapred.nativetask.serde.NativeSerialization;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.util.QuickSort;
 
 /**
@@ -46,6 +47,7 @@ public class NativeMapOutputCollectorDelegator<K, V> 
implements MapOutputCollect
   private JobConf job;
   private NativeCollectorOnlyHandler<K, V> handler;
 
+  private Context context;
   private StatusReportChecker updater;
 
   @Override
@@ -58,6 +60,7 @@ public class NativeMapOutputCollectorDelegator<K, V> 
implements MapOutputCollect
 handler.close();
 if (null != updater) {
   updater.stop();
+  NativeRuntime.reportStatus(context.getReporter());
 }
   }
 
@@ -69,6 +72,7 @@ public class NativeMapOutputCollectorDelegator<K, V> 
implements MapOutputCollect
   @SuppressWarnings("unchecked")
   @Override
   public void init(Context context) throws IOException, ClassNotFoundException 
{
+

[22/52] [abbrv] MAPREDUCE-5977. Fix or suppress native-task gcc warnings. Contributed by Manu Zhang.

2014-09-15 Thread jing9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad4524c/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest.h
deleted file mode 100644
index b467822..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest.h
+++ /dev/null
@@ -1,19541 +0,0 @@
-// Copyright 2005, Google Inc.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// Author: w...@google.com (Zhanyong Wan)
-//
-// The Google C++ Testing Framework (Google Test)
-//
-// This header file defines the public API for Google Test.  It should be
-// included by any test program that uses Google Test.
-//
-// IMPORTANT NOTE: Due to limitation of the C++ language, we have to
-// leave some internal implementation details in this header file.
-// They are clearly marked by comments like this:
-//
-//   // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
-//
-// Such code is NOT meant to be used by a user directly, and is subject
-// to CHANGE WITHOUT NOTICE.  Therefore DO NOT DEPEND ON IT in a user
-// program!
-//
-// Acknowledgment: Google Test borrowed the idea of automatic test
-// registration from Barthelemy Dagenais' (barthel...@prologique.com)
-// easyUnit framework.
-
-#ifndef GTEST_INCLUDE_GTEST_GTEST_H_
-#define GTEST_INCLUDE_GTEST_GTEST_H_
-
-#include <limits>
-#include <vector>
-
-// Copyright 2005, Google Inc.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// Authors: w...@google.com (Zhanyong Wan), eef...@gmail.com (Sean Mcafee)
-//
-// The Google C++ Testing Framework (Google Test)
-//
-// This header file declares functions and macros used 

[15/52] [abbrv] git commit: MAPREDUCE-5978. native-task: remove test case for not supported codec Bzip2Codec and DefaultCodec (Manu Zhang)

2014-09-15 Thread jing9
MAPREDUCE-5978. native-task: remove test case for not supported codec 
Bzip2Codec and DefaultCodec (Manu Zhang)

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1616116 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4d4fb172
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4d4fb172
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4d4fb172

Branch: refs/heads/HDFS-6584
Commit: 4d4fb1723fd825df4ad2488e4ecde0c69359e83a
Parents: 432f641
Author: Sean Zhong seanzh...@apache.org
Authored: Wed Aug 6 07:40:24 2014 +
Committer: Sean Zhong seanzh...@apache.org
Committed: Wed Aug 6 07:40:24 2014 +

--
 .../CHANGES.MAPREDUCE-2841.txt  |  3 +-
 .../nativetask/compresstest/CompressTest.java   | 33 -
 .../test/resources/test-bzip2-compress-conf.xml | 39 
 .../resources/test-default-compress-conf.xml| 39 
 4 files changed, 2 insertions(+), 112 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4d4fb172/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 4b77262..462ac8b 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -10,4 +10,5 @@ MAPREDUCE-5991. native-task should not run unit tests if 
native profile is not e
 MAPREDUCE-5995. native-task: Revert changes to Text internals (todd)
 MAPREDUCE-6005. native-task: Fix some valgrind errors (Binglin Chang)
 MAPREDUCE-5984. native-task: Reuse lz4 sources in hadoop-common (Binglin Chang)
-MAPREDUCE-5976. native-task: should not fail to build if snappy is missing 
(Manu Zhang)
\ No newline at end of file
+MAPREDUCE-5976. native-task: should not fail to build if snappy is missing 
(Manu Zhang)
+MAPREDUCE-5978. native-task: remove test case for not supported codec 
Bzip2Codec and DefaultCodec (Manu Zhang)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4d4fb172/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/compresstest/CompressTest.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/compresstest/CompressTest.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/compresstest/CompressTest.java
index 0406375..b98e2de 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/compresstest/CompressTest.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/compresstest/CompressTest.java
@@ -68,23 +68,6 @@ public class CompressTest {
   }
 
   @Test
-  public void testBzip2Compress() throws Exception {
-final Configuration nativeconf = 
ScenarioConfiguration.getNativeConfiguration();
-nativeconf.addResource(TestConstants.BZIP2_COMPRESS_CONF_PATH);
-final Job nativejob = CompressMapper.getCompressJob("nativebzip2", 
nativeconf);
-nativejob.waitForCompletion(true);
-
-final Configuration hadoopconf = 
ScenarioConfiguration.getNormalConfiguration();
-hadoopconf.addResource(TestConstants.BZIP2_COMPRESS_CONF_PATH);
-final Job hadoopjob = CompressMapper.getCompressJob("hadoopbzip2", 
hadoopconf);
-hadoopjob.waitForCompletion(true);
-
-final boolean compareRet = 
ResultVerifier.verify(CompressMapper.outputFileDir + "nativebzip2",
-CompressMapper.outputFileDir + "hadoopbzip2");
-assertEquals("file compare result: if they are the same ,then return 
true", true, compareRet);
-  }
-
-  @Test
   public void testLz4Compress() throws Exception {
 final Configuration nativeConf = 
ScenarioConfiguration.getNativeConfiguration();
 nativeConf.addResource(TestConstants.LZ4_COMPRESS_CONF_PATH);
@@ -100,22 +83,6 @@ public class CompressTest {
 assertEquals("file compare result: if they are the same ,then return 
true", true, compareRet);
   }
 
-  @Test
-  public void testDefaultCompress() throws Exception {
-final Configuration nativeConf = 
ScenarioConfiguration.getNativeConfiguration();
-nativeConf.addResource(TestConstants.DEFAULT_COMPRESS_CONF_PATH);
-final Job nativeJob = CompressMapper.getCompressJob("nativedefault", 
nativeConf);
-

[32/52] [abbrv] MAPREDUCE-6069. Lint/style fixes and removal of unused code. Contributed by Todd Lipcon.

2014-09-15 Thread jing9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/683987be/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
index 2258726..f81d94f 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
@@ -74,13 +74,14 @@ public class NonSortTest {
   public void startUp() throws Exception {
 Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
 Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
-final ScenarioConfiguration configuration = new ScenarioConfiguration();
-configuration.addNonSortTestConf();
-final FileSystem fs = FileSystem.get(configuration);
+final ScenarioConfiguration conf = new ScenarioConfiguration();
+conf.addNonSortTestConf();
+final FileSystem fs = FileSystem.get(conf);
 final Path path = new Path(TestConstants.NATIVETASK_NONSORT_TEST_INPUTDIR);
 if (!fs.exists(path)) {
-  new 
TestInputFile(configuration.getInt(TestConstants.NATIVETASK_NONSORTTEST_FILESIZE,
 1000), Text.class.getName(),
-  Text.class.getName(), 
configuration).createSequenceTestFile(path.toString());
+  int filesize = 
conf.getInt(TestConstants.NATIVETASK_NONSORTTEST_FILESIZE, 1000);
+  new TestInputFile(filesize, Text.class.getName(),
+  Text.class.getName(), conf).createSequenceTestFile(path.toString());
 }
 fs.close();
   }
@@ -93,7 +94,8 @@ public class NonSortTest {
   }
 
 
-  private Job getJob(Configuration conf, String jobName, String inputpath, 
String outputpath) throws IOException {
+  private Job getJob(Configuration conf, String jobName,
+ String inputpath, String outputpath) throws IOException {
 final FileSystem fs = FileSystem.get(conf);
 if (fs.exists(new Path(outputpath))) {
   fs.delete(new Path(outputpath), true);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/683987be/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTestMR.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTestMR.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTestMR.java
index 4ca2449..4092e5f 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTestMR.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTestMR.java
@@ -36,7 +36,8 @@ public class NonSortTestMR {
 private final Text word = new Text();
 
 @Override
-public void map(Object key, Text value, Context context) throws 
IOException, InterruptedException {
+public void map(Object key, Text value, Context context)
+  throws IOException, InterruptedException {
   final String line = value.toString();
   final StringTokenizer tokenizer = new StringTokenizer(line);
   while (tokenizer.hasMoreTokens()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/683987be/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java
index 004e8b8..fd5b100 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java
+++ 

[52/52] [abbrv] git commit: Merge remote-tracking branch 'origin/trunk' into HDFS-6584

2014-09-15 Thread jing9
Merge remote-tracking branch 'origin/trunk' into HDFS-6584


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ece3ecab
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ece3ecab
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ece3ecab

Branch: refs/heads/HDFS-6584
Commit: ece3ecabdf348dd9f6876f2819e65f72c28cc25f
Parents: 2689b6c 24d920b
Author: Jing Zhao ji...@apache.org
Authored: Mon Sep 15 10:45:45 2014 -0700
Committer: Jing Zhao ji...@apache.org
Committed: Mon Sep 15 10:45:45 2014 -0700

--
 LICENSE.txt |33 +
 .../assemblies/hadoop-mapreduce-dist.xml| 7 +
 hadoop-common-project/hadoop-common/CHANGES.txt | 6 +
 .../src/main/conf/log4j.properties  | 5 +
 .../src/main/resources/core-default.xml |86 +
 .../apache/hadoop/crypto/key/TestKeyShell.java  | 4 +-
 .../hadoop/security/alias/TestCredShell.java|22 +-
 .../crypto/key/kms/server/KMSConfiguration.java |15 +-
 .../hadoop/crypto/key/kms/server/MiniKMS.java   | 3 +-
 .../hadoop/crypto/key/kms/server/TestKMS.java   | 3 +-
 hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml  | 8 +
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 +
 .../apache/hadoop/hdfs/TestEncryptionZones.java |10 +
 hadoop-mapreduce-project/CHANGES.txt|65 +
 .../mapreduce/v2/app/ControlledClock.java   |43 -
 .../v2/app/job/impl/TestTaskAttempt.java| 3 +-
 .../v2/app/rm/TestRMContainerAllocator.java | 3 +-
 .../mapreduce/v2/hs/TestHistoryFileManager.java | 3 +-
 .../hadoop-mapreduce-client-jobclient/pom.xml   | 6 +
 .../v2/TestSpeculativeExecutionWithMRApp.java   | 3 +-
 .../hadoop-mapreduce-client-nativetask/pom.xml  |   216 +
 .../src/CMakeLists.txt  |   276 +
 .../src/JNIFlags.cmake  |   118 +
 .../src/config.h.cmake  |23 +
 .../hadoop/mapred/nativetask/Command.java   |57 +
 .../mapred/nativetask/CommandDispatcher.java|33 +
 .../hadoop/mapred/nativetask/Constants.java |48 +
 .../hadoop/mapred/nativetask/DataChannel.java   |41 +
 .../hadoop/mapred/nativetask/DataReceiver.java  |37 +
 .../mapred/nativetask/HadoopPlatform.java   |84 +
 .../mapred/nativetask/ICombineHandler.java  |44 +
 .../mapred/nativetask/INativeComparable.java|54 +
 .../mapred/nativetask/INativeHandler.java   |53 +
 .../mapred/nativetask/NativeBatchProcessor.java |   279 +
 .../mapred/nativetask/NativeDataSource.java |47 +
 .../mapred/nativetask/NativeDataTarget.java |47 +
 .../NativeMapOutputCollectorDelegator.java  |   171 +
 .../hadoop/mapred/nativetask/NativeRuntime.java |   197 +
 .../hadoop/mapred/nativetask/Platform.java  |   100 +
 .../hadoop/mapred/nativetask/Platforms.java |79 +
 .../mapred/nativetask/StatusReportChecker.java  |99 +
 .../hadoop/mapred/nativetask/TaskContext.java   |94 +
 .../mapred/nativetask/buffer/BufferType.java|27 +
 .../nativetask/buffer/ByteBufferDataReader.java |   148 +
 .../nativetask/buffer/ByteBufferDataWriter.java |   169 +
 .../nativetask/buffer/DataInputStream.java  |27 +
 .../nativetask/buffer/DataOutputStream.java |39 +
 .../mapred/nativetask/buffer/InputBuffer.java   |   136 +
 .../mapred/nativetask/buffer/OutputBuffer.java  |76 +
 .../nativetask/handlers/BufferPullee.java   |   121 +
 .../nativetask/handlers/BufferPuller.java   |   203 +
 .../nativetask/handlers/BufferPushee.java   |   151 +
 .../nativetask/handlers/BufferPusher.java   |91 +
 .../nativetask/handlers/CombinerHandler.java|   145 +
 .../mapred/nativetask/handlers/IDataLoader.java |37 +
 .../handlers/NativeCollectorOnlyHandler.java|   171 +
 .../serde/BoolWritableSerializer.java   |35 +
 .../serde/ByteWritableSerializer.java   |35 +
 .../serde/BytesWritableSerializer.java  |48 +
 .../nativetask/serde/DefaultSerializer.java |71 +
 .../serde/DoubleWritableSerializer.java |35 +
 .../serde/FloatWritableSerializer.java  |36 +
 .../mapred/nativetask/serde/IKVSerializer.java  |48 +
 .../nativetask/serde/INativeSerializer.java |49 +
 .../nativetask/serde/IntWritableSerializer.java |35 +
 .../mapred/nativetask/serde/KVSerializer.java   |   115 +
 .../serde/LongWritableSerializer.java   |34 +
 .../nativetask/serde/NativeSerialization.java   |91 +
 .../serde/NullWritableSerializer.java   |35 +
 .../serde/SerializationFramework.java   |35 +
 .../mapred/nativetask/serde/TextSerializer.java |49 +
 .../serde/VIntWritableSerializer.java   |27 +
 .../serde/VLongWritableSerializer.java  |27 +
 

[28/52] [abbrv] git commit: Merge remote-tracking branch 'origin/trunk' into MR-2841

2014-09-15 Thread jing9
Merge remote-tracking branch 'origin/trunk' into MR-2841

Conflicts:

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/cce7d1e2
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/cce7d1e2
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/cce7d1e2

Branch: refs/heads/HDFS-6584
Commit: cce7d1e2f96ff75fe19ab2879ddd90a898ca5b18
Parents: 7ade9b0 727331b
Author: Todd Lipcon t...@apache.org
Authored: Tue Sep 2 15:55:54 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Tue Sep 2 15:55:54 2014 -0700

--
 .gitignore  |1 +
 BUILDING.txt|   22 +
 dev-support/test-patch.sh   |   26 +-
 .../main/resources/assemblies/hadoop-dist.xml   |7 +
 .../dev-support/findbugsExcludeFile.xml |   38 +
 hadoop-common-project/hadoop-auth/pom.xml   |   26 +
 .../authentication/client/AuthenticatedURL.java |   36 +-
 .../server/AuthenticationFilter.java|  109 +-
 .../server/KerberosAuthenticationHandler.java   |   21 +-
 .../server/PseudoAuthenticationHandler.java |   21 +-
 .../util/RandomSignerSecretProvider.java|   49 +
 .../util/RolloverSignerSecretProvider.java  |  139 ++
 .../security/authentication/util/Signer.java|   46 +-
 .../util/SignerSecretProvider.java  |   62 +
 .../util/StringSignerSecretProvider.java|   49 +
 .../client/AuthenticatorTestCase.java   |  137 +-
 .../client/TestAuthenticatedURL.java|   38 +-
 .../client/TestKerberosAuthenticator.java   |   58 +-
 .../server/TestAuthenticationFilter.java|  118 +-
 .../util/TestRandomSignerSecretProvider.java|   63 +
 .../util/TestRolloverSignerSecretProvider.java  |   79 +
 .../authentication/util/TestSigner.java |   85 +-
 .../util/TestStringSignerSecretProvider.java|   33 +
 hadoop-common-project/hadoop-common/CHANGES.txt |  502 +--
 .../dev-support/findbugsExcludeFile.xml |5 +
 hadoop-common-project/hadoop-common/pom.xml |   34 +-
 .../hadoop-common/src/CMakeLists.txt|   34 +
 .../hadoop-common/src/JNIFlags.cmake|6 +
 .../hadoop-common/src/config.h.cmake|1 +
 .../src/contrib/bash-tab-completion/hadoop.sh   |   28 +-
 .../hadoop-common/src/main/bin/hadoop   |  229 +--
 .../src/main/bin/hadoop-config.cmd  |   10 +-
 .../hadoop-common/src/main/bin/hadoop-config.sh |  409 ++---
 .../hadoop-common/src/main/bin/hadoop-daemon.sh |  214 +--
 .../src/main/bin/hadoop-daemons.sh  |   37 +-
 .../src/main/bin/hadoop-functions.sh| 1066 ++
 .../src/main/bin/hadoop-layout.sh.example   |   93 ++
 .../hadoop-common/src/main/bin/hadoop.cmd   |   33 +-
 .../hadoop-common/src/main/bin/rcc  |   52 +-
 .../hadoop-common/src/main/bin/slaves.sh|   51 +-
 .../hadoop-common/src/main/bin/start-all.sh |   38 +-
 .../hadoop-common/src/main/bin/stop-all.sh  |   36 +-
 .../hadoop-common/src/main/conf/hadoop-env.sh   |  432 +-
 .../org/apache/hadoop/conf/Configuration.java   |   78 +-
 .../hadoop/conf/ReconfigurationServlet.java |2 +
 .../apache/hadoop/crypto/AesCtrCryptoCodec.java |   67 +
 .../org/apache/hadoop/crypto/CipherSuite.java   |  115 ++
 .../org/apache/hadoop/crypto/CryptoCodec.java   |  179 +++
 .../apache/hadoop/crypto/CryptoInputStream.java |  680 +
 .../hadoop/crypto/CryptoOutputStream.java   |  280 
 .../apache/hadoop/crypto/CryptoStreamUtils.java |   70 +
 .../org/apache/hadoop/crypto/Decryptor.java |   72 +
 .../org/apache/hadoop/crypto/Encryptor.java |   71 +
 .../hadoop/crypto/JceAesCtrCryptoCodec.java |  165 +++
 .../hadoop/crypto/OpensslAesCtrCryptoCodec.java |  164 +++
 .../org/apache/hadoop/crypto/OpensslCipher.java |  289 
 .../hadoop/crypto/key/JavaKeyStoreProvider.java |  248 +++-
 .../apache/hadoop/crypto/key/KeyProvider.java   |   32 +-
 .../crypto/key/KeyProviderCryptoExtension.java  |   91 +-
 .../KeyProviderDelegationTokenExtension.java|8 +-
 .../hadoop/crypto/key/KeyProviderExtension.java |1 +
 .../org/apache/hadoop/crypto/key/KeyShell.java  |   89 +-
 .../apache/hadoop/crypto/key/UserProvider.java  |5 +-
 .../crypto/key/kms/KMSClientProvider.java   |  117 +-
 .../hadoop/crypto/key/kms/KMSRESTConstants.java |2 +-
 .../crypto/random/OpensslSecureRandom.java  |  122 ++
 .../hadoop/crypto/random/OsSecureRandom.java|  115 ++
 .../apache/hadoop/fs/AbstractFileSystem.java|  103 +-
 .../apache/hadoop/fs/ChecksumFileSystem.java|8 +-
 .../java/org/apache/hadoop/fs/ChecksumFs.java   |8 +-
 .../hadoop/fs/CommonConfigurationKeys.java 

[20/52] [abbrv] MAPREDUCE-5977. Fix or suppress native-task gcc warnings. Contributed by Manu Zhang.

2014-09-15 Thread jing9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad4524c/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
index 11712ab..60bb6f5 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
@@ -622,7 +622,6 @@ typedef void (*ANY_FUNC_PTR)();
 #define DEFINE_NATIVE_LIBRARY(Library) \
  static std::map<std::string, NativeTask::ObjectCreatorFunc> 
Library##ClassMap__; \
  extern "C" void * Library##GetFunctionGetter(const std::string & name) { \
-  void * ret = NULL; \
  std::map<std::string, NativeTask::ObjectCreatorFunc>::iterator itr = 
Library##ClassMap__.find(name); \
  if (itr != Library##ClassMap__.end()) { \
return (void *)(itr->second); \
@@ -630,7 +629,6 @@ typedef void (*ANY_FUNC_PTR)();
   return NULL; \
 } \
  extern "C" NativeTask::ObjectCreatorFunc Library##GetObjectCreator(const 
std::string & name) { \
-NativeObject * ret = NULL; \
 std::map<std::string, NativeTask::ObjectCreatorFunc>::iterator itr = 
Library##ClassMap__.find(name); \
 if (itr != Library##ClassMap__.end()) { \
   return itr->second; \

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad4524c/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/BlockCodec.cc
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/BlockCodec.cc
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/BlockCodec.cc
index a59b9d8..59cd8fb 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/BlockCodec.cc
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/BlockCodec.cc
@@ -23,7 +23,7 @@
 namespace NativeTask {
 
 BlockCompressStream::BlockCompressStream(OutputStream * stream, uint32_t 
bufferSizeHint)
-: CompressStream(stream), _compressedBytesWritten(0), _tempBufferSize(0), 
_tempBuffer(NULL) {
+: CompressStream(stream), _tempBuffer(NULL), _tempBufferSize(0), 
_compressedBytesWritten(0) {
   _hint = bufferSizeHint;
   _blockMax = bufferSizeHint / 2 * 3;
 }
@@ -68,7 +68,7 @@ uint64_t BlockCompressStream::compressedBytesWritten() {
 //
 
 BlockDecompressStream::BlockDecompressStream(InputStream * stream, uint32_t 
bufferSizeHint)
-: DecompressStream(stream), _tempBufferSize(0), _tempBuffer(NULL) {
+: DecompressStream(stream), _tempBuffer(NULL), _tempBufferSize(0) {
   _hint = bufferSizeHint;
   _blockMax = bufferSizeHint / 2 * 3;
   _tempDecompressBuffer = NULL;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad4524c/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/BatchHandler.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/BatchHandler.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/BatchHandler.h
index bc022a8..15ce2ea 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/BatchHandler.h
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/BatchHandler.h
@@ -103,10 +103,10 @@ protected:
*/
   inline void output(const char * buff, uint32_t length) {
 while (length > 0) {
-  if (length > _out.remain()) {
+  uint32_t remain = _out.remain();
+  if (length > remain) {
 flushOutput();
   }
-  uint32_t remain = _out.remain();
   uint32_t cp = length < remain ? length : remain;
   simple_memcpy(_out.current(), buff, cp);
   buff += cp;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad4524c/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/CombineHandler.cc
--
diff --git 

[42/52] [abbrv] git commit: YARN-2525. yarn logs command gives error on trunk (Akira AJISAKA via aw)

2014-09-15 Thread jing9
YARN-2525. yarn logs command gives error on trunk (Akira AJISAKA via aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/40364dc4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/40364dc4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/40364dc4

Branch: refs/heads/HDFS-6584
Commit: 40364dc47c03efa295ae03fe8aa8467017fb6f26
Parents: 957414d
Author: Allen Wittenauer a...@apache.org
Authored: Fri Sep 12 10:35:41 2014 -0700
Committer: Allen Wittenauer a...@apache.org
Committed: Fri Sep 12 14:56:41 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt  | 2 ++
 hadoop-yarn-project/hadoop-yarn/bin/yarn | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/40364dc4/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 3993f5e..9002e6a 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -23,6 +23,8 @@ Trunk - Unreleased
 
 YARN-2436. [post-HADOOP-9902] yarn application help doesn't work (aw)
 
+YARN-2525. yarn logs command gives error on trunk (Akira AJISAKA via aw)
+
 Release 2.6.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/40364dc4/hadoop-yarn-project/hadoop-yarn/bin/yarn
--
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn 
b/hadoop-yarn-project/hadoop-yarn/bin/yarn
index 371d23d..12f7bb5 100644
--- a/hadoop-yarn-project/hadoop-yarn/bin/yarn
+++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn
@@ -99,7 +99,7 @@ case ${COMMAND} in
 
CLASS='org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer'
   ;;
   logs)
-CLASS=org.apache.hadoop.yarn.logaggregation.LogDumper
+CLASS=org.apache.hadoop.yarn.client.cli.LogsCLI
 hadoop_debug Append YARN_CLIENT_OPTS onto YARN_OPTS
 YARN_OPTS=${YARN_OPTS} ${YARN_CLIENT_OPTS}
   ;;



[48/52] [abbrv] git commit: YARN-611. Added an API to let apps specify an interval beyond which AM failures should be ignored towards counting max-attempts. Contributed by Xuan Gong.

2014-09-15 Thread jing9
YARN-611. Added an API to let apps specify an interval beyond which AM failures 
should be ignored towards counting max-attempts. Contributed by Xuan Gong.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/14e2639f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/14e2639f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/14e2639f

Branch: refs/heads/HDFS-6584
Commit: 14e2639fd0d53f7e0b58f2f4744af44983d4e867
Parents: 98588cf
Author: Vinod Kumar Vavilapalli vino...@apache.org
Authored: Sat Sep 13 18:04:05 2014 -0700
Committer: Vinod Kumar Vavilapalli vino...@apache.org
Committed: Sat Sep 13 18:04:05 2014 -0700

--
 .../mapreduce/v2/app/ControlledClock.java   |  43 ---
 .../v2/app/job/impl/TestTaskAttempt.java|   3 +-
 .../v2/app/rm/TestRMContainerAllocator.java |   3 +-
 .../mapreduce/v2/hs/TestHistoryFileManager.java |   3 +-
 .../hadoop-mapreduce-client-jobclient/pom.xml   |   6 +
 .../v2/TestSpeculativeExecutionWithMRApp.java   |   3 +-
 hadoop-yarn-project/CHANGES.txt |   6 +-
 .../records/ApplicationSubmissionContext.java   |  43 +++
 .../src/main/proto/yarn_protos.proto|   1 +
 .../pb/ApplicationSubmissionContextPBImpl.java  |  13 ++
 .../hadoop/yarn/util/ControlledClock.java   |  42 ++
 .../recovery/FileSystemRMStateStore.java|   1 +
 .../recovery/MemoryRMStateStore.java|   1 +
 .../resourcemanager/recovery/RMStateStore.java  |  10 +-
 .../recovery/ZKRMStateStore.java|   2 +
 .../records/ApplicationAttemptStateData.java|  15 ++-
 .../pb/ApplicationAttemptStateDataPBImpl.java   |  12 ++
 .../server/resourcemanager/rmapp/RMAppImpl.java |  44 ++-
 .../rmapp/attempt/RMAppAttempt.java |   6 +
 .../rmapp/attempt/RMAppAttemptImpl.java |  27 +++-
 .../yarn_server_resourcemanager_recovery.proto  |   1 +
 .../yarn/server/resourcemanager/MockRM.java |  23 +++-
 .../applicationsmanager/TestAMRestart.java  | 127 +++
 .../recovery/RMStateStoreTestBase.java  |   6 +-
 24 files changed, 372 insertions(+), 69 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/14e2639f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/ControlledClock.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/ControlledClock.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/ControlledClock.java
deleted file mode 100644
index 198117b..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/ControlledClock.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-package org.apache.hadoop.mapreduce.v2.app;
-
-import org.apache.hadoop.yarn.util.Clock;
-
-public class ControlledClock implements Clock {
-  private long time = -1;
-  private final Clock actualClock;
-  public ControlledClock(Clock actualClock) {
-this.actualClock = actualClock;
-  }
-  public synchronized void setTime(long time) {
-this.time = time;
-  }
-  public synchronized void reset() {
-time = -1;
-  }
-
-  @Override
-  public synchronized long getTime() {
-if (time != -1) {
-  return time;
-}
-return actualClock.getTime();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/14e2639f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestTaskAttempt.java
 

[01/52] [abbrv] MAPREDUCE-5996. native-task: Rename system tests into standard directory layout. Contributed by Todd Lipcon.

2014-09-15 Thread jing9
Repository: hadoop
Updated Branches:
  refs/heads/HDFS-6584 2689b6ca7 - ece3ecabd


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2cba48f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
deleted file mode 100644
index b665971..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.mapred.nativetask.testutil;
-
-import java.io.IOException;
-import java.util.zip.CRC32;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
-
-public class ResultVerifier {
-  /**
-   * verify the result
-   * 
-   * @param sample
-   *  :nativetask output
-   * @param source
-   *  :yuanwenjian
-   * @throws Exception
-   */
-  public static boolean verify(String sample, String source) throws Exception {
-FSDataInputStream sourcein = null;
-FSDataInputStream samplein = null;
-
-final Configuration conf = new Configuration();
-final FileSystem fs = FileSystem.get(conf);
-final Path hdfssource = new Path(source);
-final Path[] sourcepaths = FileUtil.stat2Paths(fs.listStatus(hdfssource));
-
-final Path hdfssample = new Path(sample);
-final Path[] samplepaths = FileUtil.stat2Paths(fs.listStatus(hdfssample));
-if (sourcepaths == null) {
-  throw new Exception("source file can not be found");
-}
-if (samplepaths == null) {
-  throw new Exception("sample file can not be found");
-}
-if (sourcepaths.length != samplepaths.length) {
-  return false;
-}
-for (int i = 0; i < sourcepaths.length; i++) {
-  final Path sourcepath = sourcepaths[i];
-  // op result file start with part-r like part-r-0
-
-  if (!sourcepath.getName().startsWith(part-r)) {
-continue;
-  }
-  Path samplepath = null;
-  for (int j = 0; j < samplepaths.length; j++) {
-if (samplepaths[i].getName().equals(sourcepath.getName())) {
-  samplepath = samplepaths[i];
-  break;
-}
-  }
-  if (samplepath == null) {
-throw new Exception("cound not found file " + 
samplepaths[0].getParent() + "/" + sourcepath.getName()
-+ " , as sourcepaths has such file");
-  }
-
-  // compare
-  try {
-if (fs.exists(sourcepath)  fs.exists(samplepath)) {
-  sourcein = fs.open(sourcepath);
-  samplein = fs.open(samplepath);
-} else {
-  System.err.println("result file not found:" + sourcepath + " or " + 
samplepath);
-  return false;
-}
-
-CRC32 sourcecrc, samplecrc;
-samplecrc = new CRC32();
-sourcecrc = new CRC32();
-final byte[] bufin = new byte[1 << 16];
-int readnum = 0;
-int totalRead = 0;
-while (samplein.available() > 0) {
-  readnum = samplein.read(bufin);
-  totalRead += readnum;
-  samplecrc.update(bufin, 0, readnum);
-}
-
-if (0 == totalRead) {
-  throw new Exception("source " + sample + " is empty file");
-}
-
-totalRead = 0;
-while (sourcein.available() > 0) {
-  readnum = sourcein.read(bufin);
-  totalRead += readnum;
-  sourcecrc.update(bufin, 0, readnum);
-}
-if (0 == totalRead) {
-  throw new Exception("source " + sample + " is empty file");
-}
-
-  

[37/52] [abbrv] git commit: MAPREDUCE-6074. native-task: Fix release audit warnings

2014-09-15 Thread jing9
MAPREDUCE-6074. native-task: Fix release audit warnings


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7c91f9b1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7c91f9b1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7c91f9b1

Branch: refs/heads/HDFS-6584
Commit: 7c91f9b1484d487e792dca051fbd418697049422
Parents: 4b3f1e2
Author: Todd Lipcon t...@apache.org
Authored: Fri Sep 5 13:41:18 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Fri Sep 5 19:58:41 2014 -0700

--
 LICENSE.txt |  33 +++
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../hadoop-mapreduce-client-nativetask/LICENSE  | 266 ---
 .../hadoop-mapreduce-client-nativetask/pom.xml  |  12 +-
 .../mapred/nativetask/INativeComparable.java|  27 +-
 .../src/main/native/COPYING |  87 --
 ...oop_mapred_nativetask_NativeBatchProcessor.h |  54 
 ...che_hadoop_mapred_nativetask_NativeRuntime.h |  66 -
 .../src/main/native/test.sh |  11 +
 .../nativetask/buffer/TestBufferPushPull.java   |  13 +-
 .../buffer/TestByteBufferReadWrite.java |  16 +-
 .../nativetask/combinertest/CombinerTest.java   |   4 +-
 .../combinertest/OldAPICombinerTest.java|   5 +-
 .../nativetask/combinertest/WordCount.java  |   3 +-
 .../nativetask/compresstest/CompressMapper.java |   4 +-
 .../nativetask/compresstest/CompressTest.java   |   2 +-
 .../nativetask/handlers/TestCombineHandler.java |  13 +-
 .../TestNativeCollectorOnlyHandler.java |  17 +-
 .../hadoop/mapred/nativetask/kvtest/KVJob.java  |   3 +-
 .../mapred/nativetask/kvtest/TestInputFile.java |   7 +-
 .../nativetask/nonsorttest/NonSortTest.java |   2 +-
 .../serde/TestNativeSerialization.java  |  10 +-
 .../nativetask/testutil/BytesFactory.java   |   2 +-
 .../mapred/nativetask/utils/TestBytesUtil.java  |  20 +-
 hadoop-mapreduce-project/pom.xml|   2 +-
 25 files changed, 143 insertions(+), 537 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7c91f9b1/LICENSE.txt
--
diff --git a/LICENSE.txt b/LICENSE.txt
index 946a6df..99989f1 100644
--- a/LICENSE.txt
+++ b/LICENSE.txt
@@ -288,3 +288,36 @@ lz4_encoder.h,lz4hc.h,lz4hc.c,lz4hc_encoder.h},
- LZ4 homepage : http://fastcompression.blogspot.com/p/lz4.html
- LZ4 source repository : http://code.google.com/p/lz4/
 */
+
+
+For 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest
+-
+Copyright 2008, Google Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+* Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+* Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+* Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7c91f9b1/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index ea17907..539e7be 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -22,3 +22,4 @@ MAPREDUCE-6056. native-task: move system test working dir to 
target dir and clea
 MAPREDUCE-6055. native-task: findbugs, interface annotations, 

[06/52] [abbrv] git commit: Merge trunk into branch

2014-09-15 Thread jing9
Merge trunk into branch


git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1613007 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5149a8a6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5149a8a6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5149a8a6

Branch: refs/heads/HDFS-6584
Commit: 5149a8a6f1d2fc673a7d011d7e0e1a5407ceac4c
Parents: b2cba48 2054453
Author: Todd Lipcon t...@apache.org
Authored: Thu Jul 24 06:23:41 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Thu Jul 24 06:23:41 2014 +

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  14 +
 .../apache/hadoop/fs/ChecksumFileSystem.java|   4 +-
 .../java/org/apache/hadoop/fs/FileContext.java  |  29 ++
 .../java/org/apache/hadoop/fs/FileSystem.java   |   2 +-
 .../org/apache/hadoop/fs/viewfs/ChRootedFs.java |  34 ++
 .../apache/hadoop/fs/viewfs/ViewFileSystem.java |  34 ++
 .../org/apache/hadoop/fs/viewfs/ViewFs.java |  80 +
 .../security/alias/JavaKeyStoreProvider.java|   5 +-
 .../apache/hadoop/util/DirectBufferPool.java| 109 ++
 .../org/apache/hadoop/fs/TestDFVariations.java  |  21 +-
 .../apache/hadoop/fs/shell/TestPathData.java|  12 +-
 .../fs/viewfs/ViewFileSystemBaseTest.java   |  30 ++
 .../apache/hadoop/fs/viewfs/ViewFsBaseTest.java |  30 ++
 .../hadoop/util/TestDirectBufferPool.java   |  97 +
 .../java/org/apache/hadoop/nfs/NfsExports.java  |   7 +-
 .../hadoop/hdfs/nfs/mount/RpcProgramMountd.java |  19 +-
 .../hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java|   5 +-
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  10 +
 .../apache/hadoop/hdfs/BlockReaderLocal.java|   2 +-
 .../hadoop/hdfs/BlockReaderLocalLegacy.java |   2 +-
 .../hadoop/hdfs/protocol/ClientProtocol.java|   2 +-
 .../protocol/datatransfer/PacketReceiver.java   |   2 +-
 .../server/datanode/BlockPoolSliceScanner.java  |  54 ++-
 .../hadoop/hdfs/server/namenode/FSEditLog.java  |   3 +-
 .../hdfs/server/namenode/FSEditLogLoader.java   |   4 +
 .../hdfs/server/namenode/FSEditLogOp.java   |   5 +
 .../hdfs/server/namenode/FSNamesystem.java  |  62 +++-
 .../server/namenode/XAttrPermissionFilter.java  |  17 +-
 .../hadoop/hdfs/util/DirectBufferPool.java  | 107 --
 .../hdfs/web/resources/XAttrNameParam.java  |   4 +-
 .../org/apache/hadoop/hdfs/TestDFSShell.java|  69 
 .../hdfs/server/namenode/FSXAttrBaseTest.java   | 351 +--
 .../server/namenode/TestNamenodeRetryCache.java |   4 +-
 .../namenode/ha/TestRetryCacheWithHA.java   |  54 ++-
 .../hadoop/hdfs/util/TestDirectBufferPool.java  |  97 -
 .../hadoop/hdfs/web/resources/TestParam.java|   6 -
 .../hadoop-hdfs/src/test/resources/editsStored  | Bin 4970 - 4992 bytes
 .../src/test/resources/editsStored.xml  |   2 +
 hadoop-yarn-project/CHANGES.txt |  12 +
 .../dev-support/findbugs-exclude.xml|   6 +
 .../BaseContainerTokenSecretManager.java|   2 +-
 .../yarn/server/nodemanager/NodeManager.java|   6 +-
 .../recovery/NMLeveldbStateStoreService.java| 117 ++-
 .../recovery/NMNullStateStoreService.java   |  30 +-
 .../recovery/NMStateStoreService.java   |  40 ++-
 .../security/NMContainerTokenSecretManager.java | 105 +-
 .../security/NMTokenSecretManagerInNM.java  |   5 +-
 .../recovery/NMMemoryStateStoreService.java |  55 ++-
 .../TestNMLeveldbStateStoreService.java |  96 -
 .../TestNMContainerTokenSecretManager.java  | 144 
 .../security/TestNMTokenSecretManagerInNM.java  |   8 +-
 .../scheduler/fair/FairScheduler.java   |  15 +-
 .../fair/FairSchedulerConfiguration.java|   9 +
 .../security/DelegationTokenRenewer.java|   6 +-
 .../fair/TestFairSchedulerPreemption.java   |   2 +-
 .../security/TestDelegationTokenRenewer.java|  36 +-
 .../src/site/apt/FairScheduler.apt.vm   |   6 +
 .../src/site/apt/ResourceManagerRest.apt.vm | 105 +++---
 58 files changed, 1789 insertions(+), 405 deletions(-)
--




[19/52] [abbrv] git commit: MAPREDUCE-6025: distribute nativetask.so to right folder (Manu Zhang)

2014-09-15 Thread jing9
MAPREDUCE-6025: distribute nativetask.so to right folder (Manu Zhang)

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1618120 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6d39367f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6d39367f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6d39367f

Branch: refs/heads/HDFS-6584
Commit: 6d39367f6a95383b634d79b6df35a278e0d89fe0
Parents: eace4dd
Author: Sean Zhong seanzh...@apache.org
Authored: Fri Aug 15 08:55:49 2014 +
Committer: Sean Zhong seanzh...@apache.org
Committed: Fri Aug 15 08:55:49 2014 +

--
 hadoop-dist/pom.xml  |  1 -
 hadoop-mapreduce-project/pom.xml | 28 
 2 files changed, 28 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6d39367f/hadoop-dist/pom.xml
--
diff --git a/hadoop-dist/pom.xml b/hadoop-dist/pom.xml
index 62a27f4..149f404 100644
--- a/hadoop-dist/pom.xml
+++ b/hadoop-dist/pom.xml
@@ -122,7 +122,6 @@
   run cp -r 
$ROOT/hadoop-hdfs-project/hadoop-hdfs-nfs/target/hadoop-hdfs-nfs-${project.version}/*
 .
   run cp -r 
$ROOT/hadoop-yarn-project/target/hadoop-yarn-project-${project.version}/* .
   run cp -r 
$ROOT/hadoop-mapreduce-project/target/hadoop-mapreduce-${project.version}/* .
-  run cp -r 
$ROOT/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/target/hadoop-mapreduce-client-nativetask-${project.version}/*
 .
   run cp -r 
$ROOT/hadoop-tools/hadoop-tools-dist/target/hadoop-tools-dist-${project.version}/*
 .
   echo
   echo Hadoop dist layout available at: 
${project.build.directory}/hadoop-${project.version}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6d39367f/hadoop-mapreduce-project/pom.xml
--
diff --git a/hadoop-mapreduce-project/pom.xml b/hadoop-mapreduce-project/pom.xml
index 772a2a3..9110942 100644
--- a/hadoop-mapreduce-project/pom.xml
+++ b/hadoop-mapreduce-project/pom.xml
@@ -236,6 +236,34 @@
   /plugin
   plugin
 groupIdorg.apache.maven.plugins/groupId
+<artifactId>maven-antrun-plugin</artifactId>
+<executions>
+  <execution>
+<id>dist</id>
+<phase>prepare-package</phase>
+<goals>
+  <goal>run</goal>
+</goals>
+<configuration>
+  <target>
+<echo 
file="${project.build.directory}/dist-copynativelibs.sh">
+  
LIB_DIR=${mr.basedir}/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/target/native/target/usr/local/lib
+  if [ -d $${LIB_DIR} ] ; then
+
TARGET_DIR=${project.build.directory}/${project.artifactId}-${project.version}/lib/native
+mkdir -p $${TARGET_DIR}
+cp -R $${LIB_DIR}/lib* $${TARGET_DIR}
+  fi
+</echo>
+<exec executable="sh" dir="${project.build.directory}" 
failonerror="true">
+  <arg line="./dist-copynativelibs.sh"/>
+</exec>
+  </target>
+</configuration>
+  </execution>
+</executions>
+  </plugin>
+  plugin
+groupIdorg.apache.maven.plugins/groupId
 artifactIdmaven-assembly-plugin/artifactId
 dependencies
   dependency



[45/52] [abbrv] git commit: MAPREDUCE-2841. Merge remote-tracking branch 'apache/MR-2841' into trunk

2014-09-15 Thread jing9
MAPREDUCE-2841. Merge remote-tracking branch 'apache/MR-2841' into trunk


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8a3a327c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8a3a327c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8a3a327c

Branch: refs/heads/HDFS-6584
Commit: 8a3a327cd3c7fd1049b521325e485c59137855e8
Parents: a0ad975 b160707
Author: Todd Lipcon t...@apache.org
Authored: Fri Sep 12 18:31:53 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Fri Sep 12 18:31:53 2014 -0700

--
 LICENSE.txt |33 +
 .../assemblies/hadoop-mapreduce-dist.xml| 7 +
 .../CHANGES.MAPREDUCE-2841.txt  |26 +
 .../hadoop-mapreduce-client-nativetask/pom.xml  |   216 +
 .../src/CMakeLists.txt  |   276 +
 .../src/JNIFlags.cmake  |   118 +
 .../src/config.h.cmake  |23 +
 .../hadoop/mapred/nativetask/Command.java   |57 +
 .../mapred/nativetask/CommandDispatcher.java|33 +
 .../hadoop/mapred/nativetask/Constants.java |48 +
 .../hadoop/mapred/nativetask/DataChannel.java   |41 +
 .../hadoop/mapred/nativetask/DataReceiver.java  |37 +
 .../mapred/nativetask/HadoopPlatform.java   |84 +
 .../mapred/nativetask/ICombineHandler.java  |44 +
 .../mapred/nativetask/INativeComparable.java|54 +
 .../mapred/nativetask/INativeHandler.java   |53 +
 .../mapred/nativetask/NativeBatchProcessor.java |   279 +
 .../mapred/nativetask/NativeDataSource.java |47 +
 .../mapred/nativetask/NativeDataTarget.java |47 +
 .../NativeMapOutputCollectorDelegator.java  |   171 +
 .../hadoop/mapred/nativetask/NativeRuntime.java |   197 +
 .../hadoop/mapred/nativetask/Platform.java  |   100 +
 .../hadoop/mapred/nativetask/Platforms.java |79 +
 .../mapred/nativetask/StatusReportChecker.java  |99 +
 .../hadoop/mapred/nativetask/TaskContext.java   |94 +
 .../mapred/nativetask/buffer/BufferType.java|27 +
 .../nativetask/buffer/ByteBufferDataReader.java |   148 +
 .../nativetask/buffer/ByteBufferDataWriter.java |   169 +
 .../nativetask/buffer/DataInputStream.java  |27 +
 .../nativetask/buffer/DataOutputStream.java |39 +
 .../mapred/nativetask/buffer/InputBuffer.java   |   136 +
 .../mapred/nativetask/buffer/OutputBuffer.java  |76 +
 .../nativetask/handlers/BufferPullee.java   |   121 +
 .../nativetask/handlers/BufferPuller.java   |   203 +
 .../nativetask/handlers/BufferPushee.java   |   151 +
 .../nativetask/handlers/BufferPusher.java   |91 +
 .../nativetask/handlers/CombinerHandler.java|   145 +
 .../mapred/nativetask/handlers/IDataLoader.java |37 +
 .../handlers/NativeCollectorOnlyHandler.java|   171 +
 .../serde/BoolWritableSerializer.java   |35 +
 .../serde/ByteWritableSerializer.java   |35 +
 .../serde/BytesWritableSerializer.java  |48 +
 .../nativetask/serde/DefaultSerializer.java |71 +
 .../serde/DoubleWritableSerializer.java |35 +
 .../serde/FloatWritableSerializer.java  |36 +
 .../mapred/nativetask/serde/IKVSerializer.java  |48 +
 .../nativetask/serde/INativeSerializer.java |49 +
 .../nativetask/serde/IntWritableSerializer.java |35 +
 .../mapred/nativetask/serde/KVSerializer.java   |   115 +
 .../serde/LongWritableSerializer.java   |34 +
 .../nativetask/serde/NativeSerialization.java   |91 +
 .../serde/NullWritableSerializer.java   |35 +
 .../serde/SerializationFramework.java   |35 +
 .../mapred/nativetask/serde/TextSerializer.java |49 +
 .../serde/VIntWritableSerializer.java   |27 +
 .../serde/VLongWritableSerializer.java  |27 +
 .../mapred/nativetask/util/BytesUtil.java   |   172 +
 .../mapred/nativetask/util/ConfigUtil.java  |47 +
 .../nativetask/util/LocalJobOutputFiles.java|   159 +
 .../nativetask/util/NativeTaskOutput.java   |   106 +
 .../nativetask/util/NativeTaskOutputFiles.java  |   170 +
 .../mapred/nativetask/util/OutputUtil.java  |47 +
 .../mapred/nativetask/util/ReadWriteBuffer.java |   159 +
 .../mapred/nativetask/util/SizedWritable.java   |55 +
 .../src/main/native/gtest/gtest-all.cc  |  9120 
 .../src/main/native/gtest/gtest_main.cc |39 +
 .../src/main/native/gtest/include/gtest/gtest.h | 19541 +
 .../src/main/native/src/NativeTask.h|   507 +
 .../src/main/native/src/codec/BlockCodec.cc |   179 +
 .../src/main/native/src/codec/BlockCodec.h  |98 +
 .../src/main/native/src/codec/GzipCodec.cc  |   192 +
 .../src/main/native/src/codec/GzipCodec.h   |82 +
 .../src/main/native/src/codec/Lz4Codec.cc   |88 

[18/52] [abbrv] git commit: MAPREDUCE-6035. native-task: sources/test-sources jar distribution. Contributed by Manu Zhang.

2014-09-15 Thread jing9
MAPREDUCE-6035. native-task: sources/test-sources jar distribution. Contributed 
by Manu Zhang.

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1617879 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/eace4dd5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/eace4dd5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/eace4dd5

Branch: refs/heads/HDFS-6584
Commit: eace4dd50b1214a8842e47c522da258e1746386a
Parents: 808bf8b
Author: Todd Lipcon t...@apache.org
Authored: Thu Aug 14 04:56:43 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Thu Aug 14 04:56:43 2014 +

--
 .../src/main/resources/assemblies/hadoop-mapreduce-dist.xml   | 7 +++
 hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt   | 1 +
 2 files changed, 8 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/eace4dd5/hadoop-assemblies/src/main/resources/assemblies/hadoop-mapreduce-dist.xml
--
diff --git 
a/hadoop-assemblies/src/main/resources/assemblies/hadoop-mapreduce-dist.xml 
b/hadoop-assemblies/src/main/resources/assemblies/hadoop-mapreduce-dist.xml
index 0cb01ea..749e16a 100644
--- a/hadoop-assemblies/src/main/resources/assemblies/hadoop-mapreduce-dist.xml
+++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-mapreduce-dist.xml
@@ -136,6 +136,13 @@
   /includes
 /fileSet
 fileSet
+  
directoryhadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/target/directory
+  
outputDirectory/share/hadoop/${hadoop.component}/sources/outputDirectory
+  includes
+include*-sources.jar/include
+  /includes
+/fileSet
+fileSet
   
directoryhadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/target/directory
   
outputDirectory/share/hadoop/${hadoop.component}/sources/outputDirectory
   includes

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eace4dd5/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 123eb6d..95f7858 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -14,3 +14,4 @@ MAPREDUCE-5976. native-task: should not fail to build if 
snappy is missing (Manu
 MAPREDUCE-5978. native-task: remove test case for not supported codec 
Bzip2Codec and DefaultCodec (Manu Zhang)
 MAPREDUCE-6006. native-task: add native tests to maven and fix bug in pom.xml 
(Binglin Chang via todd)
 MAPREDUCE-6026. native-task: fix logging (Manu Zhang via todd)
+MAPREDUCE-6035. native-task: sources/test-sources jar distribution (Manu Zhang 
via todd)



[43/52] [abbrv] git commit: YARN-2456. Possible livelock in CapacityScheduler when RM is recovering apps. Contributed by Jian He

2014-09-15 Thread jing9
YARN-2456. Possible livelock in CapacityScheduler when RM is recovering
apps. Contributed by Jian He


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e65ae575
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e65ae575
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e65ae575

Branch: refs/heads/HDFS-6584
Commit: e65ae575a059a426c4c38fdabe22a31eabbb349e
Parents: 40364dc
Author: XuanGong xg...@apache.org
Authored: Fri Sep 12 15:21:46 2014 -0700
Committer: XuanGong xg...@apache.org
Committed: Fri Sep 12 15:21:46 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt |  3 ++
 .../resourcemanager/recovery/RMStateStore.java  |  3 +-
 .../server/resourcemanager/TestRMRestart.java   | 43 
 3 files changed, 48 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e65ae575/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 9002e6a..efc3e09 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -345,6 +345,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2484. FileSystemRMStateStore#readFile/writeFile should close
 FSData(In|Out)putStream in final block (Tsuyoshi OZAWA via jlowe)
 
+YARN-2456. Possible livelock in CapacityScheduler when RM is recovering 
apps.
+(Jian He via xgong)
+
 Release 2.5.1 - 2014-09-05
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e65ae575/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStore.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStore.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStore.java
index ac51a17..df4f3a9 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStore.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/recovery/RMStateStore.java
@@ -22,6 +22,7 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
 
 import javax.crypto.SecretKey;
 
@@ -421,7 +422,7 @@ public abstract class RMStateStore extends AbstractService {
*/
   public static class RMState {
 MapApplicationId, ApplicationState appState =
-new HashMapApplicationId, ApplicationState();
+new TreeMapApplicationId, ApplicationState();
 
 RMDTSecretManagerState rmSecretManagerState = new RMDTSecretManagerState();
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e65ae575/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java
index 7d511db..caa5647 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMRestart.java
@@ -19,9 +19,11 @@
 package org.apache.hadoop.yarn.server.resourcemanager;
 
 import static org.mockito.Matchers.isA;
+import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 import java.io.File;
 import java.io.FileOutputStream;
@@ -1656,6 +1658,47 @@ public class TestRMRestart {
 rm2.waitForState(app1.getApplicationId(), RMAppState.FAILED);
   }
 
+  @Test (timeout = 2)
+  public void testAppRecoveredInOrderOnRMRestart() throws Exception {
+MemoryRMStateStore memStore = new MemoryRMStateStore();
+  

[17/52] [abbrv] git commit: MAPREDUCE-6026. native-task: fix logging. Contributed by Manu Zhang.

2014-09-15 Thread jing9
MAPREDUCE-6026. native-task: fix logging. Contributed by Manu Zhang.

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1617878 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/808bf8ba
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/808bf8ba
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/808bf8ba

Branch: refs/heads/HDFS-6584
Commit: 808bf8bac1d4bdf83fb3bdf6e096b4038c9567ce
Parents: 886338f
Author: Todd Lipcon t...@apache.org
Authored: Thu Aug 14 04:53:36 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Thu Aug 14 04:53:36 2014 +

--
 .../CHANGES.MAPREDUCE-2841.txt   |  1 +
 .../hadoop/mapred/nativetask/HadoopPlatform.java |  5 +++--
 .../hadoop/mapred/nativetask/Platforms.java  |  5 +++--
 .../combinertest/LargeKVCombinerTest.java|  5 -
 .../hadoop/mapred/nativetask/kvtest/KVTest.java  |  8 ++--
 .../mapred/nativetask/kvtest/LargeKVTest.java|  7 +--
 .../mapred/nativetask/kvtest/TestInputFile.java  |  9 ++---
 .../EnforceNativeOutputCollectorDelegator.java   |  6 --
 .../src/test/resources/log4j.properties  | 19 +++
 9 files changed, 51 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/808bf8ba/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index c3c6ab9..123eb6d 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -13,3 +13,4 @@ MAPREDUCE-5984. native-task: Reuse lz4 sources in 
hadoop-common (Binglin Chang)
 MAPREDUCE-5976. native-task: should not fail to build if snappy is missing 
(Manu Zhang)
 MAPREDUCE-5978. native-task: remove test case for not supported codec 
Bzip2Codec and DefaultCodec (Manu Zhang)
 MAPREDUCE-6006. native-task: add native tests to maven and fix bug in pom.xml 
(Binglin Chang via todd)
+MAPREDUCE-6026. native-task: fix logging (Manu Zhang via todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/808bf8ba/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/HadoopPlatform.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/HadoopPlatform.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/HadoopPlatform.java
index 1c4ede5..7599bb8 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/HadoopPlatform.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/HadoopPlatform.java
@@ -19,6 +19,8 @@ package org.apache.hadoop.mapred.nativetask;
 
 import java.io.IOException;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.ByteWritable;
 import org.apache.hadoop.io.BytesWritable;
@@ -33,10 +35,9 @@ import org.apache.hadoop.io.VLongWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.nativetask.serde.*;
-import org.apache.log4j.Logger;
 
 public class HadoopPlatform extends Platform {
-  private static final Logger LOG = Logger.getLogger(HadoopPlatform.class);
+  private static final Log LOG = LogFactory.getLog(HadoopPlatform.class);
 
   public HadoopPlatform() throws IOException {
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/808bf8ba/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/Platforms.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/Platforms.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/Platforms.java
index 154bbc8..d0a8496 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/Platforms.java
+++ 

[51/52] [abbrv] git commit: HADOOP-10400. Incorporate new S3A FileSystem implementation. Contributed by Jordan Mendelson and Dave Wang.

2014-09-15 Thread jing9
HADOOP-10400. Incorporate new S3A FileSystem implementation. Contributed by 
Jordan Mendelson and Dave Wang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/24d920b8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/24d920b8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/24d920b8

Branch: refs/heads/HDFS-6584
Commit: 24d920b80eb3626073925a1d0b6dcf148add8cc0
Parents: fc741b5
Author: Aaron T. Myers a...@apache.org
Authored: Mon Sep 15 08:27:07 2014 -0700
Committer: Aaron T. Myers a...@apache.org
Committed: Mon Sep 15 08:27:07 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |3 +
 .../src/main/conf/log4j.properties  |5 +
 .../src/main/resources/core-default.xml |   86 ++
 hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml  |8 +
 hadoop-project/pom.xml  |   26 +-
 hadoop-tools/hadoop-aws/pom.xml |   10 +
 .../fs/s3a/AnonymousAWSCredentialsProvider.java |   37 +
 .../fs/s3a/BasicAWSCredentialsProvider.java |   51 +
 .../org/apache/hadoop/fs/s3a/Constants.java |   90 ++
 .../org/apache/hadoop/fs/s3a/S3AFileStatus.java |   62 ++
 .../org/apache/hadoop/fs/s3a/S3AFileSystem.java | 1019 ++
 .../apache/hadoop/fs/s3a/S3AInputStream.java|  207 
 .../apache/hadoop/fs/s3a/S3AOutputStream.java   |  208 
 .../services/org.apache.hadoop.fs.FileSystem|1 +
 .../hadoop/fs/contract/s3a/S3AContract.java |   43 +
 .../fs/contract/s3a/TestS3AContractCreate.java  |   38 +
 .../fs/contract/s3a/TestS3AContractDelete.java  |   31 +
 .../fs/contract/s3a/TestS3AContractMkdir.java   |   34 +
 .../fs/contract/s3a/TestS3AContractOpen.java|   31 +
 .../fs/contract/s3a/TestS3AContractRename.java  |   64 ++
 .../fs/contract/s3a/TestS3AContractRootDir.java |   35 +
 .../fs/contract/s3a/TestS3AContractSeek.java|   31 +
 .../fs/s3a/S3AFileSystemContractBaseTest.java   |  327 ++
 .../src/test/resources/contract/s3a.xml |  105 ++
 .../src/test/resources/contract/s3n.xml |7 +-
 hadoop-tools/hadoop-azure/pom.xml   |   10 +-
 26 files changed, 2552 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d920b8/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 051eac1..c2ae5ed 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -342,6 +342,9 @@ Release 2.6.0 - UNRELEASED
 HADOOP-10893. isolated classloader on the client side (Sangjin Lee via
 jlowe)
 
+HADOOP-10400. Incorporate new S3A FileSystem implementation. (Jordan
+Mendelson and Dave Wang via atm)
+
   IMPROVEMENTS
 
 HADOOP-10808. Remove unused native code for munlock. (cnauroth)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d920b8/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
--
diff --git a/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties 
b/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
index ef9acbf..5fa21fa 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
+++ b/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
@@ -174,6 +174,11 @@ 
log4j.appender.MRAUDIT.MaxBackupIndex=${mapred.audit.log.maxbackupindex}
 # Jets3t library
 log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
 
+# AWS SDK & S3A FileSystem
+log4j.logger.com.amazonaws=ERROR
+log4j.logger.com.amazonaws.http.AmazonHttpClient=ERROR
+log4j.logger.org.apache.hadoop.fs.s3a.S3AFileSystem=WARN
+
 #
 # Event Counter Appender
 # Sends counts of logging messages at different severity levels to Hadoop 
Metrics.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d920b8/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml 
b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index 3cc7545..828dec2 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -690,6 +690,92 @@ for ldap providers in the same way as above does.
 /property
 
 property
+  namefs.s3a.access.key/name
+  descriptionAWS access key ID. Omit for Role-based 
authentication./description
+/property
+
+property
+  namefs.s3a.secret.key/name
+  descriptionAWS secret key. Omit for 

git commit: YARN-2549. TestContainerLaunch fails due to classpath problem with hamcrest classes. Contributed by Chris Nauroth.

2014-09-15 Thread cnauroth
Repository: hadoop
Updated Branches:
  refs/heads/trunk 43b030300 - 9d4ec97c9


YARN-2549. TestContainerLaunch fails due to classpath problem with hamcrest 
classes. Contributed by Chris Nauroth.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9d4ec97c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9d4ec97c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9d4ec97c

Branch: refs/heads/trunk
Commit: 9d4ec97c95b51c6f99112030da91950551b22b9c
Parents: 43b0303
Author: cnauroth cnaur...@apache.org
Authored: Mon Sep 15 11:53:57 2014 -0700
Committer: cnauroth cnaur...@apache.org
Committed: Mon Sep 15 11:53:57 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt   |  3 +++
 .../hadoop-yarn-server-nodemanager/pom.xml| 14 +-
 2 files changed, 12 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9d4ec97c/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 9c1abf7..5cfc460 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -359,6 +359,9 @@ Release 2.6.0 - UNRELEASED
 header and made it accept multiple origins in CrossOriginFilter. (Jonathan
 Eagles via zjshen)
 
+YARN-2549. TestContainerLaunch fails due to classpath problem with hamcrest
+classes. (cnauroth)
+
 Release 2.5.1 - 2014-09-05
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9d4ec97c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
index 370cc36..b1efa5f 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
@@ -102,11 +102,6 @@
   groupIdorg.apache.hadoop/groupId
   artifactIdhadoop-annotations/artifactId
 /dependency
-dependency
-  groupIdorg.mockito/groupId
-  artifactIdmockito-all/artifactId
-  scopetest/scope
-/dependency
 !-- 'mvn dependency:analyze' fails to detect use of this dependency --
 dependency
   groupIdorg.apache.hadoop/groupId
@@ -122,12 +117,21 @@
   groupIdcom.google.protobuf/groupId
   artifactIdprotobuf-java/artifactId
 /dependency
+!--
+junit must be before mockito-all on the classpath.  mockito-all bundles its
+own copy of the hamcrest classes, but they don't match our junit version.
+--
 dependency
   groupIdjunit/groupId
   artifactIdjunit/artifactId
   scopetest/scope
 /dependency
 dependency
+  groupIdorg.mockito/groupId
+  artifactIdmockito-all/artifactId
+  scopetest/scope
+/dependency
+dependency
   groupIdcom.google.inject/groupId
   artifactIdguice/artifactId
 /dependency



git commit: YARN-2549. TestContainerLaunch fails due to classpath problem with hamcrest classes. Contributed by Chris Nauroth.

2014-09-15 Thread cnauroth
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 d59555785 - aaef460e7


YARN-2549. TestContainerLaunch fails due to classpath problem with hamcrest 
classes. Contributed by Chris Nauroth.

(cherry picked from commit 9d4ec97c95b51c6f99112030da91950551b22b9c)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/aaef460e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/aaef460e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/aaef460e

Branch: refs/heads/branch-2
Commit: aaef460e7a4a75e723557ea027ba9df329350ec9
Parents: d595557
Author: cnauroth cnaur...@apache.org
Authored: Mon Sep 15 11:53:57 2014 -0700
Committer: cnauroth cnaur...@apache.org
Committed: Mon Sep 15 11:54:41 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt   |  3 +++
 .../hadoop-yarn-server-nodemanager/pom.xml| 14 +-
 2 files changed, 12 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/aaef460e/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index c675e75..6b29f2f 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -331,6 +331,9 @@ Release 2.6.0 - UNRELEASED
 header and made it accept multiple origins in CrossOriginFilter. (Jonathan
 Eagles via zjshen)
 
+YARN-2549. TestContainerLaunch fails due to classpath problem with hamcrest
+classes. (cnauroth)
+
 Release 2.5.1 - 2014-09-05
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/aaef460e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
index 24bc784..e72d489 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
@@ -120,11 +120,6 @@
   groupIdorg.apache.hadoop/groupId
   artifactIdhadoop-annotations/artifactId
 /dependency
-dependency
-  groupIdorg.mockito/groupId
-  artifactIdmockito-all/artifactId
-  scopetest/scope
-/dependency
 !-- 'mvn dependency:analyze' fails to detect use of this dependency --
 dependency
   groupIdorg.apache.hadoop/groupId
@@ -140,12 +135,21 @@
   groupIdcom.google.protobuf/groupId
   artifactIdprotobuf-java/artifactId
 /dependency
+!--
+junit must be before mockito-all on the classpath.  mockito-all bundles its
+own copy of the hamcrest classes, but they don't match our junit version.
+--
 dependency
   groupIdjunit/groupId
   artifactIdjunit/artifactId
   scopetest/scope
 /dependency
 dependency
+  groupIdorg.mockito/groupId
+  artifactIdmockito-all/artifactId
+  scopetest/scope
+/dependency
+dependency
   groupIdcom.google.inject/groupId
   artifactIdguice/artifactId
 /dependency



git commit: YARN-2529. Generic history service RPC interface doesn't work when service authorization is enabled. Contributed by Zhijie Shen

2014-09-15 Thread jianhe
Repository: hadoop
Updated Branches:
  refs/heads/trunk 9d4ec97c9 - 88e329fdf


YARN-2529. Generic history service RPC interface doesn't work when service 
authorization is enabled. Contributed by Zhijie Shen


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/88e329fd
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/88e329fd
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/88e329fd

Branch: refs/heads/trunk
Commit: 88e329fdfb27c11f1964c0345188bcf328c95fd6
Parents: 9d4ec97
Author: Jian He jia...@apache.org
Authored: Mon Sep 15 13:13:54 2014 -0700
Committer: Jian He jia...@apache.org
Committed: Mon Sep 15 13:13:54 2014 -0700

--
 .../src/main/conf/hadoop-policy.xml |  9 
 hadoop-yarn-project/CHANGES.txt |  3 ++
 .../hadoop/yarn/conf/YarnConfiguration.java |  4 ++
 .../ApplicationHistoryClientService.java| 15 ++-
 .../authorize/TimelinePolicyProvider.java   | 44 
 5 files changed, 74 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/88e329fd/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml 
b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
index 39e4f4d..2bf5c02 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
+++ b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
@@ -214,4 +214,13 @@
 A special value of * means all users are allowed./description
   /property
 
+  property
+namesecurity.applicationhistory.protocol.acl/name
+value*/value
+descriptionACL for ApplicationHistoryProtocol, used by the timeline
+server and the generic history service client to communicate with each 
other.
+The ACL is a comma-separated list of user and group names. The user and
+group list is separated by a blank. For e.g. alice,bob users,wheel.
+A special value of * means all users are allowed./description
+  /property
 /configuration

http://git-wip-us.apache.org/repos/asf/hadoop/blob/88e329fd/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 5cfc460..629d3b3 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -362,6 +362,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2549. TestContainerLaunch fails due to classpath problem with hamcrest
 classes. (cnauroth)
 
+YARN-2529. Generic history service RPC interface doesn't work when service
+authorization is enabled. (Zhijie Shen via jianhe)
+
 Release 2.5.1 - 2014-09-05
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/88e329fd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 43f510d..a92b358 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -991,6 +991,10 @@ public class YarnConfiguration extends Configuration {
   YARN_SECURITY_SERVICE_AUTHORIZATION_RESOURCE_LOCALIZER =
   security.resourcelocalizer.protocol.acl;
 
+  public static final String
+  YARN_SECURITY_SERVICE_AUTHORIZATION_APPLICATIONHISTORY_PROTOCOL =
+  security.applicationhistory.protocol.acl;
+
   /** No. of milliseconds to wait between sending a SIGTERM and SIGKILL
* to a running container */
   public static final String NM_SLEEP_DELAY_BEFORE_SIGKILL_MS =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/88e329fd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
 

git commit: YARN-2529. Generic history service RPC interface doesn't work when service authorization is enabled. Contributed by Zhijie Shen (cherry picked from commit 88e329fdfb27c11f1964c0345188bcf32

2014-09-15 Thread jianhe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 aaef460e7 - 1c3f83783


YARN-2529. Generic history service RPC interface doesn't work when service 
authorization is enabled. Contributed by Zhijie Shen
(cherry picked from commit 88e329fdfb27c11f1964c0345188bcf328c95fd6)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1c3f8378
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1c3f8378
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1c3f8378

Branch: refs/heads/branch-2
Commit: 1c3f8378358b01e6ea7312f7f673a9bd37ddf49a
Parents: aaef460
Author: Jian He jia...@apache.org
Authored: Mon Sep 15 13:13:54 2014 -0700
Committer: Jian He jia...@apache.org
Committed: Mon Sep 15 13:15:07 2014 -0700

--
 .../src/main/conf/hadoop-policy.xml |  9 
 hadoop-yarn-project/CHANGES.txt |  3 ++
 .../hadoop/yarn/conf/YarnConfiguration.java |  4 ++
 .../ApplicationHistoryClientService.java| 15 ++-
 .../authorize/TimelinePolicyProvider.java   | 44 
 5 files changed, 74 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1c3f8378/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml 
b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
index 39e4f4d..2bf5c02 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
+++ b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
@@ -214,4 +214,13 @@
 A special value of * means all users are allowed./description
   /property
 
+  property
+namesecurity.applicationhistory.protocol.acl/name
+value*/value
+descriptionACL for ApplicationHistoryProtocol, used by the timeline
+server and the generic history service client to communicate with each 
other.
+The ACL is a comma-separated list of user and group names. The user and
+group list is separated by a blank. For e.g. alice,bob users,wheel.
+A special value of * means all users are allowed./description
+  /property
 /configuration

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1c3f8378/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 6b29f2f..da90f27 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -334,6 +334,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2549. TestContainerLaunch fails due to classpath problem with hamcrest
 classes. (cnauroth)
 
+YARN-2529. Generic history service RPC interface doesn't work when service
+authorization is enabled. (Zhijie Shen via jianhe)
+
 Release 2.5.1 - 2014-09-05
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1c3f8378/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index f3104e2..9e88c97 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -991,6 +991,10 @@ public class YarnConfiguration extends Configuration {
   YARN_SECURITY_SERVICE_AUTHORIZATION_RESOURCE_LOCALIZER =
   security.resourcelocalizer.protocol.acl;
 
+  public static final String
+  YARN_SECURITY_SERVICE_AUTHORIZATION_APPLICATIONHISTORY_PROTOCOL =
+  security.applicationhistory.protocol.acl;
+
   /** No. of milliseconds to wait between sending a SIGTERM and SIGKILL
* to a running container */
   public static final String NM_SLEEP_DELAY_BEFORE_SIGKILL_MS =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1c3f8378/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
 

git commit: YARN-2438. yarn-env.sh cleanup (aw)

2014-09-15 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/trunk 88e329fdf - 0c8aec43f


YARN-2438. yarn-env.sh cleanup (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0c8aec43
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0c8aec43
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0c8aec43

Branch: refs/heads/trunk
Commit: 0c8aec43f0acd4f896df8cd605d97808f9a09887
Parents: 88e329f
Author: Allen Wittenauer a...@apache.org
Authored: Mon Sep 15 14:09:45 2014 -0700
Committer: Allen Wittenauer a...@apache.org
Committed: Mon Sep 15 14:09:50 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt |  2 +
 .../hadoop-yarn/conf/yarn-env.sh| 52 +---
 2 files changed, 37 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c8aec43/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 629d3b3..b41ad82 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -8,6 +8,8 @@ Trunk - Unreleased
 
   IMPROVEMENTS
 
+YARN-2438. yarn-env.sh cleanup (aw)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c8aec43/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh
--
diff --git a/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh 
b/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh
index 755cfd8..0f5e9b7 100644
--- a/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh
+++ b/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh
@@ -1,3 +1,4 @@
+#
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
 # this work for additional information regarding copyright ownership.
@@ -25,27 +26,21 @@
 ##
 
 ###
-# Generic settings for YARN
+# YARN-specific overrides for generic settings
 ###
 
-# User for YARN daemons
-export HADOOP_YARN_USER=${HADOOP_YARN_USER:-yarn}
-
-#
-# By default, YARN will use HADOOP_CONF_DIR. Specify a custom
-# YARN_CONF_DIR here
-# export YARN_CONF_DIR=${YARN_CONF_DIR:-$HADOOP_YARN_HOME/conf}
-#
-
-# Override Hadoop's log directory  file
-# export YARN_LOG_DIR=$HADOOP_YARN_HOME/logs
-# export YARN_LOGFILE='yarn.log'
+# By default, YARN will use HADOOP_LOG_DIR for YARN logging.  Specify a custom
+# log directory for YARN things here:
+# export YARN_LOG_DIR=${HADOOP_LOG_DIR}
 
-# Need a custom-to-YARN service-level authorization policy file?
-# export YARN_POLICYFILE=yarn-policy.xml
+# By default, YARN will use the value of HADOOP_LOGFILE as the 'fallback' log
+# file # when log4j settings are not defined.  Specify a custom YARN log file
+# here:
+# export YARN_LOGFILE=${HADOOP_LOGFILE}
 
-#Override the log4j settings for all YARN apps
-# export YARN_ROOT_LOGGER=INFO,console
+#Override the log4j settings for all YARN apps By default, YARN will use
+# HADOOP_ROOT_LOGGER.
+# export YARN_ROOT_LOGGER=${HADOOP_ROOT_LOGGER}
 
 ###
 # Resource Manager specific parameters
@@ -125,3 +120,26 @@ export HADOOP_YARN_USER=${HADOOP_YARN_USER:-yarn}
 #
 #export YARN_TIMELINESERVER_OPTS=
 
+###
+# Web App Proxy Server specifc parameters
+###
+
+# Specify the max Heapsize for the proxy server using a numerical value
+# in the scale of MB. For example, to specify an jvm option of -Xmx1000m, set
+# the value to 1000.
+# This value will be overridden by an Xmx setting specified in either 
YARN_OPTS,
+# HADOOP_OPTS, and/or YARN_PROXYSERVER_OPTS.
+# If not specified, the default value will be picked from either YARN_HEAPMAX
+# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.
+#
+#export YARN_PROXYSERVER_HEAPSIZE=1000
+
+# Specify the JVM options to be used when starting the proxy server.
+# These options will be appended to the options specified as YARN_OPTS
+# and therefore may override any similar flags set in YARN_OPTS
+#
+# See ResourceManager for some examples
+#
+#export YARN_PROXYSERVER_OPTS=
+
+



git commit: HDFS-7059. HAadmin transitionToActive with forceActive option can show confusing message. Contributed by Rushabh Shah.

2014-09-15 Thread kihwal
Repository: hadoop
Updated Branches:
  refs/heads/trunk 0c8aec43f -> 2f847b335


HDFS-7059. HAadmin transitionToActive with forceActive option can show
confusing message. Contributed by Rushabh Shah.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2f847b33
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2f847b33
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2f847b33

Branch: refs/heads/trunk
Commit: 2f847b3357448184aaec935052f34d116ad3a42c
Parents: 0c8aec4
Author: Kihwal Lee kih...@apache.org
Authored: Mon Sep 15 16:27:22 2014 -0500
Committer: Kihwal Lee kih...@apache.org
Committed: Mon Sep 15 16:27:22 2014 -0500

--
 .../main/java/org/apache/hadoop/ha/HAAdmin.java |  6 --
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../hdfs/tools/TestDFSHAAdminMiniCluster.java   | 20 
 3 files changed, 7 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2f847b33/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
index dfa03e8..bd6366c 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
@@ -143,8 +143,10 @@ public abstract class HAAdmin extends Configured 
implements Tool {
 }
 /*  returns true if other target node is active or some exception occurred 
 and forceActive was not set  */
-if(isOtherTargetNodeActive(argv[0], cmd.hasOption(FORCEACTIVE))) {
-  return -1;
+if(!cmd.hasOption(FORCEACTIVE)) {
+  if(isOtherTargetNodeActive(argv[0], cmd.hasOption(FORCEACTIVE))) {
+return -1;
+  }
 }
 HAServiceTarget target = resolveTarget(argv[0]);
 if (!checkManualStateManagementOK(target)) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2f847b33/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 209c7c1..f596265 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -458,6 +458,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-7061. Add test to verify encryption zone creation after NameNode
 restart without saving namespace. (Stephen Chu via wang)
 
+HDFS-7059. HAadmin transtionToActive with forceActive option can show
+confusing message.
+
   OPTIMIZATIONS
 
 HDFS-6690. Deduplicate xattr names in memory. (wang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2f847b33/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
index 288bcd0..ee1c184 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
@@ -232,26 +232,6 @@ public class TestDFSHAAdminMiniCluster {
 assertFalse(Both namenodes cannot be active, nn1.isActiveState() 
  nn2.isActiveState());

-/*  This test case doesn't allow nn2 to transition to Active even with
-forceActive switch since nn1 is already active  */
-if(nn1.getState() != null  !nn1.getState().
-equals(HAServiceState.STANDBY.name()) ) {
-  cluster.transitionToStandby(0);
-}
-if(nn2.getState() != null  !nn2.getState().
-equals(HAServiceState.STANDBY.name()) ) {
-  cluster.transitionToStandby(1);
-}
-//Making sure both the namenode are in standby state
-assertTrue(nn1.isStandbyState());
-assertTrue(nn2.isStandbyState());
-
-runTool(-transitionToActive, nn1);
-runTool(-transitionToActive, nn2,--forceactive);
-
-assertFalse(Both namenodes cannot be active even though with forceActive,
-nn1.isActiveState()  nn2.isActiveState());
-
 /*  In this test case, we have deliberately shut down nn1 and this will
 cause HAAAdmin#isOtherTargetNodeActive to throw an Exception 
 and transitionToActive for nn2 with  forceActive switch will succeed 



git commit: HDFS-7059. HAadmin transitionToActive with forceActive option can show confusing message. Contributed by Rushabh Shah. (cherry picked from commit 2f847b3357448184aaec935052f34d116ad3a42c)

2014-09-15 Thread kihwal
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 1c3f83783 -> e6e31fbbb


HDFS-7059. HAadmin transitionToActive with forceActive option can show
confusing message. Contributed by Rushabh Shah.
(cherry picked from commit 2f847b3357448184aaec935052f34d116ad3a42c)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e6e31fbb
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e6e31fbb
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e6e31fbb

Branch: refs/heads/branch-2
Commit: e6e31fbbba1f06c2e7caf0b0d1abbe1a20acaf91
Parents: 1c3f837
Author: Kihwal Lee kih...@apache.org
Authored: Mon Sep 15 16:30:22 2014 -0500
Committer: Kihwal Lee kih...@apache.org
Committed: Mon Sep 15 16:30:22 2014 -0500

--
 .../main/java/org/apache/hadoop/ha/HAAdmin.java |  6 --
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../hdfs/tools/TestDFSHAAdminMiniCluster.java   | 20 
 3 files changed, 7 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e6e31fbb/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
index dfa03e8..bd6366c 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
@@ -143,8 +143,10 @@ public abstract class HAAdmin extends Configured 
implements Tool {
 }
 /*  returns true if other target node is active or some exception occurred 
 and forceActive was not set  */
-if(isOtherTargetNodeActive(argv[0], cmd.hasOption(FORCEACTIVE))) {
-  return -1;
+if(!cmd.hasOption(FORCEACTIVE)) {
+  if(isOtherTargetNodeActive(argv[0], cmd.hasOption(FORCEACTIVE))) {
+return -1;
+  }
 }
 HAServiceTarget target = resolveTarget(argv[0]);
 if (!checkManualStateManagementOK(target)) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e6e31fbb/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 9afe524..080f3b1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -200,6 +200,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-7061. Add test to verify encryption zone creation after NameNode
 restart without saving namespace. (Stephen Chu via wang)
 
+HDFS-7059. HAadmin transtionToActive with forceActive option can show
+confusing message.
+
   OPTIMIZATIONS
 
 HDFS-6690. Deduplicate xattr names in memory. (wang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e6e31fbb/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
index 288bcd0..ee1c184 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
@@ -232,26 +232,6 @@ public class TestDFSHAAdminMiniCluster {
 assertFalse(Both namenodes cannot be active, nn1.isActiveState() 
  nn2.isActiveState());

-/*  This test case doesn't allow nn2 to transition to Active even with
-forceActive switch since nn1 is already active  */
-if(nn1.getState() != null  !nn1.getState().
-equals(HAServiceState.STANDBY.name()) ) {
-  cluster.transitionToStandby(0);
-}
-if(nn2.getState() != null  !nn2.getState().
-equals(HAServiceState.STANDBY.name()) ) {
-  cluster.transitionToStandby(1);
-}
-//Making sure both the namenode are in standby state
-assertTrue(nn1.isStandbyState());
-assertTrue(nn2.isStandbyState());
-
-runTool(-transitionToActive, nn1);
-runTool(-transitionToActive, nn2,--forceactive);
-
-assertFalse(Both namenodes cannot be active even though with forceActive,
-nn1.isActiveState()  nn2.isActiveState());
-
 /*  In this test case, we have deliberately shut down nn1 and this will
 cause HAAAdmin#isOtherTargetNodeActive to throw an Exception 
 and 

git commit: HDFS-6912. SharedFileDescriptorFactory should not allocate sparse files (cmccabe)

2014-09-15 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/trunk 2f847b335 -> 8008f0e81


HDFS-6912. SharedFileDescriptorFactory should not allocate sparse files 
(cmccabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8008f0e8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8008f0e8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8008f0e8

Branch: refs/heads/trunk
Commit: 8008f0e8191b1c7adbed96ed4c380208e3a37692
Parents: 2f847b3
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Mon Sep 15 14:47:27 2014 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Mon Sep 15 14:47:27 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 ++
 .../io/nativeio/SharedFileDescriptorFactory.c   | 32 ++--
 2 files changed, 33 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8008f0e8/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index c2ae5ed..6f80eb2 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -708,6 +708,9 @@ Release 2.6.0 - UNRELEASED
 HADOOP-11056. OsSecureRandom.setConf() might leak file descriptors (yzhang
 via cmccabe)
 
+HDFS-6912. SharedFileDescriptorFactory should not allocate sparse files
+(cmccabe)
+
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
   HADOOP-10734. Implement high-performance secure random number sources.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8008f0e8/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c
 
b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c
index 8368402..3a8540c 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c
+++ 
b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c
@@ -37,6 +37,8 @@
 #include sys/types.h
 #include unistd.h
 
+#define ZERO_FULLY_BUF_SIZE 8192
+
 static pthread_mutex_t g_rand_lock = PTHREAD_MUTEX_INITIALIZER;
 
 JNIEXPORT void JNICALL
@@ -83,6 +85,24 @@ done:
   }
 }
 
+static int zero_fully(int fd, jint length)
+{
+  char buf[ZERO_FULLY_BUF_SIZE];
+  int res;
+
+  memset(buf, 0, sizeof(buf));
+  while (length  0) {
+res = write(fd, buf,
+  (length  ZERO_FULLY_BUF_SIZE) ? ZERO_FULLY_BUF_SIZE : length);
+if (res  0) {
+  if (errno == EINTR) continue;
+  return errno;
+}
+length -= res;
+  }
+  return 0;
+}
+
 JNIEXPORT jobject JNICALL
 
Java_org_apache_hadoop_io_nativeio_SharedFileDescriptorFactory_createDescriptor0(
   JNIEnv *env, jclass clazz, jstring jprefix, jstring jpath, jint length)
@@ -136,12 +156,20 @@ 
Java_org_apache_hadoop_io_nativeio_SharedFileDescriptorFactory_createDescriptor0
 (*env)-Throw(env, jthr);
 goto done;
   }
-  if (ftruncate(fd, length)  0) {
-jthr = newIOException(env, ftruncate(%s, %d) failed: error %d (%s),
+  ret = zero_fully(fd, length);
+  if (ret) {
+jthr = newIOException(env, zero_fully(%s, %d) failed: error %d (%s),
   path, length, ret, terror(ret));
 (*env)-Throw(env, jthr);
 goto done;
   }
+  if (lseek(fd, 0, SEEK_SET)  0) {
+ret = errno;
+jthr = newIOException(env, lseek(%s, 0, SEEK_SET) failed: error %d (%s),
+  path, ret, terror(ret));
+(*env)-Throw(env, jthr);
+goto done;
+  }
   jret = fd_create(env, fd); // throws exception on error.
 
 done:



git commit: HDFS-6912. SharedFileDescriptorFactory should not allocate sparse files (cmccabe) (cherry picked from commit 8008f0e8191b1c7adbed96ed4c380208e3a37692)

2014-09-15 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 e6e31fbbb -> b4d9aca06


HDFS-6912. SharedFileDescriptorFactory should not allocate sparse files 
(cmccabe)
(cherry picked from commit 8008f0e8191b1c7adbed96ed4c380208e3a37692)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b4d9aca0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b4d9aca0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b4d9aca0

Branch: refs/heads/branch-2
Commit: b4d9aca066a09f3536d2df2de21744e02cbed0dd
Parents: e6e31fb
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Mon Sep 15 14:47:27 2014 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Mon Sep 15 14:51:59 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 ++
 .../io/nativeio/SharedFileDescriptorFactory.c   | 32 ++--
 2 files changed, 33 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b4d9aca0/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index e3dd7d1..d2deee6 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -378,6 +378,9 @@ Release 2.6.0 - UNRELEASED
 HADOOP-11056. OsSecureRandom.setConf() might leak file descriptors. (yzhang
 via cmccabe)
 
+HDFS-6912. SharedFileDescriptorFactory should not allocate sparse files
+(cmccabe)
+
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
   HADOOP-10734. Implement high-performance secure random number sources.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b4d9aca0/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c
 
b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c
index 8368402..3a8540c 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c
+++ 
b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/SharedFileDescriptorFactory.c
@@ -37,6 +37,8 @@
 #include sys/types.h
 #include unistd.h
 
+#define ZERO_FULLY_BUF_SIZE 8192
+
 static pthread_mutex_t g_rand_lock = PTHREAD_MUTEX_INITIALIZER;
 
 JNIEXPORT void JNICALL
@@ -83,6 +85,24 @@ done:
   }
 }
 
+static int zero_fully(int fd, jint length)
+{
+  char buf[ZERO_FULLY_BUF_SIZE];
+  int res;
+
+  memset(buf, 0, sizeof(buf));
+  while (length  0) {
+res = write(fd, buf,
+  (length  ZERO_FULLY_BUF_SIZE) ? ZERO_FULLY_BUF_SIZE : length);
+if (res  0) {
+  if (errno == EINTR) continue;
+  return errno;
+}
+length -= res;
+  }
+  return 0;
+}
+
 JNIEXPORT jobject JNICALL
 
Java_org_apache_hadoop_io_nativeio_SharedFileDescriptorFactory_createDescriptor0(
   JNIEnv *env, jclass clazz, jstring jprefix, jstring jpath, jint length)
@@ -136,12 +156,20 @@ 
Java_org_apache_hadoop_io_nativeio_SharedFileDescriptorFactory_createDescriptor0
 (*env)-Throw(env, jthr);
 goto done;
   }
-  if (ftruncate(fd, length)  0) {
-jthr = newIOException(env, ftruncate(%s, %d) failed: error %d (%s),
+  ret = zero_fully(fd, length);
+  if (ret) {
+jthr = newIOException(env, zero_fully(%s, %d) failed: error %d (%s),
   path, length, ret, terror(ret));
 (*env)-Throw(env, jthr);
 goto done;
   }
+  if (lseek(fd, 0, SEEK_SET)  0) {
+ret = errno;
+jthr = newIOException(env, lseek(%s, 0, SEEK_SET) failed: error %d (%s),
+  path, ret, terror(ret));
+(*env)-Throw(env, jthr);
+goto done;
+  }
   jret = fd_create(env, fd); // throws exception on error.
 
 done:



git commit: HDFS-7066. LazyWriter#evictBlocks misses a null check for replicaState. (Contributed by Xiaoyu Yao)

2014-09-15 Thread arp
Repository: hadoop
Updated Branches:
  refs/heads/HDFS-6581 fdf0542d8 -> a4dcbaa33


HDFS-7066. LazyWriter#evictBlocks misses a null check for replicaState. 
(Contributed by Xiaoyu Yao)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a4dcbaa3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a4dcbaa3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a4dcbaa3

Branch: refs/heads/HDFS-6581
Commit: a4dcbaa33255cd1dd8d6c54763f55486c9e4317c
Parents: fdf0542
Author: arp a...@apache.org
Authored: Mon Sep 15 15:28:17 2014 -0700
Committer: arp a...@apache.org
Committed: Mon Sep 15 15:28:17 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt| 3 +++
 .../hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java   | 4 
 2 files changed, 7 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a4dcbaa3/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt
index 346f912..0877636 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt
@@ -44,3 +44,6 @@
 HDFS-6978. Directory scanner should correctly reconcile blocks on RAM
 disk. (Arpit Agarwal)
 
+HDFS-7066. LazyWriter#evictBlocks misses a null check for replicaState.
+(Xiaoyu Yao via Arpit Agarwal)
+

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a4dcbaa3/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
index 6366a4f..23753bd 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
@@ -2331,6 +2331,10 @@ class FsDatasetImpl implements 
FsDatasetSpiFsVolumeImpl {
 LazyWriteReplicaTracker.ReplicaState replicaState =
 lazyWriteReplicaTracker.getNextCandidateForEviction();
 
+if (replicaState == null) {
+  break;
+}
+
 if (LOG.isDebugEnabled()) {
   LOG.debug(Evicting block  + replicaState);
 }



[05/10] git commit: YARN-2549. TestContainerLaunch fails due to classpath problem with hamcrest classes. Contributed by Chris Nauroth.

2014-09-15 Thread arp
YARN-2549. TestContainerLaunch fails due to classpath problem with hamcrest 
classes. Contributed by Chris Nauroth.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9d4ec97c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9d4ec97c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9d4ec97c

Branch: refs/heads/HDFS-6581
Commit: 9d4ec97c95b51c6f99112030da91950551b22b9c
Parents: 43b0303
Author: cnauroth cnaur...@apache.org
Authored: Mon Sep 15 11:53:57 2014 -0700
Committer: cnauroth cnaur...@apache.org
Committed: Mon Sep 15 11:53:57 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt   |  3 +++
 .../hadoop-yarn-server-nodemanager/pom.xml| 14 +-
 2 files changed, 12 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9d4ec97c/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 9c1abf7..5cfc460 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -359,6 +359,9 @@ Release 2.6.0 - UNRELEASED
 header and made it accept multiple origins in CrossOriginFilter. (Jonathan
 Eagles via zjshen)
 
+YARN-2549. TestContainerLaunch fails due to classpath problem with hamcrest
+classes. (cnauroth)
+
 Release 2.5.1 - 2014-09-05
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9d4ec97c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
index 370cc36..b1efa5f 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
@@ -102,11 +102,6 @@
   groupIdorg.apache.hadoop/groupId
   artifactIdhadoop-annotations/artifactId
 /dependency
-dependency
-  groupIdorg.mockito/groupId
-  artifactIdmockito-all/artifactId
-  scopetest/scope
-/dependency
 !-- 'mvn dependency:analyze' fails to detect use of this dependency --
 dependency
   groupIdorg.apache.hadoop/groupId
@@ -122,12 +117,21 @@
   groupIdcom.google.protobuf/groupId
   artifactIdprotobuf-java/artifactId
 /dependency
+!--
+junit must be before mockito-all on the classpath.  mockito-all bundles its
+own copy of the hamcrest classes, but they don't match our junit version.
+--
 dependency
   groupIdjunit/groupId
   artifactIdjunit/artifactId
   scopetest/scope
 /dependency
 dependency
+  groupIdorg.mockito/groupId
+  artifactIdmockito-all/artifactId
+  scopetest/scope
+/dependency
+dependency
   groupIdcom.google.inject/groupId
   artifactIdguice/artifactId
 /dependency



[10/10] git commit: Merge branch 'trunk' into HDFS-6581

2014-09-15 Thread arp
Merge branch 'trunk' into HDFS-6581


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a7814e1a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a7814e1a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a7814e1a

Branch: refs/heads/HDFS-6581
Commit: a7814e1aa440066ee20fde4c4992173d0b1514c1
Parents: a4dcbaa 8008f0e
Author: arp a...@apache.org
Authored: Mon Sep 15 15:30:19 2014 -0700
Committer: arp a...@apache.org
Committed: Mon Sep 15 15:30:19 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |6 +
 .../src/main/conf/hadoop-policy.xml |9 +
 .../src/main/conf/log4j.properties  |5 +
 .../main/java/org/apache/hadoop/ha/HAAdmin.java |6 +-
 .../io/nativeio/SharedFileDescriptorFactory.c   |   32 +-
 .../src/main/resources/core-default.xml |   86 ++
 hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml  |8 +
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |9 +
 .../web/resources/DatanodeWebHdfsMethods.java   |   13 +-
 .../apache/hadoop/hdfs/TestEncryptionZones.java |   62 ++
 .../hdfs/tools/TestDFSHAAdminMiniCluster.java   |   20 -
 hadoop-project/pom.xml  |   26 +-
 hadoop-tools/hadoop-aws/pom.xml |   10 +
 .../fs/s3a/AnonymousAWSCredentialsProvider.java |   37 +
 .../fs/s3a/BasicAWSCredentialsProvider.java |   51 +
 .../org/apache/hadoop/fs/s3a/Constants.java |   90 ++
 .../org/apache/hadoop/fs/s3a/S3AFileStatus.java |   62 ++
 .../org/apache/hadoop/fs/s3a/S3AFileSystem.java | 1019 ++
 .../apache/hadoop/fs/s3a/S3AInputStream.java|  207 
 .../apache/hadoop/fs/s3a/S3AOutputStream.java   |  208 
 .../services/org.apache.hadoop.fs.FileSystem|1 +
 .../hadoop/fs/contract/s3a/S3AContract.java |   43 +
 .../fs/contract/s3a/TestS3AContractCreate.java  |   38 +
 .../fs/contract/s3a/TestS3AContractDelete.java  |   31 +
 .../fs/contract/s3a/TestS3AContractMkdir.java   |   34 +
 .../fs/contract/s3a/TestS3AContractOpen.java|   31 +
 .../fs/contract/s3a/TestS3AContractRename.java  |   64 ++
 .../fs/contract/s3a/TestS3AContractRootDir.java |   35 +
 .../fs/contract/s3a/TestS3AContractSeek.java|   31 +
 .../fs/s3a/S3AFileSystemContractBaseTest.java   |  327 ++
 .../src/test/resources/contract/s3a.xml |  105 ++
 .../src/test/resources/contract/s3n.xml |7 +-
 hadoop-tools/hadoop-azure/pom.xml   |   10 +-
 hadoop-yarn-project/CHANGES.txt |8 +
 .../hadoop-yarn/conf/yarn-env.sh|   52 +-
 .../hadoop/yarn/conf/YarnConfiguration.java |4 +
 .../ApplicationHistoryClientService.java|   15 +-
 .../authorize/TimelinePolicyProvider.java   |   44 +
 .../hadoop-yarn-server-nodemanager/pom.xml  |   14 +-
 39 files changed, 2791 insertions(+), 69 deletions(-)
--




[02/10] HADOOP-10400. Incorporate new S3A FileSystem implementation. Contributed by Jordan Mendelson and Dave Wang.

2014-09-15 Thread arp
http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d920b8/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java
--
diff --git 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java
 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java
new file mode 100644
index 000..d677ec4
--- /dev/null
+++ 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/TestS3AContractSeek.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  License); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an AS IS BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.fs.contract.s3a;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.contract.AbstractContractSeekTest;
+import org.apache.hadoop.fs.contract.AbstractFSContract;
+
+public class TestS3AContractSeek extends AbstractContractSeekTest {
+
+  @Override
+  protected AbstractFSContract createContract(Configuration conf) {
+return new S3AContract(conf);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d920b8/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3AFileSystemContractBaseTest.java
--
diff --git 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3AFileSystemContractBaseTest.java
 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3AFileSystemContractBaseTest.java
new file mode 100644
index 000..8455233
--- /dev/null
+++ 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3AFileSystemContractBaseTest.java
@@ -0,0 +1,327 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import static org.junit.Assume.*;
+
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystemContractBaseTest;
+import org.apache.hadoop.fs.Path;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.URI;
+import java.util.UUID;
+
+/**
+ *  Tests a live S3 system. If your keys and bucket aren't specified, all tests
+ *  are marked as passed.
+ *
+ *  This uses BlockJUnit4ClassRunner because FileSystemContractBaseTest extends
+ *  TestCase, which uses the old JUnit 3 runner that doesn't ignore assumptions
+ *  properly, making it impossible to skip the tests if we don't have a valid
+ *  bucket.
+ **/
+public class S3AFileSystemContractBaseTest extends FileSystemContractBaseTest {
+  private static final int TEST_BUFFER_SIZE = 128;
+  private static final int MODULUS = 128;
+
+  protected static final Logger LOG = 
LoggerFactory.getLogger(S3AFileSystemContractBaseTest.class);
+
+  @Override
+  public void setUp() throws Exception {
+Configuration conf = new Configuration();
+
+URI testURI = URI.create(conf.get("test.fs.s3a.name"));
+
+boolean liveTest = testURI != null && !testURI.equals("s3a:///");
+
+// This doesn't work with our JUnit 3 style test cases, so instead we'll 
+// make this whole class not run by default
+assumeTrue(liveTest);
+
+fs = new S3AFileSystem();
+fs.initialize(testURI, conf);
+super.setUp();
+  }
+
+  @Override
+  protected void tearDown() throws 

[01/10] git commit: HDFS-7061. Add test to verify encryption zone creation after NameNode restart without saving namespace. Contributed by Stephen Chu.

2014-09-15 Thread arp
Repository: hadoop
Updated Branches:
  refs/heads/HDFS-6581 a4dcbaa33 - a7814e1aa


HDFS-7061. Add test to verify encryption zone creation after NameNode restart 
without saving namespace. Contributed by Stephen Chu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fc741b5d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fc741b5d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fc741b5d

Branch: refs/heads/HDFS-6581
Commit: fc741b5d78e7e006355e17b1b5839f502e37261b
Parents: 14e2639
Author: Andrew Wang w...@apache.org
Authored: Sun Sep 14 23:48:24 2014 -0700
Committer: Andrew Wang w...@apache.org
Committed: Sun Sep 14 23:49:01 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt   |  3 +++
 .../java/org/apache/hadoop/hdfs/TestEncryptionZones.java  | 10 ++
 2 files changed, 13 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fc741b5d/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index bddf303..57a4a0f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -455,6 +455,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-6482. Use block ID-based block layout on datanodes (James Thomas via
 Colin Patrick McCabe)
 
+HDFS-7061. Add test to verify encryption zone creation after NameNode
+restart without saving namespace. (Stephen Chu via wang)
+
   OPTIMIZATIONS
 
 HDFS-6690. Deduplicate xattr names in memory. (wang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fc741b5d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
index db3c085..b3bf5d9 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
@@ -338,6 +338,16 @@ public class TestEncryptionZones {
 cluster.restartNameNode(true);
 assertNumZones(numZones);
 assertZonePresent(null, zone1.toString());
+
+// Verify newly added ez is present after restarting the NameNode
+// without persisting the namespace.
+Path nonpersistZone = new Path(/nonpersistZone);
+fsWrapper.mkdir(nonpersistZone, FsPermission.getDirDefault(), false);
+dfsAdmin.createEncryptionZone(nonpersistZone, TEST_KEY);
+numZones++;
+cluster.restartNameNode(true);
+assertNumZones(numZones);
+assertZonePresent(null, nonpersistZone.toString());
   }
 
   /**



[06/10] git commit: YARN-2529. Generic history service RPC interface doesn't work when service authorization is enabled. Contributed by Zhijie Shen

2014-09-15 Thread arp
YARN-2529. Generic history service RPC interface doesn't work when service 
authorization is enabled. Contributed by Zhijie Shen


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/88e329fd
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/88e329fd
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/88e329fd

Branch: refs/heads/HDFS-6581
Commit: 88e329fdfb27c11f1964c0345188bcf328c95fd6
Parents: 9d4ec97
Author: Jian He jia...@apache.org
Authored: Mon Sep 15 13:13:54 2014 -0700
Committer: Jian He jia...@apache.org
Committed: Mon Sep 15 13:13:54 2014 -0700

--
 .../src/main/conf/hadoop-policy.xml |  9 
 hadoop-yarn-project/CHANGES.txt |  3 ++
 .../hadoop/yarn/conf/YarnConfiguration.java |  4 ++
 .../ApplicationHistoryClientService.java| 15 ++-
 .../authorize/TimelinePolicyProvider.java   | 44 
 5 files changed, 74 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/88e329fd/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml 
b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
index 39e4f4d..2bf5c02 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
+++ b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
@@ -214,4 +214,13 @@
 A special value of * means all users are allowed./description
   /property
 
+  <property>
+    <name>security.applicationhistory.protocol.acl</name>
+    <value>*</value>
+    <description>ACL for ApplicationHistoryProtocol, used by the timeline
+    server and the generic history service client to communicate with each
+    other.
+    The ACL is a comma-separated list of user and group names. The user and
+    group list is separated by a blank. For e.g. "alice,bob users,wheel".
+    A special value of "*" means all users are allowed.</description>
+  </property>
 /configuration

http://git-wip-us.apache.org/repos/asf/hadoop/blob/88e329fd/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 5cfc460..629d3b3 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -362,6 +362,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2549. TestContainerLaunch fails due to classpath problem with hamcrest
 classes. (cnauroth)
 
+YARN-2529. Generic history service RPC interface doesn't work when service
+authorization is enabled. (Zhijie Shen via jianhe)
+
 Release 2.5.1 - 2014-09-05
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/88e329fd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 43f510d..a92b358 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -991,6 +991,10 @@ public class YarnConfiguration extends Configuration {
   YARN_SECURITY_SERVICE_AUTHORIZATION_RESOURCE_LOCALIZER =
   security.resourcelocalizer.protocol.acl;
 
+  public static final String
+  YARN_SECURITY_SERVICE_AUTHORIZATION_APPLICATIONHISTORY_PROTOCOL =
+  security.applicationhistory.protocol.acl;
+
   /** No. of milliseconds to wait between sending a SIGTERM and SIGKILL
* to a running container */
   public static final String NM_SLEEP_DELAY_BEFORE_SIGKILL_MS =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/88e329fd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
index 

[07/10] git commit: YARN-2438. yarn-env.sh cleanup (aw)

2014-09-15 Thread arp
YARN-2438. yarn-env.sh cleanup (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0c8aec43
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0c8aec43
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0c8aec43

Branch: refs/heads/HDFS-6581
Commit: 0c8aec43f0acd4f896df8cd605d97808f9a09887
Parents: 88e329f
Author: Allen Wittenauer a...@apache.org
Authored: Mon Sep 15 14:09:45 2014 -0700
Committer: Allen Wittenauer a...@apache.org
Committed: Mon Sep 15 14:09:50 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt |  2 +
 .../hadoop-yarn/conf/yarn-env.sh| 52 +---
 2 files changed, 37 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c8aec43/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 629d3b3..b41ad82 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -8,6 +8,8 @@ Trunk - Unreleased
 
   IMPROVEMENTS
 
+YARN-2438. yarn-env.sh cleanup (aw)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c8aec43/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh
--
diff --git a/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh 
b/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh
index 755cfd8..0f5e9b7 100644
--- a/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh
+++ b/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh
@@ -1,3 +1,4 @@
+#
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
 # this work for additional information regarding copyright ownership.
@@ -25,27 +26,21 @@
 ##
 
 ###
-# Generic settings for YARN
+# YARN-specific overrides for generic settings
 ###
 
-# User for YARN daemons
-export HADOOP_YARN_USER=${HADOOP_YARN_USER:-yarn}
-
-#
-# By default, YARN will use HADOOP_CONF_DIR. Specify a custom
-# YARN_CONF_DIR here
-# export YARN_CONF_DIR=${YARN_CONF_DIR:-$HADOOP_YARN_HOME/conf}
-#
-
-# Override Hadoop's log directory  file
-# export YARN_LOG_DIR=$HADOOP_YARN_HOME/logs
-# export YARN_LOGFILE='yarn.log'
+# By default, YARN will use HADOOP_LOG_DIR for YARN logging.  Specify a custom
+# log directory for YARN things here:
+# export YARN_LOG_DIR=${HADOOP_LOG_DIR}
 
-# Need a custom-to-YARN service-level authorization policy file?
-# export YARN_POLICYFILE=yarn-policy.xml
+# By default, YARN will use the value of HADOOP_LOGFILE as the 'fallback' log
+# file # when log4j settings are not defined.  Specify a custom YARN log file
+# here:
+# export YARN_LOGFILE=${HADOOP_LOGFILE}
 
-#Override the log4j settings for all YARN apps
-# export YARN_ROOT_LOGGER=INFO,console
+#Override the log4j settings for all YARN apps By default, YARN will use
+# HADOOP_ROOT_LOGGER.
+# export YARN_ROOT_LOGGER=${HADOOP_ROOT_LOGGER}
 
 ###
 # Resource Manager specific parameters
@@ -125,3 +120,26 @@ export HADOOP_YARN_USER=${HADOOP_YARN_USER:-yarn}
 #
 #export YARN_TIMELINESERVER_OPTS=
 
+###
+# Web App Proxy Server specifc parameters
+###
+
+# Specify the max Heapsize for the proxy server using a numerical value
+# in the scale of MB. For example, to specify an jvm option of -Xmx1000m, set
+# the value to 1000.
+# This value will be overridden by an Xmx setting specified in either 
YARN_OPTS,
+# HADOOP_OPTS, and/or YARN_PROXYSERVER_OPTS.
+# If not specified, the default value will be picked from either YARN_HEAPMAX
+# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.
+#
+#export YARN_PROXYSERVER_HEAPSIZE=1000
+
+# Specify the JVM options to be used when starting the proxy server.
+# These options will be appended to the options specified as YARN_OPTS
+# and therefore may override any similar flags set in YARN_OPTS
+#
+# See ResourceManager for some examples
+#
+#export YARN_PROXYSERVER_OPTS=
+
+



[08/10] git commit: HDFS-7059. HAadmin transtionToActive with forceActive option can show confusing message. Contributed by Rushabh Shah.

2014-09-15 Thread arp
HDFS-7059. HAadmin transtionToActive with forceActive option can show
confusing message. Contributed by Rushabh Shah.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2f847b33
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2f847b33
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2f847b33

Branch: refs/heads/HDFS-6581
Commit: 2f847b3357448184aaec935052f34d116ad3a42c
Parents: 0c8aec4
Author: Kihwal Lee kih...@apache.org
Authored: Mon Sep 15 16:27:22 2014 -0500
Committer: Kihwal Lee kih...@apache.org
Committed: Mon Sep 15 16:27:22 2014 -0500

--
 .../main/java/org/apache/hadoop/ha/HAAdmin.java |  6 --
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../hdfs/tools/TestDFSHAAdminMiniCluster.java   | 20 
 3 files changed, 7 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2f847b33/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
index dfa03e8..bd6366c 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
@@ -143,8 +143,10 @@ public abstract class HAAdmin extends Configured 
implements Tool {
 }
 /*  returns true if other target node is active or some exception occurred 
 and forceActive was not set  */
-if(isOtherTargetNodeActive(argv[0], cmd.hasOption(FORCEACTIVE))) {
-  return -1;
+if(!cmd.hasOption(FORCEACTIVE)) {
+  if(isOtherTargetNodeActive(argv[0], cmd.hasOption(FORCEACTIVE))) {
+return -1;
+  }
 }
 HAServiceTarget target = resolveTarget(argv[0]);
 if (!checkManualStateManagementOK(target)) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2f847b33/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 209c7c1..f596265 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -458,6 +458,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-7061. Add test to verify encryption zone creation after NameNode
 restart without saving namespace. (Stephen Chu via wang)
 
+HDFS-7059. HAadmin transtionToActive with forceActive option can show
+confusing message.
+
   OPTIMIZATIONS
 
 HDFS-6690. Deduplicate xattr names in memory. (wang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2f847b33/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
index 288bcd0..ee1c184 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
@@ -232,26 +232,6 @@ public class TestDFSHAAdminMiniCluster {
assertFalse("Both namenodes cannot be active", nn1.isActiveState() &&
    nn2.isActiveState());

-/*  This test case doesn't allow nn2 to transition to Active even with
-forceActive switch since nn1 is already active  */
-if(nn1.getState() != null  !nn1.getState().
-equals(HAServiceState.STANDBY.name()) ) {
-  cluster.transitionToStandby(0);
-}
-if(nn2.getState() != null  !nn2.getState().
-equals(HAServiceState.STANDBY.name()) ) {
-  cluster.transitionToStandby(1);
-}
-//Making sure both the namenode are in standby state
-assertTrue(nn1.isStandbyState());
-assertTrue(nn2.isStandbyState());
-
-runTool(-transitionToActive, nn1);
-runTool(-transitionToActive, nn2,--forceactive);
-
-assertFalse("Both namenodes cannot be active even though with forceActive",
-nn1.isActiveState() && nn2.isActiveState());
-
 /*  In this test case, we have deliberately shut down nn1 and this will
 cause HAAAdmin#isOtherTargetNodeActive to throw an Exception 
 and transitionToActive for nn2 with  forceActive switch will succeed 



[03/10] git commit: HADOOP-10400. Incorporate new S3A FileSystem implementation. Contributed by Jordan Mendelson and Dave Wang.

2014-09-15 Thread arp
HADOOP-10400. Incorporate new S3A FileSystem implementation. Contributed by 
Jordan Mendelson and Dave Wang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/24d920b8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/24d920b8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/24d920b8

Branch: refs/heads/HDFS-6581
Commit: 24d920b80eb3626073925a1d0b6dcf148add8cc0
Parents: fc741b5
Author: Aaron T. Myers a...@apache.org
Authored: Mon Sep 15 08:27:07 2014 -0700
Committer: Aaron T. Myers a...@apache.org
Committed: Mon Sep 15 08:27:07 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |3 +
 .../src/main/conf/log4j.properties  |5 +
 .../src/main/resources/core-default.xml |   86 ++
 hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml  |8 +
 hadoop-project/pom.xml  |   26 +-
 hadoop-tools/hadoop-aws/pom.xml |   10 +
 .../fs/s3a/AnonymousAWSCredentialsProvider.java |   37 +
 .../fs/s3a/BasicAWSCredentialsProvider.java |   51 +
 .../org/apache/hadoop/fs/s3a/Constants.java |   90 ++
 .../org/apache/hadoop/fs/s3a/S3AFileStatus.java |   62 ++
 .../org/apache/hadoop/fs/s3a/S3AFileSystem.java | 1019 ++
 .../apache/hadoop/fs/s3a/S3AInputStream.java|  207 
 .../apache/hadoop/fs/s3a/S3AOutputStream.java   |  208 
 .../services/org.apache.hadoop.fs.FileSystem|1 +
 .../hadoop/fs/contract/s3a/S3AContract.java |   43 +
 .../fs/contract/s3a/TestS3AContractCreate.java  |   38 +
 .../fs/contract/s3a/TestS3AContractDelete.java  |   31 +
 .../fs/contract/s3a/TestS3AContractMkdir.java   |   34 +
 .../fs/contract/s3a/TestS3AContractOpen.java|   31 +
 .../fs/contract/s3a/TestS3AContractRename.java  |   64 ++
 .../fs/contract/s3a/TestS3AContractRootDir.java |   35 +
 .../fs/contract/s3a/TestS3AContractSeek.java|   31 +
 .../fs/s3a/S3AFileSystemContractBaseTest.java   |  327 ++
 .../src/test/resources/contract/s3a.xml |  105 ++
 .../src/test/resources/contract/s3n.xml |7 +-
 hadoop-tools/hadoop-azure/pom.xml   |   10 +-
 26 files changed, 2552 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d920b8/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 051eac1..c2ae5ed 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -342,6 +342,9 @@ Release 2.6.0 - UNRELEASED
 HADOOP-10893. isolated classloader on the client side (Sangjin Lee via
 jlowe)
 
+HADOOP-10400. Incorporate new S3A FileSystem implementation. (Jordan
+Mendelson and Dave Wang via atm)
+
   IMPROVEMENTS
 
 HADOOP-10808. Remove unused native code for munlock. (cnauroth)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d920b8/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
--
diff --git a/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties 
b/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
index ef9acbf..5fa21fa 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
+++ b/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
@@ -174,6 +174,11 @@ 
log4j.appender.MRAUDIT.MaxBackupIndex=${mapred.audit.log.maxbackupindex}
 # Jets3t library
 log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
 
+# AWS SDK  S3A FileSystem
+log4j.logger.com.amazonaws=ERROR
+log4j.logger.com.amazonaws.http.AmazonHttpClient=ERROR
+log4j.logger.org.apache.hadoop.fs.s3a.S3AFileSystem=WARN
+
 #
 # Event Counter Appender
 # Sends counts of logging messages at different severity levels to Hadoop 
Metrics.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/24d920b8/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml 
b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index 3cc7545..828dec2 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -690,6 +690,92 @@ for ldap providers in the same way as above does.
 /property
 
 property
+  <name>fs.s3a.access.key</name>
+  <description>AWS access key ID. Omit for Role-based
+  authentication.</description>
+</property>
+
+property
+  namefs.s3a.secret.key/name
+  descriptionAWS secret key. Omit for 

[04/10] git commit: HDFS-7032. Add WebHDFS support for reading and writing to encryption zones. Contributed by Charles Lamb.

2014-09-15 Thread arp
HDFS-7032. Add WebHDFS support for reading and writing to encryption zones. 
Contributed by Charles Lamb.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/43b03030
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/43b03030
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/43b03030

Branch: refs/heads/HDFS-6581
Commit: 43b03030084839db041d0337013806aaeef12aaa
Parents: 24d920b
Author: Andrew Wang w...@apache.org
Authored: Mon Sep 15 10:23:57 2014 -0700
Committer: Andrew Wang w...@apache.org
Committed: Mon Sep 15 10:23:57 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../web/resources/DatanodeWebHdfsMethods.java   | 13 +++--
 .../apache/hadoop/hdfs/TestEncryptionZones.java | 52 
 3 files changed, 63 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/43b03030/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 57a4a0f..209c7c1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -647,6 +647,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-7045. Fix NameNode deadlock when opening file under /.reserved path.
 (Yi Liu via wang)
 
+HDFS-7032. Add WebHDFS support for reading and writing to encryption zones.
+(clamb via wang)
+
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
   HDFS-6387. HDFS CLI admin tool for creating  deleting an

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43b03030/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
index 51731c8..0f0f3be 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
@@ -231,11 +231,13 @@ public class DatanodeWebHdfsMethods {
   DFSClient dfsclient = newDfsClient(nnId, conf);
   FSDataOutputStream out = null;
   try {
-out = new FSDataOutputStream(dfsclient.create(
+out = dfsclient.createWrappedOutputStream(dfsclient.create(
 fullpath, permission.getFsPermission(), 
-overwrite.getValue() ? EnumSet.of(CreateFlag.CREATE, 
CreateFlag.OVERWRITE)
-: EnumSet.of(CreateFlag.CREATE),
-replication.getValue(conf), blockSize.getValue(conf), null, b, 
null), null);
+overwrite.getValue() ?
+EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE) :
+EnumSet.of(CreateFlag.CREATE),
+replication.getValue(conf), blockSize.getValue(conf), null,
+b, null), null);
 IOUtils.copyBytes(in, out, b);
 out.close();
 out = null;
@@ -418,7 +420,8 @@ public class DatanodeWebHdfsMethods {
   final DFSClient dfsclient = newDfsClient(nnId, conf);
   HdfsDataInputStream in = null;
   try {
-in = new HdfsDataInputStream(dfsclient.open(fullpath, b, true));
+in = dfsclient.createWrappedInputStream(
+dfsclient.open(fullpath, b, true));
 in.seek(offset.getValue());
   } catch(IOException ioe) {
 IOUtils.cleanup(LOG, in);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43b03030/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
index b3bf5d9..68fc850 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
 import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
 import org.apache.hadoop.crypto.key.KeyProviderFactory;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import 

git commit: HADOOP-11091. Eliminate old configuration parameter names from s3a (dsw via cmccabe) (cherry picked from commit 0ac760a58d96b36ab30e9d60679bbea6365ef120)

2014-09-15 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 b4d9aca06 - f80b10e1e


HADOOP-11091. Eliminate old configuration parameter names from s3a (dsw via 
cmccabe)
(cherry picked from commit 0ac760a58d96b36ab30e9d60679bbea6365ef120)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f80b10e1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f80b10e1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f80b10e1

Branch: refs/heads/branch-2
Commit: f80b10e1e7d47550996ba3c8b0f23f4a97a8c16a
Parents: b4d9aca
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Mon Sep 15 16:49:15 2014 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Mon Sep 15 16:50:10 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 ++
 .../org/apache/hadoop/fs/s3a/Constants.java | 36 +--
 .../org/apache/hadoop/fs/s3a/S3AFileSystem.java | 46 +---
 .../apache/hadoop/fs/s3a/S3AOutputStream.java   |  6 +--
 4 files changed, 38 insertions(+), 53 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f80b10e1/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index d2deee6..fddd86d 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -186,6 +186,9 @@ Release 2.6.0 - UNRELEASED
 HADOOP-11074. Move s3-related FS connector code to hadoop-aws (David S.
 Wang via Colin Patrick McCabe)
 
+HADOOP-11091. Eliminate old configuration parameter names from s3a (David
+S. Wang via Colin Patrick McCabe)
+
   OPTIMIZATIONS
 
 HADOOP-10838. Byte array native checksumming. (James Thomas via todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f80b10e1/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
--
diff --git 
a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java 
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
index 9723b82..26b7ddd 100644
--- 
a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
+++ 
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
@@ -21,46 +21,37 @@ package org.apache.hadoop.fs.s3a;
 
 public class Constants {
   // s3 access key
-  public static final String OLD_ACCESS_KEY = "fs.s3a.awsAccessKeyId";
-  public static final String NEW_ACCESS_KEY = "fs.s3a.access.key";
+  public static final String ACCESS_KEY = "fs.s3a.access.key";
 
   // s3 secret key
-  public static final String OLD_SECRET_KEY = "fs.s3a.awsSecretAccessKey";
-  public static final String NEW_SECRET_KEY = "fs.s3a.secret.key";
+  public static final String SECRET_KEY = "fs.s3a.secret.key";
   
   // number of simultaneous connections to s3
-  public static final String OLD_MAXIMUM_CONNECTIONS = "fs.s3a.maxConnections";
-  public static final String NEW_MAXIMUM_CONNECTIONS = 
"fs.s3a.connection.maximum";
+  public static final String MAXIMUM_CONNECTIONS = "fs.s3a.connection.maximum";
   public static final int DEFAULT_MAXIMUM_CONNECTIONS = 15;
   
   // connect to s3 over ssl?
-  public static final String OLD_SECURE_CONNECTIONS = 
"fs.s3a.secureConnections";
-  public static final String NEW_SECURE_CONNECTIONS = 
"fs.s3a.connection.ssl.enabled";
+  public static final String SECURE_CONNECTIONS = 
"fs.s3a.connection.ssl.enabled";
   public static final boolean DEFAULT_SECURE_CONNECTIONS = true;
   
   // number of times we should retry errors
-  public static final String OLD_MAX_ERROR_RETRIES = "fs.s3a.maxErrorRetries";
-  public static final String NEW_MAX_ERROR_RETRIES = "fs.s3a.attempts.maximum";
+  public static final String MAX_ERROR_RETRIES = "fs.s3a.attempts.maximum";
   public static final int DEFAULT_MAX_ERROR_RETRIES = 10;
   
   // seconds until we give up on a connection to s3
-  public static final String OLD_SOCKET_TIMEOUT = "fs.s3a.socketTimeout";
-  public static final String NEW_SOCKET_TIMEOUT = "fs.s3a.connection.timeout";
+  public static final String SOCKET_TIMEOUT = "fs.s3a.connection.timeout";
   public static final int DEFAULT_SOCKET_TIMEOUT = 5;
 
   // number of records to get while paging through a directory listing
-  public static final String OLD_MAX_PAGING_KEYS = "fs.s3a.maxPagingKeys";
-  public static final String NEW_MAX_PAGING_KEYS = "fs.s3a.paging.maximum";
+  public static final String MAX_PAGING_KEYS = "fs.s3a.paging.maximum";
   public static final int DEFAULT_MAX_PAGING_KEYS = 5000;
 
   // size of each of or multipart pieces in bytes
-  public static final String OLD_MULTIPART_SIZE = 

git commit: HADOOP-11091. Eliminate old configuration parameter names from s3a (dsw via cmccabe)

2014-09-15 Thread cmccabe
Repository: hadoop
Updated Branches:
  refs/heads/trunk 8008f0e81 - 0ac760a58


HADOOP-11091. Eliminate old configuration parameter names from s3a (dsw via 
cmccabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0ac760a5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0ac760a5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0ac760a5

Branch: refs/heads/trunk
Commit: 0ac760a58d96b36ab30e9d60679bbea6365ef120
Parents: 8008f0e
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Mon Sep 15 16:49:15 2014 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Mon Sep 15 16:49:15 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 ++
 .../org/apache/hadoop/fs/s3a/Constants.java | 36 +--
 .../org/apache/hadoop/fs/s3a/S3AFileSystem.java | 46 +---
 .../apache/hadoop/fs/s3a/S3AOutputStream.java   |  6 +--
 4 files changed, 38 insertions(+), 53 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0ac760a5/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 6f80eb2..89bce4d 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -517,6 +517,9 @@ Release 2.6.0 - UNRELEASED
 HADOOP-11074. Move s3-related FS connector code to hadoop-aws (David S.
 Wang via Colin Patrick McCabe)
 
+HADOOP-11091. Eliminate old configuration parameter names from s3a (David
+S. Wang via Colin Patrick McCabe)
+
   OPTIMIZATIONS
 
 HADOOP-10838. Byte array native checksumming. (James Thomas via todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0ac760a5/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
--
diff --git 
a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java 
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
index 9723b82..26b7ddd 100644
--- 
a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
+++ 
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
@@ -21,46 +21,37 @@ package org.apache.hadoop.fs.s3a;
 
 public class Constants {
   // s3 access key
-  public static final String OLD_ACCESS_KEY = "fs.s3a.awsAccessKeyId";
-  public static final String NEW_ACCESS_KEY = "fs.s3a.access.key";
+  public static final String ACCESS_KEY = "fs.s3a.access.key";
 
   // s3 secret key
-  public static final String OLD_SECRET_KEY = "fs.s3a.awsSecretAccessKey";
-  public static final String NEW_SECRET_KEY = "fs.s3a.secret.key";
+  public static final String SECRET_KEY = "fs.s3a.secret.key";
   
   // number of simultaneous connections to s3
-  public static final String OLD_MAXIMUM_CONNECTIONS = "fs.s3a.maxConnections";
-  public static final String NEW_MAXIMUM_CONNECTIONS = 
"fs.s3a.connection.maximum";
+  public static final String MAXIMUM_CONNECTIONS = "fs.s3a.connection.maximum";
   public static final int DEFAULT_MAXIMUM_CONNECTIONS = 15;
   
   // connect to s3 over ssl?
-  public static final String OLD_SECURE_CONNECTIONS = 
"fs.s3a.secureConnections";
-  public static final String NEW_SECURE_CONNECTIONS = 
"fs.s3a.connection.ssl.enabled";
+  public static final String SECURE_CONNECTIONS = 
"fs.s3a.connection.ssl.enabled";
   public static final boolean DEFAULT_SECURE_CONNECTIONS = true;
   
   // number of times we should retry errors
-  public static final String OLD_MAX_ERROR_RETRIES = "fs.s3a.maxErrorRetries";
-  public static final String NEW_MAX_ERROR_RETRIES = "fs.s3a.attempts.maximum";
+  public static final String MAX_ERROR_RETRIES = "fs.s3a.attempts.maximum";
   public static final int DEFAULT_MAX_ERROR_RETRIES = 10;
   
   // seconds until we give up on a connection to s3
-  public static final String OLD_SOCKET_TIMEOUT = "fs.s3a.socketTimeout";
-  public static final String NEW_SOCKET_TIMEOUT = "fs.s3a.connection.timeout";
+  public static final String SOCKET_TIMEOUT = "fs.s3a.connection.timeout";
   public static final int DEFAULT_SOCKET_TIMEOUT = 5;
 
   // number of records to get while paging through a directory listing
-  public static final String OLD_MAX_PAGING_KEYS = "fs.s3a.maxPagingKeys";
-  public static final String NEW_MAX_PAGING_KEYS = "fs.s3a.paging.maximum";
+  public static final String MAX_PAGING_KEYS = "fs.s3a.paging.maximum";
   public static final int DEFAULT_MAX_PAGING_KEYS = 5000;
 
   // size of each of or multipart pieces in bytes
-  public static final String OLD_MULTIPART_SIZE = "fs.s3a.multipartSize";
-  public static final String NEW_MULTIPART_SIZE = 

git commit: YARN-1710. Logic to find allocations within a Plan that satisfy user ReservationRequest(s). Contributed by Carlo Curino and Subru Krishnan.

2014-09-15 Thread curino
Repository: hadoop
Updated Branches:
  refs/heads/YARN-1051 ca9be6fda - 7800b53c4


YARN-1710. Logic to find allocations within a Plan that satisfy user 
ReservationRequest(s). Contributed by Carlo Curino and Subru Krishnan.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7800b53c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7800b53c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7800b53c

Branch: refs/heads/YARN-1051
Commit: 7800b53c4d1de918f2a2f970e838f151403504a3
Parents: ca9be6f
Author: carlo curino Carlo Curino
Authored: Mon Sep 15 16:56:28 2014 -0700
Committer: carlo curino Carlo Curino
Committed: Mon Sep 15 16:56:28 2014 -0700

--
 YARN-1051-CHANGES.txt   |  12 +-
 .../reservation/GreedyReservationAgent.java | 367 
 .../reservation/ReservationAgent.java   |  55 ++
 .../exceptions/ContractValidationException.java |  12 +
 .../reservation/TestGreedyReservationAgent.java | 588 +++
 5 files changed, 1031 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7800b53c/YARN-1051-CHANGES.txt
--
diff --git a/YARN-1051-CHANGES.txt b/YARN-1051-CHANGES.txt
index 410d974..deece7c 100644
--- a/YARN-1051-CHANGES.txt
+++ b/YARN-1051-CHANGES.txt
@@ -4,7 +4,13 @@ CapacityScheduler. (Carlo Curino and Subru Krishnan via curino)
 YARN-2475. Logic for responding to capacity drops for the 
 ReservationSystem. (Carlo Curino and Subru Krishnan via curino)
 
-YARN-1708. Public YARN APIs for creating/updating/deleting reservations. 
(subru)
+YARN-1708. Public YARN APIs for creating/updating/deleting 
+reservations. (Carlo Curino and Subru Krishnan via subru)
 
-YARN-1709. In-memory data structures used to track resources over time to
-enable reservations. (subru)
+YARN-1709. In-memory data structures used to track resources over
+time to enable reservations. (Carlo Curino and Subru Krishnan via 
+subru)
+
+YARN-1710. Logic to find allocations within a Plan that satisfy 
+user ReservationRequest(s). (Carlo Curino and Subru Krishnan via 
+curino) 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7800b53c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/GreedyReservationAgent.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/GreedyReservationAgent.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/GreedyReservationAgent.java
new file mode 100644
index 000..3214f93
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/GreedyReservationAgent.java
@@ -0,0 +1,367 @@
+package org.apache.hadoop.yarn.server.resourcemanager.reservation;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.yarn.api.records.ReservationDefinition;
+import org.apache.hadoop.yarn.api.records.ReservationId;
+import org.apache.hadoop.yarn.api.records.ReservationRequest;
+import org.apache.hadoop.yarn.api.records.ReservationRequestInterpreter;
+import org.apache.hadoop.yarn.api.records.Resource;
+import 
org.apache.hadoop.yarn.server.resourcemanager.reservation.exceptions.ContractValidationException;
+import 
org.apache.hadoop.yarn.server.resourcemanager.reservation.exceptions.PlanningException;
+import org.apache.hadoop.yarn.util.resource.Resources;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This Agent employs a simple greedy placement strategy, placing the various
+ * stages of a {@link ReservationRequest} from the deadline moving backward
+ * towards the arrival. This allows jobs with earlier deadline to be scheduled
+ * greedily as well. Combined with an opportunistic anticipation of work if the
+ * cluster is not fully utilized also seems to provide good latency for
+ * best-effort jobs (i.e., jobs running without a reservation).
+ * 
+ * This agent does not account for locality and only consider container
+ * granularity for validation purposes (i.e., you can't exceed max-container
+ * size).
+ */
+public class GreedyReservationAgent implements ReservationAgent {
+
+  private static final Logger LOG = LoggerFactory
+  .getLogger(GreedyReservationAgent.class);
+
+  @Override
+  public 

[1/2] HADOOP-10868. AuthenticationFilter should support externalizing the secret for signing and provide rotation support. (rkanter via tucu)

2014-09-15 Thread tucu
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 f80b10e1e - e59f6771e


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e59f6771/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestZKSignerSecretProvider.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestZKSignerSecretProvider.java
 
b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestZKSignerSecretProvider.java
new file mode 100644
index 000..d7b6e17
--- /dev/null
+++ 
b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/util/TestZKSignerSecretProvider.java
@@ -0,0 +1,270 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License. See accompanying LICENSE file.
+ */
+package org.apache.hadoop.security.authentication.util;
+
+import java.util.Arrays;
+import java.util.Properties;
+import java.util.Random;
+import javax.servlet.ServletContext;
+import org.apache.curator.test.TestingServer;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+public class TestZKSignerSecretProvider {
+
+  private TestingServer zkServer;
+
+  @Before
+  public void setup() throws Exception {
+zkServer = new TestingServer();
+  }
+
+  @After
+  public void teardown() throws Exception {
+if (zkServer != null) {
+  zkServer.stop();
+  zkServer.close();
+}
+  }
+
+  @Test
+  // Test just one ZKSignerSecretProvider to verify that it works in the
+  // simplest case
+  public void testOne() throws Exception {
+long rolloverFrequency = 15 * 1000; // rollover every 15 sec
+// use the same seed so we can predict the RNG
+long seed = System.currentTimeMillis();
+Random rand = new Random(seed);
+byte[] secret2 = Long.toString(rand.nextLong()).getBytes();
+byte[] secret1 = Long.toString(rand.nextLong()).getBytes();
+byte[] secret3 = Long.toString(rand.nextLong()).getBytes();
+ZKSignerSecretProvider secretProvider = new ZKSignerSecretProvider(seed);
+Properties config = new Properties();
+config.setProperty(
+ZKSignerSecretProvider.ZOOKEEPER_CONNECTION_STRING,
+zkServer.getConnectString());
+config.setProperty(ZKSignerSecretProvider.ZOOKEEPER_PATH,
+"/secret");
+try {
+  secretProvider.init(config, getDummyServletContext(), rolloverFrequency);
+
+  byte[] currentSecret = secretProvider.getCurrentSecret();
+  byte[][] allSecrets = secretProvider.getAllSecrets();
+  Assert.assertArrayEquals(secret1, currentSecret);
+  Assert.assertEquals(2, allSecrets.length);
+  Assert.assertArrayEquals(secret1, allSecrets[0]);
+  Assert.assertNull(allSecrets[1]);
+  Thread.sleep((rolloverFrequency + 2000));
+
+  currentSecret = secretProvider.getCurrentSecret();
+  allSecrets = secretProvider.getAllSecrets();
+  Assert.assertArrayEquals(secret2, currentSecret);
+  Assert.assertEquals(2, allSecrets.length);
+  Assert.assertArrayEquals(secret2, allSecrets[0]);
+  Assert.assertArrayEquals(secret1, allSecrets[1]);
+  Thread.sleep((rolloverFrequency + 2000));
+
+  currentSecret = secretProvider.getCurrentSecret();
+  allSecrets = secretProvider.getAllSecrets();
+  Assert.assertArrayEquals(secret3, currentSecret);
+  Assert.assertEquals(2, allSecrets.length);
+  Assert.assertArrayEquals(secret3, allSecrets[0]);
+  Assert.assertArrayEquals(secret2, allSecrets[1]);
+  Thread.sleep((rolloverFrequency + 2000));
+} finally {
+  secretProvider.destroy();
+}
+  }
+
+  @Test
+  public void testMultipleInit() throws Exception {
+long rolloverFrequency = 15 * 1000; // rollover every 15 sec
+// use the same seed so we can predict the RNG
+long seedA = System.currentTimeMillis();
+Random rand = new Random(seedA);
+byte[] secretA2 = Long.toString(rand.nextLong()).getBytes();
+byte[] secretA1 = Long.toString(rand.nextLong()).getBytes();
+// use the same seed so we can predict the RNG
+long seedB = System.currentTimeMillis() + rand.nextLong();
+rand = new Random(seedB);
+byte[] secretB2 = Long.toString(rand.nextLong()).getBytes();
+byte[] secretB1 = Long.toString(rand.nextLong()).getBytes();
+// use the same seed so we can 

[2/2] git commit: HADOOP-10868. AuthenticationFilter should support externalizing the secret for signing and provide rotation support. (rkanter via tucu)

2014-09-15 Thread tucu
HADOOP-10868. AuthenticationFilter should support externalizing the secret for 
signing and provide rotation support. (rkanter via tucu)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e59f6771
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e59f6771
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e59f6771

Branch: refs/heads/branch-2
Commit: e59f6771e89ded737cc91698763a02f6ebf23c61
Parents: f80b10e
Author: Alejandro Abdelnur t...@apache.org
Authored: Mon Sep 15 17:10:43 2014 -0700
Committer: Alejandro Abdelnur t...@apache.org
Committed: Mon Sep 15 17:10:43 2014 -0700

--
 hadoop-common-project/hadoop-auth/pom.xml   |  13 +
 .../server/AuthenticationFilter.java| 152 --
 .../util/RandomSignerSecretProvider.java|   4 +-
 .../util/RolloverSignerSecretProvider.java  |   7 +-
 .../util/SignerSecretProvider.java  |   9 +-
 .../util/StringSignerSecretProvider.java|  15 +-
 .../util/ZKSignerSecretProvider.java| 503 +++
 .../src/site/apt/Configuration.apt.vm   | 148 +-
 .../hadoop-auth/src/site/apt/index.apt.vm   |   5 +
 .../server/TestAuthenticationFilter.java| 117 -
 .../util/TestJaasConfiguration.java |  55 ++
 .../util/TestRandomSignerSecretProvider.java|   2 +-
 .../util/TestRolloverSignerSecretProvider.java  |   2 +-
 .../authentication/util/TestSigner.java |  23 +-
 .../util/TestStringSignerSecretProvider.java|   9 +-
 .../util/TestZKSignerSecretProvider.java| 270 ++
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 .../hadoop/fs/http/server/TestHttpFSServer.java |   8 +-
 hadoop-project/pom.xml  |  11 +
 19 files changed, 1259 insertions(+), 97 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e59f6771/hadoop-common-project/hadoop-auth/pom.xml
--
diff --git a/hadoop-common-project/hadoop-auth/pom.xml 
b/hadoop-common-project/hadoop-auth/pom.xml
index 20304e1..1da98dc 100644
--- a/hadoop-common-project/hadoop-auth/pom.xml
+++ b/hadoop-common-project/hadoop-auth/pom.xml
@@ -135,6 +135,19 @@
   /exclusion
 /exclusions
 /dependency
+dependency
+  groupIdorg.apache.zookeeper/groupId
+  artifactIdzookeeper/artifactId
+/dependency
+dependency
+  groupIdorg.apache.curator/groupId
+  artifactIdcurator-framework/artifactId
+/dependency
+dependency
+  groupIdorg.apache.curator/groupId
+  artifactIdcurator-test/artifactId
+  scopetest/scope
+/dependency
   /dependencies
 
   build

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e59f6771/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
index 9330444..47cf54c 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
@@ -22,6 +22,7 @@ import 
org.apache.hadoop.security.authentication.util.SignerException;
 import 
org.apache.hadoop.security.authentication.util.RandomSignerSecretProvider;
 import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
 import 
org.apache.hadoop.security.authentication.util.StringSignerSecretProvider;
+import org.apache.hadoop.security.authentication.util.ZKSignerSecretProvider;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -42,7 +43,7 @@ import java.util.*;
 
 /**
  * The {@link AuthenticationFilter} enables protecting web application 
resources with different (pluggable)
- * authentication mechanisms.
+ * authentication mechanisms and signer secret providers.
  * p/
  * Out of the box it provides 2 authentication mechanisms: Pseudo and Kerberos 
SPNEGO.
  * p/
@@ -60,10 +61,13 @@ import java.util.*;
  * li[#PREFIX#.]type: simple|kerberos|#CLASS#, 'simple' is short for the
  * {@link PseudoAuthenticationHandler}, 'kerberos' is short for {@link 
KerberosAuthenticationHandler}, otherwise
  * the full class name of the {@link AuthenticationHandler} must be 
specified./li
- * li[#PREFIX#.]signature.secret: the secret used to sign the HTTP cookie 
value. The default value is a random
- * value. Unless multiple webapp instances need to share 

git commit: HADOOP-10868. Addendum

2014-09-15 Thread tucu
Repository: hadoop
Updated Branches:
  refs/heads/trunk 932ae036a - 7e08c0f23


HADOOP-10868. Addendum


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7e08c0f2
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7e08c0f2
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7e08c0f2

Branch: refs/heads/trunk
Commit: 7e08c0f23f58aa143f0997f2472e8051175142e9
Parents: 932ae03
Author: Alejandro Abdelnur t...@apache.org
Authored: Mon Sep 15 19:39:27 2014 -0700
Committer: Alejandro Abdelnur t...@apache.org
Committed: Mon Sep 15 19:39:27 2014 -0700

--
 .../security/authentication/util/ZKSignerSecretProvider.java  | 7 +--
 1 file changed, 5 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7e08c0f2/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/ZKSignerSecretProvider.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/ZKSignerSecretProvider.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/ZKSignerSecretProvider.java
index 45d4d65..a17b6d4 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/ZKSignerSecretProvider.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/ZKSignerSecretProvider.java
@@ -139,6 +139,9 @@ public class ZKSignerSecretProvider extends 
RolloverSignerSecretProvider {
   ZOOKEEPER_SIGNER_SECRET_PROVIDER_CURATOR_CLIENT_ATTRIBUTE =
   CONFIG_PREFIX + curator.client;
 
+  private static final String JAAS_LOGIN_ENTRY_NAME =
+  "ZKSignerSecretProviderClient";
+
   private static Logger LOG = LoggerFactory.getLogger(
   ZKSignerSecretProvider.class);
   private String path;
@@ -384,7 +387,7 @@ public class ZKSignerSecretProvider extends 
RolloverSignerSecretProvider {
   + and using 'sasl' ACLs);
   String principal = setJaasConfiguration(config);
   System.setProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY,
-  "ZKSignerSecretProviderClient");
+  JAAS_LOGIN_ENTRY_NAME);
   System.setProperty(zookeeper.authProvider.1,
   org.apache.zookeeper.server.auth.SASLAuthenticationProvider);
   aclProvider = new SASLOwnerACLProvider(principal);
@@ -417,7 +420,7 @@ public class ZKSignerSecretProvider extends 
RolloverSignerSecretProvider {
 // This is equivalent to writing a jaas.conf file and setting the system
 // property, java.security.auth.login.config, to point to it
 JaasConfiguration jConf =
-new JaasConfiguration("Client", principal, keytabFile);
+new JaasConfiguration(JAAS_LOGIN_ENTRY_NAME, principal, 
keytabFile);
 Configuration.setConfiguration(jConf);
 return principal.split([/@])[0];
   }



git commit: HADOOP-10868. Addendum

2014-09-15 Thread tucu
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 e59f6771e - 1023196ce


HADOOP-10868. Addendum


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1023196c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1023196c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1023196c

Branch: refs/heads/branch-2
Commit: 1023196ceaa600f92f328cfe67a8bccac3445a64
Parents: e59f677
Author: Alejandro Abdelnur t...@apache.org
Authored: Mon Sep 15 19:39:12 2014 -0700
Committer: Alejandro Abdelnur t...@apache.org
Committed: Mon Sep 15 19:39:12 2014 -0700

--
 .../security/authentication/util/ZKSignerSecretProvider.java  | 7 +--
 1 file changed, 5 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1023196c/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/ZKSignerSecretProvider.java
--
diff --git 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/ZKSignerSecretProvider.java
 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/ZKSignerSecretProvider.java
index 45d4d65..a17b6d4 100644
--- 
a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/ZKSignerSecretProvider.java
+++ 
b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/ZKSignerSecretProvider.java
@@ -139,6 +139,9 @@ public class ZKSignerSecretProvider extends 
RolloverSignerSecretProvider {
   ZOOKEEPER_SIGNER_SECRET_PROVIDER_CURATOR_CLIENT_ATTRIBUTE =
   CONFIG_PREFIX + curator.client;
 
+  private static final String JAAS_LOGIN_ENTRY_NAME =
+  "ZKSignerSecretProviderClient";
+
   private static Logger LOG = LoggerFactory.getLogger(
   ZKSignerSecretProvider.class);
   private String path;
@@ -384,7 +387,7 @@ public class ZKSignerSecretProvider extends 
RolloverSignerSecretProvider {
   + and using 'sasl' ACLs);
   String principal = setJaasConfiguration(config);
   System.setProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY,
-  "ZKSignerSecretProviderClient");
+  JAAS_LOGIN_ENTRY_NAME);
   System.setProperty(zookeeper.authProvider.1,
   org.apache.zookeeper.server.auth.SASLAuthenticationProvider);
   aclProvider = new SASLOwnerACLProvider(principal);
@@ -417,7 +420,7 @@ public class ZKSignerSecretProvider extends 
RolloverSignerSecretProvider {
 // This is equivalent to writing a jaas.conf file and setting the system
 // property, java.security.auth.login.config, to point to it
 JaasConfiguration jConf =
-new JaasConfiguration("Client", principal, keytabFile);
+new JaasConfiguration(JAAS_LOGIN_ENTRY_NAME, principal, 
keytabFile);
 Configuration.setConfiguration(jConf);
 return principal.split([/@])[0];
   }