[2/2] hadoop git commit: Add missing files from HDFS-9005. (lei)

2016-03-25 Thread lei
Add missing files from HDFS-9005. (lei)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ae983149
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ae983149
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ae983149

Branch: refs/heads/branch-2
Commit: ae9831498035c87660bfe54ad954f7dffd2fba58
Parents: 4936486
Author: Lei Xu 
Authored: Fri Mar 25 17:10:31 2016 -0700
Committer: Lei Xu 
Committed: Fri Mar 25 18:13:01 2016 -0700

--
 .../apache/hadoop/hdfs/protocol/DatanodeID.java |   6 +
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   |   4 +-
 .../server/blockmanagement/DatanodeManager.java |  59 +---
 .../server/blockmanagement/HostFileManager.java | 147 +--
 .../src/main/resources/hdfs-default.xml |  14 ++
 .../src/site/markdown/HdfsUserGuide.md  |   6 +-
 .../apache/hadoop/hdfs/TestDatanodeReport.java  |  57 ++-
 .../TestBlocksWithNotEnoughRacks.java   |  33 ++---
 .../blockmanagement/TestDatanodeManager.java|   8 +-
 .../blockmanagement/TestHostFileManager.java|  10 +-
 .../hdfs/server/namenode/TestHostsFiles.java|  70 +
 .../server/namenode/TestNameNodeMXBean.java |  25 ++--
 .../hdfs/server/namenode/TestStartup.java   |  53 +--
 13 files changed, 227 insertions(+), 265 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ae983149/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
index 86782f2..e94c07d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
@@ -23,6 +23,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 
 import com.google.common.annotations.VisibleForTesting;
 
+import java.net.InetSocketAddress;
+
 /**
  * This class represents the primary identifier for a Datanode.
  * Datanodes are identified by how they can be contacted (hostname
@@ -272,4 +274,8 @@ public class DatanodeID implements Comparable<DatanodeID> {
   public int compareTo(DatanodeID that) {
     return getXferAddr().compareTo(that.getXferAddr());
   }
+
+  public InetSocketAddress getResolvedAddress() {
+    return new InetSocketAddress(this.getIpAddr(), this.getXferPort());
+  }
 }
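
The new getResolvedAddress() helper eagerly resolves the datanode's transfer address. A minimal, self-contained sketch of the same construction (the IP and port below are made-up values, not taken from this commit):

import java.net.InetSocketAddress;

public class ResolvedAddressDemo {
  public static void main(String[] args) {
    // Stand-ins for DatanodeID.getIpAddr() and DatanodeID.getXferPort().
    String ipAddr = "10.0.0.12";   // hypothetical datanode IP
    int xferPort = 50010;          // hypothetical transfer port

    // Same construction as getResolvedAddress() above; the constructor
    // resolves the host part eagerly (a no-op for literal IP addresses).
    InetSocketAddress addr = new InetSocketAddress(ipAddr, xferPort);
    System.out.println(addr + " unresolved? " + addr.isUnresolved());
  }
}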

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ae983149/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index 7aa8c46..8d2ab3d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -396,12 +396,12 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final String  DFS_METRICS_PERCENTILES_INTERVALS_KEY = "dfs.metrics.percentiles.intervals";
   public static final String  DFS_DATANODE_HOST_NAME_KEY =
       HdfsClientConfigKeys.DeprecatedKeys.DFS_DATANODE_HOST_NAME_KEY;
-  public static final String  DFS_NAMENODE_HOSTS_KEY = "dfs.namenode.hosts";
-  public static final String  DFS_NAMENODE_HOSTS_EXCLUDE_KEY = "dfs.namenode.hosts.exclude";
   public static final String  DFS_NAMENODE_CHECKPOINT_DIR_KEY =
       HdfsClientConfigKeys.DeprecatedKeys.DFS_NAMENODE_CHECKPOINT_DIR_KEY;
   public static final String  DFS_NAMENODE_CHECKPOINT_EDITS_DIR_KEY =
       HdfsClientConfigKeys.DeprecatedKeys.DFS_NAMENODE_CHECKPOINT_EDITS_DIR_KEY;
+  public static final String  DFS_NAMENODE_HOSTS_PROVIDER_CLASSNAME_KEY =
+      "dfs.namenode.hosts.provider.classname";
   public static final String  DFS_HOSTS = "dfs.hosts";
   public static final String  DFS_HOSTS_EXCLUDE = "dfs.hosts.exclude";
   public static final String  DFS_NAMENODE_AUDIT_LOGGERS_KEY = "dfs.namenode.audit.loggers";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ae983149/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
--
diff --git 

[1/2] hadoop git commit: HDFS-9005. Provide support for upgrade domain script. (Ming Ma via Lei Xu)

2016-03-25 Thread lei
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 5e823d839 -> ae9831498


HDFS-9005. Provide support for upgrade domain script. (Ming Ma via Lei Xu)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/49364861
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/49364861
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/49364861

Branch: refs/heads/branch-2
Commit: 493648611de79ba7a36fe39c0494c63b4d60546f
Parents: 5e823d8
Author: Lei Xu 
Authored: Fri Mar 25 17:09:12 2016 -0700
Committer: Lei Xu 
Committed: Fri Mar 25 17:12:44 2016 -0700

--
 .../hdfs/protocol/DatanodeAdminProperties.java  | 100 
 .../hdfs/util/CombinedHostsFileReader.java  |  76 ++
 .../hdfs/util/CombinedHostsFileWriter.java  |  69 +
 .../CombinedHostFileManager.java| 250 +++
 .../blockmanagement/HostConfigManager.java  |  80 ++
 .../hdfs/server/blockmanagement/HostSet.java| 114 +
 .../TestUpgradeDomainBlockPlacementPolicy.java  | 169 +
 .../hadoop/hdfs/util/HostsFileWriter.java   | 122 +
 .../hdfs/util/TestCombinedHostsFileReader.java  |  79 ++
 .../src/test/resources/dfs.hosts.json   |   5 +
 10 files changed, 1064 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/49364861/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeAdminProperties.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeAdminProperties.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeAdminProperties.java
new file mode 100644
index 0000000..9f7b983
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeAdminProperties.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.protocol;
+
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates;
+
+/**
+ * The class describes the configured admin properties for a datanode.
+ *
+ * It is the static configuration specified by administrators via dfsadmin
+ * command; different from the runtime state. CombinedHostFileManager uses
+ * the class to deserialize the configurations from json-based file format.
+ *
+ * To decommission a node, use AdminStates.DECOMMISSIONED.
+ */
+public class DatanodeAdminProperties {
+  private String hostName;
+  private int port;
+  private String upgradeDomain;
+  private AdminStates adminState = AdminStates.NORMAL;
+
+  /**
+   * Return the host name of the datanode.
+   * @return the host name of the datanode.
+   */
+  public String getHostName() {
+    return hostName;
+  }
+
+  /**
+   * Set the host name of the datanode.
+   * @param hostName the host name of the datanode.
+   */
+  public void setHostName(final String hostName) {
+    this.hostName = hostName;
+  }
+
+  /**
+   * Get the port number of the datanode.
+   * @return the port number of the datanode.
+   */
+  public int getPort() {
+    return port;
+  }
+
+  /**
+   * Set the port number of the datanode.
+   * @param port the port number of the datanode.
+   */
+  public void setPort(final int port) {
+    this.port = port;
+  }
+
+  /**
+   * Get the upgrade domain of the datanode.
+   * @return the upgrade domain of the datanode.
+   */
+  public String getUpgradeDomain() {
+    return upgradeDomain;
+  }
+
+  /**
+   * Set the upgrade domain of the datanode.
+   * @param upgradeDomain the upgrade domain of the datanode.
+   */
+  public void setUpgradeDomain(final String upgradeDomain) {
+    this.upgradeDomain = upgradeDomain;
+  }
+
+  /**
+   * Get the admin state of the datanode.
+   * @return the admin state of the datanode.
+   */
+  public AdminStates getAdminState() {
+    return adminState;
+  }
+
+  /**
+   * Set the admin state of the datanode.
+   * @param adminState the admin state of the datanode.
+   */
+  public void setAdminState(final AdminStates adminState) {
+    this.adminState = adminState;
+  }
+}
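
Since the full bean is shown above, a short usage sketch grounded in just those setters — one entry of the JSON hosts file that CombinedHostFileManager deserializes (host name, port, and upgrade domain are made-up values):

import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates;

public class DecommissionEntryDemo {
  public static void main(String[] args) {
    DatanodeAdminProperties dn = new DatanodeAdminProperties();
    dn.setHostName("dn1.example.com");             // hypothetical host
    dn.setPort(50010);                             // hypothetical xfer port
    dn.setUpgradeDomain("ud0");                    // hypothetical upgrade domain
    dn.setAdminState(AdminStates.DECOMMISSIONED);  // per the class javadoc
    System.out.println(dn.getHostName() + ":" + dn.getPort()
        + " -> " + dn.getAdminState());
  }
}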

hadoop git commit: Add missing files from HDFS-9005. (lei)

2016-03-25 Thread lei
Repository: hadoop
Updated Branches:
  refs/heads/trunk 4fcfea71b -> fde8ac5d8


Add missing files from HDFS-9005. (lei)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fde8ac5d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fde8ac5d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fde8ac5d

Branch: refs/heads/trunk
Commit: fde8ac5d8514f5146f438f8d0794116aaef20416
Parents: 4fcfea7
Author: Lei Xu 
Authored: Fri Mar 25 17:10:31 2016 -0700
Committer: Lei Xu 
Committed: Fri Mar 25 17:11:35 2016 -0700

--
 .../apache/hadoop/hdfs/protocol/DatanodeID.java |   6 +
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   |   4 +-
 .../server/blockmanagement/DatanodeManager.java |  59 +---
 .../server/blockmanagement/HostFileManager.java | 147 +--
 .../src/main/resources/hdfs-default.xml |  14 ++
 .../src/site/markdown/HdfsUserGuide.md  |   6 +-
 .../apache/hadoop/hdfs/TestDatanodeReport.java  |  57 ++-
 .../TestBlocksWithNotEnoughRacks.java   |  34 ++---
 .../blockmanagement/TestDatanodeManager.java|   8 +-
 .../blockmanagement/TestHostFileManager.java|  10 +-
 .../hdfs/server/namenode/TestHostsFiles.java|  70 +
 .../server/namenode/TestNameNodeMXBean.java |  25 ++--
 .../hdfs/server/namenode/TestStartup.java   |  54 +--
 13 files changed, 227 insertions(+), 267 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fde8ac5d/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
index 5fd845d..af720c7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
@@ -23,6 +23,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 
 import com.google.common.annotations.VisibleForTesting;
 
+import java.net.InetSocketAddress;
+
 /**
  * This class represents the primary identifier for a Datanode.
  * Datanodes are identified by how they can be contacted (hostname
@@ -274,4 +276,8 @@ public class DatanodeID implements Comparable<DatanodeID> {
   public int compareTo(DatanodeID that) {
     return getXferAddr().compareTo(that.getXferAddr());
   }
+
+  public InetSocketAddress getResolvedAddress() {
+    return new InetSocketAddress(this.getIpAddr(), this.getXferPort());
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fde8ac5d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index 844fec2..9424662 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -415,12 +415,12 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final String  DFS_METRICS_PERCENTILES_INTERVALS_KEY = "dfs.metrics.percentiles.intervals";
   public static final String  DFS_DATANODE_HOST_NAME_KEY =
       HdfsClientConfigKeys.DeprecatedKeys.DFS_DATANODE_HOST_NAME_KEY;
-  public static final String  DFS_NAMENODE_HOSTS_KEY = "dfs.namenode.hosts";
-  public static final String  DFS_NAMENODE_HOSTS_EXCLUDE_KEY = "dfs.namenode.hosts.exclude";
   public static final String  DFS_NAMENODE_CHECKPOINT_DIR_KEY =
       HdfsClientConfigKeys.DeprecatedKeys.DFS_NAMENODE_CHECKPOINT_DIR_KEY;
   public static final String  DFS_NAMENODE_CHECKPOINT_EDITS_DIR_KEY =
       HdfsClientConfigKeys.DeprecatedKeys.DFS_NAMENODE_CHECKPOINT_EDITS_DIR_KEY;
+  public static final String  DFS_NAMENODE_HOSTS_PROVIDER_CLASSNAME_KEY =
+      "dfs.namenode.hosts.provider.classname";
   public static final String  DFS_HOSTS = "dfs.hosts";
   public static final String  DFS_HOSTS_EXCLUDE = "dfs.hosts.exclude";
   public static final String  DFS_NAMENODE_AUDIT_LOGGERS_KEY = "dfs.namenode.audit.loggers";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fde8ac5d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
--
diff --git 

hadoop git commit: HDFS-9005. Provide support for upgrade domain script. (Ming Ma via Lei Xu)

2016-03-25 Thread lei
Repository: hadoop
Updated Branches:
  refs/heads/trunk 9a09200a1 -> 4fcfea71b


HDFS-9005. Provide support for upgrade domain script. (Ming Ma via Lei Xu)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4fcfea71
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4fcfea71
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4fcfea71

Branch: refs/heads/trunk
Commit: 4fcfea71bfb16295f3a661e712d66351a1edc55e
Parents: 9a09200
Author: Lei Xu 
Authored: Fri Mar 25 17:09:12 2016 -0700
Committer: Lei Xu 
Committed: Fri Mar 25 17:09:12 2016 -0700

--
 .../hdfs/protocol/DatanodeAdminProperties.java  | 100 
 .../hdfs/util/CombinedHostsFileReader.java  |  76 ++
 .../hdfs/util/CombinedHostsFileWriter.java  |  69 +
 .../CombinedHostFileManager.java| 250 +++
 .../blockmanagement/HostConfigManager.java  |  80 ++
 .../hdfs/server/blockmanagement/HostSet.java| 114 +
 .../TestUpgradeDomainBlockPlacementPolicy.java  | 169 +
 .../hadoop/hdfs/util/HostsFileWriter.java   | 122 +
 .../hdfs/util/TestCombinedHostsFileReader.java  |  79 ++
 .../src/test/resources/dfs.hosts.json   |   5 +
 10 files changed, 1064 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4fcfea71/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeAdminProperties.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeAdminProperties.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeAdminProperties.java
new file mode 100644
index 0000000..9f7b983
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeAdminProperties.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.protocol;
+
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates;
+
+/**
+ * The class describes the configured admin properties for a datanode.
+ *
+ * It is the static configuration specified by administrators via dfsadmin
+ * command; different from the runtime state. CombinedHostFileManager uses
+ * the class to deserialize the configurations from json-based file format.
+ *
+ * To decommission a node, use AdminStates.DECOMMISSIONED.
+ */
+public class DatanodeAdminProperties {
+  private String hostName;
+  private int port;
+  private String upgradeDomain;
+  private AdminStates adminState = AdminStates.NORMAL;
+
+  /**
+   * Return the host name of the datanode.
+   * @return the host name of the datanode.
+   */
+  public String getHostName() {
+    return hostName;
+  }
+
+  /**
+   * Set the host name of the datanode.
+   * @param hostName the host name of the datanode.
+   */
+  public void setHostName(final String hostName) {
+    this.hostName = hostName;
+  }
+
+  /**
+   * Get the port number of the datanode.
+   * @return the port number of the datanode.
+   */
+  public int getPort() {
+    return port;
+  }
+
+  /**
+   * Set the port number of the datanode.
+   * @param port the port number of the datanode.
+   */
+  public void setPort(final int port) {
+    this.port = port;
+  }
+
+  /**
+   * Get the upgrade domain of the datanode.
+   * @return the upgrade domain of the datanode.
+   */
+  public String getUpgradeDomain() {
+    return upgradeDomain;
+  }
+
+  /**
+   * Set the upgrade domain of the datanode.
+   * @param upgradeDomain the upgrade domain of the datanode.
+   */
+  public void setUpgradeDomain(final String upgradeDomain) {
+    this.upgradeDomain = upgradeDomain;
+  }
+
+  /**
+   * Get the admin state of the datanode.
+   * @return the admin state of the datanode.
+   */
+  public AdminStates getAdminState() {
+    return adminState;
+  }
+
+  /**
+   * Set the admin state of the datanode.
+   * @param adminState the admin state of the datanode.
+   */
+  public void setAdminState(final AdminStates adminState) {
+    this.adminState = adminState;
+  }
+}

hadoop git commit: HADOOP-12958. PhantomReference for filesystem statistics can trigger OOM. Contributed by Sangjin Lee (cherry picked from commit 9a09200a1f5f752e266d4fb8e0c808073080bde8)

2016-03-25 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.6 160a8c74d -> c092a8c1b


HADOOP-12958. PhantomReference for filesystem statistics can trigger OOM. 
Contributed by Sangjin Lee
(cherry picked from commit 9a09200a1f5f752e266d4fb8e0c808073080bde8)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c092a8c1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c092a8c1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c092a8c1

Branch: refs/heads/branch-2.6
Commit: c092a8c1bb071fb708e20d9a66ea2fd6195f18e7
Parents: 160a8c7
Author: Jason Lowe 
Authored: Fri Mar 25 23:05:29 2016 +
Committer: Jason Lowe 
Committed: Fri Mar 25 23:05:29 2016 +

--
 hadoop-common-project/hadoop-common/CHANGES.txt  | 3 +++
 .../src/main/java/org/apache/hadoop/fs/FileSystem.java   | 8 
 2 files changed, 7 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c092a8c1/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 5782165..39f14ca 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -27,6 +27,9 @@ Release 2.6.5 - UNRELEASED
     HADOOP-12589. Fix intermittent test failure of TestCopyPreserveFlag
     (iwasakims)
 
+    HADOOP-12958. PhantomReference for filesystem statistics can trigger OOM
+    (Sangjin Lee via jlowe)
+
 Release 2.6.4 - 2016-02-11
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c092a8c1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
--
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 64efe66..ffee0fd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.fs;
 import java.io.Closeable;
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.lang.ref.PhantomReference;
+import java.lang.ref.WeakReference;
 import java.lang.ref.ReferenceQueue;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -2889,7 +2889,7 @@ public abstract class FileSystem extends Configured implements Closeable {
 
 /**
  * Set of all thread-local data areas.  Protected by the Statistics lock.
- * The references to the statistics data are kept using phantom references
+ * The references to the statistics data are kept using weak references
  * to the associated threads. Proper clean-up is performed by the cleaner
  * thread when the threads are garbage collected.
  */
@@ -2942,11 +2942,11 @@ public abstract class FileSystem extends Configured implements Closeable {
 }
 
 /**
- * A phantom reference to a thread that also includes the data associated
+ * A weak reference to a thread that also includes the data associated
  * with that thread. On the thread being garbage collected, it is enqueued
  * to the reference queue for clean-up.
  */
-private class StatisticsDataReference extends PhantomReference<Thread> {
+private class StatisticsDataReference extends WeakReference<Thread> {
   private final StatisticsData data;
 
   public StatisticsDataReference(StatisticsData data, Thread thread) {


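For readers skimming the patch: weak references are cleared and enqueued as soon as the referent becomes unreachable, while phantom references are enqueued only after finalization, which can let the per-thread statistics data pile up — presumably the OOM the JIRA describes. A self-contained sketch of the reference-queue clean-up pattern the patch relies on (not Hadoop code; GC timing makes the demo best-effort):

import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;

public class WeakRefCleanupDemo {
  // Mirrors StatisticsDataReference: a weak reference to a thread that
  // carries extra data and is enqueued once the thread is collected.
  static class DataReference extends WeakReference<Thread> {
    final String data;
    DataReference(Thread t, String data, ReferenceQueue<Thread> q) {
      super(t, q);
      this.data = data;
    }
  }

  public static void main(String[] args) throws InterruptedException {
    ReferenceQueue<Thread> queue = new ReferenceQueue<>();
    Thread worker = new Thread(() -> { });
    worker.start();
    DataReference ref = new DataReference(worker, "per-thread stats", queue);
    worker.join();
    worker = null;   // drop the only strong reference to the thread
    System.gc();     // a hint only; hence the timed remove() below
    Reference<? extends Thread> dead = queue.remove(2000);
    System.out.println(dead == null
        ? "not collected yet"
        : "clean up: " + ((DataReference) dead).data);
  }
}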

hadoop git commit: HADOOP-12958. PhantomReference for filesystem statistics can trigger OOM. Contributed by Sangjin Lee (cherry picked from commit 9a09200a1f5f752e266d4fb8e0c808073080bde8)

2016-03-25 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.7 a112c0138 -> 02e81caa2


HADOOP-12958. PhantomReference for filesystem statistics can trigger OOM. 
Contributed by Sangjin Lee
(cherry picked from commit 9a09200a1f5f752e266d4fb8e0c808073080bde8)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/02e81caa
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/02e81caa
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/02e81caa

Branch: refs/heads/branch-2.7
Commit: 02e81caa216ea45741c611c2b016e69e9c208704
Parents: a112c01
Author: Jason Lowe 
Authored: Fri Mar 25 23:03:39 2016 +
Committer: Jason Lowe 
Committed: Fri Mar 25 23:03:39 2016 +

--
 hadoop-common-project/hadoop-common/CHANGES.txt  | 3 +++
 .../src/main/java/org/apache/hadoop/fs/FileSystem.java   | 8 
 2 files changed, 7 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/02e81caa/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index a2381fa..c51378a 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -76,6 +76,9 @@ Release 2.7.3 - UNRELEASED
 
     HADOOP-12688. Fix deadlinks in Compatibility.md. (Gabor Liptak via aajisaka)
 
+    HADOOP-12958. PhantomReference for filesystem statistics can trigger OOM
+    (Sangjin Lee via jlowe)
+
 Release 2.7.2 - 2016-01-25
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/02e81caa/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
--
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 35c28b7..fac3c40 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.fs;
 import java.io.Closeable;
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.lang.ref.PhantomReference;
+import java.lang.ref.WeakReference;
 import java.lang.ref.ReferenceQueue;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -2950,7 +2950,7 @@ public abstract class FileSystem extends Configured implements Closeable {
 
 /**
  * Set of all thread-local data areas.  Protected by the Statistics lock.
- * The references to the statistics data are kept using phantom references
+ * The references to the statistics data are kept using weak references
  * to the associated threads. Proper clean-up is performed by the cleaner
  * thread when the threads are garbage collected.
  */
@@ -3003,11 +3003,11 @@ public abstract class FileSystem extends Configured implements Closeable {
 }
 
 /**
- * A phantom reference to a thread that also includes the data associated
+ * A weak reference to a thread that also includes the data associated
  * with that thread. On the thread being garbage collected, it is enqueued
  * to the reference queue for clean-up.
  */
-private class StatisticsDataReference extends PhantomReference<Thread> {
+private class StatisticsDataReference extends WeakReference<Thread> {
   private final StatisticsData data;
 
   public StatisticsDataReference(StatisticsData data, Thread thread) {



hadoop git commit: HADOOP-12958. PhantomReference for filesystem statistics can trigger OOM. Contributed by Sangjin Lee (cherry picked from commit 9a09200a1f5f752e266d4fb8e0c808073080bde8)

2016-03-25 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 1a194f875 -> 25b476e93


HADOOP-12958. PhantomReference for filesystem statistics can trigger OOM. 
Contributed by Sangjin Lee
(cherry picked from commit 9a09200a1f5f752e266d4fb8e0c808073080bde8)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/25b476e9
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/25b476e9
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/25b476e9

Branch: refs/heads/branch-2.8
Commit: 25b476e93b718dad9a7bb68c700624e15d5f1e5c
Parents: 1a194f8
Author: Jason Lowe 
Authored: Fri Mar 25 22:56:49 2016 +
Committer: Jason Lowe 
Committed: Fri Mar 25 23:00:18 2016 +

--
 .../src/main/java/org/apache/hadoop/fs/FileSystem.java   | 8 
 1 file changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/25b476e9/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
--
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index fa2816e..200e893 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.fs;
 import java.io.Closeable;
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.lang.ref.PhantomReference;
+import java.lang.ref.WeakReference;
 import java.lang.ref.ReferenceQueue;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -3070,7 +3070,7 @@ public abstract class FileSystem extends Configured implements Closeable {
 
 /**
  * Set of all thread-local data areas.  Protected by the Statistics lock.
- * The references to the statistics data are kept using phantom references
+ * The references to the statistics data are kept using weak references
  * to the associated threads. Proper clean-up is performed by the cleaner
  * thread when the threads are garbage collected.
  */
@@ -3123,11 +3123,11 @@ public abstract class FileSystem extends Configured implements Closeable {
 }
 
 /**
- * A phantom reference to a thread that also includes the data associated
+ * A weak reference to a thread that also includes the data associated
  * with that thread. On the thread being garbage collected, it is enqueued
  * to the reference queue for clean-up.
  */
-private class StatisticsDataReference extends PhantomReference<Thread> {
+private class StatisticsDataReference extends WeakReference<Thread> {
   private final StatisticsData data;
 
   public StatisticsDataReference(StatisticsData data, Thread thread) {



hadoop git commit: HADOOP-12958. PhantomReference for filesystem statistics can trigger OOM. Contributed by Sangjin Lee (cherry picked from commit 9a09200a1f5f752e266d4fb8e0c808073080bde8)

2016-03-25 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 c58599acb -> 5e823d839


HADOOP-12958. PhantomReference for filesystem statistics can trigger OOM. 
Contributed by Sangjin Lee
(cherry picked from commit 9a09200a1f5f752e266d4fb8e0c808073080bde8)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5e823d83
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5e823d83
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5e823d83

Branch: refs/heads/branch-2
Commit: 5e823d839db643efdaf228eb7da742aa728bed99
Parents: c58599a
Author: Jason Lowe 
Authored: Fri Mar 25 22:56:49 2016 +
Committer: Jason Lowe 
Committed: Fri Mar 25 22:59:14 2016 +

--
 .../src/main/java/org/apache/hadoop/fs/FileSystem.java   | 8 
 1 file changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5e823d83/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
--
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index ca5f3a3..e3f8ea8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.fs;
 import java.io.Closeable;
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.lang.ref.PhantomReference;
+import java.lang.ref.WeakReference;
 import java.lang.ref.ReferenceQueue;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -3119,7 +3119,7 @@ public abstract class FileSystem extends Configured implements Closeable {
 
 /**
  * Set of all thread-local data areas.  Protected by the Statistics lock.
- * The references to the statistics data are kept using phantom references
+ * The references to the statistics data are kept using weak references
  * to the associated threads. Proper clean-up is performed by the cleaner
  * thread when the threads are garbage collected.
  */
@@ -3172,11 +3172,11 @@ public abstract class FileSystem extends Configured implements Closeable {
 }
 
 /**
- * A phantom reference to a thread that also includes the data associated
+ * A weak reference to a thread that also includes the data associated
  * with that thread. On the thread being garbage collected, it is enqueued
  * to the reference queue for clean-up.
  */
-private class StatisticsDataReference extends PhantomReference<Thread> {
+private class StatisticsDataReference extends WeakReference<Thread> {
   private final StatisticsData data;
 
   public StatisticsDataReference(StatisticsData data, Thread thread) {



hadoop git commit: HADOOP-12958. PhantomReference for filesystem statistics can trigger OOM. Contributed by Sangjin Lee

2016-03-25 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/trunk 00bebb7e5 -> 9a09200a1


HADOOP-12958. PhantomReference for filesystem statistics can trigger OOM. 
Contributed by Sangjin Lee


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9a09200a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9a09200a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9a09200a

Branch: refs/heads/trunk
Commit: 9a09200a1f5f752e266d4fb8e0c808073080bde8
Parents: 00bebb7
Author: Jason Lowe 
Authored: Fri Mar 25 22:56:49 2016 +
Committer: Jason Lowe 
Committed: Fri Mar 25 22:58:04 2016 +

--
 .../src/main/java/org/apache/hadoop/fs/FileSystem.java   | 8 
 1 file changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9a09200a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
--
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index a8a5c6d..e0ea7ac 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.fs;
 import java.io.Closeable;
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.lang.ref.PhantomReference;
+import java.lang.ref.WeakReference;
 import java.lang.ref.ReferenceQueue;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -3133,7 +3133,7 @@ public abstract class FileSystem extends Configured implements Closeable {
 
 /**
  * Set of all thread-local data areas.  Protected by the Statistics lock.
- * The references to the statistics data are kept using phantom references
+ * The references to the statistics data are kept using weak references
  * to the associated threads. Proper clean-up is performed by the cleaner
  * thread when the threads are garbage collected.
  */
@@ -3186,11 +3186,11 @@ public abstract class FileSystem extends Configured implements Closeable {
 }
 
 /**
- * A phantom reference to a thread that also includes the data associated
+ * A weak reference to a thread that also includes the data associated
  * with that thread. On the thread being garbage collected, it is enqueued
  * to the reference queue for clean-up.
  */
-private class StatisticsDataReference extends PhantomReference<Thread> {
+private class StatisticsDataReference extends WeakReference<Thread> {
   private final StatisticsData data;
 
   public StatisticsDataReference(StatisticsData data, Thread thread) {



hadoop git commit: YARN-4823. Refactor the nested reservation id field in listReservation to simple string field. (subru via asuresh)

2016-03-25 Thread asuresh
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 d2c33ba61 -> 1a194f875


YARN-4823. Refactor the nested reservation id field in listReservation to 
simple string field. (subru via asuresh)

(cherry picked from commit 00bebb7e58ba6899904e1619d151aa1b2f5b6acd)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1a194f87
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1a194f87
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1a194f87

Branch: refs/heads/branch-2.8
Commit: 1a194f8751cec01df8d9d36949802c3c6caad0f1
Parents: d2c33ba
Author: Arun Suresh 
Authored: Fri Mar 25 15:54:38 2016 -0700
Committer: Arun Suresh 
Committed: Fri Mar 25 15:57:11 2016 -0700

--
 .../webapp/dao/ReservationIdInfo.java   | 64 
 .../webapp/dao/ReservationInfo.java |  7 +--
 .../webapp/TestRMWebServicesReservation.java| 15 ++---
 .../src/site/markdown/ResourceManagerRest.md| 27 +++--
 4 files changed, 16 insertions(+), 97 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a194f87/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationIdInfo.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationIdInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationIdInfo.java
deleted file mode 100644
index 3a2596a..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationIdInfo.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.resourcemanager.webapp.dao;
-
-import org.apache.hadoop.yarn.api.records.ReservationId;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-/**
- * Simple class that represent a reservation ID.
- */
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class ReservationIdInfo {
-  @XmlElement(name = "cluster-timestamp")
-  private long clusterTimestamp;
-
-  @XmlElement(name = "reservation-id")
-  private long reservationId;
-
-  public ReservationIdInfo() {
-    this.clusterTimestamp = 0;
-    this.reservationId = 0;
-  }
-
-  public ReservationIdInfo(ReservationId reservationId) {
-    this.clusterTimestamp = reservationId.getClusterTimestamp();
-    this.reservationId = reservationId.getId();
-  }
-
-  public long getClusterTimestamp() {
-    return this.clusterTimestamp;
-  }
-
-  public void setClusterTimestamp(long newClusterTimestamp) {
-    this.clusterTimestamp = newClusterTimestamp;
-  }
-
-  public long getReservationId() {
-    return this.reservationId;
-  }
-
-  public void setReservationId(long newReservationId) {
-    this.reservationId = newReservationId;
-  }
-}

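The nested {cluster-timestamp, reservation-id} object above is replaced by a plain string in the REST response. The sketch below shows the flattened form this presumably takes; the "reservation_" layout is an assumption based on ReservationId.toString(), not something visible in this diff:

public class ReservationIdStringDemo {
  public static void main(String[] args) {
    long clusterTimestamp = 1458947678000L;  // made-up cluster timestamp
    long id = 42L;                           // made-up reservation counter
    // Assumed string form: reservation_<clusterTimestamp>_<zero-padded id>
    String flat = String.format("reservation_%d_%04d", clusterTimestamp, id);
    System.out.println(flat);  // reservation_1458947678000_0042
  }
}
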
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a194f87/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationInfo.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationInfo.java
 

hadoop git commit: YARN-4823. Refactor the nested reservation id field in listReservation to simple string field. (subru via asuresh)

2016-03-25 Thread asuresh
Repository: hadoop
Updated Branches:
  refs/heads/trunk d4df7849a -> 00bebb7e5


YARN-4823. Refactor the nested reservation id field in listReservation to 
simple string field. (subru via asuresh)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/00bebb7e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/00bebb7e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/00bebb7e

Branch: refs/heads/trunk
Commit: 00bebb7e58ba6899904e1619d151aa1b2f5b6acd
Parents: d4df784
Author: Arun Suresh 
Authored: Fri Mar 25 15:54:38 2016 -0700
Committer: Arun Suresh 
Committed: Fri Mar 25 15:54:38 2016 -0700

--
 .../webapp/dao/ReservationIdInfo.java   | 64 
 .../webapp/dao/ReservationInfo.java |  7 +--
 .../webapp/TestRMWebServicesReservation.java| 15 ++---
 .../src/site/markdown/ResourceManagerRest.md| 27 +++--
 4 files changed, 16 insertions(+), 97 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/00bebb7e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationIdInfo.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationIdInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationIdInfo.java
deleted file mode 100644
index 3a2596a..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationIdInfo.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.resourcemanager.webapp.dao;
-
-import org.apache.hadoop.yarn.api.records.ReservationId;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
-
-/**
- * Simple class that represent a reservation ID.
- */
-@XmlRootElement
-@XmlAccessorType(XmlAccessType.FIELD)
-public class ReservationIdInfo {
-  @XmlElement(name = "cluster-timestamp")
-  private long clusterTimestamp;
-
-  @XmlElement(name = "reservation-id")
-  private long reservationId;
-
-  public ReservationIdInfo() {
-    this.clusterTimestamp = 0;
-    this.reservationId = 0;
-  }
-
-  public ReservationIdInfo(ReservationId reservationId) {
-    this.clusterTimestamp = reservationId.getClusterTimestamp();
-    this.reservationId = reservationId.getId();
-  }
-
-  public long getClusterTimestamp() {
-    return this.clusterTimestamp;
-  }
-
-  public void setClusterTimestamp(long newClusterTimestamp) {
-    this.clusterTimestamp = newClusterTimestamp;
-  }
-
-  public long getReservationId() {
-    return this.reservationId;
-  }
-
-  public void setReservationId(long newReservationId) {
-    this.reservationId = newReservationId;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/00bebb7e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationInfo.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationInfo.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationInfo.java
index 1a31a8b..8b532ad 100644
--- 

hadoop git commit: HADOOP-12962. KMS key names are incorrectly encoded when creating key. Contributed by Xiao Chen.

2016-03-25 Thread wang
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 e06446be7 -> d2c33ba61


HADOOP-12962. KMS key names are incorrectly encoded when creating key. 
Contributed by Xiao Chen.

(cherry picked from commit d4df7849a5caf749403bd89d29652f69c9c3f5a8)
(cherry picked from commit c58599acba369fd18f42e4c0c32094440787b2cc)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d2c33ba6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d2c33ba6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d2c33ba6

Branch: refs/heads/branch-2.8
Commit: d2c33ba61e55470c7657a4ab20fd9c44d86b0a45
Parents: e06446b
Author: Andrew Wang 
Authored: Fri Mar 25 15:28:53 2016 -0700
Committer: Andrew Wang 
Committed: Fri Mar 25 15:29:09 2016 -0700

--
 .../hadoop/crypto/key/kms/server/KMS.java   | 14 +++
 .../hadoop/crypto/key/kms/server/TestKMS.java   | 42 
 2 files changed, 49 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d2c33ba6/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
--
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
index 43b07fe..f069fca 100644
--- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
+++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
@@ -41,10 +41,10 @@ import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriBuilder;
 
 import java.io.IOException;
 import java.net.URI;
-import java.net.URISyntaxException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.LinkedList;
@@ -89,9 +89,9 @@ public class KMS {
         keyVersion.getVersionName(), null);
   }
 
-  private static URI getKeyURI(String name) throws URISyntaxException {
-    return new URI(KMSRESTConstants.SERVICE_VERSION + "/" +
-        KMSRESTConstants.KEY_RESOURCE + "/" + name);
+  private static URI getKeyURI(String domain, String keyName) {
+    return UriBuilder.fromPath("{a}/{b}/{c}")
+        .build(domain, KMSRESTConstants.KEY_RESOURCE, keyName);
   }
 
   @POST
@@ -151,9 +151,9 @@ public class KMS {
     String requestURL = KMSMDCFilter.getURL();
     int idx = requestURL.lastIndexOf(KMSRESTConstants.KEYS_RESOURCE);
     requestURL = requestURL.substring(0, idx);
-    String keyURL = requestURL + KMSRESTConstants.KEY_RESOURCE + "/" + name;
-    return Response.created(getKeyURI(name)).type(MediaType.APPLICATION_JSON).
-        header("Location", keyURL).entity(json).build();
+    return Response.created(getKeyURI(KMSRESTConstants.SERVICE_VERSION, name))
+        .type(MediaType.APPLICATION_JSON)
+        .header("Location", getKeyURI(requestURL, name)).entity(json).build();
   }
 
   @DELETE

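The point of the UriBuilder switch: template parameters are percent-encoded, while the removed string concatenation passed the raw key name through. A small sketch (the key name is a made-up example; running it needs a JAX-RS API jar on the classpath):

import java.net.URI;
import javax.ws.rs.core.UriBuilder;

public class KeyUriEncodingDemo {
  public static void main(String[] args) {
    // A key name with characters that are illegal in a raw URI path.
    String keyName = "key %1";
    URI uri = UriBuilder.fromPath("{a}/{b}/{c}")
        .build("v1", "key", keyName);   // each value is percent-encoded
    System.out.println(uri);  // expected: v1/key/key%20%251
  }
}
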
http://git-wip-us.apache.org/repos/asf/hadoop/blob/d2c33ba6/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
--
diff --git a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
index 9b75ee1..8094ae2 100644
--- a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
+++ b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
@@ -39,11 +39,15 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.security.auth.kerberos.KerberosPrincipal;
 import javax.security.auth.login.AppConfigurationEntry;
@@ -69,12 +73,14 @@ import java.util.UUID;
 import java.util.concurrent.Callable;
 
 public class TestKMS {
+  private static final Logger LOG = LoggerFactory.getLogger(TestKMS.class);
 
   @Before
   public void cleanUp() {
     // resetting kerberos security
     Configuration conf = new Configuration();

hadoop git commit: HADOOP-12962. KMS key names are incorrectly encoded when creating key. Contributed by Xiao Chen.

2016-03-25 Thread wang
Repository: hadoop
Updated Branches:
  refs/heads/trunk e8fc81f9c -> d4df7849a


HADOOP-12962. KMS key names are incorrectly encoded when creating key. 
Contributed by Xiao Chen.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d4df7849
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d4df7849
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d4df7849

Branch: refs/heads/trunk
Commit: d4df7849a5caf749403bd89d29652f69c9c3f5a8
Parents: e8fc81f
Author: Andrew Wang 
Authored: Fri Mar 25 15:28:53 2016 -0700
Committer: Andrew Wang 
Committed: Fri Mar 25 15:28:53 2016 -0700

--
 .../hadoop/crypto/key/kms/server/KMS.java   | 14 +++
 .../hadoop/crypto/key/kms/server/TestKMS.java   | 42 
 2 files changed, 49 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4df7849/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
--
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
index 43b07fe..f069fca 100644
--- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
+++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
@@ -41,10 +41,10 @@ import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriBuilder;
 
 import java.io.IOException;
 import java.net.URI;
-import java.net.URISyntaxException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.LinkedList;
@@ -89,9 +89,9 @@ public class KMS {
         keyVersion.getVersionName(), null);
   }
 
-  private static URI getKeyURI(String name) throws URISyntaxException {
-    return new URI(KMSRESTConstants.SERVICE_VERSION + "/" +
-        KMSRESTConstants.KEY_RESOURCE + "/" + name);
+  private static URI getKeyURI(String domain, String keyName) {
+    return UriBuilder.fromPath("{a}/{b}/{c}")
+        .build(domain, KMSRESTConstants.KEY_RESOURCE, keyName);
   }
 
   @POST
@@ -151,9 +151,9 @@ public class KMS {
     String requestURL = KMSMDCFilter.getURL();
     int idx = requestURL.lastIndexOf(KMSRESTConstants.KEYS_RESOURCE);
     requestURL = requestURL.substring(0, idx);
-    String keyURL = requestURL + KMSRESTConstants.KEY_RESOURCE + "/" + name;
-    return Response.created(getKeyURI(name)).type(MediaType.APPLICATION_JSON).
-        header("Location", keyURL).entity(json).build();
+    return Response.created(getKeyURI(KMSRESTConstants.SERVICE_VERSION, name))
+        .type(MediaType.APPLICATION_JSON)
+        .header("Location", getKeyURI(requestURL, name)).entity(json).build();
   }
 
   @DELETE

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d4df7849/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
--
diff --git a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
index 9b75ee1..8094ae2 100644
--- a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
+++ b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
@@ -39,11 +39,15 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.security.auth.kerberos.KerberosPrincipal;
 import javax.security.auth.login.AppConfigurationEntry;
@@ -69,12 +73,14 @@ import java.util.UUID;
 import java.util.concurrent.Callable;
 
 public class TestKMS {
+  private static final Logger LOG = LoggerFactory.getLogger(TestKMS.class);
 
   @Before
   public void cleanUp() {
     // resetting kerberos security
     Configuration conf = new Configuration();
     UserGroupInformation.setConfiguration(conf);
+    GenericTestUtils.setLogLevel(LOG, Level.INFO);
   }
 
   public static File getTestDir() throws 

hadoop git commit: HADOOP-12962. KMS key names are incorrectly encoded when creating key. Contributed by Xiao Chen.

2016-03-25 Thread wang
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 3cea00a29 -> c58599acb


HADOOP-12962. KMS key names are incorrectly encoded when creating key. 
Contributed by Xiao Chen.

(cherry picked from commit d4df7849a5caf749403bd89d29652f69c9c3f5a8)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c58599ac
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c58599ac
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c58599ac

Branch: refs/heads/branch-2
Commit: c58599acba369fd18f42e4c0c32094440787b2cc
Parents: 3cea00a
Author: Andrew Wang 
Authored: Fri Mar 25 15:28:53 2016 -0700
Committer: Andrew Wang 
Committed: Fri Mar 25 15:29:06 2016 -0700

--
 .../hadoop/crypto/key/kms/server/KMS.java   | 14 +++
 .../hadoop/crypto/key/kms/server/TestKMS.java   | 42 
 2 files changed, 49 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c58599ac/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
--
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
index 43b07fe..f069fca 100644
--- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
+++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
@@ -41,10 +41,10 @@ import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
+import javax.ws.rs.core.UriBuilder;
 
 import java.io.IOException;
 import java.net.URI;
-import java.net.URISyntaxException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.LinkedList;
@@ -89,9 +89,9 @@ public class KMS {
         keyVersion.getVersionName(), null);
   }
 
-  private static URI getKeyURI(String name) throws URISyntaxException {
-    return new URI(KMSRESTConstants.SERVICE_VERSION + "/" +
-        KMSRESTConstants.KEY_RESOURCE + "/" + name);
+  private static URI getKeyURI(String domain, String keyName) {
+    return UriBuilder.fromPath("{a}/{b}/{c}")
+        .build(domain, KMSRESTConstants.KEY_RESOURCE, keyName);
   }
 
   @POST
@@ -151,9 +151,9 @@ public class KMS {
     String requestURL = KMSMDCFilter.getURL();
     int idx = requestURL.lastIndexOf(KMSRESTConstants.KEYS_RESOURCE);
     requestURL = requestURL.substring(0, idx);
-    String keyURL = requestURL + KMSRESTConstants.KEY_RESOURCE + "/" + name;
-    return Response.created(getKeyURI(name)).type(MediaType.APPLICATION_JSON).
-        header("Location", keyURL).entity(json).build();
+    return Response.created(getKeyURI(KMSRESTConstants.SERVICE_VERSION, name))
+        .type(MediaType.APPLICATION_JSON)
+        .header("Location", getKeyURI(requestURL, name)).entity(json).build();
   }
 
   @DELETE

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c58599ac/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
--
diff --git 
a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
 
b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
index 9b75ee1..8094ae2 100644
--- 
a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
+++ 
b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
@@ -39,11 +39,15 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.log4j.Level;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.security.auth.kerberos.KerberosPrincipal;
 import javax.security.auth.login.AppConfigurationEntry;
@@ -69,12 +73,14 @@ import java.util.UUID;
 import java.util.concurrent.Callable;
 
 public class TestKMS {
+  private static final Logger LOG = LoggerFactory.getLogger(TestKMS.class);
 
   @Before
   public void cleanUp() {
 // resetting kerberos security
 Configuration conf = new Configuration();
 UserGroupInformation.setConfiguration(conf);
+

hadoop git commit: YARN-4814. ATS 1.5 timelineclient impl call flush after every event write. Contributed by Xuan Gong (cherry picked from commit e8fc81f9c812b0c167411de7f1789a9a433a0d57)

2016-03-25 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 416ea158c -> e06446be7


YARN-4814. ATS 1.5 timelineclient impl call flush after every event write. 
Contributed by Xuan Gong
(cherry picked from commit e8fc81f9c812b0c167411de7f1789a9a433a0d57)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e06446be
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e06446be
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e06446be

Branch: refs/heads/branch-2.8
Commit: e06446be74c01f4e1713cf10de37883346c8ed43
Parents: 416ea15
Author: Jason Lowe 
Authored: Fri Mar 25 20:15:49 2016 +
Committer: Jason Lowe 
Committed: Fri Mar 25 20:20:01 2016 +

--
 .../hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java  | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e06446be/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
index 35d9970..3fa8691 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
@@ -379,6 +379,8 @@ public class FileSystemTimelineWriter extends TimelineWriter{
   this.stream = createLogFileStream(fs, logPath);
   this.jsonGenerator = new JsonFactory().createJsonGenerator(stream);
   this.jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
+  this.jsonGenerator.configure(
+  JsonGenerator.Feature.FLUSH_PASSED_TO_STREAM, false);
   this.lastModifiedTime = Time.monotonicNow();
 }
 



hadoop git commit: YARN-4814. ATS 1.5 timelineclient impl call flush after every event write. Contributed by Xuan Gong (cherry picked from commit e8fc81f9c812b0c167411de7f1789a9a433a0d57)

2016-03-25 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 8360d98d6 -> 3cea00a29


YARN-4814. ATS 1.5 timelineclient impl call flush after every event write. 
Contributed by Xuan Gong
(cherry picked from commit e8fc81f9c812b0c167411de7f1789a9a433a0d57)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3cea00a2
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3cea00a2
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3cea00a2

Branch: refs/heads/branch-2
Commit: 3cea00a29471b82ca020874ea51324c19b5cdd97
Parents: 8360d98
Author: Jason Lowe 
Authored: Fri Mar 25 20:15:49 2016 +
Committer: Jason Lowe 
Committed: Fri Mar 25 20:19:15 2016 +

--
 .../hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java  | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3cea00a2/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
index 35d9970..3fa8691 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
@@ -379,6 +379,8 @@ public class FileSystemTimelineWriter extends TimelineWriter{
   this.stream = createLogFileStream(fs, logPath);
   this.jsonGenerator = new JsonFactory().createJsonGenerator(stream);
   this.jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
+  this.jsonGenerator.configure(
+  JsonGenerator.Feature.FLUSH_PASSED_TO_STREAM, false);
   this.lastModifiedTime = Time.monotonicNow();
 }
 



hadoop git commit: YARN-4814. ATS 1.5 timelineclient impl call flush after every event write. Contributed by Xuan Gong

2016-03-25 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/trunk 3f622a143 -> e8fc81f9c


YARN-4814. ATS 1.5 timelineclient impl call flush after every event write. 
Contributed by Xuan Gong


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e8fc81f9
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e8fc81f9
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e8fc81f9

Branch: refs/heads/trunk
Commit: e8fc81f9c812b0c167411de7f1789a9a433a0d57
Parents: 3f622a1
Author: Jason Lowe 
Authored: Fri Mar 25 20:15:49 2016 +
Committer: Jason Lowe 
Committed: Fri Mar 25 20:15:49 2016 +

--
 .../hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java  | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e8fc81f9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
index 35d9970..3fa8691 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/FileSystemTimelineWriter.java
@@ -379,6 +379,8 @@ public class FileSystemTimelineWriter extends TimelineWriter{
   this.stream = createLogFileStream(fs, logPath);
   this.jsonGenerator = new JsonFactory().createJsonGenerator(stream);
   this.jsonGenerator.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
+  this.jsonGenerator.configure(
+  JsonGenerator.Feature.FLUSH_PASSED_TO_STREAM, false);
   this.lastModifiedTime = Time.monotonicNow();
 }
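The two added lines are the whole fix: with FLUSH_PASSED_TO_STREAM disabled, JsonGenerator.flush() still drains the generator's internal buffer into the target stream but no longer calls flush() on the stream itself, so writing one event per call stops forcing a filesystem sync every time. A small sketch of the feature's effect, written against the Jackson 2 package names (Hadoop at this point still used the org.codehaus.jackson 1.x equivalents of the same API):

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;

import java.io.FilterOutputStream;
import java.io.IOException;

public class FlushDemo {
  public static void main(String[] args) throws IOException {
    // Wrap System.out just to observe flush() calls reaching the target.
    FilterOutputStream out = new FilterOutputStream(System.out) {
      @Override
      public void flush() throws IOException {
        System.err.println("target stream flushed");
        super.flush();
      }
    };

    JsonGenerator gen = new JsonFactory().createGenerator(out);
    gen.setPrettyPrinter(new MinimalPrettyPrinter("\n"));
    // The YARN-4814 change: generator flushes no longer propagate.
    gen.configure(JsonGenerator.Feature.FLUSH_PASSED_TO_STREAM, false);

    gen.writeStartObject();
    gen.writeStringField("event", "APP_STARTED");  // invented payload
    gen.writeEndObject();
    gen.flush();  // drains the JSON buffer; "target stream flushed" is not printed
    gen.close();  // close() still flushes and closes the target stream
  }
}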
 



hadoop git commit: MAPREDUCE-6535. TaskID default constructor results in NPE on toString(). Contributed by Daniel Templeton (cherry picked from commit 3f622a143c5fb15fee7e5dded99e4a4136f19810)

2016-03-25 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 0dc145362 -> 8360d98d6


MAPREDUCE-6535. TaskID default constructor results in NPE on toString(). 
Contributed by Daniel Templeton
(cherry picked from commit 3f622a143c5fb15fee7e5dded99e4a4136f19810)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8360d98d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8360d98d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8360d98d

Branch: refs/heads/branch-2
Commit: 8360d98d6aafae7e97cdd634bc01bfc52bf6352b
Parents: 0dc1453
Author: Jason Lowe 
Authored: Fri Mar 25 20:04:32 2016 +
Committer: Jason Lowe 
Committed: Fri Mar 25 20:07:56 2016 +

--
 .../org/apache/hadoop/mapreduce/TaskID.java |  57 ++-
 .../org/apache/hadoop/mapreduce/TestTaskID.java | 461 +++
 2 files changed, 508 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8360d98d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
index b9817dd..3ddfbe9 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
@@ -63,6 +63,7 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
   public static final String TASK_ID_REGEX = TASK + "_(\\d+)_(\\d+)_" +
   CharTaskTypeMaps.allTaskTypes + "_(\\d+)";
   public static final Pattern taskIdPattern = Pattern.compile(TASK_ID_REGEX);
+
   static {
 idFormat.setGroupingUsed(false);
 idFormat.setMinimumIntegerDigits(6);
@@ -72,7 +73,8 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
   private TaskType type;
   
   /**
-   * Constructs a TaskID object from given {@link JobID}.  
+   * Constructs a TaskID object from given {@link JobID}.
+   *
* @param jobId JobID that this tip belongs to 
* @param type the {@link TaskType} of the task 
* @param id the tip number
@@ -88,6 +90,7 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
   
   /**
* Constructs a TaskInProgressId object from given parts.
+   *
* @param jtIdentifier jobTracker identifier
* @param jobId job number 
* @param type the TaskType 
@@ -99,6 +102,7 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
 
   /**
* Constructs a TaskID object from given {@link JobID}.
+   *
* @param jobId JobID that this tip belongs to
* @param isMap whether the tip is a map
* @param id the tip number
@@ -110,6 +114,7 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
 
   /**
* Constructs a TaskInProgressId object from given parts.
+   *
* @param jtIdentifier jobTracker identifier
* @param jobId job number
* @param isMap whether the tip is a map
@@ -120,23 +125,37 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
 this(new JobID(jtIdentifier, jobId), isMap, id);
   }
   
+  /**
+   * Default constructor for Writable. Sets the task type to
+   * {@link TaskType#REDUCE}, the ID to 0, and the job ID to an empty job ID.
+   */
   public TaskID() { 
-jobId = new JobID();
+this(new JobID(), TaskType.REDUCE, 0);
   }
   
-  /** Returns the {@link JobID} object that this tip belongs to */
+  /**
+   * Returns the {@link JobID} object that this tip belongs to.
+   *
+   * @return the JobID object
+   */
   public JobID getJobID() {
 return jobId;
   }
   
-  /**Returns whether this TaskID is a map ID */
+  /**
+   * Returns whether this TaskID is a map ID.
+   *
+   * @return whether this TaskID is a map ID
+   */
   @Deprecated
   public boolean isMap() {
 return type == TaskType.MAP;
   }
 
   /**
-   * Get the type of the task
+   * Get the type of the task.
+   *
+   * @return the type of the task
*/
   public TaskType getTaskType() {
 return type;
@@ -151,8 +170,14 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
 return this.type == that.type && this.jobId.equals(that.jobId);
   }
 
-  /**Compare TaskInProgressIds by first jobIds, then by tip numbers. Reduces are
-   * defined as greater then maps.*/
+  /**
+   * Compare TaskInProgressIds by first jobIds, then by tip numbers.
+   * Reducers are defined as greater than mappers.
+   

hadoop git commit: MAPREDUCE-6535. TaskID default constructor results in NPE on toString(). Contributed by Daniel Templeton

2016-03-25 Thread jlowe
Repository: hadoop
Updated Branches:
  refs/heads/trunk 2c268cc93 -> 3f622a143


MAPREDUCE-6535. TaskID default constructor results in NPE on toString(). 
Contributed by Daniel Templeton


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3f622a14
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3f622a14
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3f622a14

Branch: refs/heads/trunk
Commit: 3f622a143c5fb15fee7e5dded99e4a4136f19810
Parents: 2c268cc
Author: Jason Lowe 
Authored: Fri Mar 25 20:04:32 2016 +
Committer: Jason Lowe 
Committed: Fri Mar 25 20:04:32 2016 +

--
 .../org/apache/hadoop/mapreduce/TaskID.java |  57 ++-
 .../org/apache/hadoop/mapreduce/TestTaskID.java | 461 +++
 2 files changed, 508 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3f622a14/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
index b9817dd..3ddfbe9 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskID.java
@@ -63,6 +63,7 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
   public static final String TASK_ID_REGEX = TASK + "_(\\d+)_(\\d+)_" +
   CharTaskTypeMaps.allTaskTypes + "_(\\d+)";
   public static final Pattern taskIdPattern = Pattern.compile(TASK_ID_REGEX);
+
   static {
 idFormat.setGroupingUsed(false);
 idFormat.setMinimumIntegerDigits(6);
@@ -72,7 +73,8 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
   private TaskType type;
   
   /**
-   * Constructs a TaskID object from given {@link JobID}.  
+   * Constructs a TaskID object from given {@link JobID}.
+   *
* @param jobId JobID that this tip belongs to 
* @param type the {@link TaskType} of the task 
* @param id the tip number
@@ -88,6 +90,7 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
   
   /**
* Constructs a TaskInProgressId object from given parts.
+   *
* @param jtIdentifier jobTracker identifier
* @param jobId job number 
* @param type the TaskType 
@@ -99,6 +102,7 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
 
   /**
* Constructs a TaskID object from given {@link JobID}.
+   *
* @param jobId JobID that this tip belongs to
* @param isMap whether the tip is a map
* @param id the tip number
@@ -110,6 +114,7 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
 
   /**
* Constructs a TaskInProgressId object from given parts.
+   *
* @param jtIdentifier jobTracker identifier
* @param jobId job number
* @param isMap whether the tip is a map
@@ -120,23 +125,37 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
 this(new JobID(jtIdentifier, jobId), isMap, id);
   }
   
+  /**
+   * Default constructor for Writable. Sets the task type to
+   * {@link TaskType#REDUCE}, the ID to 0, and the job ID to an empty job ID.
+   */
   public TaskID() { 
-jobId = new JobID();
+this(new JobID(), TaskType.REDUCE, 0);
   }
   
-  /** Returns the {@link JobID} object that this tip belongs to */
+  /**
+   * Returns the {@link JobID} object that this tip belongs to.
+   *
+   * @return the JobID object
+   */
   public JobID getJobID() {
 return jobId;
   }
   
-  /**Returns whether this TaskID is a map ID */
+  /**
+   * Returns whether this TaskID is a map ID.
+   *
+   * @return whether this TaskID is a map ID
+   */
   @Deprecated
   public boolean isMap() {
 return type == TaskType.MAP;
   }
 
   /**
-   * Get the type of the task
+   * Get the type of the task.
+   *
+   * @return the type of the task
*/
   public TaskType getTaskType() {
 return type;
@@ -151,8 +170,14 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
 return this.type == that.type && this.jobId.equals(that.jobId);
   }
 
-  /**Compare TaskInProgressIds by first jobIds, then by tip numbers. Reduces are
-   * defined as greater then maps.*/
+  /**
+   * Compare TaskInProgressIds by first jobIds, then by tip numbers.
+   * Reducers are defined as greater than mappers.
+   *
+   * @param o the TaskID against which to compare
+   * @return 0 if 
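The practical effect of the constructor change above: code that builds a TaskID through the no-arg Writable constructor now gets a fully initialized ID instead of one whose type field is null. A hedged sketch of the before/after behaviour (the printed output is illustrative):

import org.apache.hadoop.mapreduce.TaskID;

public class TaskIdDemo {
  public static void main(String[] args) {
    // Before the patch, `type` stayed null, so toString() threw a
    // NullPointerException while mapping the task type to its char code.
    // After the patch, the default constructor delegates to
    // TaskID(new JobID(), TaskType.REDUCE, 0), so formatting succeeds:
    TaskID id = new TaskID();
    System.out.println(id);  // e.g. task__0000_r_000000 (illustrative)
  }
}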

[Hadoop Wiki] Update of "HowToRelease" by XiaoChen

2016-03-25 Thread Apache Wiki
Dear Wiki user,

You have subscribed to a wiki page or wiki category on "Hadoop Wiki" for change 
notification.

The "HowToRelease" page has been changed by XiaoChen:
https://wiki.apache.org/hadoop/HowToRelease?action=diff&rev1=81&rev2=82

Comment:
Add 1 step at the beginning of 'Creating the release candidate', according to 
HADOOP-12768.

  = Creating the release candidate (X.Y.Z-RC) =
  These steps need to be performed to create the ''N''th RC for X.Y.Z, where 
''N'' starts from 0.
  
+  1. Check whether the release year for the Web UI footer has been updated (the 
property {{{}}} in {{{hadoop-project/pom.xml}}}). If not, create a JIRA to 
update the property value to the right year, and propagate the fix from trunk 
to all necessary branches. Considering the voting time needed before publishing, 
it is better to use the year of (current time + voting time) here, to be 
consistent with the publishing time.
   1. Run mvn rat-check and fix any errors
   {{{
  mvn apache-rat:check


hadoop git commit: HDFS-10202. ozone : Add key commands to CLI. Contributed by Anu Engineer.

2016-03-25 Thread cnauroth
Repository: hadoop
Updated Branches:
  refs/heads/HDFS-7240 58cdb715f -> 2dc48b7f1


HDFS-10202. ozone : Add key commands to CLI. Contributed by Anu Engineer.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2dc48b7f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2dc48b7f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2dc48b7f

Branch: refs/heads/HDFS-7240
Commit: 2dc48b7f1e90f9b457bd8e80cb33b923ec34f459
Parents: 58cdb71
Author: Chris Nauroth 
Authored: Fri Mar 25 09:50:58 2016 -0700
Committer: Chris Nauroth 
Committed: Fri Mar 25 09:50:58 2016 -0700

--
 .../src/main/conf/log4j.properties  |  24 
 .../apache/hadoop/ozone/web/ozShell/Shell.java  | 107 ++--
 .../web/ozShell/bucket/InfoBucketHandler.java   |   2 +-
 .../web/ozShell/bucket/UpdateBucketHandler.java |  98 +++
 .../web/ozShell/keys/DeleteKeyHandler.java  |  95 +++
 .../ozone/web/ozShell/keys/GetKeyHandler.java   | 122 +++
 .../ozone/web/ozShell/keys/InfoKeyHandler.java  |  93 ++
 .../ozone/web/ozShell/keys/ListKeyHandler.java  | 105 
 .../ozone/web/ozShell/keys/PutKeyHandler.java   | 110 +
 9 files changed, 748 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2dc48b7f/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
--
diff --git a/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties 
b/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
index dc7e705..e1cc73a 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
+++ b/hadoop-common-project/hadoop-common/src/main/conf/log4j.properties
@@ -308,3 +308,27 @@ log4j.appender.EWMA=org.apache.hadoop.yarn.util.Log4jWarningErrorMetricsAppender
 log4j.appender.EWMA.cleanupInterval=${yarn.ewma.cleanupInterval}
 log4j.appender.EWMA.messageAgeLimitSeconds=${yarn.ewma.messageAgeLimitSeconds}
 log4j.appender.EWMA.maxUniqueMessages=${yarn.ewma.maxUniqueMessages}
+
+#
+# Add a logger for ozone that is separate from the Datanode.
+#
+log4j.logger.org.apache.hadoop.ozone=DEBUG,OZONE,FILE
+
+# Do not log into datanode logs. Remove this line to have single log.
+log4j.additivity.org.apache.hadoop.ozone=false
+
+# For development purposes, log both to console and log file.
+log4j.appender.OZONE=org.apache.log4j.ConsoleAppender
+log4j.appender.OZONE.Threshold=info
+log4j.appender.OZONE.layout=org.apache.log4j.PatternLayout
+log4j.appender.OZONE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p \
+ %X{component} %X{function} %X{resource} %X{user} %X{request} - %m%n
+
+# Real ozone logger that writes to ozone.log
+log4j.appender.FILE=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.FILE.File=${hadoop.log.dir}/ozone.log
+log4j.appender.FILE.Threshold=debug
+log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
+log4j.appender.FILE.layout.ConversionPattern=%d{ISO8601} [%t] %-5p \
+(%F:%L) %X{function} %X{resource} %X{user} %X{request} - \
+%m%n
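For context, the appender wiring above routes every logger in the org.apache.hadoop.ozone namespace to the console (at INFO and above) and to a daily-rolling ozone.log (at DEBUG and above), while additivity=false keeps those messages out of the datanode's own log. A hypothetical caller-side sketch of what that buys:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class OzoneLoggingDemo {
  // Any category under org.apache.hadoop.ozone picks up the OZONE and
  // FILE appenders configured above.
  private static final Log LOG =
      LogFactory.getLog("org.apache.hadoop.ozone.web.ozShell.Shell");

  public static void main(String[] args) {
    LOG.debug("lands in ozone.log only (console threshold is info)");
    LOG.info("lands in both ozone.log and the console");
  }
}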

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2dc48b7f/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/ozShell/Shell.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/ozShell/Shell.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/ozShell/Shell.java
index 6fc42a0..7a8dd4d 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/ozShell/Shell.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/ozone/web/ozShell/Shell.java
@@ -23,9 +23,16 @@ import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.ozone.web.exceptions.OzoneException;
+import org.apache.hadoop.ozone.web.ozShell.bucket.UpdateBucketHandler;
+import org.apache.hadoop.ozone.web.ozShell.keys.DeleteKeyHandler;
+import org.apache.hadoop.ozone.web.ozShell.keys.GetKeyHandler;
+import org.apache.hadoop.ozone.web.ozShell.keys.InfoKeyHandler;
+import org.apache.hadoop.ozone.web.ozShell.keys.ListKeyHandler;
+import org.apache.hadoop.ozone.web.ozShell.keys.PutKeyHandler;
 import org.apache.hadoop.ozone.web.ozShell.volume.CreateVolumeHandler;
 import org.apache.hadoop.ozone.web.ozShell.volume.DeleteVolumeHandler;
 import 

[3/3] hadoop git commit: MAPREDUCE-6543. Migrate MR client test cases part 2. Contributed by Dustin Cote.

2016-03-25 Thread aajisaka
MAPREDUCE-6543. Migrate MR client test cases part 2. Contributed by Dustin Cote.

(cherry picked from commit 2c268cc9365851f5b02d967d13c8c0cbca850a86)

Conflicts:

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java

(cherry picked from commit 0dc145362642f6e5f9def8f50775b5c16599f6e6)

Conflicts:

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/416ea158
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/416ea158
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/416ea158

Branch: refs/heads/branch-2.8
Commit: 416ea158c2aa197b4a997ed420842154a1294b1d
Parents: 945e3cd
Author: Akira Ajisaka 
Authored: Fri Mar 25 15:17:27 2016 +0900
Committer: Akira Ajisaka 
Committed: Fri Mar 25 15:33:01 2016 +0900

--
 .../java/org/apache/hadoop/fs/DFSCIOTest.java   |  8 +--
 .../org/apache/hadoop/fs/TestFileSystem.java| 18 +--
 .../java/org/apache/hadoop/fs/TestJHLA.java |  6 +--
 .../io/TestSequenceFileMergeProgress.java   | 12 +++--
 .../hadoop/mapred/ClusterMapReduceTestCase.java | 14 +++---
 .../apache/hadoop/mapred/TestAuditLogger.java   |  9 ++--
 .../apache/hadoop/mapred/TestBadRecords.java|  8 ++-
 .../mapred/TestClusterMapReduceTestCase.java| 10 
 .../org/apache/hadoop/mapred/TestCollect.java   | 10 ++--
 .../mapred/TestCommandLineJobSubmission.java|  9 ++--
 .../hadoop/mapred/TestFieldSelection.java   |  6 ++-
 .../mapred/TestFileInputFormatPathFilter.java   | 19 ---
 .../apache/hadoop/mapred/TestGetSplitHosts.java |  7 +--
 .../apache/hadoop/mapred/TestIFileStreams.java  | 13 ++---
 .../org/apache/hadoop/mapred/TestInputPath.java |  7 +--
 .../hadoop/mapred/TestJavaSerialization.java| 10 ++--
 .../org/apache/hadoop/mapred/TestJobName.java   |  6 +++
 .../hadoop/mapred/TestJobSysDirWithDFS.java | 10 ++--
 .../mapred/TestKeyValueTextInputFormat.java | 15 +++---
 .../apache/hadoop/mapred/TestLazyOutput.java|  7 +--
 .../hadoop/mapred/TestMRCJCFileInputFormat.java | 32 +++-
 .../mapred/TestMRCJCFileOutputCommitter.java| 28 +++
 .../apache/hadoop/mapred/TestMapProgress.java   |  9 ++--
 .../org/apache/hadoop/mapred/TestMerge.java |  7 +--
 .../apache/hadoop/mapred/TestMiniMRBringup.java |  6 ++-
 .../hadoop/mapred/TestMiniMRDFSCaching.java | 14 +++---
 .../hadoop/mapred/TestMultiFileInputFormat.java | 19 +++
 .../hadoop/mapred/TestMultiFileSplit.java   | 10 ++--
 .../hadoop/mapred/TestMultipleLevelCaching.java | 12 +++--
 .../mapred/TestMultipleTextOutputFormat.java| 23 -
 .../apache/hadoop/mapred/TestReduceFetch.java   | 10 ++--
 .../mapred/TestReduceFetchFromPartialMem.java   | 46 +++--
 .../apache/hadoop/mapred/TestReduceTask.java| 18 ---
 .../TestSequenceFileAsBinaryInputFormat.java| 19 ---
 .../TestSequenceFileAsBinaryOutputFormat.java   | 31 
 .../TestSequenceFileAsTextInputFormat.java  | 33 +++--
 .../mapred/TestSequenceFileInputFilter.java | 32 ++--
 .../mapred/TestSequenceFileInputFormat.java | 32 ++--
 .../apache/hadoop/mapred/TestSortedRanges.java  | 19 ---
 .../TestSpecialCharactersInOutputPath.java  | 21 
 .../hadoop/mapred/TestStatisticsCollector.java  | 10 ++--
 .../hadoop/mapred/TestUserDefinedCounters.java  | 24 -
 .../hadoop/mapred/TestWritableJobConf.java  | 20 +---
 .../apache/hadoop/mapred/TestYARNRunner.java|  8 +--
 .../hadoop/mapred/join/TestDatamerge.java   | 42 +---
 .../hadoop/mapred/join/TestTupleWritable.java   | 24 ++---
 .../TestWrappedRecordReaderClassloader.java |  7 +--
 .../mapred/lib/TestDelegatingInputFormat.java   |  9 ++--
 .../hadoop/mapred/lib/TestLineInputFormat.java  |  7 +--
 .../hadoop/mapred/lib/TestMultipleInputs.java   |  2 -
 .../mapred/lib/aggregate/TestAggregates.java|  7 +--
 .../mapred/lib/db/TestConstructQuery.java   | 16 +++---
 .../apache/hadoop/mapred/pipes/TestPipes.java   |  9 ++--
 .../hadoop/mapreduce/TestLocalRunner.java   | 34 +++--
 .../hadoop/mapreduce/TestMRJobClient.java   | 40 ---
 .../mapreduce/TestMapReduceLazyOutput.java  |  9 ++--
 .../hadoop/mapreduce/TestValueIterReset.java|  8 +--
 .../TestYarnClientProtocolProvider.java |  5 +-
 .../lib/aggregate/TestMapReduceAggregates.java  | 23 -
 .../mapreduce/lib/db/TestDBOutputFormat.java  

[1/3] hadoop git commit: MAPREDUCE-6543. Migrate MR client test cases part 2. Contributed by Dustin Cote.

2016-03-25 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 945e3cd55 -> 416ea158c


http://git-wip-us.apache.org/repos/asf/hadoop/blob/416ea158/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java
index d35941f..093da26 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestJoinTupleWritable.java
@@ -24,8 +24,6 @@ import java.io.DataOutputStream;
 import java.util.Arrays;
 import java.util.Random;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -33,8 +31,13 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
-public class TestJoinTupleWritable extends TestCase {
+public class TestJoinTupleWritable {
 
   private TupleWritable makeTuple(Writable[] writs) {
 Writable[] sub1 = { writs[1], writs[2] };
@@ -97,6 +100,7 @@ public class TestJoinTupleWritable extends TestCase {
 return i;
   }
 
+  @Test
   public void testIterable() throws Exception {
 Random r = new Random();
 Writable[] writs = {
@@ -118,6 +122,7 @@ public class TestJoinTupleWritable extends TestCase {
 verifIter(writs, t, 0);
   }
 
+  @Test
   public void testNestedIterable() throws Exception {
 Random r = new Random();
 Writable[] writs = {
@@ -136,6 +141,7 @@ public class TestJoinTupleWritable extends TestCase {
 assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
   }
 
+  @Test
   public void testWritable() throws Exception {
 Random r = new Random();
 Writable[] writs = {
@@ -159,6 +165,7 @@ public class TestJoinTupleWritable extends TestCase {
 assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
   }
 
+  @Test
   public void testWideWritable() throws Exception {
 Writable[] manyWrits = makeRandomWritables(131);
 
@@ -178,7 +185,8 @@ public class TestJoinTupleWritable extends TestCase {
 assertEquals("All tuple data has not been read from the stream", 
   -1, in.read());
   }
-  
+
+  @Test
   public void testWideWritable2() throws Exception {
 Writable[] manyWrits = makeRandomWritables(71);
 
@@ -201,6 +209,7 @@ public class TestJoinTupleWritable extends TestCase {
* Tests a tuple writable with more than 64 values and the values set written
* spread far apart.
*/
+  @Test
   public void testSparseWideWritable() throws Exception {
 Writable[] manyWrits = makeRandomWritables(131);
 
@@ -220,7 +229,8 @@ public class TestJoinTupleWritable extends TestCase {
 assertEquals("All tuple data has not been read from the stream", 
   -1, in.read());
   }
-  
+
+  @Test
   public void testWideTuple() throws Exception {
 Text emptyText = new Text("Should be empty");
 Writable[] values = new Writable[64];
@@ -241,7 +251,8 @@ public class TestJoinTupleWritable extends TestCase {
   }
 }
   }
-  
+
+  @Test
   public void testWideTuple2() throws Exception {
 Text emptyText = new Text("Should be empty");
 Writable[] values = new Writable[64];
@@ -266,6 +277,7 @@ public class TestJoinTupleWritable extends TestCase {
   /**
* Tests that we can write more than 64 values.
*/
+  @Test
   public void testWideTupleBoundary() throws Exception {
 Text emptyText = new Text("Should not be set written");
 Writable[] values = new Writable[65];

http://git-wip-us.apache.org/repos/asf/hadoop/blob/416ea158/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
 

[1/3] hadoop git commit: MAPREDUCE-6543. Migrate MR client test cases part 2. Contributed by Dustin Cote.

2016-03-25 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/trunk 2e1d0ff4e -> 2c268cc93


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c268cc9/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
index 36cf187..680e246 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
@@ -17,23 +17,32 @@
  */
 package org.apache.hadoop.mapreduce.lib.join;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil.Fake_RR;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
 
-public class TestWrappedRRClassloader extends TestCase {
+public class TestWrappedRRClassloader {
   /**
* Tests the class loader set by 
* {@link Configuration#setClassLoader(ClassLoader)}
* is inherited by any {@link WrappedRecordReader}s created by
* {@link CompositeRecordReader}
*/
+  @Test
   public void testClassLoader() throws Exception {
 Configuration conf = new Configuration();
 Fake_ClassLoader classLoader = new Fake_ClassLoader();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c268cc9/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
index 2e40f72..5a8aeda 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
@@ -18,12 +18,17 @@
 
 package org.apache.hadoop.mapreduce.lib.output;
 
-import java.io.IOException;
-import java.util.Random;
-
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -38,16 +43,22 @@ import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
+import org.junit.Test;
 
-import junit.framework.TestCase;
-import org.apache.commons.logging.*;
+import java.io.IOException;
+import java.util.Random;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
-public class 

[2/3] hadoop git commit: MAPREDUCE-6543. Migrate MR client test cases part 2. Contributed by Dustin Cote.

2016-03-25 Thread aajisaka
http://git-wip-us.apache.org/repos/asf/hadoop/blob/0dc14536/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
index 575ed53..338e91d 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
@@ -18,22 +18,28 @@
 
 package org.apache.hadoop.mapred;
 
-import java.io.*;
-import java.util.*;
-import junit.framework.TestCase;
-
-import org.apache.commons.logging.*;
-
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.conf.*;
-
-public class TestSequenceFileInputFormat extends TestCase {
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.junit.Test;
+
+import java.util.BitSet;
+import java.util.Random;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
+public class TestSequenceFileInputFormat {
   private static final Log LOG = FileInputFormat.LOG;
 
   private static int MAX_LENGTH = 1;
   private static Configuration conf = new Configuration();
 
+  @Test
   public void testFormat() throws Exception {
 JobConf job = new JobConf(conf);
 FileSystem fs = FileSystem.getLocal(conf);
@@ -110,8 +116,4 @@ public class TestSequenceFileInputFormat extends TestCase {
 
 }
   }
-
-  public static void main(String[] args) throws Exception {
-new TestSequenceFileInputFormat().testFormat();
-  }
 }
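Every hunk in this patch follows the same mechanical JUnit 3 to JUnit 4 migration: drop the junit.framework.TestCase superclass, annotate each test method with @Test, import the assertions statically, and delete the ad-hoc main() runners. A minimal sketch of the pattern (class and method names invented):

import org.junit.Test;

import static org.junit.Assert.assertEquals;

// Was: public class TestExample extends junit.framework.TestCase {
public class TestExample {

  @Test  // was: discovered via the method-name prefix "test"
  public void testAdd() {
    assertEquals(4, 2 + 2);
  }

  // The JUnit 3 style often kept a main() to run one case by hand:
  //   public static void main(String[] args) throws Exception {
  //     new TestExample().testAdd();
  //   }
  // The JUnit 4 runner discovers @Test methods, so main() is removed.
}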

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0dc14536/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java
index ad4d4ce..82d1d2d 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSortedRanges.java
@@ -17,18 +17,20 @@
  */
 package org.apache.hadoop.mapred;
 
-import java.util.Iterator;
-
-import junit.framework.TestCase;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapred.SortedRanges.Range;
+import org.junit.Test;
+
+import java.util.Iterator;
+
+import static org.junit.Assert.assertEquals;
 
-public class TestSortedRanges extends TestCase {
-  private static final Log LOG = 
+public class TestSortedRanges {
+  private static final Log LOG =
 LogFactory.getLog(TestSortedRanges.class);
-  
+
+  @Test
   public void testAdd() {
 SortedRanges sr = new SortedRanges();
 sr.add(new Range(2,9));
@@ -66,7 +68,8 @@ public class TestSortedRanges extends TestCase {
 assertEquals(77, it.next().longValue());
 
   }
-  
+
+  @Test
   public void testRemove() {
 SortedRanges sr = new SortedRanges();
 sr.add(new Range(2,19));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0dc14536/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSpecialCharactersInOutputPath.java
index 426686f..b9e3275 100644
--- 

[2/3] hadoop git commit: MAPREDUCE-6543. Migrate MR client test cases part 2. Contributed by Dustin Cote.

2016-03-25 Thread aajisaka
http://git-wip-us.apache.org/repos/asf/hadoop/blob/416ea158/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
index e50c396..93f21ce 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFilter.java
@@ -18,17 +18,21 @@
 
 package org.apache.hadoop.mapred;
 
-import java.io.*;
-import java.util.*;
-import junit.framework.TestCase;
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.junit.Test;
 
-import org.apache.commons.logging.*;
+import java.io.IOException;
+import java.util.Random;
 
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.conf.*;
+import static org.junit.Assert.assertEquals;
 
-public class TestSequenceFileInputFilter extends TestCase {
+public class TestSequenceFileInputFilter {
   private static final Log LOG = FileInputFormat.LOG;
 
   private static final int MAX_LENGTH = 15000;
@@ -97,7 +101,8 @@ public class TestSequenceFileInputFilter extends TestCase {
 }
 return count;
   }
-  
+
+  @Test
   public void testRegexFilter() throws Exception {
 // set the filter class
 LOG.info("Testing Regex Filter with patter: \\A10*");
@@ -121,6 +126,7 @@ public class TestSequenceFileInputFilter extends TestCase {
 fs.delete(inDir, true);
   }
 
+  @Test
   public void testPercentFilter() throws Exception {
 LOG.info("Testing Percent Filter with frequency: 1000");
 // set the filter class
@@ -147,7 +153,8 @@ public class TestSequenceFileInputFilter extends TestCase {
 // clean up
 fs.delete(inDir, true);
   }
-  
+
+  @Test
   public void testMD5Filter() throws Exception {
 // set the filter class
 LOG.info("Testing MD5 Filter with frequency: 1000");
@@ -168,9 +175,4 @@ public class TestSequenceFileInputFilter extends TestCase {
 // clean up
 fs.delete(inDir, true);
   }
-
-  public static void main(String[] args) throws Exception {
-TestSequenceFileInputFilter filter = new TestSequenceFileInputFilter();
-filter.testRegexFilter();
-  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/416ea158/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
index 575ed53..338e91d 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
@@ -18,22 +18,28 @@
 
 package org.apache.hadoop.mapred;
 
-import java.io.*;
-import java.util.*;
-import junit.framework.TestCase;
-
-import org.apache.commons.logging.*;
-
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.conf.*;
-
-public class TestSequenceFileInputFormat extends TestCase {
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.junit.Test;
+
+import java.util.BitSet;
+import java.util.Random;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+
+public class TestSequenceFileInputFormat {
   private static final Log LOG = FileInputFormat.LOG;
 
   private static int MAX_LENGTH = 1;
   private static Configuration conf = new Configuration();
 
+  @Test
   public void 

[1/3] hadoop git commit: MAPREDUCE-6543. Migrate MR client test cases part 2. Contributed by Dustin Cote.

2016-03-25 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 c722262c7 -> 0dc145362


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0dc14536/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
index 36cf187..680e246 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/join/TestWrappedRRClassloader.java
@@ -17,23 +17,32 @@
  */
 package org.apache.hadoop.mapreduce.lib.join;
 
-import junit.framework.TestCase;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.mapreduce.InputSplit;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.MapReduceTestUtil;
 import org.apache.hadoop.mapreduce.MapReduceTestUtil.Fake_RR;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
 
-public class TestWrappedRRClassloader extends TestCase {
+public class TestWrappedRRClassloader {
   /**
* Tests the class loader set by 
* {@link Configuration#setClassLoader(ClassLoader)}
* is inherited by any {@link WrappedRecordReader}s created by
* {@link CompositeRecordReader}
*/
+  @Test
   public void testClassLoader() throws Exception {
 Configuration conf = new Configuration();
 Fake_ClassLoader classLoader = new Fake_ClassLoader();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0dc14536/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
index 2e40f72..5a8aeda 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/output/TestMRSequenceFileAsBinaryOutputFormat.java
@@ -18,12 +18,17 @@
 
 package org.apache.hadoop.mapreduce.lib.output;
 
-import java.io.IOException;
-import java.util.Random;
-
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BooleanWritable;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
 import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -38,16 +43,22 @@ import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.task.MapContextImpl;
+import org.junit.Test;
 
-import junit.framework.TestCase;
-import org.apache.commons.logging.*;
+import java.io.IOException;
+import java.util.Random;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
-public class 

[3/3] hadoop git commit: MAPREDUCE-6543. Migrate MR client test cases part 2. Contributed by Dustin Cote.

2016-03-25 Thread aajisaka
MAPREDUCE-6543. Migrate MR client test cases part 2. Contributed by Dustin Cote.

(cherry picked from commit 2c268cc9365851f5b02d967d13c8c0cbca850a86)

Conflicts:

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/lib/partition/TestTotalOrderPartitioner.java


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0dc14536
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0dc14536
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0dc14536

Branch: refs/heads/branch-2
Commit: 0dc145362642f6e5f9def8f50775b5c16599f6e6
Parents: c722262
Author: Akira Ajisaka 
Authored: Fri Mar 25 15:17:27 2016 +0900
Committer: Akira Ajisaka 
Committed: Fri Mar 25 15:23:23 2016 +0900

--
 .../java/org/apache/hadoop/fs/DFSCIOTest.java   |  8 +--
 .../org/apache/hadoop/fs/TestFileSystem.java| 18 +--
 .../java/org/apache/hadoop/fs/TestJHLA.java |  6 +--
 .../io/TestSequenceFileMergeProgress.java   | 12 +++--
 .../hadoop/mapred/ClusterMapReduceTestCase.java | 14 +++---
 .../apache/hadoop/mapred/TestAuditLogger.java   |  9 ++--
 .../apache/hadoop/mapred/TestBadRecords.java|  8 ++-
 .../mapred/TestClusterMapReduceTestCase.java| 10 
 .../org/apache/hadoop/mapred/TestCollect.java   | 10 ++--
 .../mapred/TestCommandLineJobSubmission.java|  9 ++--
 .../hadoop/mapred/TestFieldSelection.java   |  6 ++-
 .../mapred/TestFileInputFormatPathFilter.java   | 19 ---
 .../apache/hadoop/mapred/TestGetSplitHosts.java |  7 +--
 .../apache/hadoop/mapred/TestIFileStreams.java  | 13 ++---
 .../org/apache/hadoop/mapred/TestInputPath.java |  7 +--
 .../hadoop/mapred/TestJavaSerialization.java| 10 ++--
 .../org/apache/hadoop/mapred/TestJobName.java   |  6 +++
 .../hadoop/mapred/TestJobSysDirWithDFS.java | 10 ++--
 .../mapred/TestKeyValueTextInputFormat.java | 15 +++---
 .../apache/hadoop/mapred/TestLazyOutput.java|  7 +--
 .../hadoop/mapred/TestMRCJCFileInputFormat.java | 32 +++-
 .../mapred/TestMRCJCFileOutputCommitter.java| 28 +++
 .../apache/hadoop/mapred/TestMapProgress.java   |  9 ++--
 .../org/apache/hadoop/mapred/TestMerge.java |  7 +--
 .../apache/hadoop/mapred/TestMiniMRBringup.java |  6 ++-
 .../hadoop/mapred/TestMiniMRDFSCaching.java | 14 +++---
 .../hadoop/mapred/TestMultiFileInputFormat.java | 19 +++
 .../hadoop/mapred/TestMultiFileSplit.java   | 10 ++--
 .../hadoop/mapred/TestMultipleLevelCaching.java | 12 +++--
 .../mapred/TestMultipleTextOutputFormat.java| 23 -
 .../apache/hadoop/mapred/TestReduceFetch.java   | 10 ++--
 .../mapred/TestReduceFetchFromPartialMem.java   | 46 +++--
 .../apache/hadoop/mapred/TestReduceTask.java| 18 ---
 .../TestSequenceFileAsBinaryInputFormat.java| 19 ---
 .../TestSequenceFileAsBinaryOutputFormat.java   | 31 
 .../TestSequenceFileAsTextInputFormat.java  | 33 +++--
 .../mapred/TestSequenceFileInputFilter.java | 32 ++--
 .../mapred/TestSequenceFileInputFormat.java | 32 ++--
 .../apache/hadoop/mapred/TestSortedRanges.java  | 19 ---
 .../TestSpecialCharactersInOutputPath.java  | 21 
 .../hadoop/mapred/TestStatisticsCollector.java  | 10 ++--
 .../hadoop/mapred/TestUserDefinedCounters.java  | 24 -
 .../hadoop/mapred/TestWritableJobConf.java  | 20 +---
 .../apache/hadoop/mapred/TestYARNRunner.java|  8 +--
 .../hadoop/mapred/join/TestDatamerge.java   | 42 +---
 .../hadoop/mapred/join/TestTupleWritable.java   | 24 ++---
 .../TestWrappedRecordReaderClassloader.java |  7 +--
 .../mapred/lib/TestDelegatingInputFormat.java   |  9 ++--
 .../hadoop/mapred/lib/TestLineInputFormat.java  |  7 +--
 .../hadoop/mapred/lib/TestMultipleInputs.java   |  2 -
 .../mapred/lib/aggregate/TestAggregates.java|  7 +--
 .../mapred/lib/db/TestConstructQuery.java   | 16 +++---
 .../apache/hadoop/mapred/pipes/TestPipes.java   |  9 ++--
 .../hadoop/mapreduce/TestLocalRunner.java   | 34 +++--
 .../hadoop/mapreduce/TestMRJobClient.java   | 48 +-
 .../mapreduce/TestMapReduceLazyOutput.java  |  9 ++--
 .../hadoop/mapreduce/TestValueIterReset.java|  8 +--
 .../TestYarnClientProtocolProvider.java |  5 +-
 .../lib/aggregate/TestMapReduceAggregates.java  | 23 -
 .../mapreduce/lib/db/TestDBOutputFormat.java| 17 ---
 .../mapreduce/lib/db/TestIntegerSplitter.java   | 15 --
 .../mapreduce/lib/db/TestTextSplitter.java  | 18 +--
 .../lib/fieldsel/TestMRFieldSelection.java  | 20 
 

[3/3] hadoop git commit: MAPREDUCE-6543. Migrate MR client test cases part 2. Contributed by Dustin Cote.

2016-03-25 Thread aajisaka
MAPREDUCE-6543. Migrate MR client test cases part 2. Contributed by Dustin Cote.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2c268cc9
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2c268cc9
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2c268cc9

Branch: refs/heads/trunk
Commit: 2c268cc9365851f5b02d967d13c8c0cbca850a86
Parents: 2e1d0ff
Author: Akira Ajisaka 
Authored: Fri Mar 25 15:17:27 2016 +0900
Committer: Akira Ajisaka 
Committed: Fri Mar 25 15:17:27 2016 +0900
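
Judging from the subject, the bulk of the +/- churn in the listing below is mechanical: this patch continues moving the MR client test cases from JUnit 3 to JUnit 4 conventions, i.e. dropping the TestCase superclass, annotating test methods, and switching to static Assert imports. A minimal sketch of that pattern, assuming the standard JUnit 4 API (the class and fixture below are illustrative, not taken from the patch):

    import static org.junit.Assert.assertEquals;

    import org.junit.Before;
    import org.junit.Test;

    // JUnit 3 subclassed junit.framework.TestCase and discovered tests by
    // the "test" method-name prefix; JUnit 4 discovers any public void
    // method annotated with @Test, with no required superclass.
    public class TestMigrationExample {

      private String jobName;

      // Replaces the inherited setUp() override from JUnit 3.
      @Before
      public void setUp() {
        jobName = "wordcount";  // hypothetical fixture value
      }

      @Test
      public void testJobNameIsPreserved() {
        assertEquals("wordcount", jobName);
      }
    }

The annotations decouple test discovery from naming and inheritance, which is the usual motivation for such migrations; the runtime behavior of the tests themselves is unchanged.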

--
 .../java/org/apache/hadoop/fs/DFSCIOTest.java   |  8 +--
 .../org/apache/hadoop/fs/TestFileSystem.java| 18 +--
 .../java/org/apache/hadoop/fs/TestJHLA.java |  6 +--
 .../io/TestSequenceFileMergeProgress.java   | 12 +++--
 .../hadoop/mapred/ClusterMapReduceTestCase.java | 14 +++---
 .../apache/hadoop/mapred/TestAuditLogger.java   |  9 ++--
 .../apache/hadoop/mapred/TestBadRecords.java|  8 ++-
 .../mapred/TestClusterMapReduceTestCase.java| 10 
 .../org/apache/hadoop/mapred/TestCollect.java   | 10 ++--
 .../mapred/TestCommandLineJobSubmission.java|  9 ++--
 .../hadoop/mapred/TestFieldSelection.java   |  6 ++-
 .../mapred/TestFileInputFormatPathFilter.java   | 19 ---
 .../apache/hadoop/mapred/TestGetSplitHosts.java |  7 +--
 .../apache/hadoop/mapred/TestIFileStreams.java  | 13 ++---
 .../org/apache/hadoop/mapred/TestInputPath.java |  7 +--
 .../hadoop/mapred/TestJavaSerialization.java| 10 ++--
 .../org/apache/hadoop/mapred/TestJobName.java   |  6 +++
 .../hadoop/mapred/TestJobSysDirWithDFS.java | 10 ++--
 .../mapred/TestKeyValueTextInputFormat.java | 15 +++---
 .../apache/hadoop/mapred/TestLazyOutput.java|  7 +--
 .../hadoop/mapred/TestMRCJCFileInputFormat.java | 32 +++-
 .../mapred/TestMRCJCFileOutputCommitter.java| 28 +++
 .../apache/hadoop/mapred/TestMapProgress.java   |  9 ++--
 .../org/apache/hadoop/mapred/TestMerge.java |  7 +--
 .../apache/hadoop/mapred/TestMiniMRBringup.java |  6 ++-
 .../hadoop/mapred/TestMiniMRDFSCaching.java | 14 +++---
 .../hadoop/mapred/TestMultiFileInputFormat.java | 19 +++
 .../hadoop/mapred/TestMultiFileSplit.java   | 10 ++--
 .../hadoop/mapred/TestMultipleLevelCaching.java | 12 +++--
 .../mapred/TestMultipleTextOutputFormat.java| 23 -
 .../apache/hadoop/mapred/TestReduceFetch.java   | 10 ++--
 .../mapred/TestReduceFetchFromPartialMem.java   | 46 +++--
 .../apache/hadoop/mapred/TestReduceTask.java| 18 ---
 .../TestSequenceFileAsBinaryInputFormat.java| 19 ---
 .../TestSequenceFileAsBinaryOutputFormat.java   | 31 
 .../TestSequenceFileAsTextInputFormat.java  | 33 +++--
 .../mapred/TestSequenceFileInputFilter.java | 32 ++--
 .../mapred/TestSequenceFileInputFormat.java | 32 ++--
 .../apache/hadoop/mapred/TestSortedRanges.java  | 19 ---
 .../TestSpecialCharactersInOutputPath.java  | 21 
 .../hadoop/mapred/TestStatisticsCollector.java  | 10 ++--
 .../hadoop/mapred/TestUserDefinedCounters.java  | 24 -
 .../hadoop/mapred/TestWritableJobConf.java  | 20 +---
 .../apache/hadoop/mapred/TestYARNRunner.java|  8 +--
 .../hadoop/mapred/join/TestDatamerge.java   | 42 +---
 .../hadoop/mapred/join/TestTupleWritable.java   | 24 ++---
 .../TestWrappedRecordReaderClassloader.java |  7 +--
 .../mapred/lib/TestDelegatingInputFormat.java   |  9 ++--
 .../hadoop/mapred/lib/TestLineInputFormat.java  |  7 +--
 .../hadoop/mapred/lib/TestMultipleInputs.java   |  2 -
 .../mapred/lib/aggregate/TestAggregates.java|  7 +--
 .../mapred/lib/db/TestConstructQuery.java   | 16 +++---
 .../apache/hadoop/mapred/pipes/TestPipes.java   |  9 ++--
 .../hadoop/mapreduce/TestLocalRunner.java   | 34 +++--
 .../hadoop/mapreduce/TestMRJobClient.java   | 49 +-
 .../mapreduce/TestMapReduceLazyOutput.java  |  9 ++--
 .../hadoop/mapreduce/TestValueIterReset.java|  8 +--
 .../TestYarnClientProtocolProvider.java |  5 +-
 .../lib/aggregate/TestMapReduceAggregates.java  | 23 -
 .../mapreduce/lib/db/TestDBOutputFormat.java| 17 ---
 .../mapreduce/lib/db/TestIntegerSplitter.java   | 15 --
 .../mapreduce/lib/db/TestTextSplitter.java  | 18 +--
 .../lib/fieldsel/TestMRFieldSelection.java  | 20 
 .../TestMRSequenceFileAsBinaryInputFormat.java  | 21 +---
 .../TestMRSequenceFileAsTextInputFormat.java| 27 +-
 .../input/TestMRSequenceFileInputFilter.java| 39 ---
 .../lib/input/TestNLineInputFormat.java | 34 -
 .../mapreduce/lib/join/TestJoinDatamerge.java   | 52 +++-
 .../mapreduce/lib/join/TestJoinProperties.java  | 44 +
 .../lib/join/TestJoinTupleWritable.java