hadoop git commit: MAPREDUCE-6675. TestJobImpl.testUnusableNode failed (haibochen via rkanter)

2016-05-04 Thread rkanter
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 cca8e9b90 -> f9d609210


MAPREDUCE-6675. TestJobImpl.testUnusableNode failed (haibochen via rkanter)

(cherry picked from commit 9d3fcdfbb314c83ba6185e4ac8de649dad51a279)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f9d60921
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f9d60921
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f9d60921

Branch: refs/heads/branch-2
Commit: f9d60921083d38efcd77a47748d6975eefbaab29
Parents: cca8e9b
Author: Robert Kanter 
Authored: Wed May 4 22:42:21 2016 -0700
Committer: Robert Kanter 
Committed: Wed May 4 22:42:34 2016 -0700

--
 .../org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java  | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f9d60921/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
index fa0aef3..36221e0 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
@@ -531,7 +531,7 @@ public class TestJobImpl {
 Configuration conf = new Configuration();
 conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir);
 conf.setInt(MRJobConfig.NUM_REDUCES, 1);
-AsyncDispatcher dispatcher = new AsyncDispatcher();
+DrainDispatcher dispatcher = new DrainDispatcher();
 dispatcher.init(conf);
 dispatcher.start();
 CyclicBarrier syncBarrier = new CyclicBarrier(2);
@@ -608,6 +608,7 @@ public class TestJobImpl {
 NodeReport secondMapperNodeReport = nodeReports.get(1);
 job.handle(new JobUpdatedNodesEvent(job.getID(),
 Collections.singletonList(firstMapperNodeReport)));
+dispatcher.await();
 // complete the reducer
 for (TaskId taskId: job.tasks.keySet()) {
   if (taskId.getTaskType() == TaskType.REDUCE) {
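
For context on the fix itself: DrainDispatcher extends AsyncDispatcher with an
await() method that blocks until the internal event queue has fully drained,
which is why the added dispatcher.await() closes the race between posting the
JobUpdatedNodesEvent and asserting on its effects. A minimal, self-contained
sketch of the pattern (not part of the patch; assumes the hadoop-yarn-common
test artifact on the classpath):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.yarn.event.DrainDispatcher;

    public class DrainDispatcherSketch {
      public static void main(String[] args) {
        DrainDispatcher dispatcher = new DrainDispatcher();
        dispatcher.init(new Configuration());
        dispatcher.start();
        // ... post events via dispatcher.getEventHandler().handle(...) ...
        dispatcher.await(); // returns only after every queued event is handled,
                            // so assertions that follow see the updated state
        dispatcher.stop();
      }
    }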





hadoop git commit: MAPREDUCE-6675. TestJobImpl.testUnusableNode failed (haibochen via rkanter)

2016-05-04 Thread rkanter
Repository: hadoop
Updated Branches:
  refs/heads/trunk ecce3b7d5 -> 9d3fcdfbb


MAPREDUCE-6675. TestJobImpl.testUnusableNode failed (haibochen via rkanter)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9d3fcdfb
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9d3fcdfb
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9d3fcdfb

Branch: refs/heads/trunk
Commit: 9d3fcdfbb314c83ba6185e4ac8de649dad51a279
Parents: ecce3b7
Author: Robert Kanter 
Authored: Wed May 4 22:42:21 2016 -0700
Committer: Robert Kanter 
Committed: Wed May 4 22:42:21 2016 -0700

--
 .../org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java  | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9d3fcdfb/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
index fa0aef3..36221e0 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
@@ -531,7 +531,7 @@ public class TestJobImpl {
 Configuration conf = new Configuration();
 conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir);
 conf.setInt(MRJobConfig.NUM_REDUCES, 1);
-AsyncDispatcher dispatcher = new AsyncDispatcher();
+DrainDispatcher dispatcher = new DrainDispatcher();
 dispatcher.init(conf);
 dispatcher.start();
 CyclicBarrier syncBarrier = new CyclicBarrier(2);
@@ -608,6 +608,7 @@ public class TestJobImpl {
 NodeReport secondMapperNodeReport = nodeReports.get(1);
 job.handle(new JobUpdatedNodesEvent(job.getID(),
 Collections.singletonList(firstMapperNodeReport)));
+dispatcher.await();
 // complete the reducer
 for (TaskId taskId: job.tasks.keySet()) {
   if (taskId.getTaskType() == TaskType.REDUCE) {





hadoop git commit: MAPREDUCE-6677. LocalContainerAllocator doesn't specify resource of the containers allocated (haibochen via rkanter)

2016-05-04 Thread rkanter
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 5b5383317 -> cca8e9b90


MAPREDUCE-6677. LocalContainerAllocator doesn't specify resource of the 
containers allocated (haibochen via rkanter)

(cherry picked from commit ecce3b7d53ab624c9931273716326c76641ea11d)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/cca8e9b9
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/cca8e9b9
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/cca8e9b9

Branch: refs/heads/branch-2
Commit: cca8e9b90c38680b5268126554e758d83523fd67
Parents: 5b53833
Author: Robert Kanter 
Authored: Wed May 4 22:19:20 2016 -0700
Committer: Robert Kanter 
Committed: Wed May 4 22:19:42 2016 -0700

--
 .../v2/app/local/LocalContainerAllocator.java   |  2 +
 .../app/local/TestLocalContainerAllocator.java  | 43 
 .../apache/hadoop/mapreduce/v2/TestMRJobs.java  |  9 
 3 files changed, 45 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/cca8e9b9/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java
index 7437357..269e5f4 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.Priority;
+import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceRequest;
 import org.apache.hadoop.yarn.api.records.Token;
 import org.apache.hadoop.yarn.client.ClientRMProxy;
@@ -177,6 +178,7 @@ public class LocalContainerAllocator extends RMCommunicator
   Container container = recordFactory.newRecordInstance(Container.class);
   container.setId(cID);
   NodeId nodeId = NodeId.newInstance(this.nmHost, this.nmPort);
+  container.setResource(Resource.newInstance(0, 0));
   container.setNodeId(nodeId);
   container.setContainerToken(null);
   container.setNodeHttpAddress(this.nmHost + ":" + this.nmHttpPort);
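
The one-line setResource() fix matters because Container.getResource()
otherwise returns null for uber-AM (local) containers, so any consumer that
reads the allocated resource can hit a NullPointerException. A tiny
illustrative sketch of the value being recorded (not the patch itself; the
zero-sized Resource reflects that a local container reserves no cluster
capacity):

    import org.apache.hadoop.yarn.api.records.Resource;

    public class ZeroResourceSketch {
      public static void main(String[] args) {
        // Record an explicit zero-sized resource instead of leaving it null.
        Resource none = Resource.newInstance(0, 0); // 0 MB memory, 0 vcores
        System.out.println(none.getMemory() + " MB, "
            + none.getVirtualCores() + " vcores");
      }
    }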

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cca8e9b9/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java
index 38df8f0..0f7dc87 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java
@@ -19,6 +19,8 @@ package org.apache.hadoop.mapreduce.v2.app.local;
 
 import static org.mockito.Matchers.isA;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
 import java.io.IOException;
@@ -29,10 +31,15 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.ClusterInfo;
 import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import 
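
The new static imports above enable exact-invocation-count assertions in the
test; a generic Mockito sketch of that verify/times pattern (not taken from
the patch):

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.times;
    import static org.mockito.Mockito.verify;

    import java.util.List;

    public class VerifyTimesSketch {
      @SuppressWarnings("unchecked")
      public static void main(String[] args) {
        List<String> calls = (List<String>) mock(List.class);
        calls.add("alloc");
        calls.add("alloc");
        // Passes: add("alloc") was invoked exactly twice on the mock.
        verify(calls, times(2)).add("alloc");
      }
    }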

hadoop git commit: MAPREDUCE-6677. LocalContainerAllocator doesn't specify resource of the containers allocated (haibochen via rkanter)

2016-05-04 Thread rkanter
Repository: hadoop
Updated Branches:
  refs/heads/trunk 1268cf5fb -> ecce3b7d5


MAPREDUCE-6677. LocalContainerAllocator doesn't specify resource of the 
containers allocated (haibochen via rkanter)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ecce3b7d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ecce3b7d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ecce3b7d

Branch: refs/heads/trunk
Commit: ecce3b7d53ab624c9931273716326c76641ea11d
Parents: 1268cf5
Author: Robert Kanter 
Authored: Wed May 4 22:19:20 2016 -0700
Committer: Robert Kanter 
Committed: Wed May 4 22:19:20 2016 -0700

--
 .../v2/app/local/LocalContainerAllocator.java   |  2 +
 .../app/local/TestLocalContainerAllocator.java  | 43 
 .../apache/hadoop/mapreduce/v2/TestMRJobs.java  |  9 
 3 files changed, 45 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecce3b7d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java
index 7437357..269e5f4 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.Priority;
+import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceRequest;
 import org.apache.hadoop.yarn.api.records.Token;
 import org.apache.hadoop.yarn.client.ClientRMProxy;
@@ -177,6 +178,7 @@ public class LocalContainerAllocator extends RMCommunicator
   Container container = recordFactory.newRecordInstance(Container.class);
   container.setId(cID);
   NodeId nodeId = NodeId.newInstance(this.nmHost, this.nmPort);
+  container.setResource(Resource.newInstance(0, 0));
   container.setNodeId(nodeId);
   container.setContainerToken(null);
   container.setNodeHttpAddress(this.nmHost + ":" + this.nmHttpPort);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecce3b7d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java
index 38df8f0..0f7dc87 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java
@@ -19,6 +19,8 @@ package org.apache.hadoop.mapreduce.v2.app.local;
 
 import static org.mockito.Matchers.isA;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
 import java.io.IOException;
@@ -29,10 +31,15 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.ClusterInfo;
 import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import 

[08/18] hadoop git commit: HADOOP-13095. hadoop-hdfs unit tests for dynamic commands

2016-05-04 Thread aw
HADOOP-13095. hadoop-hdfs unit tests for dynamic commands


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1df23089
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1df23089
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1df23089

Branch: refs/heads/HADOOP-12930
Commit: 1df23089e207957d70bfdf09215c1617cb1ecdcd
Parents: a48954b
Author: Allen Wittenauer 
Authored: Wed May 4 20:40:39 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 20:43:21 2016 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/pom.xml | 38 ++-
 .../scripts/hdfs-functions_test_helper.bash | 58 +
 .../src/test/scripts/hdfs_subcommands.bats  | 66 
 .../hadoop-hdfs/src/test/scripts/run-bats.sh| 43 +
 4 files changed, 204 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1df23089/hadoop-hdfs-project/hadoop-hdfs/pom.xml
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml 
b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
index 668bbfe..c8198c1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -417,7 +417,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
   
 
-  
+  
 
 
   startKdc
@@ -559,5 +559,41 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd;>
 
   
 
+<profile>
+  <id>shelltest</id>
+  <activation>
+    <property>
+      <name>!skipTests</name>
+    </property>
+  </activation>
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>hdfs-test-bats-driver</id>
+            <phase>test</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <exec dir="src/test/scripts" executable="bash" failonerror="true">
+                  <arg value="./run-bats.sh"/>
+                </exec>
+              </target>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</profile>
   
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1df23089/hadoop-hdfs-project/hadoop-hdfs/src/test/scripts/hdfs-functions_test_helper.bash
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/scripts/hdfs-functions_test_helper.bash
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/scripts/hdfs-functions_test_helper.bash
new file mode 100755
index 000..d3cdda4
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/scripts/hdfs-functions_test_helper.bash
@@ -0,0 +1,58 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+setup() {
+
+  TMP="${BATS_TEST_DIRNAME}/../../../target/test-dir/bats.$$.${RANDOM}"
+  mkdir -p "${TMP}"
+  TMP=$(cd -P -- "${TMP}" >/dev/null && pwd -P)
+  export TMP
+  TESTBINDIR="${BATS_TEST_DIRNAME}"
+  HADOOP_LIBEXEC_DIR=${TESTBINDIR}/../../main/bin
+  HADOOP_LIBEXEC_DIR=$(cd -P -- "${HADOOP_LIBEXEC_DIR}" >/dev/null && pwd -P)
+
+  # shellcheck disable=SC2034
+  HADOOP_SHELL_SCRIPT_DEBUG=true
+  unset HADOOP_CONF_DIR
+  # we unset both of these for bw compat
+  unset HADOOP_HOME
+  unset HADOOP_PREFIX
+
+  echo "bindir: ${TESTBINDIR}" 2>&1
+
+  mkdir -p "${TMP}"
+
+  # shellcheck disable=SC2034
+  QATESTMODE=true
+
+  # shellcheck disable=SC1090
+  . "${BATS_TEST_DIRNAME}/../../../../../hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh"
+  pushd "${TMP}" >/dev/null
+}
+
+teardown() {
+  popd >/dev/null
+  rm -rf "${TMP}"
+}
+
+
+strstr() {
+  if [ "${1#*$2}" != "${1}" ]; then
+echo true
+  else
+echo false
+  fi
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1df23089/hadoop-hdfs-project/hadoop-hdfs/src/test/scripts/hdfs_subcommands.bats
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/scripts/hdfs_subcommands.bats 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/scripts/hdfs_subcommands.bats
new file mode 100755
index 000..33fb9aa
--- 

[18/18] hadoop git commit: HADOOP-12933. bin/hdfs work for dynamic subcommands

2016-05-04 Thread aw
HADOOP-12933. bin/hdfs work for dynamic subcommands


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7020c503
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7020c503
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7020c503

Branch: refs/heads/HADOOP-12930
Commit: 7020c503d69f3f2538f25620cf116812b792a5fb
Parents: ff0d5fa
Author: Allen Wittenauer 
Authored: Tue May 3 12:45:21 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 20:43:21 2016 -0700

--
 .../hadoop-hdfs/src/main/bin/hdfs   | 402 ++-
 1 file changed, 212 insertions(+), 190 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7020c503/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
index c365250..310fb41 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
@@ -16,7 +16,12 @@
 # limitations under the License.
 
 MYNAME="${BASH_SOURCE-$0}"
+HADOOP_SHELL_EXECNAME="${MYNAME##*/}"
 
+## @description  build up the hdfs command's usage text.
+## @audience public
+## @stability    stable
+## @replaceable  no
 function hadoop_usage
 {
   hadoop_add_option "--buildpaths" "attempt to add class files from build tree"
@@ -56,7 +61,194 @@ function hadoop_usage
   hadoop_add_subcommand "storagepolicies" "list/get/set block storage policies"
   hadoop_add_subcommand "version" "print the version"
   hadoop_add_subcommand "zkfc" "run the ZK Failover Controller daemon"
-  hadoop_generate_usage "${MYNAME}" false
+  hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" false
+}
+
+## @description  Default command handler for hdfs command
+## @audience public
+## @stability    stable
+## @replaceable  no
+## @param        CLI arguments
+function hdfscmd_case
+{
+  subcmd=$1
+  shift
+
+  case ${subcmd} in
+balancer)
+  supportdaemonization="true"
+  HADOOP_CLASSNAME=org.apache.hadoop.hdfs.server.balancer.Balancer
+  hadoop_debug "Appending HADOOP_BALANCER_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_BALANCER_OPTS}"
+;;
+cacheadmin)
+  HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.CacheAdmin
+;;
+classpath)
+  hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
+;;
+crypto)
+  HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.CryptoAdmin
+;;
+datanode)
+  supportdaemonization="true"
+  # Determine if we're starting a secure datanode, and
+  # if so, redefine appropriate variables
+  if [[ -n "${HADOOP_SECURE_DN_USER}" ]]; then
+secure_service="true"
+secure_user="${HADOOP_SECURE_DN_USER}"
+
+# backward compatibility
+HADOOP_SECURE_PID_DIR="${HADOOP_SECURE_PID_DIR:-$HADOOP_SECURE_DN_PID_DIR}"
+HADOOP_SECURE_LOG_DIR="${HADOOP_SECURE_LOG_DIR:-$HADOOP_SECURE_DN_LOG_DIR}"
+
+hadoop_debug "Appending HADOOP_DATANODE_OPTS onto HADOOP_OPTS"
+hadoop_debug "Appending HADOOP_DN_SECURE_EXTRA_OPTS onto HADOOP_OPTS"
+HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_DATANODE_OPTS} ${HADOOP_DN_SECURE_EXTRA_OPTS}"
+HADOOP_CLASSNAME="org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter"
+  else
+hadoop_debug "Appending HADOOP_DATANODE_OPTS onto HADOOP_OPTS"
+HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_DATANODE_OPTS}"
+HADOOP_CLASSNAME='org.apache.hadoop.hdfs.server.datanode.DataNode'
+  fi
+;;
+debug)
+  HADOOP_CLASSNAME='org.apache.hadoop.hdfs.tools.DebugAdmin'
+;;
+dfs)
+  HADOOP_CLASSNAME=org.apache.hadoop.fs.FsShell
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+;;
+dfsadmin)
+  HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.DFSAdmin
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+;;
+envvars)
+  echo "JAVA_HOME='${JAVA_HOME}'"
+  echo "HADOOP_HDFS_HOME='${HADOOP_HDFS_HOME}'"
+  echo "HDFS_DIR='${HDFS_DIR}'"
+  echo "HDFS_LIB_JARS_DIR='${HDFS_LIB_JARS_DIR}'"
+  echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
+  echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
+  echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
+  echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
+  exit 0
+;;
+erasurecode)
+  HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.erasurecode.ECCli
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"

[04/18] hadoop git commit: YARN-4984. LogAggregationService shouldn't swallow exception in handling createAppDir() which causes a thread leak. (Junping Du via wangda)

2016-05-04 Thread aw
YARN-4984. LogAggregationService shouldn't swallow exception in handling createAppDir() which causes a thread leak. (Junping Du via wangda)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7bd418e4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7bd418e4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7bd418e4

Branch: refs/heads/HADOOP-12930
Commit: 7bd418e48c71590fc8026d69f9b8f8ad42f2aade
Parents: e61d431
Author: Wangda Tan 
Authored: Wed May 4 11:38:55 2016 -0700
Committer: Wangda Tan 
Committed: Wed May 4 11:38:55 2016 -0700

--
 .../logaggregation/LogAggregationService.java|  7 +++
 .../logaggregation/TestLogAggregationService.java| 11 ---
 2 files changed, 11 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7bd418e4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
index 2d6b900..d46f7a3 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
@@ -376,6 +376,9 @@ public class LogAggregationService extends AbstractService implements
   } else {
 appDirException = (YarnRuntimeException)e;
   }
+  appLogAggregators.remove(appId);
+  closeFileSystems(userUgi);
+  throw appDirException;
 }
 
 // TODO Get the user configuration for the list of containers that need log
@@ -393,10 +396,6 @@ public class LogAggregationService extends AbstractService implements
   }
 };
 this.threadPool.execute(aggregatorWrapper);
-
-if (appDirException != null) {
-  throw appDirException;
-}
   }
 
   protected void closeFileSystems(final UserGroupInformation userUgi) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7bd418e4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
index fec12ff..fa9a0b5 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
@@ -777,8 +777,8 @@ public class TestLogAggregationService extends BaseContainerManagerTest {
 
 dispatcher.await();
 ApplicationEvent expectedEvents[] = new ApplicationEvent[]{
-new ApplicationEvent(appId, 
-   ApplicationEventType.APPLICATION_LOG_HANDLING_FAILED)
+new ApplicationEvent(appId,
+ApplicationEventType.APPLICATION_LOG_HANDLING_FAILED)
 };
 checkEvents(appEventHandler, expectedEvents, false,
 "getType", "getApplicationID", "getDiagnostic");
@@ -794,10 +794,15 @@ public class TestLogAggregationService extends BaseContainerManagerTest {
 
 logAggregationService.stop();
 assertEquals(0, logAggregationService.getNumAggregators());
-verify(spyDelSrvc).delete(eq(user), any(Path.class),
+// local log dir shouldn't be deleted given log aggregation cannot
+// continue due to aggregated log dir creation failure on remoteFS.
+
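
The shape of the LogAggregationService change above: undo the bookkeeping and
rethrow before the aggregator is handed to the thread pool, rather than
scheduling it first and throwing afterwards. A self-contained sketch of that
fail-fast pattern (hypothetical names, not YARN code):

    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class FailFastSketch {
      private final Set<String> activeApps = ConcurrentHashMap.newKeySet();
      private final ExecutorService pool = Executors.newCachedThreadPool();

      void initApp(final String appId) {
        activeApps.add(appId);
        try {
          createAppDir(appId);          // may throw
        } catch (RuntimeException e) {
          activeApps.remove(appId);     // roll back bookkeeping
          throw e;                      // fail fast: no worker was scheduled
        }
        pool.execute(() -> aggregate(appId)); // only reached on success
      }

      private void createAppDir(String appId) { /* may throw at runtime */ }
      private void aggregate(String appId) { }
    }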

[15/18] hadoop git commit: HADOOP-13094. hadoop-common unit tests for dynamic commands

2016-05-04 Thread aw
HADOOP-13094.  hadoop-common unit tests for dynamic commands


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a48954b7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a48954b7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a48954b7

Branch: refs/heads/HADOOP-12930
Commit: a48954b7473a407397e4c1a7edb8e55bb0b58429
Parents: b3a7b75
Author: Allen Wittenauer 
Authored: Wed May 4 17:41:23 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 20:43:21 2016 -0700

--
 .../scripts/hadoop-functions_test_helper.bash   |  4 +-
 .../src/test/scripts/hadoop_subcommands.bats| 66 
 2 files changed, 68 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a48954b7/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
 
b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
index be2d7f5..cc37268 100755
--- 
a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
+++ 
b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
@@ -16,7 +16,7 @@
 
 setup() {
 
-  TMP=../../../target/test-dir/bats.$$.${RANDOM}
+  TMP="${BATS_TEST_DIRNAME}/../../../target/test-dir/bats.$$.${RANDOM}"
   mkdir -p ${TMP}
   TMP=$(cd -P -- "${TMP}" >/dev/null && pwd -P)
   export TMP
@@ -38,7 +38,7 @@ setup() {
   # shellcheck disable=SC2034
   QATESTMODE=true
 
-  . ../../main/bin/hadoop-functions.sh
+  . "${BATS_TEST_DIRNAME}/../../main/bin/hadoop-functions.sh"
   pushd "${TMP}" >/dev/null
 }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a48954b7/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_subcommands.bats
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_subcommands.bats 
b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_subcommands.bats
new file mode 100755
index 000..eda2e3d
--- /dev/null
+++ 
b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_subcommands.bats
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+# the loading of shell profiles is tested elsewhere
+# this only tests the specific subcommand parts
+
+subcommandsetup () {
+  export HADOOP_LIBEXEC_DIR="${TMP}/libexec"
+  export HADOOP_CONF_DIR="${TMP}/conf"
+  mkdir -p "${HADOOP_LIBEXEC_DIR}/shellprofile.d" "${HADOOP_CONF_DIR}/shellprofile.d"
+  cat <<-'TOKEN'   > "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
+
+hadoop_subcommand_sub () {
+  echo "unittest"
+  exit 0
+}
+
+hadoop_subcommand_conftest ()
+{
+  echo conftest
+  exit 0
+}
+
+hadoop_subcommand_envcheck ()
+{
+  echo ${HADOOP_SHELL_EXECNAME}
+  exit 0
+}
+TOKEN
+  chmod a+rx "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
+
+}
+
+@test "hadoop_subcommand (addition)" {
+  subcommandsetup
+  run "${BATS_TEST_DIRNAME}/../../main/bin/hadoop" sub
+  echo ">${output}<"
+  [ "${output}" = unittest ]
+}
+
+@test "hadoop_subcommand (substitute)" {
+  subcommandsetup
+  run "${BATS_TEST_DIRNAME}/../../main/bin/hadoop" conftest
+  echo ">${output}<"
+  [ "${output}" = conftest ]
+}
+
+@test "hadoop_subcommand (envcheck)" {
+  subcommandsetup
+  run "${BATS_TEST_DIRNAME}/../../main/bin/hadoop" envcheck
+  [ "${output}" = hadoop ]
+}





[07/18] hadoop git commit: HADOOP-12936. modify hadoop-tools to take advantage of dynamic subcommands

2016-05-04 Thread aw
HADOOP-12936. modify hadoop-tools to take advantage of dynamic subcommands


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b67da297
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b67da297
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b67da297

Branch: refs/heads/HADOOP-12930
Commit: b67da297df9d2422a14d53ab4fd8466b8609
Parents: 03699e0
Author: Allen Wittenauer 
Authored: Wed May 4 07:21:36 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 20:43:21 2016 -0700

--
 .../main/resources/assemblies/hadoop-tools.xml  | 32 ++
 .../hadoop-common/src/main/bin/hadoop   | 45 +++-
 hadoop-mapreduce-project/bin/mapred | 21 -
 .../main/shellprofile.d/hadoop-archive-logs.sh  | 33 ++
 .../src/main/shellprofile.d/hadoop-archives.sh  | 41 ++
 .../src/main/shellprofile.d/hadoop-distcp.sh| 43 +++
 .../src/main/shellprofile.d/hadoop-extras.sh| 33 ++
 7 files changed, 197 insertions(+), 51 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b67da297/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml
--
diff --git a/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml 
b/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml
index f8ba48e..c5ea6ad 100644
--- a/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml
+++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml
@@ -24,6 +24,38 @@
   false
   
 
+<fileSet>
+  <directory>../hadoop-archive-logs/src/main/shellprofile.d</directory>
+  <includes>
+    <include>*</include>
+  </includes>
+  <outputDirectory>/libexec/shellprofile.d</outputDirectory>
+  <fileMode>0755</fileMode>
+</fileSet>
+<fileSet>
+  <directory>../hadoop-archives/src/main/shellprofile.d</directory>
+  <includes>
+    <include>*</include>
+  </includes>
+  <outputDirectory>/libexec/shellprofile.d</outputDirectory>
+  <fileMode>0755</fileMode>
+</fileSet>
+<fileSet>
+  <directory>../hadoop-distcp/src/main/shellprofile.d</directory>
+  <includes>
+    <include>*</include>
+  </includes>
+  <outputDirectory>/libexec/shellprofile.d</outputDirectory>
+  <fileMode>0755</fileMode>
+</fileSet>
+<fileSet>
+  <directory>../hadoop-extras/src/main/shellprofile.d</directory>
+  <includes>
+    <include>*</include>
+  </includes>
+  <outputDirectory>/libexec/shellprofile.d</outputDirectory>
+  <fileMode>0755</fileMode>
+</fileSet>
   <directory>../hadoop-pipes/src/main/native/pipes/api/hadoop</directory>
   <includes>
     <include>*.hh</include>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b67da297/hadoop-common-project/hadoop-common/src/main/bin/hadoop
--
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop 
b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
index 81bff75..bb4b041 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
@@ -30,14 +30,11 @@ function hadoop_usage
   hadoop_add_option "hosts filename" "list of hosts to use in slave mode"
   hadoop_add_option "slaves" "turn on slave mode"
 
-  hadoop_add_subcommand "archive" "create a Hadoop archive"
   hadoop_add_subcommand "checknative" "check native Hadoop and compression 
libraries availability"
   hadoop_add_subcommand "classpath" "prints the class path needed to get the 
Hadoop jar and the required libraries"
   hadoop_add_subcommand "conftest" "validate configuration XML files"
   hadoop_add_subcommand "credential" "interact with credential providers"
   hadoop_add_subcommand "daemonlog" "get/set the log level for each daemon"
-  hadoop_add_subcommand "distch" "distributed metadata changer"
-  hadoop_add_subcommand "distcp" "copy file or directories recursively"
   hadoop_add_subcommand "dtutil" "operations related to delegation tokens"
   hadoop_add_subcommand "envvars" "display computed Hadoop environment 
variables"
   hadoop_add_subcommand "fs" "run a generic filesystem user client"
@@ -101,35 +98,23 @@ function hadoopcmd_case
 exit 1
   fi
 ;;
-archive)
-  HADOOP_CLASS=org.apache.hadoop.tools.HadoopArchives
-  hadoop_add_to_classpath_tools hadoop-archives
-;;
 checknative)
-  HADOOP_CLASS=org.apache.hadoop.util.NativeLibraryChecker
+  HADOOP_CLASSNAME=org.apache.hadoop.util.NativeLibraryChecker
 ;;
 classpath)
-  hadoop_do_classpath_subcommand HADOOP_CLASS "$@"
+  hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
 ;;
 conftest)
-  HADOOP_CLASS=org.apache.hadoop.util.ConfTest
+  HADOOP_CLASSNAME=org.apache.hadoop.util.ConfTest
 ;;
 credential)
-  HADOOP_CLASS=org.apache.hadoop.security.alias.CredentialShell
+  HADOOP_CLASSNAME=org.apache.hadoop.security.alias.CredentialShell
 ;;
 daemonlog)
-  HADOOP_CLASS=org.apache.hadoop.log.LogLevel
-;;
-distch)
-  HADOOP_CLASS=org.apache.hadoop.tools.DistCh
-  hadoop_add_to_classpath_tools hadoop-extras
-;;
-distcp)
-  

[17/18] hadoop git commit: HADOOP-13089. hadoop distcp adds client opts twice when dynamic

2016-05-04 Thread aw
HADOOP-13089. hadoop distcp adds client opts twice when dynamic


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4517b497
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4517b497
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4517b497

Branch: refs/heads/HADOOP-12930
Commit: 4517b497a56fa6894c7b6e6bfab9512259f84854
Parents: b1417a5
Author: Allen Wittenauer 
Authored: Wed May 4 10:50:15 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 20:43:21 2016 -0700

--
 hadoop-common-project/hadoop-common/src/main/bin/hadoop | 8 
 1 file changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4517b497/hadoop-common-project/hadoop-common/src/main/bin/hadoop
--
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop 
b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
index 7b18d22..3e514ff 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
@@ -161,6 +161,10 @@ function hadoopcmd_case
   fi
 ;;
   esac
+
+  # Always respect HADOOP_OPTS and HADOOP_CLIENT_OPTS
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
 }
 
 # This script runs the hadoop core commands.
@@ -203,10 +207,6 @@ if [[ ${HADOOP_SLAVE_MODE} = true ]]; then
   exit $?
 fi
 
-# Always respect HADOOP_OPTS and HADOOP_CLIENT_OPTS
-hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
-HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
-
 if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
   HADOOP_SECURE_USER="${HADOOP_SUBCMD_SECUREUSER}"
   hadoop_verify_secure_prereq





[14/18] hadoop git commit: HADOOP-13088. fix shellprofiles in hadoop-tools to allow replacement

2016-05-04 Thread aw
HADOOP-13088. fix shellprofiles in hadoop-tools to allow replacement


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b3a7b75d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b3a7b75d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b3a7b75d

Branch: refs/heads/HADOOP-12930
Commit: b3a7b75df008f231e518e54d1aebe57b5d13926a
Parents: ec7e8c3
Author: Allen Wittenauer 
Authored: Wed May 4 12:44:47 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 20:43:21 2016 -0700

--
 .../main/shellprofile.d/hadoop-archive-logs.sh  | 14 ++---
 .../src/main/shellprofile.d/hadoop-archives.sh  | 33 +++-
 .../src/main/shellprofile.d/hadoop-distcp.sh| 33 +++-
 .../src/main/shellprofile.d/hadoop-extras.sh| 14 ++---
 4 files changed, 71 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b3a7b75d/hadoop-tools/hadoop-archive-logs/src/main/shellprofile.d/hadoop-archive-logs.sh
--
diff --git 
a/hadoop-tools/hadoop-archive-logs/src/main/shellprofile.d/hadoop-archive-logs.sh
 
b/hadoop-tools/hadoop-archive-logs/src/main/shellprofile.d/hadoop-archive-logs.sh
index d37411e..ae7b6c6 100755
--- 
a/hadoop-tools/hadoop-archive-logs/src/main/shellprofile.d/hadoop-archive-logs.sh
+++ 
b/hadoop-tools/hadoop-archive-logs/src/main/shellprofile.d/hadoop-archive-logs.sh
@@ -15,14 +15,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-if [[ "${HADOOP_SHELL_EXECNAME}" = mapred ]]; then
-  hadoop_add_subcommand "archive-logs" "combine aggregated logs into hadoop archives"
-fi
+if ! declare -f mapred_subcommand_archive-logs >/dev/null 2>/dev/null; then
+
+  if [[ "${HADOOP_SHELL_EXECNAME}" = mapred ]]; then
+hadoop_add_subcommand "archive-logs" "combine aggregated logs into hadoop archives"
+  fi
+
+  # this can't be indented otherwise shelldocs won't get it
 
 ## @description  archive-logs command for mapred
 ## @audience public
## @stability    stable
-## @replaceable  no
+## @replaceable  yes
 function mapred_subcommand_archive-logs
 {
   # shellcheck disable=SC2034
@@ -31,3 +35,5 @@ function mapred_subcommand_archive-logs
   hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
   HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
 }
+
+fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b3a7b75d/hadoop-tools/hadoop-archives/src/main/shellprofile.d/hadoop-archives.sh
--
diff --git 
a/hadoop-tools/hadoop-archives/src/main/shellprofile.d/hadoop-archives.sh 
b/hadoop-tools/hadoop-archives/src/main/shellprofile.d/hadoop-archives.sh
index b85ff25..f74fe5b 100755
--- a/hadoop-tools/hadoop-archives/src/main/shellprofile.d/hadoop-archives.sh
+++ b/hadoop-tools/hadoop-archives/src/main/shellprofile.d/hadoop-archives.sh
@@ -15,15 +15,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop
-   || "${HADOOP_SHELL_EXECNAME}" = mapred ]]; then
-  hadoop_add_subcommand "archive" "create a Hadoop archive"
-fi
+if ! declare -f hadoop_subcommand_archive >/dev/null 2>/dev/null; then
+
+  if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop ]]; then
+hadoop_add_subcommand "archive" "create a Hadoop archive"
+  fi
+
+  # this can't be indented otherwise shelldocs won't get it
 
 ## @description  archive command for hadoop (and mapred)
 ## @audience public
## @stability    stable
-## @replaceable  no
+## @replaceable  yes
 function hadoop_subcommand_archive
 {
   # shellcheck disable=SC2034
@@ -31,11 +34,25 @@ function hadoop_subcommand_archive
   hadoop_add_to_classpath_tools hadoop-archives
 }
 
-## @description  archive-logs command for mapred (calls hadoop version)
+fi
+
+if ! declare -f mapred_subcommand_archive >/dev/null 2>/dev/null; then
+
+  if [[ "${HADOOP_SHELL_EXECNAME}" = mapred ]]; then
+hadoop_add_subcommand "archive" "create a Hadoop archive"
+  fi
+
+  # this can't be indented otherwise shelldocs won't get it
+
+## @description  archive command for mapred (calls hadoop version)
 ## @audience public
 ## @stability    stable
-## @replaceable  no
+## @replaceable  yes
 function mapred_subcommand_archive
 {
-  hadoop_subcommand_archive
+  # shellcheck disable=SC2034
+  HADOOP_CLASSNAME=org.apache.hadoop.tools.HadoopArchives
+  hadoop_add_to_classpath_tools hadoop-archives
 }
+
+fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b3a7b75d/hadoop-tools/hadoop-distcp/src/main/shellprofile.d/hadoop-distcp.sh
--
diff --git 

[06/18] hadoop git commit: HDFS-10320. Rack failures may result in NN terminate. (Xiao Chen via mingma)

2016-05-04 Thread aw
HDFS-10320. Rack failures may result in NN terminate. (Xiao Chen via mingma)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1268cf5f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1268cf5f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1268cf5f

Branch: refs/heads/HADOOP-12930
Commit: 1268cf5fbe4458fa75ad0662512d352f9e8d3470
Parents: 9e37fe3
Author: Ming Ma 
Authored: Wed May 4 17:02:26 2016 -0700
Committer: Ming Ma 
Committed: Wed May 4 17:02:26 2016 -0700

--
 .../org/apache/hadoop/net/NetworkTopology.java  | 109 +--
 .../AvailableSpaceBlockPlacementPolicy.java |  11 +-
 .../BlockPlacementPolicyDefault.java|  84 +++---
 .../web/resources/NamenodeWebHdfsMethods.java   |  13 +--
 .../apache/hadoop/net/TestNetworkTopology.java  |  75 -
 5 files changed, 196 insertions(+), 96 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1268cf5f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
index e1d2968..1e23ff6 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
@@ -29,13 +29,13 @@ import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -54,8 +54,8 @@ import com.google.common.collect.Lists;
 public class NetworkTopology {
   public final static String DEFAULT_RACK = "/default-rack";
   public final static int DEFAULT_HOST_LEVEL = 2;
-  public static final Log LOG =
-LogFactory.getLog(NetworkTopology.class);
+  public static final Logger LOG =
+  LoggerFactory.getLogger(NetworkTopology.class);
 
   public static class InvalidTopologyException extends RuntimeException {
 private static final long serialVersionUID = 1L;
@@ -442,9 +442,7 @@ public class NetworkTopology {
   }
 }
   }
-  if(LOG.isDebugEnabled()) {
-LOG.debug("NetworkTopology became:\n" + this.toString());
-  }
+  LOG.debug("NetworkTopology became:\n{}", this.toString());
 } finally {
   netlock.writeLock().unlock();
 }
@@ -517,9 +515,7 @@ public class NetworkTopology {
   numOfRacks--;
 }
   }
-  if(LOG.isDebugEnabled()) {
-LOG.debug("NetworkTopology became:\n" + this.toString());
-  }
+  LOG.debug("NetworkTopology became:\n{}", this.toString());
 } finally {
   netlock.writeLock().unlock();
 }
@@ -717,26 +713,45 @@ public class NetworkTopology {
 r.setSeed(seed);
   }
 
-  /** randomly choose one node from scope
-   * if scope starts with ~, choose one from the all nodes except for the
-   * ones in scope; otherwise, choose one from scope
+  /**
+   * Randomly choose a node.
+   *
* @param scope range of nodes from which a node will be chosen
* @return the chosen node
+   *
+   * @see #chooseRandom(String, Collection)
*/
-  public Node chooseRandom(String scope) {
+  public Node chooseRandom(final String scope) {
+return chooseRandom(scope, null);
+  }
+
+  /**
+   * Randomly choose one node from scope.
+   *
+   * If scope starts with ~, choose one from the all nodes except for the
+   * ones in scope; otherwise, choose one from scope.
+   * If excludedNodes is given, choose a node that's not in excludedNodes.
+   *
+   * @param scope range of nodes from which a node will be chosen
+   * @param excludedNodes nodes to be excluded from
+   * @return the chosen node
+   */
+  public Node chooseRandom(final String scope,
+      final Collection<Node> excludedNodes) {
 netlock.readLock().lock();
 try {
   if (scope.startsWith("~")) {
-return chooseRandom(NodeBase.ROOT, scope.substring(1));
+return chooseRandom(NodeBase.ROOT, scope.substring(1), excludedNodes);
   } else {
-
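
Besides the new chooseRandom overload that threads an excluded-node set
through the selection, the logging change above is the standard slf4j
migration: {} placeholders defer string construction until DEBUG is actually
enabled, so the explicit isDebugEnabled() guards can be dropped. A small
illustration (assumes slf4j on the classpath):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Slf4jSketch {
      private static final Logger LOG = LoggerFactory.getLogger(Slf4jSketch.class);

      public static void main(String[] args) {
        Object topology = new Object();
        // Old style: guard avoids building the message string eagerly.
        if (LOG.isDebugEnabled()) {
          LOG.debug("NetworkTopology became:\n" + topology);
        }
        // New style: the argument is formatted only if DEBUG is enabled.
        LOG.debug("NetworkTopology became:\n{}", topology);
      }
    }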

[10/18] hadoop git commit: HADOOP-12935. API documentation for dynamic subcommands (aw)

2016-05-04 Thread aw
HADOOP-12935. API documentation for dynamic subcommands (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7754fcb0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7754fcb0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7754fcb0

Branch: refs/heads/HADOOP-12930
Commit: 7754fcb04f824f66a87fade381c449e02c0482e4
Parents: 1268cf5
Author: Allen Wittenauer 
Authored: Mon Mar 28 09:00:07 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 20:43:21 2016 -0700

--
 .../src/site/markdown/UnixShellGuide.md | 48 ++--
 1 file changed, 45 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7754fcb0/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
--
diff --git 
a/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md 
b/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
index a5fa10c..668a744 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
@@ -89,7 +89,7 @@ Shell profiles may be installed in either `${HADOOP_CONF_DIR}/shellprofile.d` or
 
 An example of a shell profile is in the libexec directory.
 
-## Shell API
+### Shell API
 
 Apache Hadoop's shell code has a [function library](./UnixShellAPI.html) that is open for administrators and developers to use to assist in their configuration and advanced feature management.  These APIs follow the standard [Apache Hadoop Interface Classification](./InterfaceClassification.html), with one addition: Replaceable.
 
@@ -97,10 +97,8 @@ The shell code allows for core functions to be overridden. However, not all func
 
 In order to replace a function, create a file called `hadoop-user-functions.sh` in the `${HADOOP_CONF_DIR}` directory.  Simply define the new, replacement function in this file and the system will pick it up automatically.  There may be as many replacement functions as needed in this file.  Examples of function replacement are in the `hadoop-user-functions.sh.examples` file.
 
-
 Functions that are marked Public and Stable are safe to use in shell profiles as-is.  Other functions may change in a minor release.
 
-
 ### User-level API Access
 
 In addition to `.hadoop-env`, which allows individual users to override `hadoop-env.sh`, users may also use `.hadooprc`.  This is called after the Apache Hadoop shell environment has been configured and allows the full set of shell API function calls.
@@ -112,3 +110,47 @@ hadoop_add_classpath /some/path/custom.jar
 ```
 
 would go into `.hadooprc`
+
+### Dynamic Subcommands
+
+Utilizing the Shell API, it is possible for third parties to add their own subcommands to the primary Hadoop shell scripts (hadoop, hdfs, mapred, yarn).
+
+Prior to executing a subcommand, the primary scripts will check for the existence of a (scriptname)_subcommand_(subcommand) function.  This function gets executed with the parameters set to all remaining command line arguments.  For example, if the following function is defined:
+
+```bash
+function yarn_subcommand_hello
+{
+  echo "$@"
+}
+```
+
+then executing `yarn --debug hello world I see you` will activate script debugging and call the `yarn_subcommand_hello` function as:
+
+```bash
+yarn_subcommand_hello world I see you
+```
+
+which will result in the output of:
+
+```bash
+world I see you
+```
+
+It is also possible to add the new subcommands to the usage output via the `hadoop_add_subcommand` function, which adds text to the usage output.  Utilizing the standard HADOOP_SHELL_EXECNAME variable, we can limit which command gets our new function.
+
+```bash
+if [[ "${HADOOP_SHELL_EXECNAME}" = "yarn" ]]; then
+  hadoop_add_subcommand "hello" "Print some text to the screen"
+fi
+```
+
+This functionality may also be used to override the built-ins.  For example, defining:
+
+```bash
+function hdfs_subcommand_fetchdt
+{
+  ...
+}
+```
+
+... will replace the existing `hdfs fetchdt` subcommand with a custom one.





[03/18] hadoop git commit: YARN-4920. ATS/NM should support a link to download/get the logs in text format. Contributed by Xuan Gong.

2016-05-04 Thread aw
YARN-4920. ATS/NM should support a link to download/get the logs in text format. Contributed by Xuan Gong.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e61d4312
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e61d4312
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e61d4312

Branch: refs/heads/HADOOP-12930
Commit: e61d431275d7fe5641fe9da4903e285b10330fa0
Parents: af94258
Author: Junping Du 
Authored: Wed May 4 09:40:13 2016 -0700
Committer: Junping Du 
Committed: Wed May 4 10:35:49 2016 -0700

--
 .../webapp/AHSWebServices.java  | 270 ++-
 ...pplicationHistoryManagerOnTimelineStore.java |  29 +-
 .../webapp/TestAHSWebServices.java  | 203 +-
 .../yarn/server/webapp/dao/ContainerInfo.java   |   6 +
 .../nodemanager/webapp/NMWebServices.java   |  22 +-
 .../nodemanager/webapp/TestNMWebServices.java   |  12 +-
 6 files changed, 525 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e61d4312/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
index e7a22bd..75dce07 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
@@ -18,6 +18,11 @@
 
 package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
 
+import java.io.DataInputStream;
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.Charset;
 import java.util.Collections;
 import java.util.Set;
 
@@ -28,13 +33,30 @@ import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
+import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.StreamingOutput;
+import javax.ws.rs.core.Response.ResponseBuilder;
+import javax.ws.rs.core.Response.Status;
 
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineAbout;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat;
+import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils;
+import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
 import org.apache.hadoop.yarn.server.webapp.WebServices;
 import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptInfo;
 import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptsInfo;
@@ -42,9 +64,10 @@ import org.apache.hadoop.yarn.server.webapp.dao.AppInfo;
 import org.apache.hadoop.yarn.server.webapp.dao.AppsInfo;
 import org.apache.hadoop.yarn.server.webapp.dao.ContainerInfo;
 import org.apache.hadoop.yarn.server.webapp.dao.ContainersInfo;
+import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
-
+import com.google.common.base.Joiner;
 import com.google.inject.Inject;
 import com.google.inject.Singleton;
 
@@ -52,9 +75,17 @@ import com.google.inject.Singleton;
 @Path("/ws/v1/applicationhistory")
 public class AHSWebServices extends WebServices {
 
+  private static final String NM_DOWNLOAD_URI_STR =
+  "/ws/v1/node/containerlogs";
+  

[11/18] hadoop git commit: HADOOP-12932. bin/yarn work for dynamic subcommands

2016-05-04 Thread aw
HADOOP-12932. bin/yarn work for dynamic subcommands


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/03699e03
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/03699e03
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/03699e03

Branch: refs/heads/HADOOP-12930
Commit: 03699e033c320e52b6a1a9deadfaaeccb923ff3e
Parents: 7020c50
Author: Allen Wittenauer 
Authored: Tue May 3 14:17:44 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 20:43:21 2016 -0700

--
 hadoop-yarn-project/hadoop-yarn/bin/yarn | 366 ++
 1 file changed, 197 insertions(+), 169 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/03699e03/hadoop-yarn-project/hadoop-yarn/bin/yarn
--
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn 
b/hadoop-yarn-project/hadoop-yarn/bin/yarn
index cac3bb6..7544b58 100755
--- a/hadoop-yarn-project/hadoop-yarn/bin/yarn
+++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn
@@ -16,7 +16,12 @@
 # limitations under the License.
 
 MYNAME="${BASH_SOURCE-$0}"
+HADOOP_SHELL_EXECNAME="${MYNAME##*/}"
 
+## @description  build up the yarn command's usage text.
+## @audience     public
+## @stability    stable
+## @replaceable  no
 function hadoop_usage
 {
   hadoop_add_option "--buildpaths" "attempt to add class files from build tree"
@@ -46,9 +51,180 @@ function hadoop_usage
   hadoop_add_subcommand "timelineserver" "run the timeline server"
   hadoop_add_subcommand "top" "view cluster information"
   hadoop_add_subcommand "version" "print the version"
-  hadoop_generate_usage "${MYNAME}" true
+  hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
 }
 
+## @description  Default command handler for yarn command
+## @audience     public
+## @stability    stable
+## @replaceable  no
+## @param        CLI arguments
+function yarncmd_case
+{
+  subcmd=$1
+  shift
+
+  case ${subcmd} in
+application|applicationattempt|container)
+  HADOOP_CLASSNAME=org.apache.hadoop.yarn.client.cli.ApplicationCLI
+  hadoop_debug "Append YARN_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${YARN_CLIENT_OPTS}"
+  set -- "${subcmd}" "$@"
+;;
+classpath)
+  hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
+;;
+cluster)
+  HADOOP_CLASSNAME=org.apache.hadoop.yarn.client.cli.ClusterCLI
+  hadoop_debug "Append YARN_CLIENT_OPTS onto YARN_OPTS"
+  YARN_OPTS="${YARN_OPTS} ${YARN_CLIENT_OPTS}"
+;;
+daemonlog)
+  HADOOP_CLASSNAME=org.apache.hadoop.log.LogLevel
+  hadoop_debug "Append YARN_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${YARN_CLIENT_OPTS}"
+;;
+envvars)
+  echo "JAVA_HOME='${JAVA_HOME}'"
+  echo "HADOOP_YARN_HOME='${HADOOP_YARN_HOME}'"
+  echo "YARN_DIR='${YARN_DIR}'"
+  echo "YARN_LIB_JARS_DIR='${YARN_LIB_JARS_DIR}'"
+  echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
+  echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
+  echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
+  echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
+  exit 0
+;;
+jar)
+  HADOOP_CLASSNAME=org.apache.hadoop.util.RunJar
+  hadoop_debug "Append YARN_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${YARN_CLIENT_OPTS}"
+;;
+historyserver)
+  supportdaemonization="true"
+  echo "DEPRECATED: Use of this command to start the timeline server is 
deprecated." 1>&2
+  echo "Instead use the timelineserver command for it." 1>&2
+  echo "Starting the History Server anyway..." 1>&2
+  HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer'
+;;
+logs)
+  HADOOP_CLASSNAME=org.apache.hadoop.yarn.client.cli.LogsCLI
+  hadoop_debug "Append YARN_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${YARN_CLIENT_OPTS}"
+;;
+node)
+  HADOOP_CLASSNAME=org.apache.hadoop.yarn.client.cli.NodeCLI
+  hadoop_debug "Append YARN_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${YARN_CLIENT_OPTS}"
+;;
+nodemanager)
+  supportdaemonization="true"
+  HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.nodemanager.NodeManager'
+  hadoop_debug "Append YARN_NODEMANAGER_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${YARN_NODEMANAGER_OPTS}"
+  # Backwards compatibility
+  if [[ -n "${YARN_NODEMANAGER_HEAPSIZE}" ]]; then
+HADOOP_HEAPSIZE_MAX="${YARN_NODEMANAGER_HEAPSIZE}"
+  fi
+;;
+proxyserver)
+  supportdaemonization="true"
+  HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.webproxy.WebAppProxyServer'
+  hadoop_debug "Append YARN_PROXYSERVER_OPTS onto 

[05/18] hadoop git commit: YARN-4905. Improved "yarn logs" command-line to optionally show log metadata also. Contributed by Xuan Gong.

2016-05-04 Thread aw
YARN-4905. Improved "yarn logs" command-line to optionally show log metadata 
also. Contributed by Xuan Gong.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9e37fe3b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9e37fe3b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9e37fe3b

Branch: refs/heads/HADOOP-12930
Commit: 9e37fe3b7a3b5f0a193d228bb5e065f41acd2835
Parents: 7bd418e
Author: Vinod Kumar Vavilapalli 
Authored: Wed May 4 14:16:03 2016 -0700
Committer: Vinod Kumar Vavilapalli 
Committed: Wed May 4 14:16:03 2016 -0700

--
 .../apache/hadoop/yarn/client/cli/LogsCLI.java  | 507 +++
 .../hadoop/yarn/client/cli/TestLogsCLI.java | 190 ++-
 .../logaggregation/AggregatedLogFormat.java |  20 +
 .../yarn/logaggregation/LogCLIHelpers.java  | 192 +--
 4 files changed, 654 insertions(+), 255 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e37fe3b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
index 2c4fee6..487b694 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerReport;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.client.api.YarnClient;
@@ -78,59 +79,16 @@ public class LogsCLI extends Configured implements Tool {
   private static final String APP_OWNER_OPTION = "appOwner";
   private static final String AM_CONTAINER_OPTION = "am";
   private static final String CONTAINER_LOG_FILES = "logFiles";
+  private static final String SHOW_META_INFO = "show_meta_info";
+  private static final String LIST_NODES_OPTION = "list_nodes";
   public static final String HELP_CMD = "help";
 
   @Override
   public int run(String[] args) throws Exception {
 
-Options opts = new Options();
-opts.addOption(HELP_CMD, false, "Displays help for all commands.");
-Option appIdOpt =
-new Option(APPLICATION_ID_OPTION, true, "ApplicationId (required)");
-appIdOpt.setRequired(true);
-opts.addOption(appIdOpt);
-opts.addOption(CONTAINER_ID_OPTION, true, "ContainerId. "
-+ "By default, it will only print syslog if the application is runing."
-+ " Work with -logFiles to get other logs.");
-opts.addOption(NODE_ADDRESS_OPTION, true, "NodeAddress in the format "
-  + "nodename:port");
-opts.addOption(APP_OWNER_OPTION, true,
-  "AppOwner (assumed to be current user if not specified)");
-Option amOption = new Option(AM_CONTAINER_OPTION, true, 
-  "Prints the AM Container logs for this application. "
-  + "Specify comma-separated value to get logs for related AM Container. "
-  + "For example, If we specify -am 1,2, we will get the logs for "
-  + "the first AM Container as well as the second AM Container. "
-  + "To get logs for all AM Containers, use -am ALL. "
-  + "To get logs for the latest AM Container, use -am -1. "
-  + "By default, it will only print out syslog. Work with -logFiles "
-  + "to get other logs");
-amOption.setValueSeparator(',');
-amOption.setArgs(Option.UNLIMITED_VALUES);
-amOption.setArgName("AM Containers");
-opts.addOption(amOption);
-Option logFileOpt = new Option(CONTAINER_LOG_FILES, true,
-  "Work with -am/-containerId and specify comma-separated value "
-+ "to get specified container log files. Use \"ALL\" to fetch all the "
-+ "log files for the container.");
-logFileOpt.setValueSeparator(',');
-logFileOpt.setArgs(Option.UNLIMITED_VALUES);
-logFileOpt.setArgName("Log File Name");
-opts.addOption(logFileOpt);
-
-opts.getOption(APPLICATION_ID_OPTION).setArgName("Application ID");
-opts.getOption(CONTAINER_ID_OPTION).setArgName("Container ID");
-opts.getOption(NODE_ADDRESS_OPTION).setArgName("Node Address");
-opts.getOption(APP_OWNER_OPTION).setArgName("Application Owner");
-
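
The option definitions above map one-to-one onto command-line usage. A sketch
of invoking the logs CLI with the flags this patch documents and adds (the
application and container IDs below are placeholders, not real values):

    # only the syslog of the latest AM container, per the -am help text above
    yarn logs -applicationId application_1462399835153_0001 -am -1

    # specific log files for one container; -logFiles takes a comma-separated list
    yarn logs -applicationId application_1462399835153_0001 \
      -containerId container_1462399835153_0001_01_000002 \
      -logFiles stderr,stdout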

[16/18] hadoop git commit: HADOOP-12931. bin/hadoop work for dynamic subcommands

2016-05-04 Thread aw
HADOOP-12931. bin/hadoop work for dynamic subcommands


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ef60d131
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ef60d131
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ef60d131

Branch: refs/heads/HADOOP-12930
Commit: ef60d131e00cf5cf1ab25577eaf096ba6aec80b3
Parents: 7754fcb
Author: Allen Wittenauer 
Authored: Tue May 3 10:49:46 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 20:43:21 2016 -0700

--
 .../hadoop-common/src/main/bin/hadoop   | 264 ++-
 1 file changed, 143 insertions(+), 121 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ef60d131/hadoop-common-project/hadoop-common/src/main/bin/hadoop
--
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop 
b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
index 23fa9c7..81bff75 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
@@ -16,7 +16,12 @@
 # limitations under the License.
 
 MYNAME="${BASH_SOURCE-$0}"
+HADOOP_SHELL_EXECNAME="${MYNAME##*/}"
 
+## @description  build up the hadoop command's usage text.
+## @audience     public
+## @stability    stable
+## @replaceable  no
 function hadoop_usage
 {
   hadoop_add_option "buildpaths" "attempt to add class files from build tree"
@@ -42,7 +47,135 @@ function hadoop_usage
   hadoop_add_subcommand "key" "manage keys via the KeyProvider"
   hadoop_add_subcommand "trace" "view and modify Hadoop tracing settings"
   hadoop_add_subcommand "version" "print the version"
-  hadoop_generate_usage "${MYNAME}" true
+  hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
+}
+
+## @description  Default command handler for hadoop command
+## @audience     public
+## @stability    stable
+## @replaceable  no
+## @param        CLI arguments
+function hadoopcmd_case
+{
+  subcmd=$1
+  shift
+
+  case ${subcmd} in
+balancer|datanode|dfs|dfsadmin|dfsgroups|  \
+namenode|secondarynamenode|fsck|fetchdt|oiv| \
+portmap|nfs3)
+  hadoop_error "WARNING: Use of this script to execute ${subcmd} is 
deprecated."
+  subcmd=${subcmd/dfsgroups/groups}
+  hadoop_error "WARNING: Attempting to execute replacement \"hdfs 
${subcmd}\" instead."
+  hadoop_error ""
+  #try to locate hdfs and if present, delegate to it.
+  if [[ -f "${HADOOP_HDFS_HOME}/bin/hdfs" ]]; then
+# shellcheck disable=SC2086
+exec "${HADOOP_HDFS_HOME}/bin/hdfs" \
+--config "${HADOOP_CONF_DIR}" "${subcmd}"  "$@"
+  elif [[ -f "${HADOOP_HOME}/bin/hdfs" ]]; then
+# shellcheck disable=SC2086
+exec "${HADOOP_HOME}/bin/hdfs" \
+--config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
+  else
+hadoop_error "HADOOP_HDFS_HOME not found!"
+exit 1
+  fi
+;;
+
+#mapred commands for backwards compatibility
+pipes|job|queue|mrgroups|mradmin|jobtracker|tasktracker)
+  hadoop_error "WARNING: Use of this script to execute ${subcmd} is 
deprecated."
+  subcmd=${subcmd/mrgroups/groups}
+  hadoop_error "WARNING: Attempting to execute replacement \"mapred 
${subcmd}\" instead."
+  hadoop_error ""
+  #try to locate mapred and if present, delegate to it.
+  if [[ -f "${HADOOP_MAPRED_HOME}/bin/mapred" ]]; then
+exec "${HADOOP_MAPRED_HOME}/bin/mapred" \
+--config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
+  elif [[ -f "${HADOOP_HOME}/bin/mapred" ]]; then
+exec "${HADOOP_HOME}/bin/mapred" \
+--config "${HADOOP_CONF_DIR}" "${subcmd}" "$@"
+  else
+hadoop_error "HADOOP_MAPRED_HOME not found!"
+exit 1
+  fi
+;;
+archive)
+  HADOOP_CLASS=org.apache.hadoop.tools.HadoopArchives
+  hadoop_add_to_classpath_tools hadoop-archives
+;;
+checknative)
+  HADOOP_CLASS=org.apache.hadoop.util.NativeLibraryChecker
+;;
+classpath)
+  hadoop_do_classpath_subcommand HADOOP_CLASS "$@"
+;;
+conftest)
+  HADOOP_CLASS=org.apache.hadoop.util.ConfTest
+;;
+credential)
+  HADOOP_CLASS=org.apache.hadoop.security.alias.CredentialShell
+;;
+daemonlog)
+  HADOOP_CLASS=org.apache.hadoop.log.LogLevel
+;;
+distch)
+  HADOOP_CLASS=org.apache.hadoop.tools.DistCh
+  hadoop_add_to_classpath_tools hadoop-extras
+;;
+distcp)
+  HADOOP_CLASS=org.apache.hadoop.tools.DistCp
+  hadoop_add_to_classpath_tools hadoop-distcp
+;;
+dtutil)
+  HADOOP_CLASS=org.apache.hadoop.security.token.DtUtilShell
+;;
+envvars)
+  echo "JAVA_HOME='${JAVA_HOME}'"
+  echo 
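
The deprecation arms above keep old invocations working by re-exec'ing the
per-project script. A sketch of the expected behavior, with the warnings
paraphrased from the hadoop_error strings in hadoopcmd_case:

    # "hadoop fsck" now warns and delegates to "hdfs fsck":
    #   WARNING: Use of this script to execute fsck is deprecated.
    #   WARNING: Attempting to execute replacement "hdfs fsck" instead.
    hadoop fsck /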

[12/18] hadoop git commit: HADOOP-12934. bin/mapred work for dynamic subcommands

2016-05-04 Thread aw
HADOOP-12934. bin/mapred work for dynamic subcommands


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ff0d5fac
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ff0d5fac
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ff0d5fac

Branch: refs/heads/HADOOP-12930
Commit: ff0d5fac269f926cf79fac86e2ce9bfb8259bab3
Parents: ef60d13
Author: Allen Wittenauer 
Authored: Tue May 3 12:44:45 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 20:43:21 2016 -0700

--
 hadoop-mapreduce-project/bin/mapred | 215 +--
 1 file changed, 120 insertions(+), 95 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ff0d5fac/hadoop-mapreduce-project/bin/mapred
--
diff --git a/hadoop-mapreduce-project/bin/mapred 
b/hadoop-mapreduce-project/bin/mapred
index f280f31..69b00b3 100755
--- a/hadoop-mapreduce-project/bin/mapred
+++ b/hadoop-mapreduce-project/bin/mapred
@@ -16,7 +16,12 @@
 # limitations under the License.
 
 MYNAME="${BASH_SOURCE-$0}"
+HADOOP_SHELL_EXECNAME="${MYNAME##*/}"
 
+## @description  build up the mapred command's usage text.
+## @audience     public
+## @stability    stable
+## @replaceable  no
 function hadoop_usage
 {
   hadoop_add_subcommand "archive" "create a hadoop archive"
@@ -31,7 +36,103 @@ function hadoop_usage
   hadoop_add_subcommand "queue" "get information regarding JobQueues"
   hadoop_add_subcommand "sampler" "sampler"
   hadoop_add_subcommand "version" "print the version"
-  hadoop_generate_usage "${MYNAME}" true
+  hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
+}
+
+## @description  Default command handler for mapred command
+## @audience     public
+## @stability    stable
+## @replaceable  no
+## @param        CLI arguments
+function mapredcmd_case
+{
+  subcmd=$1
+  shift
+
+  case ${subcmd} in
+mradmin|jobtracker|tasktracker|groups)
+  hadoop_error "Sorry, the ${subcmd} command is no longer supported."
+  hadoop_error "You may find similar functionality with the \"yarn\" shell 
command."
+  hadoop_exit_with_usage 1
+;;
+archive)
+  HADOOP_CLASSNAME=org.apache.hadoop.tools.HadoopArchives
+  hadoop_add_to_classpath_tools hadoop-archives
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+;;
+archive-logs)
+  HADOOP_CLASSNAME=org.apache.hadoop.tools.HadoopArchiveLogs
+  hadoop_add_to_classpath_tools hadoop-archive-logs
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+;;
+classpath)
+  hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
+;;
+distcp)
+  HADOOP_CLASSNAME=org.apache.hadoop.tools.DistCp
+  hadoop_add_to_classpath_tools hadoop-distcp
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+;;
+envvars)
+  echo "JAVA_HOME='${JAVA_HOME}'"
+  echo "HADOOP_MAPRED_HOME='${HADOOP_MAPRED_HOME}'"
+  echo "MAPRED_DIR='${MAPRED_DIR}'"
+  echo "MAPRED_LIB_JARS_DIR='${MAPRED_LIB_JARS_DIR}'"
+  echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
+  echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
+  echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
+  echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
+  exit 0
+;;
+historyserver)
+  supportdaemonization="true"
+  HADOOP_CLASSNAME=org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer
+  hadoop_debug "Appending HADOOP_JOB_HISTORYSERVER_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_JOB_HISTORYSERVER_OPTS}"
+  if [ -n "${HADOOP_JOB_HISTORYSERVER_HEAPSIZE}" ]; then
+# shellcheck disable=SC2034
+HADOOP_HEAPSIZE_MAX="${HADOOP_JOB_HISTORYSERVER_HEAPSIZE}"
+  fi
+  HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_JHS_LOGGER:-$HADOOP_DAEMON_ROOT_LOGGER}
+;;
+hsadmin)
+  HADOOP_CLASSNAME=org.apache.hadoop.mapreduce.v2.hs.client.HSAdmin
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+;;
+job)
+  HADOOP_CLASSNAME=org.apache.hadoop.mapred.JobClient
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+;;
+pipes)
+  HADOOP_CLASSNAME=org.apache.hadoop.mapred.pipes.Submitter
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+;;
+queue)
+  HADOOP_CLASSNAME=org.apache.hadoop.mapred.JobQueueClient
+;;
+sampler)
+  

[13/18] hadoop git commit: HADOOP-13087. env var doc update for dynamic commands

2016-05-04 Thread aw
HADOOP-13087. env var doc update for dynamic commands


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ec7e8c32
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ec7e8c32
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ec7e8c32

Branch: refs/heads/HADOOP-12930
Commit: ec7e8c32ef719028c0f5c34d3963371dcb1c1a24
Parents: 4517b49
Author: Allen Wittenauer 
Authored: Wed May 4 10:53:25 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 20:43:21 2016 -0700

--
 .../src/site/markdown/UnixShellGuide.md | 22 
 1 file changed, 22 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ec7e8c32/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
--
diff --git 
a/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md 
b/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
index 668a744..a459012 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
@@ -154,3 +154,25 @@ function hdfs_subcommand_fetchdt
 ```
 
 ... will replace the existing `hdfs fetchdt` subcommand with a custom one.
+
+Some key environment variables related to Dynamic Subcommands:
+
+* HADOOP\_CLASSNAME
+
+This is the name of the Java class to execute.
+
+* HADOOP\_SHELL\_EXECNAME
+
+This is the name of the script that is being executed.  It will be one of hadoop, hdfs, mapred, or yarn.
+
+* HADOOP\_SUBCMD\_SECURESERVICE
+
+If this command should/will be executed as a secure daemon, set this to true.
+
+* HADOOP\_SUBCMD\_SECUREUSER
+
+If this command should/will be executed as a secure daemon, set the user name to be used.
+
+* HADOOP\_SUBCMD\_SUPPORTDAEMONIZATION
+
+If this command can be executed as a daemon, set this to true.
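
Taken together, these variables let a dynamic subcommand opt in to the same
launcher machinery the built-ins use. A minimal sketch of a hook that sets
them, assuming a user-supplied shellprofile (the myservice name and its class
are illustrative, not part of this patch):

    # hypothetical file: ${HADOOP_CONF_DIR}/shellprofile.d/myservice.sh
    function hdfs_subcommand_myservice
    {
      # the Java class the generic launcher should run (illustrative name)
      HADOOP_CLASSNAME=org.example.MyService
      # allow this subcommand to be run as a daemon
      HADOOP_SUBCMD_SUPPORTDAEMONIZATION=true
      # uncomment to run as a secure daemon under a dedicated user:
      # HADOOP_SUBCMD_SECURESERVICE=true
      # HADOOP_SUBCMD_SECUREUSER=myservice
    }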


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



[01/18] hadoop git commit: HADOOP-13068. Clean up RunJar and related test class. (Contributed by Andras Bokor) [Forced Update!]

2016-05-04 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/HADOOP-12930 55c604517 -> 1df23089e (forced update)


HADOOP-13068. Clean up RunJar and related test class. (Contributed by Andras Bokor)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f343d91e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f343d91e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f343d91e

Branch: refs/heads/HADOOP-12930
Commit: f343d91ecc0d1c6d9dc9810faf68ec04f7b07c2f
Parents: 36972d6
Author: Arpit Agarwal 
Authored: Wed May 4 09:49:33 2016 -0700
Committer: Arpit Agarwal 
Committed: Wed May 4 09:49:33 2016 -0700

--
 .../java/org/apache/hadoop/util/RunJar.java | 89 +++-
 .../java/org/apache/hadoop/util/TestRunJar.java | 62 +++---
 2 files changed, 80 insertions(+), 71 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f343d91e/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
index 52cf05c..19b51ad 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/RunJar.java
@@ -23,7 +23,6 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.lang.reflect.Array;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.net.MalformedURLException;
@@ -40,7 +39,6 @@ import java.util.regex.Pattern;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.io.IOUtils;
 import org.slf4j.Logger;
@@ -53,7 +51,7 @@ public class RunJar {
 
   private static final Logger LOG = LoggerFactory.getLogger(RunJar.class);
 
-  /** Pattern that matches any string */
+  /** Pattern that matches any string. */
   public static final Pattern MATCH_ANY = Pattern.compile(".*");
 
   /**
@@ -77,9 +75,20 @@ public class RunJar {
   "HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES";
 
   /**
+   * Buffer size for copying the content of a compressed file to a new file.
+   */
+  private static final int BUFFER_SIZE = 8_192;
+
+  /**
* Unpack a jar file into a directory.
*
* This version unpacks all files inside the jar regardless of filename.
+   *
+   * @param jarFile the .jar file to unpack
+   * @param toDir the destination directory into which to unpack the jar
+   *
+   * @throws IOException if an I/O error has occurred or toDir
+   * cannot be created and does not already exist
*/
   public static void unJar(File jarFile, File toDir) throws IOException {
 unJar(jarFile, toDir, MATCH_ANY);
@@ -92,47 +101,43 @@ public class RunJar {
* @param jarFile the .jar file to unpack
* @param toDir the destination directory into which to unpack the jar
* @param unpackRegex the pattern to match jar entries against
+   *
+   * @throws IOException if an I/O error has occurred or toDir
+   * cannot be created and does not already exist
*/
   public static void unJar(File jarFile, File toDir, Pattern unpackRegex)
-throws IOException {
-JarFile jar = new JarFile(jarFile);
-try {
+  throws IOException {
+try (JarFile jar = new JarFile(jarFile)) {
   int numOfFailedLastModifiedSet = 0;
      Enumeration<JarEntry> entries = jar.entries();
   while (entries.hasMoreElements()) {
 final JarEntry entry = entries.nextElement();
 if (!entry.isDirectory() &&
 unpackRegex.matcher(entry.getName()).matches()) {
-  InputStream in = jar.getInputStream(entry);
-  try {
+  try (InputStream in = jar.getInputStream(entry)) {
 File file = new File(toDir, entry.getName());
 ensureDirectory(file.getParentFile());
-OutputStream out = new FileOutputStream(file);
-try {
-  IOUtils.copyBytes(in, out, 8192);
-} finally {
-  out.close();
+try (OutputStream out = new FileOutputStream(file)) {
+  IOUtils.copyBytes(in, out, BUFFER_SIZE);
 }
 if (!file.setLastModified(entry.getTime())) {
   numOfFailedLastModifiedSet++;
 }
-  } finally {
-in.close();
   }
 }
   }
   if (numOfFailedLastModifiedSet > 0) {
 

[09/18] hadoop git commit: HADOOP-13086. enable daemonization of dynamic commands

2016-05-04 Thread aw
HADOOP-13086. enable daemonization of dynamic commands


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b1417a52
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b1417a52
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b1417a52

Branch: refs/heads/HADOOP-12930
Commit: b1417a52af2faae82dd2ed42eabede72f4e83ca1
Parents: b67da29
Author: Allen Wittenauer 
Authored: Wed May 4 10:13:18 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 20:43:21 2016 -0700

--
 .../hadoop-common/src/main/bin/hadoop   | 53 ++-
 .../hadoop-hdfs/src/main/bin/hdfs   | 36 ++---
 hadoop-mapreduce-project/bin/mapred | 49 +++--
 hadoop-yarn-project/hadoop-yarn/bin/yarn| 55 ++--
 4 files changed, 142 insertions(+), 51 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1417a52/hadoop-common-project/hadoop-common/src/main/bin/hadoop
--
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop 
b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
index bb4b041..7b18d22 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
@@ -207,6 +207,57 @@ fi
 hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
 HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
 
+if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
+  HADOOP_SECURE_USER="${HADOOP_SUBCMD_SECUREUSER}"
+  hadoop_verify_secure_prereq
+  hadoop_setup_secure_service
+  priv_outfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
+  priv_errfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.err"
+  priv_pidfile="${HADOOP_PID_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
+  daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
+  daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
+else
+  daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
+  daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
+fi
+
+if [[ "${HADOOP_DAEMON_MODE}" != "default" ]]; then
+  # shellcheck disable=SC2034
+  HADOOP_ROOT_LOGGER="${HADOOP_DAEMON_ROOT_LOGGER}"
+  if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
+# shellcheck disable=SC2034
+HADOOP_LOGFILE="hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.log"
+  else
+# shellcheck disable=SC2034
+HADOOP_LOGFILE="hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.log"
+  fi
+fi
+
 hadoop_finalize
-hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "$@"
 
+if [[ -n "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
+  if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
+hadoop_secure_daemon_handler \
+  "${HADOOP_DAEMON_MODE}" \
+  "${HADOOP_SUBCMD}" \
+  "${HADOOP_CLASSNAME}" \
+  "${daemon_pidfile}" \
+  "${daemon_outfile}" \
+  "${priv_pidfile}" \
+  "${priv_outfile}" \
+  "${priv_errfile}" \
+  "$@"
+  else
+hadoop_daemon_handler \
+  "${HADOOP_DAEMON_MODE}" \
+  "${HADOOP_SUBCMD}" \
+  "${HADOOP_CLASSNAME}" \
+  "${daemon_pidfile}" \
+  "${daemon_outfile}" \
+  "$@"
+  fi
+  exit $?
+else
+  # shellcheck disable=SC2086
+  hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "$@"
+fi
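
The effect of the block above is that daemonization becomes declarative: a
case arm only sets HADOOP_SUBCMD_SUPPORTDAEMONIZATION (and optionally the
secure-service variables), and this shared tail decides between
hadoop_java_exec and the daemon handlers. A sketch of the operator-facing
result, assuming the --daemon option wired up elsewhere on the HADOOP-12930
branch:

    # no HADOOP_SUBCMD_SUPPORTDAEMONIZATION set: runs in the foreground
    hadoop conftest

    # datanode sets it, so the same tail can background the process,
    # writing the pid/out files constructed above
    hdfs --daemon start datanode
    hdfs --daemon stop datanode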

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1417a52/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
index 310fb41..4c0b7fb 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
@@ -76,7 +76,7 @@ function hdfscmd_case
 
   case ${subcmd} in
 balancer)
-  supportdaemonization="true"
+  HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
   HADOOP_CLASSNAME=org.apache.hadoop.hdfs.server.balancer.Balancer
   hadoop_debug "Appending HADOOP_BALANCER_OPTS onto HADOOP_OPTS"
   HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_BALANCER_OPTS}"
@@ -91,12 +91,12 @@ function hdfscmd_case
   HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.CryptoAdmin
 ;;
 datanode)
-  supportdaemonization="true"
+  HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
   # Determine if we're starting a secure datanode, and
   # if so, redefine appropriate variables
   if [[ 

[02/18] hadoop git commit: HADOOP-12469. distcp should not ignore the ignoreFailures option. Contributed by Mingliang Liu.

2016-05-04 Thread aw
HADOOP-12469. distcp should not ignore the ignoreFailures option. Contributed by Mingliang Liu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/af942585
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/af942585
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/af942585

Branch: refs/heads/HADOOP-12930
Commit: af942585a108d70e0946f6dd4c465a54d068eabf
Parents: f343d91
Author: Jing Zhao 
Authored: Wed May 4 10:23:04 2016 -0700
Committer: Jing Zhao 
Committed: Wed May 4 10:23:04 2016 -0700

--
 .../apache/hadoop/tools/mapred/CopyMapper.java  |  6 +-
 .../hadoop/tools/mapred/TestCopyMapper.java | 85 
 2 files changed, 89 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/af942585/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
index 09bcead..4db1d4e 100644
--- 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
+++ 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
@@ -22,6 +22,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.EnumSet;
 
+import org.apache.commons.lang.exception.ExceptionUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -36,6 +37,7 @@ import org.apache.hadoop.tools.DistCpConstants;
 import org.apache.hadoop.tools.DistCpOptionSwitch;
 import org.apache.hadoop.tools.DistCpOptions;
 import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
+import org.apache.hadoop.tools.mapred.RetriableFileCopyCommand.CopyReadException;
 import org.apache.hadoop.tools.util.DistCpUtils;
 import org.apache.hadoop.util.StringUtils;
 
@@ -251,8 +253,8 @@ public class CopyMapper extends Mapper<Text, CopyListingFileStatus, Text, Text>
 LOG.error("Failure in copying " + sourceFileStatus.getPath() + " to " +
 target, exception);
 
-if (ignoreFailures && exception.getCause() instanceof
-RetriableFileCopyCommand.CopyReadException) {
+if (ignoreFailures &&
+ExceptionUtils.indexOfType(exception, CopyReadException.class) != -1) {
   incrementCounter(context, Counter.FAIL, 1);
   incrementCounter(context, Counter.BYTESFAILED, sourceFileStatus.getLen());
   context.write(null, new Text("FAIL: " + sourceFileStatus.getPath() + " - " +

http://git-wip-us.apache.org/repos/asf/hadoop/blob/af942585/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
 
b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
index 4d0752f..866ad6e 100644
--- 
a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
+++ 
b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
@@ -392,6 +392,8 @@ public class TestCopyMapper {
   public void testIgnoreFailures() {
 doTestIgnoreFailures(true);
 doTestIgnoreFailures(false);
+doTestIgnoreFailuresDoubleWrapped(true);
+doTestIgnoreFailuresDoubleWrapped(false);
   }
 
   @Test(timeout=4)
@@ -800,6 +802,89 @@ public class TestCopyMapper {
 }
   }
 
+  /**
+   * This test covers the case where the CopyReadException is double-wrapped and
+   * the mapper should be able to ignore this nested read exception.
+   * @see #doTestIgnoreFailures
+   */
+  private void doTestIgnoreFailuresDoubleWrapped(final boolean ignoreFailures) {
+try {
+  deleteState();
+  createSourceData();
+
+  final UserGroupInformation tmpUser = UserGroupInformation
+  .createRemoteUser("guest");
+
+  final CopyMapper copyMapper = new CopyMapper();
+
+  final Mapper<Text, CopyListingFileStatus, Text, Text>.Context context =
+  tmpUser.doAs(new PrivilegedAction<
+  Mapper<Text, CopyListingFileStatus, Text, Text>.Context>() {
+@Override
+public Mapper<Text, CopyListingFileStatus, Text, Text>.Context
+run() {
+  try {
+StubContext stubContext = new StubContext(
+getConfiguration(), null, 0);
+return stubContext.getContext();
+  } catch (Exception e) 
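
Operationally, the fix matters for distcp's -i (ignore failures) flag, which
previously lost its effect whenever a CopyReadException arrived wrapped more
than one level deep. A sketch of the invocation being protected (both paths
are placeholders):

    # with -i, read-side failures increment the FAIL/BYTESFAILED counters
    # instead of failing the map task, regardless of how deeply the
    # CopyReadException is nested
    hadoop distcp -i hdfs://src-cluster/data hdfs://dst-cluster/data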

hadoop git commit: HADOOP-13095. hadoop-hdfs unit tests for dynamic commands

2016-05-04 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/HADOOP-12930 8092e9c7b -> 55c604517


HADOOP-13095. hadoop-hdfs unit tests for dynamic commands


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/55c60451
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/55c60451
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/55c60451

Branch: refs/heads/HADOOP-12930
Commit: 55c60451780b51987f59bef4dd0094b16a98efaa
Parents: 8092e9c
Author: Allen Wittenauer 
Authored: Wed May 4 20:40:39 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 20:40:39 2016 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/pom.xml | 38 ++-
 .../scripts/hdfs-functions_test_helper.bash | 58 +
 .../src/test/scripts/hdfs_subcommands.bats  | 66 
 .../hadoop-hdfs/src/test/scripts/run-bats.sh| 43 +
 4 files changed, 204 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/55c60451/hadoop-hdfs-project/hadoop-hdfs/pom.xml
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml 
b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
index 668bbfe..c8198c1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -417,7 +417,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd;>
 
   
 
-  
+  
 
 
   startKdc
@@ -559,5 +559,41 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd;>
 
   
 
+
+
+
+  shelltest
+  
+
+  !skipTests
+
+  
+  
+
+  
+maven-antrun-plugin
+
+
+hdfs-test-bats-driver
+test
+
+run
+
+
+  
+  
+   
+ 
+  
+
+
+
+  
+
+  
+
+
   
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/55c60451/hadoop-hdfs-project/hadoop-hdfs/src/test/scripts/hdfs-functions_test_helper.bash
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/scripts/hdfs-functions_test_helper.bash
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/scripts/hdfs-functions_test_helper.bash
new file mode 100755
index 000..d3cdda4
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/scripts/hdfs-functions_test_helper.bash
@@ -0,0 +1,58 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+setup() {
+
+  TMP="${BATS_TEST_DIRNAME}/../../../target/test-dir/bats.$$.${RANDOM}"
+  mkdir -p "${TMP}"
+  TMP=$(cd -P -- "${TMP}" >/dev/null && pwd -P)
+  export TMP
+  TESTBINDIR="${BATS_TEST_DIRNAME}"
+  HADOOP_LIBEXEC_DIR=${TESTBINDIR}/../../main/bin
+  HADOOP_LIBEXEC_DIR=$(cd -P -- "${HADOOP_LIBEXEC_DIR}" >/dev/null && pwd -P)
+
+  # shellcheck disable=SC2034
+  HADOOP_SHELL_SCRIPT_DEBUG=true
+  unset HADOOP_CONF_DIR
+  # we unset both of these for bw compat
+  unset HADOOP_HOME
+  unset HADOOP_PREFIX
+
+  echo "bindir: ${TESTBINDIR}" 2>&1
+
+  mkdir -p "${TMP}"
+
+  # shellcheck disable=SC2034
+  QATESTMODE=true
+
+  # shellcheck disable=SC1090
+  . "${BATS_TEST_DIRNAME}/../../../../../hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh"
+  pushd "${TMP}" >/dev/null
+}
+
+teardown() {
+  popd >/dev/null
+  rm -rf "${TMP}"
+}
+
+
+strstr() {
+  if [ "${1#*$2}" != "${1}" ]; then
+echo true
+  else
+echo false
+  fi
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/55c60451/hadoop-hdfs-project/hadoop-hdfs/src/test/scripts/hdfs_subcommands.bats
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/scripts/hdfs_subcommands.bats 
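
The helper above wires bats into the normal build: the shelltest profile added
to the pom runs the new driver during the test phase. A sketch of both entry
points, assuming bats is installed on the build host:

    # via maven; the profile activates whenever skipTests is unset
    mvn -pl hadoop-hdfs-project/hadoop-hdfs test

    # or directly, using the driver added by this patch
    bash hadoop-hdfs-project/hadoop-hdfs/src/test/scripts/run-bats.sh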

hadoop git commit: HADOOP-13094. hadoop-common unit tests for dynamic commands [Forced Update!]

2016-05-04 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/HADOOP-12930 7bf750d0d -> 8092e9c7b (forced update)


HADOOP-13094.  hadoop-common unit tests for dynamic commands


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8092e9c7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8092e9c7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8092e9c7

Branch: refs/heads/HADOOP-12930
Commit: 8092e9c7b4d6b8a77a1b6931926233a66159de74
Parents: e886e4d
Author: Allen Wittenauer 
Authored: Wed May 4 17:41:23 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 18:18:24 2016 -0700

--
 .../scripts/hadoop-functions_test_helper.bash   |  4 +-
 .../src/test/scripts/hadoop_subcommands.bats| 66 
 2 files changed, 68 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8092e9c7/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
 
b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
index be2d7f5..cc37268 100755
--- 
a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
+++ 
b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
@@ -16,7 +16,7 @@
 
 setup() {
 
-  TMP=../../../target/test-dir/bats.$$.${RANDOM}
+  TMP="${BATS_TEST_DIRNAME}/../../../target/test-dir/bats.$$.${RANDOM}"
   mkdir -p ${TMP}
   TMP=$(cd -P -- "${TMP}" >/dev/null && pwd -P)
   export TMP
@@ -38,7 +38,7 @@ setup() {
   # shellcheck disable=SC2034
   QATESTMODE=true
 
-  . ../../main/bin/hadoop-functions.sh
+  . "${BATS_TEST_DIRNAME}/../../main/bin/hadoop-functions.sh"
   pushd "${TMP}" >/dev/null
 }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8092e9c7/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_subcommands.bats
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_subcommands.bats 
b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_subcommands.bats
new file mode 100755
index 000..eda2e3d
--- /dev/null
+++ 
b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_subcommands.bats
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+# the loading of shell profiles is tested elsewhere
+# this only tests the specific subcommand parts
+
+subcommandsetup () {
+  export HADOOP_LIBEXEC_DIR="${TMP}/libexec"
+  export HADOOP_CONF_DIR="${TMP}/conf"
+  mkdir -p "${HADOOP_LIBEXEC_DIR}/shellprofile.d" 
"${HADOOP_CONF_DIR}/shellprofile.d"
+  cat <<-'TOKEN'   > "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
+
+hadoop_subcommand_sub () {
+  echo "unittest"
+  exit 0
+}
+
+hadoop_subcommand_conftest ()
+{
+  echo conftest
+  exit 0
+}
+
+hadoop_subcommand_envcheck ()
+{
+  echo ${HADOOP_SHELL_EXECNAME}
+  exit 0
+}
+TOKEN
+  chmod a+rx "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
+
+}
+
+@test "hadoop_subcommand (addition)" {
+  subcommandsetup
+  run "${BATS_TEST_DIRNAME}/../../main/bin/hadoop" sub
+  echo ">${output}<"
+  [ "${output}" = unittest ]
+}
+
+@test "hadoop_subcommand (substitute)" {
+  subcommandsetup
+  run "${BATS_TEST_DIRNAME}/../../main/bin/hadoop" conftest
+  echo ">${output}<"
+  [ "${output}" = conftest ]
+}
+
+@test "hadoop_subcommand (envcheck)" {
+  subcommandsetup
+  run "${BATS_TEST_DIRNAME}/../../main/bin/hadoop" envcheck
+  [ "${output}" = hadoop ]
+}


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



hadoop git commit: HADOOP-13094. hadoop-common unit tests for dynamic commands

2016-05-04 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/HADOOP-12930 e886e4d86 -> 7bf750d0d


HADOOP-13094.  hadoop-common unit tests for dynamic commands


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7bf750d0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7bf750d0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7bf750d0

Branch: refs/heads/HADOOP-12930
Commit: 7bf750d0d3ed396f58f2c583348e31f324959401
Parents: e886e4d
Author: Allen Wittenauer 
Authored: Wed May 4 17:41:23 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 17:41:23 2016 -0700

--
 .../src/test/scripts/hadoop-functions_test_helper.bash   | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7bf750d0/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
 
b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
index be2d7f5..cc37268 100755
--- 
a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
+++ 
b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
@@ -16,7 +16,7 @@
 
 setup() {
 
-  TMP=../../../target/test-dir/bats.$$.${RANDOM}
+  TMP="${BATS_TEST_DIRNAME}/../../../target/test-dir/bats.$$.${RANDOM}"
   mkdir -p ${TMP}
   TMP=$(cd -P -- "${TMP}" >/dev/null && pwd -P)
   export TMP
@@ -38,7 +38,7 @@ setup() {
   # shellcheck disable=SC2034
   QATESTMODE=true
 
-  . ../../main/bin/hadoop-functions.sh
+  . "${BATS_TEST_DIRNAME}/../../main/bin/hadoop-functions.sh"
   pushd "${TMP}" >/dev/null
 }
 


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



hadoop git commit: HDFS-10320. Rack failures may result in NN terminate. (Xiao Chen via mingma)

2016-05-04 Thread mingma
Repository: hadoop
Updated Branches:
  refs/heads/branch-2.8 22ac37615 -> d6e95ae47


HDFS-10320. Rack failures may result in NN terminate. (Xiao Chen via mingma)

(cherry picked from commit 1268cf5fbe4458fa75ad0662512d352f9e8d3470)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d6e95ae4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d6e95ae4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d6e95ae4

Branch: refs/heads/branch-2.8
Commit: d6e95ae47b6281219ca2b634507ece3b4ac6a12e
Parents: 22ac376
Author: Ming Ma 
Authored: Wed May 4 17:02:26 2016 -0700
Committer: Ming Ma 
Committed: Wed May 4 17:07:36 2016 -0700

--
 .../org/apache/hadoop/net/NetworkTopology.java  | 109 +--
 .../AvailableSpaceBlockPlacementPolicy.java |  11 +-
 .../BlockPlacementPolicyDefault.java|  84 +++---
 .../web/resources/NamenodeWebHdfsMethods.java   |  13 +--
 .../apache/hadoop/net/TestNetworkTopology.java  |  75 -
 5 files changed, 196 insertions(+), 96 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d6e95ae4/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
index b637da1..d680094 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
@@ -29,13 +29,13 @@ import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -54,8 +54,8 @@ import com.google.common.collect.Lists;
 public class NetworkTopology {
   public final static String DEFAULT_RACK = "/default-rack";
   public final static int DEFAULT_HOST_LEVEL = 2;
-  public static final Log LOG =
-LogFactory.getLog(NetworkTopology.class);
+  public static final Logger LOG =
+  LoggerFactory.getLogger(NetworkTopology.class);
 
   public static class InvalidTopologyException extends RuntimeException {
 private static final long serialVersionUID = 1L;
@@ -432,9 +432,7 @@ public class NetworkTopology {
   }
 }
   }
-  if(LOG.isDebugEnabled()) {
-LOG.debug("NetworkTopology became:\n" + this.toString());
-  }
+  LOG.debug("NetworkTopology became:\n{}", this.toString());
 } finally {
   netlock.writeLock().unlock();
 }
@@ -507,9 +505,7 @@ public class NetworkTopology {
   numOfRacks--;
 }
   }
-  if(LOG.isDebugEnabled()) {
-LOG.debug("NetworkTopology became:\n" + this.toString());
-  }
+  LOG.debug("NetworkTopology became:\n{}", this.toString());
 } finally {
   netlock.writeLock().unlock();
 }
@@ -702,26 +698,45 @@ public class NetworkTopology {
 r.setSeed(seed);
   }
 
-  /** randomly choose one node from scope
-   * if scope starts with ~, choose one from the all nodes except for the
-   * ones in scope; otherwise, choose one from scope
+  /**
+   * Randomly choose a node.
+   *
* @param scope range of nodes from which a node will be chosen
* @return the chosen node
+   *
+   * @see #chooseRandom(String, Collection)
*/
-  public Node chooseRandom(String scope) {
+  public Node chooseRandom(final String scope) {
+return chooseRandom(scope, null);
+  }
+
+  /**
+   * Randomly choose one node from scope.
+   *
+   * If scope starts with ~, choose one from all the nodes except for the
+   * ones in scope; otherwise, choose one from scope.
+   * If excludedNodes is given, choose a node that's not in excludedNodes.
+   *
+   * @param scope range of nodes from which a node will be chosen
+   * @param excludedNodes nodes to be excluded from the selection
+   * @return the chosen node
+   */
+  public Node chooseRandom(final String scope,
+  final Collection excludedNodes) {
 netlock.readLock().lock();
 try {
   if (scope.startsWith("~")) {
-return 

hadoop git commit: HDFS-10320. Rack failures may result in NN terminate. (Xiao Chen via mingma)

2016-05-04 Thread mingma
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 8262ef831 -> 5b5383317


HDFS-10320. Rack failures may result in NN terminate. (Xiao Chen via mingma)

(cherry picked from commit 1268cf5fbe4458fa75ad0662512d352f9e8d3470)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5b538331
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5b538331
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5b538331

Branch: refs/heads/branch-2
Commit: 5b53833171f9427234d2c608e48b1eb323efb435
Parents: 8262ef8
Author: Ming Ma 
Authored: Wed May 4 17:02:26 2016 -0700
Committer: Ming Ma 
Committed: Wed May 4 17:04:44 2016 -0700

--
 .../org/apache/hadoop/net/NetworkTopology.java  | 109 +--
 .../AvailableSpaceBlockPlacementPolicy.java |  11 +-
 .../BlockPlacementPolicyDefault.java|  84 +++---
 .../web/resources/NamenodeWebHdfsMethods.java   |  13 +--
 .../apache/hadoop/net/TestNetworkTopology.java  |  75 -
 5 files changed, 196 insertions(+), 96 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5b538331/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
index e1d2968..1e23ff6 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
@@ -29,13 +29,13 @@ import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -54,8 +54,8 @@ import com.google.common.collect.Lists;
 public class NetworkTopology {
   public final static String DEFAULT_RACK = "/default-rack";
   public final static int DEFAULT_HOST_LEVEL = 2;
-  public static final Log LOG =
-LogFactory.getLog(NetworkTopology.class);
+  public static final Logger LOG =
+  LoggerFactory.getLogger(NetworkTopology.class);
 
   public static class InvalidTopologyException extends RuntimeException {
 private static final long serialVersionUID = 1L;
@@ -442,9 +442,7 @@ public class NetworkTopology {
   }
 }
   }
-  if(LOG.isDebugEnabled()) {
-LOG.debug("NetworkTopology became:\n" + this.toString());
-  }
+  LOG.debug("NetworkTopology became:\n{}", this.toString());
 } finally {
   netlock.writeLock().unlock();
 }
@@ -517,9 +515,7 @@ public class NetworkTopology {
   numOfRacks--;
 }
   }
-  if(LOG.isDebugEnabled()) {
-LOG.debug("NetworkTopology became:\n" + this.toString());
-  }
+  LOG.debug("NetworkTopology became:\n{}", this.toString());
 } finally {
   netlock.writeLock().unlock();
 }
@@ -717,26 +713,45 @@ public class NetworkTopology {
 r.setSeed(seed);
   }
 
-  /** randomly choose one node from scope
-   * if scope starts with ~, choose one from the all nodes except for the
-   * ones in scope; otherwise, choose one from scope
+  /**
+   * Randomly choose a node.
+   *
* @param scope range of nodes from which a node will be chosen
* @return the chosen node
+   *
+   * @see #chooseRandom(String, Collection)
*/
-  public Node chooseRandom(String scope) {
+  public Node chooseRandom(final String scope) {
+return chooseRandom(scope, null);
+  }
+
+  /**
+   * Randomly choose one node from scope.
+   *
+   * If scope starts with ~, choose one from all the nodes except for the
+   * ones in scope; otherwise, choose one from scope.
+   * If excludedNodes is given, choose a node that's not in excludedNodes.
+   *
+   * @param scope range of nodes from which a node will be chosen
+   * @param excludedNodes nodes to be excluded from the selection
+   * @return the chosen node
+   */
+  public Node chooseRandom(final String scope,
+  final Collection excludedNodes) {
 netlock.readLock().lock();
 try {
   if (scope.startsWith("~")) {
-return 

hadoop git commit: HDFS-10320. Rack failures may result in NN terminate. (Xiao Chen via mingma)

2016-05-04 Thread mingma
Repository: hadoop
Updated Branches:
  refs/heads/trunk 9e37fe3b7 -> 1268cf5fb


HDFS-10320. Rack failures may result in NN terminate. (Xiao Chen via mingma)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1268cf5f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1268cf5f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1268cf5f

Branch: refs/heads/trunk
Commit: 1268cf5fbe4458fa75ad0662512d352f9e8d3470
Parents: 9e37fe3
Author: Ming Ma 
Authored: Wed May 4 17:02:26 2016 -0700
Committer: Ming Ma 
Committed: Wed May 4 17:02:26 2016 -0700

--
 .../org/apache/hadoop/net/NetworkTopology.java  | 109 +--
 .../AvailableSpaceBlockPlacementPolicy.java |  11 +-
 .../BlockPlacementPolicyDefault.java|  84 +++---
 .../web/resources/NamenodeWebHdfsMethods.java   |  13 +--
 .../apache/hadoop/net/TestNetworkTopology.java  |  75 -
 5 files changed, 196 insertions(+), 96 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1268cf5f/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
index e1d2968..1e23ff6 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetworkTopology.java
@@ -29,13 +29,13 @@ import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -54,8 +54,8 @@ import com.google.common.collect.Lists;
 public class NetworkTopology {
   public final static String DEFAULT_RACK = "/default-rack";
   public final static int DEFAULT_HOST_LEVEL = 2;
-  public static final Log LOG =
-LogFactory.getLog(NetworkTopology.class);
+  public static final Logger LOG =
+  LoggerFactory.getLogger(NetworkTopology.class);
 
   public static class InvalidTopologyException extends RuntimeException {
 private static final long serialVersionUID = 1L;
@@ -442,9 +442,7 @@ public class NetworkTopology {
   }
 }
   }
-  if(LOG.isDebugEnabled()) {
-LOG.debug("NetworkTopology became:\n" + this.toString());
-  }
+  LOG.debug("NetworkTopology became:\n{}", this.toString());
 } finally {
   netlock.writeLock().unlock();
 }
@@ -517,9 +515,7 @@ public class NetworkTopology {
   numOfRacks--;
 }
   }
-  if(LOG.isDebugEnabled()) {
-LOG.debug("NetworkTopology became:\n" + this.toString());
-  }
+  LOG.debug("NetworkTopology became:\n{}", this.toString());
 } finally {
   netlock.writeLock().unlock();
 }
@@ -717,26 +713,45 @@ public class NetworkTopology {
 r.setSeed(seed);
   }
 
-  /** randomly choose one node from scope
-   * if scope starts with ~, choose one from the all nodes except for the
-   * ones in scope; otherwise, choose one from scope
+  /**
+   * Randomly choose a node.
+   *
* @param scope range of nodes from which a node will be chosen
* @return the chosen node
+   *
+   * @see #chooseRandom(String, Collection)
*/
-  public Node chooseRandom(String scope) {
+  public Node chooseRandom(final String scope) {
+return chooseRandom(scope, null);
+  }
+
+  /**
+   * Randomly choose one node from scope.
+   *
+   * If scope starts with ~, choose one from all nodes except for the
+   * ones in scope; otherwise, choose one from scope.
+   * If excludedNodes is given, choose a node that's not in excludedNodes.
+   *
+   * @param scope range of nodes from which a node will be chosen
+   * @param excludedNodes nodes to be excluded from the choice
+   * @return the chosen node
+   */
+  public Node chooseRandom(final String scope,
+  final Collection<Node> excludedNodes) {
 netlock.readLock().lock();
 try {
   if (scope.startsWith("~")) {
-return chooseRandom(NodeBase.ROOT, scope.substring(1));
+return 
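A minimal sketch of how a caller can use the new two-argument overload, assuming a populated topology; the "~" prefix inverts the scope as the javadoc above describes (imports from java.util and org.apache.hadoop.net assumed):

    NetworkTopology topology = new NetworkTopology();
    Node n1 = new NodeBase("host1", "/rack1");
    Node n2 = new NodeBase("host2", "/rack1");
    Node n3 = new NodeBase("host3", "/rack2");
    topology.add(n1);
    topology.add(n2);
    topology.add(n3);

    Collection<Node> excluded = new ArrayList<>();
    excluded.add(n1);

    // Within /rack1 but never n1, so this must return n2.
    Node inRack = topology.chooseRandom("/rack1", excluded);
    // "~/rack1" means any node outside /rack1, so this returns n3.
    Node outside = topology.chooseRandom("~/rack1", excluded);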

[37/50] [abbrv] hadoop git commit: Addendum to YARN-3863. Deleted files that were added incorrectly.

2016-05-04 Thread gtcarrera9
Addendum to YARN-3863. Deleted files that were added incorrectly.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/974a9d7c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/974a9d7c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/974a9d7c

Branch: refs/heads/YARN-2928
Commit: 974a9d7cad7162c87576a92d28774592114d1317
Parents: 2b2df86
Author: Sangjin Lee 
Authored: Tue Apr 12 12:32:43 2016 -0700
Committer: Li Lu 
Committed: Wed May 4 16:35:33 2016 -0700

--
 .../reader/filter/TimelineExistsFilter.java | 62 -
 .../reader/filter/TimelineKeyValueFilter.java   | 48 -
 .../reader/filter/TimelineKeyValuesFilter.java  | 71 
 .../common/TimelineEntityFiltersType.java   | 71 
 4 files changed, 252 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/974a9d7c/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineExistsFilter.java
--
diff --git 
a/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineExistsFilter.java
 
b/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineExistsFilter.java
deleted file mode 100644
index 36d0d7b..0000000
--- 
a/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineExistsFilter.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.timelineservice.reader.filter;
-
-import org.apache.hadoop.classification.InterfaceAudience.Private;
-import org.apache.hadoop.classification.InterfaceStability.Unstable;
-
-/**
- * Filter class which represents filter to be applied based on existence of a
- * value.
- */
-@Private
-@Unstable
-public class TimelineExistsFilter extends TimelineFilter {
-
-  private final TimelineCompareOp compareOp;
-  private final String value;
-
-  public TimelineExistsFilter(TimelineCompareOp op, String value) {
-this.value = value;
-if (op != TimelineCompareOp.EQUAL && op != TimelineCompareOp.NOT_EQUAL) {
-  throw new IllegalArgumentException("CompareOp for exists filter should " +
-  "be EQUAL or NOT_EQUAL");
-}
-this.compareOp = op;
-  }
-
-  @Override
-  public TimelineFilterType getFilterType() {
-return TimelineFilterType.EXISTS;
-  }
-
-  public String getValue() {
-return value;
-  }
-
-  public TimelineCompareOp getCompareOp() {
-return compareOp;
-  }
-
-  @Override
-  public String toString() {
-return String.format("%s (%s %s)",
-this.getClass().getSimpleName(), this.compareOp.name(), this.value);
-  }
-}
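For reference, a sketch of how the removed filter was used; the constructor check above rejects any operator other than EQUAL or NOT_EQUAL ("foo_event" is a hypothetical key):

    TimelineExistsFilter exists =
        new TimelineExistsFilter(TimelineCompareOp.EQUAL, "foo_event");
    // exists.getFilterType() returns TimelineFilterType.EXISTS.
    // Any other operator, e.g. TimelineCompareOp.GREATER_THAN, makes the
    // constructor throw IllegalArgumentException, per the check above.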

http://git-wip-us.apache.org/repos/asf/hadoop/blob/974a9d7c/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineKeyValueFilter.java
--
diff --git 
a/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineKeyValueFilter.java
 
b/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineKeyValueFilter.java
deleted file mode 100644
index 58f0ee9..0000000
--- 
a/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/filter/TimelineKeyValueFilter.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor 

[43/50] [abbrv] hadoop git commit: YARN-3816. [Aggregation] App-level aggregation and accumulation for YARN system metrics (Li Lu via sjlee)

2016-05-04 Thread gtcarrera9
YARN-3816. [Aggregation] App-level aggregation and accumulation for YARN system 
metrics (Li Lu via sjlee)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/64d15d9c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/64d15d9c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/64d15d9c

Branch: refs/heads/YARN-2928
Commit: 64d15d9cc408072594102adad474f9302234f80c
Parents: 974a9d7
Author: Sangjin Lee 
Authored: Fri Apr 22 10:24:40 2016 -0700
Committer: Li Lu 
Committed: Wed May 4 16:35:37 2016 -0700

--
 .../records/timelineservice/TimelineMetric.java | 140 ++--
 .../TimelineMetricCalculator.java   | 115 ++
 .../TimelineMetricOperation.java| 167 +++
 .../timelineservice/TestTimelineMetric.java | 100 +
 .../TestTimelineServiceRecords.java |   6 +-
 .../timelineservice/NMTimelinePublisher.java|   4 +
 .../collector/AppLevelTimelineCollector.java|  72 +++
 .../collector/TimelineCollector.java| 213 ++-
 .../storage/TimelineAggregationTrack.java   |   2 +-
 .../collector/TestTimelineCollector.java| 127 +++
 .../TestFileSystemTimelineWriterImpl.java   |  43 +++-
 .../storage/TestHBaseTimelineStorage.java   |  35 ++-
 12 files changed, 998 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/64d15d9c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineMetric.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineMetric.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineMetric.java
index 2f60515..f0c6849 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineMetric.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineMetric.java
@@ -19,12 +19,13 @@ package org.apache.hadoop.yarn.api.records.timelineservice;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
-import java.util.Comparator;
+import java.util.Collections;
 import java.util.Map;
 import java.util.TreeMap;
 
@@ -48,13 +49,13 @@ public class TimelineMetric {
 
   private Type type;
   private String id;
-  private Comparator<Long> reverseComparator = new Comparator<Long>() {
-@Override
-public int compare(Long l1, Long l2) {
-  return l2.compareTo(l1);
-}
-  };
-  private TreeMap<Long, Number> values = new TreeMap<>(reverseComparator);
+  // By default, not to do any aggregation operations. This field will NOT be
+  // persisted (like a "transient" member).
+  private TimelineMetricOperation realtimeAggregationOp
+  = TimelineMetricOperation.NOP;
+
+  private TreeMap<Long, Number> values
+  = new TreeMap<>(Collections.reverseOrder());
 
   public TimelineMetric() {
 this(Type.SINGLE_VALUE);
@@ -83,6 +84,26 @@ public class TimelineMetric {
 this.id = metricId;
   }
 
+  /**
+   * Get the real time aggregation operation of this metric.
+   *
+   * @return Real time aggregation operation
+   */
+  public TimelineMetricOperation getRealtimeAggregationOp() {
+return realtimeAggregationOp;
+  }
+
+  /**
+   * Set the real time aggregation operation of this metric.
+   *
+   * @param op A timeline metric operation that the metric should perform on
+   *   real time aggregations
+   */
+  public void setRealtimeAggregationOp(
+  final TimelineMetricOperation op) {
+this.realtimeAggregationOp = op;
+  }
+
   // required by JAXB
   @InterfaceAudience.Private
   @XmlElement(name = "values")
@@ -98,8 +119,8 @@ public class TimelineMetric {
 if (type == Type.SINGLE_VALUE) {
   overwrite(vals);
 } else {
-  if (values != null) {
-this.values = new TreeMap<Long, Number>(reverseComparator);
+  if (vals != null) {
+this.values = new TreeMap<>(Collections.reverseOrder());
 this.values.putAll(vals);
   } else {
 this.values = null;
@@ -166,11 +187,100 @@ public class TimelineMetric {
 
   @Override
   public String toString() {
-String str = "{id:" + id + ", 
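A minimal sketch of the new per-metric aggregation hook; SUM is an assumed member of TimelineMetricOperation, since only the NOP default is visible in this diff:

    TimelineMetric metric = new TimelineMetric(TimelineMetric.Type.TIME_SERIES);
    metric.setId("MAP_SLOT_MILLIS");
    // Values iterate newest-first thanks to Collections.reverseOrder().
    metric.addValue(1439000001000L, 40L);
    metric.addValue(1439000002000L, 50L);
    // Ask app-level real-time aggregation to sum this metric.
    metric.setRealtimeAggregationOp(TimelineMetricOperation.SUM);  // assumed op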

[29/50] [abbrv] hadoop git commit: YARN-4700. ATS storage has one extra record each time the RM got restarted. (Naganarasimha G R via Varun Saxena)

2016-05-04 Thread gtcarrera9
YARN-4700. ATS storage has one extra record each time the RM got restarted. 
(Naganarasimha G R via Varun Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/34248bde
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/34248bde
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/34248bde

Branch: refs/heads/YARN-2928
Commit: 34248bdeab608c18860777fc77ba0706a4fe7ed9
Parents: f59c743
Author: Varun Saxena 
Authored: Fri Mar 4 19:42:22 2016 +0530
Committer: Li Lu 
Committed: Wed May 4 16:33:34 2016 -0700

--
 .../storage/HBaseTimelineWriterImpl.java| 47 +
 .../storage/common/TimelineStorageUtils.java| 35 +++--
 .../storage/flow/FlowActivityRowKey.java| 27 +++---
 ...stTimelineReaderWebServicesHBaseStorage.java | 25 +
 .../storage/flow/TestFlowDataGenerator.java | 22 
 .../flow/TestHBaseStorageFlowActivity.java  | 53 
 .../storage/flow/TestHBaseStorageFlowRun.java   |  4 +-
 7 files changed, 96 insertions(+), 117 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/34248bde/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java
index 997b175..1afe878 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java
@@ -36,6 +36,7 @@ import 
org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
 import 
org.apache.hadoop.yarn.api.records.timelineservice.TimelineWriteResponse;
+import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants;
 import org.apache.hadoop.yarn.server.timeline.GenericObjectMapper;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumn;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix;
@@ -53,11 +54,11 @@ import 
org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityColumn
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKey;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationCompactionDimension;
+import 
org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationOperation;
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityColumnPrefix;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityRowKey;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityTable;
-import 
org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationOperation;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunColumn;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunColumnPrefix;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunRowKey;
@@ -140,19 +141,22 @@ public class HBaseTimelineWriterImpl extends 
AbstractService implements
   storeRelations(rowKey, te, isApplication);
 
   if (isApplication) {
-if (TimelineStorageUtils.isApplicationCreated(te)) {
+TimelineEvent event = TimelineStorageUtils.getApplicationEvent(te,
+ApplicationMetricsConstants.CREATED_EVENT_TYPE);
+if (event != null) {
   onApplicationCreated(clusterId, userId, flowName, flowVersion,
-  flowRunId, appId, te);
+  flowRunId, appId, te, event.getTimestamp());
 }
 // if it's an application entity, store metrics
 storeFlowMetricsAppRunning(clusterId, userId, flowName, flowRunId,
 appId, te);
 // if application 
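The fix replaces a boolean isApplicationCreated() check with retrieval of the creation event itself, so its timestamp can be passed to onApplicationCreated(). An illustrative re-implementation of that lookup (the real TimelineStorageUtils.getApplicationEvent may differ in detail):

    static TimelineEvent getApplicationEvent(TimelineEntity te, String eventId) {
      if (TimelineEntityType.YARN_APPLICATION.toString().equals(te.getType())) {
        for (TimelineEvent event : te.getEvents()) {
          if (event.getId().equals(eventId)) {
            return event;  // e.g. the CREATED_EVENT_TYPE event
          }
        }
      }
      return null;  // caller skips onApplicationCreated() when absent
    }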

[17/50] [abbrv] hadoop git commit: YARN-4224. Support fetching entities by UID and change the REST interface to conform to current REST APIs' in YARN. (Varun Saxena via gtcarrera9)

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/97d5cf32/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
new file mode 100644
index 0000000..d052d51
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
@@ -0,0 +1,222 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timelineservice.reader;
+
+import java.io.IOException;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.yarn.server.timeline.GenericObjectMapper;
+import 
org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
+
+/**
+ * Set of utility methods to be used by timeline reader web services.
+ */
+final class TimelineReaderWebServicesUtils {
+  private TimelineReaderWebServicesUtils() {
+  }
+
+  /**
+   * Parse a delimited string and convert it into a set of strings. For
+   * instance, if delimiter is ",", then the string should be represented as
+   * "value1,value2,value3".
+   * @param str delimited string.
+   * @param delimiter string is delimited by this delimiter.
+   * @return set of strings.
+   */
+  static Set<String> parseValuesStr(String str, String delimiter) {
+if (str == null || str.isEmpty()) {
+  return null;
+}
+Set<String> strSet = new HashSet<String>();
+String[] strs = str.split(delimiter);
+for (String aStr : strs) {
+  strSet.add(aStr.trim());
+}
+return strSet;
+  }
+
+  @SuppressWarnings("unchecked")
+  private static <T> void parseKeyValues(Map<String, T> map, String str,
+  String pairsDelim, String keyValuesDelim, boolean stringValue,
+  boolean multipleValues) {
+String[] pairs = str.split(pairsDelim);
+for (String pair : pairs) {
+  if (pair == null || pair.trim().isEmpty()) {
+continue;
+  }
+  String[] pairStrs = pair.split(keyValuesDelim);
+  if (pairStrs.length < 2) {
+continue;
+  }
+  if (!stringValue) {
+try {
+  Object value =
+  GenericObjectMapper.OBJECT_READER.readValue(pairStrs[1].trim());
+  map.put(pairStrs[0].trim(), (T) value);
+} catch (IOException e) {
+  map.put(pairStrs[0].trim(), (T) pairStrs[1].trim());
+}
+  } else {
+String key = pairStrs[0].trim();
+if (multipleValues) {
+  Set<String> values = new HashSet<String>();
+  for (int i = 1; i < pairStrs.length; i++) {
+values.add(pairStrs[i].trim());
+  }
+  map.put(key, (T) values);
+} else {
+  map.put(key, (T) pairStrs[1].trim());
+}
+  }
+}
+  }
+
+  /**
+   * Parse a delimited string and convert it into a map of key-values with each
+   * key having a set of values. Both the key and values are interpreted as
+   * strings.
+   * For instance, if pairsDelim is "," and keyValuesDelim is ":", then the
+   * string should be represented as
+   * "key1:value11:value12:value13,key2:value21,key3:value31:value32".
+   * @param str delimited string represented as multiple keys having multiple
+   * values.
+   * @param pairsDelim key-values pairs are delimited by this delimiter.
+   * @param keyValuesDelim values for a key are delimited by this delimiter.
+   * @return a map of key-values with each key having a set of values.
+   */
+  static Map 
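The delimited formats documented above are easy to exercise; a sketch of the "values" and "key:values" conventions using plain JDK string handling, since the utility class itself is package-private (java.util imports assumed):

    // "value1,value2,value3" -> {value1, value2, value3}
    Set<String> events = new HashSet<>();
    for (String v : "start,finish".split(",")) {
      events.add(v.trim());
    }

    // "key1:v11:v12,key2:v21" -> {key1=[v11, v12], key2=[v21]}
    Map<String, Set<String>> relatesTo = new HashMap<>();
    for (String pair : "key1:v11:v12,key2:v21".split(",")) {
      String[] parts = pair.split(":");
      Set<String> vals = new HashSet<>();
      for (int i = 1; i < parts.length; i++) {
        vals.add(parts[i].trim());
      }
      relatesTo.put(parts[0].trim(), vals);
    }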

[31/50] [abbrv] hadoop git commit: MAPREDUCE-6546. reconcile the two versions of the timeline service performance tests. (Sangjin Lee via Naganarasimha G R)

2016-05-04 Thread gtcarrera9
MAPREDUCE-6546. reconcile the two versions of the timeline service performance 
tests. (Sangjin Lee via Naganarasimha G R)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/cd444089
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/cd444089
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/cd444089

Branch: refs/heads/YARN-2928
Commit: cd4440899a2cebbbfb15e18228d3821961319986
Parents: 34248bd
Author: naganarasimha 
Authored: Wed Mar 9 11:20:32 2016 +0530
Committer: Li Lu 
Committed: Wed May 4 16:35:05 2016 -0700

--
 .../hadoop/mapred/JobHistoryFileParser.java |  53 
 .../mapred/JobHistoryFileReplayMapper.java  | 301 ---
 .../hadoop/mapred/SimpleEntityWriter.java   | 140 -
 .../hadoop/mapred/TimelineEntityConverter.java  | 211 -
 .../mapred/TimelineServicePerformanceV2.java| 229 --
 .../apache/hadoop/mapreduce/EntityWriterV2.java |  56 
 .../mapreduce/JobHistoryFileReplayMapperV1.java |  14 +-
 .../mapreduce/JobHistoryFileReplayMapperV2.java | 161 ++
 .../mapreduce/SimpleEntityWriterConstants.java  |  43 +++
 .../hadoop/mapreduce/SimpleEntityWriterV1.java  |  28 +-
 .../hadoop/mapreduce/SimpleEntityWriterV2.java  | 131 
 .../mapreduce/TimelineEntityConverterV1.java|   5 -
 .../mapreduce/TimelineEntityConverterV2.java| 211 +
 .../mapreduce/TimelineServicePerformance.java   | 129 +---
 .../apache/hadoop/test/MapredTestDriver.java|  35 +--
 15 files changed, 704 insertions(+), 1043 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/cd444089/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/JobHistoryFileParser.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/JobHistoryFileParser.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/JobHistoryFileParser.java
deleted file mode 100644
index 9d051df..0000000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/JobHistoryFileParser.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.mapred;
-
-import java.io.IOException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
-import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
-
-class JobHistoryFileParser {
-  private static final Log LOG = LogFactory.getLog(JobHistoryFileParser.class);
-
-  private final FileSystem fs;
-
-  public JobHistoryFileParser(FileSystem fs) {
-LOG.info("JobHistoryFileParser created with " + fs);
-this.fs = fs;
-  }
-
-  public JobInfo parseHistoryFile(Path path) throws IOException {
-LOG.info("parsing job history file " + path);
-JobHistoryParser parser = new JobHistoryParser(fs, path);
-return parser.parse();
-  }
-
-  public Configuration parseConfiguration(Path path) throws IOException {
-LOG.info("parsing job configuration file " + path);
-Configuration conf = new Configuration(false);
-conf.addResource(fs.open(path));
-return conf;
-  }
-}
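Before its removal here, the parser was driven as below; a sketch assuming a history file and its matching *_conf.xml already sit on the filesystem (the paths are hypothetical):

    FileSystem fs = FileSystem.get(new Configuration());
    JobHistoryFileParser parser = new JobHistoryFileParser(fs);
    JobInfo info = parser.parseHistoryFile(
        new Path("/history/job_1462400000000_0001.jhist"));
    Configuration jobConf = parser.parseConfiguration(
        new Path("/history/job_1462400000000_0001_conf.xml"));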

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cd444089/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/JobHistoryFileReplayMapper.java
--
diff --git 

[11/50] [abbrv] hadoop git commit: YARN-4200. Refactor reader classes in storage to nest under hbase specific package name. Contributed by Li Lu.

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/61737325/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
new file mode 100644
index 0000000..181ec81
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
@@ -0,0 +1,383 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.timelineservice.storage.reader;
+
+import java.io.IOException;
+import java.util.EnumSet;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.BinaryComparator;
+import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.filter.FamilyFilter;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.FilterList.Operator;
+import org.apache.hadoop.hbase.filter.PageFilter;
+import org.apache.hadoop.hbase.filter.QualifierFilter;
+import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
+import 
org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
+import 
org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils;
+import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader;
+import 
org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
+import 
org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumn;
+import 
org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnFamily;
+import 
org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix;
+import 
org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKey;
+import 
org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import 
org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStorageUtils;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * Timeline entity reader for application entities that are stored in the
+ * application table.
+ */
+class ApplicationEntityReader extends GenericEntityReader {
+  private static final ApplicationTable APPLICATION_TABLE =
+  new ApplicationTable();
+
+  public ApplicationEntityReader(String userId, String clusterId,
+  String flowName, Long flowRunId, String appId, String entityType,
+  Long limit, Long createdTimeBegin, Long createdTimeEnd,
+  Long modifiedTimeBegin, Long modifiedTimeEnd,
+  Map<String, Set<String>> relatesTo, Map<String, Set<String>> isRelatedTo,
+  Map<String, Object> infoFilters, Map<String, String> configFilters,
+  Set<String> metricFilters, Set<String> eventFilters,
+  TimelineFilterList confsToRetrieve, TimelineFilterList metricsToRetrieve,
+  EnumSet<Field> fieldsToRetrieve) {
+super(userId, clusterId, flowName, flowRunId, appId, entityType, limit,
+createdTimeBegin, createdTimeEnd, modifiedTimeBegin, modifiedTimeEnd,
+relatesTo, isRelatedTo, infoFilters, configFilters, metricFilters,
+

[44/50] [abbrv] hadoop git commit: YARN-3150. Documenting the timeline service v2. (Sangjin Lee and Vrushali C via gtcarrera9)

2016-05-04 Thread gtcarrera9
YARN-3150. Documenting the timeline service v2. (Sangjin Lee and Vrushali C via 
gtcarrera9)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/caf23c34
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/caf23c34
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/caf23c34

Branch: refs/heads/YARN-2928
Commit: caf23c3453b7476df24edaaa024108cabcd56b21
Parents: 084a334
Author: Li Lu 
Authored: Sat Apr 30 15:02:12 2016 -0700
Committer: Li Lu 
Committed: Wed May 4 16:35:40 2016 -0700

--
 .../hadoop/yarn/conf/YarnConfiguration.java |   8 +-
 .../src/main/resources/yarn-default.xml |   6 +-
 .../metrics/TimelineServiceV2Publisher.java |  16 +-
 .../TestSystemMetricsPublisherForV2.java|  16 +-
 .../src/site/markdown/TimelineServer.md |   2 +-
 .../src/site/markdown/TimelineServiceV2.md  | 576 +++
 .../src/site/resources/images/timeline_v2.jpg   | Bin 0 -> 45112 bytes
 7 files changed, 600 insertions(+), 24 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/caf23c34/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index b79b3a5..3837a2b 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -477,13 +477,13 @@ public class YarnConfiguration extends Configuration {
   public static final boolean DEFAULT_SYSTEM_METRICS_PUBLISHER_ENABLED = false;
 
   /**
-   * The setting that controls whether yarn container metrics is published to
-   * the timeline server or not by RM. This configuration setting is for ATS
+   * The setting that controls whether yarn container events are published to
+   * the timeline service or not by RM. This configuration setting is for ATS
* V2
*/
-  public static final String RM_PUBLISH_CONTAINER_METRICS_ENABLED = YARN_PREFIX
+  public static final String RM_PUBLISH_CONTAINER_EVENTS_ENABLED = YARN_PREFIX
   + "rm.system-metrics-publisher.emit-container-events";
-  public static final boolean DEFAULT_RM_PUBLISH_CONTAINER_METRICS_ENABLED =
+  public static final boolean DEFAULT_RM_PUBLISH_CONTAINER_EVENTS_ENABLED =
   false;
 
   public static final String RM_SYSTEM_METRICS_PUBLISHER_DISPATCHER_POOL_SIZE =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/caf23c34/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index 13d4b4f..4899dcb 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -799,9 +799,9 @@
   </property>
 
   <property>
-    <description>The setting that controls whether yarn container metrics is
-    published to the timeline server or not by RM. This configuration setting is
-    for ATS V2.</description>
+    <description>The setting that controls whether yarn container events are
+    published to the timeline service or not by RM. This configuration setting
+    is for ATS V2.</description>
     <name>yarn.rm.system-metrics-publisher.emit-container-events</name>
     <value>false</value>
   </property>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/caf23c34/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV2Publisher.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV2Publisher.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV2Publisher.java
index 14073d1..b7ece5f 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV2Publisher.java
+++ 

[25/50] [abbrv] hadoop git commit: YARN-4409. Fix javadoc and checkstyle issues in timelineservice code (Varun Saxena via sjlee)

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ee803e16/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineWriter.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineWriter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineWriter.java
index 50136de..663a18a 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineWriter.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineWriter.java
@@ -42,12 +42,13 @@ public interface TimelineWriter extends Service {
* @param userId context user ID
* @param flowName context flow name
* @param flowVersion context flow version
-   * @param flowRunId
-   * @param appId context app ID
+   * @param flowRunId run id for the flow.
+   * @param appId context app ID.
* @param data
*  a {@link TimelineEntities} object.
* @return a {@link TimelineWriteResponse} object.
-   * @throws IOException
+   * @throws IOException if there is any exception encountered while storing
+   * or writing entities to the backend storage.
*/
   TimelineWriteResponse write(String clusterId, String userId,
   String flowName, String flowVersion, long flowRunId, String appId,
@@ -65,8 +66,11 @@ public interface TimelineWriter extends Service {
*  a {@link TimelineEntity} object
*  a {@link TimelineAggregationTrack} enum
*  value.
+   * @param track Specifies the track or dimension along which aggregation would
+   * occur. Includes USER, FLOW, QUEUE, etc.
* @return a {@link TimelineWriteResponse} object.
-   * @throws IOException
+   * @throws IOException if there is any exception encountered while aggregating
+   * entities to the backend storage.
*/
   TimelineWriteResponse aggregate(TimelineEntity data,
   TimelineAggregationTrack track) throws IOException;
@@ -76,7 +80,8 @@ public interface TimelineWriter extends Service {
* written to the storage when the method returns. This may be a potentially
* time-consuming operation, and should be used judiciously.
*
-   * @throws IOException
+   * @throws IOException if there is any exception encountered while flushing
+   * entities to the backend storage.
*/
   void flush() throws IOException;
 }
\ No newline at end of file
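A minimal sketch of the write-then-flush contract the corrected javadoc describes, assuming some concrete TimelineWriter (e.g. the HBase-backed writer elsewhere in this series) and an already-populated entity; the cluster, user, flow, and app identifiers are illustrative:

    static void publish(TimelineWriter writer, TimelineEntity appEntity)
        throws IOException {
      TimelineEntities entities = new TimelineEntities();
      entities.addEntity(appEntity);
      writer.write("cluster1", "user1", "flow_name", "flow_version1",
          1002345678919L, "application_1111111111_2222", entities);
      writer.flush();  // potentially time-consuming; use judiciously
    }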

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ee803e16/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java
index c03c9b6..5734389 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java
@@ -34,7 +34,7 @@ import 
org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute;
 public enum ApplicationColumn implements Column<ApplicationTable> {
 
   /**
-   * App id
+   * App id.
*/
   ID(ApplicationColumnFamily.INFO, "id"),
 
@@ -84,7 +84,7 @@ public enum ApplicationColumn implements Column<ApplicationTable> {
   /**
* Retrieve an {@link ApplicationColumn} given a name, or null if there is no
* match. The following holds true: {@code columnFor(x) == columnFor(y)} if
-   * and only if {@code x.equals(y)} or {@code (x == y == null)}
+   * and only if {@code x.equals(y)} or {@code (x == y == null)}.
*
* @param columnQualifier Name of the column to retrieve
* @return the corresponding {@link ApplicationColumn} or null


[36/50] [abbrv] hadoop git commit: YARN-3461. Consolidate flow name/version/run defaults. (Sangjin Lee via Varun Saxena)

2016-05-04 Thread gtcarrera9
YARN-3461. Consolidate flow name/version/run defaults. (Sangjin Lee via Varun 
Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/408f0014
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/408f0014
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/408f0014

Branch: refs/heads/YARN-2928
Commit: 408f00144b8e6808eed91c5549e60e63c50d052d
Parents: 899468d
Author: Varun Saxena 
Authored: Thu Apr 7 22:10:11 2016 +0530
Committer: Li Lu 
Committed: Wed May 4 16:35:28 2016 -0700

--
 .../mapred/TestMRTimelineEventHandling.java | 46 +++---
 .../distributedshell/TestDistributedShell.java  | 18 --
 .../yarn/util/timeline/TimelineUtils.java   |  8 ++-
 .../resourcemanager/amlauncher/AMLauncher.java  | 67 +++-
 .../RMTimelineCollectorManager.java | 36 +--
 .../TestSystemMetricsPublisherForV2.java| 20 +++---
 .../collector/AppLevelTimelineCollector.java| 11 +---
 .../collector/NodeTimelineCollectorManager.java | 12 
 .../collector/TimelineCollectorContext.java |  5 +-
 9 files changed, 148 insertions(+), 75 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/408f0014/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
index f7283ae..300b4fb 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
@@ -20,15 +20,12 @@ package org.apache.hadoop.mapred;
 
 import java.io.File;
 import java.io.IOException;
-
 import java.util.EnumSet;
 import java.util.List;
-import java.util.Set;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.MRJobConfig;
@@ -38,9 +35,9 @@ import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.client.api.YarnClient;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.MiniYARNCluster;
@@ -48,7 +45,6 @@ import org.apache.hadoop.yarn.server.timeline.TimelineStore;
 import 
org.apache.hadoop.yarn.server.timelineservice.collector.PerNodeTimelineCollectorsAuxService;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineWriterImpl;
 import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
-
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -205,7 +201,7 @@ public class TestMRTimelineEventHandling {
   ApplicationReport appReport = apps.get(0);
   firstAppId = appReport.getApplicationId();
 
-  checkNewTimelineEvent(firstAppId);
+  checkNewTimelineEvent(firstAppId, appReport);
 
   LOG.info("Run 2nd job which should be failed.");
   job = UtilsForTests.runJobFail(new JobConf(conf), inDir, outDir);
@@ -214,11 +210,10 @@ public class TestMRTimelineEventHandling {
   
   apps = yarnClient.getApplications(appStates);
   Assert.assertEquals(apps.size(), 2);
-  
-  ApplicationId secAppId = null;
-  secAppId = apps.get(0).getApplicationId() == firstAppId ? 
-  apps.get(1).getApplicationId() : apps.get(0).getApplicationId();
-  checkNewTimelineEvent(firstAppId);
+
+  appReport = apps.get(0).getApplicationId().equals(firstAppId) ?
+  apps.get(0) : apps.get(1);
+  checkNewTimelineEvent(firstAppId, appReport);
 
 } finally {
   if (cluster != null) {
@@ -235,7 +230,8 @@ public class TestMRTimelineEventHandling {
 }
   }
   
-  private void checkNewTimelineEvent(ApplicationId 

[48/50] [abbrv] hadoop git commit: YARN-4447. Provide a mechanism to represent complex filters and parse them at the REST layer (Varun Saxena via sjlee)

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/047cde55/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
index 57d75db..2e667d6 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
@@ -18,29 +18,19 @@
 
 package org.apache.hadoop.yarn.server.timelineservice.reader;
 
-import java.io.IOException;
 import java.util.EnumSet;
-import java.util.HashSet;
-import java.util.Set;
 
 import javax.servlet.http.HttpServletRequest;
 
+import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.yarn.server.timeline.GenericObjectMapper;
-import 
org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
-import 
org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
-import 
org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter;
-import 
org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter;
 import 
org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
-import 
org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
 
 /**
  * Set of utility methods to be used by timeline reader web services.
  */
 final class TimelineReaderWebServicesUtils {
-  private static final String COMMA_DELIMITER = ",";
-  private static final String COLON_DELIMITER = ":";
 
   private TimelineReaderWebServicesUtils() {
   }
@@ -56,11 +46,10 @@ final class TimelineReaderWebServicesUtils {
* @param entityType Entity Type.
* @param entityId Entity Id.
* @return a {@link TimelineReaderContext} object.
-   * @throws Exception if any problem occurs during parsing.
*/
   static TimelineReaderContext createTimelineReaderContext(String clusterId,
   String userId, String flowName, String flowRunId, String appId,
-  String entityType, String entityId) throws Exception {
+  String entityType, String entityId) {
 return new TimelineReaderContext(parseStr(clusterId), parseStr(userId),
 parseStr(flowName), parseLongStr(flowRunId), parseStr(appId),
 parseStr(entityType), parseStr(entityId));
@@ -79,20 +68,17 @@ final class TimelineReaderWebServicesUtils {
* @param metricfilters Entities to return must match these metric filters.
* @param eventfilters Entities to return must match these event filters.
* @return a {@link TimelineEntityFilters} object.
-   * @throws Exception if any problem occurs during parsing.
+   * @throws TimelineParseException if any problem occurs during parsing.
*/
   static TimelineEntityFilters createTimelineEntityFilters(String limit,
   String createdTimeStart, String createdTimeEnd, String relatesTo,
   String isRelatedTo, String infofilters, String conffilters,
-  String metricfilters, String eventfilters) throws Exception {
+  String metricfilters, String eventfilters) throws TimelineParseException {
 return new TimelineEntityFilters(parseLongStr(limit),
 parseLongStr(createdTimeStart), parseLongStr(createdTimeEnd),
-parseKeyStrValuesStr(relatesTo, COMMA_DELIMITER, COLON_DELIMITER),
-parseKeyStrValuesStr(isRelatedTo, COMMA_DELIMITER, COLON_DELIMITER),
-parseKeyStrValueObj(infofilters, COMMA_DELIMITER, COLON_DELIMITER),
-parseKeyStrValueStr(conffilters, COMMA_DELIMITER, COLON_DELIMITER),
-parseMetricFilters(metricfilters, COMMA_DELIMITER),
-parseValuesStr(eventfilters, COMMA_DELIMITER));
+parseRelationFilters(relatesTo), parseRelationFilters(isRelatedTo),
+parseKVFilters(infofilters, false), parseKVFilters(conffilters, true),
+parseMetricFilters(metricfilters), parseEventFilters(eventfilters));
   }
 
   /**
@@ -102,12 +88,13 @@ final class TimelineReaderWebServicesUtils {
* @param metrics metrics to retrieve.
* @param fields fields to retrieve.
* @return a {@link TimelineDataToRetrieve} object.
-   * @throws Exception 

[35/50] [abbrv] hadoop git commit: YARN-4711. NM is going down with NPE's due to single thread processing of events by Timeline client (Naganarasimha G R via sjlee)

2016-05-04 Thread gtcarrera9
YARN-4711. NM is going down with NPE's due to single thread processing of 
events by Timeline client (Naganarasimha G R via sjlee)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/899468de
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/899468de
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/899468de

Branch: refs/heads/YARN-2928
Commit: 899468de23e065c0d5df3f49470b9358a5ac944b
Parents: b1e7f16
Author: Sangjin Lee 
Authored: Mon Mar 28 15:50:03 2016 -0700
Committer: Li Lu 
Committed: Wed May 4 16:35:23 2016 -0700

--
 .../dev-support/findbugs-exclude.xml|  11 +-
 .../records/timelineservice/TimelineEntity.java |  25 ++-
 .../client/api/impl/TimelineClientImpl.java |  35 ++--
 .../api/impl/TestTimelineClientV2Impl.java  |  91 +++-
 .../metrics/ContainerMetricsConstants.java  |   8 +
 .../nodemanager/NodeStatusUpdaterImpl.java  |  10 +-
 .../collectormanager/NMCollectorService.java|  10 +-
 .../application/Application.java|   4 -
 .../application/ApplicationImpl.java|  24 +--
 .../timelineservice/NMTimelinePublisher.java| 210 +++
 .../TestNMTimelinePublisher.java|  24 +--
 .../yarn/server/nodemanager/webapp/MockApp.java |   5 -
 12 files changed, 278 insertions(+), 179 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/899468de/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
--
diff --git a/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml 
b/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
index ba1af88..72fbb35 100644
--- a/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
+++ b/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
@@ -117,8 +117,15 @@
 
   
   
-
- 
+
+
+
+  
+
+  
+
+
+
   
 
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/899468de/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
index acc132e..7ce8279 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
@@ -17,15 +17,6 @@
  */
 package org.apache.hadoop.yarn.api.records.timelineservice;
 
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.yarn.util.TimelineServiceHelper;
-import org.codehaus.jackson.annotate.JsonSetter;
-
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlRootElement;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -33,6 +24,16 @@ import java.util.NavigableSet;
 import java.util.Set;
 import java.util.TreeSet;
 
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.yarn.util.TimelineServiceHelper;
+import org.codehaus.jackson.annotate.JsonSetter;
+
 /**
  * The basic timeline entity data structure for timeline service v2. Timeline
  * entity objects are not thread safe and should not be accessed concurrently.
@@ -564,6 +565,10 @@ public class TimelineEntity implements Comparable<TimelineEntity> {
   }
 
   public String toString() {
-return identifier.toString();
+if (real == null) {
+  return identifier.toString();
+} else {
+  return real.toString();
+}
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/899468de/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
--
diff --git 

[38/50] [abbrv] hadoop git commit: YARN-3863. Support complex filters in TimelineReader (Varun Saxena via sjlee)

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java
index 9793ce6..3b8036d 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestFlowDataGenerator.java
@@ -154,6 +154,14 @@ class TestFlowDataGenerator {
 metrics.add(m2);
 
 entity.addMetrics(metrics);
+TimelineEvent event = new TimelineEvent();
+event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
+long endTs = 1439379885000L;
+event.setTimestamp(endTs);
+String expKey = "foo_event_greater";
+String expVal = "test_app_greater";
+event.addInfo(expKey, expVal);
+entity.addEvent(event);
 return entity;
   }
 
@@ -178,6 +186,14 @@ class TestFlowDataGenerator {
 m1.setValues(metricValues);
 metrics.add(m1);
 entity.addMetrics(metrics);
+TimelineEvent event = new TimelineEvent();
+event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
+long endTs = 1439379885000L;
+event.setTimestamp(endTs);
+String expKey = "foo_event_greater";
+String expVal = "test_app_greater";
+event.addInfo(expKey, expVal);
+entity.addEvent(event);
 return entity;
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
index f04dd48..a724db2 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
@@ -47,8 +47,10 @@ import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList.Operator;
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter;
 import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl;
 import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl;
@@ -307,7 +309,7 @@ public class TestHBaseStorageFlowRun {
       assertEquals(141L, Bytes.toLong(values.get(q)));
 
       // check metric2
-      assertEquals(2, values.size());
+      assertEquals(3, values.size());
       q = ColumnHelper.getColumnQualifier(
           FlowRunColumnPrefix.METRIC.getColumnPrefixBytes(), metric2);
       assertTrue(values.containsKey(q));
@@ -318,11 +320,10 @@ public class TestHBaseStorageFlowRun {
 
   @Test
   public void testWriteFlowRunMetricsPrefix() throws Exception {
-    String cluster = "testWriteFlowRunMetricsOneFlow_cluster1";
-    String user = "testWriteFlowRunMetricsOneFlow_user1";
-    String flow = "testing_flowRun_metrics_flow_name";
+    String cluster = "testWriteFlowRunMetricsPrefix_cluster1";
+    String user =

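Both generator methods gain the same block; pulled out here as a self-contained sketch, with the ids, timestamp, and info key/value taken verbatim from the diff above:

    import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
    import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
    import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants;

    final class EventSketch {
      // Attach a CREATED event carrying one info key/value, as the generator now does.
      static TimelineEntity withCreatedEvent(TimelineEntity entity) {
        TimelineEvent event = new TimelineEvent();
        event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
        event.setTimestamp(1439379885000L); // fixed timestamp used by the test data
        event.addInfo("foo_event_greater", "test_app_greater");
        entity.addEvent(event);
        return entity;
      }
    }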
[21/50] [abbrv] hadoop git commit: YARN-4446. Refactor reader API for better extensibility (Varun Saxena via sjlee)

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/675612d4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
index b7804e7..a8a2ff8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
@@ -37,6 +37,9 @@ import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
+import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
+import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
 import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
 import org.junit.AfterClass;
@@ -258,9 +261,10 @@ public class TestFileSystemTimelineReaderImpl {
   public void testGetEntityDefaultView() throws Exception {
 // If no fields are specified, entity is returned with default view i.e.
 // only the id, type and created time.
-TimelineEntity result =
-reader.getEntity("user1", "cluster1", "flow1", 1L, "app1",
-"app", "id_1", null, null, null);
+TimelineEntity result = reader.getEntity(
+new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
+"app", "id_1"),
+new TimelineDataToRetrieve(null, null, null));
 Assert.assertEquals(
 (new TimelineEntity.Identifier("app", "id_1")).toString(),
 result.getIdentifier().toString());
@@ -272,9 +276,10 @@ public class TestFileSystemTimelineReaderImpl {
   @Test
   public void testGetEntityByClusterAndApp() throws Exception {
 // Cluster and AppId should be enough to get an entity.
-TimelineEntity result =
-reader.getEntity(null, "cluster1", null, null, "app1",
-"app", "id_1", null, null, null);
+TimelineEntity result = reader.getEntity(
+new TimelineReaderContext("cluster1", null, null, null, "app1", "app",
+"id_1"),
+new TimelineDataToRetrieve(null, null, null));
 Assert.assertEquals(
 (new TimelineEntity.Identifier("app", "id_1")).toString(),
 result.getIdentifier().toString());
@@ -288,9 +293,10 @@ public class TestFileSystemTimelineReaderImpl {
   public void testAppFlowMappingCsv() throws Exception {
 // Test getting an entity by cluster and app where flow entry
 // in app flow mapping csv has commas.
-TimelineEntity result =
-reader.getEntity(null, "cluster1", null, null, "app2",
-"app", "id_5", null, null, null);
+TimelineEntity result = reader.getEntity(
+new TimelineReaderContext("cluster1", null, null, null, "app2",
+"app", "id_5"),
+new TimelineDataToRetrieve(null, null, null));
 Assert.assertEquals(
 (new TimelineEntity.Identifier("app", "id_5")).toString(),
 result.getIdentifier().toString());
@@ -300,10 +306,11 @@ public class TestFileSystemTimelineReaderImpl {
   @Test
   public void testGetEntityCustomFields() throws Exception {
 // Specified fields in addition to default view will be returned.
-TimelineEntity result =
-reader.getEntity("user1", "cluster1", "flow1", 1L,
-"app1", "app", "id_1", null, null,
-EnumSet.of(Field.INFO, Field.CONFIGS, Field.METRICS));
+TimelineEntity result = reader.getEntity(
+new TimelineReaderContext("cluster1","user1", "flow1", 1L, "app1",
+"app", "id_1"),
+new TimelineDataToRetrieve(null, null,
+EnumSet.of(Field.INFO, Field.CONFIGS, Field.METRICS)));
 Assert.assertEquals(
 (new TimelineEntity.Identifier("app", "id_1")).toString(),
 result.getIdentifier().toString());
@@ -318,9 +325,10 @@ public class TestFileSystemTimelineReaderImpl {
   @Test
   public void testGetEntityAllFields() throws Exception {
 // All fields of 

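The shape of the refactor, condensed from the test updates above: the ten positional arguments collapse into two value objects.

    // Before (removed): scope and view options interleaved as ten parameters.
    // reader.getEntity("user1", "cluster1", "flow1", 1L, "app1",
    //     "app", "id_1", null, null, null);

    // After: the query scope travels in a TimelineReaderContext and the
    // view options in a TimelineDataToRetrieve.
    TimelineEntity result = reader.getEntity(
        new TimelineReaderContext("cluster1", "user1", "flow1", 1L, "app1",
            "app", "id_1"),
        new TimelineDataToRetrieve(null, null, null));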
[23/50] [abbrv] hadoop git commit: YARN-4446. Refactor reader API for better extensibility (Varun Saxena via sjlee)

2016-05-04 Thread gtcarrera9
YARN-4446. Refactor reader API for better extensibility (Varun Saxena via sjlee)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/675612d4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/675612d4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/675612d4

Branch: refs/heads/YARN-2928
Commit: 675612d4f4564195b272a86b6bf6826634ceea19
Parents: e02094f
Author: Sangjin Lee 
Authored: Wed Feb 3 16:03:55 2016 -0800
Committer: Li Lu 
Committed: Wed May 4 16:24:20 2016 -0700

--
 .../reader/TimelineDataToRetrieve.java  | 119 ++
 .../reader/TimelineEntityFilters.java   | 187 +
 .../reader/TimelineReaderContext.java   |  10 +
 .../reader/TimelineReaderManager.java   |  44 +-
 .../reader/TimelineReaderUtils.java |   4 +-
 .../reader/TimelineReaderWebServices.java   | 417 ---
 .../reader/TimelineReaderWebServicesUtils.java  |  68 +++
 .../storage/FileSystemTimelineReaderImpl.java   | 116 +++---
 .../storage/HBaseTimelineReaderImpl.java|  36 +-
 .../timelineservice/storage/TimelineReader.java | 234 +--
 .../storage/reader/ApplicationEntityReader.java | 204 -
 .../reader/FlowActivityEntityReader.java|  59 +--
 .../storage/reader/FlowRunEntityReader.java | 101 ++---
 .../storage/reader/GenericEntityReader.java | 192 -
 .../storage/reader/TimelineEntityReader.java| 101 ++---
 .../reader/TimelineEntityReaderFactory.java |  74 ++--
 .../TestFileSystemTimelineReaderImpl.java   | 156 ---
 .../storage/TestHBaseTimelineStorage.java   | 252 ++-
 .../flow/TestHBaseStorageFlowActivity.java  |  33 +-
 .../storage/flow/TestHBaseStorageFlowRun.java   |  48 ++-
 20 files changed, 1367 insertions(+), 1088 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/675612d4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineDataToRetrieve.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineDataToRetrieve.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineDataToRetrieve.java
new file mode 100644
index 0000000..0cc83d7
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineDataToRetrieve.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timelineservice.reader;
+
+import java.util.EnumSet;
+
+import org.apache.hadoop.classification.InterfaceAudience.Private;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter;
+import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
+
+/**
+ * Encapsulates information regarding which data to retrieve for each entity
+ * while querying.
+ * Data to retrieve contains the following :
+ * <ul>
+ * <li><b>confsToRetrieve</b> - Used for deciding which configs to return
+ * in response. This is represented as a {@link TimelineFilterList} object
+ * containing {@link TimelinePrefixFilter} objects. These can either be
+ * exact config keys' or prefixes which are then compared against config
+ * keys' to decide configs(inside entities) to return in response. If null
+ * or empty, all configurations will be fetched if fieldsToRetrieve
+ * contains {@link Field#CONFIGS} or {@link Field#ALL}. This should not be
+ * confused

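A sketch of how the class composes with the prefix filters it documents. The three-argument TimelineDataToRetrieve constructor appears in the updated tests elsewhere in this series; the TimelinePrefixFilter constructor arguments are an assumption.

    // Assumed constructor: TimelinePrefixFilter(TimelineCompareOp, String prefix).
    TimelineFilterList metricsToRetrieve = new TimelineFilterList(
        new TimelinePrefixFilter(TimelineCompareOp.EQUAL, "MAP_"));
    // Fetch metrics whose ids start with "MAP_" in addition to the default fields.
    TimelineDataToRetrieve retrieve = new TimelineDataToRetrieve(
        null, metricsToRetrieve, EnumSet.of(Field.METRICS));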
[41/50] [abbrv] hadoop git commit: YARN-3863. Support complex filters in TimelineReader (Varun Saxena via sjlee)

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineStorageUtils.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineStorageUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineStorageUtils.java
index b5fc214..2d85bab 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineStorageUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineStorageUtils.java
@@ -17,21 +17,26 @@
 
 package org.apache.hadoop.yarn.server.timelineservice.storage.common;
 
+import java.io.IOException;
 import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.SortedSet;
-import java.io.IOException;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
@@ -39,6 +44,15 @@ import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
 import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilter.TimelineFilterType;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter;
+import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationCompactionDimension;
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationOperation;
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute;
@@ -53,6 +67,8 @@ public final class TimelineStorageUtils {
   private TimelineStorageUtils() {
   }
 
+  private static final Log LOG = LogFactory.getLog(TimelineStorageUtils.class);
+
   /** empty bytes. */
   public static final byte[] EMPTY_BYTES = new byte[0];
 
@@ -312,6 +328,21 @@ public final class TimelineStorageUtils {
   }
 
   /**
+   * Check if we have a certain field amongst fields to retrieve. This method
+   * checks against {@link Field#ALL} as well because that would mean field
+   * passed needs to be matched.
+   *
+   * @param fieldsToRetrieve fields to be retrieved.
+   * @param requiredField fields to be checked in fieldsToRetrieve.
+   * @return true if has the required field, false otherwise.
+   */
+  public static boolean hasField(EnumSet<Field> fieldsToRetrieve,
+      Field requiredField) {
+    return fieldsToRetrieve.contains(Field.ALL) ||
+        fieldsToRetrieve.contains(requiredField);
+  }
+
+  /**
* Checks if the input TimelineEntity object is an ApplicationEntity.
*
* @param te TimelineEntity object.
@@ -385,87 +416,317 @@ public final class TimelineStorageUtils {
   }
 
   /**
+   * Matches key-values filter. Used for relatesTo/isRelatedTo filters.
    *
-   * @param entityRelations the relations of an entity
-   * @param relationFilters the relations for filtering
-   * @return a boolean flag to indicate if both match
+   * @param entity entity which holds relatesTo/isRelatedTo relations

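The helper's contract in a few lines, using only what the diff above defines:

    EnumSet<Field> fields = EnumSet.of(Field.INFO, Field.ALL);
    // Field.ALL short-circuits the check, so any specific field "matches".
    boolean fetchConfigs = TimelineStorageUtils.hasField(fields, Field.CONFIGS); // true
    boolean fetchMetrics =
        TimelineStorageUtils.hasField(EnumSet.of(Field.INFO), Field.METRICS);    // false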
[22/50] [abbrv] hadoop git commit: YARN-4446. Refactor reader API for better extensibility (Varun Saxena via sjlee)

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/675612d4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
index 0eeb195..ccb33b7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
@@ -20,17 +20,14 @@ package org.apache.hadoop.yarn.server.timelineservice.storage;
 
 import java.io.IOException;
 
-import java.util.EnumSet;
-import java.util.Map;
 import java.util.Set;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.service.Service;
-import org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity;
-import org.apache.hadoop.yarn.api.records.timelineservice.FlowRunEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
-import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
-import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
+import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
+import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
 
 /** ATSv2 reader interface. */
 @Private
@@ -38,11 +35,6 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefi
 public interface TimelineReader extends Service {
 
   /**
-   * Default limit for {@link #getEntities}.
-   */
-  long DEFAULT_LIMIT = 100;
-
-  /**
* Possible fields to retrieve for {@link #getEntities} and
* {@link #getEntity}.
*/
@@ -57,55 +49,61 @@ public interface TimelineReader extends Service {
   }
 
   /**
-   * The API to fetch the single entity given the entity identifier in the
-   * scope of the given context.
-   *
-   * @param userId
-   *Context user Id(optional).
-   * @param clusterId
-   *Context cluster Id(mandatory).
-   * @param flowName
-   *Context flow Id (optional).
-   * @param flowRunId
-   *Context flow run Id (optional).
-   * @param appId
-   *Context app Id (mandatory)
-   * @param entityType
-   *Entity type (mandatory)
-   * @param entityId
-   *Entity Id (mandatory)
-   * @param confsToRetrieve
-   *Used for deciding which configs to return in response. This is
-   *represented as a {@link TimelineFilterList} object containing
-   *{@link TimelinePrefixFilter} objects. These can either be exact config
-   *keys' or prefixes which are then compared against config keys' to decide
-   *configs to return in response.
-   * @param metricsToRetrieve
-   *Used for deciding which metrics to return in response. This is
-   *represented as a {@link TimelineFilterList} object containing
-   *{@link TimelinePrefixFilter} objects. These can either be exact metric
-   *ids' or prefixes which are then compared against metric ids' to decide
-   *metrics to return in response.
-   * @param fieldsToRetrieve
-   *Specifies which fields of the entity object to retrieve(optional), see
-   *{@link Field}. If null, retrieves 4 fields namely entity id,
-   *entity type and entity created time. All fields will be returned if
-   *{@link Field#ALL} is specified.
-   * @return a {@link TimelineEntity} instance or null. The entity will
-   *contain the metadata plus the given fields to retrieve.
+   * The API to fetch the single entity given the identifier(depending on
+   * the entity type) in the scope of the given context.
+   * @param context Context which defines the scope in which query has to be
+   *    made. Use getters of {@link TimelineReaderContext} to fetch context
+   *    fields. Context contains the following :
+   *    <ul>
+   *    <li><b>entityType</b> - Entity type(mandatory).</li>
+   *    <li><b>clusterId</b> - Identifies the cluster(mandatory).</li>
+   *    <li><b>userId</b> - Identifies the user.</li>
+   *    <li><b>flowName</b> - Context flow name.</li>
+   *    <li><b>flowRunId</b> - Context flow run id.</li>
+   *    <li><b>appId</b> - Context app id.</li>
+   *    <li><b>entityId</b> - Entity id.</li>
+   *    </ul>
+   *    Fields in context which are mandatory depends on entity type.

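The constructor mirrors the field list above; the argument order is taken from the updated tests in this series:

    // clusterId, userId, flowName, flowRunId, appId, entityType, entityId.
    TimelineReaderContext context = new TimelineReaderContext(
        "cluster1",   // mandatory
        "user1",
        "flow1",
        1L,
        "app1",
        "app",        // mandatory
        "id_1");
    TimelineEntity entity =
        reader.getEntity(context, new TimelineDataToRetrieve(null, null, null));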
[33/50] [abbrv] hadoop git commit: YARN-4062. Add the flush and compaction functionality via coprocessors and scanners for flow run table (Vrushali C via sjlee)

2016-05-04 Thread gtcarrera9
YARN-4062. Add the flush and compaction functionality via coprocessors and 
scanners for flow run table (Vrushali C via sjlee)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3157d32a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3157d32a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3157d32a

Branch: refs/heads/YARN-2928
Commit: 3157d32ae8ed526430beb85a4920243d38bd2b41
Parents: cd44408
Author: Sangjin Lee 
Authored: Thu Mar 17 18:22:04 2016 -0700
Committer: Li Lu 
Committed: Wed May 4 16:35:14 2016 -0700

--
 .../hadoop/yarn/conf/YarnConfiguration.java |  16 +
 .../src/main/resources/yarn-default.xml |  10 +
 .../storage/HBaseTimelineWriterImpl.java|   5 +-
 .../storage/common/TimelineStorageUtils.java|  55 ++
 .../storage/common/TimestampGenerator.java  |  13 +-
 .../storage/flow/AggregationOperation.java  |  17 +-
 .../storage/flow/FlowRunColumn.java |   4 +-
 .../storage/flow/FlowRunColumnPrefix.java   |   2 +-
 .../storage/flow/FlowRunCoprocessor.java|  70 +-
 .../storage/flow/FlowRunRowKey.java |  16 +
 .../storage/flow/FlowScanner.java   | 269 ++--
 .../storage/flow/FlowScannerOperation.java  |  46 ++
 .../storage/flow/TestFlowDataGenerator.java | 178 +-
 .../storage/flow/TestHBaseStorageFlowRun.java   | 112 +++-
 .../flow/TestHBaseStorageFlowRunCompaction.java | 635 +++
 15 files changed, 1362 insertions(+), 86 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3157d32a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 5115758..b79b3a5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -1900,6 +1900,22 @@ public class YarnConfiguration extends Configuration {
   public static final int
   DEFAULT_TIMELINE_SERVICE_WRITER_FLUSH_INTERVAL_SECONDS = 60;
 
+  /**
+   * The name for setting that controls how long the final value of
+   * a metric of a completed app is retained before merging
+   * into the flow sum.
+   */
+  public static final String APP_FINAL_VALUE_RETENTION_THRESHOLD =
+  TIMELINE_SERVICE_PREFIX
+  + "coprocessor.app-final-value-retention-milliseconds";
+
+  /**
+   * The setting that controls how long the final value of a metric
+   * of a completed app is retained before merging into the flow sum.
+   */
+  public static final long DEFAULT_APP_FINAL_VALUE_RETENTION_THRESHOLD = 3 * 24
+  * 60 * 60 * 1000L;
+
   public static final String ATS_APP_COLLECTOR_LINGER_PERIOD_IN_MS =
   TIMELINE_SERVICE_PREFIX + "app-collector.linger-period.ms";
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3157d32a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index ca913b8..13d4b4f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -2128,6 +2128,7 @@
     <value>604800</value>
   </property>
 
+
   <property>
     <name>yarn.timeline-service.entity-group-fs-store.leveldb-cache-read-cache-size</name>
@@ -2196,6 +2197,15 @@
     <name>yarn.timeline-service.timeline-client.number-of-async-entities-to-merge</name>
     <value>10</value>
   </property>
+
+  <property>
+    <description>The setting that controls how long the final value
+    of a metric of a completed app is retained before merging into
+    the flow sum.</description>
+    <name>yarn.timeline-service.coprocessor.app-final-value-retention-milliseconds</name>
+    <value>259200000</value>
+  </property>
+
   <property>

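The default above is the Java constant spelled out: 3 * 24 * 60 * 60 * 1000L = 259,200,000 ms, i.e. three days. Reading it back through the standard Configuration API (a sketch, not part of the patch):

    import org.apache.hadoop.conf.Configuration;

    Configuration conf = new Configuration();
    // Falls back to the three-day default when yarn-site.xml does not override it.
    long retentionMs = conf.getLong(
        "yarn.timeline-service.coprocessor.app-final-value-retention-milliseconds",
        3 * 24 * 60 * 60 * 1000L);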
http://git-wip-us.apache.org/repos/asf/hadoop/blob/3157d32a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/HBaseTimelineWriterImpl.java
--
diff --git 

[49/50] [abbrv] hadoop git commit: YARN-4447. Provide a mechanism to represent complex filters and parse them at the REST layer (Varun Saxena via sjlee)

2016-05-04 Thread gtcarrera9
YARN-4447. Provide a mechanism to represent complex filters and parse them at 
the REST layer (Varun Saxena via sjlee)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/047cde55
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/047cde55
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/047cde55

Branch: refs/heads/YARN-2928
Commit: 047cde55aa957a1288b88fa503ef1204b56c649b
Parents: 28ec2be
Author: Sangjin Lee 
Authored: Mon May 2 14:06:19 2016 -0700
Committer: Li Lu 
Committed: Wed May 4 16:38:09 2016 -0700

--
 .../reader/TimelineParseConstants.java  |  34 +
 .../reader/TimelineParseException.java  |  36 +
 .../timelineservice/reader/TimelineParser.java  |  37 +
 .../reader/TimelineParserForCompareExpr.java| 300 ++
 .../reader/TimelineParserForDataToRetrieve.java |  95 ++
 .../reader/TimelineParserForEqualityExpr.java   | 343 +++
 .../reader/TimelineParserForExistFilters.java   |  51 +
 .../reader/TimelineParserForKVFilters.java  |  78 ++
 .../reader/TimelineParserForNumericFilters.java |  72 ++
 .../TimelineParserForRelationFilters.java   |  71 ++
 .../reader/TimelineReaderWebServices.java   | 220 -
 .../reader/TimelineReaderWebServicesUtils.java  | 196 ++--
 .../reader/filter/TimelineCompareFilter.java|  73 +-
 .../reader/filter/TimelineExistsFilter.java |  49 +-
 .../reader/filter/TimelineFilterList.java   |  36 +
 .../reader/filter/TimelineKeyValueFilter.java   |  13 +
 .../reader/filter/TimelineKeyValuesFilter.java  |  61 +-
 .../reader/filter/TimelinePrefixFilter.java |  37 +
 .../reader/TestTimelineReaderWebServices.java   |  14 +-
 ...stTimelineReaderWebServicesHBaseStorage.java | 900 +-
 .../TestTimelineReaderWebServicesUtils.java | 923 +++
 21 files changed, 3442 insertions(+), 197 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/047cde55/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineParseConstants.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineParseConstants.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineParseConstants.java
new file mode 100644
index 0000000..662a102
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineParseConstants.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timelineservice.reader;
+
+/**
+ * Set of constants used while parsing filter expressions.
+ */
+final class TimelineParseConstants {
+  private TimelineParseConstants() {
+  }
+  static final String COMMA_DELIMITER = ",";
+  static final String COLON_DELIMITER = ":";
+  static final char NOT_CHAR = '!';
+  static final char SPACE_CHAR = ' ';
+  static final char OPENING_BRACKET_CHAR = '(';
+  static final char CLOSING_BRACKET_CHAR = ')';
+  static final char COMMA_CHAR = ',';
+}
\ No newline at end of file

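A hypothetical helper (not part of the patch) showing how these delimiters typically compose when tokenizing a filter expression:

    import java.util.Arrays;
    import java.util.List;

    final class DelimiterSketch {
      // Strip an optional leading NOT_CHAR and split the rest on COMMA_DELIMITER.
      static List<String> splitValues(String expr) {
        boolean negated = !expr.isEmpty() && expr.charAt(0) == '!';
        String body = negated ? expr.substring(1) : expr;
        return Arrays.asList(body.split(","));
      }
    }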
http://git-wip-us.apache.org/repos/asf/hadoop/blob/047cde55/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineParseException.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineParseException.java
 

[32/50] [abbrv] hadoop git commit: YARN-4062. Add the flush and compaction functionality via coprocessors and scanners for flow run table (Vrushali C via sjlee)

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/3157d32a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java
new file mode 100644
index 0000000..ace218b
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java
@@ -0,0 +1,635 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timelineservice.storage.flow;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertNotEquals;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.List;
+import java.util.SortedSet;
+import java.util.TreeSet;
+import java.util.ArrayList;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
+import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter;
+import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl;
+import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineSchemaCreator;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimestampGenerator;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStorageUtils;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
+
+/**
+ * Tests the FlowRun and FlowActivity Tables
+ */
+public class TestHBaseStorageFlowRunCompaction {
+
+  private static HBaseTestingUtility util;
+
+  private final String metric1 = "MAP_SLOT_MILLIS";
+  private final String metric2 = "HDFS_BYTES_READ";
+
+  private final byte[] aRowKey = Bytes.toBytes("a");
+  private final byte[] aFamily = Bytes.toBytes("family");
+  private final byte[] aQualifier = Bytes.toBytes("qualifier");
+
+  @BeforeClass
+  public static void setupBeforeClass() throws Exception {
+util = new HBaseTestingUtility();
+Configuration conf = util.getConfiguration();
+conf.setInt("hfile.format.version", 3);
+util.startMiniCluster();
+createSchema();
+  }
+
+  private static void createSchema() throws IOException {
+TimelineSchemaCreator.createAllTables(util.getConfiguration(), false);
+  }
+
+  @Test
+  public void testWriteFlowRunCompaction() throws Exception {
+String cluster = "kompaction_cluster1";
+String user = "kompaction_FlowRun__user1";
+String flow = "kompaction_flowRun_flow_name";
+String flowVersion = 

[46/50] [abbrv] hadoop git commit: MAPREDUCE-6424. Store MR counters as timeline metrics instead of event. (Naganarasimha G R via varunsaxena)

2016-05-04 Thread gtcarrera9
MAPREDUCE-6424. Store MR counters as timeline metrics instead of event. 
(Naganarasimha G R via varunsaxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/28ec2bee
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/28ec2bee
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/28ec2bee

Branch: refs/heads/YARN-2928
Commit: 28ec2beeed35c5f30dfde0a62e36b7b3eebcf3b3
Parents: caf23c3
Author: Varun Saxena 
Authored: Sun May 1 17:17:24 2016 +0530
Committer: Li Lu 
Committed: Wed May 4 16:38:05 2016 -0700

--
 .../jobhistory/JobHistoryEventHandler.java  | 29 +++-
 .../hadoop/mapreduce/jobhistory/TestEvents.java | 12 +++-
 .../mapreduce/jobhistory/AMStartedEvent.java| 12 +++-
 .../mapreduce/jobhistory/HistoryEvent.java  |  6 ++
 .../mapreduce/jobhistory/JobFinishedEvent.java  | 20 --
 .../jobhistory/JobInfoChangeEvent.java  | 10 ++-
 .../mapreduce/jobhistory/JobInitedEvent.java| 11 ++-
 .../jobhistory/JobPriorityChangeEvent.java  | 11 ++-
 .../jobhistory/JobQueueChangeEvent.java |  8 +++
 .../jobhistory/JobStatusChangedEvent.java   | 11 ++-
 .../mapreduce/jobhistory/JobSubmittedEvent.java | 10 ++-
 .../JobUnsuccessfulCompletionEvent.java | 11 ++-
 .../jobhistory/MapAttemptFinishedEvent.java | 12 +++-
 .../jobhistory/NormalizedResourceEvent.java |  8 +++
 .../jobhistory/ReduceAttemptFinishedEvent.java  | 12 +++-
 .../jobhistory/TaskAttemptFinishedEvent.java| 11 ++-
 .../jobhistory/TaskAttemptStartedEvent.java | 13 +++-
 .../TaskAttemptUnsuccessfulCompletionEvent.java | 17 +++--
 .../mapreduce/jobhistory/TaskFailedEvent.java   | 14 ++--
 .../mapreduce/jobhistory/TaskFinishedEvent.java | 13 +++-
 .../mapreduce/jobhistory/TaskStartedEvent.java  |  8 +++
 .../mapreduce/jobhistory/TaskUpdatedEvent.java  | 11 ++-
 .../mapreduce/util/JobHistoryEventUtils.java| 21 +-
 .../mapred/TestMRTimelineEventHandling.java | 74 +++-
 .../org/apache/hadoop/mapred/UtilsForTests.java | 39 +--
 25 files changed, 345 insertions(+), 59 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/28ec2bee/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
--
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
index 639f56e..d32524e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
@@ -25,6 +25,7 @@ import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.Timer;
 import java.util.TimerTask;
 import java.util.concurrent.BlockingQueue;
@@ -68,6 +69,8 @@ import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
 import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity;
+import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
 import org.apache.hadoop.yarn.client.api.TimelineClient;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.EventHandler;
@@ -1072,6 +1075,15 @@ public class JobHistoryEventHandler extends AbstractService
 return entity;
   }
   
+  // create ApplicationEntity with job finished Metrics from HistoryEvent
+  private org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
+  createAppEntityWithJobMetrics(HistoryEvent event, JobId jobId) {
+ApplicationEntity entity = new ApplicationEntity();
+entity.setId(jobId.getAppId().toString());
+entity.setMetrics(event.getTimelineMetrics());
+return entity;
+  }
+
   // create BaseEntity from HistoryEvent with adding other info, like: 
   // timestamp and entityType.
   private org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity 
@@ -1088,6 +1100,10 @@ public class JobHistoryEventHandler extends AbstractService
 if (setCreatedTime) {
   entity.setCreatedTime(timestamp);

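The gist of the change, as a self-contained sketch; the counter name and value are hypothetical placeholders:

    import java.util.Collections;
    import org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity;
    import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;

    final class CounterMetricSketch {
      // Publish a job counter as a timeline metric on the application entity
      // instead of embedding it in an event payload.
      static ApplicationEntity countersAsMetrics(String appId) {
        ApplicationEntity appEntity = new ApplicationEntity();
        appEntity.setId(appId);
        TimelineMetric metric = new TimelineMetric();
        metric.setId("MAP_INPUT_RECORDS"); // hypothetical counter name
        metric.addValue(System.currentTimeMillis(), 42L);
        appEntity.setMetrics(Collections.singleton(metric));
        return appEntity;
      }
    }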
[40/50] [abbrv] hadoop git commit: YARN-3863. Support complex filters in TimelineReader (Varun Saxena via sjlee)

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
index d8f73d4..6696ac5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/GenericEntityReader.java
@@ -19,13 +19,8 @@ package org.apache.hadoop.yarn.server.timelineservice.storage.reader;
 
 import java.io.IOException;
 import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Get;
@@ -33,28 +28,22 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
-import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
 import org.apache.hadoop.hbase.filter.FamilyFilter;
 import org.apache.hadoop.hbase.filter.FilterList;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.FilterList.Operator;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils;
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
-import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable;
-import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix;
 import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowColumn;
 import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowRowKey;
 import org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowTable;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStorageUtils;
 import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityColumn;
 import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityColumnFamily;
@@ -71,7 +60,6 @@ import com.google.common.base.Preconditions;
  */
 class GenericEntityReader extends TimelineEntityReader {
   private static final EntityTable ENTITY_TABLE = new EntityTable();
-  private static final Log LOG = LogFactory.getLog(GenericEntityReader.class);
 
   /**
* Used to look up the flow context.
@@ -97,92 +85,322 @@ class GenericEntityReader extends TimelineEntityReader {
   }
 
   @Override
-  protected FilterList constructFilterListBasedOnFields() {
-    FilterList list = new FilterList(Operator.MUST_PASS_ONE);
-    TimelineDataToRetrieve dataToRetrieve = getDataToRetrieve();
-    // Fetch all the columns.
-    if (dataToRetrieve.getFieldsToRetrieve().contains(Field.ALL) &&
-        (dataToRetrieve.getConfsToRetrieve() == null ||
-        dataToRetrieve.getConfsToRetrieve().getFilterList().isEmpty()) &&
-        (dataToRetrieve.getMetricsToRetrieve() == null ||
-        dataToRetrieve.getMetricsToRetrieve().getFilterList().isEmpty())) {
-      return list;
+  protected FilterList constructFilterListBasedOnFilters() throws IOException {
+    // Filters here cannot be null for multiple entity reads

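For orientation, the fast path being replaced reduces to one boolean: no column filter is needed when every field is requested and no config or metric prefix filters are set. Restated as a fragment over the getters visible in the removed code:

    boolean fetchAllColumns =
        fieldsToRetrieve.contains(Field.ALL)
            && (confsToRetrieve == null
                || confsToRetrieve.getFilterList().isEmpty())
            && (metricsToRetrieve == null
                || metricsToRetrieve.getFilterList().isEmpty());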
[42/50] [abbrv] hadoop git commit: YARN-3863. Support complex filters in TimelineReader (Varun Saxena via sjlee)

2016-05-04 Thread gtcarrera9
YARN-3863. Support complex filters in TimelineReader (Varun Saxena via sjlee)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2b2df86c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2b2df86c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2b2df86c

Branch: refs/heads/YARN-2928
Commit: 2b2df86c775f008c258fe128d3686f4a4d3b1a52
Parents: 408f001
Author: Sangjin Lee 
Authored: Mon Apr 11 21:07:32 2016 -0700
Committer: Li Lu 
Committed: Wed May 4 16:35:33 2016 -0700

--
 .../reader/TimelineEntityFilters.java   |  170 +-
 .../reader/TimelineReaderWebServicesUtils.java  |   88 +-
 .../reader/filter/TimelineCompareFilter.java|   35 +-
 .../reader/filter/TimelineExistsFilter.java |   62 +
 .../reader/filter/TimelineFilter.java   |   16 +-
 .../reader/filter/TimelineFilterList.java   |   14 +
 .../reader/filter/TimelineFilterUtils.java  |  206 +-
 .../reader/filter/TimelineKeyValueFilter.java   |   48 +
 .../reader/filter/TimelineKeyValuesFilter.java  |   71 +
 .../reader/filter/TimelinePrefixFilter.java |6 +
 .../storage/FileSystemTimelineReaderImpl.java   |   36 +-
 .../storage/HBaseTimelineWriterImpl.java|   31 +-
 .../storage/application/ApplicationColumn.java  |   28 +-
 .../application/ApplicationColumnPrefix.java|   37 +-
 .../storage/apptoflow/AppToFlowColumn.java  |   16 +
 .../timelineservice/storage/common/Column.java  |   17 +
 .../storage/common/ColumnHelper.java|   16 +
 .../storage/common/ColumnPrefix.java|   35 +
 .../common/TimelineEntityFiltersType.java   |   71 +
 .../storage/common/TimelineStorageUtils.java|  461 +++-
 .../storage/entity/EntityColumn.java|   28 +-
 .../storage/entity/EntityColumnPrefix.java  |   38 +-
 .../storage/flow/FlowActivityColumnPrefix.java  |   35 +
 .../storage/flow/FlowRunColumn.java |3 +
 .../storage/flow/FlowRunColumnPrefix.java   |   26 +
 .../storage/flow/FlowScanner.java   |1 +
 .../storage/reader/ApplicationEntityReader.java |  426 ++--
 .../reader/FlowActivityEntityReader.java|7 +
 .../storage/reader/FlowRunEntityReader.java |   97 +-
 .../storage/reader/GenericEntityReader.java |  623 ++---
 .../storage/reader/TimelineEntityReader.java|   71 +-
 .../reader/TestTimelineReaderWebServices.java   |   10 +-
 .../TestFileSystemTimelineReaderImpl.java   |  332 ++-
 .../storage/TestHBaseTimelineStorage.java   | 2172 +-
 .../storage/flow/TestFlowDataGenerator.java |   16 +
 .../storage/flow/TestHBaseStorageFlowRun.java   |  267 ++-
 .../flow/TestHBaseStorageFlowRunCompaction.java |2 +-
 .../reader/filter/TimelineExistsFilter.java |   62 +
 .../reader/filter/TimelineKeyValueFilter.java   |   48 +
 .../reader/filter/TimelineKeyValuesFilter.java  |   71 +
 .../common/TimelineEntityFiltersType.java   |   71 +
 41 files changed, 5054 insertions(+), 816 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java
index 5b2c300..4821d31 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineEntityFilters.java
@@ -18,11 +18,14 @@
 
 package org.apache.hadoop.yarn.server.timelineservice.reader;
 
-import java.util.Map;
-import java.util.Set;
-
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter;
+import

[50/50] [abbrv] hadoop git commit: MAPREDUCE-6688. Store job configurations in Timeline Service v2 (Varun Saxena via sjlee)

2016-05-04 Thread gtcarrera9
MAPREDUCE-6688. Store job configurations in Timeline Service v2 (Varun Saxena 
via sjlee)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d2c4237f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d2c4237f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d2c4237f

Branch: refs/heads/YARN-2928
Commit: d2c4237fa7b15aaddfdd55f906f7953b29cf2fe8
Parents: 047cde5
Author: Sangjin Lee 
Authored: Tue May 3 09:19:36 2016 -0700
Committer: Li Lu 
Committed: Wed May 4 16:38:24 2016 -0700

--
 .../jobhistory/JobHistoryEventHandler.java  | 57 +++-
 .../mapreduce/v2/app/job/impl/JobImpl.java  |  2 +-
 .../mapreduce/jobhistory/JobSubmittedEvent.java | 38 +++-
 .../mapreduce/util/JobHistoryEventUtils.java|  3 +
 .../mapred/TestMRTimelineEventHandling.java | 92 +---
 .../org/apache/hadoop/mapred/UtilsForTests.java |  8 ++
 6 files changed, 181 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d2c4237f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
--
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
index d32524e..db58ecf 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
@@ -1074,7 +1074,16 @@ public class JobHistoryEventHandler extends AbstractService
     entity.setId(jobId.toString());
     return entity;
   }
-
+
+  private org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
+      createJobEntity(JobId jobId) {
+    org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity =
+        new org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity();
+    entity.setId(jobId.toString());
+    entity.setType(MAPREDUCE_JOB_ENTITY_TYPE);
+    return entity;
+  }
+
   // create ApplicationEntity with job finished Metrics from HistoryEvent
   private org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity
   createAppEntityWithJobMetrics(HistoryEvent event, JobId jobId) {
@@ -1133,6 +1142,46 @@ public class JobHistoryEventHandler extends AbstractService
 return entity;
   }
 
+  private void publishConfigsOnJobSubmittedEvent(JobSubmittedEvent event,
+  JobId jobId) {
+if (event.getJobConf() == null) {
+  return;
+}
+// Publish job configurations both as job and app entity.
+// Configs are split into multiple entities if they exceed 100kb in size.
+org.apache.hadoop.yarn.api.records.timelineservice.
+TimelineEntity jobEntityForConfigs = createJobEntity(jobId);
+ApplicationEntity appEntityForConfigs = new ApplicationEntity();
+String appId = jobId.getAppId().toString();
+appEntityForConfigs.setId(appId);
+try {
+  int configSize = 0;
+      for (Map.Entry<String, String> entry : event.getJobConf()) {
+int size = entry.getKey().length() + entry.getValue().length();
+configSize += size;
+if (configSize > JobHistoryEventUtils.ATS_CONFIG_PUBLISH_SIZE_BYTES) {
+  if (jobEntityForConfigs.getConfigs().size() > 0) {
+timelineClient.putEntities(jobEntityForConfigs);
+timelineClient.putEntities(appEntityForConfigs);
+jobEntityForConfigs = createJobEntity(jobId);
+appEntityForConfigs = new ApplicationEntity();
+appEntityForConfigs.setId(appId);
+  }
+  configSize = size;
+}
+jobEntityForConfigs.addConfig(entry.getKey(), entry.getValue());
+appEntityForConfigs.addConfig(entry.getKey(), entry.getValue());
+  }
+  if (configSize > 0) {
+timelineClient.putEntities(jobEntityForConfigs);
+timelineClient.putEntities(appEntityForConfigs);
+  }
+} catch (IOException | YarnException e) {
+  LOG.error("Exception while publishing configs on JOB_SUBMITTED Event " +
+  " for the job : " + jobId, e);
+}
+  }
+
   private void processEventForNewTimelineService(HistoryEvent event,
   JobId jobId, long timestamp) {
 org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity 

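The splitting rule in publishConfigsOnJobSubmittedEvent, restated as a hypothetical standalone helper (the name and the List-of-Map shape are illustrative, not from the patch): accumulate key plus value lengths and start a fresh chunk once the running total would cross the cap.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    final class ConfigChunkSketch {
      // Mirror of the size accounting above: flush what has accumulated when
      // adding the next entry would exceed capBytes, then start a fresh chunk.
      static List<Map<String, String>> chunk(Map<String, String> conf, int capBytes) {
        List<Map<String, String>> chunks = new ArrayList<>();
        Map<String, String> current = new HashMap<>();
        int size = 0;
        for (Map.Entry<String, String> e : conf.entrySet()) {
          int entrySize = e.getKey().length() + e.getValue().length();
          if (size + entrySize > capBytes && !current.isEmpty()) {
            chunks.add(current);
            current = new HashMap<>();
            size = 0;
          }
          current.put(e.getKey(), e.getValue());
          size += entrySize;
        }
        if (!current.isEmpty()) {
          chunks.add(current);
        }
        return chunks;
      }
    }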
[39/50] [abbrv] hadoop git commit: YARN-3863. Support complex filters in TimelineReader (Varun Saxena via sjlee)

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/2b2df86c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java
index 4e07ecf..6b57ec4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorage.java
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -52,10 +53,14 @@ import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineDataToRetrieve;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineEntityFilters;
 import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContext;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareFilter;
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineCompareOp;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValueFilter;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineExistsFilter;
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter;
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList.Operator;
+import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter;
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
 import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumn;
 import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix;
@@ -75,9 +80,6 @@ import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSet;
-
 /**
  * Various tests to test writing entities to HBase and reading them back from
  * it.
@@ -113,30 +115,29 @@ public class TestHBaseTimelineStorage {
     String id = "application_11_";
     entity.setId(id);
     entity.setType(TimelineEntityType.YARN_APPLICATION.toString());
-    Long cTime = 1425016501000L;
+    Long cTime = 1425016502000L;
     entity.setCreatedTime(cTime);
     // add the info map in Timeline Entity
     Map<String, Object> infoMap = new HashMap<String, Object>();
-    infoMap.put("infoMapKey1", "infoMapValue1");
-    infoMap.put("infoMapKey2", 10);
+    infoMap.put("infoMapKey1", "infoMapValue2");
+    infoMap.put("infoMapKey2", 20);
+    infoMap.put("infoMapKey3", 85.85);
     entity.addInfo(infoMap);
     // add the isRelatedToEntity info
-    String key = "task";
-    String value = "is_related_to_entity_id_here";
     Set<String> isRelatedToSet = new HashSet<String>();
-    isRelatedToSet.add(value);
+    isRelatedToSet.add("relatedto1");
     Map<String, Set<String>> isRelatedTo = new HashMap<String, Set<String>>();
-    isRelatedTo.put(key, isRelatedToSet);
+    isRelatedTo.put("task", isRelatedToSet);
     entity.setIsRelatedToEntities(isRelatedTo);
     // add the relatesTo info
-    key = "container";
-    value = "relates_to_entity_id_here";
     Set<String> relatesToSet = new HashSet<String>();
-    relatesToSet.add(value);
-    value = "relates_to_entity_id_here_Second";
-    relatesToSet.add(value);
+    relatesToSet.add("relatesto1");
+    relatesToSet.add("relatesto3");
     Map<String, Set<String>> relatesTo = new HashMap<String, Set<String>>();
-    relatesTo.put(key, relatesToSet);
+    relatesTo.put("container", relatesToSet);
+    Set<String> relatesToSet11 = new HashSet<String>();
+    relatesToSet11.add("relatesto4");
+    relatesTo.put("container1", relatesToSet11);
     entity.setRelatesToEntities(relatesTo);
     // add some config entries
     Map<String, String> conf = new HashMap<String, String>();
@@ -166,8 +167,8 @@ public class TestHBaseTimelineStorage {
     metrics.add(m12);
     entity.addMetrics(metrics);
     TimelineEvent event = new 
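The hunk breaks off here. Pulling its visible pieces together, this is roughly how the test assembles a v2 entity; a hedged sketch using only the API names that appear in the diff, with sample ids and values of my own:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;

public final class EntityBuilderSketch {
  static TimelineEntity buildSampleEntity() {
    TimelineEntity entity = new TimelineEntity();
    entity.setId("application_1111_0001"); // sample id, not from the test
    entity.setType(TimelineEntityType.YARN_APPLICATION.toString());
    entity.setCreatedTime(1425016502000L);

    // free-form info: values may be strings or numbers
    Map<String, Object> info = new HashMap<String, Object>();
    info.put("infoMapKey1", "infoMapValue2");
    info.put("infoMapKey3", 85.85);
    entity.addInfo(info);

    // relationships: related entity type -> ids of related entities
    Set<String> relatedIds = new HashSet<String>();
    relatedIds.add("relatedto1");
    Map<String, Set<String>> isRelatedTo = new HashMap<String, Set<String>>();
    isRelatedTo.put("task", relatedIds);
    entity.setIsRelatedToEntities(isRelatedTo);
    return entity;
  }
}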

[06/50] [abbrv] hadoop git commit: YARN-4445. Unify the term flowId and flowName in timeline v2 codebase. Contributed by Zhan Zhang.

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/89b4101f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java
index a14d2bc..2cd9625 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunRowKey.java
@@ -27,14 +27,14 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStor
 public class FlowRunRowKey {
   private final String clusterId;
   private final String userId;
-  private final String flowId;
+  private final String flowName;
   private final long flowRunId;
 
-  public FlowRunRowKey(String clusterId, String userId, String flowId,
+  public FlowRunRowKey(String clusterId, String userId, String flowName,
       long flowRunId) {
     this.clusterId = clusterId;
     this.userId = userId;
-    this.flowId = flowId;
+    this.flowName = flowName;
     this.flowRunId = flowRunId;
   }
 
@@ -46,8 +46,8 @@ public class FlowRunRowKey {
     return userId;
   }
 
-  public String getFlowId() {
-    return flowId;
+  public String getFlowName() {
+    return flowName;
   }
 
   public long getFlowRunId() {
@@ -56,33 +56,33 @@ public class FlowRunRowKey {
 
   /**
    * Constructs a row key prefix for the flow run table as follows: {
-   * clusterId!userI!flowId!}
+   * clusterId!userI!flowName!}
    *
    * @param clusterId
    * @param userId
-   * @param flowId
+   * @param flowName
    * @return byte array with the row key prefix
    */
   public static byte[] getRowKeyPrefix(String clusterId, String userId,
-      String flowId) {
+      String flowName) {
     return Bytes.toBytes(Separator.QUALIFIERS.joinEncoded(clusterId, userId,
-        flowId, ""));
+        flowName, ""));
   }
 
   /**
    * Constructs a row key for the entity table as follows: {
-   * clusterId!userI!flowId!Inverted Flow Run Id}
+   * clusterId!userI!flowName!Inverted Flow Run Id}
    *
    * @param clusterId
    * @param userId
-   * @param flowId
+   * @param flowName
    * @param flowRunId
    * @return byte array with the row key
    */
   public static byte[] getRowKey(String clusterId, String userId,
-      String flowId, Long flowRunId) {
+      String flowName, Long flowRunId) {
     byte[] first = Bytes.toBytes(Separator.QUALIFIERS.joinEncoded(clusterId,
-        userId, flowId));
+        userId, flowName));
     // Note that flowRunId is a long, so we can't encode them all at the same
     // time.
     byte[] second = Bytes.toBytes(TimelineStorageUtils.invertLong(flowRunId));
@@ -104,10 +104,10 @@ public class FlowRunRowKey {
         Separator.QUALIFIERS.decode(Bytes.toString(rowKeyComponents[0]));
     String userId =
         Separator.QUALIFIERS.decode(Bytes.toString(rowKeyComponents[1]));
-    String flowId =
+    String flowName =
         Separator.QUALIFIERS.decode(Bytes.toString(rowKeyComponents[2]));
     long flowRunId =
         TimelineStorageUtils.invertLong(Bytes.toLong(rowKeyComponents[3]));
-    return new FlowRunRowKey(clusterId, userId, flowId, flowRunId);
+    return new FlowRunRowKey(clusterId, userId, flowName, flowRunId);
   }
 }
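The key detail in getRowKey is TimelineStorageUtils.invertLong: storing an inverted run id makes larger, i.e. newer, run ids sort first under HBase's ascending byte order. A tiny demonstration, where invertLong is my stand-in assuming the common Long.MAX_VALUE - value implementation of that helper:

import java.util.Arrays;

public final class InvertedKeySketch {
  // Stand-in for TimelineStorageUtils.invertLong (assumed to be
  // Long.MAX_VALUE - key): larger ids become smaller stored keys.
  static long invertLong(long key) {
    return Long.MAX_VALUE - key;
  }

  public static void main(String[] args) {
    Long[] storedKeys = {
        invertLong(1001L), invertLong(1002L), invertLong(1003L)};
    Arrays.sort(storedKeys); // HBase scans row keys in ascending order
    for (long stored : storedKeys) {
      // invertLong is its own inverse, so the run id can be recovered
      System.out.println(invertLong(stored)); // 1003, 1002, 1001: newest first
    }
  }
}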

http://git-wip-us.apache.org/repos/asf/hadoop/blob/89b4101f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java
index b1b93c1..2682fea 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunTable.java
@@ -47,7 

[26/50] [abbrv] hadoop git commit: YARN-4409. Fix javadoc and checkstyle issues in timelineservice code (Varun Saxena via sjlee)

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ee803e16/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMActiveServiceContext.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMActiveServiceContext.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMActiveServiceContext.java
index fc05310..12daa95 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMActiveServiceContext.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMActiveServiceContext.java
@@ -388,15 +388,15 @@ public class RMActiveServiceContext {
   @Private
   @Unstable
   public void setRMTimelineCollectorManager(
-      RMTimelineCollectorManager timelineCollectorManager) {
-    this.timelineCollectorManager = timelineCollectorManager;
+      RMTimelineCollectorManager collectorManager) {
+    this.timelineCollectorManager = collectorManager;
   }
 
   @Private
   @Unstable
   public void setSystemMetricsPublisher(
-      SystemMetricsPublisher systemMetricsPublisher) {
-    this.systemMetricsPublisher = systemMetricsPublisher;
+      SystemMetricsPublisher metricsPublisher) {
+    this.systemMetricsPublisher = metricsPublisher;
   }
 
   @Private

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ee803e16/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java
index e122ab4..4c72912 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMContextImpl.java
@@ -379,8 +379,8 @@ public class RMContextImpl implements RMContext {
 
   @Override
   public void setSystemMetricsPublisher(
-      SystemMetricsPublisher systemMetricsPublisher) {
-    this.systemMetricsPublisher = systemMetricsPublisher;
+      SystemMetricsPublisher metricsPublisher) {
+    this.systemMetricsPublisher = metricsPublisher;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ee803e16/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/AbstractSystemMetricsPublisher.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/AbstractSystemMetricsPublisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/AbstractSystemMetricsPublisher.java
index a8c00a4..d4a4fc3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/AbstractSystemMetricsPublisher.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/AbstractSystemMetricsPublisher.java
@@ -30,6 +30,10 @@ import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.event.Event;
 import org.apache.hadoop.yarn.event.EventHandler;
 
+/**
+ * Abstract implementation of SystemMetricsPublisher which is then extended by
+ * metrics publisher implementations depending on timeline service version.
+ */
 public abstract class AbstractSystemMetricsPublisher extends CompositeService
     implements SystemMetricsPublisher {
   private MultiThreadedDispatcher dispatcher;
@@ -46,13 +50,18 @@ public abstract class AbstractSystemMetricsPublisher extends CompositeService
   protected void serviceInit(Configuration conf) throws Exception {
     dispatcher =
         new 
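The new javadoc says concrete publishers are chosen by timeline service version. A hedged sketch of that selection pattern; the class names come from this patch series, but the factory and its version check are illustrative, not the RM's actual wiring:

// Sketch only: not the RM's code, just the dispatch-by-version shape.
interface SystemMetricsPublisher { /* appCreated(...), appFinished(...) etc. */ }

final class TimelineServiceV1Publisher implements SystemMetricsPublisher { }
final class TimelineServiceV2Publisher implements SystemMetricsPublisher { }
final class NoOpSystemMetricPublisher implements SystemMetricsPublisher { }

final class MetricsPublisherFactory {
  static SystemMetricsPublisher create(boolean publishingEnabled, float version) {
    if (!publishingEnabled) {
      return new NoOpSystemMetricPublisher();
    }
    // v2 publishers write through per-app collectors; v1 posts to the ATS server
    return version >= 2.0f
        ? new TimelineServiceV2Publisher()
        : new TimelineServiceV1Publisher();
  }
}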

[45/50] [abbrv] hadoop git commit: YARN-4986. Add a check in the coprocessor for table to operated on (Vrushali C via sjlee)

2016-05-04 Thread gtcarrera9
YARN-4986. Add a check in the coprocessor for table to operated on (Vrushali C 
via sjlee)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/084a334a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/084a334a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/084a334a

Branch: refs/heads/YARN-2928
Commit: 084a334ae52179e2f14bdadab5b7c6d5b447be01
Parents: 64d15d9
Author: Sangjin Lee 
Authored: Fri Apr 29 17:13:32 2016 -0700
Committer: Li Lu 
Committed: Wed May 4 16:35:40 2016 -0700

--
 .../storage/common/TimelineStorageUtils.java| 20 +++
 .../storage/entity/EntityTable.java |  2 +-
 .../storage/flow/FlowRunCoprocessor.java| 39 +++--
 .../storage/flow/FlowScanner.java   | 13 +++--
 .../storage/flow/TestHBaseStorageFlowRun.java   | 61 
 .../flow/TestHBaseStorageFlowRunCompaction.java | 36 
 6 files changed, 160 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/084a334a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineStorageUtils.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineStorageUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineStorageUtils.java
index 2d85bab..18f975a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineStorageUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TimelineStorageUtils.java
@@ -32,8 +32,10 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.Tag;
 import org.apache.hadoop.hbase.client.Result;
@@ -56,6 +58,7 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Fiel
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationCompactionDimension;
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationOperation;
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.Attribute;
+import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTable;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 
 /**
@@ -887,4 +890,21 @@ public final class TimelineStorageUtils {
     Set<TimelineEvent> eventsSet = new HashSet<>(eventsMap.values());
     entity.addEvents(eventsSet);
   }
+
+  public static boolean isFlowRunTable(HRegionInfo hRegionInfo,
+      Configuration conf) {
+    String regionTableName = hRegionInfo.getTable().getNameAsString();
+    String flowRunTableName = conf.get(FlowRunTable.TABLE_NAME_CONF_NAME,
+        FlowRunTable.DEFAULT_TABLE_NAME);
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("regionTableName=" + regionTableName);
+    }
+    if (flowRunTableName.equalsIgnoreCase(regionTableName)) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug(" table is the flow run table!! " + flowRunTableName);
+      }
+      return true;
+    }
+    return false;
+  }
 }
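How a coprocessor typically consumes such a check: compute it once in start() and bail out of each hook early for other tables, which per the diffstat above is the shape this patch gives FlowRunCoprocessor. A sketch against the HBase 1.x observer API; the hook body is illustrative:

import java.io.IOException;

import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStorageUtils;

public class GuardedCoprocessorSketch extends BaseRegionObserver {
  private boolean isFlowRunRegion = false;

  @Override
  public void start(CoprocessorEnvironment e) throws IOException {
    if (e instanceof RegionCoprocessorEnvironment) {
      RegionCoprocessorEnvironment env = (RegionCoprocessorEnvironment) e;
      // Decide once, at coprocessor load time, whether this region belongs
      // to the flow run table, using the check added in this commit.
      isFlowRunRegion = TimelineStorageUtils.isFlowRunTable(
          env.getRegion().getRegionInfo(), env.getConfiguration());
    }
  }

  @Override
  public void prePut(ObserverContext<RegionCoprocessorEnvironment> e, Put put,
      WALEdit edit, Durability durability) throws IOException {
    if (!isFlowRunRegion) {
      return; // some other table: leave the Put untouched
    }
    // ... flow-run-specific handling (tags, aggregation markers) goes here ...
  }
}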

http://git-wip-us.apache.org/repos/asf/hadoop/blob/084a334a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/entity/EntityTable.java
index 3e3e3ab..b194f07 100644
--- 

[04/50] [abbrv] hadoop git commit: YARN-4392. ApplicationCreatedEvent event time resets after RM restart/failover. Contributed by Naganarasimha G R and Xuan Gong

2016-05-04 Thread gtcarrera9
YARN-4392. ApplicationCreatedEvent event time resets after RM
restart/failover. Contributed by Naganarasimha G R and Xuan Gong

(cherry picked from commit 4546c7582b6762c18ba150d80a8976eb51a8290c)

Conflicts:

hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java

hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8d798de3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8d798de3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8d798de3

Branch: refs/heads/YARN-2928
Commit: 8d798de3f0e84f14dc9554b0b65f6906679b4128
Parents: 9ceeb2a
Author: Xuan 
Authored: Mon Dec 7 12:24:55 2015 -0800
Committer: Li Lu 
Committed: Wed May 4 16:17:07 2016 -0700

--
 .../apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java | 6 --
 .../hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java| 4 +++-
 .../server/resourcemanager/rmapp/TestRMAppTransitions.java | 2 --
 3 files changed, 7 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8d798de3/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
index 9979a59..b15278c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
@@ -335,7 +335,8 @@ public class TimelineClientImpl extends TimelineClient {
   @Override
   protected void serviceStart() throws Exception {
     timelineWriter = createTimelineWriter(
-        configuration, authUgi, client, resURI);
+        configuration, authUgi, client, constructResURI(getConfig(),
+            getTimelineServiceAddress(), false));
   }
 
   protected TimelineWriter createTimelineWriter(Configuration conf,
@@ -613,7 +614,8 @@ public class TimelineClientImpl extends TimelineClient {
 
   @Override
   public String toString() {
-    return super.toString() + " with timeline server " + resURI
+    return super.toString() + " with timeline server "
+        + constructResURI(getConfig(), getTimelineServiceAddress(), false)
         + " and writer " + timelineWriter;
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8d798de3/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
index ca2771a..a7df5a4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
@@ -897,6 +897,8 @@ public class RMAppImpl implements RMApp, Recoverable {
     //TODO recover collector address.
     //this.collectorAddr = appState.getCollectorAddr();
 
+    // send the ATS create Event
+    sendATSCreateEvent(this, this.startTime);
     RMAppAttemptImpl preAttempt = null;
     for (ApplicationAttemptId attemptId :
         new TreeSet<>(appState.attempts.keySet())) {
@@ -1863,7 +1865,7 @@ public class RMAppImpl implements RMApp, Recoverable {
     }
     return amNodeLabelExpression;
   }
-  
+
   @Override
   public CallerContext getCallerContext() {
     return callerContext;
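The substance of the fix is visible in the RMAppImpl hunk: on recovery the app republishes its creation event with the persisted startTime instead of a fresh clock reading. Reduced to its essence; RecoveredApp and publishCreatedEvent are hypothetical stand-ins, not YARN classes:

import java.util.function.BiConsumer;

final class RecoveredApp {
  private final String appId;
  private final long startTime; // persisted when the app was first submitted

  RecoveredApp(String appId, long startTime) {
    this.appId = appId;
    this.startTime = startTime;
  }

  void recover(BiConsumer<String, Long> publishCreatedEvent) {
    // Publishing "now" here would move the creation time on every RM
    // restart; replaying the stored time keeps it stable across failover.
    publishCreatedEvent.accept(appId, startTime);
  }
}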


[30/50] [abbrv] hadoop git commit: MAPREDUCE-6546. reconcile the two versions of the timeline service performance tests. (Sangjin Lee via Naganarasimha G R)

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/cd444089/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java
--
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java
index 92e0b14..4c42bd3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/test/MapredTestDriver.java
@@ -18,6 +18,19 @@
 
 package org.apache.hadoop.test;
 
+import org.apache.hadoop.fs.DFSCIOTest;
+import org.apache.hadoop.fs.DistributedFSCheck;
+import org.apache.hadoop.fs.JHLogAnalyzer;
+import org.apache.hadoop.fs.TestDFSIO;
+import org.apache.hadoop.fs.TestFileSystem;
+import org.apache.hadoop.fs.loadGenerator.DataGenerator;
+import org.apache.hadoop.fs.loadGenerator.LoadGenerator;
+import org.apache.hadoop.fs.loadGenerator.LoadGeneratorMR;
+import org.apache.hadoop.fs.loadGenerator.StructureGenerator;
+import org.apache.hadoop.fs.slive.SliveTest;
+import org.apache.hadoop.hdfs.NNBench;
+import org.apache.hadoop.hdfs.NNBenchWithoutMR;
+import org.apache.hadoop.io.FileBench;
 import org.apache.hadoop.io.TestSequenceFile;
 import org.apache.hadoop.mapred.BigMapOutput;
 import org.apache.hadoop.mapred.GenericMRLoadGenerator;
@@ -28,28 +41,13 @@ import org.apache.hadoop.mapred.TestMapRed;
 import org.apache.hadoop.mapred.TestSequenceFileInputFormat;
 import org.apache.hadoop.mapred.TestTextInputFormat;
 import org.apache.hadoop.mapred.ThreadedMapBenchmark;
-import org.apache.hadoop.mapreduce.TimelineServicePerformance;
-import org.apache.hadoop.mapred.TimelineServicePerformanceV2;
 import org.apache.hadoop.mapreduce.FailJob;
 import org.apache.hadoop.mapreduce.LargeSorter;
 import org.apache.hadoop.mapreduce.MiniHadoopClusterManager;
 import org.apache.hadoop.mapreduce.SleepJob;
+import org.apache.hadoop.mapreduce.TimelineServicePerformance;
 import org.apache.hadoop.util.ProgramDriver;
 
-import org.apache.hadoop.hdfs.NNBench;
-import org.apache.hadoop.hdfs.NNBenchWithoutMR;
-import org.apache.hadoop.fs.TestFileSystem;
-import org.apache.hadoop.fs.TestDFSIO;
-import org.apache.hadoop.fs.DFSCIOTest;
-import org.apache.hadoop.fs.DistributedFSCheck;
-import org.apache.hadoop.io.FileBench;
-import org.apache.hadoop.fs.JHLogAnalyzer;
-import org.apache.hadoop.fs.loadGenerator.DataGenerator;
-import org.apache.hadoop.fs.loadGenerator.LoadGenerator;
-import org.apache.hadoop.fs.loadGenerator.LoadGeneratorMR;
-import org.apache.hadoop.fs.loadGenerator.StructureGenerator;
-import org.apache.hadoop.fs.slive.SliveTest;
-
 /**
  * Driver for Map-reduce tests.
  *
@@ -93,9 +91,8 @@ public class MapredTestDriver {
       pgd.addClass("sleep", SleepJob.class,
                    "A job that sleeps at each map and reduce task.");
       pgd.addClass("timelineperformance", TimelineServicePerformance.class,
-                   "A job that launches mappers to test timlineserver performance.");
-      pgd.addClass("timelineperformance", TimelineServicePerformanceV2.class,
-          "A job that launch mappers to test timline service v.2 performance.");
+                   "A job that launches mappers to test timline service " +
+                   "performance.");
       pgd.addClass("nnbench", NNBench.class, 
           "A benchmark that stresses the namenode w/ MR.");
       pgd.addClass("nnbenchWithoutMR", NNBenchWithoutMR.class,




[28/50] [abbrv] hadoop git commit: YARN-3367. Replace starting a separate thread for post entity with event loop in TimelineClient (Naganarasimha G R via sjlee)

2016-05-04 Thread gtcarrera9
YARN-3367. Replace starting a separate thread for post entity with event loop 
in TimelineClient (Naganarasimha G R via sjlee)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f59c7433
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f59c7433
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f59c7433

Branch: refs/heads/YARN-2928
Commit: f59c74338313305752678ea140fa3195d9fee07e
Parents: ee803e1
Author: Sangjin Lee 
Authored: Tue Feb 9 09:07:37 2016 -0800
Committer: Li Lu 
Committed: Wed May 4 16:33:32 2016 -0700

--
 .../jobhistory/JobHistoryEventHandler.java  |  81 +
 .../mapred/JobHistoryFileReplayMapper.java  |   8 +-
 .../hadoop/mapred/TimelineEntityConverter.java  |  12 +-
 .../timelineservice/TimelineEntities.java   |  17 +-
 .../hadoop/yarn/conf/YarnConfiguration.java |   6 +
 .../distributedshell/ApplicationMaster.java |  78 +
 .../api/async/impl/AMRMClientAsyncImpl.java |  26 +-
 .../hadoop/yarn/client/api/TimelineClient.java  |   8 +-
 .../client/api/impl/TimelineClientImpl.java | 285 ++---
 .../src/main/resources/yarn-default.xml |   7 +
 .../api/impl/TestTimelineClientV2Impl.java  | 304 +++
 .../nodemanager/NodeStatusUpdaterImpl.java  |   4 +-
 12 files changed, 619 insertions(+), 217 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f59c7433/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
--
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
index 853c506..639f56e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
@@ -28,10 +28,7 @@ import java.util.Map;
 import java.util.Timer;
 import java.util.TimerTask;
 import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
 import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -85,7 +82,6 @@ import org.codehaus.jackson.node.ObjectNode;
 import com.google.common.annotations.VisibleForTesting;
 import com.sun.jersey.api.client.ClientHandlerException;
 
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
 /**
  * The job history events get routed to this class. This class writes the Job
  * history events to the DFS directly into a staging dir and then moved to a
@@ -141,10 +137,6 @@ public class JobHistoryEventHandler extends AbstractService
 
   private boolean timelineServiceV2Enabled = false;
 
-  // For posting entities in new timeline service in a non-blocking way
-  // TODO YARN-3367 replace with event loop in TimelineClient.
-  private ExecutorService threadPool;
-
   private static String MAPREDUCE_JOB_ENTITY_TYPE = "MAPREDUCE_JOB";
   private static String MAPREDUCE_TASK_ENTITY_TYPE = "MAPREDUCE_TASK";
   private static final String MAPREDUCE_TASK_ATTEMPT_ENTITY_TYPE =
@@ -284,10 +276,6 @@ public class JobHistoryEventHandler extends AbstractService
           YarnConfiguration.timelineServiceV2Enabled(conf);
       LOG.info("Timeline service is enabled; version: " +
           YarnConfiguration.getTimelineServiceVersion(conf));
-      if (timelineServiceV2Enabled) {
-        // initialize the thread pool for v.2 timeline service
-        threadPool = createThreadPool();
-      }
     } else {
       LOG.info("Timeline service is not enabled");
     }
@@ -461,35 +449,9 @@ public class JobHistoryEventHandler extends AbstractService
     if (timelineClient != null) {
       timelineClient.stop();
     }
-    if (threadPool != null) {
-      shutdownAndAwaitTermination();
-    }
     LOG.info("Stopped JobHistoryEventHandler. super.stop()");
     super.serviceStop();
   }
-
-  // TODO remove threadPool after adding non-blocking call in TimelineClient
-  private ExecutorService createThreadPool() {
-    return Executors.newCachedThreadPool(
-        new ThreadFactoryBuilder().setNameFormat("TimelineService #%d")
-            .build());
-  
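The direction of YARN-3367 — callers hand entities to one internal event loop rather than each spinning pool threads — can be sketched independently of TimelineClient. All names below are mine; the real implementation lives in TimelineClientImpl per the diffstat above:

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

final class PostingEventLoop<T> implements AutoCloseable {
  private final BlockingQueue<T> queue = new LinkedBlockingQueue<>();
  private final Thread worker;
  private volatile boolean stopped = false;

  PostingEventLoop(Consumer<T> poster) {
    worker = new Thread(() -> {
      try {
        // Keep draining until asked to stop AND the backlog is empty.
        while (!stopped || !queue.isEmpty()) {
          T item = queue.poll(100, TimeUnit.MILLISECONDS);
          if (item != null) {
            poster.accept(item); // the only thread that talks to the server
          }
        }
      } catch (InterruptedException ie) {
        Thread.currentThread().interrupt();
      }
    }, "TimelinePostingLoop");
    worker.start();
  }

  void post(T item) {
    queue.add(item); // callers return immediately; the loop does the I/O
  }

  @Override
  public void close() throws InterruptedException {
    stopped = true; // lets the loop drain what's queued, then exit
    worker.join();
  }
}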

[18/50] [abbrv] hadoop git commit: YARN-4224. Support fetching entities by UID and change the REST interface to conform to current REST APIs' in YARN. (Varun Saxena via gtcarrera9)

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/97d5cf32/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
index a054ee5..7a70de8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
@@ -18,17 +18,12 @@
 
 package org.apache.hadoop.yarn.server.timelineservice.reader;
 
-import java.io.IOException;
 import java.text.DateFormat;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Collections;
 import java.util.Date;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Locale;
-import java.util.Map;
 import java.util.Set;
 import java.util.TimeZone;
 
@@ -51,10 +46,11 @@ import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Time;
+import org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity;
+import org.apache.hadoop.yarn.api.records.timelineservice.FlowRunEntity;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineAbout;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
-import org.apache.hadoop.yarn.server.timeline.GenericObjectMapper;
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
 import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
@@ -63,7 +59,7 @@ import org.apache.hadoop.yarn.webapp.NotFoundException;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.inject.Singleton;
 
-/** REST end point for Timeline Reader */
+/** REST end point for Timeline Reader. */
 @Private
 @Unstable
 @Singleton
@@ -167,117 +163,6 @@ public class TimelineReaderWebServices {
     }
   }
 
-  private static Set<String> parseValuesStr(String str, String delimiter) {
-    if (str == null || str.isEmpty()) {
-      return null;
-    }
-    Set<String> strSet = new HashSet<String>();
-    String[] strs = str.split(delimiter);
-    for (String aStr : strs) {
-      strSet.add(aStr.trim());
-    }
-    return strSet;
-  }
-
-  @SuppressWarnings("unchecked")
-  private static <T> void parseKeyValues(Map<String, T> map, String str,
-      String pairsDelim, String keyValuesDelim, boolean stringValue,
-      boolean multipleValues) {
-    String[] pairs = str.split(pairsDelim);
-    for (String pair : pairs) {
-      if (pair == null || pair.trim().isEmpty()) {
-        continue;
-      }
-      String[] pairStrs = pair.split(keyValuesDelim);
-      if (pairStrs.length < 2) {
-        continue;
-      }
-      if (!stringValue) {
-        try {
-          Object value =
-              GenericObjectMapper.OBJECT_READER.readValue(pairStrs[1].trim());
-          map.put(pairStrs[0].trim(), (T) value);
-        } catch (IOException e) {
-          map.put(pairStrs[0].trim(), (T) pairStrs[1].trim());
-        }
-      } else {
-        String key = pairStrs[0].trim();
-        if (multipleValues) {
-          Set<String> values = new HashSet<String>();
-          for (int i = 1; i < pairStrs.length; i++) {
-            values.add(pairStrs[i].trim());
-          }
-          map.put(key, (T) values);
-        } else {
-          map.put(key, (T) pairStrs[1].trim());
-        }
-      }
-    }
-  }
-
-  private static Map<String, Set<String>> parseKeyStrValuesStr(String str,
-      String pairsDelim, String keyValuesDelim) {
-    if (str == null) {
-      return null;
-    }
-    Map<String, Set<String>> map = new HashMap<String, Set<String>>();
-    parseKeyValues(map, str,pairsDelim, keyValuesDelim, true, true);
-    return map;
-  }
-
-  private static Map<String, String> parseKeyStrValueStr(String str,
-      String pairsDelim, String keyValDelim) {
-    if (str == null) {
-      return null;
-    }
-    Map<String, String> map = new HashMap<String, String>();
-    parseKeyValues(map, str, pairsDelim, keyValDelim, true, false);
-    return map;
-  }
-
-  private static Map 
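These helpers appear to be relocated rather than deleted (a TimelineReaderWebServicesUtils shows up elsewhere in this patch series). Their core idea, parsing a delimited key-value query fragment, in a standalone form with hard-coded delimiters:

import java.util.HashMap;
import java.util.Map;

public final class KeyValueParseSketch {
  // Parses "k1:v1,k2:v2" into a map, the shape parseKeyStrValueStr handles;
  // the delimiters are parameters there, hard-coded here for brevity.
  static Map<String, String> parse(String str) {
    Map<String, String> map = new HashMap<>();
    if (str == null) {
      return map;
    }
    for (String pair : str.split(",")) {
      String[] kv = pair.split(":");
      if (kv.length < 2) {
        continue; // skip malformed pairs, as the original does
      }
      map.put(kv[0].trim(), kv[1].trim());
    }
    return map;
  }

  public static void main(String[] args) {
    // prints both pairs; HashMap iteration order is unspecified
    System.out.println(parse("config1:value1,config2:value2"));
  }
}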

[10/50] [abbrv] hadoop git commit: YARN-3995. Some of the NM events are not getting published due race condition when AM container finishes in NM (Naganarasimha G R via sjlee)

2016-05-04 Thread gtcarrera9
YARN-3995. Some of the NM events are not getting published due race condition 
when AM container finishes in NM (Naganarasimha G R via sjlee)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ed5f7db9
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ed5f7db9
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ed5f7db9

Branch: refs/heads/YARN-2928
Commit: ed5f7db9cfe66f9eb5a70d1b1551c239436c0f23
Parents: 4a6388f
Author: Sangjin Lee 
Authored: Mon Jan 11 10:09:34 2016 -0800
Committer: Li Lu 
Committed: Wed May 4 16:22:08 2016 -0700

--
 .../hadoop/yarn/conf/YarnConfiguration.java |  5 
 .../src/main/resources/yarn-default.xml |  7 ++
 .../PerNodeTimelineCollectorsAuxService.java| 25 +---
 ...TestPerNodeTimelineCollectorsAuxService.java | 11 +
 4 files changed, 35 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ed5f7db9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index ed81eaa..8ee417b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -1899,6 +1899,11 @@ public class YarnConfiguration extends Configuration {
   public static final int
       DEFAULT_TIMELINE_SERVICE_WRITER_FLUSH_INTERVAL_SECONDS = 60;
 
+  public static final String ATS_APP_COLLECTOR_LINGER_PERIOD_IN_MS =
+      TIMELINE_SERVICE_PREFIX + "app-collector.linger-period.ms";
+
+  public static final int DEFAULT_ATS_APP_COLLECTOR_LINGER_PERIOD_IN_MS = 1000;
+
   // mark app-history related configs @Private as application history is going
   // to be integrated into the timeline service
   @Private

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ed5f7db9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index e4b562e..1867e98 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -2182,6 +2182,13 @@
     <value>60</value>
   </property>
 
+  <property>
+    <description>Time period till which the application collector will be alive
+     in NM, after the application master container finishes.</description>
+    <name>yarn.timeline-service.app-collector.linger-period.ms</name>
+    <value>1000</value>
+  </property>
+
   <property>
 
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ed5f7db9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/PerNodeTimelineCollectorsAuxService.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/PerNodeTimelineCollectorsAuxService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/PerNodeTimelineCollectorsAuxService.java
index 0319e34..b738530 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/PerNodeTimelineCollectorsAuxService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/PerNodeTimelineCollectorsAuxService.java
@@ -19,6 +19,9 @@
 package org.apache.hadoop.yarn.server.timelineservice.collector;
 
 import java.nio.ByteBuffer;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -54,6 +57,8 @@ public class PerNodeTimelineCollectorsAuxService extends AuxiliaryService {
   private static final int SHUTDOWN_HOOK_PRIORITY = 30;
 
   private final 
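The new imports set up deferred collector removal: rather than tearing the app's collector down the instant the AM container finishes, removal is scheduled after the linger period, so late-arriving events still find a live collector. The pattern in isolation; method and field names are illustrative:

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

final class LingeringRemovalSketch {
  private final ScheduledExecutorService scheduler =
      Executors.newSingleThreadScheduledExecutor();
  // stand-in for DEFAULT_ATS_APP_COLLECTOR_LINGER_PERIOD_IN_MS
  private final long lingerPeriodMs = 1000;

  // Called when the AM container finishes: defer the teardown instead of
  // removing the collector immediately.
  void onApplicationMasterFinished(String appId, Runnable removeCollector) {
    scheduler.schedule(removeCollector, lingerPeriodMs, TimeUnit.MILLISECONDS);
  }
}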

[24/50] [abbrv] hadoop git commit: YARN-4409. Fix javadoc and checkstyle issues in timelineservice code (Varun Saxena via sjlee)

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ee803e16/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java
index d991b42..f9eb5b4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnFamily.java
@@ -24,7 +24,8 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
 /**
  * Represents the flow run table column families.
  */
-public enum FlowActivityColumnFamily implements ColumnFamily<FlowActivityTable> {
+public enum FlowActivityColumnFamily
+    implements ColumnFamily<FlowActivityTable> {
 
   /**
    * Info column family houses known columns, specifically ones included in
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ee803e16/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java
index 21ddcc2..a5933da 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java
@@ -31,12 +31,13 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimelineStor
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.TypedBufferedMutator;
 
 /**
- * Identifies partially qualified columns for the {@link FlowActivityTable}
+ * Identifies partially qualified columns for the {@link FlowActivityTable}.
  */
-public enum FlowActivityColumnPrefix implements ColumnPrefix<FlowActivityTable> {
+public enum FlowActivityColumnPrefix
+    implements ColumnPrefix<FlowActivityTable> {
 
   /**
-   * To store run ids of the flows
+   * To store run ids of the flows.
    */
   RUN_ID(FlowActivityColumnFamily.INFO, "r", null);
 
@@ -162,8 +163,8 @@ public enum FlowActivityColumnPrefix implements ColumnPrefix<FlowActivityTable>
    * org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnPrefix
    * #readResultsWithTimestamps(org.apache.hadoop.hbase.client.Result)
    */
-  public <T> NavigableMap<String, NavigableMap<Long, T>> readResultsWithTimestamps(
-      Result result) throws IOException {
+  public <T> NavigableMap<String, NavigableMap<Long, T>>
+      readResultsWithTimestamps(Result result) throws IOException {
     return column.readResultsWithTimestamps(result, columnPrefixBytes);
   }
 
@@ -179,8 +180,8 @@ public enum FlowActivityColumnPrefix implements ColumnPrefix<FlowActivityTable>
   public static final FlowActivityColumnPrefix columnFor(String columnPrefix) {
 
     // Match column based on value, assume column family matches.
-    for (FlowActivityColumnPrefix flowActivityColPrefix : FlowActivityColumnPrefix
-        .values()) {
+    for (FlowActivityColumnPrefix flowActivityColPrefix :
+        FlowActivityColumnPrefix.values()) {
       // Find a match based only on name.
       if (flowActivityColPrefix.getColumnPrefix().equals(columnPrefix)) {
         return flowActivityColPrefix;
@@ -209,8 +210,8 @@ public enum FlowActivityColumnPrefix implements ColumnPrefix<FlowActivityTable>
     // TODO: needs unit test to confirm and need to update javadoc to explain
     // null prefix case.
 
-    for (FlowActivityColumnPrefix flowActivityColumnPrefix : FlowActivityColumnPrefix
-        .values()) {
+    for (FlowActivityColumnPrefix flowActivityColumnPrefix :
+        FlowActivityColumnPrefix.values()) {
       // Find a match based column family and on name.
       if 
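columnFor above is a linear scan over the enum constants, matching on the stored prefix string. The pattern on its own, with made-up prefixes:

enum ColumnPrefixSketch {
  RUN_ID("r"), METRIC("m"); // prefix strings are made up

  private final String prefix;

  ColumnPrefixSketch(String prefix) {
    this.prefix = prefix;
  }

  String getColumnPrefix() {
    return prefix;
  }

  // Linear scan, mirroring columnFor above: fine for small enums, and it
  // keeps the mapping beside the constants instead of in a separate map.
  static ColumnPrefixSketch columnFor(String columnPrefix) {
    for (ColumnPrefixSketch p : values()) {
      if (p.getColumnPrefix().equals(columnPrefix)) {
        return p;
      }
    }
    return null; // no match
  }
}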

[13/50] [abbrv] hadoop git commit: Rebase to latest trunk

2016-05-04 Thread gtcarrera9
Rebase to latest trunk


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2d1fc87e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2d1fc87e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2d1fc87e

Branch: refs/heads/YARN-2928
Commit: 2d1fc87e1e6894eefb48f8456fdc58ce32f60a45
Parents: 6173732
Author: Li Lu 
Authored: Wed Jan 20 01:05:52 2016 -0800
Committer: Li Lu 
Committed: Wed May 4 16:24:08 2016 -0700

--
 hadoop-tools/hadoop-aws/pom.xml   |  8 
 .../yarn/client/api/impl/TimelineClientImpl.java  |  4 ++--
 .../metrics/TimelineServiceV1Publisher.java   | 17 +++--
 .../metrics/TimelineServiceV2Publisher.java   | 18 +++---
 .../TestResourceTrackerService.java   | 12 
 .../hadoop-yarn-server-timelineservice/pom.xml|  6 --
 6 files changed, 48 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2d1fc87e/hadoop-tools/hadoop-aws/pom.xml
--
diff --git a/hadoop-tools/hadoop-aws/pom.xml b/hadoop-tools/hadoop-aws/pom.xml
index f422846..db98f94 100644
--- a/hadoop-tools/hadoop-aws/pom.xml
+++ b/hadoop-tools/hadoop-aws/pom.xml
@@ -155,6 +155,14 @@
       <artifactId>hadoop-yarn-server-tests</artifactId>
       <scope>test</scope>
       <type>test-jar</type>
+      <exclusions>
+        <exclusion>
+          <groupId>joda-time</groupId>
+          <artifactId>joda-time</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2d1fc87e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
index b15278c..f36167d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
@@ -335,8 +335,8 @@ public class TimelineClientImpl extends TimelineClient {
   @Override
   protected void serviceStart() throws Exception {
     timelineWriter = createTimelineWriter(
-        configuration, authUgi, client, constructResURI(getConfig(),
-            getTimelineServiceAddress(), false));
+        configuration, authUgi, client,
+        constructResURI(getConfig(), timelineServiceAddress, false));
   }
 
   protected TimelineWriter createTimelineWriter(Configuration conf,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2d1fc87e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java
index d858a6b..ddc8a16 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV1Publisher.java
@@ -87,6 +87,17 @@ public class TimelineServiceV1Publisher extends AbstractSystemMetricsPublisher {
         app.getAmNodeLabelExpression());
     entityInfo.put(ApplicationMetricsConstants.APP_NODE_LABEL_EXPRESSION,
         app.getAppNodeLabelExpression());
+    if (app.getCallerContext() != null) {
+      if (app.getCallerContext().getContext() != null) {
+        entityInfo.put(ApplicationMetricsConstants.YARN_APP_CALLER_CONTEXT,
+            app.getCallerContext().getContext());
+      }
+      if (app.getCallerContext().getSignature() != null) {
+        entityInfo.put(ApplicationMetricsConstants.YARN_APP_CALLER_SIGNATURE,
+            app.getCallerContext().getSignature());
+      }
+    }
+
     entity.setOtherInfo(entityInfo);
     TimelineEvent tEvent = new TimelineEvent();
 

[27/50] [abbrv] hadoop git commit: YARN-4409. Fix javadoc and checkstyle issues in timelineservice code (Varun Saxena via sjlee)

2016-05-04 Thread gtcarrera9
YARN-4409. Fix javadoc and checkstyle issues in timelineservice code (Varun 
Saxena via sjlee)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ee803e16
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ee803e16
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ee803e16

Branch: refs/heads/YARN-2928
Commit: ee803e16e13bd01520e111dd448123c76d778225
Parents: 675612d
Author: Sangjin Lee 
Authored: Mon Feb 8 12:17:43 2016 -0800
Committer: Li Lu 
Committed: Wed May 4 16:28:05 2016 -0700

--
 .../jobhistory/JobHistoryEventHandler.java  | 170 ++-
 .../hadoop/mapreduce/jobhistory/TestEvents.java |   2 +-
 .../mapreduce/util/JobHistoryEventUtils.java|   7 +-
 .../hadoop/mapred/TimelineEntityConverter.java  |   6 +-
 .../hadoop/mapreduce/JobHistoryFileParser.java  |   3 +
 .../ApplicationAttemptEntity.java   |   9 +-
 .../timelineservice/ApplicationEntity.java  |   9 +-
 .../records/timelineservice/ClusterEntity.java  |   6 +-
 .../timelineservice/ContainerEntity.java|   9 +-
 .../records/timelineservice/FlowRunEntity.java  |   9 +-
 .../HierarchicalTimelineEntity.java |   8 +-
 .../records/timelineservice/QueueEntity.java|   6 +-
 .../timelineservice/TimelineEntities.java   |  11 +-
 .../records/timelineservice/TimelineEntity.java | 106 ++--
 .../timelineservice/TimelineEntityType.java |  71 +---
 .../records/timelineservice/TimelineEvent.java  |  30 ++--
 .../records/timelineservice/TimelineMetric.java |  39 +++--
 .../timelineservice/TimelineWriteResponse.java  |  59 +++
 .../api/records/timelineservice/UserEntity.java |   6 +-
 .../records/timelineservice/package-info.java   |   8 +-
 .../hadoop/yarn/conf/YarnConfiguration.java |   9 +-
 .../hadoop/yarn/client/api/TimelineClient.java  |  13 +-
 .../client/api/impl/TimelineClientImpl.java |  16 +-
 .../yarn/util/timeline/TimelineUtils.java   |  21 ++-
 .../yarn/server/nodemanager/NodeManager.java|   2 +-
 .../collectormanager/NMCollectorService.java|   9 +-
 .../collectormanager/package-info.java  |  28 +++
 .../timelineservice/NMTimelineEvent.java|   4 +
 .../timelineservice/NMTimelineEventType.java|   3 +
 .../timelineservice/NMTimelinePublisher.java|  14 +-
 .../timelineservice/package-info.java   |  29 
 .../resourcemanager/RMActiveServiceContext.java |   8 +-
 .../server/resourcemanager/RMContextImpl.java   |   4 +-
 .../metrics/AbstractSystemMetricsPublisher.java |  20 ++-
 .../metrics/NoOpSystemMetricPublisher.java  |   2 +-
 .../metrics/SystemMetricsPublisher.java |   3 +
 .../metrics/TimelineServiceV1Publisher.java |   8 +-
 .../metrics/TimelineServiceV2Publisher.java |   7 +-
 .../resourcemanager/metrics/package-info.java   |  28 +++
 .../rmapp/RMAppCollectorUpdateEvent.java|   3 +
 .../server/resourcemanager/rmapp/RMAppImpl.java |   5 +-
 .../RMTimelineCollectorManager.java |  33 ++--
 .../timelineservice/package-info.java   |  28 +++
 .../collector/AppLevelTimelineCollector.java|   3 +-
 .../collector/NodeTimelineCollectorManager.java |   8 +-
 .../PerNodeTimelineCollectorsAuxService.java|   2 +
 .../collector/TimelineCollector.java|   2 +
 .../collector/TimelineCollectorManager.java |  12 +-
 .../collector/TimelineCollectorWebService.java  |  70 +---
 .../timelineservice/collector/package-info.java |  29 
 .../reader/TimelineReaderManager.java   |  32 +++-
 .../reader/TimelineReaderServer.java|   2 +-
 .../reader/TimelineReaderWebServices.java   |  28 +--
 .../reader/TimelineReaderWebServicesUtils.java  |  50 +++---
 .../reader/TimelineUIDConverter.java|  10 +-
 .../reader/filter/TimelineFilterUtils.java  |   8 +-
 .../timelineservice/reader/package-info.java|   6 +
 .../storage/FileSystemTimelineReaderImpl.java   |  70 
 .../storage/FileSystemTimelineWriterImpl.java   |   7 +-
 .../storage/HBaseTimelineReaderImpl.java|   3 +
 .../storage/HBaseTimelineWriterImpl.java|  29 ++--
 .../storage/OfflineAggregationWriter.java   |  13 +-
 .../PhoenixOfflineAggregationWriterImpl.java|  27 +--
 .../storage/TimelineAggregationTrack.java   |   2 +-
 .../timelineservice/storage/TimelineReader.java |   6 +-
 .../storage/TimelineSchemaCreator.java  |   4 +-
 .../timelineservice/storage/TimelineWriter.java |  15 +-
 .../storage/application/ApplicationColumn.java  |   4 +-
 .../application/ApplicationColumnPrefix.java|   8 +-
 .../storage/application/ApplicationRowKey.java  |  33 ++--
 .../storage/application/ApplicationTable.java   |  16 +-
 .../storage/application/package-info.java   |   4 +
 .../storage/apptoflow/AppToFlowColumn.java  |   6 +-
 

[12/50] [abbrv] hadoop git commit: YARN-4200. Refactor reader classes in storage to nest under hbase specific package name. Contributed by Li Lu.

2016-05-04 Thread gtcarrera9
YARN-4200. Refactor reader classes in storage to nest under hbase
specific package name. Contributed by Li Lu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/61737325
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/61737325
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/61737325

Branch: refs/heads/YARN-2928
Commit: 61737325ccaaea48e20d2f288c0c9d1b43a8bccb
Parents: ed5f7db
Author: Li Lu 
Authored: Mon Jan 11 18:05:36 2016 -0800
Committer: Li Lu 
Committed: Wed May 4 16:22:12 2016 -0700

--
 .../storage/ApplicationEntityReader.java| 382 --
 .../storage/FlowActivityEntityReader.java   | 163 --
 .../storage/FlowRunEntityReader.java| 225 -
 .../storage/GenericEntityReader.java| 496 --
 .../storage/HBaseTimelineReaderImpl.java|   2 +
 .../storage/TimelineEntityReader.java   | 274 --
 .../storage/TimelineEntityReaderFactory.java| 100 
 .../storage/reader/ApplicationEntityReader.java | 383 ++
 .../reader/FlowActivityEntityReader.java| 164 ++
 .../storage/reader/FlowRunEntityReader.java | 226 +
 .../storage/reader/GenericEntityReader.java | 497 +++
 .../storage/reader/TimelineEntityReader.java| 274 ++
 .../reader/TimelineEntityReaderFactory.java | 100 
 .../storage/reader/package-info.java|  23 +
 14 files changed, 1669 insertions(+), 1640 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/61737325/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/ApplicationEntityReader.java
--
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/ApplicationEntityReader.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/ApplicationEntityReader.java
deleted file mode 100644
index d812a6c..000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/ApplicationEntityReader.java
+++ /dev/null
@@ -1,382 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.yarn.server.timelineservice.storage;
-
-import java.io.IOException;
-import java.util.EnumSet;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.filter.BinaryComparator;
-import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-import org.apache.hadoop.hbase.filter.FamilyFilter;
-import org.apache.hadoop.hbase.filter.FilterList;
-import org.apache.hadoop.hbase.filter.FilterList.Operator;
-import org.apache.hadoop.hbase.filter.PageFilter;
-import org.apache.hadoop.hbase.filter.QualifierFilter;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
-import 
org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterList;
-import 
org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineFilterUtils;
-import 
org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
-import 
org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumn;
-import 

[14/50] [abbrv] hadoop git commit: YARN-4622. TestDistributedShell fails for v2 test cases after modifications for 1.5. (Naganarasimha G R via Varun Saxena)

2016-05-04 Thread gtcarrera9
YARN-4622. TestDistributedShell fails for v2 test cases after modifications for 
1.5. (Naganarasimha G R via Varun Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2269e78a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2269e78a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2269e78a

Branch: refs/heads/YARN-2928
Commit: 2269e78adbd3969e84b83a541d2cd84498c46f47
Parents: 2d1fc87
Author: Varun Saxena 
Authored: Fri Jan 22 01:41:25 2016 +0530
Committer: Li Lu 
Committed: Wed May 4 16:24:10 2016 -0700

--
 .../hadoop/yarn/client/api/impl/TimelineClientImpl.java  | 8 +---
 1 file changed, 5 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2269e78a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
index f36167d..4db3b71 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
@@ -70,6 +70,7 @@ import org.apache.hadoop.yarn.exceptions.YarnException;
 import 
org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
 import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
 import org.codehaus.jackson.map.ObjectMapper;
+
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
@@ -334,9 +335,10 @@ public class TimelineClientImpl extends TimelineClient {
 
   @Override
   protected void serviceStart() throws Exception {
-timelineWriter = createTimelineWriter(
-configuration, authUgi, client,
-constructResURI(getConfig(), timelineServiceAddress, false));
+if (!timelineServiceV2) {
+  timelineWriter = createTimelineWriter(configuration, authUgi, client,
+  constructResURI(getConfig(), timelineServiceAddress, false));
+}
   }
 
   protected TimelineWriter createTimelineWriter(Configuration conf,
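
The guard above hinges on a timelineServiceV2 flag held by TimelineClientImpl. A minimal sketch of how such a flag can be derived, assuming the version property defaults to 1.0f when unset (TIMELINE_SERVICE_VERSION is the float property that other commits in this series set to 2.0f):

    // Sketch, not the patch itself: decide whether this client talks to
    // timeline service v2 and must therefore skip the v1 REST writer.
    boolean timelineServiceV2 =
        conf.getFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 1.0f) >= 2.0f;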





[34/50] [abbrv] hadoop git commit: YARN-4712. CPU Usage Metric is not captured properly in YARN-2928. (Naganarasimha G R via Varun Saxena)

2016-05-04 Thread gtcarrera9
YARN-4712. CPU Usage Metric is not captured properly in YARN-2928. 
(Naganarasimha G R via Varun Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b1e7f16c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b1e7f16c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b1e7f16c

Branch: refs/heads/YARN-2928
Commit: b1e7f16cdcd3f6b3f650f44dd7187ee03cfe440d
Parents: 3157d32
Author: Varun Saxena 
Authored: Fri Mar 18 23:19:18 2016 +0530
Committer: Li Lu 
Committed: Wed May 4 16:35:18 2016 -0700

--
 .../monitor/ContainersMonitorImpl.java  |   9 +-
 .../timelineservice/NMTimelinePublisher.java|  21 ++-
 .../TestNMTimelinePublisher.java| 157 +++
 3 files changed, 171 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1e7f16c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorImpl.java
index da4aac2..e0bd35e 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorImpl.java
@@ -22,7 +22,6 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.concurrent.ConcurrentHashMap;
 
-import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
@@ -32,11 +31,10 @@ import 
org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
 import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.api.records.ResourceUtilization;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.AsyncDispatcher;
 import org.apache.hadoop.yarn.event.Dispatcher;
-import org.apache.hadoop.yarn.api.records.ResourceUtilization;
-import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor;
 import org.apache.hadoop.yarn.server.nodemanager.Context;
 import 
org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
@@ -47,6 +45,7 @@ import 
org.apache.hadoop.yarn.server.nodemanager.util.NodeManagerHardwareUtils;
 import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 import org.apache.hadoop.yarn.util.ResourceCalculatorProcessTree;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 
 public class ContainersMonitorImpl extends AbstractService implements
@@ -567,8 +566,8 @@ public class ContainersMonitorImpl extends AbstractService 
implements
 NMTimelinePublisher nmMetricsPublisher =
 container.getNMTimelinePublisher();
 if (nmMetricsPublisher != null) {
-  nmMetricsPublisher.reportContainerResourceUsage(container, pId,
-  currentPmemUsage, cpuUsageTotalCoresPercentage);
+  nmMetricsPublisher.reportContainerResourceUsage(container,
+  currentPmemUsage, cpuUsagePercentPerCore);
 }
   } catch (Exception e) {
 // Log the exception and proceed to the next container.
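
The one-line swap above is the heart of the fix: cpuUsageTotalCoresPercentage normalizes usage over every core on the node, while cpuUsagePercentPerCore treats a single core as 100%, and the published CPU metric evidently expects the latter. A worked example with illustrative numbers:

    // Illustrative only: a container burning two full cores on an 8-core node.
    float cpuUsagePercentPerCore = 200f;        // 100% == one full core
    int nodeCores = 8;
    float cpuUsageTotalCoresPercentage =
        cpuUsagePercentPerCore / nodeCores;     // 25% of the whole node
    // Reporting 25% where 200% is expected under-reports CPU usage by a
    // factor of the node's core count.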

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1e7f16c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/timelineservice/NMTimelinePublisher.java
 

[03/50] [abbrv] hadoop git commit: YARN-4450. TestTimelineAuthenticationFilter and TestYarnConfigurationFields fail. Contributed by Sangjin Lee.

2016-05-04 Thread gtcarrera9
YARN-4450. TestTimelineAuthenticationFilter and
TestYarnConfigurationFields fail. Contributed by Sangjin Lee.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9ceeb2a7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9ceeb2a7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9ceeb2a7

Branch: refs/heads/YARN-2928
Commit: 9ceeb2a7af267fb6b5c767c746faf50485d006fd
Parents: ef71c1f
Author: Li Lu 
Authored: Mon Dec 14 10:48:39 2015 -0800
Committer: Li Lu 
Committed: Wed May 4 16:01:41 2016 -0700

--
 .../yarn/conf/TestYarnConfigurationFields.java|  3 +++
 .../yarn/client/api/impl/TimelineClientImpl.java  | 18 ++
 2 files changed, 13 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ceeb2a7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfigurationFields.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfigurationFields.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfigurationFields.java
index c92a276..c8e1868 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfigurationFields.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfigurationFields.java
@@ -94,6 +94,9 @@ public class TestYarnConfigurationFields extends 
TestConfigurationFieldsBase {
 
 // Ignore all YARN Application Timeline Service (version 1) properties
 configurationPrefixToSkipCompare.add("yarn.timeline-service.");
+// skip deprecated RM_SYSTEM_METRICS_PUBLISHER_ENABLED
+configurationPropsToSkipCompare
+.add(YarnConfiguration.RM_SYSTEM_METRICS_PUBLISHER_ENABLED);
 
 // Used as Java command line properties, not XML
 configurationPrefixToSkipCompare.add("yarn.app.container");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ceeb2a7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
index f92e6a7..9979a59 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineClientImpl.java
@@ -111,7 +111,6 @@ public class TimelineClientImpl extends TimelineClient {
   private ConnectionConfigurator connConfigurator;
   private DelegationTokenAuthenticator authenticator;
   private DelegationTokenAuthenticatedURL.Token token;
-  private URI resURI;
   private UserGroupInformation authUgi;
   private String doAsUser;
   private Configuration configuration;
@@ -552,8 +551,8 @@ public class TimelineClientImpl extends TimelineClient {
   @Override
   public Long run() throws Exception {
 // If the timeline DT to renew is different than cached, replace 
it.
-// Token to set every time for retry, because when exception 
happens,
-// DelegationTokenAuthenticatedURL will reset it to null;
+// Token to set every time for retry, because when exception
+// happens, DelegationTokenAuthenticatedURL will reset it to null;
 if (!timelineDT.equals(token.getDelegationToken())) {
   token.setDelegationToken((Token) timelineDT);
 }
@@ -562,7 +561,8 @@ public class TimelineClientImpl extends TimelineClient {
 connConfigurator);
 // If the token service address is not available, fall back to use
 // the configured service address.
-final URI serviceURI = isTokenServiceAddrEmpty ? resURI
+final URI serviceURI = isTokenServiceAddrEmpty ?
+constructResURI(getConfig(), getTimelineServiceAddress(), 
false)
 : new URI(scheme, null, address.getHostName(),
 address.getPort(), RESOURCE_URI_STR_V1, null, null);
 return authUrl
@@ -588,9 +588,10 @@ public class TimelineClientImpl extends TimelineClient {
 
   @Override
   public Void run() throws Exception {
-// If the timeline DT to 

[16/50] [abbrv] hadoop git commit: YARN-4238. createdTime and modifiedTime are not reported while publishing entities to ATSv2. (Varun Saxena via Naganarasimha G R)

2016-05-04 Thread gtcarrera9
YARN-4238. createdTime and modifiedTime are not reported while publishing 
entities to ATSv2. (Varun Saxena via Naganarasimha G R)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fd369a54
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fd369a54
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fd369a54

Branch: refs/heads/YARN-2928
Commit: fd369a54e3cc08b812746f7c339826b0c4c7977f
Parents: 2269e78
Author: Naganarasimha 
Authored: Wed Jan 27 11:59:40 2016 +0530
Committer: Li Lu 
Committed: Wed May 4 16:24:12 2016 -0700

--
 .../jobhistory/JobHistoryEventHandler.java  | 47 ++
 .../records/timelineservice/TimelineEntity.java | 19 
 .../TestTimelineServiceRecords.java |  1 -
 .../monitor/ContainersMonitorImpl.java  |  6 +-
 .../timelineservice/NMTimelinePublisher.java|  6 +-
 .../TestSystemMetricsPublisherForV2.java| 34 +--
 .../reader/TimelineReaderManager.java   |  7 +-
 .../reader/TimelineReaderWebServices.java   | 51 ---
 .../storage/FileSystemTimelineReaderImpl.java   | 21 +
 .../storage/HBaseTimelineReaderImpl.java|  8 +-
 .../storage/HBaseTimelineWriterImpl.java|  4 -
 .../PhoenixOfflineAggregationWriterImpl.java|  9 +-
 .../timelineservice/storage/TimelineReader.java | 32 +++
 .../storage/application/ApplicationColumn.java  |  5 --
 .../storage/application/ApplicationTable.java   | 11 +--
 .../storage/entity/EntityColumn.java|  5 --
 .../storage/entity/EntityTable.java | 11 +--
 .../storage/reader/ApplicationEntityReader.java | 23 +
 .../reader/FlowActivityEntityReader.java|  7 +-
 .../storage/reader/FlowRunEntityReader.java |  7 +-
 .../storage/reader/GenericEntityReader.java | 22 +
 .../storage/reader/TimelineEntityReader.java|  5 --
 .../reader/TimelineEntityReaderFactory.java | 27 +++---
 .../reader/TestTimelineReaderWebServices.java   | 48 +-
 .../TestFileSystemTimelineReaderImpl.java   | 92 +--
 .../TestFileSystemTimelineWriterImpl.java   |  3 +-
 .../storage/TestHBaseTimelineStorage.java   | 95 +++-
 ...TestPhoenixOfflineAggregationWriterImpl.java |  1 -
 .../storage/flow/TestFlowDataGenerator.java |  1 -
 .../flow/TestHBaseStorageFlowActivity.java  |  6 +-
 .../storage/flow/TestHBaseStorageFlowRun.java   |  9 +-
 31 files changed, 197 insertions(+), 426 deletions(-)
--
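
Reading the diffstat together with the test changes further down, the patch appears to drop modifiedTime from the v2 entity model and to set createdTime explicitly, only for events that actually create an entity, via a boolean threaded through the builder methods. A minimal sketch of that pattern (parameter names follow the JobHistoryEventHandler hunk below):

    // Set createdTime only when the event marks entity creation.
    if (setCreatedTime) {
      entity.setCreatedTime(timestamp);
    }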


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fd369a54/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
index d88588c..a10872a 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
@@ -1135,10 +1135,10 @@ public class JobHistoryEventHandler extends 
AbstractService
   // jobId, timestamp and entityType.
   private org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity 
   createJobEntity(HistoryEvent event, long timestamp, JobId jobId, 
-  String entityType) {
-
-org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity = 
-createBaseEntity(event, timestamp, entityType);
+  String entityType, boolean setCreatedTime) {
+
+org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity entity =
+createBaseEntity(event, timestamp, entityType, setCreatedTime);
 entity.setId(jobId.toString());
 return entity;
   }
@@ -1146,8 +1146,9 @@ public class JobHistoryEventHandler extends 
AbstractService
   // create BaseEntity from HistoryEvent with adding other info, like: 
   // timestamp and entityType.
   private org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity 
-  createBaseEntity(HistoryEvent event, long timestamp, String entityType) {
-org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent tEvent = 
+  createBaseEntity(HistoryEvent event, long timestamp, String entityType,
+  boolean setCreatedTime) {
+

[09/50] [abbrv] hadoop git commit: YARN-3586. RM to only get back addresses of Collectors that NM needs to know. (Junping Du via Varun Saxena).

2016-05-04 Thread gtcarrera9
YARN-3586. RM to only get back addresses of Collectors that NM needs to know.
(Junping Du via Varun Saxena).


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4a6388ff
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4a6388ff
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4a6388ff

Branch: refs/heads/YARN-2928
Commit: 4a6388ff7637b3c149574068e8956a871a8cd3e0
Parents: 94b7fb4
Author: Varun Saxena 
Authored: Tue Dec 22 20:58:54 2015 +0530
Committer: Li Lu 
Committed: Wed May 4 16:22:06 2016 -0700

--
 .../resourcemanager/ResourceTrackerService.java | 30 +++
 .../TestResourceTrackerService.java | 82 
 2 files changed, 97 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4a6388ff/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
index 1dbbeb5..fea8183 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
@@ -26,7 +26,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 
 import org.apache.commons.logging.Log;
@@ -525,16 +524,15 @@ public class ResourceTrackerService extends 
AbstractService implements
   nodeHeartBeatResponse.setSystemCredentialsForApps(systemCredentials);
 }
 
-List<ApplicationId> keepAliveApps =
-remoteNodeStatus.getKeepAliveApplications();
-if (timelineV2Enabled && keepAliveApps != null) {
+if (timelineV2Enabled) {
   // Return collectors' map that NM needs to know
-  // TODO we should optimize this to only include collector info that NM
-  // doesn't know yet.
-  setAppCollectorsMapToResponse(keepAliveApps, nodeHeartBeatResponse);
+  setAppCollectorsMapToResponse(rmNode.getRunningApps(),
+  nodeHeartBeatResponse);
 }
 
 // 4. Send status to RMNode, saving the latest response.
+List<ApplicationId> keepAliveApps =
+remoteNodeStatus.getKeepAliveApplications();
 RMNodeStatusEvent nodeStatusEvent =
 new RMNodeStatusEvent(nodeId, remoteNodeStatus, nodeHeartBeatResponse);
 if (request.getLogAggregationReportsForApps() != null
@@ -562,18 +560,20 @@ public class ResourceTrackerService extends 
AbstractService implements
   }
 
   private void setAppCollectorsMapToResponse(
-  List<ApplicationId> liveApps, NodeHeartbeatResponse response) {
+  List<ApplicationId> runningApps, NodeHeartbeatResponse response) {
 Map<ApplicationId, String> liveAppCollectorsMap = new
-ConcurrentHashMap<ApplicationId, String>();
+HashMap<ApplicationId, String>();
 Map<ApplicationId, RMApp> rmApps = rmContext.getRMApps();
-// Set collectors for all apps now.
-// TODO set collectors for only active apps running on NM (liveApps cannot 
be
-// used for this case)
-for (Map.Entry<ApplicationId, RMApp> rmApp : rmApps.entrySet()) {
-  ApplicationId appId = rmApp.getKey();
-  String appCollectorAddr = rmApp.getValue().getCollectorAddr();
+// Set collectors for all running apps on this node.
+for (ApplicationId appId : runningApps) {
+  String appCollectorAddr = rmApps.get(appId).getCollectorAddr();
   if (appCollectorAddr != null) {
 liveAppCollectorsMap.put(appId, appCollectorAddr);
+  } else {
+if (LOG.isDebugEnabled()) {
+  LOG.debug("Collector for applicaton: " + appId +
+  " hasn't registered yet!");
+}
   }
 }
 response.setAppCollectorsMap(liveAppCollectorsMap);
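
Net effect of the hunk above: the heartbeat response now carries collector addresses only for applications actually running on the reporting node, keyed off rmNode.getRunningApps() instead of the keep-alive list (which roughly tracks apps whose tokens the NM wants kept alive, not apps it is running). Condensed, with the diff noise removed:

    // Condensed from the patch above.
    Map<ApplicationId, String> collectors =
        new HashMap<ApplicationId, String>();
    for (ApplicationId appId : rmNode.getRunningApps()) {
      String addr = rmApps.get(appId).getCollectorAddr();
      if (addr != null) {                 // collector has registered
        collectors.put(appId, addr);
      }
    }
    response.setAppCollectorsMap(collectors);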

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4a6388ff/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceTrackerService.java
--
diff --git 

[08/50] [abbrv] hadoop git commit: YARN-4350. TestDistributedShell fails for V2 scenarios. (Naganarasimha G R via Varun Saxena)

2016-05-04 Thread gtcarrera9
YARN-4350. TestDistributedShell fails for V2 scenarios. (Naganarasimha G R via 
Varun Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/94b7fb47
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/94b7fb47
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/94b7fb47

Branch: refs/heads/YARN-2928
Commit: 94b7fb4707ed4e37ca6d9902aa358b5c8c83c66f
Parents: 89b4101
Author: Varun Saxena 
Authored: Sun Dec 20 02:14:54 2015 +0530
Committer: Li Lu 
Committed: Wed May 4 16:22:04 2016 -0700

--
 .../applications/distributedshell/TestDistributedShell.java | 2 ++
 .../java/org/apache/hadoop/yarn/server/MiniYARNCluster.java | 5 +++--
 .../java/org/apache/hadoop/yarn/server/TestMiniYarnCluster.java | 2 --
 3 files changed, 5 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/94b7fb47/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
index e35a32e..203a7fc 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
@@ -176,6 +176,8 @@ public class TestDistributedShell {
   conf.set(YarnConfiguration.TIMELINE_SERVICE_ENTITY_GROUP_PLUGIN_CLASSES,
   DistributedShellTimelinePlugin.class.getName());
 } else if (timelineVersion == 2.0f) {
+  // set version to 2
+  conf.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f);
   // disable v1 timeline server since we no longer have a server here
   // enable aux-service based timeline aggregators
   conf.set(YarnConfiguration.NM_AUX_SERVICES, TIMELINE_AUX_SERVICE_NAME);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/94b7fb47/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
index 641cef3..b8003c4 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.ha.HAServiceProtocol;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.net.ServerSocketUtil;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.service.CompositeService;
 import org.apache.hadoop.util.Shell;
@@ -770,8 +771,8 @@ public class MiniYARNCluster extends CompositeService {
   if (!useFixedPorts) {
 String hostname = MiniYARNCluster.getHostname();
 conf.set(YarnConfiguration.TIMELINE_SERVICE_ADDRESS, hostname + ":0");
-conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS, hostname
-+ ":0");
+conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
+hostname + ":" + ServerSocketUtil.getPort(9188, 10));
   }
   appHistoryServer.init(conf);
   super.serviceInit(conf);
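
The notable change is the webapp address: instead of handing the timeline webapp an ephemeral ":0" port, the mini cluster asks ServerSocketUtil (imported above) for a concrete free port, starting at 9188. Usage, as a sketch:

    // Returns 9188 if it is free, otherwise probes further candidates,
    // giving up (with an IOException) after 10 retries.
    int port = ServerSocketUtil.getPort(9188, 10);
    conf.set(YarnConfiguration.TIMELINE_SERVICE_WEBAPP_ADDRESS,
        hostname + ":" + port);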

http://git-wip-us.apache.org/repos/asf/hadoop/blob/94b7fb47/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestMiniYarnCluster.java
--
diff --git 

[15/50] [abbrv] hadoop git commit: YARN-4238. createdTime and modifiedTime are not reported while publishing entities to ATSv2. (Varun Saxena via Naganarasimha G R)

2016-05-04 Thread gtcarrera9
http://git-wip-us.apache.org/repos/asf/hadoop/blob/fd369a54/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
index e864d61..b7804e7 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
@@ -107,7 +107,6 @@ public class TestFileSystemTimelineReaderImpl {
 entity11.setId("id_1");
 entity11.setType("app");
 entity11.setCreatedTime(1425016502000L);
-entity11.setModifiedTime(1425016502050L);
 Map<String, Object> info1 = new HashMap<String, Object>();
 info1.put("info1", "val1");
 entity11.addInfo(info1);
@@ -136,7 +135,6 @@ public class TestFileSystemTimelineReaderImpl {
 TimelineEntity entity12 = new TimelineEntity();
 entity12.setId("id_1");
 entity12.setType("app");
-entity12.setModifiedTime(1425016503000L);
 configs.clear();
 configs.put("config_2", "23");
 configs.put("config_3", "abc");
@@ -166,7 +164,6 @@ public class TestFileSystemTimelineReaderImpl {
 entity2.setId("id_2");
 entity2.setType("app");
 entity2.setCreatedTime(1425016501050L);
-entity2.setModifiedTime(1425016502010L);
 Map<String, Object> info2 = new HashMap<String, Object>();
 info1.put("info2", 4);
 entity2.addInfo(info2);
@@ -203,7 +200,6 @@ public class TestFileSystemTimelineReaderImpl {
 entity3.setId("id_3");
 entity3.setType("app");
 entity3.setCreatedTime(1425016501050L);
-entity3.setModifiedTime(1425016502010L);
 Map<String, Object> info3 = new HashMap<String, Object>();
 info3.put("info2", 3.5);
 entity3.addInfo(info3);
@@ -239,7 +235,6 @@ public class TestFileSystemTimelineReaderImpl {
 entity4.setId("id_4");
 entity4.setType("app");
 entity4.setCreatedTime(1425016502050L);
-entity4.setModifiedTime(1425016503010L);
 TimelineEvent event44 = new TimelineEvent();
 event44.setId("event_4");
 event44.setTimestamp(1425016502003L);
@@ -252,7 +247,6 @@ public class TestFileSystemTimelineReaderImpl {
 entity5.setId("id_5");
 entity5.setType("app");
 entity5.setCreatedTime(1425016502050L);
-entity5.setModifiedTime(1425016503010L);
 writeEntityFile(entity5, appDir2);
   }
 
@@ -263,7 +257,7 @@ public class TestFileSystemTimelineReaderImpl {
   @Test
   public void testGetEntityDefaultView() throws Exception {
 // If no fields are specified, entity is returned with default view i.e.
-// only the id, created and modified time
+// only the id, type and created time.
 TimelineEntity result =
 reader.getEntity("user1", "cluster1", "flow1", 1L, "app1",
 "app", "id_1", null, null, null);
@@ -271,7 +265,6 @@ public class TestFileSystemTimelineReaderImpl {
 (new TimelineEntity.Identifier("app", "id_1")).toString(),
 result.getIdentifier().toString());
 Assert.assertEquals(1425016502000L, result.getCreatedTime());
-Assert.assertEquals(1425016503000L, result.getModifiedTime());
 Assert.assertEquals(0, result.getConfigs().size());
 Assert.assertEquals(0, result.getMetrics().size());
   }
@@ -286,7 +279,6 @@ public class TestFileSystemTimelineReaderImpl {
 (new TimelineEntity.Identifier("app", "id_1")).toString(),
 result.getIdentifier().toString());
 Assert.assertEquals(1425016502000L, result.getCreatedTime());
-Assert.assertEquals(1425016503000L, result.getModifiedTime());
 Assert.assertEquals(0, result.getConfigs().size());
 Assert.assertEquals(0, result.getMetrics().size());
   }
@@ -303,7 +295,6 @@ public class TestFileSystemTimelineReaderImpl {
 (new TimelineEntity.Identifier("app", "id_5")).toString(),
 result.getIdentifier().toString());
 Assert.assertEquals(1425016502050L, result.getCreatedTime());
-Assert.assertEquals(1425016503010L, result.getModifiedTime());
   }
 
   @Test
@@ -317,7 +308,6 @@ public class TestFileSystemTimelineReaderImpl {
 (new TimelineEntity.Identifier("app", "id_1")).toString(),
 result.getIdentifier().toString());
 Assert.assertEquals(1425016502000L, 

[01/50] [abbrv] hadoop git commit: YARN-4356. Ensure the timeline service v.2 is disabled cleanly and has no impact when it's turned off. Contributed by Sangjin Lee. [Forced Update!]

2016-05-04 Thread gtcarrera9
Repository: hadoop
Updated Branches:
  refs/heads/YARN-2928 bdd5d7dfa -> d2c4237fa (forced update)


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ef71c1fc/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/TestApplication.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/TestApplication.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/TestApplication.java
index 38b3172f..f31a98c 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/TestApplication.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/TestApplication.java
@@ -39,7 +39,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.api.records.ContainerState;
-import org.apache.hadoop.yarn.api.records.ContainerStatus;
 import org.apache.hadoop.yarn.api.records.Priority;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.event.DrainDispatcher;
@@ -535,7 +534,7 @@ public class TestApplication {
   this.appId = BuilderUtils.newApplicationId(timestamp, id);
 
   app = new ApplicationImpl(
-  dispatcher, this.user, null, null, 0, appId, null, context);
+  dispatcher, this.user, appId, null, context);
   containers = new ArrayList<Container>();
   for (int i = 0; i < numContainers; i++) {
 Container container = createMockedContainer(this.appId, i);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ef71c1fc/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServices.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServices.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServices.java
index e5ae1f8..c2dadd6 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServices.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServices.java
@@ -31,17 +31,14 @@ import javax.ws.rs.core.MediaType;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 
-import org.junit.Assert;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.util.NodeHealthScriptRunner;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.AsyncDispatcher;
 import org.apache.hadoop.yarn.server.nodemanager.Context;
@@ -63,6 +60,7 @@ import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
 import org.junit.AfterClass;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.w3c.dom.Document;
@@ -326,7 +324,7 @@ public class TestNMWebServices extends JerseyTestBase {
 final String filename = "logfile1";
 final String logMessage = "log message\n";
 nmContext.getApplications().put(appId, new ApplicationImpl(null, "user",
-null, null, 0, appId, null, nmContext));
+appId, null, nmContext));
 
 MockContainer container = new MockContainer(appAttemptId,
 new AsyncDispatcher(), new Configuration(), "user", appId, 1);


[07/50] [abbrv] hadoop git commit: YARN-4445. Unify the terms flowId and flowName in the timeline v2 codebase. Contributed by Zhan Zhang.

2016-05-04 Thread gtcarrera9
YARN-4445. Unify the terms flowId and flowName in the timeline v2 codebase.
Contributed by Zhan Zhang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/89b4101f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/89b4101f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/89b4101f

Branch: refs/heads/YARN-2928
Commit: 89b4101f2c8b1e9473a8352b0412c6ea83066caf
Parents: 4764f4a
Author: Li Lu 
Authored: Wed Dec 16 16:42:57 2015 -0800
Committer: Li Lu 
Committed: Wed May 4 16:17:12 2016 -0700

--
 .../mapred/TestMRTimelineEventHandling.java |  2 +-
 .../distributedshell/TestDistributedShell.java  |  2 +-
 .../yarn/util/timeline/TimelineUtils.java   |  2 +-
 .../TestSystemMetricsPublisherForV2.java|  2 +-
 .../collector/AppLevelTimelineCollector.java|  2 +-
 .../reader/TimelineReaderManager.java   |  8 +--
 .../reader/TimelineReaderWebServices.java   | 74 ++--
 .../storage/ApplicationEntityReader.java| 20 +++---
 .../storage/FileSystemTimelineReaderImpl.java   | 14 ++--
 .../storage/FlowActivityEntityReader.java   | 10 +--
 .../storage/FlowRunEntityReader.java| 16 ++---
 .../storage/GenericEntityReader.java| 22 +++---
 .../storage/HBaseTimelineReaderImpl.java|  8 +--
 .../storage/TimelineEntityReader.java   | 10 +--
 .../storage/TimelineEntityReaderFactory.java| 20 +++---
 .../timelineservice/storage/TimelineReader.java |  8 +--
 .../storage/application/ApplicationRowKey.java  | 38 +-
 .../storage/application/ApplicationTable.java   |  2 +-
 .../apptoflow/AppToFlowColumnFamily.java|  2 +-
 .../storage/apptoflow/AppToFlowTable.java   |  4 +-
 .../storage/entity/EntityRowKey.java| 38 +-
 .../storage/entity/EntityTable.java |  2 +-
 .../storage/flow/FlowActivityRowKey.java| 30 
 .../storage/flow/FlowActivityTable.java |  2 +-
 .../storage/flow/FlowRunRowKey.java | 30 
 .../storage/flow/FlowRunTable.java  |  2 +-
 .../reader/TestTimelineReaderWebServices.java   |  2 +-
 ...stTimelineReaderWebServicesHBaseStorage.java |  4 +-
 .../storage/TestHBaseTimelineStorage.java   |  4 +-
 .../flow/TestHBaseStorageFlowActivity.java  |  6 +-
 30 files changed, 193 insertions(+), 193 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/89b4101f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
index 1896c7b..f7283ae 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
@@ -245,7 +245,7 @@ public class TestMRTimelineEventHandling {
 Assert.assertTrue(tmpRootFolder.isDirectory());
 String basePath = tmpRoot + YarnConfiguration.DEFAULT_RM_CLUSTER_ID + "/" +
 UserGroupInformation.getCurrentUser().getShortUserName() +
-"/" + TimelineUtils.generateDefaultFlowIdBasedOnAppId(appId) +
+"/" + TimelineUtils.generateDefaultFlowNameBasedOnAppId(appId) +
 "/1/1/" + appId.toString();
 // for this test, we expect MAPREDUCE_JOB and MAPREDUCE_TASK dirs
 String outputDirJob = basePath + "/MAPREDUCE_JOB/";
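
The assertion above also documents the filesystem writer's directory layout, which the rename leaves keyed by flow name: cluster id / user / flow name / flow version / flow run id / app id, with the "/1/1/" segment being the default flow version and run id (a hedged reading; only the concatenation is visible in this excerpt). The default flow name itself comes from the renamed helper:

    // Renamed in this patch from generateDefaultFlowIdBasedOnAppId.
    String flowName = TimelineUtils.generateDefaultFlowNameBasedOnAppId(appId);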

http://git-wip-us.apache.org/repos/asf/hadoop/blob/89b4101f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
index bdf6e2b..e35a32e 100644
--- 

[19/50] [abbrv] hadoop git commit: YARN-4224. Support fetching entities by UID and change the REST interface to conform to current REST APIs in YARN. (Varun Saxena via gtcarrera9)

2016-05-04 Thread gtcarrera9
YARN-4224. Support fetching entities by UID and change the REST
interface to conform to current REST APIs in YARN. (Varun Saxena via
gtcarrera9)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/97d5cf32
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/97d5cf32
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/97d5cf32

Branch: refs/heads/YARN-2928
Commit: 97d5cf323128e9d6e3f832de891b90e1434fee7d
Parents: fd369a5
Author: Li Lu 
Authored: Wed Jan 27 14:04:09 2016 -0800
Committer: Li Lu 
Committed: Wed May 4 16:24:15 2016 -0700

--
 .../records/timelineservice/TimelineEntity.java |   13 +
 .../server/timelineservice/TimelineContext.java |  146 ++
 .../collector/TimelineCollectorContext.java |   86 +-
 .../server/timelineservice/package-info.java|   28 +
 .../reader/TimelineReaderContext.java   |   88 ++
 .../reader/TimelineReaderManager.java   |   82 +-
 .../reader/TimelineReaderUtils.java |  171 ++
 .../reader/TimelineReaderWebServices.java   | 1469 +++---
 .../reader/TimelineReaderWebServicesUtils.java  |  222 +++
 .../reader/TimelineUIDConverter.java|  245 +++
 .../timelineservice/storage/TimelineReader.java |   16 +-
 .../storage/reader/GenericEntityReader.java |7 +-
 .../reader/TestTimelineReaderUtils.java |   55 +
 .../reader/TestTimelineReaderWebServices.java   |   83 +-
 ...stTimelineReaderWebServicesHBaseStorage.java |  348 -
 .../reader/TestTimelineUIDConverter.java|   97 ++
 ...TestPhoenixOfflineAggregationWriterImpl.java |2 +-
 17 files changed, 2781 insertions(+), 377 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/97d5cf32/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
index dcf2473..a661f7a 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
@@ -504,6 +504,19 @@ public class TimelineEntity implements 
Comparable<TimelineEntity> {
 }
   }
 
+  /**
+   * Set UID in info which will be then used for query by UI.
+   * @param uidKey key for UID in info.
+   * @param uId UID to be set for the key.
+   */
+  public void setUID(String uidKey, String uId) {
+if (real == null) {
+  info.put(uidKey, uId);
+} else {
+  real.addInfo(uidKey, uId);
+}
+  }
+
   public boolean isValid() {
 return (getId() != null && getType() != null);
   }
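
setUID stores the UID in the entity's info map (or delegates to the wrapped entity) so that the web UI can later address an entity without re-deriving its full context. A simplified sketch of how such a UID might be assembled; the patch's TimelineUIDConverter also escapes the delimiter inside values, which this version ignores, and the "UID" info key is purely illustrative:

    // Join the context fields that uniquely locate an entity.
    String uid = Joiner.on('!').join(
        clusterId, userId, flowName, flowRunId, appId, entityType, entityId);
    entity.setUID("UID", uid);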

http://git-wip-us.apache.org/repos/asf/hadoop/blob/97d5cf32/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/TimelineContext.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/TimelineContext.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/TimelineContext.java
new file mode 100644
index 000..694b709
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/TimelineContext.java
@@ -0,0 +1,146 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package 

[02/50] [abbrv] hadoop git commit: YARN-4356. Ensure the timeline service v.2 is disabled cleanly and has no impact when it's turned off. Contributed by Sangjin Lee.

2016-05-04 Thread gtcarrera9
YARN-4356. Ensure the timeline service v.2 is disabled cleanly and has no
impact when it's turned off. Contributed by Sangjin Lee.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ef71c1fc
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ef71c1fc
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ef71c1fc

Branch: refs/heads/YARN-2928
Commit: ef71c1fc4c37b355e895cea23a868ba57555d9b5
Parents: e987c7e
Author: Li Lu 
Authored: Fri Dec 11 11:17:34 2015 -0800
Committer: Li Lu 
Committed: Wed May 4 16:01:41 2016 -0700

--
 .../jobhistory/JobHistoryEventHandler.java  |  63 
 .../hadoop/mapreduce/v2/app/MRAppMaster.java|  11 +-
 .../apache/hadoop/mapreduce/MRJobConfig.java|   5 -
 .../src/main/resources/mapred-default.xml   |   7 -
 .../mapred/TestMRTimelineEventHandling.java |   5 +-
 .../hadoop/mapreduce/v2/MiniMRYarnCluster.java  |   2 +-
 .../hadoop/yarn/conf/YarnConfiguration.java |  58 ++-
 .../distributedshell/ApplicationMaster.java | 153 ---
 .../applications/distributedshell/Client.java   |  16 --
 .../distributedshell/TestDistributedShell.java  |  10 +-
 .../hadoop/yarn/client/api/TimelineClient.java  |  18 ++-
 .../client/api/impl/TimelineClientImpl.java |   3 +
 .../src/main/resources/yarn-default.xml |   5 +-
 .../impl/pb/NodeHeartbeatRequestPBImpl.java |  10 +-
 .../impl/pb/NodeHeartbeatResponsePBImpl.java|  10 +-
 .../hadoop/yarn/server/nodemanager/Context.java |   3 +-
 .../yarn/server/nodemanager/NodeManager.java|  23 ++-
 .../nodemanager/NodeStatusUpdaterImpl.java  |  48 +++---
 .../collectormanager/NMCollectorService.java|  10 +-
 .../containermanager/ContainerManagerImpl.java  |  62 +---
 .../application/ApplicationImpl.java|  70 +++--
 .../monitor/ContainersMonitorImpl.java  |  11 +-
 .../timelineservice/NMTimelinePublisher.java|  49 +++---
 .../TestContainerManagerRecovery.java   |   9 +-
 .../application/TestApplication.java|   3 +-
 .../nodemanager/webapp/TestNMWebServices.java   |   8 +-
 .../ApplicationMasterService.java   |  11 +-
 .../server/resourcemanager/ClientRMService.java |  35 +++--
 .../server/resourcemanager/RMAppManager.java|   7 +-
 .../server/resourcemanager/ResourceManager.java |  33 ++--
 .../resourcemanager/ResourceTrackerService.java |  21 ++-
 .../resourcemanager/amlauncher/AMLauncher.java  |  15 +-
 .../metrics/TimelineServiceV2Publisher.java |   2 +-
 .../server/resourcemanager/rmapp/RMAppImpl.java |  16 +-
 .../resourcemanager/TestClientRMService.java|   3 +
 .../metrics/TestSystemMetricsPublisher.java |   2 +-
 .../TestSystemMetricsPublisherForV2.java|   1 +
 .../TestTimelineServiceClientIntegration.java   |  30 +++-
 .../PerNodeTimelineCollectorsAuxService.java|  15 +-
 .../reader/TimelineReaderServer.java|  14 +-
 ...TestPerNodeTimelineCollectorsAuxService.java |   9 +-
 .../reader/TestTimelineReaderServer.java|   3 +
 .../reader/TestTimelineReaderWebServices.java   |   2 +
 ...stTimelineReaderWebServicesHBaseStorage.java |   2 +
 44 files changed, 520 insertions(+), 373 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ef71c1fc/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
index b1c1a52..d88588c 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
@@ -19,9 +19,6 @@
 package org.apache.hadoop.mapreduce.jobhistory;
 
 import java.io.IOException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
 import java.util.Collections;
 import java.util.EnumSet;
 import java.util.HashMap;
@@ -31,7 +28,11 @@ import java.util.Map;
 import java.util.Timer;
 import java.util.TimerTask;
 import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
 import 

[05/50] [abbrv] hadoop git commit: YARN-4460. [Bug fix] RM fails to start when SMP is enabled. (Li Lu via Varun Saxena)

2016-05-04 Thread gtcarrera9
YARN-4460. [Bug fix] RM fails to start when SMP is enabled. (Li Lu via Varun 
Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4764f4ac
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4764f4ac
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4764f4ac

Branch: refs/heads/YARN-2928
Commit: 4764f4accc5c7fe02f892a78e979819fbf46f90c
Parents: 8d798de
Author: Varun Saxena 
Authored: Wed Dec 16 15:24:57 2015 +0530
Committer: Li Lu 
Committed: Wed May 4 16:17:09 2016 -0700

--
 .../resourcemanager/metrics/TimelineServiceV2Publisher.java | 9 +
 .../metrics/TestSystemMetricsPublisherForV2.java| 5 -
 2 files changed, 9 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4764f4ac/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV2Publisher.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV2Publisher.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV2Publisher.java
index b96114e..1954783 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV2Publisher.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TimelineServiceV2Publisher.java
@@ -25,6 +25,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -75,13 +76,13 @@ public class TimelineServiceV2Publisher extends 
AbstractSystemMetricsPublisher {
   }
 
   @Override
-  protected void serviceStart() throws Exception {
-super.serviceStart();
+  protected void serviceInit(Configuration conf) throws Exception {
+super.serviceInit(conf);
+getDispatcher().register(SystemMetricsEventType.class,
+new TimelineV2EventHandler());
 publishContainerMetrics = getConfig().getBoolean(
 YarnConfiguration.RM_PUBLISH_CONTAINER_METRICS_ENABLED,
 YarnConfiguration.DEFAULT_RM_PUBLISH_CONTAINER_METRICS_ENABLED);
-getDispatcher().register(SystemMetricsEventType.class,
-new TimelineV2EventHandler());
   }
 
   @VisibleForTesting
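
SMP in the title reads as the system metrics publisher. Moving the registration from serviceStart to serviceInit matters because of service lifecycle ordering: events can flow as soon as the dispatcher starts, and with the publisher enabled that evidently happened before the publisher's own serviceStart had registered a handler, failing RM startup. The rule the fix follows, restated as a sketch:

    // Wire up handlers during serviceInit, before any serviceStart in the
    // composite can push events through the dispatcher.
    @Override
    protected void serviceInit(Configuration conf) throws Exception {
      super.serviceInit(conf);
      getDispatcher().register(SystemMetricsEventType.class,
          new TimelineV2EventHandler());
    }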

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4764f4ac/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisherForV2.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisherForV2.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisherForV2.java
index baaa566..57258d5 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisherForV2.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/metrics/TestSystemMetricsPublisherForV2.java
@@ -156,7 +156,7 @@ public class TestSystemMetricsPublisherForV2 {
 try {
   Configuration conf = getTimelineV2Conf();
   conf.setBoolean(YarnConfiguration.RM_PUBLISH_CONTAINER_METRICS_ENABLED,
-  false);
+  YarnConfiguration.DEFAULT_RM_PUBLISH_CONTAINER_METRICS_ENABLED);
   metricsPublisher.init(conf);
   assertFalse(
   "Default configuration should not publish container Metrics from RM",
@@ -167,6 +167,9 @@ public class TestSystemMetricsPublisherForV2 {
   metricsPublisher = new 

hadoop git commit: YARN-4905. Improved "yarn logs" command-line to optionally show log metadata also. Contributed by Xuan Gong.

2016-05-04 Thread vinodkv
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 585299146 -> 8262ef831


YARN-4905. Improved "yarn logs" command-line to optionally show log metadata 
also. Contributed by Xuan Gong.

(cherry picked from commit 9e37fe3b7a3b5f0a193d228bb5e065f41acd2835)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8262ef83
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8262ef83
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8262ef83

Branch: refs/heads/branch-2
Commit: 8262ef8318bba41dff0451abe6383df12ddd82b2
Parents: 5852991
Author: Vinod Kumar Vavilapalli 
Authored: Wed May 4 14:16:03 2016 -0700
Committer: Vinod Kumar Vavilapalli 
Committed: Wed May 4 14:17:09 2016 -0700

--
 .../apache/hadoop/yarn/client/cli/LogsCLI.java  | 507 +++
 .../hadoop/yarn/client/cli/TestLogsCLI.java | 190 ++-
 .../logaggregation/AggregatedLogFormat.java |  20 +
 .../yarn/logaggregation/LogCLIHelpers.java  | 192 +--
 4 files changed, 654 insertions(+), 255 deletions(-)
--
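
Judging from the option names added in the LogsCLI diff below (-show_meta_info, -list_nodes), the new behavior is opt-in, presumably along these lines (semantics inferred from the names; output format is not shown in this excerpt):

    yarn logs -applicationId <application id> -show_meta_info
    yarn logs -applicationId <application id> -list_nodes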


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8262ef83/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
index 2c4fee6..487b694 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerReport;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.client.api.YarnClient;
@@ -78,59 +79,16 @@ public class LogsCLI extends Configured implements Tool {
   private static final String APP_OWNER_OPTION = "appOwner";
   private static final String AM_CONTAINER_OPTION = "am";
   private static final String CONTAINER_LOG_FILES = "logFiles";
+  private static final String SHOW_META_INFO = "show_meta_info";
+  private static final String LIST_NODES_OPTION = "list_nodes";
   public static final String HELP_CMD = "help";
 
   @Override
   public int run(String[] args) throws Exception {
 
-Options opts = new Options();
-opts.addOption(HELP_CMD, false, "Displays help for all commands.");
-Option appIdOpt =
-new Option(APPLICATION_ID_OPTION, true, "ApplicationId (required)");
-appIdOpt.setRequired(true);
-opts.addOption(appIdOpt);
-opts.addOption(CONTAINER_ID_OPTION, true, "ContainerId. "
-+ "By default, it will only print syslog if the application is runing."
-+ " Work with -logFiles to get other logs.");
-opts.addOption(NODE_ADDRESS_OPTION, true, "NodeAddress in the format "
-  + "nodename:port");
-opts.addOption(APP_OWNER_OPTION, true,
-  "AppOwner (assumed to be current user if not specified)");
-Option amOption = new Option(AM_CONTAINER_OPTION, true, 
-  "Prints the AM Container logs for this application. "
-  + "Specify comma-separated value to get logs for related AM Container. "
-  + "For example, If we specify -am 1,2, we will get the logs for "
-  + "the first AM Container as well as the second AM Container. "
-  + "To get logs for all AM Containers, use -am ALL. "
-  + "To get logs for the latest AM Container, use -am -1. "
-  + "By default, it will only print out syslog. Work with -logFiles "
-  + "to get other logs");
-amOption.setValueSeparator(',');
-amOption.setArgs(Option.UNLIMITED_VALUES);
-amOption.setArgName("AM Containers");
-opts.addOption(amOption);
-Option logFileOpt = new Option(CONTAINER_LOG_FILES, true,
-  "Work with -am/-containerId and specify comma-separated value "
-+ "to get specified container log files. Use \"ALL\" to fetch all the "
-+ "log files for the container.");
-logFileOpt.setValueSeparator(',');
-logFileOpt.setArgs(Option.UNLIMITED_VALUES);
-logFileOpt.setArgName("Log File Name");
-opts.addOption(logFileOpt);
-
-opts.getOption(APPLICATION_ID_OPTION).setArgName("Application ID");
-opts.getOption(CONTAINER_ID_OPTION).setArgName("Container ID");
-  

hadoop git commit: YARN-4905. Improved "yarn logs" command-line to optionally show log metadata also. Contributed by Xuan Gong.

2016-05-04 Thread vinodkv
Repository: hadoop
Updated Branches:
  refs/heads/trunk 7bd418e48 -> 9e37fe3b7


YARN-4905. Improved "yarn logs" command-line to optionally show log metadata 
also. Contributed by Xuan Gong.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9e37fe3b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9e37fe3b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9e37fe3b

Branch: refs/heads/trunk
Commit: 9e37fe3b7a3b5f0a193d228bb5e065f41acd2835
Parents: 7bd418e
Author: Vinod Kumar Vavilapalli 
Authored: Wed May 4 14:16:03 2016 -0700
Committer: Vinod Kumar Vavilapalli 
Committed: Wed May 4 14:16:03 2016 -0700

--
 .../apache/hadoop/yarn/client/cli/LogsCLI.java  | 507 +++
 .../hadoop/yarn/client/cli/TestLogsCLI.java | 190 ++-
 .../logaggregation/AggregatedLogFormat.java |  20 +
 .../yarn/logaggregation/LogCLIHelpers.java  | 192 +--
 4 files changed, 654 insertions(+), 255 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e37fe3b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
index 2c4fee6..487b694 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/LogsCLI.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerReport;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.client.api.YarnClient;
@@ -78,59 +79,16 @@ public class LogsCLI extends Configured implements Tool {
   private static final String APP_OWNER_OPTION = "appOwner";
   private static final String AM_CONTAINER_OPTION = "am";
   private static final String CONTAINER_LOG_FILES = "logFiles";
+  private static final String SHOW_META_INFO = "show_meta_info";
+  private static final String LIST_NODES_OPTION = "list_nodes";
   public static final String HELP_CMD = "help";
 
   @Override
   public int run(String[] args) throws Exception {
 
-Options opts = new Options();
-opts.addOption(HELP_CMD, false, "Displays help for all commands.");
-Option appIdOpt =
-new Option(APPLICATION_ID_OPTION, true, "ApplicationId (required)");
-appIdOpt.setRequired(true);
-opts.addOption(appIdOpt);
-opts.addOption(CONTAINER_ID_OPTION, true, "ContainerId. "
-+ "By default, it will only print syslog if the application is runing."
-+ " Work with -logFiles to get other logs.");
-opts.addOption(NODE_ADDRESS_OPTION, true, "NodeAddress in the format "
-  + "nodename:port");
-opts.addOption(APP_OWNER_OPTION, true,
-  "AppOwner (assumed to be current user if not specified)");
-Option amOption = new Option(AM_CONTAINER_OPTION, true, 
-  "Prints the AM Container logs for this application. "
-  + "Specify comma-separated value to get logs for related AM Container. "
-  + "For example, If we specify -am 1,2, we will get the logs for "
-  + "the first AM Container as well as the second AM Container. "
-  + "To get logs for all AM Containers, use -am ALL. "
-  + "To get logs for the latest AM Container, use -am -1. "
-  + "By default, it will only print out syslog. Work with -logFiles "
-  + "to get other logs");
-amOption.setValueSeparator(',');
-amOption.setArgs(Option.UNLIMITED_VALUES);
-amOption.setArgName("AM Containers");
-opts.addOption(amOption);
-Option logFileOpt = new Option(CONTAINER_LOG_FILES, true,
-  "Work with -am/-containerId and specify comma-separated value "
-+ "to get specified container log files. Use \"ALL\" to fetch all the "
-+ "log files for the container.");
-logFileOpt.setValueSeparator(',');
-logFileOpt.setArgs(Option.UNLIMITED_VALUES);
-logFileOpt.setArgName("Log File Name");
-opts.addOption(logFileOpt);
-
-opts.getOption(APPLICATION_ID_OPTION).setArgName("Application ID");
-opts.getOption(CONTAINER_ID_OPTION).setArgName("Container ID");
-opts.getOption(NODE_ADDRESS_OPTION).setArgName("Node Address");
-
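For readers following the YARN-4905 hunks above: the patch adds two boolean flags, show_meta_info and list_nodes, on top of Apache Commons CLI. A minimal sketch of how such flags are registered and queried with that library; the wiring below is illustrative only (the help strings and class name are invented), not the actual LogsCLI code:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;

public class LogsCliFlagsSketch {
  public static void main(String[] args) throws Exception {
    Options opts = new Options();
    // Boolean flags take no argument, hence `false` for hasArg.
    opts.addOption("show_meta_info", false,
        "Show log metadata such as log-file names and sizes (hypothetical help text).");
    opts.addOption("list_nodes", false,
        "Show the nodes with aggregated logs (hypothetical help text).");

    CommandLine cl = new GnuParser().parse(opts, args);
    if (cl.hasOption("show_meta_info")) {
      System.out.println("would print per-container log metadata here");
    }
    if (cl.hasOption("list_nodes")) {
      System.out.println("would print aggregated-log node list here");
    }
  }
}

Running it with -show_meta_info prints the first placeholder line; Commons CLI matches single-dash long options of this style, which is why the Hadoop CLIs use names like show_meta_info rather than POSIX-style short flags.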

hadoop git commit: HADOOP-13088. fix shellprofiles in hadoop-tools to allow replacement

2016-05-04 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/HADOOP-12930 fb320c95a -> e886e4d86


HADOOP-13088. fix shellprofiles in hadoop-tools to allow replacement


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e886e4d8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e886e4d8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e886e4d8

Branch: refs/heads/HADOOP-12930
Commit: e886e4d86d1502cd7cd272b782c57ff05f07000e
Parents: fb320c9
Author: Allen Wittenauer 
Authored: Wed May 4 12:44:47 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 12:44:47 2016 -0700

--
 .../main/shellprofile.d/hadoop-archive-logs.sh  | 14 ++---
 .../src/main/shellprofile.d/hadoop-archives.sh  | 33 +++-
 .../src/main/shellprofile.d/hadoop-distcp.sh| 33 +++-
 .../src/main/shellprofile.d/hadoop-extras.sh| 14 ++---
 4 files changed, 71 insertions(+), 23 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e886e4d8/hadoop-tools/hadoop-archive-logs/src/main/shellprofile.d/hadoop-archive-logs.sh
--
diff --git 
a/hadoop-tools/hadoop-archive-logs/src/main/shellprofile.d/hadoop-archive-logs.sh
 
b/hadoop-tools/hadoop-archive-logs/src/main/shellprofile.d/hadoop-archive-logs.sh
index d37411e..ae7b6c6 100755
--- 
a/hadoop-tools/hadoop-archive-logs/src/main/shellprofile.d/hadoop-archive-logs.sh
+++ 
b/hadoop-tools/hadoop-archive-logs/src/main/shellprofile.d/hadoop-archive-logs.sh
@@ -15,14 +15,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-if [[ "${HADOOP_SHELL_EXECNAME}" = mapred ]]; then
-  hadoop_add_subcommand "archive-logs" "combine aggregated logs into hadoop 
archives"
-fi
+if ! declare -f mapred_subcommand_archive-logs >/dev/null 2>/dev/null; then
+
+  if [[ "${HADOOP_SHELL_EXECNAME}" = mapred ]]; then
+hadoop_add_subcommand "archive-logs" "combine aggregated logs into hadoop archives"
+  fi
+
+  # this can't be indented otherwise shelldocs won't get it
 
 ## @description  archive-logs command for mapred
 ## @audience public
## @stability    stable
-## @replaceable  no
+## @replaceable  yes
 function mapred_subcommand_archive-logs
 {
   # shellcheck disable=SC2034
@@ -31,3 +35,5 @@ function mapred_subcommand_archive-logs
   hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
   HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
 }
+
+fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e886e4d8/hadoop-tools/hadoop-archives/src/main/shellprofile.d/hadoop-archives.sh
--
diff --git 
a/hadoop-tools/hadoop-archives/src/main/shellprofile.d/hadoop-archives.sh 
b/hadoop-tools/hadoop-archives/src/main/shellprofile.d/hadoop-archives.sh
index b85ff25..f74fe5b 100755
--- a/hadoop-tools/hadoop-archives/src/main/shellprofile.d/hadoop-archives.sh
+++ b/hadoop-tools/hadoop-archives/src/main/shellprofile.d/hadoop-archives.sh
@@ -15,15 +15,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop
-   || "${HADOOP_SHELL_EXECNAME}" = mapred ]]; then
-  hadoop_add_subcommand "archive" "create a Hadoop archive"
-fi
+if ! declare -f hadoop_subcommand_archive >/dev/null 2>/dev/null; then
+
+  if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop ]]; then
+hadoop_add_subcommand "archive" "create a Hadoop archive"
+  fi
+
+  # this can't be indented otherwise shelldocs won't get it
 
 ## @description  archive command for hadoop (and mapred)
 ## @audience public
## @stability    stable
-## @replaceable  no
+## @replaceable  yes
 function hadoop_subcommand_archive
 {
   # shellcheck disable=SC2034
@@ -31,11 +34,25 @@ function hadoop_subcommand_archive
   hadoop_add_to_classpath_tools hadoop-archives
 }
 
-## @description  archive-logs command for mapred (calls hadoop version)
+fi
+
+if ! declare -f mapred_subcommand_archive >/dev/null 2>/dev/null; then
+
+  if [[ "${HADOOP_SHELL_EXECNAME}" = mapred ]]; then
+hadoop_add_subcommand "archive" "create a Hadoop archive"
+  fi
+
+  # this can't be indented otherwise shelldocs won't get it
+
+## @description  archive command for mapred (calls hadoop version)
 ## @audience public
## @stability    stable
-## @replaceable  no
+## @replaceable  yes
 function mapred_subcommand_archive
 {
-  hadoop_subcommand_archive
+  # shellcheck disable=SC2034
+  HADOOP_CLASSNAME=org.apache.hadoop.tools.HadoopArchives
+  hadoop_add_to_classpath_tools hadoop-archives
 }
+
+fi


hadoop git commit: YARN-4984. LogAggregationService shouldn't swallow exceptions in handling createAppDir(), which causes a thread leak. (Junping Du via wangda)

2016-05-04 Thread wangda
Repository: hadoop
Updated Branches:
  refs/heads/trunk e61d43127 -> 7bd418e48


YARN-4984. LogAggregationService shouldn't swallow exceptions in handling 
createAppDir(), which causes a thread leak. (Junping Du via wangda)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7bd418e4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7bd418e4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7bd418e4

Branch: refs/heads/trunk
Commit: 7bd418e48c71590fc8026d69f9b8f8ad42f2aade
Parents: e61d431
Author: Wangda Tan 
Authored: Wed May 4 11:38:55 2016 -0700
Committer: Wangda Tan 
Committed: Wed May 4 11:38:55 2016 -0700

--
 .../logaggregation/LogAggregationService.java|  7 +++
 .../logaggregation/TestLogAggregationService.java| 11 ---
 2 files changed, 11 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7bd418e4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
index 2d6b900..d46f7a3 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
@@ -376,6 +376,9 @@ public class LogAggregationService extends AbstractService implements
   } else {
 appDirException = (YarnRuntimeException)e;
   }
+  appLogAggregators.remove(appId);
+  closeFileSystems(userUgi);
+  throw appDirException;
 }
 
 // TODO Get the user configuration for the list of containers that need log
@@ -393,10 +396,6 @@ public class LogAggregationService extends AbstractService implements
   }
 };
 this.threadPool.execute(aggregatorWrapper);
-
-if (appDirException != null) {
-  throw appDirException;
-}
   }
 
   protected void closeFileSystems(final UserGroupInformation userUgi) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7bd418e4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
index fec12ff..fa9a0b5 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
@@ -777,8 +777,8 @@ public class TestLogAggregationService extends BaseContainerManagerTest {
 
 dispatcher.await();
 ApplicationEvent expectedEvents[] = new ApplicationEvent[]{
-new ApplicationEvent(appId, 
-   ApplicationEventType.APPLICATION_LOG_HANDLING_FAILED)
+new ApplicationEvent(appId,
+ApplicationEventType.APPLICATION_LOG_HANDLING_FAILED)
 };
 checkEvents(appEventHandler, expectedEvents, false,
 "getType", "getApplicationID", "getDiagnostic");
@@ -794,10 +794,15 @@ public class TestLogAggregationService extends BaseContainerManagerTest {
 
 logAggregationService.stop();
 assertEquals(0, logAggregationService.getNumAggregators());
-verify(spyDelSrvc).delete(eq(user), any(Path.class),
+// local log dir shouldn't be deleted given log aggregation cannot
+// 

hadoop git commit: YARN-4984. LogAggregationService shouldn't swallow exceptions in handling createAppDir(), which causes a thread leak. (Junping Du via wangda)

2016-05-04 Thread wangda
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 1ffb0c43d -> 585299146


YARN-4984. LogAggregationService shouldn't swallow exceptions in handling 
createAppDir(), which causes a thread leak. (Junping Du via wangda)

(cherry picked from commit 7bd418e48c71590fc8026d69f9b8f8ad42f2aade)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/58529914
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/58529914
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/58529914

Branch: refs/heads/branch-2
Commit: 585299146aafcb4585bdecbe115a8dd7acc6e092
Parents: 1ffb0c4
Author: Wangda Tan 
Authored: Wed May 4 11:38:55 2016 -0700
Committer: Wangda Tan 
Committed: Wed May 4 11:39:25 2016 -0700

--
 .../logaggregation/LogAggregationService.java|  7 +++
 .../logaggregation/TestLogAggregationService.java| 11 ---
 2 files changed, 11 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/58529914/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
index 2d6b900..d46f7a3 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
@@ -376,6 +376,9 @@ public class LogAggregationService extends AbstractService implements
   } else {
 appDirException = (YarnRuntimeException)e;
   }
+  appLogAggregators.remove(appId);
+  closeFileSystems(userUgi);
+  throw appDirException;
 }
 
 // TODO Get the user configuration for the list of containers that need log
@@ -393,10 +396,6 @@ public class LogAggregationService extends AbstractService implements
   }
 };
 this.threadPool.execute(aggregatorWrapper);
-
-if (appDirException != null) {
-  throw appDirException;
-}
   }
 
   protected void closeFileSystems(final UserGroupInformation userUgi) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/58529914/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
index fec12ff..fa9a0b5 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
@@ -777,8 +777,8 @@ public class TestLogAggregationService extends BaseContainerManagerTest {
 
 dispatcher.await();
 ApplicationEvent expectedEvents[] = new ApplicationEvent[]{
-new ApplicationEvent(appId, 
-   ApplicationEventType.APPLICATION_LOG_HANDLING_FAILED)
+new ApplicationEvent(appId,
+ApplicationEventType.APPLICATION_LOG_HANDLING_FAILED)
 };
 checkEvents(appEventHandler, expectedEvents, false,
 "getType", "getApplicationID", "getDiagnostic");
@@ -794,10 +794,15 @@ public class TestLogAggregationService extends BaseContainerManagerTest {
 
 logAggregationService.stop();
 assertEquals(0, logAggregationService.getNumAggregators());
-verify(spyDelSrvc).delete(eq(user), any(Path.class),
+
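Taken together, the two YARN-4984 hunks move the cleanup and rethrow into the failure path, so a failed createAppDir() unregisters the aggregator and surfaces the error before the worker is ever submitted to the thread pool; nothing is left blocked forever. A minimal sketch of that fail-fast shape, using stand-in names rather than the real NodeManager classes:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

class AggregatorRegistrySketch {
  private final Map<String, Runnable> appLogAggregators = new ConcurrentHashMap<>();
  private final ExecutorService threadPool = Executors.newCachedThreadPool();

  void initApp(String appId, Runnable aggregator) {
    appLogAggregators.put(appId, aggregator);
    try {
      createAppDir(appId);              // may throw, e.g. on remote-FS permission errors
    } catch (RuntimeException e) {
      appLogAggregators.remove(appId);  // undo the registration...
      throw e;                          // ...and surface the failure immediately
    }
    threadPool.execute(aggregator);     // only reached when init succeeded
  }

  private void createAppDir(String appId) {
    // remote log-dir setup elided; a stand-in for the real createAppDir()
  }
}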

hadoop git commit: HADOOP-13087. env var doc update for dynamic commands

2016-05-04 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/HADOOP-12930 e4958f874 -> fb320c95a


HADOOP-13087. env var doc update for dynamic commands


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fb320c95
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fb320c95
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fb320c95

Branch: refs/heads/HADOOP-12930
Commit: fb320c95a1049cc742770464838d564559f8752b
Parents: e4958f8
Author: Allen Wittenauer 
Authored: Wed May 4 10:53:25 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 10:53:25 2016 -0700

--
 .../src/site/markdown/UnixShellGuide.md | 22 
 1 file changed, 22 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fb320c95/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
--
diff --git 
a/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md 
b/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
index 668a744..a459012 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/UnixShellGuide.md
@@ -154,3 +154,25 @@ function hdfs_subcommand_fetchdt
 ```
 
 ... will replace the existing `hdfs fetchdt` subcommand with a custom one.
+
+Some key environment variables related to Dynamic Subcommands:
+
+* HADOOP\_CLASSNAME
+
+This is the name of the Java class to execute.
+
+* HADOOP\_SHELL\_EXECNAME
+
+This is the name of the script that is being executed.  It will be one of hadoop, hdfs, mapred, or yarn.
+
+* HADOOP\_SUBCMD\_SECURESERVICE
+
+If this command should/will be executed as a secure daemon, set this to true.
+
+* HADOOP\_SUBCMD\_SECUREUSER
+
+If this command should/will be executed as a secure daemon, set the user name to be used.
+
+* HADOOP\_SUBCMD\_SUPPORTDAEMONIZATION
+
+If this command can be executed as a daemon, set this to true.


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



hadoop git commit: HADOOP-13089. hadoop distcp adds client opts twice when dynamic

2016-05-04 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/HADOOP-12930 d0ff3c336 -> e4958f874


HADOOP-13089. hadoop distcp adds client opts twice when dynamic


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e4958f87
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e4958f87
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e4958f87

Branch: refs/heads/HADOOP-12930
Commit: e4958f8749e6cac700b6a5770b3f684b6e21ca8e
Parents: d0ff3c3
Author: Allen Wittenauer 
Authored: Wed May 4 10:50:15 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 10:50:15 2016 -0700

--
 hadoop-common-project/hadoop-common/src/main/bin/hadoop | 8 
 1 file changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e4958f87/hadoop-common-project/hadoop-common/src/main/bin/hadoop
--
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop 
b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
index 7b18d22..3e514ff 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
@@ -161,6 +161,10 @@ function hadoopcmd_case
   fi
 ;;
   esac
+
+  # Always respect HADOOP_OPTS and HADOOP_CLIENT_OPTS
+  hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
+  HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
 }
 
 # This script runs the hadoop core commands.
@@ -203,10 +207,6 @@ if [[ ${HADOOP_SLAVE_MODE} = true ]]; then
   exit $?
 fi
 
-# Always respect HADOOP_OPTS and HADOOP_CLIENT_OPTS
-hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
-HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
-
 if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
   HADOOP_SECURE_USER="${HADOOP_SUBCMD_SECUREUSER}"
   hadoop_verify_secure_prereq


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



hadoop git commit: YARN-4920. ATS/NM should support a link to download/get the logs in text format. Contributed by Xuan Gong. (cherry picked from commit 3cf223166d452a0f58f92676837a9edb8ddc1139)

2016-05-04 Thread junping_du
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 d43d8a1bc -> 1ffb0c43d


YARN-4920. ATS/NM should support a link to download/get the logs in text format. 
Contributed by Xuan Gong.
(cherry picked from commit 3cf223166d452a0f58f92676837a9edb8ddc1139)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1ffb0c43
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1ffb0c43
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1ffb0c43

Branch: refs/heads/branch-2
Commit: 1ffb0c43d6de0e53372ace001b492c432d75a08a
Parents: d43d8a1
Author: Junping Du 
Authored: Wed May 4 09:40:13 2016 -0700
Committer: Junping Du 
Committed: Wed May 4 10:36:31 2016 -0700

--
 .../webapp/AHSWebServices.java  | 270 ++-
 ...pplicationHistoryManagerOnTimelineStore.java |  29 +-
 .../webapp/TestAHSWebServices.java  | 203 +-
 .../yarn/server/webapp/dao/ContainerInfo.java   |   6 +
 .../nodemanager/webapp/NMWebServices.java   |  22 +-
 .../nodemanager/webapp/TestNMWebServices.java   |  12 +-
 6 files changed, 525 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1ffb0c43/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
index e7a22bd..75dce07 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
@@ -18,6 +18,11 @@
 
 package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
 
+import java.io.DataInputStream;
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.Charset;
 import java.util.Collections;
 import java.util.Set;
 
@@ -28,13 +33,30 @@ import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
+import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.StreamingOutput;
+import javax.ws.rs.core.Response.ResponseBuilder;
+import javax.ws.rs.core.Response.Status;
 
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineAbout;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat;
+import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils;
+import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
 import org.apache.hadoop.yarn.server.webapp.WebServices;
 import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptInfo;
 import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptsInfo;
@@ -42,9 +64,10 @@ import org.apache.hadoop.yarn.server.webapp.dao.AppInfo;
 import org.apache.hadoop.yarn.server.webapp.dao.AppsInfo;
 import org.apache.hadoop.yarn.server.webapp.dao.ContainerInfo;
 import org.apache.hadoop.yarn.server.webapp.dao.ContainersInfo;
+import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
-
+import com.google.common.base.Joiner;
 import com.google.inject.Inject;
 import com.google.inject.Singleton;
 
@@ -52,9 +75,17 @@ import com.google.inject.Singleton;
 @Path("/ws/v1/applicationhistory")
 

hadoop git commit: YARN-4920. ATS/NM should support a link to download/get the logs in text format. Contributed by Xuan Gong.

2016-05-04 Thread junping_du
Repository: hadoop
Updated Branches:
  refs/heads/trunk af942585a -> e61d43127


YARN-4920. ATS/NM should support a link to download/get the logs in text format. 
Contributed by Xuan Gong.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e61d4312
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e61d4312
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e61d4312

Branch: refs/heads/trunk
Commit: e61d431275d7fe5641fe9da4903e285b10330fa0
Parents: af94258
Author: Junping Du 
Authored: Wed May 4 09:40:13 2016 -0700
Committer: Junping Du 
Committed: Wed May 4 10:35:49 2016 -0700

--
 .../webapp/AHSWebServices.java  | 270 ++-
 ...pplicationHistoryManagerOnTimelineStore.java |  29 +-
 .../webapp/TestAHSWebServices.java  | 203 +-
 .../yarn/server/webapp/dao/ContainerInfo.java   |   6 +
 .../nodemanager/webapp/NMWebServices.java   |  22 +-
 .../nodemanager/webapp/TestNMWebServices.java   |  12 +-
 6 files changed, 525 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e61d4312/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
index e7a22bd..75dce07 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
@@ -18,6 +18,11 @@
 
 package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
 
+import java.io.DataInputStream;
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.Charset;
 import java.util.Collections;
 import java.util.Set;
 
@@ -28,13 +33,30 @@ import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
+import javax.ws.rs.WebApplicationException;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.StreamingOutput;
+import javax.ws.rs.core.Response.ResponseBuilder;
+import javax.ws.rs.core.Response.Status;
 
+import org.apache.hadoop.classification.InterfaceAudience.Public;
+import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineAbout;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat;
+import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils;
+import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
 import org.apache.hadoop.yarn.server.webapp.WebServices;
 import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptInfo;
 import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptsInfo;
@@ -42,9 +64,10 @@ import org.apache.hadoop.yarn.server.webapp.dao.AppInfo;
 import org.apache.hadoop.yarn.server.webapp.dao.AppsInfo;
 import org.apache.hadoop.yarn.server.webapp.dao.ContainerInfo;
 import org.apache.hadoop.yarn.server.webapp.dao.ContainersInfo;
+import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
-
+import com.google.common.base.Joiner;
 import com.google.inject.Inject;
 import com.google.inject.Singleton;
 
@@ -52,9 +75,17 @@ import com.google.inject.Singleton;
 @Path("/ws/v1/applicationhistory")
 public class AHSWebServices extends WebServices {
 
+  private static 
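The new imports in the YARN-4920 hunk above (StreamingOutput, Response, ResponseBuilder) point at the standard JAX-RS technique for serving a large file without buffering it in memory. A hedged sketch of that technique; the resource path, log location, and class name are invented for illustration and are not Hadoop's actual web-service API:

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;

@Path("/logs")
public class LogStreamResourceSketch {
  @GET
  @Path("/{containerId}")
  public Response getLog(@PathParam("containerId") String containerId) {
    // Hypothetical on-disk location, for illustration only.
    final java.nio.file.Path logFile = Paths.get("/tmp/logs", containerId);
    StreamingOutput stream = new StreamingOutput() {
      @Override
      public void write(OutputStream out) throws IOException {
        try (InputStream in = Files.newInputStream(logFile)) {
          byte[] buf = new byte[64 * 1024];
          int n;
          while ((n = in.read(buf)) > 0) {
            out.write(buf, 0, n);   // copy in chunks; never hold the whole log
          }
        }
      }
    };
    return Response.ok(stream, "text/plain").build();
  }
}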

hadoop git commit: HADOOP-12469. distcp should not ignore the ignoreFailures option. Contributed by Mingliang Liu.

2016-05-04 Thread jing9
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 cb0a035bd -> d43d8a1bc


HADOOP-12469. distcp should not ignore the ignoreFailures option. Contributed 
by Mingliang Liu.

(cherry picked from commit af942585a108d70e0946f6dd4c465a54d068eabf)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d43d8a1b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d43d8a1b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d43d8a1b

Branch: refs/heads/branch-2
Commit: d43d8a1bcd904a3e8044a85136b857e633eeebb0
Parents: cb0a035
Author: Jing Zhao 
Authored: Wed May 4 10:23:04 2016 -0700
Committer: Jing Zhao 
Committed: Wed May 4 10:24:06 2016 -0700

--
 .../apache/hadoop/tools/mapred/CopyMapper.java  |  6 +-
 .../hadoop/tools/mapred/TestCopyMapper.java | 85 
 2 files changed, 89 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d43d8a1b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
index cca36df..6f88660 100644
--- 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
+++ 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
@@ -25,6 +25,7 @@ import java.io.OutputStream;
 import java.util.Arrays;
 import java.util.EnumSet;
 
+import org.apache.commons.lang.exception.ExceptionUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -40,6 +41,7 @@ import org.apache.hadoop.tools.DistCpConstants;
 import org.apache.hadoop.tools.DistCpOptionSwitch;
 import org.apache.hadoop.tools.DistCpOptions;
 import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
+import org.apache.hadoop.tools.mapred.RetriableFileCopyCommand.CopyReadException;
 import org.apache.hadoop.tools.util.DistCpUtils;
 import org.apache.hadoop.util.StringUtils;
 
@@ -313,8 +315,8 @@ public class CopyMapper extends Mapper
 LOG.error("Failure in copying " + sourceFileStatus.getPath() + " to " +
 target, exception);
 
-if (ignoreFailures && exception.getCause() instanceof
-RetriableFileCopyCommand.CopyReadException) {
+if (ignoreFailures &&
+ExceptionUtils.indexOfType(exception, CopyReadException.class) != -1) {
   incrementCounter(context, Counter.FAIL, 1);
   incrementCounter(context, Counter.BYTESFAILED, sourceFileStatus.getLen());
   context.write(null, new Text("FAIL: " + sourceFileStatus.getPath() + " - " +

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d43d8a1b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
 
b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
index c1ed914..3d333d9 100644
--- 
a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
+++ 
b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
@@ -392,6 +392,8 @@ public class TestCopyMapper {
   public void testIgnoreFailures() {
 doTestIgnoreFailures(true);
 doTestIgnoreFailures(false);
+doTestIgnoreFailuresDoubleWrapped(true);
+doTestIgnoreFailuresDoubleWrapped(false);
   }
 
   @Test(timeout=4)
@@ -800,6 +802,89 @@ public class TestCopyMapper {
 }
   }
 
+  /**
+   * This test covers the case where the CopyReadException is double-wrapped and
+   * the mapper should be able to ignore this nested read exception.
+   * @see #doTestIgnoreFailures
+   */
+  private void doTestIgnoreFailuresDoubleWrapped(final boolean ignoreFailures) {
+try {
+  deleteState();
+  createSourceData();
+
+  final UserGroupInformation tmpUser = UserGroupInformation
+  .createRemoteUser("guest");
+
+  final CopyMapper copyMapper = new CopyMapper();
+
+  final Mapper.Context context =
+  tmpUser.doAs(new PrivilegedAction<
+  Mapper.Context>() {
+@Override
+public Mapper.Context
+run() {
+  try {
+StubContext stubContext = new StubContext(

hadoop git commit: HADOOP-12469. distcp should not ignore the ignoreFailures option. Contributed by Mingliang Liu.

2016-05-04 Thread jing9
Repository: hadoop
Updated Branches:
  refs/heads/trunk f343d91ec -> af942585a


HADOOP-12469. distcp should not ignore the ignoreFailures option. Contributed 
by Mingliang Liu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/af942585
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/af942585
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/af942585

Branch: refs/heads/trunk
Commit: af942585a108d70e0946f6dd4c465a54d068eabf
Parents: f343d91
Author: Jing Zhao 
Authored: Wed May 4 10:23:04 2016 -0700
Committer: Jing Zhao 
Committed: Wed May 4 10:23:04 2016 -0700

--
 .../apache/hadoop/tools/mapred/CopyMapper.java  |  6 +-
 .../hadoop/tools/mapred/TestCopyMapper.java | 85 
 2 files changed, 89 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/af942585/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
index 09bcead..4db1d4e 100644
--- 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
+++ 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
@@ -22,6 +22,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.EnumSet;
 
+import org.apache.commons.lang.exception.ExceptionUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -36,6 +37,7 @@ import org.apache.hadoop.tools.DistCpConstants;
 import org.apache.hadoop.tools.DistCpOptionSwitch;
 import org.apache.hadoop.tools.DistCpOptions;
 import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
+import org.apache.hadoop.tools.mapred.RetriableFileCopyCommand.CopyReadException;
 import org.apache.hadoop.tools.util.DistCpUtils;
 import org.apache.hadoop.util.StringUtils;
 
@@ -251,8 +253,8 @@ public class CopyMapper extends Mapper
 LOG.error("Failure in copying " + sourceFileStatus.getPath() + " to " +
 target, exception);
 
-if (ignoreFailures && exception.getCause() instanceof
-RetriableFileCopyCommand.CopyReadException) {
+if (ignoreFailures &&
+ExceptionUtils.indexOfType(exception, CopyReadException.class) != -1) {
   incrementCounter(context, Counter.FAIL, 1);
   incrementCounter(context, Counter.BYTESFAILED, sourceFileStatus.getLen());
   context.write(null, new Text("FAIL: " + sourceFileStatus.getPath() + " - " +

http://git-wip-us.apache.org/repos/asf/hadoop/blob/af942585/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
--
diff --git 
a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
 
b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
index 4d0752f..866ad6e 100644
--- 
a/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
+++ 
b/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
@@ -392,6 +392,8 @@ public class TestCopyMapper {
   public void testIgnoreFailures() {
 doTestIgnoreFailures(true);
 doTestIgnoreFailures(false);
+doTestIgnoreFailuresDoubleWrapped(true);
+doTestIgnoreFailuresDoubleWrapped(false);
   }
 
   @Test(timeout=4)
@@ -800,6 +802,89 @@ public class TestCopyMapper {
 }
   }
 
+  /**
+   * This test covers the case where the CopyReadException is double-wrapped and
+   * the mapper should be able to ignore this nested read exception.
+   * @see #doTestIgnoreFailures
+   */
+  private void doTestIgnoreFailuresDoubleWrapped(final boolean ignoreFailures) {
+try {
+  deleteState();
+  createSourceData();
+
+  final UserGroupInformation tmpUser = UserGroupInformation
+  .createRemoteUser("guest");
+
+  final CopyMapper copyMapper = new CopyMapper();
+
+  final Mapper.Context context =
+  tmpUser.doAs(new PrivilegedAction<
+  Mapper.Context>() {
+@Override
+public Mapper.Context
+run() {
+  try {
+StubContext stubContext = new StubContext(
+getConfiguration(), null, 0);
+ 
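The core of the HADOOP-12469 change above is swapping a getCause() instanceof test for commons-lang's ExceptionUtils.indexOfType, which walks the whole cause chain and matches subclasses. A small self-contained demo of why that matters for a double-wrapped exception; CopyReadException here is a local stand-in, not the real Hadoop class:

import org.apache.commons.lang.exception.ExceptionUtils;

public class NestedCauseDemo {
  static class CopyReadException extends RuntimeException {
    CopyReadException(String msg) { super(msg); }
  }

  public static void main(String[] args) {
    Exception doubleWrapped = new RuntimeException("outer",
        new RuntimeException("middle", new CopyReadException("read failed")));

    // The old check looks only at the immediate cause: prints false.
    System.out.println(doubleWrapped.getCause() instanceof CopyReadException);

    // indexOfType searches every level of the chain: prints true,
    // returning -1 only when no CopyReadException exists at any depth.
    System.out.println(
        ExceptionUtils.indexOfType(doubleWrapped, CopyReadException.class) != -1);
  }
}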

hadoop git commit: HADOOP-13086. enable daemonization of dynamic commands

2016-05-04 Thread aw
Repository: hadoop
Updated Branches:
  refs/heads/HADOOP-12930 e1cae42ad -> d0ff3c336


HADOOP-13086. enable daemonization of dynamic commands


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d0ff3c33
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d0ff3c33
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d0ff3c33

Branch: refs/heads/HADOOP-12930
Commit: d0ff3c3361946d1c9a35452c2edfce03cfa983b2
Parents: e1cae42
Author: Allen Wittenauer 
Authored: Wed May 4 10:13:18 2016 -0700
Committer: Allen Wittenauer 
Committed: Wed May 4 10:13:18 2016 -0700

--
 .../hadoop-common/src/main/bin/hadoop   | 53 ++-
 .../hadoop-hdfs/src/main/bin/hdfs   | 36 ++---
 hadoop-mapreduce-project/bin/mapred | 49 +++--
 hadoop-yarn-project/hadoop-yarn/bin/yarn| 55 ++--
 4 files changed, 142 insertions(+), 51 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d0ff3c33/hadoop-common-project/hadoop-common/src/main/bin/hadoop
--
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop 
b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
index bb4b041..7b18d22 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
@@ -207,6 +207,57 @@ fi
 hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
 HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
 
+if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
+  HADOOP_SECURE_USER="${HADOOP_SUBCMD_SECUREUSER}"
+  hadoop_verify_secure_prereq
+  hadoop_setup_secure_service
+  priv_outfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
+  priv_errfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.err"
+  priv_pidfile="${HADOOP_PID_DIR}/privileged-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
+  daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
+  daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
+else
+  daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.out"
+  daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}.pid"
+fi
+
+if [[ "${HADOOP_DAEMON_MODE}" != "default" ]]; then
+  # shellcheck disable=SC2034
+  HADOOP_ROOT_LOGGER="${HADOOP_DAEMON_ROOT_LOGGER}"
+  if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
+# shellcheck disable=SC2034
+HADOOP_LOGFILE="hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.log"
+  else
+# shellcheck disable=SC2034
+HADOOP_LOGFILE="hadoop-${HADOOP_IDENT_STRING}-${HADOOP_SUBCMD}-${HOSTNAME}.log"
+  fi
+fi
+
 hadoop_finalize
-hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "$@"
 
+if [[ -n "${HADOOP_SUBCMD_SUPPORTDAEMONIZATION}" ]]; then
+  if [[ -n "${HADOOP_SUBCMD_SECURESERVICE}" ]]; then
+hadoop_secure_daemon_handler \
+  "${HADOOP_DAEMON_MODE}" \
+  "${HADOOP_SUBCMD}" \
+  "${HADOOP_CLASSNAME}" \
+  "${daemon_pidfile}" \
+  "${daemon_outfile}" \
+  "${priv_pidfile}" \
+  "${priv_outfile}" \
+  "${priv_errfile}" \
+  "$@"
+  else
+hadoop_daemon_handler \
+  "${HADOOP_DAEMON_MODE}" \
+  "${HADOOP_SUBCMD}" \
+  "${HADOOP_CLASSNAME}" \
+  "${daemon_pidfile}" \
+  "${daemon_outfile}" \
+  "$@"
+  fi
+  exit $?
+else
+  # shellcheck disable=SC2086
+  hadoop_java_exec "${HADOOP_SUBCMD}" "${HADOOP_CLASSNAME}" "$@"
+fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d0ff3c33/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
index 310fb41..4c0b7fb 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
@@ -76,7 +76,7 @@ function hdfscmd_case
 
   case ${subcmd} in
 balancer)
-  supportdaemonization="true"
+  HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
   HADOOP_CLASSNAME=org.apache.hadoop.hdfs.server.balancer.Balancer
   hadoop_debug "Appending HADOOP_BALANCER_OPTS onto HADOOP_OPTS"
   HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_BALANCER_OPTS}"
@@ -91,12 +91,12 @@ function hdfscmd_case
   HADOOP_CLASSNAME=org.apache.hadoop.hdfs.tools.CryptoAdmin
 ;;
 datanode)
-  supportdaemonization="true"
+  HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
   # Determine if we're 

[16/16] hadoop git commit: Merge trunk into HDFS-7240

2016-05-04 Thread arp
Merge trunk into HDFS-7240


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/78bd1b2a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/78bd1b2a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/78bd1b2a

Branch: refs/heads/HDFS-7240
Commit: 78bd1b2ab25357c38f77943b092b8ad54c89cd9c
Parents: 05d7a83 f343d91
Author: Arpit Agarwal 
Authored: Wed May 4 10:01:39 2016 -0700
Committer: Arpit Agarwal 
Committed: Wed May 4 10:01:39 2016 -0700

--
 dev-support/verify-xml.sh   | 150 ++
 .../main/resources/assemblies/hadoop-tools.xml  |   7 +
 .../src/main/conf/hadoop-metrics.properties |  75 ---
 .../org/apache/hadoop/http/HttpServer2.java |   4 -
 .../apache/hadoop/metrics/ContextFactory.java   | 214 
 .../apache/hadoop/metrics/MetricsContext.java   | 125 -
 .../apache/hadoop/metrics/MetricsException.java |  49 --
 .../apache/hadoop/metrics/MetricsRecord.java| 254 --
 .../apache/hadoop/metrics/MetricsServlet.java   | 188 ---
 .../org/apache/hadoop/metrics/MetricsUtil.java  | 104 
 .../java/org/apache/hadoop/metrics/Updater.java |  41 --
 .../org/apache/hadoop/metrics/file/package.html |  43 --
 .../hadoop/metrics/ganglia/GangliaContext.java  | 276 ---
 .../metrics/ganglia/GangliaContext31.java   | 147 --
 .../apache/hadoop/metrics/ganglia/package.html  |  80 ---
 .../apache/hadoop/metrics/jvm/EventCounter.java |  36 --
 .../apache/hadoop/metrics/jvm/JvmMetrics.java   | 203 
 .../apache/hadoop/metrics/jvm/package-info.java |  22 -
 .../java/org/apache/hadoop/metrics/package.html | 159 --
 .../metrics/spi/AbstractMetricsContext.java | 494 ---
 .../hadoop/metrics/spi/CompositeContext.java| 206 
 .../apache/hadoop/metrics/spi/MetricValue.java  |  58 ---
 .../hadoop/metrics/spi/MetricsRecordImpl.java   | 304 
 .../metrics/spi/NoEmitMetricsContext.java   |  61 ---
 .../apache/hadoop/metrics/spi/NullContext.java  |  74 ---
 .../spi/NullContextWithUpdateThread.java|  82 ---
 .../apache/hadoop/metrics/spi/OutputRecord.java |  93 
 .../org/apache/hadoop/metrics/spi/Util.java |  68 ---
 .../org/apache/hadoop/metrics/spi/package.html  |  36 --
 .../apache/hadoop/metrics/util/MBeanUtil.java   |  92 
 .../apache/hadoop/metrics/util/MetricsBase.java |  51 --
 .../metrics/util/MetricsDynamicMBeanBase.java   | 229 -
 .../hadoop/metrics/util/MetricsIntValue.java| 106 
 .../hadoop/metrics/util/MetricsLongValue.java   |  93 
 .../hadoop/metrics/util/MetricsRegistry.java|  90 
 .../metrics/util/MetricsTimeVaryingInt.java | 129 -
 .../metrics/util/MetricsTimeVaryingLong.java| 125 -
 .../metrics/util/MetricsTimeVaryingRate.java| 198 
 .../hadoop/metrics/util/package-info.java   |  22 -
 .../java/org/apache/hadoop/util/RunJar.java |  89 ++--
 .../org/apache/hadoop/util/SysInfoWindows.java  |   2 +-
 .../conf/TestConfigurationFieldsBase.java   | 255 ++
 .../org/apache/hadoop/http/TestHttpServer.java  |  10 +-
 .../hadoop/metrics/TestMetricsServlet.java  | 112 -
 .../metrics/ganglia/TestGangliaContext.java |  84 
 .../hadoop/metrics/spi/TestOutputRecord.java|  39 --
 .../java/org/apache/hadoop/util/TestRunJar.java |  62 +--
 .../hadoop/hdfs/DistributedFileSystem.java  |   3 +
 .../hadoop-hdfs-native-client/pom.xml   |   2 +-
 .../datanode/fsdataset/impl/FsVolumeImpl.java   |   6 +-
 .../src/main/resources/hdfs-default.xml |   5 +
 .../apache/hadoop/hdfs/TestEncryptionZones.java |  38 ++
 .../fsdataset/impl/TestFsVolumeList.java|  36 +-
 .../apache/hadoop/mapred/LocalJobRunner.java|   2 +-
 .../hadoop/mapred/LocalJobRunnerMetrics.java|  94 ++--
 .../hadoop/mapreduce/task/reduce/Shuffle.java   |   2 +-
 .../task/reduce/ShuffleClientMetrics.java   |  91 ++--
 .../yarn/util/ProcfsBasedProcessTree.java   |  19 +-
 .../yarn/util/TestProcfsBasedProcessTree.java   |  39 +-
 .../server/resourcemanager/ClientRMService.java |   6 +-
 .../server/resourcemanager/RMAuditLogger.java   |  47 +-
 .../scheduler/AbstractYarnScheduler.java|   4 +-
 .../scheduler/common/fica/FiCaSchedulerApp.java |   6 +-
 .../scheduler/fair/FSAppAttempt.java|   6 +-
 .../resourcemanager/TestRMAuditLogger.java  |  44 +-
 .../webapp/TestRMWebServicesNodes.java  | 256 +-
 .../yarn/server/timeline/EntityCacheItem.java   |  24 +-
 .../timeline/EntityGroupFSTimelineStore.java|  22 +-
 .../EntityGroupFSTimelineStoreMetrics.java  | 160 ++
 .../hadoop/yarn/server/timeline/LogInfo.java|   5 +-
 .../TestEntityGroupFSTimelineStore.java |  22 +
 71 files changed, 1108 insertions(+), 5272 deletions(-)

[02/16] hadoop git commit: Fix hadoop-hdfs-native-client compilation on Windows. Contributed by Brahma Reddy Battula.

2016-05-04 Thread arp
Fix hadoop-hdfs-native-client compilation on Windows. Contributed by Brahma 
Reddy Battula.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3ff0510f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3ff0510f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3ff0510f

Branch: refs/heads/HDFS-7240
Commit: 3ff0510ffdc20e44f06aec591782f8875b686327
Parents: 9e8411d
Author: Andrew Wang 
Authored: Mon May 2 18:15:51 2016 -0700
Committer: Andrew Wang 
Committed: Mon May 2 18:15:51 2016 -0700

--
 hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3ff0510f/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml 
b/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
index 4441ca4..1e67646 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
@@ -148,7 +148,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
 
 
-  
+  
 
   
 


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



[12/16] hadoop git commit: HADOOP-12504. Remove metrics v1. (aajisaka)

2016-05-04 Thread arp
http://git-wip-us.apache.org/repos/asf/hadoop/blob/36972d61/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java
deleted file mode 100644
index 71f4b5e..000
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/util/MetricsTimeVaryingRate.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics.util;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-/**
- * The MetricsTimeVaryingRate class is for a rate based metric that
- * naturally varies over time (e.g. time taken to create a file).
- * The rate is averaged at each interval heart beat (the interval
- * is set in the metrics config file).
- * This class also keeps track of the min and max rates along with 
- * a method to reset the min-max.
- *
- * @deprecated Use org.apache.hadoop.metrics2 package instead.
- */
-@Deprecated
-@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
-public class MetricsTimeVaryingRate extends MetricsBase {
-
-  private static final Log LOG =
-LogFactory.getLog("org.apache.hadoop.metrics.util");
-
-  static class Metrics {
-int numOperations = 0;
-long time = 0;  // total time or average time
-
-void set(final Metrics resetTo) {
-  numOperations = resetTo.numOperations;
-  time = resetTo.time;
-}
-
-void reset() {
-  numOperations = 0;
-  time = 0;
-}
-  }
-  
-  static class MinMax {
-long minTime = -1;
-long maxTime = 0;
-
-void set(final MinMax newVal) {
-  minTime = newVal.minTime;
-  maxTime = newVal.maxTime;
-}
-
-void reset() {
-  minTime = -1;
-  maxTime = 0;
-}
-void update(final long time) { // update min max
-  minTime = (minTime == -1) ? time : Math.min(minTime, time);
-  minTime = Math.min(minTime, time);
-  maxTime = Math.max(maxTime, time);
-}
-  }
-  private Metrics currentData;
-  private Metrics previousIntervalData;
-  private MinMax minMax;
-  
-  
-  /**
-   * Constructor - create a new metric
-   * @param nam the name of the metrics to be used to publish the metric
-   * @param registry - where the metrics object will be registered
-   */
-  public MetricsTimeVaryingRate(final String nam, final MetricsRegistry registry, final String description) {
-super(nam, description);
-currentData = new Metrics();
-previousIntervalData = new Metrics();
-minMax = new MinMax();
-registry.add(nam, this);
-  }
-  
-  /**
-   * Constructor - create a new metric
-   * @param nam the name of the metrics to be used to publish the metric
-   * @param registry - where the metrics object will be registered
-   * A description of {@link #NO_DESCRIPTION} is used
-   */
-  public MetricsTimeVaryingRate(final String nam, MetricsRegistry registry) {
-this(nam, registry, NO_DESCRIPTION);
-
-  }
-  
-  
-  /**
-   * Increment the metrics for numOps operations
-   * @param numOps - number of operations
-   * @param time - time for numOps operations
-   */
-  public synchronized void inc(final int numOps, final long time) {
-currentData.numOperations += numOps;
-currentData.time += time;
-long timePerOps = time/numOps;
-minMax.update(timePerOps);
-  }
-  
-  /**
-   * Increment the metrics for one operation
-   * @param time for one operation
-   */
-  public synchronized void inc(final long time) {
-currentData.numOperations++;
-currentData.time += time;
-minMax.update(time);
-  }
-  
-  
-
-  private synchronized void intervalHeartBeat() {
- previousIntervalData.numOperations = currentData.numOperations;
- previousIntervalData.time = (currentData.numOperations == 0) ?
-   
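
For orientation, the deprecation note above points at the metrics2 package. Here is a minimal hedged sketch of the metrics2 idiom that covers the same ground as the removed class (ops count plus average time per op); the class, metric, and method names are illustrative, not from the patch:

import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.MutableRate;

// Illustrative metrics2 replacement for a MetricsTimeVaryingRate-style metric.
@Metrics(name = "FileCreateMetrics", context = "example")
public class FileCreateMetrics {
  // MutableRate tracks the number of operations and the mean time per op;
  // its MutableStat machinery also keeps min/max, replacing the hand-rolled
  // MinMax class above.
  @Metric("Time taken to create a file") MutableRate createFile;

  public static FileCreateMetrics create() {
    return DefaultMetricsSystem.instance()
        .register("FileCreateMetrics", null, new FileCreateMetrics());
  }

  public void addCreate(long elapsedMillis) {
    createFile.add(elapsedMillis);  // replaces MetricsTimeVaryingRate.inc()
  }
}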

[05/16] hadoop git commit: HADOOP-13080. Refresh time in SysInfoWindows is in nanoseconds. Contributed by Inigo Goiri

2016-05-04 Thread arp
HADOOP-13080. Refresh time in SysInfoWindows is in nanoseconds. Contributed by Inigo Goiri


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c1cc6ac6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c1cc6ac6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c1cc6ac6

Branch: refs/heads/HDFS-7240
Commit: c1cc6ac667e9e1b2ed58f16cb9fa1584ea54f0ac
Parents: 45a753c
Author: Chris Douglas 
Authored: Mon May 2 22:36:58 2016 -0700
Committer: Chris Douglas 
Committed: Mon May 2 22:36:58 2016 -0700

--
 .../src/main/java/org/apache/hadoop/util/SysInfoWindows.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c1cc6ac6/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java
index de0c43b..490c127 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java
@@ -59,7 +59,7 @@ public class SysInfoWindows extends SysInfo {
 
   @VisibleForTesting
   long now() {
-return System.nanoTime();
+return Time.monotonicNow();
   }
 
   void reset() {

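As a side note on the one-line fix: a minimal hedged sketch of why the clock swap matters, assuming the usual pattern of comparing now() deltas against a millisecond refresh interval (names and values below are illustrative):

import org.apache.hadoop.util.Time;

public class ClockUnitsDemo {
  public static void main(String[] args) throws InterruptedException {
    long refreshIntervalMs = 1000L;

    long lastNs = System.nanoTime();    // pre-fix clock: nanoseconds
    long lastMs = Time.monotonicNow();  // post-fix clock: monotonic milliseconds
    Thread.sleep(5);                    // ~5 ms elapse

    // A nanosecond delta (~5,000,000) dwarfs a millisecond threshold, so the
    // pre-fix check would trigger a refresh on nearly every call.
    System.out.println((System.nanoTime() - lastNs) > refreshIntervalMs);   // true
    // The millisecond delta (~5) respects the threshold as intended.
    System.out.println((Time.monotonicNow() - lastMs) > refreshIntervalMs); // false
  }
}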




[09/16] hadoop git commit: HDFS-9902. Support different values of dfs.datanode.du.reserved per storage type. (Contributed by Brahma Reddy Battula)

2016-05-04 Thread arp
HDFS-9902. Support different values of dfs.datanode.du.reserved per storage type. (Contributed by Brahma Reddy Battula)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6d77d6ea
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6d77d6ea
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6d77d6ea

Branch: refs/heads/HDFS-7240
Commit: 6d77d6eab7790ed7ae2cad5b327ba5d1deb485db
Parents: ed54f5f
Author: Arpit Agarwal 
Authored: Tue May 3 16:52:43 2016 -0700
Committer: Arpit Agarwal 
Committed: Tue May 3 16:52:43 2016 -0700

--
 .../datanode/fsdataset/impl/FsVolumeImpl.java   |  6 ++--
 .../src/main/resources/hdfs-default.xml |  5 +++
 .../fsdataset/impl/TestFsVolumeList.java| 36 +++-
 3 files changed, 44 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6d77d6ea/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
index 73514b6..68e2537 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
@@ -58,6 +58,7 @@ import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.CloseableReferenceCount;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Timer;
 import org.codehaus.jackson.annotate.JsonProperty;
@@ -118,9 +119,10 @@ public class FsVolumeImpl implements FsVolumeSpi {
   Configuration conf, StorageType storageType) throws IOException {
 this.dataset = dataset;
 this.storageID = storageID;
-this.reserved = conf.getLong(
+this.reserved = conf.getLong(DFSConfigKeys.DFS_DATANODE_DU_RESERVED_KEY
++ "." + StringUtils.toLowerCase(storageType.toString()), conf.getLong(
 DFSConfigKeys.DFS_DATANODE_DU_RESERVED_KEY,
-DFSConfigKeys.DFS_DATANODE_DU_RESERVED_DEFAULT);
+DFSConfigKeys.DFS_DATANODE_DU_RESERVED_DEFAULT));
 this.reservedForReplicas = new AtomicLong(0L);
 this.currentDir = currentDir;
 File parent = currentDir.getParentFile();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6d77d6ea/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
index 842ccbf..79f7911 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml
@@ -321,6 +321,11 @@
   <name>dfs.datanode.du.reserved</name>
   <value>0</value>
   <description>Reserved space in bytes per volume. Always leave this much space free for non dfs use.
+  Specific storage type based reservation is also supported. The property can be followed with
+  corresponding storage types ([ssd]/[disk]/[archive]/[ram_disk]) for cluster with heterogeneous storage.
+  For example, reserved space for RAM_DISK storage can be configured using property
+  'dfs.datanode.du.reserved.ram_disk'. If specific storage type reservation is not configured
+  then dfs.datanode.du.reserved will be used.
   </description>
 </property>
 

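A hedged sketch of the lookup order the description above defines, mirroring the FsVolumeImpl change: the storage-type-specific key wins, otherwise the generic key applies. The wrapper class and configured values are illustrative; the key names are the real ones:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.util.StringUtils;

public class ReservedSpaceDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.setLong("dfs.datanode.du.reserved", 1024L * 1024 * 1024);         // generic: 1 GB
    conf.setLong("dfs.datanode.du.reserved.ram_disk", 64L * 1024 * 1024);  // RAM_DISK: 64 MB

    StorageType type = StorageType.RAM_DISK;
    long reserved = conf.getLong(
        "dfs.datanode.du.reserved." + StringUtils.toLowerCase(type.toString()),
        conf.getLong("dfs.datanode.du.reserved", 0L));
    System.out.println(reserved);  // 67108864: the RAM_DISK-specific value wins
  }
}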
http://git-wip-us.apache.org/repos/asf/hadoop/blob/6d77d6ea/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsVolumeList.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsVolumeList.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsVolumeList.java
index e24c725..796d249 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsVolumeList.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestFsVolumeList.java
@@ -27,6 +27,7 @@ import 
org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeReference;
 import 

[01/16] hadoop git commit: MAPREDUCE-6537. Include hadoop-pipes examples in the release tarball. Contributed by Kai Sasaki.

2016-05-04 Thread arp
Repository: hadoop
Updated Branches:
  refs/heads/HDFS-7240 05d7a838e -> 78bd1b2ab


MAPREDUCE-6537. Include hadoop-pipes examples in the release tarball. Contributed by Kai Sasaki.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9e8411d0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9e8411d0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9e8411d0

Branch: refs/heads/HDFS-7240
Commit: 9e8411d0a630cabbfe7493763c172ff6754e51be
Parents: 1b9f186
Author: Andrew Wang 
Authored: Mon May 2 15:35:05 2016 -0700
Committer: Andrew Wang 
Committed: Mon May 2 15:35:05 2016 -0700

--
 .../src/main/resources/assemblies/hadoop-tools.xml| 7 +++
 1 file changed, 7 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9e8411d0/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml
--
diff --git a/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml 
b/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml
index 15c2572..f8ba48e 100644
--- a/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml
+++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml
@@ -45,6 +45,13 @@
       <outputDirectory>lib/native</outputDirectory>
     </fileSet>
     <fileSet>
+      <directory>../hadoop-pipes/target/native/examples</directory>
+      <includes>
+        <include>*</include>
+      </includes>
+      <outputDirectory>lib/native/examples</outputDirectory>
+    </fileSet>
+    <fileSet>
       <directory>../hadoop-archives/target</directory>
       <outputDirectory>/share/hadoop/${hadoop.component}/sources</outputDirectory>
       <includes>





[07/16] hadoop git commit: YARN-4834. ProcfsBasedProcessTree doesn't track daemonized processes. Contributed by Nathan Roberts

2016-05-04 Thread arp
YARN-4834. ProcfsBasedProcessTree doesn't track daemonized processes. Contributed by Nathan Roberts


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c6b48391
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c6b48391
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c6b48391

Branch: refs/heads/HDFS-7240
Commit: c6b48391680c1b81a86aabc3ad4c725bfade6d2e
Parents: 06413da
Author: Jason Lowe 
Authored: Tue May 3 17:27:28 2016 +
Committer: Jason Lowe 
Committed: Tue May 3 17:27:28 2016 +

--
 .../yarn/util/ProcfsBasedProcessTree.java   | 19 +-
 .../yarn/util/TestProcfsBasedProcessTree.java   | 39 
 2 files changed, 41 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6b48391/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java
index 7bd5390..4fb4be3 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ProcfsBasedProcessTree.java
@@ -216,7 +216,16 @@ public class ProcfsBasedProcessTree extends 
ResourceCalculatorProcessTree {
 String pID = entry.getKey();
 if (!pID.equals("1")) {
   ProcessInfo pInfo = entry.getValue();
-  ProcessInfo parentPInfo = allProcessInfo.get(pInfo.getPpid());
+  String ppid = pInfo.getPpid();
+  // If parent is init and process is not session leader,
+  // attach to sessionID
+  if (ppid.equals("1")) {
+    String sid = pInfo.getSessionId().toString();
+    if (!pID.equals(sid)) {
+      ppid = sid;
+    }
+  }
+  ProcessInfo parentPInfo = allProcessInfo.get(ppid);
   if (parentPInfo != null) {
 parentPInfo.addChild(pInfo);
   }
@@ -571,6 +580,14 @@ public class ProcfsBasedProcessTree extends 
ResourceCalculatorProcessTree {
 return pTree.substring(0, pTree.length()) + "]";
   }
 
+  /**
+   * Returns boolean indicating whether pid
+   * is in process tree.
+   */
+  public boolean contains(String pid) {
+    return processTree.containsKey(pid);
+  }
+
   /**
*
* Class containing information of a process.

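To make the reparenting rule above concrete, a small hedged walkthrough (the pids are made up; the conditional mirrors the hunk):

public class ReparentDemo {
  public static void main(String[] args) {
    // A worker daemonizes: its parent exits, init (pid 1) adopts it, and a
    // naive ppid walk would drop it from the container's process tree.
    String pID = "4321";  // the daemonized worker
    String ppid = "1";    // adopted by init
    String sid = "4000";  // session leader created by the container launch

    // The fix: if the parent is init and the process is not itself the
    // session leader, hang it under the session leader instead.
    if (ppid.equals("1") && !pID.equals(sid)) {
      ppid = sid;
    }
    System.out.println(ppid);  // 4000: the worker stays in the monitored tree
  }
}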
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6b48391/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java
index da7849d4..6fbbfbe 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestProcfsBasedProcessTree.java
@@ -36,6 +36,7 @@ import java.util.Vector;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -65,7 +66,7 @@ public class TestProcfsBasedProcessTree {
 TestProcfsBasedProcessTree.class.getName() + "-localDir");
 
   private ShellCommandExecutor shexec = null;
-  private String pidFile, lowestDescendant;
+  private String pidFile, lowestDescendant, lostDescendant;
   private String shellScript;
 
   private static final int N = 6; // Controls the RogueTask
@@ -144,19 +145,17 @@ public class TestProcfsBasedProcessTree {
 
 lowestDescendant =
 TEST_ROOT_DIR + File.separator + "lowestDescendantPidFile";
+lostDescendant =
+TEST_ROOT_DIR + File.separator + "lostDescendantPidFile";
 
 // write to shell-script
-try {
-  FileWriter fWriter = new FileWriter(shellScript);
-  fWriter.write("# rogue task\n" + "sleep 1\n" + "echo hello\n"
-  + "if [ $1 -ne 0 ]\n" + "then\n" + " sh " + shellScript
-  + " $(($1-1))\n" + "else\n" + " echo $$ > " + lowestDescendant + "\n"
-  + " while 

[08/16] hadoop git commit: YARN-5003. Add container resource to RM audit log. Contributed by Nathan Roberts

2016-05-04 Thread arp
YARN-5003. Add container resource to RM audit log. Contributed by Nathan Roberts


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ed54f5f1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ed54f5f1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ed54f5f1

Branch: refs/heads/HDFS-7240
Commit: ed54f5f1ff7862f8216f77d5ea8f9ccea674ccd1
Parents: c6b4839
Author: Jason Lowe 
Authored: Tue May 3 20:03:41 2016 +
Committer: Jason Lowe 
Committed: Tue May 3 20:03:41 2016 +

--
 .../server/resourcemanager/ClientRMService.java |  6 +--
 .../server/resourcemanager/RMAuditLogger.java   | 47 
 .../scheduler/AbstractYarnScheduler.java|  4 +-
 .../scheduler/common/fica/FiCaSchedulerApp.java |  6 +--
 .../scheduler/fair/FSAppAttempt.java|  6 +--
 .../resourcemanager/TestRMAuditLogger.java  | 44 +++---
 6 files changed, 66 insertions(+), 47 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ed54f5f1/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
index b7eb5f1..8da7ebd 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
@@ -1620,7 +1620,7 @@ public class ClientRMService extends AbstractService 
implements
 if (application == null) {
   RMAuditLogger.logFailure(callerUGI.getUserName(),
   AuditConstants.SIGNAL_CONTAINER, "UNKNOWN", "ClientRMService",
-  "Trying to signal an absent container", applicationId, containerId);
+  "Trying to signal an absent container", applicationId, containerId, 
null);
   throw RPCUtil
   .getRemoteException("Trying to signal an absent container "
   + containerId);
@@ -1644,11 +1644,11 @@ public class ClientRMService extends AbstractService 
implements
   request));
   RMAuditLogger.logSuccess(callerUGI.getShortUserName(),
   AuditConstants.SIGNAL_CONTAINER, "ClientRMService", applicationId,
-  containerId);
+  containerId, null);
 } else {
   RMAuditLogger.logFailure(callerUGI.getUserName(),
   AuditConstants.SIGNAL_CONTAINER, "UNKNOWN", "ClientRMService",
-  "Trying to signal an absent container", applicationId, containerId);
+  "Trying to signal an absent container", applicationId, containerId, 
null);
   throw RPCUtil
   .getRemoteException("Trying to signal an absent container "
   + containerId);

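A hedged usage sketch of the widened audit signature: the trailing Resource argument is the new parameter, and null remains valid when no container resource applies. This assumes the surrounding ClientRMService context for callerUGI, applicationId, and containerId; the resource value is illustrative:

// Sketch only; mirrors the call shape in the hunk above.
Resource containerResource = Resource.newInstance(1024, 1);  // 1024 MB, 1 vcore
RMAuditLogger.logSuccess(callerUGI.getShortUserName(),
    AuditConstants.SIGNAL_CONTAINER, "ClientRMService", applicationId,
    containerId, containerResource);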
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ed54f5f1/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAuditLogger.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAuditLogger.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAuditLogger.java
index 3b603a4..d08cb9e 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAuditLogger.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAuditLogger.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.Resource;
 
 /** 
  * Manages ResourceManager audit logs. 
@@ -38,7 +39,7 @@ public class RMAuditLogger {
 
   static enum Keys {USER, OPERATION, TARGET, RESULT, IP, PERMISSIONS,
 

[04/16] hadoop git commit: HDFS-10344. DistributedFileSystem#getTrashRoots should skip encryption zone that does not have .Trash. Contributed by Xiaoyu Yao.

2016-05-04 Thread arp
HDFS-10344. DistributedFileSystem#getTrashRoots should skip encryption zone that does not have .Trash. Contributed by Xiaoyu Yao.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/45a753cc
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/45a753cc
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/45a753cc

Branch: refs/heads/HDFS-7240
Commit: 45a753ccf79d334513c7bc8f2b81c89a4697075d
Parents: 4ee4e5c
Author: Xiaoyu Yao 
Authored: Mon May 2 19:30:47 2016 -0700
Committer: Xiaoyu Yao 
Committed: Mon May 2 19:30:47 2016 -0700

--
 .../hadoop/hdfs/DistributedFileSystem.java  |  3 ++
 .../apache/hadoop/hdfs/TestEncryptionZones.java | 38 
 2 files changed, 41 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/45a753cc/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
index a3a8ba0..7a56265 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
@@ -2410,6 +2410,9 @@ public class DistributedFileSystem extends FileSystem {
   while (it.hasNext()) {
 Path ezTrashRoot = new Path(it.next().getPath(),
 FileSystem.TRASH_PREFIX);
+if (!exists(ezTrashRoot)) {
+  continue;
+}
 if (allUsers) {
   for (FileStatus candidate : listStatus(ezTrashRoot)) {
 if (exists(candidate.getPath())) {

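A minimal hedged sketch of the client-side call this guard hardens: with the patch, zones that do not yet have a .Trash directory are skipped instead of breaking the listing (assumes fs.defaultFS points at an HDFS cluster):

import java.util.Collection;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;

public class TrashRootsDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(conf);
    // Trash roots include per-user directories under each encryption zone's
    // .Trash, plus users' home trash directories.
    Collection<FileStatus> roots = dfs.getTrashRoots(true /* allUsers */);
    for (FileStatus root : roots) {
      System.out.println(root.getPath());
    }
  }
}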
http://git-wip-us.apache.org/repos/asf/hadoop/blob/45a753cc/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
index c8d98ee..ce2befd 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
@@ -28,6 +28,7 @@ import java.net.URI;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.List;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CountDownLatch;
@@ -1442,6 +1443,43 @@ public class TestEncryptionZones {
 verifyShellDeleteWithTrash(shell, encFile);
   }
 
+  @Test(timeout = 120000)
+  public void testGetTrashRoots() throws Exception {
+final HdfsAdmin dfsAdmin =
+new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
+Path ezRoot1 = new Path("/ez1");
+fs.mkdirs(ezRoot1);
+dfsAdmin.createEncryptionZone(ezRoot1, TEST_KEY);
+Path ezRoot2 = new Path("/ez2");
+fs.mkdirs(ezRoot2);
+dfsAdmin.createEncryptionZone(ezRoot2, TEST_KEY);
+Path ezRoot3 = new Path("/ez3");
+fs.mkdirs(ezRoot3);
+dfsAdmin.createEncryptionZone(ezRoot3, TEST_KEY);
+Collection<FileStatus> trashRootsBegin = fs.getTrashRoots(true);
+assertEquals("Unexpected getTrashRoots result", 0, trashRootsBegin.size());
+
+final Path encFile = new Path(ezRoot2, "encFile");
+final int len = 8192;
+DFSTestUtil.createFile(fs, encFile, len, (short) 1, 0xFEED);
+Configuration clientConf = new Configuration(conf);
+clientConf.setLong(FS_TRASH_INTERVAL_KEY, 1);
+FsShell shell = new FsShell(clientConf);
+verifyShellDeleteWithTrash(shell, encFile);
+
+Collection<FileStatus> trashRootsDelete1 = fs.getTrashRoots(true);
+assertEquals("Unexpected getTrashRoots result", 1,
+trashRootsDelete1.size());
+
+final Path nonEncFile = new Path("/nonEncFile");
+DFSTestUtil.createFile(fs, nonEncFile, len, (short) 1, 0xFEED);
+verifyShellDeleteWithTrash(shell, nonEncFile);
+
+Collection<FileStatus> trashRootsDelete2 = fs.getTrashRoots(true);
+assertEquals("Unexpected getTrashRoots result", 2,
+trashRootsDelete2.size());
+  }
+
   private void verifyShellDeleteWithTrash(FsShell shell, Path path)
   throws Exception{
 try {





[11/16] hadoop git commit: HADOOP-12101. Add automatic search of default Configuration variables to TestConfigurationFieldsBase. Contributed by Ray Chiang.

2016-05-04 Thread arp
HADOOP-12101. Add automatic search of default Configuration variables to TestConfigurationFieldsBase. Contributed by Ray Chiang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/355325bc
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/355325bc
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/355325bc

Branch: refs/heads/HDFS-7240
Commit: 355325bcc7111fa4aac801fd23a26422ffabaf7c
Parents: 75e0450
Author: Masatake Iwasaki 
Authored: Wed May 4 15:12:51 2016 +0900
Committer: Masatake Iwasaki 
Committed: Wed May 4 15:12:51 2016 +0900

--
 dev-support/verify-xml.sh   | 150 +++
 .../conf/TestConfigurationFieldsBase.java   | 255 +++
 2 files changed, 405 insertions(+)
--

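The base class compares the config-key fields declared in Java classes against the properties in an xml resource. A hedged sketch of the subclassing pattern it expects, modeled on TestCommonConfigurationFields ("TestMyConfigFields", "MyConfigKeys", and "my-default.xml" are illustrative placeholders):

import java.util.HashSet;

// Sketch only: wires one xml file and one keys class into the base checks.
public class TestMyConfigFields extends TestConfigurationFieldsBase {
  @Override
  public void initializeMemberVariables() {
    xmlFilename = "my-default.xml";                             // the xml side
    configurationClasses = new Class[] { MyConfigKeys.class };  // the Java side
    // Properties that intentionally exist on only one side can be skipped.
    configurationPropsToSkipCompare = new HashSet<String>();
  }
}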

http://git-wip-us.apache.org/repos/asf/hadoop/blob/355325bc/dev-support/verify-xml.sh
--
diff --git a/dev-support/verify-xml.sh b/dev-support/verify-xml.sh
new file mode 100755
index 000..abab4e6
--- /dev/null
+++ b/dev-support/verify-xml.sh
@@ -0,0 +1,150 @@
+#!/bin/bash
+##
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##
+# Script to run unit tests for xml <-> 1 or more Configuration file verification
+# usage: ./verify-xml.sh 
+#
+
+# Utility functions
+function find_test_output_file() {
+  echo "Found test output file(s) at"
+  echo ""
+  if [ -n "$1" ] && [ -e "$1" ] ; then
+echo "  $1"
+  fi
+  if [ -n "$2" ] && [ -e "$2" ] ; then
+echo "  $2"
+  fi
+  if [ -n "$3" ] && [ -e "$3" ] ; then
+echo "  $3"
+  fi
+  if [ -n "$4" ] && [ -e "$4" ] ; then
+echo "  $4"
+  fi
+  echo ""
+  echo "Examine the file for specific information xml/Configuration 
mismatches."
+  echo ""
+}
+
+function print_test_banner() {
+  local banner_text=$1
+  local banner_length=${#banner_text}
+  local banner
+  banner=$( printf "%${banner_length}s" ' ' )
+  echo ""
+  echo "${banner// /=}"
+  echo "${banner_text}"
+  echo "${banner// /=}"
+  echo ""
+}
+
+# Wrapper functions for running unit tests
+function run_all_xml_test() {
+  mvn test -Dtest=TestCommonConfigurationFields,TestHdfsConfigFields,TestMapreduceConfigFields,TestYarnConfigurationFields
+  if [ $? -ne 0 ] ; then
+print_test_banner "All Test*ConfigFields FAIL"
+  else
+print_test_banner "All Test*ConfigFields SUCCESS"
+  fi
+}
+
+function run_common_xml_test() {
+  mvn test -Dtest=TestCommonConfigurationFields
+  if [ $? -ne 0 ] ; then
+print_test_banner "TestCommonConfigurationFields FAIL"
+  else
+print_test_banner "TestCommonConfigurationFields SUCCESS"
+  fi
+}
+
+function run_hdfs_xml_test() {
+  mvn test -Dtest=TestHdfsConfigFields
+  if [ $? -ne 0 ] ; then
+print_test_banner "TestHdfsConfigFields FAIL"
+  else
+print_test_banner "TestHdfsConfigFields SUCCESS"
+  fi
+}
+
+function run_mapreduce_xml_test() {
+  mvn test -Dtest=TestMapreduceConfigFields
+  if [ $? -ne 0 ] ; then
+print_test_banner "TestMapreduceConfigFields FAIL"
+  else
+print_test_banner "TestMapreduceConfigFields SUCCESS"
+  fi
+}
+
+function run_yarn_xml_test() {
+  mvn test -Dtest=TestYarnConfigurationFields
+  if [ $? -ne 0 ] ; then
+print_test_banner "TestYarnConfigurationFields FAIL"
+  else
+print_test_banner "TestYarnConfigurationFields SUCCESS"
+  fi
+}
+
+# Main body
+cd -P -- "$(dirname -- "${BASH_SOURCE-$0}")/.." || exit
+dir="$(pwd -P)"
+
+# - Create unit test file names
+export commonOutputFile
+commonOutputFile="$(find "${dir}" -name org.apache.hadoop.conf.TestCommonConfigurationFields-output.txt)"
+export hdfsOutputFile
+hdfsOutputFile="$(find "${dir}" -name org.apache.hadoop.tools.TestHdfsConfigFields-output.txt)"
+export mrOutputFile
+mrOutputFile="$(find "${dir}" -name org.apache.hadoop.mapreduce.TestMapreduceConfigFields-output.txt)"
+export yarnOutputFile
+yarnOutputFile="$(find "${dir}" -name org.apache.hadoop.yarn.conf.TestYarnConfigurationFields-output.txt)"
+
+# - Determine which tests to run
+case "$1" in
+
+  all)
+

[03/16] hadoop git commit: MAPREDUCE-6526. Remove usage of metrics v1 from hadoop-mapreduce. (aajisaka)

2016-05-04 Thread arp
MAPREDUCE-6526. Remove usage of metrics v1 from hadoop-mapreduce. (aajisaka)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4ee4e5ca
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4ee4e5ca
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4ee4e5ca

Branch: refs/heads/HDFS-7240
Commit: 4ee4e5ca2b8488459d2231dd1de8ed44dd656d5c
Parents: 3ff0510
Author: Akira Ajisaka 
Authored: Tue May 3 10:46:11 2016 +0900
Committer: Akira Ajisaka 
Committed: Tue May 3 10:46:11 2016 +0900

--
 .../apache/hadoop/mapred/LocalJobRunner.java|  2 +-
 .../hadoop/mapred/LocalJobRunnerMetrics.java| 94 +++-
 .../hadoop/mapreduce/task/reduce/Shuffle.java   |  2 +-
 .../task/reduce/ShuffleClientMetrics.java   | 91 ---
 4 files changed, 70 insertions(+), 119 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4ee4e5ca/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java
index 37c147d..02b9a87 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java
@@ -768,7 +768,7 @@ public class LocalJobRunner implements ClientProtocol {
   public LocalJobRunner(JobConf conf) throws IOException {
 this.fs = FileSystem.getLocal(conf);
 this.conf = conf;
-myMetrics = new LocalJobRunnerMetrics(new JobConf(conf));
+myMetrics = LocalJobRunnerMetrics.create();
   }
 
   // JobSubmissionProtocol methods

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4ee4e5ca/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunnerMetrics.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunnerMetrics.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunnerMetrics.java
index aec70ed..0186cdc 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunnerMetrics.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunnerMetrics.java
@@ -17,82 +17,50 @@
  */
 package org.apache.hadoop.mapred;
 
-import org.apache.hadoop.metrics.MetricsContext;
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.metrics.MetricsUtil;
-import org.apache.hadoop.metrics.Updater;
-import org.apache.hadoop.metrics.jvm.JvmMetrics;
+import org.apache.hadoop.metrics2.MetricsSystem;
+import org.apache.hadoop.metrics2.annotation.Metric;
+import org.apache.hadoop.metrics2.annotation.Metrics;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.metrics2.lib.MutableCounterInt;
+import org.apache.hadoop.metrics2.lib.MutableGaugeInt;
 
-@SuppressWarnings("deprecation")
-class LocalJobRunnerMetrics implements Updater {
-  private final MetricsRecord metricsRecord;
+import java.util.concurrent.ThreadLocalRandom;
 
-  private int numMapTasksLaunched = 0;
-  private int numMapTasksCompleted = 0;
-  private int numReduceTasksLaunched = 0;
-  private int numReduceTasksCompleted = 0;
-  private int numWaitingMaps = 0;
-  private int numWaitingReduces = 0;
-  
-  public LocalJobRunnerMetrics(JobConf conf) {
-String sessionId = conf.getSessionId();
-// Initiate JVM Metrics
-JvmMetrics.init("JobTracker", sessionId);
-// Create a record for map-reduce metrics
-MetricsContext context = MetricsUtil.getContext("mapred");
-// record name is jobtracker for compatibility 
-metricsRecord = MetricsUtil.createRecord(context, "jobtracker");
-metricsRecord.setTag("sessionId", sessionId);
-context.registerUpdater(this);
+@Metrics(name="LocalJobRunnerMetrics", context="mapred")
+final class LocalJobRunnerMetrics {
+
+  @Metric
+  private MutableCounterInt numMapTasksLaunched;
+  @Metric
+  private MutableCounterInt 

[13/16] hadoop git commit: HADOOP-12504. Remove metrics v1. (aajisaka)

2016-05-04 Thread arp
http://git-wip-us.apache.org/repos/asf/hadoop/blob/36972d61/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
deleted file mode 100644
index dccfbe9..000
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/spi/AbstractMetricsContext.java
+++ /dev/null
@@ -1,494 +0,0 @@
-/*
- * AbstractMetricsContext.java
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metrics.spi;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.Timer;
-import java.util.TimerTask;
-import java.util.TreeMap;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.metrics.ContextFactory;
-import org.apache.hadoop.metrics.MetricsContext;
-import org.apache.hadoop.metrics.MetricsException;
-import org.apache.hadoop.metrics.MetricsRecord;
-import org.apache.hadoop.metrics.Updater;
-
-/**
- * The main class of the Service Provider Interface.  This class should be
- * extended in order to integrate the Metrics API with a specific metrics
- * client library. 
- *
- * This class implements the internal table of metric data, and the timer
- * on which data is to be sent to the metrics system.  Subclasses must
- * override the abstract emitRecord method in order to transmit
- * the data. 
- *
- * @deprecated Use org.apache.hadoop.metrics2 package instead.
- */
-@Deprecated
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
-public abstract class AbstractMetricsContext implements MetricsContext {
-
-  private int period = MetricsContext.DEFAULT_PERIOD;
-  private Timer timer = null;
-
-  private Set<Updater> updaters = new HashSet<Updater>(1);
-  private volatile boolean isMonitoring = false;
-
-  private ContextFactory factory = null;
-  private String contextName = null;
-
-  @InterfaceAudience.Private
-  public static class TagMap extends TreeMap<String,Object> {
-private static final long serialVersionUID = 3546309335061952993L;
-TagMap() {
-  super();
-}
-TagMap(TagMap orig) {
-  super(orig);
-}
-/**
- * Returns true if this tagmap contains every tag in other.
- */
-public boolean containsAll(TagMap other) {
-  for (Map.Entry<String,Object> entry : other.entrySet()) {
-Object value = get(entry.getKey());
-if (value == null || !value.equals(entry.getValue())) {
-  // either key does not exist here, or the value is different
-  return false;
-}
-  }
-  return true;
-}
-  }
-  
-  @InterfaceAudience.Private
-  public static class MetricMap extends TreeMap<String,Number> {
-private static final long serialVersionUID = -7495051861141631609L;
-MetricMap() {
-  super();
-}
-MetricMap(MetricMap orig) {
-  super(orig);
-}
-  }
-
-  static class RecordMap extends HashMap<TagMap,MetricMap> {
-private static final long serialVersionUID = 259835619700264611L;
-  }
-
-  private Map<String,RecordMap> bufferedData = new HashMap<String,RecordMap>();
-
-
-  /**
-   * Creates a new instance of AbstractMetricsContext
-   */
-  protected AbstractMetricsContext() {
-  }
-
-  /**
-   * Initializes the context.
-   */
-  @Override
-  public void init(String contextName, ContextFactory factory) 
-  {
-this.contextName = contextName;
-this.factory = factory;
-  }
-
-  /**
-   * Convenience method for subclasses to access factory attributes.
-   */
-  protected String getAttribute(String attributeName) {
-String factoryAttribute = contextName + "." + attributeName;
-return (String) 
