[35/50] [abbrv] hadoop git commit: YARN-6691. Update YARN daemon startup/shutdown scripts to include Router service. (Giovanni Matteo Fumarola via asuresh).

2017-08-01 Thread curino
YARN-6691. Update YARN daemon startup/shutdown scripts to include Router 
service. (Giovanni Matteo Fumarola via asuresh).

(cherry picked from commit 790cba7ab949c665083c4a9702bb1cd4b7824e8d)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/433ee44b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/433ee44b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/433ee44b

Branch: refs/heads/trunk
Commit: 433ee44b58d905f8d10a2fe8d2b9f71c2c8fd118
Parents: 70b1a75
Author: Arun Suresh 
Authored: Mon Jun 19 10:52:23 2017 -0700
Committer: Carlo Curino 
Committed: Tue Aug 1 17:28:26 2017 -0700

--
 hadoop-yarn-project/hadoop-yarn/bin/yarn |  5 +
 hadoop-yarn-project/hadoop-yarn/bin/yarn.cmd | 13 -
 hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh | 12 
 3 files changed, 29 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/433ee44b/hadoop-yarn-project/hadoop-yarn/bin/yarn
--
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn 
b/hadoop-yarn-project/hadoop-yarn/bin/yarn
index cf6457b..a37d183 100755
--- a/hadoop-yarn-project/hadoop-yarn/bin/yarn
+++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn
@@ -46,6 +46,7 @@ function hadoop_usage
   hadoop_add_subcommand "queue" "prints queue information"
   hadoop_add_subcommand "resourcemanager" "run the ResourceManager"
   hadoop_add_subcommand "rmadmin" "admin tools"
+  hadoop_add_subcommand "router" "run the Router daemon"
   hadoop_add_subcommand "scmadmin" "SharedCacheManager admin tools"
   hadoop_add_subcommand "sharedcachemanager" "run the SharedCacheManager 
daemon"
   hadoop_add_subcommand "timelinereader" "run the timeline reader server"
@@ -137,6 +138,10 @@ function yarncmd_case
 rmadmin)
   HADOOP_CLASSNAME='org.apache.hadoop.yarn.client.cli.RMAdminCLI'
 ;;
+router)
+  HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
+  HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.router.Router'
+;;
 scmadmin)
   HADOOP_CLASSNAME='org.apache.hadoop.yarn.client.SCMAdmin'
 ;;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/433ee44b/hadoop-yarn-project/hadoop-yarn/bin/yarn.cmd
--
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn.cmd 
b/hadoop-yarn-project/hadoop-yarn/bin/yarn.cmd
index ca879f5..690badf 100644
--- a/hadoop-yarn-project/hadoop-yarn/bin/yarn.cmd
+++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn.cmd
@@ -130,6 +130,10 @@ if "%1" == "--loglevel" (
 set 
CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\yarn-server\yarn-server-applicationhistoryservice\target\classes
   )
 
+  if exist %HADOOP_YARN_HOME%\yarn-server\yarn-server-router\target\classes (
+set 
CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\yarn-server\yarn-server-router\target\classes
+  )
+
   if exist %HADOOP_YARN_HOME%\build\test\classes (
 set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\build\test\classes
   )
@@ -151,7 +155,7 @@ if "%1" == "--loglevel" (
 
   set yarncommands=resourcemanager nodemanager proxyserver rmadmin version jar 
^
  application applicationattempt container node queue logs daemonlog 
historyserver ^
- timelineserver timelinereader classpath
+ timelineserver timelinereader router classpath
   for %%i in ( %yarncommands% ) do (
 if %yarn-command% == %%i set yarncommand=true
   )
@@ -248,6 +252,12 @@ goto :eof
   set YARN_OPTS=%YARN_OPTS% %YARN_TIMELINEREADER_OPTS%
   goto :eof
 
+:router
+  set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\router-config\log4j.properties
+  set CLASS=org.apache.hadoop.yarn.server.router.Router
+  set YARN_OPTS=%YARN_OPTS% %HADOOP_ROUTER_OPTS%
+  goto :eof
+
 :nodemanager
   set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\nm-config\log4j.properties
   set CLASS=org.apache.hadoop.yarn.server.nodemanager.NodeManager
@@ -317,6 +327,7 @@ goto :eof
  @echo   where COMMAND is one of:
   @echo   resourcemanager  run the ResourceManager
   @echo   nodemanager  run a nodemanager on each slave
+  @echo   router   run the Router daemon
   @echo   timelineserver   run the timeline server
   @echo   timelinereader   run the timeline reader server
   @echo   rmadmin  admin tools

http://git-wip-us.apache.org/repos/asf/hadoop/blob/433ee44b/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh
--
diff --git a/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh 
b/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh
index d003adb..be42298 100644
--- a/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh
+++ b/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh
@@ -136,3 

[02/50] [abbrv] hadoop git commit: YARN-3673. Create a FailoverProxy for Federation services. Contributed by Subru Krishnan

2017-08-01 Thread curino
YARN-3673. Create a FailoverProxy for Federation services. Contributed by Subru 
Krishnan

(cherry picked from commit 3307564a5f8c8abc5fe84efcd05ee0f7dfdd921c)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bdfad452
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bdfad452
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bdfad452

Branch: refs/heads/trunk
Commit: bdfad4523f1a5a776e20773c371ce99d0c538ac1
Parents: cfafd17
Author: Jian He 
Authored: Mon Aug 22 14:43:07 2016 +0800
Committer: Carlo Curino 
Committed: Tue Aug 1 17:28:20 2017 -0700

--
 .../org/apache/hadoop/yarn/conf/HAUtil.java |  30 ++-
 .../hadoop/yarn/conf/YarnConfiguration.java |  10 +
 .../yarn/conf/TestYarnConfigurationFields.java  |   4 +
 .../TestFederationRMFailoverProxyProvider.java  | 154 ++
 .../hadoop/yarn/client/ClientRMProxy.java   |   4 +-
 .../org/apache/hadoop/yarn/client/RMProxy.java  |  11 +-
 .../src/main/resources/yarn-default.xml |   7 +
 .../hadoop-yarn-server-common/pom.xml   |   2 -
 .../hadoop/yarn/server/api/ServerRMProxy.java   |   4 +-
 .../failover/FederationProxyProviderUtil.java   | 163 ++
 .../FederationRMFailoverProxyProvider.java  | 211 +++
 .../federation/failover/package-info.java   |  17 ++
 12 files changed, 603 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/bdfad452/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/HAUtil.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/HAUtil.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/HAUtil.java
index 133b377..528b642 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/HAUtil.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/HAUtil.java
@@ -18,7 +18,9 @@
 
 package org.apache.hadoop.yarn.conf;
 
-import com.google.common.annotations.VisibleForTesting;
+import java.net.InetSocketAddress;
+import java.util.Collection;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -27,8 +29,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 
-import java.net.InetSocketAddress;
-import java.util.Collection;
+import com.google.common.annotations.VisibleForTesting;
 
 @InterfaceAudience.Private
 public class HAUtil {
@@ -45,6 +46,29 @@ public class HAUtil {
   }
 
   /**
+   * Returns true if Federation is configured.
+   *
+   * @param conf Configuration
+   * @return true if federation is configured in the configuration; else false.
+   */
+  public static boolean isFederationEnabled(Configuration conf) {
+return conf.getBoolean(YarnConfiguration.FEDERATION_ENABLED,
+YarnConfiguration.DEFAULT_FEDERATION_ENABLED);
+  }
+
+  /**
+   * Returns true if RM failover is enabled in a Federation setting.
+   *
+   * @param conf Configuration
+   * @return if RM failover is enabled in conjunction with Federation in the
+   * configuration; else false.
+   */
+  public static boolean isFederationFailoverEnabled(Configuration conf) {
+return conf.getBoolean(YarnConfiguration.FEDERATION_FAILOVER_ENABLED,
+YarnConfiguration.DEFAULT_FEDERATION_FAILOVER_ENABLED);
+  }
+
+  /**
* Returns true if Resource Manager HA is configured.
*
* @param conf Configuration
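
As a usage note: a minimal, hypothetical sketch (not part of this patch) of how the two new checks are intended to be consumed by proxy-creation code such as RMProxy/ClientRMProxy (both touched in the diffstat). It assumes only the HAUtil methods added above and the FEDERATION_ENABLED / FEDERATION_FAILOVER_ENABLED constants introduced in YarnConfiguration further down.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.yarn.conf.HAUtil;
  import org.apache.hadoop.yarn.conf.YarnConfiguration;

  public class FederationFlagsSketch {
    public static void main(String[] args) {
      Configuration conf = new YarnConfiguration();
      // Enable federation; the failover flag keeps whatever default the patch defines.
      conf.setBoolean(YarnConfiguration.FEDERATION_ENABLED, true);

      if (HAUtil.isFederationEnabled(conf)
          && HAUtil.isFederationFailoverEnabled(conf)) {
        System.out.println("federation failover proxy provider would be used");
      } else {
        System.out.println("regular RM proxy path would be used");
      }
    }
  }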

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bdfad452/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index fe6c7b8..612d89b 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -2561,6 +2561,16 @@ public class YarnConfiguration extends Configuration {
 
   public static final String FEDERATION_PREFIX = YARN_PREFIX + "federation.";
 
+  public static final String FEDERATION_ENABLED = FEDERATION_PREFIX + 
"enabled";
+  public static 

[09/50] [abbrv] hadoop git commit: YARN-5325. Stateless ARMRMProxy policies implementation. (Carlo Curino via Subru).

2017-08-01 Thread curino
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1dadd0b4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/policies/router/LoadBasedRouterPolicy.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/policies/router/LoadBasedRouterPolicy.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/policies/router/LoadBasedRouterPolicy.java
index e57709f..5de749f 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/policies/router/LoadBasedRouterPolicy.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/policies/router/LoadBasedRouterPolicy.java
@@ -17,8 +17,8 @@
 
 package org.apache.hadoop.yarn.server.federation.policies.router;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import java.util.Map;
+
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import 
org.apache.hadoop.yarn.server.federation.policies.FederationPolicyInitializationContext;
@@ -30,34 +30,27 @@ import 
org.apache.hadoop.yarn.server.federation.store.records.SubClusterInfo;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
 
-import java.util.Map;
-
 /**
  * This implements a simple load-balancing policy. The policy "weights" are
  * binary 0/1 values that enable/disable each sub-cluster, and the policy peaks
  * the sub-cluster with the least load to forward this application.
  */
-public class LoadBasedRouterPolicy
-extends BaseWeightedRouterPolicy {
-
-  private static final Log LOG =
-  LogFactory.getLog(LoadBasedRouterPolicy.class);
+public class LoadBasedRouterPolicy extends AbstractRouterPolicy {
 
   @Override
-  public void reinitialize(FederationPolicyInitializationContext
-  federationPolicyContext)
+  public void reinitialize(FederationPolicyInitializationContext policyContext)
   throws FederationPolicyInitializationException {
 
 // remember old policyInfo
 WeightedPolicyInfo tempPolicy = getPolicyInfo();
 
-//attempt new initialization
-super.reinitialize(federationPolicyContext);
+// attempt new initialization
+super.reinitialize(policyContext);
 
-//check extra constraints
+// check extra constraints
 for (Float weight : getPolicyInfo().getRouterPolicyWeights().values()) {
   if (weight != 0 && weight != 1) {
-//reset to old policyInfo if check fails
+// reset to old policyInfo if check fails
 setPolicyInfo(tempPolicy);
 throw new FederationPolicyInitializationException(
 this.getClass().getCanonicalName()
@@ -69,18 +62,16 @@ public class LoadBasedRouterPolicy
 
   @Override
   public SubClusterId getHomeSubcluster(
-  ApplicationSubmissionContext appSubmissionContext)
-  throws YarnException {
+  ApplicationSubmissionContext appSubmissionContext) throws YarnException {
 
 Map<SubClusterId, SubClusterInfo> activeSubclusters =
 getActiveSubclusters();
 
-Map<SubClusterIdInfo, Float> weights = getPolicyInfo()
-.getRouterPolicyWeights();
+Map<SubClusterIdInfo, Float> weights =
+getPolicyInfo().getRouterPolicyWeights();
 SubClusterIdInfo chosen = null;
 long currBestMem = -1;
-for (Map.Entry<SubClusterId, SubClusterInfo> entry :
-activeSubclusters
+for (Map.Entry<SubClusterId, SubClusterInfo> entry : activeSubclusters
 .entrySet()) {
   SubClusterIdInfo id = new SubClusterIdInfo(entry.getKey());
   if (weights.containsKey(id) && weights.get(id) > 0) {
@@ -95,8 +86,7 @@ public class LoadBasedRouterPolicy
 return chosen.toId();
   }
 
-  private long getAvailableMemory(SubClusterInfo value)
-  throws YarnException {
+  private long getAvailableMemory(SubClusterInfo value) throws YarnException {
 try {
   long mem = -1;
   JSONObject obj = new JSONObject(value.getCapability());
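
To make the selection rule in the class comment concrete: the binary weights gate which sub-clusters are eligible, and among the eligible ones the sub-cluster reporting the most available memory (i.e. the least loaded) wins. A self-contained sketch of that rule in plain Java, with illustrative names rather than the actual YARN types:

  import java.util.HashMap;
  import java.util.Map;

  public class LoadBasedSelectionSketch {

    // Returns the key with weight == 1 and the largest available memory, or null.
    static String pickHomeSubCluster(Map<String, Float> weights,
        Map<String, Long> availableMemory) {
      String chosen = null;
      long currBestMem = -1;
      for (Map.Entry<String, Long> e : availableMemory.entrySet()) {
        Float w = weights.get(e.getKey());
        if (w != null && w > 0 && e.getValue() > currBestMem) {
          currBestMem = e.getValue();
          chosen = e.getKey();
        }
      }
      return chosen;
    }

    public static void main(String[] args) {
      Map<String, Float> weights = new HashMap<>();
      weights.put("sc1", 1f);
      weights.put("sc2", 0f); // disabled by its binary weight
      Map<String, Long> mem = new HashMap<>();
      mem.put("sc1", 4096L);
      mem.put("sc2", 8192L);
      System.out.println(pickHomeSubCluster(weights, mem)); // prints sc1
    }
  }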

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1dadd0b4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/policies/router/PriorityRouterPolicy.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/policies/router/PriorityRouterPolicy.java
 

[05/50] [abbrv] hadoop git commit: YARN-5676. Add a HashBasedRouterPolicy, and small policies and test refactoring. (Carlo Curino via Subru).

2017-08-01 Thread curino
http://git-wip-us.apache.org/repos/asf/hadoop/blob/4128c952/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/policies/TestRouterPolicyFacade.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/policies/TestRouterPolicyFacade.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/policies/TestRouterPolicyFacade.java
index 4975a9f..5fa02d6 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/policies/TestRouterPolicyFacade.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/policies/TestRouterPolicyFacade.java
@@ -29,6 +29,8 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import 
org.apache.hadoop.yarn.server.federation.policies.dao.WeightedPolicyInfo;
 import 
org.apache.hadoop.yarn.server.federation.policies.exceptions.FederationPolicyInitializationException;
+import 
org.apache.hadoop.yarn.server.federation.policies.manager.PriorityBroadcastPolicyManager;
+import 
org.apache.hadoop.yarn.server.federation.policies.manager.UniformBroadcastPolicyManager;
 import 
org.apache.hadoop.yarn.server.federation.policies.router.PriorityRouterPolicy;
 import 
org.apache.hadoop.yarn.server.federation.policies.router.UniformRandomRouterPolicy;
 import org.apache.hadoop.yarn.server.federation.resolver.SubClusterResolver;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4128c952/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/policies/TestUniformBroadcastPolicyManager.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/policies/TestUniformBroadcastPolicyManager.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/policies/TestUniformBroadcastPolicyManager.java
deleted file mode 100644
index 542a5ae..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/policies/TestUniformBroadcastPolicyManager.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership.  The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations 
under
- * the License.
- */
-
-package org.apache.hadoop.yarn.server.federation.policies;
-
-import 
org.apache.hadoop.yarn.server.federation.policies.amrmproxy.BroadcastAMRMProxyPolicy;
-import 
org.apache.hadoop.yarn.server.federation.policies.router.UniformRandomRouterPolicy;
-import org.junit.Before;
-
-/**
- * Simple test of {@link UniformBroadcastPolicyManager}.
- */
-public class TestUniformBroadcastPolicyManager extends BasePolicyManagerTest {
-
-  @Before
-  public void setup() {
-//config policy
-wfp = new UniformBroadcastPolicyManager();
-wfp.setQueue("queue1");
-
-//set expected params that the base test class will use for tests
-expectedPolicyManager = UniformBroadcastPolicyManager.class;
-expectedAMRMProxyPolicy = BroadcastAMRMProxyPolicy.class;
-expectedRouterPolicy = UniformRandomRouterPolicy.class;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4128c952/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/policies/TestWeightedLocalityPolicyManager.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/policies/TestWeightedLocalityPolicyManager.java
 

[31/50] [abbrv] hadoop git commit: YARN-3666. Federation Intercepting and propagating AM- home RM communications. (Botong Huang via Subru).

2017-08-01 Thread curino
YARN-3666. Federation Intercepting and propagating AM- home RM communications. 
(Botong Huang via Subru).


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2399eb82
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2399eb82
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2399eb82

Branch: refs/heads/YARN-2915
Commit: 2399eb8200609246cb623c74450ca4a2032063cc
Parents: 749ca89
Author: Subru Krishnan 
Authored: Wed May 31 13:21:09 2017 -0700
Committer: Carlo Curino 
Committed: Tue Aug 1 17:22:12 2017 -0700

--
 .../dev-support/findbugs-exclude.xml|   7 +
 .../amrmproxy/FederationInterceptor.java| 510 +++
 .../amrmproxy/TestAMRMProxyService.java |   1 +
 .../amrmproxy/TestFederationInterceptor.java| 167 ++
 .../TestableFederationInterceptor.java  | 133 +
 5 files changed, 818 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2399eb82/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
--
diff --git a/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml 
b/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
index ee51094..034f03c 100644
--- a/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
+++ b/hadoop-yarn-project/hadoop-yarn/dev-support/findbugs-exclude.xml
@@ -594,4 +594,11 @@
 
   
 
+  
+  
+
+
+
+  
+
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2399eb82/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/amrmproxy/FederationInterceptor.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/amrmproxy/FederationInterceptor.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/amrmproxy/FederationInterceptor.java
new file mode 100644
index 000..5f82d69
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/amrmproxy/FederationInterceptor.java
@@ -0,0 +1,510 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.nodemanager.amrmproxy;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;
+import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
+import 
org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
+import 
org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse;
+import 
org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
+import 
org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
+import org.apache.hadoop.yarn.api.records.Container;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.ContainerStatus;
+import org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest;
+import org.apache.hadoop.yarn.api.records.ResourceRequest;
+import org.apache.hadoop.yarn.api.records.UpdateContainerRequest;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import 
org.apache.hadoop.yarn.exceptions.InvalidApplicationMasterRequestException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
+import 

[20/50] [abbrv] hadoop git commit: YARN-6190. Validation and synchronization fixes in LocalityMulticastAMRMProxyPolicy. (Botong Huang via curino)

2017-08-01 Thread curino
YARN-6190. Validation and synchronization fixes in 
LocalityMulticastAMRMProxyPolicy. (Botong Huang via curino)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5c486961
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5c486961
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5c486961

Branch: refs/heads/YARN-2915
Commit: 5c486961cd3a175b122ef86275c99b72964f2c50
Parents: 04f1113
Author: Carlo Curino 
Authored: Tue Feb 28 17:04:20 2017 -0800
Committer: Carlo Curino 
Committed: Tue Aug 1 17:22:11 2017 -0700

--
 .../LocalityMulticastAMRMProxyPolicy.java   | 63 +---
 .../TestLocalityMulticastAMRMProxyPolicy.java   | 21 ++-
 .../policies/manager/BasePolicyManagerTest.java |  3 -
 .../resolver/TestDefaultSubClusterResolver.java |  9 ++-
 .../utils/FederationPoliciesTestUtil.java   |  6 +-
 5 files changed, 73 insertions(+), 29 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5c486961/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/policies/amrmproxy/LocalityMulticastAMRMProxyPolicy.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/policies/amrmproxy/LocalityMulticastAMRMProxyPolicy.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/policies/amrmproxy/LocalityMulticastAMRMProxyPolicy.java
index 283f89e..6f97a51 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/policies/amrmproxy/LocalityMulticastAMRMProxyPolicy.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/policies/amrmproxy/LocalityMulticastAMRMProxyPolicy.java
@@ -32,6 +32,7 @@ import 
org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceRequest;
 import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import 
org.apache.hadoop.yarn.server.federation.policies.FederationPolicyInitializationContext;
 import 
org.apache.hadoop.yarn.server.federation.policies.dao.WeightedPolicyInfo;
 import 
org.apache.hadoop.yarn.server.federation.policies.exceptions.FederationPolicyInitializationException;
@@ -143,10 +144,9 @@ public class LocalityMulticastAMRMProxyPolicy extends 
AbstractAMRMProxyPolicy {
 Map<SubClusterId, Float> newWeightsConverted = new HashMap<>();
 boolean allInactive = true;
 WeightedPolicyInfo policy = getPolicyInfo();
-if (policy.getAMRMPolicyWeights() == null
-|| policy.getAMRMPolicyWeights().size() == 0) {
-  allInactive = false;
-} else {
+
+if (policy.getAMRMPolicyWeights() != null
+&& policy.getAMRMPolicyWeights().size() > 0) {
   for (Map.Entry<SubClusterIdInfo, Float> e : policy.getAMRMPolicyWeights()
   .entrySet()) {
 if (e.getValue() > 0) {
@@ -180,7 +180,6 @@ public class LocalityMulticastAMRMProxyPolicy extends 
AbstractAMRMProxyPolicy {
 
 this.federationFacade =
 policyContext.getFederationStateStoreFacade();
-this.bookkeeper = new AllocationBookkeeper();
 this.homeSubcluster = policyContext.getHomeSubcluster();
 
   }
@@ -197,7 +196,9 @@ public class LocalityMulticastAMRMProxyPolicy extends 
AbstractAMRMProxyPolicy {
   List<ResourceRequest> resourceRequests) throws YarnException {
 
 // object used to accumulate statistics about the answer, initialize with
-// active subclusters.
+// active subclusters. Create a new instance per call because this method
+// can be called concurrently.
+bookkeeper = new AllocationBookkeeper();
 bookkeeper.reinitialize(federationFacade.getSubClusters(true));
 
 List<ResourceRequest> nonLocalizedRequests =
@@ -238,12 +239,16 @@ public class LocalityMulticastAMRMProxyPolicy extends 
AbstractAMRMProxyPolicy {
 // we log altogether later
   }
   if (targetIds != null && targetIds.size() > 0) {
+boolean hasActive = false;
 for (SubClusterId tid : targetIds) {
   if (bookkeeper.isActiveAndEnabled(tid)) {
 bookkeeper.addRackRR(tid, rr);
+hasActive = true;
   }
 }
-continue;
+if (hasActive) {
+  continue;
+}
   }
 
   // Handle node/rack requests that the SubClusterResolver cannot map to
@@ -347,7 +352,7 
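
The per-call AllocationBookkeeper noted in the splitResourceRequests hunk above is the core of the synchronization fix: the accumulator used to be created once in reinitialize() and shared, which is unsafe when splitResourceRequests() is invoked concurrently. A generic sketch of the pattern, with illustrative names rather than the real YARN classes:

  public final class PerCallAccumulatorSketch {

    private static final class Accumulator {
      private int total;
      void add(int n) { total += n; }
      int total() { return total; }
    }

    // Racy variant (what the old code did, conceptually):
    // private Accumulator shared = new Accumulator();  // mutated by every caller

    // Safe variant: each invocation works on its own accumulator.
    int split(int[] requests) {
      Accumulator bookkeeper = new Accumulator(); // fresh instance per call
      for (int r : requests) {
        bookkeeper.add(r);
      }
      return bookkeeper.total();
    }
  }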

[18/50] [abbrv] hadoop git commit: YARN-5413. Create a proxy chain for ResourceManager Admin API in the Router. (Giovanni Matteo Fumarola via Subru).

2017-08-01 Thread curino
YARN-5413. Create a proxy chain for ResourceManager Admin API in the Router. 
(Giovanni Matteo Fumarola via Subru).


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/67846a55
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/67846a55
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/67846a55

Branch: refs/heads/YARN-2915
Commit: 67846a5519b5905c2d925cf4c602f715b653e72c
Parents: 4846069
Author: Subru Krishnan 
Authored: Tue May 9 19:19:27 2017 -0700
Committer: Carlo Curino 
Committed: Tue Aug 1 17:22:11 2017 -0700

--
 .../hadoop/yarn/conf/YarnConfiguration.java |  22 +-
 .../src/main/resources/yarn-default.xml |  25 +-
 .../hadoop/yarn/util/TestLRUCacheHashMap.java   |   2 +-
 .../yarn/server/MockResourceManagerFacade.java  | 120 +-
 .../hadoop/yarn/server/router/Router.java   |  10 +
 .../AbstractClientRequestInterceptor.java   |  11 +-
 .../DefaultClientRequestInterceptor.java|   2 +-
 .../router/clientrm/RouterClientRMService.java  |  16 +-
 .../AbstractRMAdminRequestInterceptor.java  |  90 
 .../DefaultRMAdminRequestInterceptor.java   | 215 ++
 .../rmadmin/RMAdminRequestInterceptor.java  |  65 +++
 .../router/rmadmin/RouterRMAdminService.java| 423 +++
 .../server/router/rmadmin/package-info.java |  20 +
 .../router/clientrm/BaseRouterClientRMTest.java |   2 +-
 .../router/rmadmin/BaseRouterRMAdminTest.java   | 346 +++
 .../rmadmin/MockRMAdminRequestInterceptor.java  |  36 ++
 .../PassThroughRMAdminRequestInterceptor.java   | 148 +++
 .../rmadmin/TestRouterRMAdminService.java   | 219 ++
 18 files changed, 1750 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/67846a55/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index cf9c237..1432867 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -2639,6 +2639,8 @@ public class YarnConfiguration extends Configuration {
 
   public static final String ROUTER_PREFIX = YARN_PREFIX + "router.";
 
+  public static final String ROUTER_BIND_HOST = ROUTER_PREFIX + "bind-host";
+
   public static final String ROUTER_CLIENTRM_PREFIX =
   ROUTER_PREFIX + "clientrm.";
 
@@ -2654,9 +2656,23 @@ public class YarnConfiguration extends Configuration {
   "org.apache.hadoop.yarn.server.router.clientrm."
   + "DefaultClientRequestInterceptor";
 
-  public static final String ROUTER_CLIENTRM_PIPELINE_CACHE_MAX_SIZE =
-  ROUTER_CLIENTRM_PREFIX + "cache-max-size";
-  public static final int DEFAULT_ROUTER_CLIENTRM_PIPELINE_CACHE_MAX_SIZE = 25;
+  public static final String ROUTER_PIPELINE_CACHE_MAX_SIZE =
+  ROUTER_PREFIX + "pipeline.cache-max-size";
+  public static final int DEFAULT_ROUTER_PIPELINE_CACHE_MAX_SIZE = 25;
+
+  public static final String ROUTER_RMADMIN_PREFIX = ROUTER_PREFIX + 
"rmadmin.";
+
+  public static final String ROUTER_RMADMIN_ADDRESS =
+  ROUTER_RMADMIN_PREFIX + ".address";
+  public static final int DEFAULT_ROUTER_RMADMIN_PORT = 8052;
+  public static final String DEFAULT_ROUTER_RMADMIN_ADDRESS =
+  "0.0.0.0:" + DEFAULT_ROUTER_RMADMIN_PORT;
+
+  public static final String ROUTER_RMADMIN_INTERCEPTOR_CLASS_PIPELINE =
+  ROUTER_RMADMIN_PREFIX + "interceptor-class.pipeline";
+  public static final String DEFAULT_ROUTER_RMADMIN_INTERCEPTOR_CLASS =
+  "org.apache.hadoop.yarn.server.router.rmadmin."
+  + "DefaultRMAdminRequestInterceptor";
 
   
   // Other Configs
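
A hypothetical configuration sketch (not from the patch) using the new Router rmadmin constants. "com.example.MyAuditInterceptor" is an invented class, and the pipeline value is assumed to be a comma-separated interceptor chain ending in the default forwarding interceptor, mirroring the existing clientrm pipeline.

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.yarn.conf.YarnConfiguration;

  public class RouterRMAdminPipelineConfig {
    public static void main(String[] args) {
      Configuration conf = new YarnConfiguration();
      // Hypothetical custom interceptor first, default forwarding interceptor last.
      conf.set(YarnConfiguration.ROUTER_RMADMIN_INTERCEPTOR_CLASS_PIPELINE,
          "com.example.MyAuditInterceptor,"
              + "org.apache.hadoop.yarn.server.router.rmadmin."
              + "DefaultRMAdminRequestInterceptor");
      System.out.println(conf.get(
          YarnConfiguration.ROUTER_RMADMIN_INTERCEPTOR_CLASS_PIPELINE));
    }
  }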

http://git-wip-us.apache.org/repos/asf/hadoop/blob/67846a55/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index 94dccd1..8219325 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -3177,14 +3177,35 @@
 
   
 
-   

[13/20] hadoop git commit: YARN-5946: Create YarnConfigurationStore interface and InMemoryConfigurationStore class. Contributed by Jonathan Hung

2017-08-01 Thread xgong
YARN-5946: Create YarnConfigurationStore interface and
InMemoryConfigurationStore class. Contributed by Jonathan Hung


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6023666b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6023666b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6023666b

Branch: refs/heads/YARN-5734
Commit: 6023666bd100f6b9f553f2ffc5e0ed6ed206f239
Parents: fe6832e
Author: Xuan 
Authored: Fri Feb 24 15:58:12 2017 -0800
Committer: Xuan 
Committed: Tue Aug 1 08:46:36 2017 -0700

--
 .../conf/InMemoryConfigurationStore.java|  86 +++
 .../capacity/conf/YarnConfigurationStore.java   | 154 +++
 .../conf/TestYarnConfigurationStore.java|  70 +
 3 files changed, 310 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6023666b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/conf/InMemoryConfigurationStore.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/conf/InMemoryConfigurationStore.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/conf/InMemoryConfigurationStore.java
new file mode 100644
index 000..a208fb9
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/conf/InMemoryConfigurationStore.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.conf;
+
+import org.apache.hadoop.conf.Configuration;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A default implementation of {@link YarnConfigurationStore}. Doesn't offer
+ * persistent configuration storage, just stores the configuration in memory.
+ */
+public class InMemoryConfigurationStore implements YarnConfigurationStore {
+
+  private Configuration schedConf;
+  private LinkedList<LogMutation> pendingMutations;
+  private long pendingId;
+
+  @Override
+  public void initialize(Configuration conf, Configuration schedConf) {
+this.schedConf = schedConf;
+this.pendingMutations = new LinkedList<>();
+this.pendingId = 0;
+  }
+
+  @Override
+  public synchronized long logMutation(LogMutation logMutation) {
+logMutation.setId(++pendingId);
+pendingMutations.add(logMutation);
+return pendingId;
+  }
+
+  @Override
+  public synchronized boolean confirmMutation(long id, boolean isValid) {
+LogMutation mutation = pendingMutations.poll();
+// If confirmMutation is called out of order, discard mutations until id
+// is reached.
+while (mutation != null) {
+  if (mutation.getId() == id) {
+if (isValid) {
+  Map<String, String> mutations = mutation.getUpdates();
+  for (Map.Entry<String, String> kv : mutations.entrySet()) {
+schedConf.set(kv.getKey(), kv.getValue());
+  }
+}
+return true;
+  }
+  mutation = pendingMutations.poll();
+}
+return false;
+  }
+
+  @Override
+  public synchronized Configuration retrieve() {
+return schedConf;
+  }
+
+  @Override
+  public synchronized List<LogMutation> getPendingMutations() {
+return pendingMutations;
+  }
+
+  @Override
+  public List<LogMutation> getConfirmedConfHistory(long fromId) {
+// Unimplemented.
+return null;
+  }
+}
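
A short usage sketch of the store added above (not part of the patch). The LogMutation type is assumed here to be a nested class of YarnConfigurationStore with an (updates, user) constructor; neither detail is visible in this hunk. The sketch lives in the same package as the store, so only Configuration needs importing from elsewhere.

  package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.conf;

  import java.util.HashMap;
  import java.util.Map;

  import org.apache.hadoop.conf.Configuration;

  public class InMemoryConfigurationStoreSketch {
    public static void main(String[] args) {
      InMemoryConfigurationStore store = new InMemoryConfigurationStore();
      store.initialize(new Configuration(), new Configuration());

      Map<String, String> updates = new HashMap<>();
      updates.put("yarn.scheduler.capacity.root.default.capacity", "50");

      // Assumed constructor: LogMutation(Map<String, String> updates, String user).
      YarnConfigurationStore.LogMutation mutation =
          new YarnConfigurationStore.LogMutation(updates, "admin");
      long id = store.logMutation(mutation);

      // Confirming the pending mutation applies it to the stored Configuration.
      store.confirmMutation(id, true);
      System.out.println(store.retrieve()
          .get("yarn.scheduler.capacity.root.default.capacity")); // prints 50
    }
  }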


[02/20] hadoop git commit: Revert "MAPREDUCE-5875. Make Counter limits consistent across JobClient, MRAppMaster, and YarnChild. (Gera Shegalov via kasha)"

2017-08-01 Thread xgong
Revert "MAPREDUCE-5875. Make Counter limits consistent across JobClient, 
MRAppMaster, and YarnChild. (Gera Shegalov via kasha)"

This reverts commit e8a31f2e1c34514fba2f480e8db652f6e2ed65d8.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fbb7d6bc
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fbb7d6bc
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fbb7d6bc

Branch: refs/heads/YARN-5734
Commit: fbb7d6bcbb887ce52ab1e9d5a1fed67a7f8a4be8
Parents: 3e23415
Author: Junping Du 
Authored: Mon Jul 31 14:09:16 2017 -0700
Committer: Junping Du 
Committed: Mon Jul 31 14:09:16 2017 -0700

--
 .../hadoop/mapreduce/v2/app/MRAppMaster.java|  3 -
 .../org/apache/hadoop/mapreduce/Cluster.java| 16 ++--
 .../apache/hadoop/mapreduce/JobSubmitter.java   |  2 -
 .../hadoop/mapreduce/counters/Limits.java   |  5 --
 .../mapreduce/jobhistory/HistoryViewer.java | 16 
 .../hadoop/mapreduce/v2/hs/CompletedJob.java| 15 
 .../apache/hadoop/mapreduce/v2/TestMRJobs.java  | 87 +++-
 7 files changed, 21 insertions(+), 123 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fbb7d6bc/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
index 1445481..8c9f605 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
@@ -62,7 +62,6 @@ import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.TypeConverter;
-import org.apache.hadoop.mapreduce.counters.Limits;
 import org.apache.hadoop.mapreduce.jobhistory.AMStartedEvent;
 import org.apache.hadoop.mapreduce.jobhistory.EventReader;
 import org.apache.hadoop.mapreduce.jobhistory.EventType;
@@ -1281,8 +1280,6 @@ public class MRAppMaster extends CompositeService {
 
 // finally set the job classloader
 MRApps.setClassLoader(jobClassLoader, getConfig());
-// set job classloader if configured
-Limits.init(getConfig());
 
 if (initFailed) {
   JobEvent initFailedEvent = new JobEvent(job.getID(), 
JobEventType.JOB_INIT_FAILED);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fbb7d6bc/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
index fbf6806..4245daf 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Cluster.java
@@ -213,15 +213,15 @@ public class Cluster {
   public Job getJob(JobID jobId) throws IOException, InterruptedException {
 JobStatus status = client.getJobStatus(jobId);
 if (status != null) {
-  final JobConf conf = new JobConf();
-  final Path jobPath = new Path(client.getFilesystemName(),
-  status.getJobFile());
-  final FileSystem fs = FileSystem.get(jobPath.toUri(), getConf());
+  JobConf conf;
   try {
-conf.addResource(fs.open(jobPath), jobPath.toString());
-  } catch (FileNotFoundException fnf) {
-if (LOG.isWarnEnabled()) {
-  LOG.warn("Job conf missing on cluster", fnf);
+conf = new JobConf(status.getJobFile());
+  } catch (RuntimeException ex) {
+// If job file doesn't exist it means we can't find the job
+if (ex.getCause() instanceof FileNotFoundException) {
+  return null;
+} else {
+  throw ex;
 }
   }
   return Job.getInstance(this, status, conf);


[16/20] hadoop git commit: YARN-5949. Add pluggable configuration ACL policy interface and implementation. (Jonathan Hung via wangda)

2017-08-01 Thread xgong
YARN-5949. Add pluggable configuration ACL policy interface and implementation. 
(Jonathan Hung via wangda)

Change-Id: Ib98e82ff753bede21fcab2e6ca9ec1e7a5a2008f


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2c1dcf5c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2c1dcf5c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2c1dcf5c

Branch: refs/heads/YARN-5734
Commit: 2c1dcf5cea698950bfa63943d78794ed59f59f92
Parents: 93c1747
Author: Wangda Tan 
Authored: Mon May 22 13:38:31 2017 -0700
Committer: Xuan 
Committed: Tue Aug 1 08:46:40 2017 -0700

--
 .../hadoop/yarn/conf/YarnConfiguration.java |   3 +
 .../src/main/resources/yarn-default.xml |  11 ++
 .../ConfigurationMutationACLPolicy.java |  47 ++
 .../ConfigurationMutationACLPolicyFactory.java  |  49 ++
 .../DefaultConfigurationMutationACLPolicy.java  |  45 ++
 .../scheduler/MutableConfScheduler.java |  19 ++-
 .../scheduler/MutableConfigurationProvider.java |   8 +-
 .../scheduler/capacity/CapacityScheduler.java   |   6 +-
 .../conf/MutableCSConfigurationProvider.java| 151 +-
 ...ueueAdminConfigurationMutationACLPolicy.java |  96 
 .../resourcemanager/webapp/RMWebServices.java   | 131 +---
 .../TestConfigurationMutationACLPolicies.java   | 154 +++
 .../TestMutableCSConfigurationProvider.java |  40 +++--
 13 files changed, 610 insertions(+), 150 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c1dcf5c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index ce413f6..01db626 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -627,6 +627,9 @@ public class YarnConfiguration extends Configuration {
   public static final String DEFAULT_CONFIGURATION_STORE =
   MEMORY_CONFIGURATION_STORE;
 
+  public static final String RM_SCHEDULER_MUTATION_ACL_POLICY_CLASS =
+  YARN_PREFIX + "scheduler.configuration.mutation.acl-policy.class";
+
   public static final String YARN_AUTHORIZATION_PROVIDER = YARN_PREFIX
   + "authorization-provider";
   private static final List<String> RM_SERVICES_ADDRESS_CONF_KEYS_HTTP =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c1dcf5c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index 74ff747..a0bed5f 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -3148,4 +3148,15 @@
 memory
   
 
+  <property>
+    <description>
+      The class to use for configuration mutation ACL policy if using a mutable
+      configuration provider. Controls whether a mutation request is allowed.
+      The DefaultConfigurationMutationACLPolicy checks if the requestor is a
+      YARN admin.
+    </description>
+    <name>yarn.scheduler.configuration.mutation.acl-policy.class</name>
+    <value>org.apache.hadoop.yarn.server.resourcemanager.scheduler.DefaultConfigurationMutationACLPolicy</value>
+  </property>
+
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/2c1dcf5c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/ConfigurationMutationACLPolicy.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/ConfigurationMutationACLPolicy.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/ConfigurationMutationACLPolicy.java
new file mode 100644
index 000..724487b
--- /dev/null
+++ 

[11/20] hadoop git commit: HADOOP-14245. Use Mockito.when instead of Mockito.stub. Contributed by Andras Bokor.

2017-08-01 Thread xgong
HADOOP-14245. Use Mockito.when instead of Mockito.stub. Contributed by Andras 
Bokor.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b38a1eea
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b38a1eea
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b38a1eea

Branch: refs/heads/YARN-5734
Commit: b38a1eea8e2917989d83d169a7b5773163e6832e
Parents: ceacadc
Author: Akira Ajisaka 
Authored: Tue Aug 1 15:15:43 2017 +0900
Committer: Akira Ajisaka 
Committed: Tue Aug 1 15:15:43 2017 +0900

--
 .../org/apache/hadoop/TestGenericRefresh.java   | 28 ++--
 .../util/TestCgroupsLCEResourcesHandler.java|  2 +-
 2 files changed, 15 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b38a1eea/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/TestGenericRefresh.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/TestGenericRefresh.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/TestGenericRefresh.java
index 3c73c28..dcd91c7 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/TestGenericRefresh.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/TestGenericRefresh.java
@@ -72,16 +72,16 @@ public class TestGenericRefresh {
   public void setUp() throws Exception {
 // Register Handlers, first one just sends an ok response
 firstHandler = Mockito.mock(RefreshHandler.class);
-Mockito.stub(firstHandler.handleRefresh(Mockito.anyString(), 
Mockito.any(String[].class)))
-  .toReturn(RefreshResponse.successResponse());
+Mockito.when(firstHandler.handleRefresh(Mockito.anyString(), 
Mockito.any(String[].class)))
+  .thenReturn(RefreshResponse.successResponse());
 RefreshRegistry.defaultRegistry().register("firstHandler", firstHandler);
 
 // Second handler has conditional response for testing args
 secondHandler = Mockito.mock(RefreshHandler.class);
-Mockito.stub(secondHandler.handleRefresh("secondHandler", new 
String[]{"one", "two"}))
-  .toReturn(new RefreshResponse(3, "three"));
-Mockito.stub(secondHandler.handleRefresh("secondHandler", new 
String[]{"one"}))
-  .toReturn(new RefreshResponse(2, "two"));
+Mockito.when(secondHandler.handleRefresh("secondHandler", new 
String[]{"one", "two"}))
+  .thenReturn(new RefreshResponse(3, "three"));
+Mockito.when(secondHandler.handleRefresh("secondHandler", new 
String[]{"one"}))
+  .thenReturn(new RefreshResponse(2, "two"));
 RefreshRegistry.defaultRegistry().register("secondHandler", secondHandler);
   }
 
@@ -181,12 +181,12 @@ public class TestGenericRefresh {
   public void testMultipleReturnCodeMerging() throws Exception {
 // Two handlers which return two non-zero values
 RefreshHandler handlerOne = Mockito.mock(RefreshHandler.class);
-Mockito.stub(handlerOne.handleRefresh(Mockito.anyString(), 
Mockito.any(String[].class)))
-  .toReturn(new RefreshResponse(23, "Twenty Three"));
+Mockito.when(handlerOne.handleRefresh(Mockito.anyString(), 
Mockito.any(String[].class)))
+  .thenReturn(new RefreshResponse(23, "Twenty Three"));
 
 RefreshHandler handlerTwo = Mockito.mock(RefreshHandler.class);
-Mockito.stub(handlerTwo.handleRefresh(Mockito.anyString(), 
Mockito.any(String[].class)))
-  .toReturn(new RefreshResponse(10, "Ten"));
+Mockito.when(handlerTwo.handleRefresh(Mockito.anyString(), 
Mockito.any(String[].class)))
+  .thenReturn(new RefreshResponse(10, "Ten"));
 
 // Then registered to the same ID
 RefreshRegistry.defaultRegistry().register("shared", handlerOne);
@@ -210,12 +210,12 @@ public class TestGenericRefresh {
   public void testExceptionResultsInNormalError() throws Exception {
 // In this test, we ensure that all handlers are called even if we throw 
an exception in one
 RefreshHandler exceptionalHandler = Mockito.mock(RefreshHandler.class);
-Mockito.stub(exceptionalHandler.handleRefresh(Mockito.anyString(), 
Mockito.any(String[].class)))
-  .toThrow(new RuntimeException("Exceptional Handler Throws Exception"));
+Mockito.when(exceptionalHandler.handleRefresh(Mockito.anyString(), 
Mockito.any(String[].class)))
+  .thenThrow(new RuntimeException("Exceptional Handler Throws Exception"));
 
 RefreshHandler otherExceptionalHandler = 
Mockito.mock(RefreshHandler.class);
-Mockito.stub(otherExceptionalHandler.handleRefresh(Mockito.anyString(), 
Mockito.any(String[].class)))
-  .toThrow(new RuntimeException("More Exceptions"));
+Mockito.when(otherExceptionalHandler.handleRefresh(Mockito.anyString(), 

[14/20] hadoop git commit: YARN-5948. Implement MutableConfigurationManager for handling storage into configuration store

2017-08-01 Thread xgong
YARN-5948. Implement MutableConfigurationManager for handling storage into 
configuration store


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fc19c35f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fc19c35f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fc19c35f

Branch: refs/heads/YARN-5734
Commit: fc19c35f6faa320cbee3ef5220fe5592fca6ff9e
Parents: 6023666
Author: Jonathan Hung 
Authored: Wed Mar 1 16:03:01 2017 -0800
Committer: Xuan 
Committed: Tue Aug 1 08:46:37 2017 -0700

--
 .../hadoop/yarn/conf/YarnConfiguration.java |  6 ++
 .../src/main/resources/yarn-default.xml | 12 +++
 .../scheduler/MutableConfigurationProvider.java | 35 
 .../scheduler/capacity/CapacityScheduler.java   | 14 ++-
 .../CapacitySchedulerConfiguration.java |  3 +
 .../capacity/conf/CSConfigurationProvider.java  |  3 +-
 .../conf/MutableCSConfigurationProvider.java| 94 
 .../conf/YarnConfigurationStoreFactory.java | 46 ++
 .../TestMutableCSConfigurationProvider.java | 83 +
 9 files changed, 291 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fc19c35f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 93437e3..ce413f6 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -621,6 +621,12 @@ public class YarnConfiguration extends Configuration {
   public static final String DEFAULT_RM_CONFIGURATION_PROVIDER_CLASS =
   "org.apache.hadoop.yarn.LocalConfigurationProvider";
 
+  public static final String SCHEDULER_CONFIGURATION_STORE_CLASS =
+  YARN_PREFIX + "scheduler.configuration.store.class";
+  public static final String MEMORY_CONFIGURATION_STORE = "memory";
+  public static final String DEFAULT_CONFIGURATION_STORE =
+  MEMORY_CONFIGURATION_STORE;
+
   public static final String YARN_AUTHORIZATION_PROVIDER = YARN_PREFIX
   + "authorization-provider";
   private static final List<String> RM_SERVICES_ADDRESS_CONF_KEYS_HTTP =

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fc19c35f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index 7ddcfcd..74ff747 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -3136,4 +3136,16 @@
 user-group
   
 
+  <property>
+    <description>
+      The type of configuration store to use for storing scheduler
+      configurations, if using a mutable configuration provider.
+      Keywords such as "memory" map to certain configuration store
+      implementations. If keyword is not found, try to load this
+      value as a class.
+    </description>
+    <name>yarn.scheduler.configuration.store.class</name>
+    <value>memory</value>
+  </property>
+
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fc19c35f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/MutableConfigurationProvider.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/MutableConfigurationProvider.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/MutableConfigurationProvider.java
new file mode 100644
index 000..da30a2b
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/MutableConfigurationProvider.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed 

[15/20] hadoop git commit: YARN-5952. Create REST API for changing YARN scheduler configurations. (Jonathan Hung via wangda)

2017-08-01 Thread xgong
YARN-5952. Create REST API for changing YARN scheduler configurations. 
(Jonathan Hung via wangda)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/93c17472
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/93c17472
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/93c17472

Branch: refs/heads/YARN-5734
Commit: 93c17472ee46924c2c4e8df8400a5625166dee7f
Parents: fc19c35
Author: Wangda Tan 
Authored: Mon Apr 3 10:12:01 2017 -0700
Committer: Xuan 
Committed: Tue Aug 1 08:46:38 2017 -0700

--
 .../scheduler/MutableConfScheduler.java |  40 ++
 .../scheduler/MutableConfigurationProvider.java |   5 +-
 .../scheduler/capacity/CapacityScheduler.java   |  16 +-
 .../conf/InMemoryConfigurationStore.java|   6 +-
 .../conf/MutableCSConfigurationProvider.java|  24 +-
 .../resourcemanager/webapp/RMWebServices.java   | 172 ++-
 .../webapp/dao/QueueConfigInfo.java |  57 +++
 .../webapp/dao/QueueConfigsUpdateInfo.java  |  60 +++
 .../TestMutableCSConfigurationProvider.java |   6 +-
 .../TestRMWebServicesConfigurationMutation.java | 477 +++
 10 files changed, 851 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/93c17472/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/MutableConfScheduler.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/MutableConfScheduler.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/MutableConfScheduler.java
new file mode 100644
index 000..35e36e1
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/MutableConfScheduler.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.resourcemanager.scheduler;
+
+import org.apache.hadoop.security.UserGroupInformation;
+
+import java.io.IOException;
+import java.util.Map;
+
+/**
+ * Interface for a scheduler that supports changing configuration at runtime.
+ *
+ */
+public interface MutableConfScheduler extends ResourceScheduler {
+
+  /**
+   * Update the scheduler's configuration.
+   * @param user Caller of this update
+   * @param confUpdate key-value map of the configuration update
+   * @throws IOException if update is invalid
+   */
+  void updateConfiguration(UserGroupInformation user,
+      Map<String, String> confUpdate) throws IOException;
+
+}
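
For orientation, a minimal caller-side sketch of the new interface, using only what the diff above exposes (UserGroupInformation plus a key/value map). The class name and the capacity key below are illustrative, not part of the patch:

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.MutableConfScheduler;

// Hypothetical caller: push one CapacityScheduler override through the
// mutable-configuration entry point added by this patch.
public class SchedulerConfUpdateSketch {
  public static void applyUpdate(MutableConfScheduler scheduler) throws IOException {
    Map<String, String> confUpdate = new HashMap<>();
    // Illustrative key; the keys accepted depend on the configured provider.
    confUpdate.put("yarn.scheduler.capacity.root.default.capacity", "50");
    UserGroupInformation caller = UserGroupInformation.getCurrentUser();
    scheduler.updateConfiguration(caller, confUpdate); // throws IOException if rejected
  }
}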

http://git-wip-us.apache.org/repos/asf/hadoop/blob/93c17472/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/MutableConfigurationProvider.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/MutableConfigurationProvider.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/MutableConfigurationProvider.java
index da30a2b..889c3bc 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/MutableConfigurationProvider.java
+++ 

[09/20] hadoop git commit: Revert "YARN-6873. Moving logging APIs over to slf4j in hadoop-yarn-server-applicationhistoryservice. Contributed by Yeliang Cang."

2017-08-01 Thread xgong
Revert "YARN-6873. Moving logging APIs over to slf4j in 
hadoop-yarn-server-applicationhistoryservice. Contributed by Yeliang Cang."

This reverts commit 1a78c0ff016097930edf68e8278f826b637e918c.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a4aa1cb4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a4aa1cb4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a4aa1cb4

Branch: refs/heads/YARN-5734
Commit: a4aa1cb40504299d3401008fdabc795eafb28713
Parents: 9586b0e
Author: Akira Ajisaka 
Authored: Tue Aug 1 12:12:25 2017 +0900
Committer: Akira Ajisaka 
Committed: Tue Aug 1 12:15:03 2017 +0900

--
 .../ApplicationHistoryClientService.java|  8 ++---
 .../ApplicationHistoryManagerImpl.java  |  8 ++---
 ...pplicationHistoryManagerOnTimelineStore.java |  8 ++---
 .../ApplicationHistoryServer.java   | 10 +++---
 .../FileSystemApplicationHistoryStore.java  | 22 ++--
 .../webapp/AHSWebServices.java  |  7 ++--
 .../webapp/NavBlock.java|  6 ++--
 .../timeline/KeyValueBasedTimelineStore.java|  8 ++---
 .../server/timeline/LeveldbTimelineStore.java   | 35 ++--
 .../yarn/server/timeline/RollingLevelDB.java| 15 -
 .../timeline/RollingLevelDBTimelineStore.java   | 22 ++--
 .../server/timeline/TimelineDataManager.java|  7 ++--
 .../recovery/LeveldbTimelineStateStore.java | 30 -
 .../timeline/security/TimelineACLsManager.java  |  7 ++--
 ...lineDelegationTokenSecretManagerService.java |  8 ++---
 .../timeline/webapp/TimelineWebServices.java|  7 ++--
 .../TestFileSystemApplicationHistoryStore.java  |  8 ++---
 .../timeline/TestLeveldbTimelineStore.java  |  2 +-
 18 files changed, 107 insertions(+), 111 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a4aa1cb4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
index 7d57048..73d5d39 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
@@ -22,6 +22,8 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.ArrayList;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -59,13 +61,11 @@ import org.apache.hadoop.yarn.ipc.YarnRPC;
 import 
org.apache.hadoop.yarn.server.timeline.security.authorize.TimelinePolicyProvider;
 
 import com.google.common.base.Preconditions;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public class ApplicationHistoryClientService extends AbstractService implements
 ApplicationHistoryProtocol {
-  private static final Logger LOG =
-  LoggerFactory.getLogger(ApplicationHistoryClientService.class);
+  private static final Log LOG = LogFactory
+.getLog(ApplicationHistoryClientService.class);
   private ApplicationHistoryManager history;
   private Server server;
   private InetSocketAddress bindAddress;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a4aa1cb4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java
 

[04/20] hadoop git commit: HADOOP-14644. Increase max heap size of Maven javadoc plugin. Contributed by Andras Bokor.

2017-08-01 Thread xgong
HADOOP-14644. Increase max heap size of Maven javadoc plugin. Contributed by 
Andras Bokor.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/2be9412b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/2be9412b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/2be9412b

Branch: refs/heads/YARN-5734
Commit: 2be9412b73ae4308c5cee0186520fc2ad6d54e43
Parents: a7d8586
Author: Andrew Wang 
Authored: Mon Jul 31 15:09:34 2017 -0700
Committer: Andrew Wang 
Committed: Mon Jul 31 15:09:34 2017 -0700

--
 hadoop-project-dist/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/2be9412b/hadoop-project-dist/pom.xml
--
diff --git a/hadoop-project-dist/pom.xml b/hadoop-project-dist/pom.xml
index 6e73c0e..9da5e53 100644
--- a/hadoop-project-dist/pom.xml
+++ b/hadoop-project-dist/pom.xml
@@ -102,7 +102,7 @@
 org.apache.maven.plugins
 maven-javadoc-plugin
 
-  512m
+  768m
   true
   false
   ${maven.compile.source}





[19/20] hadoop git commit: YARN-5953 addendum: Move QueueConfigInfo and SchedConfUpdateInfo to package org.apache.hadoop.yarn.webapp.dao

2017-08-01 Thread xgong
YARN-5953 addendum: Move QueueConfigInfo and SchedConfUpdateInfo to package 
org.apache.hadoop.yarn.webapp.dao


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/79701d92
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/79701d92
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/79701d92

Branch: refs/heads/YARN-5734
Commit: 79701d924a00dbfd1a78667596dd54f525de457f
Parents: f15309e
Author: Xuan 
Authored: Mon Jul 31 11:49:05 2017 -0700
Committer: Xuan 
Committed: Tue Aug 1 08:46:43 2017 -0700

--
 .../hadoop/yarn/webapp/dao/QueueConfigInfo.java | 57 +
 .../yarn/webapp/dao/SchedConfUpdateInfo.java| 85 
 .../webapp/dao/QueueConfigInfo.java | 57 -
 .../webapp/dao/SchedConfUpdateInfo.java | 85 
 4 files changed, 142 insertions(+), 142 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/79701d92/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/dao/QueueConfigInfo.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/dao/QueueConfigInfo.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/dao/QueueConfigInfo.java
new file mode 100644
index 000..d1d91c2
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/dao/QueueConfigInfo.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.webapp.dao;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+/**
+ * Information for adding or updating a queue to scheduler configuration
+ * for this queue.
+ */
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.FIELD)
+public class QueueConfigInfo {
+
+  @XmlElement(name = "queueName")
+  private String queue;
+
+  private HashMap<String, String> params = new HashMap<>();
+
+  public QueueConfigInfo() { }
+
+  public QueueConfigInfo(String queue, Map<String, String> params) {
+this.queue = queue;
+this.params = new HashMap<>(params);
+  }
+
+  public String getQueue() {
+return this.queue;
+  }
+
+  public HashMap<String, String> getParams() {
+return this.params;
+  }
+
+}
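
As a rough sketch of how this DAO is populated before the webapp layer marshals it to XML/JSON; the parameter names below are examples only, not keys defined by the patch:

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.yarn.webapp.dao.QueueConfigInfo;

// Build the per-queue payload for a scheduler-configuration update request.
public class QueueConfigInfoSketch {
  public static QueueConfigInfo sampleUpdate() {
    Map<String, String> params = new HashMap<>();
    params.put("capacity", "25");          // illustrative parameter
    params.put("maximum-capacity", "50");  // illustrative parameter
    return new QueueConfigInfo("root.default", params);
  }
}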

http://git-wip-us.apache.org/repos/asf/hadoop/blob/79701d92/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/dao/SchedConfUpdateInfo.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/dao/SchedConfUpdateInfo.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/dao/SchedConfUpdateInfo.java
new file mode 100644
index 000..bb84096
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/dao/SchedConfUpdateInfo.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 

[17/20] hadoop git commit: YARN-6575. Support global configuration mutation in MutableConfProvider. (Jonathan Hung via Xuan Gong)

2017-08-01 Thread xgong
YARN-6575. Support global configuration mutation in MutableConfProvider. 
(Jonathan Hung via Xuan Gong)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/087477c2
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/087477c2
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/087477c2

Branch: refs/heads/YARN-5734
Commit: 087477c2d366c5393caec904dc1c4c8ae2548c5d
Parents: 2c1dcf5
Author: Xuan 
Authored: Mon Jun 5 16:30:38 2017 -0700
Committer: Xuan 
Committed: Tue Aug 1 08:46:41 2017 -0700

--
 .../ConfigurationMutationACLPolicy.java |   4 +-
 .../DefaultConfigurationMutationACLPolicy.java  |   4 +-
 .../scheduler/MutableConfScheduler.java |   4 +-
 .../scheduler/MutableConfigurationProvider.java |   4 +-
 .../scheduler/capacity/CapacityScheduler.java   |   4 +-
 .../conf/MutableCSConfigurationProvider.java|  10 +-
 ...ueueAdminConfigurationMutationACLPolicy.java |  22 +++-
 .../resourcemanager/webapp/RMWebServices.java   |   4 +-
 .../webapp/dao/QueueConfigsUpdateInfo.java  |  60 ---
 .../webapp/dao/SchedConfUpdateInfo.java |  69 +
 .../TestConfigurationMutationACLPolicies.java   |  28 -
 .../TestMutableCSConfigurationProvider.java |  10 +-
 .../TestRMWebServicesConfigurationMutation.java | 101 +--
 13 files changed, 205 insertions(+), 119 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/087477c2/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/ConfigurationMutationACLPolicy.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/ConfigurationMutationACLPolicy.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/ConfigurationMutationACLPolicy.java
index 724487b..3a388fe 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/ConfigurationMutationACLPolicy.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/ConfigurationMutationACLPolicy.java
@@ -21,7 +21,7 @@ package 
org.apache.hadoop.yarn.server.resourcemanager.scheduler;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
-import 
org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.QueueConfigsUpdateInfo;
+import 
org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.SchedConfUpdateInfo;
 
 /**
  * Interface for determining whether configuration mutations are allowed.
@@ -41,7 +41,7 @@ public interface ConfigurationMutationACLPolicy {
* @param confUpdate configurations to be updated
* @return whether provided mutation is allowed or not
*/
-  boolean isMutationAllowed(UserGroupInformation user, QueueConfigsUpdateInfo
+  boolean isMutationAllowed(UserGroupInformation user, SchedConfUpdateInfo
   confUpdate);
 
 }
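
To show how the renamed DAO flows into the policy check, a hedged sketch follows. The SchedConfUpdateInfo accessor used here (getUpdateQueueInfo) is assumed from the class introduced by this series and is not visible in the excerpt above; package names match this commit, before the later move to org.apache.hadoop.yarn.webapp.dao:

import java.io.IOException;
import java.util.Collections;

import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ConfigurationMutationACLPolicy;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.QueueConfigInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.SchedConfUpdateInfo;

// Ask the configured ACL policy whether the caller may change root.default.
public class MutationAclCheckSketch {
  public static boolean mayUpdate(ConfigurationMutationACLPolicy policy) throws IOException {
    SchedConfUpdateInfo update = new SchedConfUpdateInfo();
    update.getUpdateQueueInfo().add(                      // assumed accessor
        new QueueConfigInfo("root.default",
            Collections.singletonMap("capacity", "40"))); // illustrative value
    UserGroupInformation user = UserGroupInformation.getCurrentUser();
    return policy.isMutationAllowed(user, update);
  }
}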

http://git-wip-us.apache.org/repos/asf/hadoop/blob/087477c2/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/DefaultConfigurationMutationACLPolicy.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/DefaultConfigurationMutationACLPolicy.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/DefaultConfigurationMutationACLPolicy.java
index 680c3b8..6648668 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/DefaultConfigurationMutationACLPolicy.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/DefaultConfigurationMutationACLPolicy.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import 

[03/20] hadoop git commit: HADOOP-14420. generateReports property is not applicable for maven-site-plugin:attach-descriptor goal. Contributed by Andras Bokor.

2017-08-01 Thread xgong
HADOOP-14420. generateReports property is not applicable for 
maven-site-plugin:attach-descriptor goal. Contributed by Andras Bokor.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a7d85866
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a7d85866
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a7d85866

Branch: refs/heads/YARN-5734
Commit: a7d858668ab0e458867b659499fe6a4363284ee2
Parents: fbb7d6b
Author: Andrew Wang 
Authored: Mon Jul 31 15:07:22 2017 -0700
Committer: Andrew Wang 
Committed: Mon Jul 31 15:07:22 2017 -0700

--
 pom.xml | 3 ---
 1 file changed, 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a7d85866/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 29524a4..d82cd9f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -389,9 +389,6 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xs
 
   attach-descriptor
 
-
-  true
-
   
 
   





[20/20] hadoop git commit: YARN-5947: Create LeveldbConfigurationStore class using Leveldb as backing store. Contributed by Jonathan Hung

2017-08-01 Thread xgong
YARN-5947: Create LeveldbConfigurationStore class using Leveldb as backing 
store. Contributed by Jonathan Hung


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d3e2b6fd
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d3e2b6fd
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d3e2b6fd

Branch: refs/heads/YARN-5734
Commit: d3e2b6fd5b7701a7d5f2ac33b09c72f520da8d6e
Parents: 79701d9
Author: Xuan 
Authored: Mon Jul 31 16:48:40 2017 -0700
Committer: Xuan 
Committed: Tue Aug 1 08:46:44 2017 -0700

--
 .../hadoop/yarn/conf/YarnConfiguration.java |  13 +
 .../src/main/resources/yarn-default.xml |  29 ++
 .../scheduler/MutableConfigurationProvider.java |   6 +
 .../scheduler/capacity/CapacityScheduler.java   |   3 +
 .../conf/LeveldbConfigurationStore.java | 314 +++
 .../conf/MutableCSConfigurationProvider.java|  38 ++-
 .../capacity/conf/YarnConfigurationStore.java   |  14 +-
 .../conf/TestYarnConfigurationStore.java|   3 +-
 8 files changed, 414 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d3e2b6fd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 01db626..c3644cd 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -624,8 +624,21 @@ public class YarnConfiguration extends Configuration {
   public static final String SCHEDULER_CONFIGURATION_STORE_CLASS =
   YARN_PREFIX + "scheduler.configuration.store.class";
   public static final String MEMORY_CONFIGURATION_STORE = "memory";
+  public static final String LEVELDB_CONFIGURATION_STORE = "leveldb";
   public static final String DEFAULT_CONFIGURATION_STORE =
   MEMORY_CONFIGURATION_STORE;
+  public static final String RM_SCHEDCONF_STORE_PATH = YARN_PREFIX
+  + "scheduler.configuration.leveldb-store.path";
+
+  public static final String RM_SCHEDCONF_LEVELDB_COMPACTION_INTERVAL_SECS =
+  YARN_PREFIX
+  + "scheduler.configuration.leveldb-store.compaction-interval-secs";
+  public static final long
+  DEFAULT_RM_SCHEDCONF_LEVELDB_COMPACTION_INTERVAL_SECS = 60 * 60 * 24L;
+
+  public static final String RM_SCHEDCONF_LEVELDB_MAX_LOGS =
+  YARN_PREFIX + "scheduler.configuration.leveldb-store.max-logs";
+  public static final int DEFAULT_RM_SCHEDCONF_LEVELDB_MAX_LOGS = 1000;
 
   public static final String RM_SCHEDULER_MUTATION_ACL_POLICY_CLASS =
   YARN_PREFIX + "scheduler.configuration.mutation.acl-policy.class";
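
For reference, a small sketch of selecting and tuning the new store purely through the constants introduced above; the store path value is an example, not a default:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

// Select the LevelDB-backed scheduler configuration store and tune it.
public class LeveldbConfStoreSetupSketch {
  public static Configuration configure() {
    Configuration conf = new YarnConfiguration();
    conf.set(YarnConfiguration.SCHEDULER_CONFIGURATION_STORE_CLASS,
        YarnConfiguration.LEVELDB_CONFIGURATION_STORE);   // "leveldb"
    conf.set(YarnConfiguration.RM_SCHEDCONF_STORE_PATH,
        "/var/lib/hadoop-yarn/confstore");                // example path
    conf.setLong(
        YarnConfiguration.RM_SCHEDCONF_LEVELDB_COMPACTION_INTERVAL_SECS,
        YarnConfiguration.DEFAULT_RM_SCHEDCONF_LEVELDB_COMPACTION_INTERVAL_SECS);
    conf.setInt(YarnConfiguration.RM_SCHEDCONF_LEVELDB_MAX_LOGS,
        YarnConfiguration.DEFAULT_RM_SCHEDCONF_LEVELDB_MAX_LOGS);
    return conf;
  }
}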

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d3e2b6fd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index a0bed5f..6de9ab6 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -3159,4 +3159,33 @@
 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.DefaultConfigurationMutationACLPolicy
   
 
+  <property>
+    <description>
+      The storage path for LevelDB implementation of configuration store,
+      when yarn.scheduler.configuration.store.class is configured to be
+      "leveldb".
+    </description>
+    <name>yarn.scheduler.configuration.leveldb-store.path</name>
+    <value>${hadoop.tmp.dir}/yarn/system/confstore</value>
+  </property>
+
+  <property>
+    <description>
+      The compaction interval for LevelDB configuration store in secs,
+      when yarn.scheduler.configuration.store.class is configured to be
+      "leveldb". Default is one day.
+    </description>
+    <name>yarn.scheduler.configuration.leveldb-store.compaction-interval-secs</name>
+    <value>86400</value>
+  </property>
+
+  <property>
+    <description>
+      The max number of configuration change log entries kept in LevelDB config
+      store, when yarn.scheduler.configuration.store.class is configured to be
+      "leveldb". Default is 1000.
+    </description>
+    <name>yarn.scheduler.configuration.leveldb-store.max-logs</name>
+    <value>1000</value>
+  </property>
 </configuration>


[10/20] hadoop git commit: MAPREDUCE-6921. TestUmbilicalProtocolWithJobToken#testJobTokenRpc fails.

2017-08-01 Thread xgong
MAPREDUCE-6921. TestUmbilicalProtocolWithJobToken#testJobTokenRpc fails.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ceacadc5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ceacadc5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ceacadc5

Branch: refs/heads/YARN-5734
Commit: ceacadc51e58bb94ad3f3669488515a61e886d88
Parents: a4aa1cb
Author: Akira Ajisaka 
Authored: Tue Aug 1 14:56:42 2017 +0900
Committer: Akira Ajisaka 
Committed: Tue Aug 1 14:56:42 2017 +0900

--
 .../TestUmbilicalProtocolWithJobToken.java  | 22 +---
 1 file changed, 10 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ceacadc5/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestUmbilicalProtocolWithJobToken.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestUmbilicalProtocolWithJobToken.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestUmbilicalProtocolWithJobToken.java
index d1004b6..5d53663 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestUmbilicalProtocolWithJobToken.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestUmbilicalProtocolWithJobToken.java
@@ -29,12 +29,10 @@ import static org.mockito.Mockito.doReturn;
 import java.net.InetSocketAddress;
 import java.security.PrivilegedExceptionAction;
 
-import org.apache.commons.logging.*;
-import org.apache.commons.logging.impl.Log4JLogger;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.ipc.Client;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.Server;
@@ -49,9 +47,10 @@ import org.apache.hadoop.security.SaslRpcClient;
 import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
-
-import org.apache.log4j.Level;
+import org.slf4j.Logger;
+import org.slf4j.event.Level;
 import org.junit.Test;
+import static org.slf4j.LoggerFactory.getLogger;
 
 /** Unit tests for using Job Token over RPC. 
  * 
@@ -62,8 +61,7 @@ import org.junit.Test;
 public class TestUmbilicalProtocolWithJobToken {
   private static final String ADDRESS = "0.0.0.0";
 
-  public static final Log LOG = LogFactory
-  .getLog(TestUmbilicalProtocolWithJobToken.class);
+  public static final Logger LOG = 
getLogger(TestUmbilicalProtocolWithJobToken.class);
 
   private static Configuration conf;
   static {
@@ -73,11 +71,11 @@ public class TestUmbilicalProtocolWithJobToken {
   }
 
   static {
-((Log4JLogger) Client.LOG).getLogger().setLevel(Level.ALL);
-((Log4JLogger) Server.LOG).getLogger().setLevel(Level.ALL);
-((Log4JLogger) SaslRpcClient.LOG).getLogger().setLevel(Level.ALL);
-((Log4JLogger) SaslRpcServer.LOG).getLogger().setLevel(Level.ALL);
-((Log4JLogger) SaslInputStream.LOG).getLogger().setLevel(Level.ALL);
+GenericTestUtils.setLogLevel(Client.LOG, Level.TRACE);
+GenericTestUtils.setLogLevel(Server.LOG, Level.TRACE);
+GenericTestUtils.setLogLevel(SaslRpcClient.LOG, Level.TRACE);
+GenericTestUtils.setLogLevel(SaslRpcServer.LOG, Level.TRACE);
+GenericTestUtils.setLogLevel(SaslInputStream.LOG, Level.TRACE);
   }
 
   @Test





[08/20] hadoop git commit: HADOOP-14397. Pull up the builder pattern to FileSystem and add AbstractContractCreateTest for it. (Lei (Eddy) Xu)

2017-08-01 Thread xgong
HADOOP-14397. Pull up the builder pattern to FileSystem and add 
AbstractContractCreateTest for it. (Lei (Eddy) Xu)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9586b0e2
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9586b0e2
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9586b0e2

Branch: refs/heads/YARN-5734
Commit: 9586b0e24fce29c278134658e68b8c47cd9d8c51
Parents: abbf412
Author: Lei Xu 
Authored: Mon Jul 31 20:04:57 2017 -0700
Committer: Lei Xu 
Committed: Mon Jul 31 20:12:40 2017 -0700

--
 .../hadoop/fs/FSDataOutputStreamBuilder.java|  4 +-
 .../java/org/apache/hadoop/fs/FileSystem.java   | 24 --
 .../apache/hadoop/fs/TestLocalFileSystem.java   |  2 +-
 .../fs/contract/AbstractContractAppendTest.java | 33 ++-
 .../fs/contract/AbstractContractCreateTest.java | 90 ++--
 .../hadoop/fs/contract/ContractTestUtils.java   | 43 --
 .../hadoop/hdfs/DistributedFileSystem.java  |  3 +-
 7 files changed, 154 insertions(+), 45 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9586b0e2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
index 0527202..8608a7b 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
@@ -44,8 +44,8 @@ import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_
  *
  * To create missing parent directory, use {@link #recursive()}.
  */
-@InterfaceAudience.Private
-@InterfaceStability.Unstable
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
 public abstract class FSDataOutputStreamBuilder
     <S extends FSDataOutputStream, B extends FSDataOutputStreamBuilder<S, B>> {
   private final FileSystem fs;
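
Since the builder is now Public/Evolving, a short usage sketch follows; the path and payload are illustrative, and recursive() is the option the class javadoc points at for creating missing parents:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Create a file through the builder API rather than a create() overload.
public class BuilderCreateSketch {
  public static void write(Configuration conf) throws Exception {
    FileSystem fs = FileSystem.get(conf);
    Path path = new Path("/tmp/builder-demo.txt");   // example path
    try (FSDataOutputStream out = fs.createFile(path)
        .recursive()                                 // create missing parent dirs
        .build()) {
      out.writeUTF("hello, builder");
    }
  }
}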

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9586b0e2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index d7cd7dd..fc7b9b2 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -4153,9 +4153,21 @@ public abstract class FileSystem extends Configured 
implements Closeable {
 
 @Override
 public FSDataOutputStream build() throws IOException {
-  return getFS().create(getPath(), getPermission(), getFlags(),
-  getBufferSize(), getReplication(), getBlockSize(), getProgress(),
-  getChecksumOpt());
+  if (getFlags().contains(CreateFlag.CREATE) ||
+  getFlags().contains(CreateFlag.OVERWRITE)) {
+if (isRecursive()) {
+  return getFS().create(getPath(), getPermission(), getFlags(),
+  getBufferSize(), getReplication(), getBlockSize(), getProgress(),
+  getChecksumOpt());
+} else {
+  return getFS().createNonRecursive(getPath(), getPermission(),
+  getFlags(), getBufferSize(), getReplication(), getBlockSize(),
+  getProgress());
+}
+  } else if (getFlags().contains(CreateFlag.APPEND)) {
+return getFS().append(getPath(), getBufferSize(), getProgress());
+  }
+  throw new IOException("Must specify either create, overwrite or append");
 }
 
 @Override
@@ -4174,8 +4186,7 @@ public abstract class FileSystem extends Configured 
implements Closeable {
* HADOOP-14384. Temporarily reduce the visibility of method before the
* builder interface becomes stable.
*/
-  @InterfaceAudience.Private
-  protected FSDataOutputStreamBuilder createFile(Path path) {
+  public FSDataOutputStreamBuilder createFile(Path path) {
 return new FileSystemDataOutputStreamBuilder(this, path)
 .create().overwrite(true);
   }
@@ -4185,8 +4196,7 @@ public abstract class FileSystem extends Configured 
implements Closeable {
* @param path file path.
* @return a {@link FSDataOutputStreamBuilder} to build file append request.
*/
-  @InterfaceAudience.Private
-  protected FSDataOutputStreamBuilder appendFile(Path path) {
+  public FSDataOutputStreamBuilder appendFile(Path path) {
 return new 

[06/20] hadoop git commit: YARN-6873. Moving logging APIs over to slf4j in hadoop-yarn-server-applicationhistoryservice. Contributed by Yeliang Cang.

2017-08-01 Thread xgong
YARN-6873. Moving logging APIs over to slf4j in 
hadoop-yarn-server-applicationhistoryservice. Contributed by Yeliang Cang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1a78c0ff
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1a78c0ff
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1a78c0ff

Branch: refs/heads/YARN-5734
Commit: 1a78c0ff016097930edf68e8278f826b637e918c
Parents: ea56812
Author: Akira Ajisaka 
Authored: Tue Aug 1 10:53:32 2017 +0900
Committer: Akira Ajisaka 
Committed: Tue Aug 1 10:53:32 2017 +0900

--
 .../ApplicationHistoryClientService.java|  8 ++---
 .../ApplicationHistoryManagerImpl.java  |  8 ++---
 ...pplicationHistoryManagerOnTimelineStore.java |  8 ++---
 .../ApplicationHistoryServer.java   | 10 +++---
 .../FileSystemApplicationHistoryStore.java  | 22 ++--
 .../webapp/AHSWebServices.java  |  7 ++--
 .../webapp/NavBlock.java|  6 ++--
 .../timeline/KeyValueBasedTimelineStore.java|  8 ++---
 .../server/timeline/LeveldbTimelineStore.java   | 35 ++--
 .../yarn/server/timeline/RollingLevelDB.java| 15 +
 .../timeline/RollingLevelDBTimelineStore.java   | 22 ++--
 .../server/timeline/TimelineDataManager.java|  7 ++--
 .../recovery/LeveldbTimelineStateStore.java | 30 -
 .../timeline/security/TimelineACLsManager.java  |  7 ++--
 ...lineDelegationTokenSecretManagerService.java |  8 ++---
 .../timeline/webapp/TimelineWebServices.java|  7 ++--
 .../TestFileSystemApplicationHistoryStore.java  |  8 ++---
 .../timeline/TestLeveldbTimelineStore.java  |  2 +-
 18 files changed, 111 insertions(+), 107 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a78c0ff/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
index 73d5d39..7d57048 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryClientService.java
@@ -22,8 +22,6 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.ArrayList;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -61,11 +59,13 @@ import org.apache.hadoop.yarn.ipc.YarnRPC;
 import 
org.apache.hadoop.yarn.server.timeline.security.authorize.TimelinePolicyProvider;
 
 import com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class ApplicationHistoryClientService extends AbstractService implements
 ApplicationHistoryProtocol {
-  private static final Log LOG = LogFactory
-.getLog(ApplicationHistoryClientService.class);
+  private static final Logger LOG =
+  LoggerFactory.getLogger(ApplicationHistoryClientService.class);
   private ApplicationHistoryManager history;
   private Server server;
   private InetSocketAddress bindAddress;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a78c0ff/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryManagerImpl.java
 

[12/20] hadoop git commit: YARN-5951. Changes to allow CapacityScheduler to use configuration store

2017-08-01 Thread xgong
YARN-5951. Changes to allow CapacityScheduler to use configuration store


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fe6832ec
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fe6832ec
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fe6832ec

Branch: refs/heads/YARN-5734
Commit: fe6832ec5f09468c4eaef1ec0afc9563fea010fe
Parents: b38a1ee
Author: Jonathan Hung 
Authored: Mon Jan 30 19:03:48 2017 -0800
Committer: Xuan 
Committed: Tue Aug 1 08:46:35 2017 -0700

--
 .../scheduler/capacity/CapacityScheduler.java   | 36 +--
 .../CapacitySchedulerConfiguration.java | 10 +++
 .../capacity/conf/CSConfigurationProvider.java  | 46 ++
 .../conf/FileBasedCSConfigurationProvider.java  | 67 
 .../scheduler/capacity/conf/package-info.java   | 29 +
 .../capacity/TestCapacityScheduler.java |  4 +-
 6 files changed, 170 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fe6832ec/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacityScheduler.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacityScheduler.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacityScheduler.java
index 2ccaf63..a6feb09 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacityScheduler.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacityScheduler.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity;
 
 import java.io.IOException;
-import java.io.InputStream;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.EnumSet;
@@ -105,6 +104,8 @@ import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.activities.Activi
 import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.activities.ActivityDiagnosticConstant;
 import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.activities.ActivityState;
 import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.activities.AllocationState;
+import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.conf.CSConfigurationProvider;
+import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.conf.FileBasedCSConfigurationProvider;
 import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.preemption.KillableContainer;
 import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.preemption.PreemptionManager;
 import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.AssignmentInformation;
@@ -163,6 +164,7 @@ public class CapacityScheduler extends
 
   private int offswitchPerHeartbeatLimit;
 
+  private CSConfigurationProvider csConfProvider;
 
   @Override
   public void setConf(Configuration conf) {
@@ -286,7 +288,18 @@ public class CapacityScheduler extends
   IOException {
 try {
   writeLock.lock();
-  this.conf = loadCapacitySchedulerConfiguration(configuration);
+  String confProviderStr = configuration.get(
+  CapacitySchedulerConfiguration.CS_CONF_PROVIDER,
+  CapacitySchedulerConfiguration.DEFAULT_CS_CONF_PROVIDER);
+  if (confProviderStr.equals(
+  CapacitySchedulerConfiguration.FILE_CS_CONF_PROVIDER)) {
+this.csConfProvider = new FileBasedCSConfigurationProvider(rmContext);
+  } else {
+throw new IOException("Invalid CS configuration provider: " +
+confProviderStr);
+  }
+  this.csConfProvider.init(configuration);
+  this.conf = this.csConfProvider.loadConfiguration(configuration);
   validateConf(this.conf);
   this.minimumAllocation = this.conf.getMinimumAllocation();
   initMaximumResourceCapability(this.conf.getMaximumAllocation());
@@ -393,7 +406,7 @@ public class CapacityScheduler extends
   writeLock.lock();
   Configuration configuration = new Configuration(newConf);
   CapacitySchedulerConfiguration oldConf = this.conf;
-  this.conf = loadCapacitySchedulerConfiguration(configuration);
+  this.conf = csConfProvider.loadConfiguration(configuration);
  

[07/20] hadoop git commit: HADOOP-14343. Wrong pid file name in error message when starting secure daemon

2017-08-01 Thread xgong
HADOOP-14343. Wrong pid file name in error message when starting secure daemon

Signed-off-by: Allen Wittenauer 


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/abbf4129
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/abbf4129
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/abbf4129

Branch: refs/heads/YARN-5734
Commit: abbf4129a24c99fbce6d70b191ec19cf0d17e9be
Parents: 1a78c0f
Author: Andras Bokor 
Authored: Mon Jul 31 20:03:43 2017 -0700
Committer: Allen Wittenauer 
Committed: Mon Jul 31 20:03:43 2017 -0700

--
 .../hadoop-common/src/main/bin/hadoop-functions.sh | 6 ++
 1 file changed, 2 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/abbf4129/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh 
b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
index 8ac1b0c..2744643 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
@@ -1873,11 +1873,9 @@ function hadoop_start_secure_daemon_wrapper
 (( counter++ ))
   done
 
-  # this is for the daemon pid creation
   #shellcheck disable=SC2086
-  echo $! > "${jsvcpidfile}" 2>/dev/null
-  if [[ $? -gt 0 ]]; then
-hadoop_error "ERROR:  Cannot write ${daemonname} pid ${daemonpidfile}."
+  if ! echo $! > "${jsvcpidfile}"; then
+hadoop_error "ERROR:  Cannot write ${daemonname} pid ${jsvcpidfile}."
   fi
 
   sleep 1





[01/20] hadoop git commit: HDFS-12082. BlockInvalidateLimit value is incorrectly set after namenode heartbeat interval reconfigured. Contributed by Weiwei Yang. [Forced Update!]

2017-08-01 Thread xgong
Repository: hadoop
Updated Branches:
  refs/heads/YARN-5734 fa2c5451f -> d3e2b6fd5 (forced update)


HDFS-12082. BlockInvalidateLimit value is incorrectly set after namenode 
heartbeat interval reconfigured. Contributed by Weiwei Yang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3e23415a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3e23415a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3e23415a

Branch: refs/heads/YARN-5734
Commit: 3e23415a92d43ce8818124f0b180227a52a33eaf
Parents: 0fd6d0f
Author: Arpit Agarwal 
Authored: Mon Jul 31 11:33:55 2017 -0700
Committer: Arpit Agarwal 
Committed: Mon Jul 31 11:33:55 2017 -0700

--
 .../server/blockmanagement/DatanodeManager.java | 20 +++
 .../namenode/TestNameNodeReconfigure.java   | 36 
 2 files changed, 50 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3e23415a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
index 1d09751..2c5779a 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
@@ -290,12 +290,19 @@ public class DatanodeManager {
 DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_DEFAULT); // 5 
minutes
 this.heartbeatExpireInterval = 2 * heartbeatRecheckInterval
 + 10 * 1000 * heartbeatIntervalSeconds;
-final int blockInvalidateLimit = 
Math.max(20*(int)(heartbeatIntervalSeconds),
+
+// Effected block invalidate limit is the bigger value between
+// value configured in hdfs-site.xml, and 20 * HB interval.
+final int configuredBlockInvalidateLimit = conf.getInt(
+DFSConfigKeys.DFS_BLOCK_INVALIDATE_LIMIT_KEY,
 DFSConfigKeys.DFS_BLOCK_INVALIDATE_LIMIT_DEFAULT);
-this.blockInvalidateLimit = conf.getInt(
-DFSConfigKeys.DFS_BLOCK_INVALIDATE_LIMIT_KEY, blockInvalidateLimit);
+final int countedBlockInvalidateLimit = 20*(int)(heartbeatIntervalSeconds);
+this.blockInvalidateLimit = Math.max(countedBlockInvalidateLimit,
+configuredBlockInvalidateLimit);
 LOG.info(DFSConfigKeys.DFS_BLOCK_INVALIDATE_LIMIT_KEY
-+ "=" + this.blockInvalidateLimit);
++ ": configured=" + configuredBlockInvalidateLimit
++ ", counted=" + countedBlockInvalidateLimit
++ ", effected=" + blockInvalidateLimit);
 
 this.checkIpHostnameInRegistration = conf.getBoolean(
 DFSConfigKeys.DFS_NAMENODE_DATANODE_REGISTRATION_IP_HOSTNAME_CHECK_KEY,
@@ -403,7 +410,8 @@ public class DatanodeManager {
 return fsClusterStats;
   }
 
-  int getBlockInvalidateLimit() {
+  @VisibleForTesting
+  public int getBlockInvalidateLimit() {
 return blockInvalidateLimit;
   }
 
@@ -1911,7 +1919,7 @@ public class DatanodeManager {
 this.heartbeatExpireInterval = 2L * recheckInterval + 10 * 1000
 * intervalSeconds;
 this.blockInvalidateLimit = Math.max(20 * (int) (intervalSeconds),
-DFSConfigKeys.DFS_BLOCK_INVALIDATE_LIMIT_DEFAULT);
+blockInvalidateLimit);
   }
 
   /**

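A tiny standalone sketch of the new "effected" limit arithmetic, with example numbers only:

// With a 3s heartbeat the counted limit is 20 * 3 = 60, so a configured
// dfs.block.invalidate.limit of 1000 wins; with a 600s heartbeat the counted
// limit (12000) would win instead.
public class BlockInvalidateLimitSketch {
  static int effectiveLimit(long heartbeatIntervalSeconds, int configuredLimit) {
    final int countedLimit = 20 * (int) heartbeatIntervalSeconds;
    return Math.max(countedLimit, configuredLimit);
  }

  public static void main(String[] args) {
    System.out.println(effectiveLimit(3, 1000));    // prints 1000
    System.out.println(effectiveLimit(600, 1000));  // prints 12000
  }
}
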
http://git-wip-us.apache.org/repos/asf/hadoop/blob/3e23415a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeReconfigure.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeReconfigure.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeReconfigure.java
index 6b553df..c0de63a 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeReconfigure.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeReconfigure.java
@@ -40,6 +40,7 @@ import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY;
 import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_DEFAULT;
 import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY;
 import static 
org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_DEFAULT;
+import static 

hadoop git commit: Addendum for YARN-5648.

2017-08-01 Thread varunsaxena
Repository: hadoop
Updated Branches:
  refs/heads/YARN-5355-branch-2 235006bdb -> 4811d481c


Addendum for YARN-5648.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4811d481
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4811d481
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4811d481

Branch: refs/heads/YARN-5355-branch-2
Commit: 4811d481c15237b4ba14221e3e82bcab3437a28c
Parents: 235006b
Author: Varun Saxena 
Authored: Tue Aug 1 19:04:09 2017 +0530
Committer: Varun Saxena 
Committed: Tue Aug 1 19:04:09 2017 +0530

--
 .../yarn/client/api/impl/TimelineV2ClientImpl.java |  4 ++--
 .../hadoop/yarn/server/TestRMNMSecretKeys.java | 17 +++--
 .../security/TestTimelineAuthFilterForV2.java  |  4 ++--
 3 files changed, 19 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4811d481/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
index cd30d98..ad869e3 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
@@ -203,8 +203,8 @@ public class TimelineV2ClientImpl extends TimelineV2Client {
 .put(ClientResponse.class, obj);
   }
 
-  protected void putObjects(URI base, String path,
-      MultivaluedMap<String, String> params, Object obj)
+  protected void putObjects(final URI base, final String path,
+      final MultivaluedMap<String, String> params, final Object obj)
   throws IOException, YarnException {
 ClientResponse resp = null;
 try {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4811d481/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestRMNMSecretKeys.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestRMNMSecretKeys.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestRMNMSecretKeys.java
index ba14491..56872d3 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestRMNMSecretKeys.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestRMNMSecretKeys.java
@@ -19,7 +19,11 @@
 package org.apache.hadoop.yarn.server;
 
 import java.io.File;
+import java.io.FileOutputStream;
 import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.charset.StandardCharsets;
 import java.util.UUID;
 
 import org.junit.AfterClass;
@@ -35,7 +39,6 @@ import 
org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerResp
 import org.apache.hadoop.yarn.server.api.records.MasterKey;
 import org.apache.hadoop.yarn.server.resourcemanager.MockNM;
 import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
-import org.apache.kerby.util.IOUtil;
 import org.junit.Test;
 
 public class TestRMNMSecretKeys {
@@ -59,10 +62,20 @@ public class TestRMNMSecretKeys {
 "kdc = localhost:88\n}\n" +
 "[domain_realm]\n" +
 "localhost = APACHE.ORG";
-IOUtil.writeFile(content, krb5ConfFile);
+writeFile(content, krb5ConfFile);
 System.setProperty(KRB5_CONF, krb5ConfFile.getAbsolutePath());
   }
 
+  private static void writeFile(String content, File file) throws IOException {
+  FileOutputStream outputStream = new FileOutputStream(file);
+  FileChannel fc = outputStream.getChannel();
+
+  ByteBuffer buffer =
+  ByteBuffer.wrap(content.getBytes(StandardCharsets.UTF_8));
+  fc.write(buffer);
+  outputStream.close();
+  }
+
   @AfterClass
   public static void tearDown() throws IOException {
 KRB5_CONF_ROOT_DIR.delete();


hadoop git commit: HADOOP-14245. Use Mockito.when instead of Mockito.stub. Contributed by Andras Bokor.

2017-08-01 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 338d84ab7 -> 72bde1dad


HADOOP-14245. Use Mockito.when instead of Mockito.stub. Contributed by Andras 
Bokor.

(cherry picked from commit b38a1eea8e2917989d83d169a7b5773163e6832e)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/72bde1da
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/72bde1da
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/72bde1da

Branch: refs/heads/branch-2
Commit: 72bde1dad5655c97905c94ed9f697b11c58734c7
Parents: 338d84a
Author: Akira Ajisaka 
Authored: Tue Aug 1 15:15:43 2017 +0900
Committer: Akira Ajisaka 
Committed: Tue Aug 1 15:16:21 2017 +0900

--
 .../org/apache/hadoop/TestGenericRefresh.java   | 28 ++--
 .../util/TestCgroupsLCEResourcesHandler.java|  2 +-
 2 files changed, 15 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/72bde1da/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/TestGenericRefresh.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/TestGenericRefresh.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/TestGenericRefresh.java
index 3c73c28..dcd91c7 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/TestGenericRefresh.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/TestGenericRefresh.java
@@ -72,16 +72,16 @@ public class TestGenericRefresh {
   public void setUp() throws Exception {
 // Register Handlers, first one just sends an ok response
 firstHandler = Mockito.mock(RefreshHandler.class);
-Mockito.stub(firstHandler.handleRefresh(Mockito.anyString(), 
Mockito.any(String[].class)))
-  .toReturn(RefreshResponse.successResponse());
+Mockito.when(firstHandler.handleRefresh(Mockito.anyString(), 
Mockito.any(String[].class)))
+  .thenReturn(RefreshResponse.successResponse());
 RefreshRegistry.defaultRegistry().register("firstHandler", firstHandler);
 
 // Second handler has conditional response for testing args
 secondHandler = Mockito.mock(RefreshHandler.class);
-Mockito.stub(secondHandler.handleRefresh("secondHandler", new 
String[]{"one", "two"}))
-  .toReturn(new RefreshResponse(3, "three"));
-Mockito.stub(secondHandler.handleRefresh("secondHandler", new 
String[]{"one"}))
-  .toReturn(new RefreshResponse(2, "two"));
+Mockito.when(secondHandler.handleRefresh("secondHandler", new 
String[]{"one", "two"}))
+  .thenReturn(new RefreshResponse(3, "three"));
+Mockito.when(secondHandler.handleRefresh("secondHandler", new 
String[]{"one"}))
+  .thenReturn(new RefreshResponse(2, "two"));
 RefreshRegistry.defaultRegistry().register("secondHandler", secondHandler);
   }
 
@@ -181,12 +181,12 @@ public class TestGenericRefresh {
   public void testMultipleReturnCodeMerging() throws Exception {
 // Two handlers which return two non-zero values
 RefreshHandler handlerOne = Mockito.mock(RefreshHandler.class);
-Mockito.stub(handlerOne.handleRefresh(Mockito.anyString(), Mockito.any(String[].class)))
-  .toReturn(new RefreshResponse(23, "Twenty Three"));
+Mockito.when(handlerOne.handleRefresh(Mockito.anyString(), Mockito.any(String[].class)))
+  .thenReturn(new RefreshResponse(23, "Twenty Three"));
 
 RefreshHandler handlerTwo = Mockito.mock(RefreshHandler.class);
-Mockito.stub(handlerTwo.handleRefresh(Mockito.anyString(), Mockito.any(String[].class)))
-  .toReturn(new RefreshResponse(10, "Ten"));
+Mockito.when(handlerTwo.handleRefresh(Mockito.anyString(), Mockito.any(String[].class)))
+  .thenReturn(new RefreshResponse(10, "Ten"));
 
 // Then registered to the same ID
 RefreshRegistry.defaultRegistry().register("shared", handlerOne);
@@ -210,12 +210,12 @@ public class TestGenericRefresh {
   public void testExceptionResultsInNormalError() throws Exception {
 // In this test, we ensure that all handlers are called even if we throw an exception in one
 RefreshHandler exceptionalHandler = Mockito.mock(RefreshHandler.class);
-Mockito.stub(exceptionalHandler.handleRefresh(Mockito.anyString(), Mockito.any(String[].class)))
-  .toThrow(new RuntimeException("Exceptional Handler Throws Exception"));
+Mockito.when(exceptionalHandler.handleRefresh(Mockito.anyString(), Mockito.any(String[].class)))
+  .thenThrow(new RuntimeException("Exceptional Handler Throws Exception"));
 
 RefreshHandler otherExceptionalHandler = Mockito.mock(RefreshHandler.class);
-Mockito.stub(otherExceptionalHandler.handleRefresh(Mockito.anyString(), Mockito.any(String[].class)))
-
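
For readers following the patch above, the replacement API is Mockito.when(...).thenReturn(...). Below is a minimal, self-contained sketch of the same stubbing pattern, not taken from the patch itself: GreetingService is a hypothetical stand-in for an interface like RefreshHandler, and only mockito-core plus JUnit 4 are assumed on the test classpath.

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.junit.Test;

public class WhenVsStubExample {

  /** Hypothetical stand-in for an interface such as RefreshHandler. */
  interface GreetingService {
    String greet(String name);
  }

  @Test
  public void stubbingWithWhenThenReturn() {
    GreetingService service = mock(GreetingService.class);

    // Old style (removed by the patch): Mockito.stub(...).toReturn(...)
    // New style: Mockito.when(...).thenReturn(...)
    when(service.greet("alice")).thenReturn("hello alice");

    assertEquals("hello alice", service.greet("alice"));
  }
}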

hadoop git commit: HADOOP-14245. Use Mockito.when instead of Mockito.stub. Contributed by Andras Bokor.

2017-08-01 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/trunk ceacadc51 -> b38a1eea8


HADOOP-14245. Use Mockito.when instead of Mockito.stub. Contributed by Andras Bokor.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b38a1eea
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b38a1eea
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b38a1eea

Branch: refs/heads/trunk
Commit: b38a1eea8e2917989d83d169a7b5773163e6832e
Parents: ceacadc
Author: Akira Ajisaka 
Authored: Tue Aug 1 15:15:43 2017 +0900
Committer: Akira Ajisaka 
Committed: Tue Aug 1 15:15:43 2017 +0900

--
 .../org/apache/hadoop/TestGenericRefresh.java   | 28 ++--
 .../util/TestCgroupsLCEResourcesHandler.java|  2 +-
 2 files changed, 15 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b38a1eea/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/TestGenericRefresh.java
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/TestGenericRefresh.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/TestGenericRefresh.java
index 3c73c28..dcd91c7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/TestGenericRefresh.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/TestGenericRefresh.java
@@ -72,16 +72,16 @@ public class TestGenericRefresh {
   public void setUp() throws Exception {
 // Register Handlers, first one just sends an ok response
 firstHandler = Mockito.mock(RefreshHandler.class);
-Mockito.stub(firstHandler.handleRefresh(Mockito.anyString(), Mockito.any(String[].class)))
-  .toReturn(RefreshResponse.successResponse());
+Mockito.when(firstHandler.handleRefresh(Mockito.anyString(), Mockito.any(String[].class)))
+  .thenReturn(RefreshResponse.successResponse());
 RefreshRegistry.defaultRegistry().register("firstHandler", firstHandler);
 
 // Second handler has conditional response for testing args
 secondHandler = Mockito.mock(RefreshHandler.class);
-Mockito.stub(secondHandler.handleRefresh("secondHandler", new String[]{"one", "two"}))
-  .toReturn(new RefreshResponse(3, "three"));
-Mockito.stub(secondHandler.handleRefresh("secondHandler", new String[]{"one"}))
-  .toReturn(new RefreshResponse(2, "two"));
+Mockito.when(secondHandler.handleRefresh("secondHandler", new String[]{"one", "two"}))
+  .thenReturn(new RefreshResponse(3, "three"));
+Mockito.when(secondHandler.handleRefresh("secondHandler", new String[]{"one"}))
+  .thenReturn(new RefreshResponse(2, "two"));
 RefreshRegistry.defaultRegistry().register("secondHandler", secondHandler);
   }
 
@@ -181,12 +181,12 @@ public class TestGenericRefresh {
   public void testMultipleReturnCodeMerging() throws Exception {
 // Two handlers which return two non-zero values
 RefreshHandler handlerOne = Mockito.mock(RefreshHandler.class);
-Mockito.stub(handlerOne.handleRefresh(Mockito.anyString(), Mockito.any(String[].class)))
-  .toReturn(new RefreshResponse(23, "Twenty Three"));
+Mockito.when(handlerOne.handleRefresh(Mockito.anyString(), Mockito.any(String[].class)))
+  .thenReturn(new RefreshResponse(23, "Twenty Three"));
 
 RefreshHandler handlerTwo = Mockito.mock(RefreshHandler.class);
-Mockito.stub(handlerTwo.handleRefresh(Mockito.anyString(), Mockito.any(String[].class)))
-  .toReturn(new RefreshResponse(10, "Ten"));
+Mockito.when(handlerTwo.handleRefresh(Mockito.anyString(), Mockito.any(String[].class)))
+  .thenReturn(new RefreshResponse(10, "Ten"));
 
 // Then registered to the same ID
 RefreshRegistry.defaultRegistry().register("shared", handlerOne);
@@ -210,12 +210,12 @@ public class TestGenericRefresh {
   public void testExceptionResultsInNormalError() throws Exception {
 // In this test, we ensure that all handlers are called even if we throw an exception in one
 RefreshHandler exceptionalHandler = Mockito.mock(RefreshHandler.class);
-Mockito.stub(exceptionalHandler.handleRefresh(Mockito.anyString(), Mockito.any(String[].class)))
-  .toThrow(new RuntimeException("Exceptional Handler Throws Exception"));
+Mockito.when(exceptionalHandler.handleRefresh(Mockito.anyString(), Mockito.any(String[].class)))
+  .thenThrow(new RuntimeException("Exceptional Handler Throws Exception"));
 
 RefreshHandler otherExceptionalHandler = Mockito.mock(RefreshHandler.class);
-Mockito.stub(otherExceptionalHandler.handleRefresh(Mockito.anyString(), Mockito.any(String[].class)))
-  .toThrow(new RuntimeException("More Exceptions"));
+
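
The hunk above is cut off mid-change, but the visible lines also show the exception-stubbing counterpart of the same migration: stub(...).toThrow(...) becomes when(...).thenThrow(...). A minimal sketch under the same assumptions as the earlier example (hypothetical GreetingService interface, mockito-core and JUnit 4 on the classpath):

import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.junit.Test;

public class WhenThenThrowExample {

  /** Hypothetical stand-in for an interface such as RefreshHandler. */
  interface GreetingService {
    String greet(String name);
  }

  @Test
  public void stubbingAThrownException() {
    GreetingService service = mock(GreetingService.class);

    // Old style: Mockito.stub(...).toThrow(...); new style: when(...).thenThrow(...)
    when(service.greet("bob")).thenThrow(new RuntimeException("boom"));

    try {
      service.greet("bob");
      fail("expected RuntimeException");
    } catch (RuntimeException expected) {
      // the stubbed exception is propagated to the caller
    }
  }
}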

hadoop git commit: MAPREDUCE-6921. TestUmbilicalProtocolWithJobToken#testJobTokenRpc fails. Contributed by Sonia Garudi.

2017-08-01 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 f09d20cff -> 338d84ab7


MAPREDUCE-6921. TestUmbilicalProtocolWithJobToken#testJobTokenRpc fails. Contributed by Sonia Garudi.

(cherry picked from commit ceacadc51e58bb94ad3f3669488515a61e886d88)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/338d84ab
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/338d84ab
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/338d84ab

Branch: refs/heads/branch-2
Commit: 338d84ab71e1be1822322598cc6d90ffa7ad4a77
Parents: f09d20c
Author: Akira Ajisaka 
Authored: Tue Aug 1 14:56:42 2017 +0900
Committer: Akira Ajisaka 
Committed: Tue Aug 1 15:02:20 2017 +0900

--
 .../TestUmbilicalProtocolWithJobToken.java  | 22 +---
 1 file changed, 10 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/338d84ab/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestUmbilicalProtocolWithJobToken.java
--
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestUmbilicalProtocolWithJobToken.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestUmbilicalProtocolWithJobToken.java
index 8b3ba3a..4a04e4e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestUmbilicalProtocolWithJobToken.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestUmbilicalProtocolWithJobToken.java
@@ -29,12 +29,10 @@ import static org.mockito.Mockito.doReturn;
 import java.net.InetSocketAddress;
 import java.security.PrivilegedExceptionAction;
 
-import org.apache.commons.logging.*;
-import org.apache.commons.logging.impl.Log4JLogger;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.ipc.Client;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.Server;
@@ -49,9 +47,10 @@ import org.apache.hadoop.security.SaslRpcClient;
 import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
-
-import org.apache.log4j.Level;
+import org.slf4j.Logger;
+import org.slf4j.event.Level;
 import org.junit.Test;
+import static org.slf4j.LoggerFactory.getLogger;
 
 /** Unit tests for using Job Token over RPC. 
  * 
@@ -62,8 +61,7 @@ import org.junit.Test;
 public class TestUmbilicalProtocolWithJobToken {
   private static final String ADDRESS = "0.0.0.0";
 
-  public static final Log LOG = LogFactory
-  .getLog(TestUmbilicalProtocolWithJobToken.class);
+  public static final Logger LOG = getLogger(TestUmbilicalProtocolWithJobToken.class);
 
   private static Configuration conf;
   static {
@@ -73,11 +71,11 @@ public class TestUmbilicalProtocolWithJobToken {
   }
 
   static {
-((Log4JLogger) Client.LOG).getLogger().setLevel(Level.ALL);
-((Log4JLogger) Server.LOG).getLogger().setLevel(Level.ALL);
-((Log4JLogger) SaslRpcClient.LOG).getLogger().setLevel(Level.ALL);
-((Log4JLogger) SaslRpcServer.LOG).getLogger().setLevel(Level.ALL);
-((Log4JLogger) SaslInputStream.LOG).getLogger().setLevel(Level.ALL);
+GenericTestUtils.setLogLevel(Client.LOG, Level.TRACE);
+GenericTestUtils.setLogLevel(Server.LOG, Level.TRACE);
+GenericTestUtils.setLogLevel(SaslRpcClient.LOG, Level.TRACE);
+GenericTestUtils.setLogLevel(SaslRpcServer.LOG, Level.TRACE);
+GenericTestUtils.setLogLevel(SaslInputStream.LOG, Level.TRACE);
   }
 
   @Test
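
The change above replaces the commons-logging Log plus Log4JLogger cast with an slf4j Logger and GenericTestUtils.setLogLevel, which takes the slf4j logger and an org.slf4j.event.Level. A minimal sketch of that pattern follows; the class name TraceLoggingExample is hypothetical, and it assumes the hadoop-common test artifact (which provides GenericTestUtils) is on the classpath with a log4j 1.2 slf4j binding, as in Hadoop's own test runtime.

import org.apache.hadoop.test.GenericTestUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.event.Level;

public class TraceLoggingExample {

  // slf4j logger instead of commons-logging's LogFactory.getLog(...)
  private static final Logger LOG =
      LoggerFactory.getLogger(TraceLoggingExample.class);

  static {
    // Replaces the old ((Log4JLogger) LOG).getLogger().setLevel(...) cast,
    // which only works when the logger is directly backed by log4j 1.x.
    GenericTestUtils.setLogLevel(LOG, Level.TRACE);
  }

  public static void main(String[] args) {
    LOG.trace("trace logging is now enabled for this class");
  }
}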





hadoop git commit: MAPREDUCE-6921. TestUmbilicalProtocolWithJobToken#testJobTokenRpc fails.

2017-08-01 Thread aajisaka
Repository: hadoop
Updated Branches:
  refs/heads/trunk a4aa1cb40 -> ceacadc51


MAPREDUCE-6921. TestUmbilicalProtocolWithJobToken#testJobTokenRpc fails.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ceacadc5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ceacadc5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ceacadc5

Branch: refs/heads/trunk
Commit: ceacadc51e58bb94ad3f3669488515a61e886d88
Parents: a4aa1cb
Author: Akira Ajisaka 
Authored: Tue Aug 1 14:56:42 2017 +0900
Committer: Akira Ajisaka 
Committed: Tue Aug 1 14:56:42 2017 +0900

--
 .../TestUmbilicalProtocolWithJobToken.java  | 22 +---
 1 file changed, 10 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ceacadc5/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestUmbilicalProtocolWithJobToken.java
--
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestUmbilicalProtocolWithJobToken.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestUmbilicalProtocolWithJobToken.java
index d1004b6..5d53663 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestUmbilicalProtocolWithJobToken.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestUmbilicalProtocolWithJobToken.java
@@ -29,12 +29,10 @@ import static org.mockito.Mockito.doReturn;
 import java.net.InetSocketAddress;
 import java.security.PrivilegedExceptionAction;
 
-import org.apache.commons.logging.*;
-import org.apache.commons.logging.impl.Log4JLogger;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
 
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.ipc.Client;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.Server;
@@ -49,9 +47,10 @@ import org.apache.hadoop.security.SaslRpcClient;
 import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
-
-import org.apache.log4j.Level;
+import org.slf4j.Logger;
+import org.slf4j.event.Level;
 import org.junit.Test;
+import static org.slf4j.LoggerFactory.getLogger;
 
 /** Unit tests for using Job Token over RPC. 
  * 
@@ -62,8 +61,7 @@ import org.junit.Test;
 public class TestUmbilicalProtocolWithJobToken {
   private static final String ADDRESS = "0.0.0.0";
 
-  public static final Log LOG = LogFactory
-  .getLog(TestUmbilicalProtocolWithJobToken.class);
+  public static final Logger LOG = getLogger(TestUmbilicalProtocolWithJobToken.class);
 
   private static Configuration conf;
   static {
@@ -73,11 +71,11 @@ public class TestUmbilicalProtocolWithJobToken {
   }
 
   static {
-((Log4JLogger) Client.LOG).getLogger().setLevel(Level.ALL);
-((Log4JLogger) Server.LOG).getLogger().setLevel(Level.ALL);
-((Log4JLogger) SaslRpcClient.LOG).getLogger().setLevel(Level.ALL);
-((Log4JLogger) SaslRpcServer.LOG).getLogger().setLevel(Level.ALL);
-((Log4JLogger) SaslInputStream.LOG).getLogger().setLevel(Level.ALL);
+GenericTestUtils.setLogLevel(Client.LOG, Level.TRACE);
+GenericTestUtils.setLogLevel(Server.LOG, Level.TRACE);
+GenericTestUtils.setLogLevel(SaslRpcClient.LOG, Level.TRACE);
+GenericTestUtils.setLogLevel(SaslRpcServer.LOG, Level.TRACE);
+GenericTestUtils.setLogLevel(SaslInputStream.LOG, Level.TRACE);
   }
 
   @Test

