OOZIE-2729 Kill all actions after each unit test. This fixes the scenario in 
which multiple actions remained RUNNING during execution of a test suite and 
kept consuming resources (memory, vcores).

Change-Id: I02602c90371a02853361ebeaeab2e3529f76b47d


Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/a6712816
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/a6712816
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/a6712816

Branch: refs/heads/oya
Commit: a67128165d067c15483acf3ed80531dbe3f6970e
Parents: 739d53a
Author: Andras Piros <[email protected]>
Authored: Fri Nov 25 10:47:02 2016 +0100
Committer: Andras Piros <[email protected]>
Committed: Fri Nov 25 11:09:15 2016 +0100

----------------------------------------------------------------------
 .../oozie/command/wf/ActionStartXCommand.java   |   2 +-
 .../wf/TestWorkflowActionKillXCommand.java      |  12 +-
 .../java/org/apache/oozie/test/XTestCase.java   |  15 +-
 .../apache/oozie/action/hadoop/DistcpMain.java  |   2 +-
 .../apache/oozie/action/hadoop/HiveMain.java    |   2 +-
 .../apache/oozie/action/hadoop/Hive2Main.java   |   2 +-
 .../apache/oozie/action/hadoop/JavaMain.java    |   2 +-
 .../oozie/action/hadoop/LauncherMain.java       |  76 --------
 .../oozie/action/hadoop/MapReduceMain.java      |   2 +-
 .../oozie/action/hadoop/YarnJobActions.java     | 181 +++++++++++++++++++
 .../org/apache/oozie/action/hadoop/PigMain.java |   2 +-
 .../apache/oozie/action/hadoop/SparkMain.java   |   2 +-
 .../apache/oozie/action/hadoop/SqoopMain.java   |   2 +-
 13 files changed, 210 insertions(+), 92 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/core/src/main/java/org/apache/oozie/command/wf/ActionStartXCommand.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/oozie/command/wf/ActionStartXCommand.java 
b/core/src/main/java/org/apache/oozie/command/wf/ActionStartXCommand.java
index edfac48..ca155b1 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/ActionStartXCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/ActionStartXCommand.java
@@ -224,7 +224,7 @@ public class ActionStartXCommand extends 
ActionXCommand<org.apache.oozie.command
                 wfAction.setErrorInfo(null, null);
                 incrActionCounter(wfAction.getType(), 1);
 
-                LOG.info("Start action [{0}] with user-retry state : 
userRetryCount [{1}], userRetryMax [{2}], userRetryInterval [{3}]",
+                LOG.info("Start action [{0}] with user-retry state   : 
userRetryCount [{1}], userRetryMax [{2}], userRetryInterval [{3}]",
                                 wfAction.getId(), 
wfAction.getUserRetryCount(), wfAction.getUserRetryMax(), wfAction
                                         .getUserRetryInterval());
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
 
b/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
index 4509586..1561ddc 100644
--- 
a/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
+++ 
b/core/src/test/java/org/apache/oozie/command/wf/TestWorkflowActionKillXCommand.java
@@ -31,6 +31,7 @@ import org.apache.oozie.WorkflowActionBean;
 import org.apache.oozie.WorkflowJobBean;
 import org.apache.oozie.action.hadoop.LauncherMain;
 import org.apache.oozie.action.hadoop.MapperReducerForTest;
+import org.apache.oozie.action.hadoop.YarnJobActions;
 import org.apache.oozie.client.WorkflowAction;
 import org.apache.oozie.client.WorkflowJob;
 import org.apache.oozie.executor.jpa.JPAExecutorException;
@@ -43,8 +44,6 @@ import org.apache.oozie.service.UUIDService;
 import org.apache.oozie.test.XDataTestCase;
 import org.apache.oozie.workflow.WorkflowInstance;
 
-import com.google.common.collect.Sets;
-
 public class TestWorkflowActionKillXCommand extends XDataTestCase {
     private Services services;
 
@@ -172,8 +171,13 @@ public class TestWorkflowActionKillXCommand extends 
XDataTestCase {
         System.setProperty(LauncherMain.OOZIE_JOB_LAUNCH_TIME, 
String.valueOf(System.currentTimeMillis()));
 
         jobClient.submitJob(jobConf);
-        Set<ApplicationId> apps = Sets.newHashSet();
-        apps = LauncherMain.getChildYarnJobs(jobConf, 
ApplicationsRequestScope.ALL);
+
+        final Set<ApplicationId> apps = new YarnJobActions.Builder(jobConf, 
ApplicationsRequestScope.ALL)
+                .checkApplicationTags(true)
+                .checkStartRange(true)
+                .build()
+                .getYarnJobs();
+
         assertEquals("Number of YARN apps", apps.size(), 1);
 
         sleepjob.close();

http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/core/src/test/java/org/apache/oozie/test/XTestCase.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/test/XTestCase.java 
b/core/src/test/java/org/apache/oozie/test/XTestCase.java
index b29556c..53d0a97 100644
--- a/core/src/test/java/org/apache/oozie/test/XTestCase.java
+++ b/core/src/test/java/org/apache/oozie/test/XTestCase.java
@@ -44,14 +44,15 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.security.authorize.ProxyUsers;
+import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.client.api.YarnClient;
 import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.server.MiniYARNCluster;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.log4j.AppenderSkeleton;
 import org.apache.log4j.spi.LoggingEvent;
+import org.apache.oozie.action.hadoop.YarnJobActions;
 import org.apache.oozie.service.ConfigurationService;
 import org.apache.oozie.service.HadoopAccessorException;
 import org.apache.oozie.service.HadoopAccessorService;
@@ -666,17 +667,25 @@ public abstract class XTestCase extends TestCase {
     private static void shutdownMiniCluster() {
         try {
             if (yarnCluster != null) {
+                final YarnJobActions yarnJobActions =
+                        new YarnJobActions.Builder(yarnCluster.getConfig(), 
ApplicationsRequestScope.ALL)
+                                .build();
+                final Set<ApplicationId> allYarnJobs = 
yarnJobActions.getYarnJobs();
+
+                yarnJobActions.killSelectedYarnJobs(allYarnJobs);
+
                 yarnCluster.stop();
             }
         } catch (final Exception ex) {
-            System.out.println(ex);
+            System.out.println(ex.getMessage());
         }
+
         try {
             if (dfsCluster != null) {
                 dfsCluster.shutdown();
             }
         } catch (final Exception ex) {
-            System.out.println(ex);
+            System.out.println(ex.getMessage());
         }
         // This is tied to the MiniCluster because it inherits configs from 
there
         hs2Config = null;

http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
----------------------------------------------------------------------
diff --git 
a/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java 
b/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
index 40c81ba..fc37e3c 100644
--- 
a/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
+++ 
b/sharelib/distcp/src/main/java/org/apache/oozie/action/hadoop/DistcpMain.java
@@ -51,7 +51,7 @@ public class DistcpMain extends JavaMain {
     protected void run(String[] args) throws Exception {
 
         Configuration actionConf = loadActionConf();
-        LauncherMain.killChildYarnJobs(actionConf);
+        YarnJobActions.killChildYarnJobs(actionConf);
         String logFile = setUpDistcpLog4J(actionConf);
         Class<?> klass = 
actionConf.getClass(LauncherMapper.CONF_OOZIE_ACTION_MAIN_CLASS,
                 org.apache.hadoop.tools.DistCp.class);

http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
----------------------------------------------------------------------
diff --git 
a/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java 
b/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
index 242cd6c..320dbd5 100644
--- a/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
+++ b/sharelib/hive/src/main/java/org/apache/oozie/action/hadoop/HiveMain.java
@@ -306,7 +306,7 @@ public class HiveMain extends LauncherMain {
         }
         System.out.println();
 
-        LauncherMain.killChildYarnJobs(hiveConf);
+        YarnJobActions.killChildYarnJobs(hiveConf);
 
         
System.out.println("=================================================================");
         System.out.println();

http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
----------------------------------------------------------------------
diff --git 
a/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java 
b/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
index b418b89..8bf233f 100644
--- a/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
+++ b/sharelib/hive2/src/main/java/org/apache/oozie/action/hadoop/Hive2Main.java
@@ -229,7 +229,7 @@ public class Hive2Main extends LauncherMain {
         }
         System.out.println();
 
-        LauncherMain.killChildYarnJobs(actionConf);
+        YarnJobActions.killChildYarnJobs(actionConf);
 
         
System.out.println("=================================================================");
         System.out.println();

http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
----------------------------------------------------------------------
diff --git 
a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java 
b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
index 0815318..e9c5585 100644
--- a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
+++ b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/JavaMain.java
@@ -44,7 +44,7 @@ public class JavaMain extends LauncherMain {
         setApplicationTags(actionConf, TEZ_APPLICATION_TAGS);
         setApplicationTags(actionConf, SPARK_YARN_TAGS);
 
-        LauncherMain.killChildYarnJobs(actionConf);
+        YarnJobActions.killChildYarnJobs(actionConf);
 
         Class<?> klass = actionConf.getClass(JAVA_MAIN_CLASS, Object.class);
         System.out.println("Java action main class        : " + 
klass.getName());

http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
----------------------------------------------------------------------
diff --git 
a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java 
b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
index 31200af..0ce2055 100644
--- 
a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
+++ 
b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/LauncherMain.java
@@ -125,82 +125,6 @@ public abstract class LauncherMain {
         }
     }
 
-    public static Set<ApplicationId> getChildYarnJobs(Configuration 
actionConf) {
-        return getChildYarnJobs(actionConf, ApplicationsRequestScope.OWN);
-    }
-
-    public static Set<ApplicationId> getChildYarnJobs(Configuration 
actionConf, ApplicationsRequestScope scope) {
-        System.out.println("Fetching child yarn jobs");
-        Set<ApplicationId> childYarnJobs = new HashSet<ApplicationId>();
-        String tag = actionConf.get(CHILD_MAPREDUCE_JOB_TAGS);
-        if (tag == null) {
-            System.out.print("Could not find Yarn tags property " + 
CHILD_MAPREDUCE_JOB_TAGS);
-            return childYarnJobs;
-        }
-        System.out.println("tag id : " + tag);
-        long startTime = 0L;
-        try {
-            startTime = 
Long.parseLong(System.getProperty(OOZIE_JOB_LAUNCH_TIME));
-        } catch(NumberFormatException nfe) {
-            throw new RuntimeException("Could not find Oozie job launch time", 
nfe);
-        }
-
-        GetApplicationsRequest gar = GetApplicationsRequest.newInstance();
-        gar.setScope(scope);
-        gar.setApplicationTags(Collections.singleton(tag));
-
-        long endTime = System.currentTimeMillis();
-        if (startTime > endTime) {
-            System.out.println("WARNING: Clock skew between the Oozie server 
host and this host detected.  Please fix this.  " +
-                    "Attempting to work around...");
-            // We don't know which one is wrong (relative to the RM), so to be 
safe, let's assume they're both wrong and add an
-            // offset in both directions
-            long diff = 2 * (startTime - endTime);
-            startTime = startTime - diff;
-            endTime = endTime + diff;
-        }
-        gar.setStartRange(startTime, endTime);
-        try {
-            ApplicationClientProtocol proxy = 
ClientRMProxy.createRMProxy(actionConf, ApplicationClientProtocol.class);
-            GetApplicationsResponse apps = proxy.getApplications(gar);
-            List<ApplicationReport> appsList = apps.getApplicationList();
-            for(ApplicationReport appReport : appsList) {
-                childYarnJobs.add(appReport.getApplicationId());
-            }
-        } catch (IOException ioe) {
-            throw new RuntimeException("Exception occurred while finding child 
jobs", ioe);
-        } catch (YarnException ye) {
-            throw new RuntimeException("Exception occurred while finding child 
jobs", ye);
-        }
-
-        System.out.println("Child yarn jobs are found - " + 
StringUtils.join(childYarnJobs, ","));
-        return childYarnJobs;
-    }
-
-    public static void killChildYarnJobs(Configuration actionConf) {
-        try {
-            Set<ApplicationId> childYarnJobs = getChildYarnJobs(actionConf);
-            if (!childYarnJobs.isEmpty()) {
-                System.out.println();
-                System.out.println("Found [" + childYarnJobs.size() + "] 
Map-Reduce jobs from this launcher");
-                System.out.println("Killing existing jobs and starting over:");
-                YarnClient yarnClient = YarnClient.createYarnClient();
-                yarnClient.init(actionConf);
-                yarnClient.start();
-                for (ApplicationId app : childYarnJobs) {
-                    System.out.print("Killing job [" + app + "] ... ");
-                    yarnClient.killApplication(app);
-                    System.out.println("Done");
-                }
-                System.out.println();
-            }
-        } catch (YarnException ye) {
-            throw new RuntimeException("Exception occurred while killing child 
job(s)", ye);
-        } catch (IOException ioe) {
-            throw new RuntimeException("Exception occurred while killing child 
job(s)", ioe);
-        }
-    }
-
     protected abstract void run(String[] args) throws Exception;
 
     /**

http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java
----------------------------------------------------------------------
diff --git 
a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java
 
b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java
index 16cf4b1..ba7324f 100644
--- 
a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java
+++ 
b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/MapReduceMain.java
@@ -50,7 +50,7 @@ public class MapReduceMain extends LauncherMain {
 
         JobConf jobConf = new JobConf();
         addActionConf(jobConf, actionConf);
-        LauncherMain.killChildYarnJobs(jobConf);
+        YarnJobActions.killChildYarnJobs(jobConf);
 
         // Run a config class if given to update the job conf
         runConfigClass(jobConf);

http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/YarnJobActions.java
----------------------------------------------------------------------
diff --git 
a/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/YarnJobActions.java
 
b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/YarnJobActions.java
new file mode 100644
index 0000000..0224362
--- /dev/null
+++ 
b/sharelib/oozie/src/main/java/org/apache/oozie/action/hadoop/YarnJobActions.java
@@ -0,0 +1,181 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.action.hadoop;
+
+import com.google.common.collect.Sets;
+import com.google.common.io.Closeables;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
+import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.client.ClientRMProxy;
+import org.apache.hadoop.yarn.client.api.YarnClient;
+import org.apache.hadoop.yarn.exceptions.YarnException;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+
+public class YarnJobActions {
+    private final Configuration configuration;
+    private final ApplicationsRequestScope scope;
+    private final boolean checkApplicationTags;
+    private final boolean checkStartRange;
+
+    private YarnJobActions(final Configuration configuration,
+                           final ApplicationsRequestScope scope,
+                           final boolean checkApplicationTags,
+                           final boolean checkStartRange) {
+        this.configuration = configuration;
+        this.scope = scope;
+        this.checkApplicationTags = checkApplicationTags;
+        this.checkStartRange = checkStartRange;
+    }
+
+    public Set<ApplicationId> getYarnJobs() {
+        System.out.println(String.format("Fetching yarn jobs. 
[scope=%s;checkApplicationTags=%s;checkStartRange=%s]",
+                scope, checkApplicationTags, checkStartRange));
+
+        final Set<ApplicationId> childYarnJobs = Sets.newHashSet();
+        final GetApplicationsRequest gar = 
GetApplicationsRequest.newInstance();
+        gar.setScope(scope);
+
+        if (checkApplicationTags) {
+            final String tag = 
configuration.get(LauncherMain.CHILD_MAPREDUCE_JOB_TAGS);
+            if (tag == null) {
+                System.out.println("Could not find Yarn tags property " + 
LauncherMain.CHILD_MAPREDUCE_JOB_TAGS);
+                return childYarnJobs;
+            }
+            System.out.println("tag id : " + tag);
+            gar.setApplicationTags(Collections.singleton(tag));
+        }
+
+        if (checkStartRange) {
+            long startTime;
+            try {
+                startTime = 
Long.parseLong(System.getProperty(LauncherMain.OOZIE_JOB_LAUNCH_TIME));
+            } catch (final NumberFormatException nfe) {
+                throw new RuntimeException("Could not find Oozie job launch 
time", nfe);
+            }
+
+
+            long endTime = System.currentTimeMillis();
+            if (startTime > endTime) {
+                System.out.println("WARNING: Clock skew between the Oozie 
server host and this host detected.  Please fix this.  " +
+                        "Attempting to work around...");
+                // We don't know which one is wrong (relative to the RM), so 
to be safe, let's assume they're both wrong and add an
+                // offset in both directions
+                final long diff = 2 * (startTime - endTime);
+                startTime = startTime - diff;
+                endTime = endTime + diff;
+            }
+            gar.setStartRange(startTime, endTime);
+        }
+
+        try {
+            final ApplicationClientProtocol proxy = 
ClientRMProxy.createRMProxy(configuration, ApplicationClientProtocol.class);
+            final GetApplicationsResponse apps = proxy.getApplications(gar);
+            final List<ApplicationReport> appsList = apps.getApplicationList();
+            for (final ApplicationReport appReport : appsList) {
+                childYarnJobs.add(appReport.getApplicationId());
+            }
+        } catch (final IOException | YarnException e) {
+            throw new RuntimeException("Exception occurred while finding child 
jobs", e);
+        }
+
+        System.out.println("Child yarn jobs are found - " + 
StringUtils.join(childYarnJobs, ","));
+        return childYarnJobs;
+    }
+
+    static void killChildYarnJobs(final Configuration actionConf) {
+        final YarnJobActions yarnJobActions = new Builder(actionConf, 
ApplicationsRequestScope.OWN)
+                .build();
+        final Set<ApplicationId> childYarnJobs = yarnJobActions.getYarnJobs();
+
+        yarnJobActions.killSelectedYarnJobs(childYarnJobs);
+    }
+
+    public void killSelectedYarnJobs(final Set<ApplicationId> 
selectedApplicationIds) {
+        final YarnClient yarnClient = createYarnClient();
+
+        try {
+            if (!selectedApplicationIds.isEmpty()) {
+                System.out.println("");
+                System.out.println("Found [" + selectedApplicationIds.size() + 
"] Map-Reduce jobs from this launcher");
+                System.out.println("Killing existing jobs and starting over:");
+
+                for (final ApplicationId app : selectedApplicationIds) {
+                    System.out.println("Killing job [" + app + "] ... ");
+
+                    yarnClient.killApplication(app);
+
+                    System.out.println("Done");
+                }
+
+                System.out.println("");
+            }
+        } catch (final YarnException | IOException e) {
+            throw new RuntimeException("Exception occurred while killing child 
job(s)", e);
+        } finally {
+            Closeables.closeQuietly(yarnClient);
+        }
+    }
+
+    private YarnClient createYarnClient() {
+        final YarnClient yarnClient = YarnClient.createYarnClient();
+
+        yarnClient.init(configuration);
+        yarnClient.start();
+
+        return yarnClient;
+    }
+
+    public static class Builder {
+        private final Configuration configuration;
+        private final ApplicationsRequestScope scope;
+        private boolean checkApplicationTags = false;
+        private boolean checkStartRange = false;
+
+        public Builder(final Configuration configuration, final 
ApplicationsRequestScope scope) {
+            this.configuration = configuration;
+            this.scope = scope;
+        }
+
+        public Builder checkApplicationTags(final boolean 
checkApplicationTags) {
+            this.checkApplicationTags = checkApplicationTags;
+
+            return this;
+        }
+
+        public Builder checkStartRange(final boolean checkStartRange) {
+            this.checkStartRange = checkStartRange;
+
+            return this;
+        }
+
+        public YarnJobActions build() {
+            return new YarnJobActions(configuration, scope, 
checkApplicationTags, checkStartRange);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
----------------------------------------------------------------------
diff --git 
a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java 
b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
index 0029dd0..98929c0 100644
--- a/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
+++ b/sharelib/pig/src/main/java/org/apache/oozie/action/hadoop/PigMain.java
@@ -217,7 +217,7 @@ public class PigMain extends LauncherMain {
             System.out.println("             " + arg);
         }
 
-        LauncherMain.killChildYarnJobs(actionConf);
+        YarnJobActions.killChildYarnJobs(actionConf);
 
         
System.out.println("=================================================================");
         System.out.println();

http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
----------------------------------------------------------------------
diff --git 
a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java 
b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
index f278075..7f35cc1 100644
--- a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
+++ b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
@@ -85,7 +85,7 @@ public class SparkMain extends LauncherMain {
         prepareHadoopConfig(actionConf);
 
         setYarnTag(actionConf);
-        LauncherMain.killChildYarnJobs(actionConf);
+        YarnJobActions.killChildYarnJobs(actionConf);
         String logFile = setUpSparkLog4J(actionConf);
         List<String> sparkArgs = new ArrayList<String>();
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/a6712816/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
----------------------------------------------------------------------
diff --git 
a/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java 
b/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
index 92c960f..29e2966 100644
--- a/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
+++ b/sharelib/sqoop/src/main/java/org/apache/oozie/action/hadoop/SqoopMain.java
@@ -171,7 +171,7 @@ public class SqoopMain extends LauncherMain {
             System.out.println("             " + arg);
         }
 
-        LauncherMain.killChildYarnJobs(sqoopConf);
+        YarnJobActions.killChildYarnJobs(sqoopConf);
 
         
System.out.println("=================================================================");
         System.out.println();

Reply via email to