OOZIE-2316 Drop support for Hadoop 1 and 0.23 (asasvari via rkanter)

Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/a37835fe
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/a37835fe
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/a37835fe

Branch: refs/heads/pya
Commit: a37835feced8d4f5c7b53bcf12cc9355de482a8a
Parents: fc9aeb1
Author: Robert Kanter <rkan...@cloudera.com>
Authored: Thu Jun 16 13:37:11 2016 -0700
Committer: Robert Kanter <rkan...@cloudera.com>
Committed: Thu Jun 16 13:37:11 2016 -0700

----------------------------------------------------------------------
 client/pom.xml                                  |   4 +-
 .../org/apache/oozie/client/XOozieClient.java   |  20 +-
 core/pom.xml                                    |   4 +-
 .../oozie/action/hadoop/JavaActionExecutor.java |  24 +--
 .../action/hadoop/LauncherMapperHelper.java     |   5 +-
 .../action/hadoop/SparkActionExecutor.java      |   8 +-
 .../oozie/command/wf/SubmitHttpXCommand.java    |   2 +-
 .../oozie/command/wf/SubmitMRXCommand.java      |   9 +-
 .../wf/SubmitScriptLanguageXCommand.java        |   2 +-
 .../oozie/command/wf/SubmitSqoopXCommand.java   |   2 +-
 .../oozie/service/HadoopAccessorService.java    |   9 +-
 .../apache/oozie/service/ShareLibService.java   |  30 ++-
 .../java/org/apache/oozie/util/FSUtils.java     |  53 +++++
 .../java/org/apache/oozie/util/JobUtils.java    |  21 +-
 .../action/hadoop/TestFsActionExecutor.java     |   4 +-
 .../action/hadoop/TestJavaActionExecutor.java   |  55 ++---
 .../org/apache/oozie/client/TestOozieCLI.java   |   5 +-
 .../oozie/client/TestWorkflowXClient.java       |  10 +-
 .../command/wf/TestSubmitHiveXCommand.java      |   2 +-
 .../oozie/command/wf/TestSubmitMRXCommand.java  |   8 +-
 .../oozie/command/wf/TestSubmitPigXCommand.java |   6 +-
 .../command/wf/TestSubmitSqoopXCommand.java     |   2 +-
 .../service/TestHadoopAccessorService.java      |   2 +-
 .../oozie/service/TestShareLibService.java      |  43 ++--
 .../lite/TestLiteWorkflowAppParser.java         |   1 -
 distro/src/main/bin/addtowar.sh                 |  20 +-
 docs/src/site/twiki/DG_QuickStart.twiki         |  11 +-
 docs/src/site/twiki/ENG_Building.twiki          |  11 +-
 hadooplibs/hadoop-auth-0.23/pom.xml             |  45 -----
 hadooplibs/hadoop-auth-1/pom.xml                |  43 ----
 hadooplibs/hadoop-auth-2/pom.xml                |  43 ----
 hadooplibs/hadoop-distcp-0.23/pom.xml           |  43 ----
 hadooplibs/hadoop-distcp-1/pom.xml              |  43 ----
 hadooplibs/hadoop-distcp-2/pom.xml              |  43 ----
 hadooplibs/hadoop-distcp-3/pom.xml              |  43 ----
 hadooplibs/hadoop-utils-0.23/pom.xml            |  42 ----
 .../action/hadoop/LauncherMainHadoopUtils.java  |  35 ----
 .../apache/oozie/hadoop/utils/HadoopShims.java  |  51 -----
 hadooplibs/hadoop-utils-1/pom.xml               |  41 ----
 .../action/hadoop/LauncherMainHadoopUtils.java  |  35 ----
 .../apache/oozie/hadoop/utils/HadoopShims.java  |  51 -----
 hadooplibs/hadoop-utils-2/pom.xml               |  42 ----
 .../action/hadoop/LauncherMainHadoopUtils.java  | 118 -----------
 .../apache/oozie/hadoop/utils/HadoopShims.java  |  68 -------
 hadooplibs/hadoop-utils-3/pom.xml               |  42 ----
 .../action/hadoop/LauncherMainHadoopUtils.java  | 115 -----------
 .../apache/oozie/hadoop/utils/HadoopShims.java  |  68 -------
 hadooplibs/pom.xml                              |  93 ---------
 pom.xml                                         |  92 +--------
 release-log.txt                                 |   1 +
 sharelib/distcp/pom.xml                         |   4 +-
 .../apache/oozie/action/hadoop/DistcpMain.java  |   2 +-
 sharelib/hive/pom.xml                           |  12 +-
 .../apache/oozie/action/hadoop/HiveMain.java    |   2 +-
 sharelib/hive2/pom.xml                          |   6 +-
 .../apache/oozie/action/hadoop/Hive2Main.java   |   2 +-
 sharelib/oozie/pom.xml                          |   6 -
 .../apache/oozie/action/hadoop/JavaMain.java    |   2 +-
 .../oozie/action/hadoop/LauncherMain.java       |  92 ++++++++-
 .../oozie/action/hadoop/LauncherMapper.java     |  12 +-
 .../oozie/action/hadoop/MapReduceMain.java      |   2 +-
 sharelib/pig/pom.xml                            |  10 +-
 .../org/apache/oozie/action/hadoop/PigMain.java |   2 +-
 sharelib/spark/pom.xml                          | 201 +++++++++----------
 .../apache/oozie/action/hadoop/SparkMain.java   |   2 +-
 sharelib/sqoop/pom.xml                          |   5 -
 .../apache/oozie/action/hadoop/SqoopMain.java   |   2 +-
 webapp/pom.xml                                  |   8 +-
 68 files changed, 381 insertions(+), 1561 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/client/pom.xml
----------------------------------------------------------------------
diff --git a/client/pom.xml b/client/pom.xml
index 632d256..a3c8238 100644
--- a/client/pom.xml
+++ b/client/pom.xml
@@ -74,8 +74,8 @@
             <scope>compile</scope>
         </dependency>
         <dependency>
-            <groupId>org.apache.oozie</groupId>
-            <artifactId>oozie-hadoop-auth</artifactId>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-auth</artifactId>
             <scope>compile</scope>
         </dependency>
         <dependency>

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/client/src/main/java/org/apache/oozie/client/XOozieClient.java
----------------------------------------------------------------------
diff --git a/client/src/main/java/org/apache/oozie/client/XOozieClient.java 
b/client/src/main/java/org/apache/oozie/client/XOozieClient.java
index 8ee9c57..60cedc9 100644
--- a/client/src/main/java/org/apache/oozie/client/XOozieClient.java
+++ b/client/src/main/java/org/apache/oozie/client/XOozieClient.java
@@ -33,19 +33,10 @@ import org.json.simple.JSONObject;
 import org.json.simple.JSONValue;
 
 public class XOozieClient extends OozieClient {
-
-    public static final String JT = "mapred.job.tracker";
-    public static final String JT_2 = "mapreduce.jobtracker.address";
-
+    public static final String RM = "yarn.resourcemanager.address";
     public static final String NN = "fs.default.name";
     public static final String NN_2 = "fs.defaultFS";
 
-    @Deprecated
-    public static final String JT_PRINCIPAL = "mapreduce.jobtracker.kerberos.principal";
-
-    @Deprecated
-    public static final String NN_PRINCIPAL = "dfs.namenode.kerberos.principal";
-
     public static final String PIG_SCRIPT = "oozie.pig.script";
 
     public static final String PIG_OPTIONS = "oozie.pig.options";
@@ -123,12 +114,9 @@ public class XOozieClient extends OozieClient {
     }
 
     private void validateHttpSubmitConf(Properties conf) {
-        String JT = conf.getProperty(XOozieClient.JT);
-        String JT_2 = conf.getProperty(XOozieClient.JT_2);
-        if (JT == null) {
-            if(JT_2 == null) {
-                throw new RuntimeException("jobtracker is not specified in conf");
-            }
+        String RM = conf.getProperty(XOozieClient.RM);
+        if (RM == null) {
+            throw new RuntimeException("jobtracker is not specified in conf");
         }
 
         String NN = conf.getProperty(XOozieClient.NN);
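
For reference, client code that used to set XOozieClient.JT now sets XOozieClient.RM ("yarn.resourcemanager.address"). A minimal sketch of a proxy MR submission against a server with this change; the constant names come from this diff, while the Oozie URL, addresses and paths below are illustrative only:

    XOozieClient client = new XOozieClient("http://localhost:11000/oozie");
    Properties conf = client.createConfiguration();
    conf.setProperty(XOozieClient.RM, "localhost:8032");        // was XOozieClient.JT ("mapred.job.tracker")
    conf.setProperty(XOozieClient.NN, "hdfs://localhost:8020");
    conf.setProperty(OozieClient.LIBPATH, "/user/test/lib");    // hypothetical lib path
    String jobId = client.submitMapReduce(conf);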

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index b72ea7d..6584af8 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -149,8 +149,8 @@
         </dependency>
 
         <dependency>
-            <groupId>org.apache.oozie</groupId>
-            <artifactId>oozie-hadoop-distcp</artifactId>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-distcp</artifactId>
             <scope>test</scope>
         </dependency>
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java 
b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
index 6893bb8..99e3344 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/JavaActionExecutor.java
@@ -45,7 +45,6 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.AccessControlException;
-import org.apache.hadoop.hive.shims.HadoopShims;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
@@ -72,12 +71,12 @@ import org.apache.oozie.service.URIHandlerService;
 import org.apache.oozie.service.WorkflowAppService;
 import org.apache.oozie.util.ELEvaluationException;
 import org.apache.oozie.util.ELEvaluator;
+import org.apache.oozie.util.XLog;
+import org.apache.oozie.util.XConfiguration;
+import org.apache.oozie.util.XmlUtils;
 import org.apache.oozie.util.JobUtils;
 import org.apache.oozie.util.LogUtils;
 import org.apache.oozie.util.PropertiesUtils;
-import org.apache.oozie.util.XConfiguration;
-import org.apache.oozie.util.XLog;
-import org.apache.oozie.util.XmlUtils;
 import org.jdom.Element;
 import org.jdom.JDOMException;
 import org.jdom.Namespace;
@@ -86,8 +85,6 @@ import org.jdom.Namespace;
 public class JavaActionExecutor extends ActionExecutor {
 
     protected static final String HADOOP_USER = "user.name";
-    public static final String HADOOP_JOB_TRACKER = "mapred.job.tracker";
-    public static final String HADOOP_JOB_TRACKER_2 = "mapreduce.jobtracker.address";
     public static final String HADOOP_YARN_RM = "yarn.resourcemanager.address";
     public static final String HADOOP_NAME_NODE = "fs.default.name";
     private static final String HADOOP_JOB_NAME = "mapred.job.name";
@@ -127,9 +124,7 @@ public class JavaActionExecutor extends ActionExecutor {
 
     static {
         DISALLOWED_PROPERTIES.add(HADOOP_USER);
-        DISALLOWED_PROPERTIES.add(HADOOP_JOB_TRACKER);
         DISALLOWED_PROPERTIES.add(HADOOP_NAME_NODE);
-        DISALLOWED_PROPERTIES.add(HADOOP_JOB_TRACKER_2);
         DISALLOWED_PROPERTIES.add(HADOOP_YARN_RM);
     }
 
@@ -145,8 +140,7 @@ public class JavaActionExecutor extends ActionExecutor {
         List<Class> classes = new ArrayList<Class>();
         classes.add(LauncherMapper.class);
         classes.add(OozieLauncherInputFormat.class);
-        classes.add(LauncherMainHadoopUtils.class);
-        classes.add(HadoopShims.class);
+        classes.add(LauncherMain.class);
         classes.addAll(Services.get().get(URIHandlerService.class).getClassesForLauncher());
         return classes;
     }
@@ -222,8 +216,6 @@ public class JavaActionExecutor extends ActionExecutor {
             conf = new JobConf(false);
         }
         conf.set(HADOOP_USER, context.getProtoActionConf().get(WorkflowAppService.HADOOP_USER));
-        conf.set(HADOOP_JOB_TRACKER, jobTracker);
-        conf.set(HADOOP_JOB_TRACKER_2, jobTracker);
         conf.set(HADOOP_YARN_RM, jobTracker);
         conf.set(HADOOP_NAME_NODE, nameNode);
         conf.set("mapreduce.fileoutputcommitter.marksuccessfuljobs", "true");
@@ -255,7 +247,7 @@ public class JavaActionExecutor extends ActionExecutor {
             XConfiguration launcherConf = new XConfiguration();
             // Inject action defaults for launcher
             HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
-            XConfiguration actionDefaultConf = has.createActionDefaultConf(conf.get(HADOOP_JOB_TRACKER), getType());
+            XConfiguration actionDefaultConf = has.createActionDefaultConf(conf.get(HADOOP_YARN_RM), getType());
             injectLauncherProperties(actionDefaultConf, launcherConf);
             // Inject <job-xml> and <configuration> for launcher
             try {
@@ -524,7 +516,7 @@ public class JavaActionExecutor extends ActionExecutor {
             throws ActionExecutorException {
         try {
             HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
-            XConfiguration actionDefaults = has.createActionDefaultConf(actionConf.get(HADOOP_JOB_TRACKER), getType());
+            XConfiguration actionDefaults = has.createActionDefaultConf(actionConf.get(HADOOP_YARN_RM), getType());
             XConfiguration.injectDefaults(actionDefaults, actionConf);
             has.checkSupportedFilesystem(appPath.toUri());
 
@@ -1143,7 +1135,7 @@ public class JavaActionExecutor extends ActionExecutor {
             if (alreadyRunning && !isUserRetry) {
                 runningJob = jobClient.getJob(JobID.forName(launcherId));
                 if (runningJob == null) {
-                    String jobTracker = launcherJobConf.get(HADOOP_JOB_TRACKER);
+                    String jobTracker = launcherJobConf.get(HADOOP_YARN_RM);
                     throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "JA017",
                             "unknown job [{0}@{1}], cannot recover", launcherId, jobTracker);
                 }
@@ -1190,7 +1182,7 @@ public class JavaActionExecutor extends ActionExecutor {
                 LOG.debug("After submission get the launcherId " + launcherId);
             }
 
-            String jobTracker = launcherJobConf.get(HADOOP_JOB_TRACKER);
+            String jobTracker = launcherJobConf.get(HADOOP_YARN_RM);
             String consoleUrl = runningJob.getTrackingURL();
             context.setStartData(launcherId, jobTracker, consoleUrl);
         }

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/main/java/org/apache/oozie/action/hadoop/LauncherMapperHelper.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/oozie/action/hadoop/LauncherMapperHelper.java 
b/core/src/main/java/org/apache/oozie/action/hadoop/LauncherMapperHelper.java
index ef6b99d..69e1044 100644
--- 
a/core/src/main/java/org/apache/oozie/action/hadoop/LauncherMapperHelper.java
+++ 
b/core/src/main/java/org/apache/oozie/action/hadoop/LauncherMapperHelper.java
@@ -44,7 +44,6 @@ import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.Counters;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.oozie.client.OozieClient;
-import org.apache.oozie.client.WorkflowAction;
 import org.apache.oozie.service.HadoopAccessorException;
 import org.apache.oozie.service.HadoopAccessorService;
 import org.apache.oozie.service.Services;
@@ -165,12 +164,12 @@ public class LauncherMapperHelper {
     public static void setupYarnRestartHandling(JobConf launcherJobConf, Configuration actionConf, String launcherTag,
                                                 long launcherTime)
             throws NoSuchAlgorithmException {
-        launcherJobConf.setLong(LauncherMainHadoopUtils.OOZIE_JOB_LAUNCH_TIME, launcherTime);
+        launcherJobConf.setLong(LauncherMain.OOZIE_JOB_LAUNCH_TIME, launcherTime);
         // Tags are limited to 100 chars so we need to hash them to make sure (the actionId otherwise doesn't have a max length)
         String tag = getTag(launcherTag);
         // keeping the oozie.child.mapreduce.job.tags instead of mapreduce.job.tags to avoid killing launcher itself.
         // mapreduce.job.tags should only go to child job launch by launcher.
-        actionConf.set(LauncherMainHadoopUtils.CHILD_MAPREDUCE_JOB_TAGS, tag);
+        actionConf.set(LauncherMain.CHILD_MAPREDUCE_JOB_TAGS, tag);
     }
 
     private static String getTag(String launcherTag) throws NoSuchAlgorithmException {

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java 
b/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
index 97355fd..252f461 100644
--- a/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
+++ b/core/src/main/java/org/apache/oozie/action/hadoop/SparkActionExecutor.java
@@ -40,8 +40,7 @@ import org.jdom.Namespace;
 
 public class SparkActionExecutor extends JavaActionExecutor {
     public static final String SPARK_MAIN_CLASS_NAME = "org.apache.oozie.action.hadoop.SparkMain";
-    public static final String TASK_USER_PRECEDENCE = "mapreduce.task.classpath.user.precedence"; // hadoop-2
-    public static final String TASK_USER_CLASSPATH_PRECEDENCE = "mapreduce.user.classpath.first";  // hadoop-1
+    public static final String TASK_USER_PRECEDENCE = "mapreduce.task.classpath.user.precedence";
     public static final String SPARK_MASTER = "oozie.spark.master";
     public static final String SPARK_MODE = "oozie.spark.mode";
     public static final String SPARK_OPTS = "oozie.spark.spark-opts";
@@ -81,7 +80,7 @@ public class SparkActionExecutor extends JavaActionExecutor {
 
         StringBuilder sparkOptsSb = new StringBuilder();
         if (master.startsWith("yarn")) {
-            String resourceManager = actionConf.get(HADOOP_JOB_TRACKER);
+            String resourceManager = actionConf.get(HADOOP_YARN_RM);
             Map<String, String> sparkConfig = Services.get().get(SparkConfigurationService.class).getSparkConfig(resourceManager);
             for (Map.Entry<String, String> entry : sparkConfig.entrySet()) {
                 sparkOptsSb.append("--conf ").append(entry.getKey()).append("=").append(entry.getValue()).append(" ");
@@ -106,9 +105,6 @@ public class SparkActionExecutor extends JavaActionExecutor {
         if (launcherJobConf.get("oozie.launcher." + TASK_USER_PRECEDENCE) == null) {
             launcherJobConf.set(TASK_USER_PRECEDENCE, "true");
         }
-        if (launcherJobConf.get("oozie.launcher." + TASK_USER_CLASSPATH_PRECEDENCE) == null) {
-            launcherJobConf.set(TASK_USER_CLASSPATH_PRECEDENCE, "true");
-        }
         return launcherJobConf;
     }
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/main/java/org/apache/oozie/command/wf/SubmitHttpXCommand.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/oozie/command/wf/SubmitHttpXCommand.java 
b/core/src/main/java/org/apache/oozie/command/wf/SubmitHttpXCommand.java
index d2a2742..331bcf8 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/SubmitHttpXCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/SubmitHttpXCommand.java
@@ -58,7 +58,7 @@ public abstract class SubmitHttpXCommand extends WorkflowXCommand<String> {
     protected static final Set<String> OPTIONAL_OOZIE_CONFS = new HashSet<String>();
 
     static {
-        MANDATORY_OOZIE_CONFS.add(XOozieClient.JT);
+        MANDATORY_OOZIE_CONFS.add(XOozieClient.RM);
         MANDATORY_OOZIE_CONFS.add(XOozieClient.NN);
         MANDATORY_OOZIE_CONFS.add(OozieClient.LIBPATH);
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/main/java/org/apache/oozie/command/wf/SubmitMRXCommand.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/oozie/command/wf/SubmitMRXCommand.java 
b/core/src/main/java/org/apache/oozie/command/wf/SubmitMRXCommand.java
index 9124a45..64fa89a 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/SubmitMRXCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/SubmitMRXCommand.java
@@ -41,14 +41,15 @@ public class SubmitMRXCommand extends SubmitHttpXCommand {
 
     static {
         SKIPPED_CONFS.add(WorkflowAppService.HADOOP_USER);
-        SKIPPED_CONFS.add(XOozieClient.JT);
+        SKIPPED_CONFS.add(XOozieClient.RM);
         SKIPPED_CONFS.add(XOozieClient.NN);
         // a brillant mind made a change in Configuration that 'fs.default.name' key gets converted to 'fs.defaultFS'
         // in Hadoop 0.23, we need skip that one too, keeping the old one because of Hadoop 1
         SKIPPED_CONFS.add(XOozieClient.NN_2);
 
         DEPRECATE_MAP.put(XOozieClient.NN, XOozieClient.NN_2);
-        DEPRECATE_MAP.put(XOozieClient.JT, XOozieClient.JT_2);
+        DEPRECATE_MAP.put(XOozieClient.RM, "yarn.resourcemanager.address");
+        DEPRECATE_MAP.put(XOozieClient.NN, "fs.defaultFS");
         DEPRECATE_MAP.put(WorkflowAppService.HADOOP_USER, "mapreduce.job.user.name");
     }
 
@@ -96,8 +97,8 @@ public class SubmitMRXCommand extends SubmitHttpXCommand {
     protected Element generateSection(Configuration conf, Namespace ns) {
         Element mapreduce = new Element("map-reduce", ns);
         Element jt = new Element("job-tracker", ns);
-        String newJTVal = conf.get(DEPRECATE_MAP.get(XOozieClient.JT));
-        jt.addContent(newJTVal != null ? newJTVal : (conf.get(XOozieClient.JT)));
+        String newJTVal = conf.get(DEPRECATE_MAP.get(XOozieClient.RM));
+        jt.addContent(newJTVal != null ? newJTVal : (conf.get(XOozieClient.RM)));
         mapreduce.addContent(jt);
         Element nn = new Element("name-node", ns);
         String newNNVal = conf.get(DEPRECATE_MAP.get(XOozieClient.NN));

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/main/java/org/apache/oozie/command/wf/SubmitScriptLanguageXCommand.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/oozie/command/wf/SubmitScriptLanguageXCommand.java
 
b/core/src/main/java/org/apache/oozie/command/wf/SubmitScriptLanguageXCommand.java
index 9d41305..b82369c 100644
--- 
a/core/src/main/java/org/apache/oozie/command/wf/SubmitScriptLanguageXCommand.java
+++ 
b/core/src/main/java/org/apache/oozie/command/wf/SubmitScriptLanguageXCommand.java
@@ -50,7 +50,7 @@ public abstract class SubmitScriptLanguageXCommand extends SubmitHttpXCommand {
         String name = getWorkflowName();
         Element ele = new Element(name, ns);
         Element jt = new Element("job-tracker", ns);
-        jt.addContent(conf.get(XOozieClient.JT));
+        jt.addContent(conf.get(XOozieClient.RM));
         ele.addContent(jt);
         Element nn = new Element("name-node", ns);
         nn.addContent(conf.get(XOozieClient.NN));

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/main/java/org/apache/oozie/command/wf/SubmitSqoopXCommand.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/oozie/command/wf/SubmitSqoopXCommand.java 
b/core/src/main/java/org/apache/oozie/command/wf/SubmitSqoopXCommand.java
index 51b739e..51f3d03 100644
--- a/core/src/main/java/org/apache/oozie/command/wf/SubmitSqoopXCommand.java
+++ b/core/src/main/java/org/apache/oozie/command/wf/SubmitSqoopXCommand.java
@@ -50,7 +50,7 @@ public class SubmitSqoopXCommand extends SubmitHttpXCommand {
         String name = "sqoop";
         Element ele = new Element(name, ns);
         Element jt = new Element("job-tracker", ns);
-        jt.addContent(conf.get(XOozieClient.JT));
+        jt.addContent(conf.get(XOozieClient.RM));
         ele.addContent(jt);
         Element nn = new Element("name-node", ns);
         nn.addContent(conf.get(XOozieClient.NN));

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/main/java/org/apache/oozie/service/HadoopAccessorService.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/oozie/service/HadoopAccessorService.java 
b/core/src/main/java/org/apache/oozie/service/HadoopAccessorService.java
index f171ea3..794e825 100644
--- a/core/src/main/java/org/apache/oozie/service/HadoopAccessorService.java
+++ b/core/src/main/java/org/apache/oozie/service/HadoopAccessorService.java
@@ -82,8 +82,6 @@ public class HadoopAccessorService implements Service {
     protected static final String JT_PRINCIPAL = "mapreduce.jobtracker.kerberos.principal";
     /** The Kerberos principal for the resource manager.*/
     protected static final String RM_PRINCIPAL = "yarn.resourcemanager.principal";
-    protected static final String HADOOP_JOB_TRACKER = "mapred.job.tracker";
-    protected static final String HADOOP_JOB_TRACKER_2 = "mapreduce.jobtracker.address";
     protected static final String HADOOP_YARN_RM = "yarn.resourcemanager.address";
     private static final Map<String, Text> mrTokenRenewers = new HashMap<String, Text>();
 
@@ -491,7 +489,7 @@ public class HadoopAccessorService implements Service {
         if (!conf.getBoolean(OOZIE_HADOOP_ACCESSOR_SERVICE_CREATED, false)) {
             throw new HadoopAccessorException(ErrorCode.E0903);
         }
-        String jobTracker = conf.get(JavaActionExecutor.HADOOP_JOB_TRACKER);
+        String jobTracker = conf.get(JavaActionExecutor.HADOOP_YARN_RM);
         validateJobTracker(jobTracker);
         try {
             UserGroupInformation ugi = getUGI(user);
@@ -606,10 +604,7 @@ public class HadoopAccessorService implements Service {
             renewer = mrTokenRenewers.get(servicePrincipal);
             if (renewer == null) {
                 // Mimic org.apache.hadoop.mapred.Master.getMasterPrincipal()
-                String target = jobConf.get(HADOOP_YARN_RM, jobConf.get(HADOOP_JOB_TRACKER_2));
-                if (target == null) {
-                    target = jobConf.get(HADOOP_JOB_TRACKER);
-                }
+                String target = jobConf.get(HADOOP_YARN_RM);
                 try {
                     String addr = NetUtils.createSocketAddr(target).getHostName();
                     renewer = new Text(SecurityUtil.getServerPrincipal(servicePrincipal, addr));

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/main/java/org/apache/oozie/service/ShareLibService.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/service/ShareLibService.java 
b/core/src/main/java/org/apache/oozie/service/ShareLibService.java
index 66fd864..fa230da 100644
--- a/core/src/main/java/org/apache/oozie/service/ShareLibService.java
+++ b/core/src/main/java/org/apache/oozie/service/ShareLibService.java
@@ -51,14 +51,13 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.oozie.action.ActionExecutor;
 import org.apache.oozie.action.hadoop.JavaActionExecutor;
 import org.apache.oozie.client.rest.JsonUtils;
-import org.apache.oozie.hadoop.utils.HadoopShims;
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.oozie.ErrorCode;
 import org.apache.oozie.util.Instrumentable;
 import org.apache.oozie.util.Instrumentation;
+import org.apache.oozie.util.FSUtils;
 import org.apache.oozie.util.XConfiguration;
 import org.apache.oozie.util.XLog;
-import com.google.common.annotations.VisibleForTesting;
-
-import org.apache.oozie.ErrorCode;
 import org.jdom.JDOMException;
 
 public class ShareLibService implements Service, Instrumentable {
@@ -217,7 +216,7 @@ public class ShareLibService implements Service, Instrumentable {
      *
      * @param fs the FileSystem
      * @param path the Path
-     * @param perm is permission
+     * @param fsPerm is permission
      * @throws IOException Signals that an I/O exception has occurred.
      */
     private void recursiveChangePermissions(FileSystem fs, Path path, FsPermission fsPerm) throws IOException {
@@ -352,14 +351,12 @@ public class ShareLibService implements Service, 
Instrumentable {
     }
 
     private void checkSymlink(String shareLibKey) throws IOException {
-        if (!HadoopShims.isSymlinkSupported() || symlinkMapping.get(shareLibKey) == null
-                || symlinkMapping.get(shareLibKey).isEmpty()) {
+        if (symlinkMapping.get(shareLibKey) == null || symlinkMapping.get(shareLibKey).isEmpty()) {
             return;
         }
 
-        HadoopShims fileSystem = new HadoopShims(fs);
         for (Path path : symlinkMapping.get(shareLibKey).keySet()) {
-            if (!symlinkMapping.get(shareLibKey).get(path).equals(fileSystem.getSymLinkTarget(path))) {
+            if (!symlinkMapping.get(shareLibKey).get(path).equals(FSUtils.getSymLinkTarget(fs, path))) {
                 synchronized (ShareLibService.class) {
                     Map<String, List<Path>> tmpShareLibMap = new HashMap<String, List<Path>>(shareLibMap);
 
@@ -370,7 +367,7 @@ public class ShareLibService implements Service, Instrumentable {
                             symlinkMapping);
 
                     LOG.info(MessageFormat.format("Symlink target for [{0}] has changed, was [{1}], now [{2}]",
-                            shareLibKey, path, fileSystem.getSymLinkTarget(path)));
+                            shareLibKey, path, FSUtils.getSymLinkTarget(fs, path)));
                     loadShareLibMetaFile(tmpShareLibMap, tmpSymlinkMapping, tmpShareLibConfigMap, sharelibMappingFile,
                             shareLibKey);
                     shareLibMap = tmpShareLibMap;
@@ -636,19 +633,18 @@ public class ShareLibService implements Service, Instrumentable {
             throws IOException {
         List<Path> listOfPaths = new ArrayList<Path>();
         Map<Path, Path> symlinkMappingforAction = new HashMap<Path, Path>();
-        HadoopShims fileSystem = new HadoopShims(fs);
 
         for (String dfsPath : pathList) {
             Path path = new Path(dfsPath);
             getPathRecursively(fs, new Path(dfsPath), listOfPaths, shareLibKey, shareLibConfigMap);
-            if (HadoopShims.isSymlinkSupported() && fileSystem.isSymlink(path)) {
-                symlinkMappingforAction.put(path, fileSystem.getSymLinkTarget(path));
+            if (FSUtils.isSymlink(fs, path)) {
+                symlinkMappingforAction.put(path, FSUtils.getSymLinkTarget(fs, path));
             }
         }
-        if (HadoopShims.isSymlinkSupported()) {
-            LOG.info("symlink for " + shareLibKey + ":" + symlinkMappingforAction);
-            tmpSymlinkMapping.put(shareLibKey, symlinkMappingforAction);
-        }
+
+        LOG.info("symlink for " + shareLibKey + ":" + symlinkMappingforAction);
+        tmpSymlinkMapping.put(shareLibKey, symlinkMappingforAction);
+
         tmpShareLibMap.put(shareLibKey, listOfPaths);
         LOG.info("Share lib for " + shareLibKey + ":" + listOfPaths);
     }

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/main/java/org/apache/oozie/util/FSUtils.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/util/FSUtils.java 
b/core/src/main/java/org/apache/oozie/util/FSUtils.java
new file mode 100644
index 0000000..6d73fc7
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/util/FSUtils.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.util;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import java.io.IOException;
+import java.net.URI;
+
+public final class FSUtils {
+    public static Path getSymLinkTarget(FileSystem fs, Path p) throws IOException {
+        try {
+            //getSymlink doesn't work with fragment name, need to remove fragment before calling getSymlink
+            Path tempPath = new URI(p.toString()).getFragment() == null ? p : new Path(new URI(p.toString()).getPath());
+            return fs.getFileLinkStatus(tempPath).getSymlink();
+        }
+        catch (java.net.URISyntaxException e) {
+            throw new IOException(e);
+        }
+    }
+
+    public static boolean isSymlink(FileSystem fs, Path p) throws IOException {
+        try {
+            //isSymlink doesn't work with fragment name, need to remove fragment before checking for symlink
+            Path tempPath = new URI(p.toString()).getFragment() == null ? p : new Path(new URI(p.toString()).getPath());
+            return fs.getFileLinkStatus(tempPath).isSymlink();
+        }
+        catch (java.net.URISyntaxException e) {
+            throw new IOException(e);
+        }
+    }
+
+    public static void createSymlink(FileSystem fs, Path target, Path link, boolean createParent) throws IOException {
+        fs.createSymlink(target, link, createParent);
+    }
+}
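
The HadoopShims wrapper is gone; callers such as ShareLibService now use these static FSUtils helpers directly, since symlinks are always available on Hadoop 2. A rough usage sketch of the call-site change (the FileSystem and sharelib path below are made up for illustration):

    FileSystem fs = FileSystem.get(new Configuration());
    Path sharelibPath = new Path("/user/oozie/share/lib/lib_20160616/pig");   // illustrative path
    if (FSUtils.isSymlink(fs, sharelibPath)) {                 // was: new HadoopShims(fs).isSymlink(path)
        Path target = FSUtils.getSymLinkTarget(fs, sharelibPath);
        // symlink-aware handling, e.g. record the path -> target mapping as ShareLibService does
    }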

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/main/java/org/apache/oozie/util/JobUtils.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/util/JobUtils.java 
b/core/src/main/java/org/apache/oozie/util/JobUtils.java
index a3a5fc0..cb53bc7 100644
--- a/core/src/main/java/org/apache/oozie/util/JobUtils.java
+++ b/core/src/main/java/org/apache/oozie/util/JobUtils.java
@@ -138,8 +138,7 @@ public class JobUtils {
     }
 
     /**
-     * This method provides a wrapper around hadoop 0.20/1.x and 0.23/2.x implementations.
-     * TODO: Remove the workaround when we drop the support for hadoop 0.20.
+     * This method provides a wrapper around hadoop 2.x implementations.
      * @param file Path of the file to be added
      * @param conf Configuration that contains the classpath setting
      * @param fs FileSystem with respect to which path should be interpreted (may be null)
@@ -148,24 +147,12 @@ public class JobUtils {
     public static void addFileToClassPath(Path file, Configuration conf, FileSystem fs) throws IOException {
         if (fs == null) {
             Configuration defaultConf = Services.get().get(HadoopAccessorService.class)
-                    .createJobConf(conf.get(JavaActionExecutor.HADOOP_JOB_TRACKER));
+                    .createJobConf(conf.get(JavaActionExecutor.HADOOP_YARN_RM));
             XConfiguration.copy(conf, defaultConf);
             // it fails with conf, therefore we pass defaultConf instead
             fs = file.getFileSystem(defaultConf);
         }
-        // Hadoop 0.20/1.x.
-        if (Services.get().get(HadoopAccessorService.class).getCachedConf().get("yarn.resourcemanager.webapp.address") == null) {
-            // Duplicate hadoop 1.x code to workaround MAPREDUCE-2361 in Hadoop 0.20
-            // Refer OOZIE-1806.
-            String filepath = file.toUri().getPath();
-            String classpath = conf.get("mapred.job.classpath.files");
-            conf.set("mapred.job.classpath.files",
-                    classpath == null ? filepath : classpath + System.getProperty("path.separator") + filepath);
-            URI uri = fs.makeQualified(file).toUri();
-            DistributedCache.addCacheFile(uri, conf);
-        }
-        else { // Hadoop 0.23/2.x
-            DistributedCache.addFileToClassPath(file, conf, fs);
-        }
+
+        DistributedCache.addFileToClassPath(file, conf, fs);
     }
 }

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java 
b/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
index 5345ae6..7f01680 100644
--- 
a/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
+++ 
b/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
@@ -538,9 +538,7 @@ public class TestFsActionExecutor extends ActionExecutorTestCase {
         assertEquals("rwx------", fs.getFileStatus(new Path(basePath + "/10/dir1")).getPermission().toString());
         assertEquals("rwx------", fs.getFileStatus(new Path(basePath + "/10/dir2")).getPermission().toString());
         assertEquals("rwx------", fs.getFileStatus(new Path(basePath + "/11/dir3")).getPermission().toString());
-        // HDFS-4659 introduced an incompatible change that causes the following to be "rwx------" when run against Hadoop 2.1.x
-        // but in Hadoop 1.x its still "rw-------" so we'll just skip verifying this for now.
-        //assertEquals("rw-------", fs.getFileStatus(new Path(basePath + "/10/dir1/file1")).getPermission().toString());
+        assertEquals("rwx------", fs.getFileStatus(new Path(basePath + "/10/dir1/file1")).getPermission().toString());
 
         fs.delete(basePath, true);
     }

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java 
b/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
index 057c9fb..879bfeb 100644
--- 
a/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
+++ 
b/core/src/test/java/org/apache/oozie/action/hadoop/TestJavaActionExecutor.java
@@ -54,7 +54,6 @@ import org.apache.oozie.action.ActionExecutorException;
 import org.apache.oozie.client.OozieClient;
 import org.apache.oozie.client.WorkflowAction;
 import org.apache.oozie.client.WorkflowJob;
-import org.apache.oozie.hadoop.utils.HadoopShims;
 import org.apache.oozie.service.ConfigurationService;
 import org.apache.oozie.service.HadoopAccessorService;
 import org.apache.oozie.service.LiteWorkflowStoreService;
@@ -63,6 +62,7 @@ import org.apache.oozie.service.ShareLibService;
 import org.apache.oozie.service.UUIDService;
 import org.apache.oozie.service.WorkflowAppService;
 import org.apache.oozie.service.WorkflowStoreService;
+import org.apache.oozie.util.FSUtils;
 import org.apache.oozie.util.IOUtils;
 import org.apache.oozie.util.XConfiguration;
 import org.apache.oozie.util.XmlUtils;
@@ -113,7 +113,7 @@ public class TestJavaActionExecutor extends 
ActionExecutorTestCase {
         }
 
         conf = new XConfiguration();
-        conf.set("mapred.job.tracker", "a");
+        conf.set("yarn.resourcemanager.address", "a");
         try {
             JavaActionExecutor.checkForDisallowedProps(conf, "x");
             fail();
@@ -200,7 +200,7 @@ public class TestJavaActionExecutor extends 
ActionExecutorTestCase {
 
         conf = ae.createBaseHadoopConf(context, actionXml);
         assertEquals(protoConf.get(WorkflowAppService.HADOOP_USER), 
conf.get(WorkflowAppService.HADOOP_USER));
-        assertEquals(getJobTrackerUri(), conf.get("mapred.job.tracker"));
+        assertEquals(getJobTrackerUri(), 
conf.get("yarn.resourcemanager.address"));
         assertEquals(getNameNodeUri(), conf.get("fs.default.name"));
 
         conf = ae.createBaseHadoopConf(context, actionXml);
@@ -350,7 +350,7 @@ public class TestJavaActionExecutor extends 
ActionExecutorTestCase {
         assertNotNull(consoleUrl);
 
         JobConf jobConf = 
Services.get().get(HadoopAccessorService.class).createJobConf(jobTracker);
-        jobConf.set("mapred.job.tracker", jobTracker);
+        jobConf.set("yarn.resourcemanager.address", jobTracker);
 
         JobClient jobClient =
             
Services.get().get(HadoopAccessorService.class).createJobClient(getTestUser(), 
jobConf);
@@ -1991,12 +1991,7 @@ public class TestJavaActionExecutor extends 
ActionExecutorTestCase {
         assertEquals("-Xmx200m", 
jobConf.get(JavaActionExecutor.HADOOP_CHILD_JAVA_OPTS));
         assertEquals("-Xmx1024m -Djava.io.tmpdir=./usr", 
jobConf.get(JavaActionExecutor.HADOOP_MAP_JAVA_OPTS));
         assertEquals("-Xmx2560m -XX:NewRatio=8", 
jobConf.get(JavaActionExecutor.HADOOP_REDUCE_JAVA_OPTS));
-        // There's an extra parameter (-Xmx1024m) in here when using YARN that's not here when using MR1
-        if (HadoopShims.isYARN()) {
-            assertEquals("-Xmx1024m -Djava.io.tmpdir=./tmp", jobConf.get(JavaActionExecutor.YARN_AM_COMMAND_OPTS));
-        } else {
-            assertNull(jobConf.get(JavaActionExecutor.YARN_AM_COMMAND_OPTS));
-        }
+        assertEquals("-Xmx1024m -Djava.io.tmpdir=./tmp", jobConf.get(JavaActionExecutor.YARN_AM_COMMAND_OPTS));
     }
     public void testUpdateConfForUberMode() throws Exception {
         Element actionXml1 = XmlUtils
@@ -2032,15 +2027,10 @@ public class TestJavaActionExecutor extends 
ActionExecutorTestCase {
                 launcherConf.get("mapred.child.java.opts"));
         assertEquals("-Xmx2048m -Djava.net.preferIPv4Stack=true",
                 launcherConf.get("mapreduce.map.java.opts"));
-        // There's an extra parameter (-Xmx1024m) in here when using YARN that's not here when using MR1
-        if (HadoopShims.isYARN()) {
-            assertEquals("-Xmx1024m -Xmx2048m -Djava.net.preferIPv4Stack=true -Xmx2560m -Djava.io.tmpdir=./tmp",
-                    launcherConf.get(JavaActionExecutor.YARN_AM_COMMAND_OPTS).trim());
-        }
-        else {
-            assertEquals("-Xmx2048m -Djava.net.preferIPv4Stack=true -Xmx2560m -Djava.io.tmpdir=./tmp",
-                    launcherConf.get(JavaActionExecutor.YARN_AM_COMMAND_OPTS).trim());
-        }
+
+        assertEquals("-Xmx1024m -Xmx2048m -Djava.net.preferIPv4Stack=true -Xmx2560m -Djava.io.tmpdir=./tmp",
+                launcherConf.get(JavaActionExecutor.YARN_AM_COMMAND_OPTS).trim());
+
         assertEquals(2560, heapSize);
 
         // env
@@ -2375,21 +2365,14 @@ public class TestJavaActionExecutor extends 
ActionExecutorTestCase {
 
         // Test when server side setting is not enabled
         JobConf launcherConf = ae.createLauncherConf(getFileSystem(), context, 
action, actionXml, actionConf);
-        if (HadoopShims.isYARN()) {
-            assertEquals("true", launcherConf.get(JavaActionExecutor.HADOOP_YARN_TIMELINE_SERVICE_ENABLED));
-        } else {
-            assertNull(launcherConf.get(JavaActionExecutor.HADOOP_YARN_TIMELINE_SERVICE_ENABLED));
-        }
+        assertEquals("true", launcherConf.get(JavaActionExecutor.HADOOP_YARN_TIMELINE_SERVICE_ENABLED));
 
         ConfigurationService.set("oozie.action.launcher." + 
JavaActionExecutor.HADOOP_YARN_TIMELINE_SERVICE_ENABLED, "true");
 
         // Test when server side setting is enabled but tez-site.xml is not in 
DistributedCache
         launcherConf = ae.createLauncherConf(getFileSystem(), context, action, 
actionXml, actionConf);
-        if (HadoopShims.isYARN()) {
-            assertEquals("true", launcherConf.get(JavaActionExecutor.HADOOP_YARN_TIMELINE_SERVICE_ENABLED));
-        } else {
-            assertNull(launcherConf.get(JavaActionExecutor.HADOOP_YARN_TIMELINE_SERVICE_ENABLED));
-        }
+        assertEquals("true", launcherConf.get(JavaActionExecutor.HADOOP_YARN_TIMELINE_SERVICE_ENABLED));
+
         final Path tezSite = new Path("/tmp/tez-site.xml");
         final FSDataOutputStream out = getFileSystem().create(tezSite);
         out.close();
@@ -2479,9 +2462,7 @@ public class TestJavaActionExecutor extends 
ActionExecutorTestCase {
         conf.set(WorkflowAppService.HADOOP_USER, getTestUser());
         ae.addToCache(conf, appPath, appJarFullPath.toString(), false);
         // assert that mapred.cache.files contains jar URI path (full on Hadoop-2)
-        Path jarPath = HadoopShims.isYARN() ?
-                new Path(appJarFullPath.toUri()):
-                new Path(appJarFullPath.toUri().getPath());
+        Path jarPath = new Path(appJarFullPath.toUri());
         assertTrue(conf.get("mapred.cache.files").contains(jarPath.toString()));
         // assert that dist cache classpath contains jar URI path
         Path[] paths = DistributedCache.getFileClassPaths(conf);
@@ -2778,25 +2759,25 @@ public class TestJavaActionExecutor extends 
ActionExecutorTestCase {
 
         Configuration conf = new Configuration(false);
         Assert.assertEquals(0, conf.size());
-        conf.set("mapred.job.tracker", getJobTrackerUri());
+        conf.set("yarn.resourcemanager.address", getJobTrackerUri());
         ae.setupLauncherConf(conf, actionXmlWithConfiguration, null, null);
-        assertEquals(getJobTrackerUri(), conf.get("mapred.job.tracker"));
+        assertEquals(getJobTrackerUri(), 
conf.get("yarn.resourcemanager.address"));
         assertEquals("AA", conf.get("oozie.launcher.a"));
         assertEquals("AA", conf.get("a"));
         assertEquals("action.barbar", 
conf.get("oozie.launcher.action.foofoo"));
         assertEquals("action.barbar", conf.get("action.foofoo"));
         assertEquals("true", conf.get("mapreduce.job.ubertask.enable"));
         if (conf.size() == 7) {
-            assertEquals(getJobTrackerUri(), 
conf.get("mapreduce.jobtracker.address"));
+            assertEquals(getJobTrackerUri(), 
conf.get("yarn.resourcemanager.address"));
         } else {
             assertEquals(6, conf.size());
         }
 
         conf = new Configuration(false);
         Assert.assertEquals(0, conf.size());
-        conf.set("mapred.job.tracker", getJobTrackerUri());
+        conf.set("yarn.resourcemanager.address", getJobTrackerUri());
         ae.setupLauncherConf(conf, actionXmlWithoutConfiguration, null, null);
-        assertEquals(getJobTrackerUri(), conf.get("mapred.job.tracker"));
+        assertEquals(getJobTrackerUri(), 
conf.get("yarn.resourcemanager.address"));
         assertEquals("action.barbar", 
conf.get("oozie.launcher.action.foofoo"));
         assertEquals("action.barbar", conf.get("action.foofoo"));
         assertEquals("true", conf.get("mapreduce.job.ubertask.enable"));

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/test/java/org/apache/oozie/client/TestOozieCLI.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/client/TestOozieCLI.java 
b/core/src/test/java/org/apache/oozie/client/TestOozieCLI.java
index 54bfc16..b939f58 100644
--- a/core/src/test/java/org/apache/oozie/client/TestOozieCLI.java
+++ b/core/src/test/java/org/apache/oozie/client/TestOozieCLI.java
@@ -31,7 +31,6 @@ import java.util.concurrent.Callable;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.oozie.BaseEngine;
 import org.apache.oozie.BuildInfo;
 import org.apache.oozie.cli.CLIParser;
 import org.apache.oozie.cli.OozieCLI;
@@ -136,7 +135,7 @@ public class TestOozieCLI extends DagServletTestCase {
         Properties props = new Properties();
         props.setProperty(OozieClient.USER_NAME, getTestUser());
         props.setProperty(XOozieClient.NN, "localhost:9000");
-        props.setProperty(XOozieClient.JT, "localhost:9001");
+        props.setProperty(XOozieClient.RM, "localhost:9001");
         props.setProperty("oozie.libpath", appPath);
         props.setProperty("mapred.output.dir", appPath);
         props.setProperty("a", "A");
@@ -155,7 +154,7 @@ public class TestOozieCLI extends DagServletTestCase {
         props.setProperty(OozieClient.APP_PATH, appPath);
         props.setProperty(OozieClient.RERUN_SKIP_NODES, "node");
         props.setProperty(XOozieClient.NN, "localhost:9000");
-        props.setProperty(XOozieClient.JT, "localhost:9001");
+        props.setProperty(XOozieClient.RM, "localhost:9001");
         if (useNewAPI) {
             props.setProperty("mapreduce.map.class", "mapper.class");
             props.setProperty("mapreduce.reduce.class", "reducer.class");

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/test/java/org/apache/oozie/client/TestWorkflowXClient.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/client/TestWorkflowXClient.java 
b/core/src/test/java/org/apache/oozie/client/TestWorkflowXClient.java
index b06f81d..26d946f 100644
--- a/core/src/test/java/org/apache/oozie/client/TestWorkflowXClient.java
+++ b/core/src/test/java/org/apache/oozie/client/TestWorkflowXClient.java
@@ -29,8 +29,6 @@ import org.apache.oozie.servlet.MockDagEngineService;
 import org.apache.oozie.servlet.V1JobsServlet;
 import org.apache.oozie.servlet.V1AdminServlet;
 
-import java.io.File;
-
 public class TestWorkflowXClient extends DagServletTestCase {
 
     static {
@@ -60,7 +58,7 @@ public class TestWorkflowXClient extends DagServletTestCase {
                 Path libPath = new Path(getFsTestCaseDir(), "lib");
                 getFileSystem().mkdirs(libPath);
                 conf.setProperty(OozieClient.LIBPATH, libPath.toString());
-                conf.setProperty(XOozieClient.JT, "localhost:9001");
+                conf.setProperty(XOozieClient.RM, "localhost:9001");
                 conf.setProperty(XOozieClient.NN, "hdfs://localhost:9000");
                 String[] params = new String[]{"INPUT=input.txt"};
 
@@ -90,7 +88,7 @@ public class TestWorkflowXClient extends DagServletTestCase {
                 getFileSystem().mkdirs(libPath);
                 System.out.println(libPath.toString());
                 conf.setProperty(OozieClient.LIBPATH, libPath.toString());
-                conf.setProperty(XOozieClient.JT, "localhost:9001");
+                conf.setProperty(XOozieClient.RM, "localhost:9001");
                 conf.setProperty(XOozieClient.NN, "hdfs://localhost:9000");
                 String[] params = new String[]{"NAME=test"};
 
@@ -120,7 +118,7 @@ public class TestWorkflowXClient extends DagServletTestCase 
{
                 getFileSystem().mkdirs(libPath);
                 System.out.println(libPath.toString());
                 conf.setProperty(OozieClient.LIBPATH, libPath.toString());
-                conf.setProperty(XOozieClient.JT, "localhost:9001");
+                conf.setProperty(XOozieClient.RM, "localhost:9001");
                 conf.setProperty(XOozieClient.NN, "hdfs://localhost:9000");
 
                 assertEquals(MockDagEngineService.JOB_ID + wfCount + 
MockDagEngineService.JOB_ID_END,
@@ -156,7 +154,7 @@ public class TestWorkflowXClient extends DagServletTestCase 
{
                 catch (RuntimeException exception) {
                     assertEquals("java.lang.RuntimeException: jobtracker is 
not specified in conf", exception.toString());
                 }
-                conf.setProperty(XOozieClient.JT, "localhost:9001");
+                conf.setProperty(XOozieClient.RM, "localhost:9001");
                 try {
                     wc.submitMapReduce(conf);
                     fail("submit client without NN should throw exception");

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/test/java/org/apache/oozie/command/wf/TestSubmitHiveXCommand.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/command/wf/TestSubmitHiveXCommand.java 
b/core/src/test/java/org/apache/oozie/command/wf/TestSubmitHiveXCommand.java
index 43edf5e..014daff 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestSubmitHiveXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestSubmitHiveXCommand.java
@@ -45,7 +45,7 @@ public class TestSubmitHiveXCommand extends XFsTestCase {
     public void testWFXmlGeneration() throws Exception {
         Configuration conf = new Configuration();
 
-        conf.set(XOozieClient.JT, "jobtracker");
+        conf.set(XOozieClient.RM, "jobtracker");
         conf.set(XOozieClient.NN, "namenode");
         conf.set(OozieClient.LIBPATH, "libpath");
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/test/java/org/apache/oozie/command/wf/TestSubmitMRXCommand.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/command/wf/TestSubmitMRXCommand.java 
b/core/src/test/java/org/apache/oozie/command/wf/TestSubmitMRXCommand.java
index 5bc5747..388ff94 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestSubmitMRXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestSubmitMRXCommand.java
@@ -51,7 +51,7 @@ public class TestSubmitMRXCommand extends XFsTestCase {
     public void testWFXmlGeneration() throws Exception {
         Configuration conf = new Configuration(false);
 
-        conf.set(XOozieClient.JT, "jobtracker");
+        conf.set(XOozieClient.RM, "jobtracker");
         conf.set(XOozieClient.NN, "namenode");
         conf.set(OozieClient.LIBPATH, "libpath");
 
@@ -97,7 +97,7 @@ public class TestSubmitMRXCommand extends XFsTestCase {
     public void testWFXmlGenerationNegative1() throws Exception {
         Configuration conf = new Configuration();
 
-        conf.set(XOozieClient.JT, "jobtracker");
+        conf.set(XOozieClient.RM, "jobtracker");
         conf.set(XOozieClient.NN, "namenode");
         // conf.set(XOozieClient.LIBPATH, "libpath");
 
@@ -118,8 +118,8 @@ public class TestSubmitMRXCommand extends XFsTestCase {
     public void testWFXmlGenerationNewConfigProps() throws Exception {
         try {
             Configuration conf = new Configuration(false);
-            conf.set(XOozieClient.NN_2, "new_NN");
-            conf.set(XOozieClient.JT_2, "new_JT");
+            conf.set(XOozieClient.NN, "new_NN");
+            conf.set(XOozieClient.RM, "new_JT");
             conf.set("mapred.mapper.class", "TestMapper");
             conf.set("mapred.reducer.class", "TestReducer");
             conf.set("mapred.input.dir", "testInput");

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/test/java/org/apache/oozie/command/wf/TestSubmitPigXCommand.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/command/wf/TestSubmitPigXCommand.java 
b/core/src/test/java/org/apache/oozie/command/wf/TestSubmitPigXCommand.java
index 5a1de25..e3ce56b 100644
--- a/core/src/test/java/org/apache/oozie/command/wf/TestSubmitPigXCommand.java
+++ b/core/src/test/java/org/apache/oozie/command/wf/TestSubmitPigXCommand.java
@@ -46,7 +46,7 @@ public class TestSubmitPigXCommand extends XFsTestCase {
     public void testWFXmlGeneration1() throws Exception {
         Configuration conf = new Configuration();
 
-        conf.set(XOozieClient.JT, "jobtracker");
+        conf.set(XOozieClient.RM, "jobtracker");
         conf.set(XOozieClient.NN, "namenode");
         conf.set(OozieClient.LIBPATH, "libpath");
 
@@ -118,7 +118,7 @@ public class TestSubmitPigXCommand extends XFsTestCase {
     public void testWFXmlGeneration2() throws Exception {
         Configuration conf = new Configuration();
 
-        conf.set(XOozieClient.JT, "jobtracker");
+        conf.set(XOozieClient.RM, "jobtracker");
         conf.set(XOozieClient.NN, "namenode");
         conf.set(OozieClient.LIBPATH, "libpath");
 
@@ -169,7 +169,7 @@ public class TestSubmitPigXCommand extends XFsTestCase {
     public void testWFXmlGenerationNegative1() throws Exception {
         Configuration conf = new Configuration();
 
-        conf.set(XOozieClient.JT, "jobtracker");
+        conf.set(XOozieClient.RM, "jobtracker");
         conf.set(XOozieClient.NN, "namenode");
         // conf.set(XOozieClient.LIBPATH, "libpath");
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/test/java/org/apache/oozie/command/wf/TestSubmitSqoopXCommand.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/command/wf/TestSubmitSqoopXCommand.java 
b/core/src/test/java/org/apache/oozie/command/wf/TestSubmitSqoopXCommand.java
index 49b5028..05fef6a 100644
--- 
a/core/src/test/java/org/apache/oozie/command/wf/TestSubmitSqoopXCommand.java
+++ 
b/core/src/test/java/org/apache/oozie/command/wf/TestSubmitSqoopXCommand.java
@@ -46,7 +46,7 @@ public class TestSubmitSqoopXCommand extends XFsTestCase {
     public void testWFXmlGeneration() throws Exception {
         Configuration conf = new Configuration();
 
-        conf.set(XOozieClient.JT, "jobtracker");
+        conf.set(XOozieClient.RM, "jobtracker");
         conf.set(XOozieClient.NN, "namenode");
         conf.set(OozieClient.LIBPATH, "libpath");
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java b/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
index bbe2594..96faa48 100644
--- a/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
+++ b/core/src/test/java/org/apache/oozie/service/TestHadoopAccessorService.java
@@ -182,7 +182,7 @@ public class TestHadoopAccessorService extends XTestCase {
         HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
         JobConf jobConf = new JobConf(false);
         assertEquals(new Text("oozie mr token"), has.getMRTokenRenewerInternal(jobConf));
-        jobConf.set("mapred.job.tracker", "localhost:50300");
+        jobConf.set("yarn.resourcemanager.address", "localhost:50300");
         jobConf.set("mapreduce.jobtracker.kerberos.principal", "mapred/_h...@kdc.domain.com");
         assertEquals(new Text("mapred/localh...@kdc.domain.com"), has.getMRTokenRenewerInternal(jobConf));
         jobConf = new JobConf(false);

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/service/TestShareLibService.java b/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
index 7de7d78..6f25452 100644
--- a/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
+++ b/core/src/test/java/org/apache/oozie/service/TestShareLibService.java
@@ -43,8 +43,8 @@ import org.apache.oozie.action.hadoop.JavaActionExecutor;
 import org.apache.oozie.action.hadoop.PigActionExecutor;
 import org.apache.oozie.action.hadoop.TestJavaActionExecutor;
 import org.apache.oozie.client.OozieClient;
-import org.apache.oozie.hadoop.utils.HadoopShims;
 import org.apache.oozie.test.XFsTestCase;
+import org.apache.oozie.util.FSUtils;
 import org.apache.oozie.util.IOUtils;
 import org.apache.oozie.util.XConfiguration;
 import org.apache.oozie.util.XmlUtils;
@@ -495,11 +495,8 @@ public class TestShareLibService extends XFsTestCase {
             assertTrue(shareLibService.getShareLibJars("something_new").get(0).getName().endsWith("somethingNew.jar"));
             assertTrue(shareLibService.getShareLibJars("pig").get(0).getName().endsWith("pig.jar"));
             assertTrue(shareLibService.getShareLibJars("directjar").get(0).getName().endsWith("direct.jar"));
-            // Skipping for hadoop - 1.x because symlink is not supported
-            if (HadoopShims.isSymlinkSupported()) {
-                assertTrue(
-                        shareLibService.getShareLibJars("linkFile").get(0).getName().endsWith("targetOfLinkFile.xml"));
-            }
+            assertTrue(shareLibService.getShareLibJars("linkFile").get(0).getName().endsWith("targetOfLinkFile.xml"));
+
             List<Path> listOfPaths = shareLibService.getShareLibJars("directjar");
             for (Path p : listOfPaths) {
                 assertTrue(p.toString().startsWith("hdfs"));
@@ -615,11 +612,6 @@ public class TestShareLibService extends XFsTestCase {
 
     @Test
     public void testMetafileSymlink() throws ServiceException, IOException {
-        // Assume.assumeTrue("Skipping for hadoop - 1.x",HadoopFileSystem.isSymlinkSupported());
-        if (!HadoopShims.isSymlinkSupported()) {
-            return;
-        }
-
         services = new Services();
         setSystemProps();
         Configuration conf = services.get(ConfigurationService.class).getConf();
@@ -651,15 +643,14 @@ public class TestShareLibService extends XFsTestCase {
 
             createFile(hive_site.toString());
 
-            HadoopShims fileSystem = new HadoopShims(fs);
-            fileSystem.createSymlink(basePath, symlink, true);
-            fileSystem.createSymlink(hive_site, symlink_hive_site, true);
+            FSUtils.createSymlink(fs, basePath, symlink, true);
+            FSUtils.createSymlink(fs, hive_site, symlink_hive_site, true);
 
             prop.put(ShareLibService.SHARE_LIB_CONF_PREFIX + ".pig", "/user/test/" + symlink.toString());
             prop.put(ShareLibService.SHARE_LIB_CONF_PREFIX + ".hive_conf", "/user/test/" + symlink_hive_site.toString()
                     + "#hive-site.xml");
             createTestShareLibMetaFile(fs, prop);
-            assertEquals(fileSystem.isSymlink(symlink), true);
+            assertEquals(FSUtils.isSymlink(fs, symlink), true);
 
             conf.set(ShareLibService.SHARELIB_MAPPING_FILE, fs.getUri() + "/user/test/config.properties");
             conf.set(ShareLibService.SHIP_LAUNCHER_JAR, "true");
@@ -667,9 +658,9 @@
                 ShareLibService shareLibService = Services.get().get(ShareLibService.class);
                 assertEquals(shareLibService.getShareLibJars("pig").size(), 2);
                 assertEquals(shareLibService.getShareLibJars("hive_conf").size(), 1);
-                new HadoopShims(fs).createSymlink(basePath1, symlink, true);
-                new HadoopShims(fs).createSymlink(hive_site1, symlink_hive_site, true);
-                assertEquals(new HadoopShims(fs).getSymLinkTarget(shareLibService.getShareLibJars("hive_conf").get(0)),
+                FSUtils.createSymlink(fs, basePath1, symlink, true);
+                FSUtils.createSymlink(fs, hive_site1, symlink_hive_site, true);
+                assertEquals(FSUtils.getSymLinkTarget(fs, shareLibService.getShareLibJars("hive_conf").get(0)),
                         hive_site1);
                 assertEquals(shareLibService.getShareLibJars("pig").size(), 3);
             }
@@ -781,8 +772,7 @@ public class TestShareLibService extends XFsTestCase {
             String symlinkTarget = linkDir.toString() + Path.SEPARATOR + "targetOfLinkFile.xml";
             createFile(directJarPath);
             createFile(symlinkTarget);
-            HadoopShims fsShim = new HadoopShims(fs);
-            fsShim.createSymlink(new Path(symlinkTarget), new Path(symlink), true);
+            FSUtils.createSymlink(fs, new Path(symlinkTarget), new Path(symlink), true);
 
             prop.put(ShareLibService.SHARE_LIB_CONF_PREFIX + ".pig", "/user/test/" + basePath.toString());
             prop.put(ShareLibService.SHARE_LIB_CONF_PREFIX + ".something_new", "/user/test/" + somethingNew.toString());
@@ -991,16 +981,11 @@ public class TestShareLibService extends XFsTestCase {
     private void verifyFilesInDistributedCache(URI[] cacheFiles, String... files) {
 
         String cacheFilesStr = Arrays.toString(cacheFiles);
-        if (new HadoopShims(getFileSystem()).isYARN()) {
-            // Hadoop 2 has two extra jars
-            assertEquals(cacheFiles.length, files.length + 2);
-            assertTrue(cacheFilesStr.contains("MRAppJar.jar"));
-            assertTrue(cacheFilesStr.contains("hadoop-mapreduce-client-jobclient-"));
+        // Hadoop 2 has the following jars too: MRAppJar.jar and hadoop-mapreduce-client-jobclient-
+        assertEquals(cacheFiles.length, files.length + 2);
+        assertTrue(cacheFilesStr.contains("MRAppJar.jar"));
+        assertTrue(cacheFilesStr.contains("hadoop-mapreduce-client-jobclient-"));
 
-        }
-        else {
-            assertEquals(cacheFiles.length, files.length);
-        }
         for (String file : files) {
             assertTrue(cacheFilesStr.contains(file));
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java b/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
index 9002b6c..ef6d019 100644
--- a/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
+++ b/core/src/test/java/org/apache/oozie/workflow/lite/TestLiteWorkflowAppParser.java
@@ -61,7 +61,6 @@ public class TestLiteWorkflowAppParser extends XTestCase {
     private String cleanupXml(String xml) {
         xml = xml.replaceAll(" xmlns=?(\"|\')(\"|\')", "");
         xml = xml.replaceAll("\\s*<source>.*</source>", "");    // remove the <source> added by Hadoop 2
-        xml = xml.replaceAll("\\s*<!--Loaded from Unknown-->", "");   // remove the <!--LoadedfromUnknown--> added by Hadoop 1.2.1
         return xml;
     }
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/distro/src/main/bin/addtowar.sh
----------------------------------------------------------------------
diff --git a/distro/src/main/bin/addtowar.sh b/distro/src/main/bin/addtowar.sh
index 3990409..9f8d53d 100644
--- a/distro/src/main/bin/addtowar.sh
+++ b/distro/src/main/bin/addtowar.sh
@@ -95,29 +95,13 @@ function checkOption() {
 #get the list of hadoop jars that will be injected based on the hadoop version
 function getHadoopJars() {
   version=$1
-  if [ "${version}" = "0.20.1" ]; then
-    #List is separated by ":"
-    hadoopJars="hadoop-core*.jar"
-  elif [ "${version}" = "0.20.2" ]; then
-    #List is separated by ":"
-    hadoopJars="hadoop-core*.jar"
-  elif [ "${version}" = "0.20.104" ]; then
-    #List is separated by ":"
-    hadoopJars="hadoop-core*.jar:jackson-core-asl-*.jar:jackson-mapper-asl-*.jar"
-  elif [ "${version}" = "0.20.200" ]; then
-    #List is separated by ":"
-    hadoopJars="hadoop-core*.jar:jackson-core-asl-*.jar:jackson-mapper-asl-*.jar:commons-configuration-*.jar"
-  elif [[ "${version}" =~ .*23 ]]; then
-    suffix="-[0-9.]*"
-    #List is separated by ":"
-    hadoopJars="hadoop-mapreduce-client-core${suffix}.jar:hadoop-mapreduce-client-common${suffix}.jar:hadoop-mapreduce-client-jobclient${suffix}.jar:hadoop-mapreduce-client-app${suffix}.jar:hadoop-yarn-common${suffix}.jar:hadoop-yarn-api${suffix}.jar:hadoop-hdfs${suffix}.jar:hadoop-common${suffix}.jar:hadoop-auth${suffix}.jar:guava*.jar:protobuf-*.jar:avro-ipc-*.jar:jackson-core-asl-*.jar:jackson-mapper-asl-*.jar:commons-configuration-*.jar"
-  elif [[ "${version}" =~ 2.* ]]; then
+  if [[ "${version}" =~ 2.* ]]; then
     suffix="-[0-9.]*"
     #List is separated by ":"
     hadoopJars="hadoop-mapreduce-client-core${suffix}.jar:hadoop-mapreduce-client-common${suffix}.jar:hadoop-mapreduce-client-jobclient${suffix}.jar:hadoop-mapreduce-client-app${suffix}.jar:hadoop-yarn-common${suffix}.jar:hadoop-yarn-api${suffix}.jar:hadoop-yarn-client${suffix}.jar:hadoop-hdfs${suffix}.jar:hadoop-common${suffix}.jar:hadoop-auth${suffix}.jar:guava*.jar:protobuf-*.jar:jackson-core-asl-*.jar:jackson-mapper-asl-*.jar:commons-configuration-*.jar:commons-cli-*.jar:commons-io-*.jar"
   else
     echo
-    echo "Exiting: Unsupported Hadoop version '${hadoopVer}', supported versions: 0.20.1, 0.20.2, 0.20.104, 0.20.200, 0.23.x and 2.x"
+    echo "Exiting: Unsupported Hadoop version '${hadoopVer}', supported versions: 2.x"
     echo
     cleanUp
     exit -1;

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/docs/src/site/twiki/DG_QuickStart.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/DG_QuickStart.twiki b/docs/src/site/twiki/DG_QuickStart.twiki
index 41ddbd8..9c7e3d7 100644
--- a/docs/src/site/twiki/DG_QuickStart.twiki
+++ b/docs/src/site/twiki/DG_QuickStart.twiki
@@ -41,17 +41,14 @@ suitable when same oozie package needs to be used in multiple set-ups with diffe
 
 2. Build with -Puber which will bundle the required libraries in the oozie war. Further, the following options are
 available to customise the versions of the dependencies:
--P<profile> - default hadoop-1. Valid are hadoop-1, hadoop-0.23, hadoop-2 or hadoop-3. Choose the correct hadoop
-profile depending on the hadoop version used.
--Dhadoop.version=<version> - default 1.2.1 for hadoop-1, 0.23.5 for hadoop-0.23, 2.3.0 for hadoop-2 and 3.0.0-SNAPSHOT
-    for hadoop-3
+-Dhadoop.version=<version> - default 2.4.0
 -Dhadoop.auth.version=<version> - defaults to hadoop version
 -Ddistcp.version=<version> - defaults to hadoop version
 -Dpig.version=<version> - default 0.12.1
--Dpig.classifier=<classifier> - default none
+-Dpig.classifier=<classifier> - default h2
 -Dsqoop.version=<version> - default 1.4.3
--Dsqoop.classifier=<classifier> - default hadoop100
--Dtomcat.version=<version> - default 6.0.41
+-Dsqoop.classifier=<classifier> - default hadoop200
+-Dtomcat.version=<version> - default 6.0.44
 -Dopenjpa.version=<version> - default 2.2.2
 -Dxerces.version=<version> - default 2.10.0
 -Dcurator.version=<version> - default 2.5.0
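
For illustration only (this example is not part of the patch): with the Hadoop 1 and 0.23 profiles gone, an uber
build of the distro against a specific Hadoop 2 release is driven purely by the remaining -D properties, along
the lines of

    $ bin/mkdistro.sh -Puber -Dhadoop.version=2.6.0 -DskipTests

where 2.6.0 is just an illustrative Hadoop 2.x version at or above the new 2.4.0 default.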

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/docs/src/site/twiki/ENG_Building.twiki
----------------------------------------------------------------------
diff --git a/docs/src/site/twiki/ENG_Building.twiki b/docs/src/site/twiki/ENG_Building.twiki
index e76368e..cce219e 100644
--- a/docs/src/site/twiki/ENG_Building.twiki
+++ b/docs/src/site/twiki/ENG_Building.twiki
@@ -112,9 +112,9 @@ Except for the options marked with =(*)=, the options can be specified in the =t
 of the Oozie project. The options marked with =(*)= are used in Maven POMs, thus they don't take effect if
 specified in the =test.properties= file (which is loaded by the =XTestCase= class at class initialization time).
 
-*hadoop.version* =(*)=: indicates the Hadoop version(Hadoop-1 or Hadoop-2) you wish to build Oozie against specifically. It will
-substitute this value in the Oozie POM properties and pull the corresponding Hadoop artifacts from Maven. Default version is 1.2.1
-for Hadoop-1 (the most common case). For Hadoop-2, the version you can pass is *2.3.0*.
+*hadoop.version* =(*)=: indicates the Hadoop version(Hadoop-2) you wish to build Oozie against specifically. It will
+substitute this value in the Oozie POM properties and pull the corresponding Hadoop artifacts from Maven.
+For Hadoop 2.x, the default (and minimum) version is 2.4.0.
 
 *generateSite* (*): generates Oozie documentation, default is undefined (no documentation is generated)
 
@@ -212,10 +212,7 @@ $ bin/mkdistro.sh [-DskipTests]
 Running =mkdistro.sh= will create the binary distribution of Oozie. The following options are available to customise
 the versions of the dependencies:
 -Puber - Bundle required hadoop and hcatalog libraries in oozie war
--P<profile> - default hadoop-1. Valid are hadoop-1, hadoop-0.23, hadoop-2 or hadoop-3. Choose the correct hadoop
-profile depending on the hadoop version used.
--Dhadoop.version=<version> - default 1.2.1 for hadoop-1, 0.23.5 for hadoop-0.23, 2.3.0 for hadoop-2 and 3.0
-.0-SNAPSHOT for hadoop-3
+-Dhadoop.version=<version> - default 2.4.0
 -Dhadoop.auth.version=<version> - defaults to hadoop version
 -Ddistcp.version=<version> - defaults to hadoop version
 -Dpig.version=<version> - default 0.12.1
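
A hedged sketch of the test build (again, not part of the patch): because hadoop.version is resolved in the Maven
POMs rather than in test.properties, overriding it has to happen on the Maven command line, for instance

    $ mvn clean test -Dhadoop.version=2.5.0

with 2.5.0 standing in for whichever Hadoop 2.x release the build should compile and test against.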

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/hadooplibs/hadoop-auth-0.23/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-auth-0.23/pom.xml b/hadooplibs/hadoop-auth-0.23/pom.xml
deleted file mode 100644
index 7b3b466..0000000
--- a/hadooplibs/hadoop-auth-0.23/pom.xml
+++ /dev/null
@@ -1,45 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.oozie</groupId>
-        <artifactId>oozie-main</artifactId>
-        <version>4.3.0-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
-    </parent>
-    <groupId>org.apache.oozie</groupId>
-    <artifactId>oozie-hadoop-auth</artifactId>
-    <version>hadoop-0.23-4.3.0-SNAPSHOT</version>
-    <description>Apache Oozie Hadoop Auth</description>
-    <name>Apache Oozie Hadoop Auth ${project.version} Test</name>
-    <packaging>jar</packaging>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-auth</artifactId>
-            <scope>compile</scope>
-        </dependency>
-    </dependencies>
-
-
-</project>
-

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/hadooplibs/hadoop-auth-1/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-auth-1/pom.xml b/hadooplibs/hadoop-auth-1/pom.xml
deleted file mode 100644
index 9da6c08..0000000
--- a/hadooplibs/hadoop-auth-1/pom.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.oozie</groupId>
-        <artifactId>oozie-main</artifactId>
-        <version>4.3.0-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
-    </parent>
-    <groupId>org.apache.oozie</groupId>
-    <artifactId>oozie-hadoop-auth</artifactId>
-    <version>hadoop-1-4.3.0-SNAPSHOT</version>
-    <description>Apache Oozie Hadoop Auth</description>
-    <name>Apache Oozie Hadoop Auth ${project.version}</name>
-    <packaging>jar</packaging>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-core</artifactId>
-            <scope>compile</scope>
-        </dependency>
-    </dependencies>
-</project>
-

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/hadooplibs/hadoop-auth-2/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-auth-2/pom.xml b/hadooplibs/hadoop-auth-2/pom.xml
deleted file mode 100644
index 07df37b..0000000
--- a/hadooplibs/hadoop-auth-2/pom.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.oozie</groupId>
-        <artifactId>oozie-main</artifactId>
-        <version>4.3.0-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
-    </parent>
-    <groupId>org.apache.oozie</groupId>
-    <artifactId>oozie-hadoop-auth</artifactId>
-    <version>hadoop-2-4.3.0-SNAPSHOT</version>
-    <description>Apache Oozie Hadoop</description>
-    <name>Apache Oozie Hadoop Auth ${project.version} Test</name>
-    <packaging>jar</packaging>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-auth</artifactId>
-            <scope>compile</scope>
-        </dependency>
-    </dependencies>
-</project>
-

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/hadooplibs/hadoop-distcp-0.23/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-distcp-0.23/pom.xml b/hadooplibs/hadoop-distcp-0.23/pom.xml
deleted file mode 100644
index 60c8e02..0000000
--- a/hadooplibs/hadoop-distcp-0.23/pom.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.oozie</groupId>
-        <artifactId>oozie-main</artifactId>
-        <version>4.3.0-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
-    </parent>
-    <groupId>org.apache.oozie</groupId>
-    <artifactId>oozie-hadoop-distcp</artifactId>
-    <version>hadoop-0.23-4.3.0-SNAPSHOT</version>
-    <description>Apache Oozie Hadoop Distcp ${project.version}</description>
-    <name>Apache Oozie Hadoop Distcp ${project.version}</name>
-    <packaging>jar</packaging>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-distcp</artifactId>
-            <scope>compile</scope>
-        </dependency>
-    </dependencies>
-</project>
-

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/hadooplibs/hadoop-distcp-1/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-distcp-1/pom.xml b/hadooplibs/hadoop-distcp-1/pom.xml
deleted file mode 100644
index f617b69..0000000
--- a/hadooplibs/hadoop-distcp-1/pom.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.oozie</groupId>
-        <artifactId>oozie-main</artifactId>
-        <version>4.3.0-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
-    </parent>
-    <groupId>org.apache.oozie</groupId>
-    <artifactId>oozie-hadoop-distcp</artifactId>
-    <version>hadoop-1-4.3.0-SNAPSHOT</version>
-    <description>Apache Oozie Hadoop Distcp ${project.version}</description>
-    <name>Apache Oozie Hadoop Distcp ${project.version}</name>
-    <packaging>jar</packaging>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-tools</artifactId>
-            <scope>compile</scope>
-        </dependency>
-    </dependencies>
-</project>
-

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/hadooplibs/hadoop-distcp-2/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-distcp-2/pom.xml b/hadooplibs/hadoop-distcp-2/pom.xml
deleted file mode 100644
index b7c3c63..0000000
--- a/hadooplibs/hadoop-distcp-2/pom.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.oozie</groupId>
-        <artifactId>oozie-main</artifactId>
-        <version>4.3.0-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
-    </parent>
-    <groupId>org.apache.oozie</groupId>
-    <artifactId>oozie-hadoop-distcp</artifactId>
-    <version>hadoop-2-4.3.0-SNAPSHOT</version>
-    <description>Apache Oozie Hadoop Distcp ${project.version}</description>
-    <name>Apache Oozie Hadoop Distcp ${project.version}</name>
-    <packaging>jar</packaging>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-distcp</artifactId>
-            <scope>compile</scope>
-        </dependency>
-    </dependencies>
-</project>
-

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/hadooplibs/hadoop-distcp-3/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-distcp-3/pom.xml b/hadooplibs/hadoop-distcp-3/pom.xml
deleted file mode 100644
index 1bb2895..0000000
--- a/hadooplibs/hadoop-distcp-3/pom.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.oozie</groupId>
-        <artifactId>oozie-main</artifactId>
-        <version>4.3.0-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
-    </parent>
-    <groupId>org.apache.oozie</groupId>
-    <artifactId>oozie-hadoop-distcp</artifactId>
-    <version>hadoop-3-4.3.0-SNAPSHOT</version>
-    <description>Apache Oozie Hadoop Distcp ${project.version}</description>
-    <name>Apache Oozie Hadoop Distcp ${project.version}</name>
-    <packaging>jar</packaging>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-distcp</artifactId>
-        </dependency>
-    </dependencies>
-
-</project>
-

http://git-wip-us.apache.org/repos/asf/oozie/blob/a37835fe/hadooplibs/hadoop-utils-0.23/pom.xml
----------------------------------------------------------------------
diff --git a/hadooplibs/hadoop-utils-0.23/pom.xml b/hadooplibs/hadoop-utils-0.23/pom.xml
deleted file mode 100644
index a55e647..0000000
--- a/hadooplibs/hadoop-utils-0.23/pom.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.oozie</groupId>
-        <artifactId>oozie-main</artifactId>
-        <version>4.3.0-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
-    </parent>
-    <groupId>org.apache.oozie</groupId>
-    <artifactId>oozie-hadoop-utils</artifactId>
-    <version>hadoop-0.23-4.3.0-SNAPSHOT</version>
-    <description>Apache Oozie Hadoop Utils</description>
-    <name>Apache Oozie Hadoop Utils</name>
-    <packaging>jar</packaging>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client</artifactId>
-            <scope>provided</scope>
-        </dependency>
-    </dependencies>
-</project>
