http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/service/ShareLibService.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/service/ShareLibService.java 
b/core/src/main/java/org/apache/oozie/service/ShareLibService.java
index b79bd37..a4d2c02 100644
--- a/core/src/main/java/org/apache/oozie/service/ShareLibService.java
+++ b/core/src/main/java/org/apache/oozie/service/ShareLibService.java
@@ -51,14 +51,13 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.oozie.action.ActionExecutor;
 import org.apache.oozie.action.hadoop.JavaActionExecutor;
 import org.apache.oozie.client.rest.JsonUtils;
-import org.apache.oozie.hadoop.utils.HadoopShims;
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.oozie.ErrorCode;
 import org.apache.oozie.util.Instrumentable;
 import org.apache.oozie.util.Instrumentation;
+import org.apache.oozie.util.FSUtils;
 import org.apache.oozie.util.XConfiguration;
 import org.apache.oozie.util.XLog;
-import com.google.common.annotations.VisibleForTesting;
-
-import org.apache.oozie.ErrorCode;
 import org.jdom.JDOMException;
 
 public class ShareLibService implements Service, Instrumentable {
@@ -194,7 +193,7 @@ public class ShareLibService implements Service, 
Instrumentable {
     private void setupLauncherLibPath(FileSystem fs, Path tmpLauncherLibPath) 
throws IOException {
 
         ActionService actionService = Services.get().get(ActionService.class);
-        List<Class> classes = JavaActionExecutor.getCommonLauncherClasses();
+        List<Class<?>> classes = JavaActionExecutor.getCommonLauncherClasses();
         Path baseDir = new Path(tmpLauncherLibPath, 
JavaActionExecutor.OOZIE_COMMON_LIBDIR);
         copyJarContainingClasses(classes, fs, baseDir, 
JavaActionExecutor.OOZIE_COMMON_LIBDIR);
         Set<String> actionTypes = actionService.getActionTypes();
@@ -217,7 +216,7 @@ public class ShareLibService implements Service, 
Instrumentable {
      *
      * @param fs the FileSystem
      * @param path the Path
-     * @param perm is permission
+     * @param fsPerm is permission
      * @throws IOException Signals that an I/O exception has occurred.
      */
     private void recursiveChangePermissions(FileSystem fs, Path path, 
FsPermission fsPerm) throws IOException {
@@ -225,7 +224,7 @@ public class ShareLibService implements Service, 
Instrumentable {
         FileStatus[] filesStatus = fs.listStatus(path);
         for (int i = 0; i < filesStatus.length; i++) {
             Path p = filesStatus[i].getPath();
-            if (filesStatus[i].isDir()) {
+            if (filesStatus[i].isDirectory()) {
                 recursiveChangePermissions(fs, p, fsPerm);
             }
             else {
@@ -243,11 +242,11 @@ public class ShareLibService implements Service, 
Instrumentable {
      * @param type is sharelib key
      * @throws IOException Signals that an I/O exception has occurred.
      */
-    private void copyJarContainingClasses(List<Class> classes, FileSystem fs, 
Path executorDir, String type)
+    private void copyJarContainingClasses(List<Class<?>> classes, FileSystem 
fs, Path executorDir, String type)
             throws IOException {
         fs.mkdirs(executorDir);
         Set<String> localJarSet = new HashSet<String>();
-        for (Class c : classes) {
+        for (Class<?> c : classes) {
             String localJar = findContainingJar(c);
             if (localJar != null) {
                 localJarSet.add(localJar);
@@ -302,7 +301,7 @@ public class ShareLibService implements Service, 
Instrumentable {
             }
 
             for (FileStatus file : status) {
-                if (file.isDir()) {
+                if (file.isDirectory()) {
                     getPathRecursively(fs, file.getPath(), listOfPaths, 
shareLibKey, shareLibConfigMap);
                 }
                 else {
@@ -352,14 +351,12 @@ public class ShareLibService implements Service, 
Instrumentable {
     }
 
     private void checkSymlink(String shareLibKey) throws IOException {
-        if (!HadoopShims.isSymlinkSupported() || 
symlinkMapping.get(shareLibKey) == null
-                || symlinkMapping.get(shareLibKey).isEmpty()) {
+        if (symlinkMapping.get(shareLibKey) == null || 
symlinkMapping.get(shareLibKey).isEmpty()) {
             return;
         }
 
-        HadoopShims fileSystem = new HadoopShims(fs);
         for (Path path : symlinkMapping.get(shareLibKey).keySet()) {
-            if 
(!symlinkMapping.get(shareLibKey).get(path).equals(fileSystem.getSymLinkTarget(path)))
 {
+            if 
(!symlinkMapping.get(shareLibKey).get(path).equals(FSUtils.getSymLinkTarget(fs, 
path))) {
                 synchronized (ShareLibService.class) {
                     Map<String, List<Path>> tmpShareLibMap = new 
HashMap<String, List<Path>>(shareLibMap);
 
@@ -370,7 +367,7 @@ public class ShareLibService implements Service, 
Instrumentable {
                             symlinkMapping);
 
                     LOG.info(MessageFormat.format("Symlink target for [{0}] 
has changed, was [{1}], now [{2}]",
-                            shareLibKey, path, 
fileSystem.getSymLinkTarget(path)));
+                            shareLibKey, path, FSUtils.getSymLinkTarget(fs, 
path)));
                     loadShareLibMetaFile(tmpShareLibMap, tmpSymlinkMapping, 
tmpShareLibConfigMap, sharelibMappingFile,
                             shareLibKey);
                     shareLibMap = tmpShareLibMap;
@@ -423,12 +420,12 @@ public class ShareLibService implements Service, 
Instrumentable {
      * @return the string
      */
     @VisibleForTesting
-    protected String findContainingJar(Class clazz) {
+    protected String findContainingJar(Class<?> clazz) {
         ClassLoader loader = clazz.getClassLoader();
         String classFile = clazz.getName().replaceAll("\\.", "/") + ".class";
         try {
-            for (Enumeration itr = loader.getResources(classFile); 
itr.hasMoreElements();) {
-                URL url = (URL) itr.nextElement();
+            for (Enumeration<URL> itr = loader.getResources(classFile); 
itr.hasMoreElements();) {
+                URL url = itr.nextElement();
                 if ("jar".equals(url.getProtocol())) {
                     String toReturn = url.getPath();
                     if (toReturn.startsWith("file:")) {
@@ -587,7 +584,7 @@ public class ShareLibService implements Service, 
Instrumentable {
         }
 
         for (FileStatus dir : dirList) {
-            if (!dir.isDir()) {
+            if (!dir.isDirectory()) {
                 continue;
             }
             List<Path> listOfPaths = new ArrayList<Path>();
@@ -636,19 +633,18 @@ public class ShareLibService implements Service, 
Instrumentable {
             throws IOException {
         List<Path> listOfPaths = new ArrayList<Path>();
         Map<Path, Path> symlinkMappingforAction = new HashMap<Path, Path>();
-        HadoopShims fileSystem = new HadoopShims(fs);
 
         for (String dfsPath : pathList) {
             Path path = new Path(dfsPath);
             getPathRecursively(fs, new Path(dfsPath), listOfPaths, 
shareLibKey, shareLibConfigMap);
-            if (HadoopShims.isSymlinkSupported() && 
fileSystem.isSymlink(path)) {
-                symlinkMappingforAction.put(fs.makeQualified(path), 
fileSystem.getSymLinkTarget(path));
+            if (FSUtils.isSymlink(fs, path)) {
+                symlinkMappingforAction.put(path, FSUtils.getSymLinkTarget(fs, 
path));
             }
         }
-        if (HadoopShims.isSymlinkSupported()) {
-            LOG.info("symlink for " + shareLibKey + ":" + 
symlinkMappingforAction);
-            tmpSymlinkMapping.put(shareLibKey, symlinkMappingforAction);
-        }
+
+        LOG.info("symlink for " + shareLibKey + ":" + symlinkMappingforAction);
+        tmpSymlinkMapping.put(shareLibKey, symlinkMappingforAction);
+
         tmpShareLibMap.put(shareLibKey, listOfPaths);
         LOG.info("Share lib for " + shareLibKey + ":" + listOfPaths);
     }

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/servlet/CallbackServlet.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/servlet/CallbackServlet.java 
b/core/src/main/java/org/apache/oozie/servlet/CallbackServlet.java
index 6123021..7d92ffc 100644
--- a/core/src/main/java/org/apache/oozie/servlet/CallbackServlet.java
+++ b/core/src/main/java/org/apache/oozie/servlet/CallbackServlet.java
@@ -40,10 +40,11 @@ import org.apache.oozie.util.PropertiesUtils;
 import org.apache.oozie.util.XLog;
 
 public class CallbackServlet extends JsonRestServlet {
+
     private static final String INSTRUMENTATION_NAME = "callback";
 
     private static final ResourceInfo RESOURCE_INFO =
-            new ResourceInfo("", Arrays.asList("POST", "GET"), 
Collections.EMPTY_LIST);
+            new ResourceInfo("", Arrays.asList("POST", "GET"), 
Collections.<ParameterInfo>emptyList());
 
     public final static String CONF_MAX_DATA_LEN = 
"oozie.servlet.CallbackServlet.max.data.len";
 

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/store/OozieSchema.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/store/OozieSchema.java 
b/core/src/main/java/org/apache/oozie/store/OozieSchema.java
index 23dd1e5..d2fdd28 100644
--- a/core/src/main/java/org/apache/oozie/store/OozieSchema.java
+++ b/core/src/main/java/org/apache/oozie/store/OozieSchema.java
@@ -21,6 +21,7 @@ package org.apache.oozie.store;
 import java.sql.Blob;
 import java.sql.Timestamp;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -37,17 +38,21 @@ public class OozieSchema {
 
     private static final String OOZIE_VERSION = "0.1";
 
-    public static final Map<Table, List<Column>> TABLE_COLUMNS = new 
HashMap<Table, List<Column>>();
+    public static final Map<Table, List<Column>> TABLE_COLUMNS;
 
     static {
+        Map<Table, List<Column>> tmpColumns = new HashMap<>();
+
         for (Column column : OozieColumn.values()) {
-            List<Column> tColumns = TABLE_COLUMNS.get(column.table());
+            List<Column> tColumns = tmpColumns.get(column.table());
             if (tColumns == null) {
                 tColumns = new ArrayList<Column>();
-                TABLE_COLUMNS.put(column.table(), tColumns);
+                tmpColumns.put(column.table(), tColumns);
             }
             tColumns.add(column);
         }
+
+        TABLE_COLUMNS = Collections.unmodifiableMap(tmpColumns);
     }
 
     public static void setOozieDbName(String dbName) {

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/util/ClasspathUtils.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/util/ClasspathUtils.java 
b/core/src/main/java/org/apache/oozie/util/ClasspathUtils.java
new file mode 100644
index 0000000..5833607
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/util/ClasspathUtils.java
@@ -0,0 +1,142 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.util;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.MRConfig;
+import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.yarn.api.ApplicationConstants;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+
+import java.io.IOException;
+import java.net.URI;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+public class ClasspathUtils {
+    private static boolean usingMiniYarnCluster = false;
+    private static final List<String> CLASSPATH_ENTRIES = Arrays.asList(
+            ApplicationConstants.Environment.PWD.$(),
+            ApplicationConstants.Environment.PWD.$() + Path.SEPARATOR + "*"
+    );
+
+    @VisibleForTesting
+    public static void setUsingMiniYarnCluster(boolean useMiniYarnCluster) {
+        usingMiniYarnCluster = useMiniYarnCluster;
+    }
+
+    // Adapted from MRApps#setClasspath.  Adds Yarn, HDFS, Common, and 
distributed cache jars.
+    public static void setupClasspath(Map<String, String> env, Configuration 
conf) throws IOException {
+        // Propagate the system classpath when using the mini cluster
+        if (usingMiniYarnCluster) {
+            MRApps.addToEnvironment(
+                    env,
+                    ApplicationConstants.Environment.CLASSPATH.name(),
+                    System.getProperty("java.class.path"), conf);
+        }
+
+        for (String entry : CLASSPATH_ENTRIES) {
+            MRApps.addToEnvironment(env, 
ApplicationConstants.Environment.CLASSPATH.name(), entry, conf);
+        }
+
+        // a * in the classpath will only find a .jar, so we need to filter out
+        // all .jars and add everything else
+        
addToClasspathIfNotJar(org.apache.hadoop.mapreduce.filecache.DistributedCache.getFileClassPaths(conf),
+                
org.apache.hadoop.mapreduce.filecache.DistributedCache.getCacheFiles(conf),
+                conf,
+                env, ApplicationConstants.Environment.PWD.$());
+        
addToClasspathIfNotJar(org.apache.hadoop.mapreduce.filecache.DistributedCache.getArchiveClassPaths(conf),
+                
org.apache.hadoop.mapreduce.filecache.DistributedCache.getCacheArchives(conf),
+                conf,
+                env, ApplicationConstants.Environment.PWD.$());
+
+
+        boolean crossPlatform = 
conf.getBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM,
+                MRConfig.DEFAULT_MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM);
+
+        for (String c : 
conf.getStrings(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
+                crossPlatform
+                        ? 
YarnConfiguration.DEFAULT_YARN_CROSS_PLATFORM_APPLICATION_CLASSPATH
+                        : 
YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH)) {
+            MRApps.addToEnvironment(env, 
ApplicationConstants.Environment.CLASSPATH.name(),
+                    c.trim(), conf);
+        }
+    }
+
+    // Adapted from MRApps#setClasspath
+    public static void addMapReduceToClasspath(Map<String, String> env, 
Configuration conf) {
+        boolean crossPlatform = 
conf.getBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM,
+                MRConfig.DEFAULT_MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM);
+
+        for (String c : 
conf.getStrings(MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH,
+                crossPlatform ?
+                        
StringUtils.getStrings(MRJobConfig.DEFAULT_MAPREDUCE_CROSS_PLATFORM_APPLICATION_CLASSPATH)
+                        : 
StringUtils.getStrings(MRJobConfig.DEFAULT_MAPREDUCE_APPLICATION_CLASSPATH))) {
+            MRApps.addToEnvironment(env, 
ApplicationConstants.Environment.CLASSPATH.name(),
+                    c.trim(), conf);
+        }
+    }
+
+    // Borrowed from MRApps#addToClasspathIfNotJar
+    private static void addToClasspathIfNotJar(Path[] paths,
+                                               URI[] withLinks, Configuration 
conf,
+                                               Map<String, String> environment,
+                                               String classpathEnvVar) throws 
IOException {
+        if (paths != null) {
+            HashMap<Path, String> linkLookup = new HashMap<Path, String>();
+            if (withLinks != null) {
+                for (URI u: withLinks) {
+                    Path p = new Path(u);
+                    FileSystem remoteFS = p.getFileSystem(conf);
+                    p = remoteFS.resolvePath(p.makeQualified(remoteFS.getUri(),
+                            remoteFS.getWorkingDirectory()));
+                    String name = (null == u.getFragment())
+                            ? p.getName() : u.getFragment();
+                    if (!name.toLowerCase(Locale.ENGLISH).endsWith(".jar")) {
+                        linkLookup.put(p, name);
+                    }
+                }
+            }
+
+            for (Path p : paths) {
+                FileSystem remoteFS = p.getFileSystem(conf);
+                p = remoteFS.resolvePath(p.makeQualified(remoteFS.getUri(),
+                        remoteFS.getWorkingDirectory()));
+                String name = linkLookup.get(p);
+                if (name == null) {
+                    name = p.getName();
+                }
+                if(!name.toLowerCase(Locale.ENGLISH).endsWith(".jar")) {
+                    MRApps.addToEnvironment(
+                            environment,
+                            classpathEnvVar,
+                            ApplicationConstants.Environment.PWD.$() + 
Path.SEPARATOR + name, conf);
+                }
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/util/FSUtils.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/util/FSUtils.java 
b/core/src/main/java/org/apache/oozie/util/FSUtils.java
new file mode 100644
index 0000000..6d73fc7
--- /dev/null
+++ b/core/src/main/java/org/apache/oozie/util/FSUtils.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie.util;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import java.io.IOException;
+import java.net.URI;
+
+public final class FSUtils {
+    public static Path getSymLinkTarget(FileSystem fs, Path p) throws 
IOException {
+        try {
+            //getSymlink doesn't work with fragment name, need to remove 
fragment before calling getSymlink
+            Path tempPath = new URI(p.toString()).getFragment() == null ? p : 
new Path(new URI(p.toString()).getPath());
+            return fs.getFileLinkStatus(tempPath).getSymlink();
+        }
+        catch (java.net.URISyntaxException e) {
+            throw new IOException(e);
+        }
+    }
+
+    public static boolean isSymlink(FileSystem fs, Path p) throws IOException {
+        try {
+            //isSymlink doesn't work with fragment name, need to remove 
fragment before checking for symlink
+            Path tempPath = new URI(p.toString()).getFragment() == null ? p : 
new Path(new URI(p.toString()).getPath());
+            return fs.getFileLinkStatus(tempPath).isSymlink();
+        }
+        catch (java.net.URISyntaxException e) {
+            throw new IOException(e);
+        }
+    }
+
+    public static void createSymlink(FileSystem fs, Path target, Path link, 
boolean createParent) throws IOException {
+        fs.createSymlink(target, link, createParent);
+    }
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/util/IOUtils.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/util/IOUtils.java 
b/core/src/main/java/org/apache/oozie/util/IOUtils.java
index a6c2fda..3674dc4 100644
--- a/core/src/main/java/org/apache/oozie/util/IOUtils.java
+++ b/core/src/main/java/org/apache/oozie/util/IOUtils.java
@@ -190,31 +190,34 @@ public abstract class IOUtils {
 
     private static void zipDir(File dir, String relativePath, ZipOutputStream 
zos, boolean start) throws IOException {
         String[] dirList = dir.list();
-        for (String aDirList : dirList) {
-            File f = new File(dir, aDirList);
-            if (!f.isHidden()) {
-                if (f.isDirectory()) {
-                    if (!start) {
-                        ZipEntry dirEntry = new ZipEntry(relativePath + 
f.getName() + "/");
-                        zos.putNextEntry(dirEntry);
-                        zos.closeEntry();
+
+        if (dirList != null) {
+            for (String aDirList : dirList) {
+                File f = new File(dir, aDirList);
+                if (!f.isHidden()) {
+                    if (f.isDirectory()) {
+                        if (!start) {
+                            ZipEntry dirEntry = new ZipEntry(relativePath + 
f.getName() + "/");
+                            zos.putNextEntry(dirEntry);
+                            zos.closeEntry();
+                        }
+                        String filePath = f.getPath();
+                        File file = new File(filePath);
+                        zipDir(file, relativePath + f.getName() + "/", zos, 
false);
                     }
-                    String filePath = f.getPath();
-                    File file = new File(filePath);
-                    zipDir(file, relativePath + f.getName() + "/", zos, false);
-                }
-                else {
-                    ZipEntry anEntry = new ZipEntry(relativePath + 
f.getName());
-                    zos.putNextEntry(anEntry);
-                    InputStream is = new FileInputStream(f);
-                    byte[] arr = new byte[4096];
-                    int read = is.read(arr);
-                    while (read > -1) {
-                        zos.write(arr, 0, read);
-                        read = is.read(arr);
+                    else {
+                        ZipEntry anEntry = new ZipEntry(relativePath + 
f.getName());
+                        zos.putNextEntry(anEntry);
+                        InputStream is = new FileInputStream(f);
+                        byte[] arr = new byte[4096];
+                        int read = is.read(arr);
+                        while (read > -1) {
+                            zos.write(arr, 0, read);
+                            read = is.read(arr);
+                        }
+                        is.close();
+                        zos.closeEntry();
                     }
-                    is.close();
-                    zos.closeEntry();
                 }
             }
         }

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/java/org/apache/oozie/util/JobUtils.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/oozie/util/JobUtils.java 
b/core/src/main/java/org/apache/oozie/util/JobUtils.java
index 63f88ac..5d948d9 100644
--- a/core/src/main/java/org/apache/oozie/util/JobUtils.java
+++ b/core/src/main/java/org/apache/oozie/util/JobUtils.java
@@ -138,8 +138,7 @@ public class JobUtils {
     }
 
     /**
-     * This method provides a wrapper around hadoop 0.20/1.x and 0.23/2.x 
implementations.
-     * TODO: Remove the workaround when we drop the support for hadoop 0.20.
+     * This method provides a wrapper around hadoop 2.x implementations.
      * @param file Path of the file to be added
      * @param conf Configuration that contains the classpath setting
      * @param fs FileSystem with respect to which path should be interpreted 
(may be null)
@@ -148,25 +147,13 @@ public class JobUtils {
     public static void addFileToClassPath(Path file, Configuration conf, 
FileSystem fs) throws IOException {
         if (fs == null) {
             Configuration defaultConf = 
Services.get().get(HadoopAccessorService.class)
-                    
.createJobConf(conf.get(JavaActionExecutor.HADOOP_JOB_TRACKER));
+                    
.createJobConf(conf.get(JavaActionExecutor.HADOOP_YARN_RM));
             XConfiguration.copy(conf, defaultConf);
             // it fails with conf, therefore we pass defaultConf instead
             fs = file.getFileSystem(defaultConf);
         }
-        // Hadoop 0.20/1.x.
-        if 
(Services.get().get(HadoopAccessorService.class).getCachedConf().get("yarn.resourcemanager.webapp.address")
 == null) {
-            // Duplicate hadoop 1.x code to workaround MAPREDUCE-2361 in 
Hadoop 0.20
-            // Refer OOZIE-1806.
-            String filepath = file.toUri().getPath();
-            String classpath = conf.get("mapred.job.classpath.files");
-            conf.set("mapred.job.classpath.files",
-                    classpath == null ? filepath : classpath + 
System.getProperty("path.separator") + filepath);
-            URI uri = fs.makeQualified(file).toUri();
-            DistributedCache.addCacheFile(uri, conf);
-        }
-        else { // Hadoop 2.x
-            DistributedCache.addFileToClassPath(file, conf, fs);
-        }
+
+        DistributedCache.addFileToClassPath(file, conf, fs);
     }
 
     public static String getRetryKey(WorkflowActionBean wfAction, String key) {

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/resources/META-INF/persistence.xml
----------------------------------------------------------------------
diff --git a/core/src/main/resources/META-INF/persistence.xml 
b/core/src/main/resources/META-INF/persistence.xml
index edda2d5..bad9278 100644
--- a/core/src/main/resources/META-INF/persistence.xml
+++ b/core/src/main/resources/META-INF/persistence.xml
@@ -7,9 +7,9 @@
   to you under the Apache License, Version 2.0 (the
   "License"); you may not use this file except in compliance
   with the License.  You may obtain a copy of the License at
-  
+
        http://www.apache.org/licenses/LICENSE-2.0
-  
+
   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -123,7 +123,7 @@
                              
org.apache.oozie.client.rest.JsonCoordinatorAction;
                              org.apache.oozie.client.rest.JsonBundleJob;
                              
org.apache.oozie.util.db.ValidateConnectionBean)"></property>
-                             
+
             <property name="openjpa.DetachState" 
value="fetch-groups(DetachedStateField=true)"/>
             <property name="openjpa.LockManager" value="pessimistic"/>
             <property name="openjpa.ReadLockLevel" value="read"/>
@@ -226,7 +226,7 @@
 
             <property name="openjpa.ConnectionProperties" 
value="**INVALID**"/> <!--Set by StoreService at init time -->
 
-            <property name="openjpa.MetaDataFactory" 
+            <property name="openjpa.MetaDataFactory"
                       value="jpa(Types=org.apache.oozie.WorkflowActionBean;
                 org.apache.oozie.WorkflowJobBean;
                 org.apache.oozie.CoordinatorJobBean;

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/resources/oozie-default.xml
----------------------------------------------------------------------
diff --git a/core/src/main/resources/oozie-default.xml 
b/core/src/main/resources/oozie-default.xml
index b22b1ce..a2d376a 100644
--- a/core/src/main/resources/oozie-default.xml
+++ b/core/src/main/resources/oozie-default.xml
@@ -1835,39 +1835,6 @@ will be the requeue interval for the actions which are 
waiting for a long time w
     </property>
 
     <property>
-        <name>oozie.action.launcher.mapreduce.job.ubertask.enable</name>
-        <value>true</value>
-        <description>
-            Enables Uber Mode for the launcher job in YARN/Hadoop 2 (no effect 
in Hadoop 1) for all action types by default.
-            This can be overridden on a per-action-type basis by setting
-            oozie.action.#action-type#.launcher.mapreduce.job.ubertask.enable 
in oozie-site.xml (where #action-type# is the action
-            type; for example, "pig").  And that can be overridden on a 
per-action basis by setting
-            oozie.launcher.mapreduce.job.ubertask.enable in an action's 
configuration section in a workflow.  In summary, the
-            priority is this:
-            1. action's configuration section in a workflow
-            2. 
oozie.action.#action-type#.launcher.mapreduce.job.ubertask.enable in oozie-site
-            3. oozie.action.launcher.mapreduce.job.ubertask.enable in 
oozie-site
-        </description>
-    </property>
-
-    <property>
-        <name>oozie.action.shell.launcher.mapreduce.job.ubertask.enable</name>
-        <value>false</value>
-        <description>
-            The Shell action may have issues with the $PATH environment when 
using Uber Mode, and so Uber Mode is disabled by
-            default for it.  See 
oozie.action.launcher.mapreduce.job.ubertask.enable
-        </description>
-    </property>
-
-    <property>
-        <name>oozie.action.launcher.mapreduce.input.format.class</name>
-        <value>org.apache.oozie.action.hadoop.OozieLauncherInputFormat</value>
-        <description>
-            Make the Launcher Mapper map-only job's InputFormat class 
pluggable in order to provide alternative implementations.
-        </description>
-    </property>
-
-    <property>
         <name>oozie.action.spark.setup.hadoop.conf.dir</name>
         <value>false</value>
         <description>

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/main/resources/oozie-log4j.properties
----------------------------------------------------------------------
diff --git a/core/src/main/resources/oozie-log4j.properties 
b/core/src/main/resources/oozie-log4j.properties
index 05fb37a..c065f3c 100644
--- a/core/src/main/resources/oozie-log4j.properties
+++ b/core/src/main/resources/oozie-log4j.properties
@@ -6,9 +6,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #      http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/QueryServlet.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/oozie/QueryServlet.java 
b/core/src/test/java/org/apache/oozie/QueryServlet.java
new file mode 100644
index 0000000..8789438
--- /dev/null
+++ b/core/src/test/java/org/apache/oozie/QueryServlet.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.oozie;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.net.URLDecoder;
+
+/**
+ * Servlet that keeps track of the last query string it received
+ */
+public class QueryServlet extends HttpServlet {
+
+    public static String lastQueryString = null;
+
+    protected void doGet(HttpServletRequest request, HttpServletResponse 
response) throws ServletException, IOException {
+        lastQueryString = URLDecoder.decode(request.getQueryString(), "UTF-8");
+        response.setStatus(HttpServletResponse.SC_OK);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/action/hadoop/BlockingMapper.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/action/hadoop/BlockingMapper.java 
b/core/src/test/java/org/apache/oozie/action/hadoop/BlockingMapper.java
new file mode 100644
index 0000000..0f4dcd6
--- /dev/null
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/BlockingMapper.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.oozie.action.hadoop;
+
+import java.io.IOException;
+
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+
+// A mapper task that blocks forever
+public class BlockingMapper implements Mapper<Object, Object, Object, Object> {
+
+    @Override
+    public void configure(JobConf job) {
+        // nop
+    }
+
+    @Override
+    public void close() throws IOException {
+        // nop
+    }
+
+    @Override
+    public void map(Object key, Object value, OutputCollector<Object, Object> 
output, Reporter reporter)
+            throws IOException {
+        try {
+            synchronized (this) {
+                wait();
+            }
+        } catch (InterruptedException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/action/hadoop/CredentialForTest.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/action/hadoop/CredentialForTest.java 
b/core/src/test/java/org/apache/oozie/action/hadoop/CredentialForTest.java
index 0629891..23dd78a 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/CredentialForTest.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/CredentialForTest.java
@@ -20,20 +20,17 @@ package org.apache.oozie.action.hadoop;
 
 import java.util.Map.Entry;
 
-import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.Credentials;
 import org.apache.oozie.ErrorCode;
 import org.apache.oozie.action.ActionExecutor.Context;
-import org.apache.oozie.action.hadoop.Credentials;
 import org.apache.oozie.action.hadoop.CredentialsProperties;
 
-
-
-@SuppressWarnings("deprecation")
-public class CredentialForTest extends Credentials {
+public class CredentialForTest implements CredentialsProvider {
 
     @Override
-    public void addtoJobConf(JobConf jobconf, CredentialsProperties props, 
Context context) throws Exception {
-
+    public void updateCredentials(Credentials credentials, Configuration 
config,
+            CredentialsProperties props, Context context) throws Exception {
         String paramA = null;
         String paramB = null;
         for (Entry<String, String>  parameter : 
props.getProperties().entrySet()) {
@@ -50,7 +47,7 @@ public class CredentialForTest extends Credentials {
             throw new CredentialException(ErrorCode.E0510, "required 
parameters is null.");
         }
 
-        jobconf.set(props.getName(), "testcert");
+        config.set(props.getName(), "testcert");
     }
 
 }

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/action/hadoop/InsertTestToken.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/action/hadoop/InsertTestToken.java 
b/core/src/test/java/org/apache/oozie/action/hadoop/InsertTestToken.java
index 9da8fbe..9558aa5 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/InsertTestToken.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/InsertTestToken.java
@@ -18,15 +18,15 @@
 
 package org.apache.oozie.action.hadoop;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.JobConf;
 import 
org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.token.Token;
 import org.apache.oozie.action.ActionExecutor.Context;
 import org.apache.oozie.util.XLog;
 
-
-public class InsertTestToken extends Credentials{
+public class InsertTestToken implements CredentialsProvider {
     public static String DUMMY_SECRET_KEY = "DummySecretKey";
     public InsertTestToken() {
     }
@@ -34,14 +34,16 @@ public class InsertTestToken extends Credentials{
     /* (non-Javadoc)
      * @see 
org.apache.oozie.action.hadoop.Credentials#addtoJobConf(org.apache.hadoop.mapred.JobConf,
 org.apache.oozie.action.hadoop.CredentialsProperties, 
org.apache.oozie.action.ActionExecutor.Context)
      */
+
     @Override
-    public void addtoJobConf(JobConf jobconf, CredentialsProperties props, 
Context context) throws Exception {
+    public void updateCredentials(Credentials  credentials, Configuration 
config, CredentialsProperties props, Context context)
+            throws Exception {
         try {
             Token<DelegationTokenIdentifier> abctoken = new 
Token<DelegationTokenIdentifier>();
-            jobconf.getCredentials().addToken(new Text("ABC Token"), abctoken);
+            credentials.addToken(new Text("ABC Token"), abctoken);
             XLog.getLog(getClass()).debug("Added the ABC token in job conf");
 
-            jobconf.getCredentials().addSecretKey(new Text(DUMMY_SECRET_KEY), 
DUMMY_SECRET_KEY.getBytes("UTF-8"));
+            credentials.addSecretKey(new Text(DUMMY_SECRET_KEY), 
DUMMY_SECRET_KEY.getBytes("UTF-8"));
             XLog.getLog(getClass()).debug("Added the " + DUMMY_SECRET_KEY + " 
in job conf");
         }
         catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/action/hadoop/LauncherMainTester.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/action/hadoop/LauncherMainTester.java 
b/core/src/test/java/org/apache/oozie/action/hadoop/LauncherMainTester.java
index 4baed6e..16ab729 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/LauncherMainTester.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/LauncherMainTester.java
@@ -30,6 +30,7 @@ public class LauncherMainTester {
         if (args.length == 0) {
             System.out.println("Hello World!");
         }
+
         if (args.length == 1) {
             if (args[0].equals("throwable")) {
                 throw new Throwable("throwing throwable");

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/action/hadoop/MapperReducerForTest.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/action/hadoop/MapperReducerForTest.java 
b/core/src/test/java/org/apache/oozie/action/hadoop/MapperReducerForTest.java
index 8f08ddd..75ac716 100644
--- 
a/core/src/test/java/org/apache/oozie/action/hadoop/MapperReducerForTest.java
+++ 
b/core/src/test/java/org/apache/oozie/action/hadoop/MapperReducerForTest.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.mapred.Reducer;
 import java.io.IOException;
 import java.util.Iterator;
 
-public class MapperReducerForTest implements Mapper, Reducer {
+public class MapperReducerForTest implements Mapper<Object, Object, Object, 
Object>, Reducer<Object, Object, Object, Object> {
     public static final String GROUP = "g";
     public static final String NAME = "c";
     /**
@@ -66,14 +66,14 @@ public class MapperReducerForTest implements Mapper, 
Reducer {
     public void close() throws IOException {
     }
 
-    @SuppressWarnings("unchecked")
-    public void map(Object key, Object value, OutputCollector collector, 
Reporter reporter) throws IOException {
+    @Override
+    public void map(Object key, Object value, OutputCollector<Object, Object> 
collector, Reporter reporter) throws IOException {
         collector.collect(key, value);
         reporter.incrCounter(GROUP, NAME, 5l);
     }
 
-    @SuppressWarnings("unchecked")
-    public void reduce(Object key, Iterator values, OutputCollector collector, 
Reporter reporter)
+    @Override
+    public void reduce(Object key, Iterator<Object> values, 
OutputCollector<Object, Object> collector, Reporter reporter)
             throws IOException {
         while (values.hasNext()) {
             collector.collect(key, values.next());

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/action/hadoop/TestDistCpActionExecutor.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/action/hadoop/TestDistCpActionExecutor.java
 
b/core/src/test/java/org/apache/oozie/action/hadoop/TestDistCpActionExecutor.java
index e8a140f..c1f0e6f 100644
--- 
a/core/src/test/java/org/apache/oozie/action/hadoop/TestDistCpActionExecutor.java
+++ 
b/core/src/test/java/org/apache/oozie/action/hadoop/TestDistCpActionExecutor.java
@@ -25,18 +25,10 @@ import java.io.OutputStream;
 import java.util.Arrays;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobID;
-import org.apache.hadoop.mapred.RunningJob;
 import org.apache.oozie.WorkflowActionBean;
 import org.apache.oozie.WorkflowJobBean;
-import org.apache.oozie.action.hadoop.ActionExecutorTestCase.Context;
 import org.apache.oozie.client.WorkflowAction;
-import org.apache.oozie.service.HadoopAccessorService;
-import org.apache.oozie.service.Services;
 import org.apache.oozie.service.WorkflowAppService;
-import org.apache.oozie.test.XTestCase.Predicate;
 import org.apache.oozie.util.IOUtils;
 import org.apache.oozie.util.XConfiguration;
 
@@ -64,13 +56,8 @@ public class TestDistCpActionExecutor extends 
ActionExecutorTestCase{
                 "<arg>" + outputPath + "</arg>" +
                 "</distcp>";
         Context context = createContext(actionXml);
-        final RunningJob runningJob = submitAction(context);
-        waitFor(60 * 1000, new Predicate() {
-            public boolean evaluate() throws Exception {
-                return runningJob.isComplete();
-            }
-        });
-        assertTrue(runningJob.isSuccessful());
+        final String launcherId = submitAction(context);
+        waitUntilYarnAppDoneAndAssertSuccess(launcherId);
 
         waitFor(60 * 1000, new Predicate() {
             public boolean evaluate() throws Exception {
@@ -139,7 +126,7 @@ public class TestDistCpActionExecutor extends 
ActionExecutorTestCase{
         return new Context(wf, action);
     }
 
-    protected RunningJob submitAction(Context context) throws Exception {
+    protected String submitAction(Context context) throws Exception {
         DistcpActionExecutor ae = new DistcpActionExecutor();
 
         WorkflowAction action = context.getAction();
@@ -154,14 +141,8 @@ public class TestDistCpActionExecutor extends 
ActionExecutorTestCase{
         assertNotNull(jobTracker);
         assertNotNull(consoleUrl);
 
-        JobConf jobConf = 
Services.get().get(HadoopAccessorService.class).createJobConf(jobTracker);
-        jobConf.set("mapred.job.tracker", jobTracker);
-
-        JobClient jobClient =
-            
Services.get().get(HadoopAccessorService.class).createJobClient(getTestUser(), 
jobConf);
-        final RunningJob runningJob = jobClient.getJob(JobID.forName(jobId));
-        assertNotNull(runningJob);
-        return runningJob;
+        ae.submitLauncher(getFileSystem(), context, context.getAction());
+        return context.getAction().getExternalId();
     }
 
 }

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/action/hadoop/TestFSPrepareActions.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/action/hadoop/TestFSPrepareActions.java 
b/core/src/test/java/org/apache/oozie/action/hadoop/TestFSPrepareActions.java
index 386fef3..a08c16a 100644
--- 
a/core/src/test/java/org/apache/oozie/action/hadoop/TestFSPrepareActions.java
+++ 
b/core/src/test/java/org/apache/oozie/action/hadoop/TestFSPrepareActions.java
@@ -59,7 +59,7 @@ public class TestFSPrepareActions extends XFsTestCase {
         String prepareXML = "<prepare>" + "<delete path='" + newDir + "'/>" + 
"</prepare>";
 
         JobConf conf = createJobConf();
-        LauncherMapperHelper.setupLauncherURIHandlerConf(conf);
+        LauncherHelper.setupLauncherURIHandlerConf(conf);
         PrepareActionsDriver.doOperations(prepareXML, conf);
         assertFalse(fs.exists(newDir));
     }
@@ -85,7 +85,7 @@ public class TestFSPrepareActions extends XFsTestCase {
         String prepareXML = "<prepare>" + "<delete path='" + newDir + 
"/201[0-1]/*" + "'/>" + "</prepare>";
 
         JobConf conf = createJobConf();
-        LauncherMapperHelper.setupLauncherURIHandlerConf(conf);
+        LauncherHelper.setupLauncherURIHandlerConf(conf);
         PrepareActionsDriver.doOperations(prepareXML, conf);
         assertFalse(fs.exists(new Path(newDir + "/2010/10")));
         assertFalse(fs.exists(new Path(newDir + "/2011/10")));
@@ -107,7 +107,7 @@ public class TestFSPrepareActions extends XFsTestCase {
         String prepareXML = "<prepare>" + "<mkdir path='" + newDir + "'/>" + 
"</prepare>";
 
         JobConf conf = createJobConf();
-        LauncherMapperHelper.setupLauncherURIHandlerConf(conf);
+        LauncherHelper.setupLauncherURIHandlerConf(conf);
         PrepareActionsDriver.doOperations(prepareXML, conf);
         assertTrue(fs.exists(newDir));
     }
@@ -126,7 +126,7 @@ public class TestFSPrepareActions extends XFsTestCase {
 
         try {
             JobConf conf = createJobConf();
-            LauncherMapperHelper.setupLauncherURIHandlerConf(conf);
+            LauncherHelper.setupLauncherURIHandlerConf(conf);
             PrepareActionsDriver.doOperations(prepareXML, conf);
             fail("Expected to catch an exception but did not encounter any");
         } catch (LauncherException le) {
@@ -153,7 +153,7 @@ public class TestFSPrepareActions extends XFsTestCase {
         String prepareXML = "<prepare>" + "<mkdir path='" + noSchemePath + 
"'/>" + "</prepare>";
 
         JobConf conf = createJobConf();
-        LauncherMapperHelper.setupLauncherURIHandlerConf(conf);
+        LauncherHelper.setupLauncherURIHandlerConf(conf);
         PrepareActionsDriver.doOperations(prepareXML, conf);
 
         assertTrue(fs.exists(new Path(noSchemePath)));

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java 
b/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
index 09d723a..99e4d91 100644
--- 
a/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
+++ 
b/core/src/test/java/org/apache/oozie/action/hadoop/TestFsActionExecutor.java
@@ -555,9 +555,7 @@ public class TestFsActionExecutor extends 
ActionExecutorTestCase {
         assertEquals("rwx------", fs.getFileStatus(new Path(basePath + 
"/10/dir1")).getPermission().toString());
         assertEquals("rwx------", fs.getFileStatus(new Path(basePath + 
"/10/dir2")).getPermission().toString());
         assertEquals("rwx------", fs.getFileStatus(new Path(basePath + 
"/11/dir3")).getPermission().toString());
-        // HDFS-4659 introduced an incompatible change that causes the 
following to be "rwx------" when run against Hadoop 2.1.x
-        // but in Hadoop 1.x its still "rw-------" so we'll just skip 
verifying this for now.
-        //assertEquals("rw-------", fs.getFileStatus(new Path(basePath + 
"/10/dir1/file1")).getPermission().toString());
+        assertEquals("rwx------", fs.getFileStatus(new Path(basePath + 
"/10/dir1/file1")).getPermission().toString());
 
         fs.delete(basePath, true);
     }

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/action/hadoop/TestHCatCredentials.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/action/hadoop/TestHCatCredentials.java 
b/core/src/test/java/org/apache/oozie/action/hadoop/TestHCatCredentials.java
index e6d43ca..204245c 100644
--- a/core/src/test/java/org/apache/oozie/action/hadoop/TestHCatCredentials.java
+++ b/core/src/test/java/org/apache/oozie/action/hadoop/TestHCatCredentials.java
@@ -34,6 +34,7 @@ import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.security.Credentials;
 import org.apache.oozie.service.HCatAccessorService;
 import org.apache.oozie.service.ServiceException;
 import org.apache.oozie.service.Services;
@@ -130,17 +131,18 @@ public class TestHCatCredentials {
         credProps.setProperties(new HashMap<String, String>());
         HCatCredentials hcatCred = new HCatCredentials();
         final JobConf jobConf = new JobConf(false);
+        Credentials credentials = new Credentials();
         PowerMockito.doAnswer(new Answer<Void>() {
             @Override
             public Void answer(InvocationOnMock invocation) throws Throwable {
                 Object[] args = invocation.getArguments();
-                JobConf jConf = (JobConf) args[0];
-                jConf.set(HCAT_METASTORE_PRINCIPAL, (String) args[1]);
-                jConf.set(HCAT_METASTORE_URI, (String) args[2]);
+                Configuration jConf = (Configuration) args[1];
+                jConf.set(HCAT_METASTORE_PRINCIPAL, (String) args[2]);
+                jConf.set(HCAT_METASTORE_URI, (String) args[3]);
                 return null;
             }
-        }).when(hcatCredHelper).set(jobConf, TEST_HIVE_METASTORE_PRINCIPAL2, 
TEST_HIVE_METASTORE_URI2);
-        hcatCred.addtoJobConf(jobConf, credProps, null);
+        }).when(hcatCredHelper).set(credentials, jobConf, 
TEST_HIVE_METASTORE_PRINCIPAL2, TEST_HIVE_METASTORE_URI2);
+        hcatCred.updateCredentials(credentials, jobConf, credProps, null);
         assertEquals(TEST_HIVE_METASTORE_PRINCIPAL2, 
jobConf.get(HCAT_METASTORE_PRINCIPAL));
         assertEquals(TEST_HIVE_METASTORE_URI2, 
jobConf.get(HCAT_METASTORE_URI));
         assertNull(jobConf.get(HIVE_METASTORE_PRINCIPAL));
@@ -155,19 +157,20 @@ public class TestHCatCredentials {
         credProps.setProperties(new HashMap<String, String>());
         HCatCredentials hcatCred = new HCatCredentials();
         final JobConf jobConf = new JobConf(false);
+        Credentials credentials = new Credentials();
         HCatCredentialHelper hcatCredHelper = 
Mockito.mock(HCatCredentialHelper.class);
         
PowerMockito.whenNew(HCatCredentialHelper.class).withNoArguments().thenReturn(hcatCredHelper);
         PowerMockito.doAnswer(new Answer<Void>() {
             @Override
             public Void answer(InvocationOnMock invocation) throws Throwable {
                 Object[] args = invocation.getArguments();
-                JobConf jConf = (JobConf) args[0];
-                jConf.set(HIVE_METASTORE_PRINCIPAL, (String) args[1]);
-                jConf.set(HIVE_METASTORE_URI, (String) args[2]);
+                Configuration jConf = (Configuration) args[1];
+                jConf.set(HIVE_METASTORE_PRINCIPAL, (String) args[2]);
+                jConf.set(HIVE_METASTORE_URI, (String) args[3]);
                 return null;
             }
-        }).when(hcatCredHelper).set(jobConf, TEST_HIVE_METASTORE_PRINCIPAL, 
TEST_HIVE_METASTORE_URI);
-        hcatCred.addtoJobConf(jobConf, credProps, null);
+        }).when(hcatCredHelper).set(credentials, jobConf, 
TEST_HIVE_METASTORE_PRINCIPAL, TEST_HIVE_METASTORE_URI);
+        hcatCred.updateCredentials(credentials, jobConf, credProps, null);
         assertEquals(TEST_HIVE_METASTORE_PRINCIPAL, 
jobConf.get(HIVE_METASTORE_PRINCIPAL));
         assertEquals(TEST_HIVE_METASTORE_URI, jobConf.get(HIVE_METASTORE_URI));
         assertNull(jobConf.get(HCAT_METASTORE_PRINCIPAL));
@@ -186,17 +189,18 @@ public class TestHCatCredentials {
         credProps.setProperties(prop);
         HCatCredentials hcatCred = new HCatCredentials();
         final JobConf jobConf = new JobConf(false);
+        Credentials credentials = new Credentials();
         PowerMockito.doAnswer(new Answer<Void>() {
             @Override
             public Void answer(InvocationOnMock invocation) throws Throwable {
                 Object[] args = invocation.getArguments();
-                JobConf jConf = (JobConf) args[0];
-                jConf.set(HCAT_METASTORE_PRINCIPAL, (String) args[1]);
-                jConf.set(HCAT_METASTORE_URI, (String) args[2]);
+                JobConf jConf = (JobConf) args[1];
+                jConf.set(HCAT_METASTORE_PRINCIPAL, (String) args[2]);
+                jConf.set(HCAT_METASTORE_URI, (String) args[3]);
                 return null;
             }
-        }).when(hcatCredHelper).set(jobConf, TEST_HIVE_METASTORE_PRINCIPAL2, 
TEST_HIVE_METASTORE_URI2);
-        hcatCred.addtoJobConf(jobConf, credProps, null);
+        }).when(hcatCredHelper).set(credentials, jobConf, 
TEST_HIVE_METASTORE_PRINCIPAL2, TEST_HIVE_METASTORE_URI2);
+        hcatCred.updateCredentials(credentials, jobConf, credProps, null);
         assertEquals(TEST_HIVE_METASTORE_PRINCIPAL2, 
jobConf.get(HCAT_METASTORE_PRINCIPAL));
         assertEquals(TEST_HIVE_METASTORE_URI2, 
jobConf.get(HCAT_METASTORE_URI));
         assertNull(jobConf.get(HIVE_METASTORE_PRINCIPAL));

http://git-wip-us.apache.org/repos/asf/oozie/blob/21761f5b/core/src/test/java/org/apache/oozie/action/hadoop/TestHCatPrepareActions.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/oozie/action/hadoop/TestHCatPrepareActions.java 
b/core/src/test/java/org/apache/oozie/action/hadoop/TestHCatPrepareActions.java
index d66d9c9..4fe9452 100644
--- 
a/core/src/test/java/org/apache/oozie/action/hadoop/TestHCatPrepareActions.java
+++ 
b/core/src/test/java/org/apache/oozie/action/hadoop/TestHCatPrepareActions.java
@@ -65,7 +65,7 @@ public class TestHCatPrepareActions extends XHCatTestCase {
                 + "</prepare>";
 
         JobConf conf = createJobConf();
-        LauncherMapperHelper.setupLauncherURIHandlerConf(conf);
+        LauncherHelper.setupLauncherURIHandlerConf(conf);
         PrepareActionsDriver.doOperations(prepareXML, conf);
         FileSystem fs = getFileSystem();
         assertFalse(fs.exists(new Path(part1)));

Reply via email to