Repository: hive
Updated Branches:
  refs/heads/master 3cba487f5 -> 8ce0118ff


HIVE-14037: java.lang.ClassNotFoundException for the jar in hive.reloadable.aux.jars.path in mapreduce (Reviewed by Ferdinand Xu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8ce0118f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8ce0118f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8ce0118f

Branch: refs/heads/master
Commit: 8ce0118ffe517f0c622571778251cbd9f760c4f5
Parents: 3cba487
Author: Aihua Xu <aihu...@apache.org>
Authored: Fri Jun 24 15:16:49 2016 -0400
Committer: Aihua Xu <aihu...@apache.org>
Committed: Thu Jun 30 14:09:41 2016 -0400

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  3 +-
 .../hive/common/util/HiveStringUtils.java       | 21 ++++-
 .../apache/hadoop/hive/ql/exec/Utilities.java   | 69 ++++++++++-----
 .../hadoop/hive/ql/exec/mr/ExecDriver.java      | 88 ++++++++------------
 .../hadoop/hive/ql/exec/mr/MapRedTask.java      | 26 ++----
 .../hadoop/hive/ql/session/SessionState.java    | 10 ++-
 .../hadoop/hive/ql/exec/TestUtilities.java      | 17 ++--
 ql/src/test/queries/clientpositive/reloadJar.q  | 17 ++++
 .../test/results/clientpositive/reloadJar.q.out | 64 ++++++++++++++
 9 files changed, 214 insertions(+), 101 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/8ce0118f/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index d75ab40..ad467c5 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -936,7 +936,8 @@ public class HiveConf extends Configuration {
 
     // reloadable jars
     HIVERELOADABLEJARS("hive.reloadable.aux.jars.path", "",
-        "Jars can be renewed by executing reload command. And these jars can 
be "
+        "The locations of the plugin jars, which can be a comma-separated 
folders or jars. Jars can be renewed\n"
+        + "by executing reload command. And these jars can be "
             + "used as the auxiliary classes like creating a UDF or SerDe."),
 
     // hive added files and jars

http://git-wip-us.apache.org/repos/asf/hive/blob/8ce0118f/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hive/common/util/HiveStringUtils.java b/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
index c2ff635..72c3fa9 100644
--- a/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
+++ b/common/src/java/org/apache/hive/common/util/HiveStringUtils.java
@@ -43,6 +43,7 @@ import java.util.regex.Pattern;
 import com.google.common.collect.Interner;
 import com.google.common.collect.Interners;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang3.text.translate.CharSequenceTranslator;
 import org.apache.commons.lang3.text.translate.EntityArrays;
 import org.apache.commons.lang3.text.translate.LookupTranslator;
@@ -901,6 +902,24 @@ public class HiveStringUtils {
   }
 
   /**
+   * Concatenates strings using a separator. Empty, blank, or null strings are
+   * ignored.
+   *
+   * @param strings Strings to join.
+   * @param separator Separator to join with.
+   */
+  public static String joinIgnoringEmpty(String[] strings, char separator) {
+    ArrayList<String> list = new ArrayList<String>();
+    for(String str : strings) {
+      if (StringUtils.isNotBlank(str)) {
+        list.add(str);
+      }
+    }
+
+    return StringUtils.join(list, separator);
+  }
+
+  /**
    * Convert SOME_STUFF to SomeStuff
    *
    * @param s input string
@@ -911,7 +930,7 @@ public class HiveStringUtils {
     String[] words = split(s.toLowerCase(Locale.US), ESCAPE_CHAR, '_');
 
     for (String word : words) {
-      sb.append(org.apache.commons.lang.StringUtils.capitalize(word));
+      sb.append(StringUtils.capitalize(word));
     }
 
     return sb.toString();

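For reference, a minimal sketch of how the new helper behaves (the inputs here are
illustrative, not part of the patch):

    // null, empty, and whitespace-only entries are filtered out before joining
    String joined = HiveStringUtils.joinIgnoringEmpty(
        new String[]{"a.jar", null, "", "  ", "b.jar"}, ',');
    // joined -> "a.jar,b.jar"
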
http://git-wip-us.apache.org/repos/asf/hive/blob/8ce0118f/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 3fab298..12a929a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -36,6 +36,7 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.Serializable;
 import java.net.URI;
+import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLClassLoader;
 import java.net.URLDecoder;
@@ -85,6 +86,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.GlobFilter;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -1735,31 +1737,58 @@ public final class Utilities {
     return oneurl;
   }
 
+  /**
+   * Get the URI of the path. Assumed to be on the local file system if no scheme is given.
+   */
+  public static URI getURI(String path) throws URISyntaxException {
+    if (path == null) {
+      return null;
+    }
+
+    URI uri = new URI(path);
+    if (uri.getScheme() == null) {
+      // if no scheme in the path, we assume it's a file on the local fs.
+      uri = new File(path).toURI();
+    }
+
+    return uri;
+  }
+
     /**
-     * get the jar files from specified directory or get jar files by several jar names sperated by comma
-     * @param path
-     * @return
+     * Given a path string, get all the jars from the folder or the files themselves.
+     *
+     * @param pathString  the comma-separated list of paths
+     * @return            the set of jar file names in URI format
      */
-    public static Set<String> getJarFilesByPath(String path){
-        Set<String> result = new HashSet<String>();
-        if (path == null || path.isEmpty()) {
-            return result;
-        }
+    public static Set<String> getJarFilesByPath(String pathString, Configuration conf) {
+      Set<String> result = new HashSet<String>();
+      if (pathString == null || StringUtils.isBlank(pathString)) {
+          return result;
+      }
 
-        File paths = new File(path);
-        if (paths.exists() && paths.isDirectory()) {
-            // add all jar files under the reloadable auxiliary jar paths
-            Set<File> jarFiles = new HashSet<File>();
-            jarFiles.addAll(org.apache.commons.io.FileUtils.listFiles(
-                    paths, new String[]{"jar"}, true));
-            for (File f : jarFiles) {
-                result.add(f.getAbsolutePath());
+      String[] paths = pathString.split(",");
+      for(String path : paths) {
+        try {
+          Path p = new Path(getURI(path));
+          FileSystem fs = p.getFileSystem(conf);
+          if (!fs.exists(p)) {
+            LOG.error("The jar file path " + path + " doesn't exist");
+            continue;
+          }
+          if (fs.isDirectory(p)) {
+            // add all jar files under the folder
+            FileStatus[] files = fs.listStatus(p, new GlobFilter("*.jar"));
+            for(FileStatus file : files) {
+              result.add(file.getPath().toUri().toString());
             }
-        } else {
-            String[] files = path.split(",");
-            Collections.addAll(result, files);
+          } else {
+            result.add(p.toUri().toString());
+          }
+        } catch(URISyntaxException | IOException e) {
+          LOG.error("Invalid file path " + path, e);
         }
-        return result;
+      }
+      return result;
     }
 
   private static boolean useExistingClassLoader(ClassLoader cl) {

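A hedged usage sketch of the reworked lookup (the paths below are hypothetical;
per getURI above, an entry without a scheme is treated as a local file):

    Configuration conf = new Configuration();
    // a folder is expanded to its *.jar children; an explicit jar is kept as-is;
    // entries that don't exist are logged and skipped instead of failing the call
    Set<String> jars = Utilities.getJarFilesByPath(
        "/opt/hive/reloadable-aux,file:///tmp/udf.jar", conf);
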
http://git-wip-us.apache.org/repos/asf/hive/blob/8ce0118f/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
index 8a6499b..4a642db 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
@@ -91,6 +91,7 @@ import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hive.common.util.HiveStringUtils;
 import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.core.Appender;
@@ -140,6 +141,26 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
  }

  /**
+   * Retrieve the resources from the current session and configuration for the given type.
+   * @return Comma-separated list of resources
+   */
+  protected static String getResource(HiveConf conf, SessionState.ResourceType resType) {
+    switch(resType) {
+    case JAR:
+      String addedJars = Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR);
+      String auxJars = conf.getAuxJars();
+      String reloadableAuxJars = SessionState.get() == null ? null : SessionState.get().getReloadableAuxJars();
+      return HiveStringUtils.joinIgnoringEmpty(new String[]{addedJars, auxJars, reloadableAuxJars}, ',');
+    case FILE:
+      return Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);
+    case ARCHIVE:
+      return Utilities.getResourceFiles(conf, SessionState.ResourceType.ARCHIVE);
+    }
+
+    return null;
+  }
+
+  /**
    * Initialization when invoked from QL.
    */
   @Override
@@ -149,25 +170,10 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop

    job = new JobConf(conf, ExecDriver.class);

-    // NOTE: initialize is only called if it is in non-local mode.
-    // In case it's in non-local mode, we need to move the SessionState files
-    // and jars to jobConf.
-    // In case it's in local mode, MapRedTask will set the jobConf.
-    //
-    // "tmpfiles" and "tmpjars" are set by the method ExecDriver.execute(),
-    // which will be called by both local and NON-local mode.
-    String addedFiles = Utilities.getResourceFiles(job, SessionState.ResourceType.FILE);
-    if (StringUtils.isNotBlank(addedFiles)) {
-      HiveConf.setVar(job, ConfVars.HIVEADDEDFILES, addedFiles);
-    }
-    String addedJars = Utilities.getResourceFiles(job, SessionState.ResourceType.JAR);
-    if (StringUtils.isNotBlank(addedJars)) {
-      HiveConf.setVar(job, ConfVars.HIVEADDEDJARS, addedJars);
-    }
-    String addedArchives = Utilities.getResourceFiles(job, SessionState.ResourceType.ARCHIVE);
-    if (StringUtils.isNotBlank(addedArchives)) {
-      HiveConf.setVar(job, ConfVars.HIVEADDEDARCHIVES, addedArchives);
-    }
+    initializeFiles("tmpjars", getResource(conf, SessionState.ResourceType.JAR));
+    initializeFiles("tmpfiles", getResource(conf, SessionState.ResourceType.FILE));
+    initializeFiles("tmparchives", getResource(conf, SessionState.ResourceType.ARCHIVE));
+
    conf.stripHiddenConfigurations(job);
    this.jobExecHelper = new HadoopJobExecHelper(queryState, job, console, this, this);
   }
@@ -296,29 +302,10 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
      throw new RuntimeException(e.getMessage(), e);
    }

-
    // No-Op - we don't really write anything here ..
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

-    // Transfer HIVEAUXJARS and HIVEADDEDJARS to "tmpjars" so hadoop understands
-    // it
-    String auxJars = HiveConf.getVar(job, HiveConf.ConfVars.HIVEAUXJARS);
-    String addedJars = HiveConf.getVar(job, HiveConf.ConfVars.HIVEADDEDJARS);
-    if (StringUtils.isNotBlank(auxJars) || StringUtils.isNotBlank(addedJars)) {
-      String allJars = StringUtils.isNotBlank(auxJars) ? (StringUtils.isNotBlank(addedJars) ? addedJars
-          + "," + auxJars
-          : auxJars)
-          : addedJars;
-      LOG.info("adding libjars: " + allJars);
-      initializeFiles("tmpjars", allJars);
-    }
-
-    // Transfer HIVEADDEDFILES to "tmpfiles" so hadoop understands it
-    String addedFiles = HiveConf.getVar(job, HiveConf.ConfVars.HIVEADDEDFILES);
-    if (StringUtils.isNotBlank(addedFiles)) {
-      initializeFiles("tmpfiles", addedFiles);
-    }
     int returnVal = 0;
     boolean noName = StringUtils.isEmpty(job.get(MRJobConfig.JOB_NAME));
 
@@ -326,11 +313,6 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
      // This is for a special case to ensure unit tests pass
      job.set(MRJobConfig.JOB_NAME, "JOB" + Utilities.randGen.nextInt());
    }
-    String addedArchives = HiveConf.getVar(job, HiveConf.ConfVars.HIVEADDEDARCHIVES);
-    // Transfer HIVEADDEDARCHIVES to "tmparchives" so hadoop understands it
-    if (StringUtils.isNotBlank(addedArchives)) {
-      initializeFiles("tmparchives", addedArchives);
-    }
 
     try{
       MapredLocalWork localwork = mWork.getMapRedLocalWork();
@@ -634,6 +616,7 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
     String jobConfFileName = null;
     boolean noLog = false;
     String files = null;
+    String libjars = null;
     boolean localtask = false;
     try {
       for (int i = 0; i < args.length; i++) {
@@ -645,7 +628,9 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
           noLog = true;
         } else if (args[i].equals("-files")) {
           files = args[++i];
-        } else if (args[i].equals("-localtask")) {
+        } else if (args[i].equals("-libjars")) {
+          libjars = args[++i];
+        } else if (args[i].equals("-localtask")) {
           localtask = true;
         }
       }
@@ -665,10 +650,15 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop
       conf.addResource(new Path(jobConfFileName));
     }
 
+    // Initialize the resources from the command line
     if (files != null) {
       conf.set("tmpfiles", files);
     }
 
+    if (libjars != null) {
+      conf.set("tmpjars", libjars);
+    }
+
     if(UserGroupInformation.isSecurityEnabled()){
      String hadoopAuthToken = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
       if(hadoopAuthToken != null){
@@ -721,17 +711,11 @@ public class ExecDriver extends Task<MapredWork> implements Serializable, Hadoop

    // this is workaround for hadoop-17 - libjars are not added to classpath of the
    // child process. so we add it here explicitly
-
-    String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
-    String addedJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEADDEDJARS);
    try {
      // see also - code in CliDriver.java
      ClassLoader loader = conf.getClassLoader();
-      if (StringUtils.isNotBlank(auxJars)) {
-        loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
-      }
-      if (StringUtils.isNotBlank(addedJars)) {
-        loader = Utilities.addToClassPath(loader, StringUtils.split(addedJars, ","));
+      if (StringUtils.isNotBlank(libjars)) {
+        loader = Utilities.addToClassPath(loader, StringUtils.split(libjars, ","));
       }
       conf.setClassLoader(loader);
       // Also set this to the Thread ContextClassLoader, so new threads will

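To make the consolidation concrete, a sketch of what getResource(conf,
SessionState.ResourceType.JAR) now produces for "tmpjars" (the jar names are
illustrative): the session's added jars, hive.aux.jars.path, and the reloadable
aux jars are merged into one comma-separated -libjars list, with blank sources
dropped.

    String libjars = HiveStringUtils.joinIgnoringEmpty(new String[]{
        "file:///tmp/added.jar",      // ADD JAR resources from the session
        null,                         // no hive.aux.jars.path configured
        "file:///tmp/reload/udf.jar"  // hive.reloadable.aux.jars.path contents
    }, ',');
    // libjars -> "file:///tmp/added.jar,file:///tmp/reload/udf.jar"
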
http://git-wip-us.apache.org/repos/asf/hive/blob/8ce0118f/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
index a42c2e9..ce1106d9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
@@ -45,8 +45,11 @@ import org.apache.hadoop.hive.ql.plan.MapredWork;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.ReduceWork;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.session.SessionState.ResourceType;
 import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hive.common.util.HiveStringUtils;
 import org.apache.hive.common.util.StreamPrinter;
+
 /**
  * Extension of ExecDriver:
  * - can optionally spawn a map-reduce task from a separate jvm
@@ -148,24 +151,8 @@ public class MapRedTask extends ExecDriver implements Serializable {
      String hadoopExec = conf.getVar(HiveConf.ConfVars.HADOOPBIN);
      String hiveJar = conf.getJar();

-      String libJarsOption;
-      String addedJars = Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR);
-      conf.setVar(ConfVars.HIVEADDEDJARS, addedJars);
-      String auxJars = conf.getAuxJars();
-      // Put auxjars and addedjars together into libjars
-      if (StringUtils.isEmpty(addedJars)) {
-        if (StringUtils.isEmpty(auxJars)) {
-          libJarsOption = " ";
-        } else {
-          libJarsOption = " -libjars " + auxJars + " ";
-        }
-      } else {
-        if (StringUtils.isEmpty(auxJars)) {
-          libJarsOption = " -libjars " + addedJars + " ";
-        } else {
-          libJarsOption = " -libjars " + addedJars + "," + auxJars + " ";
-        }
-      }
+      String libJars = super.getResource(conf, ResourceType.JAR);
+      String libJarsOption = StringUtils.isEmpty(libJars) ? " " : " -libjars " + libJars + " ";
 
       // Generate the hiveConfArgs after potentially adding the jars
       String hiveConfArgs = generateCmdLine(conf, ctx);
@@ -194,7 +181,8 @@ public class MapRedTask extends ExecDriver implements Serializable {
          + planPath.toString() + " " + isSilent + " " + hiveConfArgs;

      String workDir = (new File(".")).getCanonicalPath();
-      String files = Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);
+
+      String files = super.getResource(conf, ResourceType.FILE);
       if (!files.isEmpty()) {
         cmdLine = cmdLine + " -files " + files;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/8ce0118f/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 96c826b..d4051a1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -1109,7 +1109,7 @@ public class SessionState {
       return;
     }
 
-    Set<String> jarPaths = Utilities.getJarFilesByPath(renewableJarPath);
+    Set<String> jarPaths = Utilities.getJarFilesByPath(renewableJarPath, sessionConf);
 
     // load jars under the hive.reloadable.aux.jars.path
     if(!jarPaths.isEmpty()){
@@ -1659,6 +1659,14 @@ public class SessionState {
   public List<String> getForwardedAddresses() {
     return forwardedAddresses;
   }
+
+  /**
+   * Gets the comma-separated reloadable aux jars
+   * @return the list of reloadable aux jars
+   */
+  public String getReloadableAuxJars() {
+    return StringUtils.join(preReloadableAuxJars, ',');
+  }
 }
 
 class ResourceMaps {

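A minimal session-side sketch (assuming an active session; RELOAD repopulates
preReloadableAuxJars from hive.reloadable.aux.jars.path, and the new getter
exposes it as a comma-separated string):

    SessionState ss = SessionState.get();
    if (ss != null) {
      // e.g. "file:///tmp/aux/a.jar,file:///tmp/aux/b.jar" after a RELOAD
      String reloadableJars = ss.getReloadableAuxJars();
    }
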
http://git-wip-us.apache.org/repos/asf/hive/blob/8ce0118f/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
index cc59f13..3ce4723 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
@@ -24,8 +24,8 @@ import java.io.File;
 import java.io.IOException;
 import java.sql.Timestamp;
 import java.util.ArrayList;
-import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -38,13 +38,13 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFFromUtcTimestamp;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.mapred.JobConf;
+import org.junit.Assert;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.Sets;
 import com.google.common.io.Files;
 
-import junit.framework.Assert;
 import junit.framework.TestCase;
 
 public class TestUtilities extends TestCase {
@@ -118,20 +118,23 @@ public class TestUtilities extends TestCase {
   }
 
   public void testGetJarFilesByPath() {
+    HiveConf conf = new HiveConf(this.getClass());
     File f = Files.createTempDir();
     String jarFileName1 = f.getAbsolutePath() + File.separator + "a.jar";
     String jarFileName2 = f.getAbsolutePath() + File.separator + "b.jar";
     File jarFile = new File(jarFileName1);
     try {
       FileUtils.touch(jarFile);
-      HashSet<String> jars = (HashSet) Utilities.getJarFilesByPath(f.getAbsolutePath());
-      Assert.assertEquals(Sets.newHashSet(jarFile.getAbsolutePath()),jars);
+      Set<String> jars = Utilities.getJarFilesByPath(f.getAbsolutePath(), conf);
+      Assert.assertEquals(Sets.newHashSet("file://" + jarFileName1), jars);
+
+      jars = Utilities.getJarFilesByPath("/folder/not/exist", conf);
+      Assert.assertTrue(jars.isEmpty());

      File jarFile2 = new File(jarFileName2);
      FileUtils.touch(jarFile2);
-      String newPath = "file://" + jarFileName1 + "," + "file://" + jarFileName2;
-      jars = (HashSet) Utilities.getJarFilesByPath(newPath);
-
+      String newPath = "file://" + jarFileName1 + "," + "file://" + jarFileName2 + ",/file/not/exist";
+      jars = Utilities.getJarFilesByPath(newPath, conf);
      Assert.assertEquals(Sets.newHashSet("file://" + jarFileName1, "file://" + jarFileName2), jars);
     } catch (IOException e) {
       LOG.error("failed to copy file to reloading folder", e);

http://git-wip-us.apache.org/repos/asf/hive/blob/8ce0118f/ql/src/test/queries/clientpositive/reloadJar.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/reloadJar.q b/ql/src/test/queries/clientpositive/reloadJar.q
new file mode 100644
index 0000000..6768a4f
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/reloadJar.q
@@ -0,0 +1,17 @@
+dfs -mkdir  ${system:test.tmp.dir}/aux;
+dfs -cp ${system:hive.root}/data/files/identity_udf.jar ${system:test.tmp.dir}/aux/udfexample.jar;
+
+SET hive.reloadable.aux.jars.path=${system:test.tmp.dir}/aux;
+RELOAD;
+CREATE TEMPORARY FUNCTION example_iden AS 'IdentityStringUDF';
+
+EXPLAIN
+SELECT example_iden(key)
+FROM src LIMIT 1;
+
+SELECT example_iden(key)
+FROM src LIMIT 1;
+
+DROP TEMPORARY FUNCTION example_iden;
+
+dfs -rm -r ${system:test.tmp.dir}/aux;

http://git-wip-us.apache.org/repos/asf/hive/blob/8ce0118f/ql/src/test/results/clientpositive/reloadJar.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/reloadJar.q.out b/ql/src/test/results/clientpositive/reloadJar.q.out
new file mode 100644
index 0000000..6991660
--- /dev/null
+++ b/ql/src/test/results/clientpositive/reloadJar.q.out
@@ -0,0 +1,64 @@
+PREHOOK: query: CREATE TEMPORARY FUNCTION example_iden AS 'IdentityStringUDF'
+PREHOOK: type: CREATEFUNCTION
+PREHOOK: Output: example_iden
+POSTHOOK: query: CREATE TEMPORARY FUNCTION example_iden AS 'IdentityStringUDF'
+POSTHOOK: type: CREATEFUNCTION
+POSTHOOK: Output: example_iden
+PREHOOK: query: EXPLAIN
+SELECT example_iden(key)
+FROM src LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT example_iden(key)
+FROM src LIMIT 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: example_iden(key) (type: string)
+              outputColumnNames: _col0
+              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+              Limit
+                Number of rows: 1
+                Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT example_iden(key)
+FROM src LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT example_iden(key)
+FROM src LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+238
+PREHOOK: query: DROP TEMPORARY FUNCTION example_iden
+PREHOOK: type: DROPFUNCTION
+PREHOOK: Output: example_iden
+POSTHOOK: query: DROP TEMPORARY FUNCTION example_iden
+POSTHOOK: type: DROPFUNCTION
+POSTHOOK: Output: example_iden
+#### A masked pattern was here ####
