Repository: ignite
Updated Branches:
  refs/heads/ignite-3906 [created] 86579fed9


IGNITE-3906: Implemented additional user libs facility.
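
A note on usage: the new HADOOP_USER_LIBS variable holds additional dependencies that
are resolved like a standard Java classpath, i.e. entries separated by the platform
path separator, each either an exact file or a directory wildcard ending in '*'. The
sketch below is illustrative only: the paths are hypothetical, it assumes a valid
Hadoop home is already configured (user libs are resolved together with the regular
Hadoop locations), and passing the value as a JVM system property relies on the
systemOrEnv(...) helper used in the patch, which appears to accept either a system
property or an environment variable of the same name.

    import java.io.File;
    import java.net.URL;
    import java.util.List;

    import org.apache.ignite.internal.processors.hadoop.HadoopClasspathUtils;

    public class UserLibsSketch {
        public static void main(String[] args) throws Exception {
            // Illustrative value: one exact JAR plus one directory wildcard.
            System.setProperty("HADOOP_USER_LIBS",
                "/opt/libs/custom-format.jar" + File.pathSeparator + "/opt/libs/extra/*");

            // Classpath elements for an external Hadoop process (see HadoopClasspathMain).
            for (String element : HadoopClasspathUtils.classpathForProcess())
                System.out.println(element);

            // Classpath URLs for HadoopClassLoader.
            List<URL> urls = HadoopClasspathUtils.classpathForClassLoader();
            System.out.println(urls.size() + " classpath URLs resolved");
        }
    }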


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/268392de
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/268392de
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/268392de

Branch: refs/heads/ignite-3906
Commit: 268392de03782b27fcd6d33b707c35715ef580af
Parents: 409f043
Author: vozerov-gridgain <voze...@gridgain.com>
Authored: Thu Sep 15 12:19:17 2016 +0300
Committer: vozerov-gridgain <voze...@gridgain.com>
Committed: Thu Sep 15 12:19:17 2016 +0300

----------------------------------------------------------------------
 .../processors/hadoop/HadoopClassLoader.java    |   6 +-
 .../processors/hadoop/HadoopClasspathMain.java  |   2 +-
 .../processors/hadoop/HadoopClasspathUtils.java | 227 ++++++++++++++++---
 .../processors/hadoop/HadoopLocations.java      |  14 +-
 4 files changed, 211 insertions(+), 38 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/268392de/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
index 389de8c..2e0e271 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
@@ -61,10 +61,8 @@ import org.objectweb.asm.commons.RemappingClassAdapter;
  * unavailable for parent.
  */
 public class HadoopClassLoader extends URLClassLoader implements ClassCache {
-    /**
-     * We are very parallel capable.
-     */
     static {
+        // We are very parallel capable.
         registerAsParallelCapable();
     }
 
@@ -498,7 +496,7 @@ public class HadoopClassLoader extends URLClassLoader implements ClassCache {
                 return hadoopUrls;
 
             try {
-                hadoopUrls = HadoopClasspathUtils.classpathUrls();
+                hadoopUrls = HadoopClasspathUtils.classpathForClassLoader();
             }
             catch (IOException e) {
                 throw new IgniteCheckedException("Failed to resolve Hadoop JAR locations: " + e.getMessage(), e);

http://git-wip-us.apache.org/repos/asf/ignite/blob/268392de/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
index 5279b7d..4069496 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
@@ -36,7 +36,7 @@ public class HadoopClasspathMain {
 
         StringBuilder sb = new StringBuilder();
 
-        for (String path : HadoopClasspathUtils.classpathForJavaProcess())
+        for (String path : HadoopClasspathUtils.classpathForProcess())
             sb.append(path).append(separator);
 
         System.out.println(sb);

http://git-wip-us.apache.org/repos/asf/ignite/blob/268392de/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
index 121fcab..71acf16 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
@@ -27,6 +27,7 @@ import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.LinkedList;
 import java.util.List;
 
 /**
@@ -36,18 +37,21 @@ public class HadoopClasspathUtils {
     /** Prefix directory. */
     public static final String PREFIX = "HADOOP_PREFIX";
 
-    /** Home directory. */
+    /** Hadoop home directory. */
     public static final String HOME = "HADOOP_HOME";
 
-    /** Home directory. */
+    /** Hadoop common directory. */
     public static final String COMMON_HOME = "HADOOP_COMMON_HOME";
 
-    /** Home directory. */
+    /** Hadoop HDFS directory. */
     public static final String HDFS_HOME = "HADOOP_HDFS_HOME";
 
-    /** Home directory. */
+    /** Hadoop mapred directory. */
     public static final String MAPRED_HOME = "HADOOP_MAPRED_HOME";
 
+    /** Arbitrary additional dependencies. Comply with standard Java classpath resolution. */
+    public static final String HADOOP_USER_LIBS = "HADOOP_USER_LIBS";
+
     /** Empty string. */
     private static final String EMPTY_STR = "";
 
@@ -57,16 +61,18 @@ public class HadoopClasspathUtils {
      * @return List of the class path elements.
      * @throws IOException If failed.
      */
-    public static List<String> classpathForJavaProcess() throws IOException {
+    public static List<String> classpathForProcess() throws IOException {
         List<String> res = new ArrayList<>();
 
         for (final SearchDirectory dir : classpathDirectories()) {
-            if (dir.hasFilter()) {
+            if (dir.useWildcard()) {
+                if (dir.files().length > 0)
+                    res.add(dir.absolutePath() + File.separator + '*');
+            }
+            else {
                 for (File file : dir.files())
                     res.add(file.getAbsolutePath());
             }
-            else
-                res.add(dir.absolutePath() + File.separator + '*');
         }
 
         return res;
@@ -78,7 +84,7 @@ public class HadoopClasspathUtils {
      * @return List of class path URLs.
      * @throws IOException If failed.
      */
-    public static List<URL> classpathUrls() throws IOException {
+    public static List<URL> classpathForClassLoader() throws IOException {
         List<URL> res = new ArrayList<>();
 
         for (SearchDirectory dir : classpathDirectories()) {
@@ -108,9 +114,11 @@ public class HadoopClasspathUtils {
         String hdfsHome = systemOrEnv(HDFS_HOME, EMPTY_STR);
         String mapredHome = systemOrEnv(MAPRED_HOME, EMPTY_STR);
 
+        String userLibs = systemOrEnv(HADOOP_USER_LIBS, null);
+
         // If any composite location is defined, use only them.
         if (!isEmpty(commonHome) || !isEmpty(hdfsHome) || !isEmpty(mapredHome)) {
-            HadoopLocations res = new HadoopLocations(hadoopHome, commonHome, hdfsHome, mapredHome);
+            HadoopLocations res = new HadoopLocations(hadoopHome, commonHome, hdfsHome, mapredHome, userLibs);
 
             if (res.valid())
                 return res;
@@ -132,7 +140,8 @@ public class HadoopClasspathUtils {
                 hadoopHome,
                 hadoopHome + "/share/hadoop/common",
                 hadoopHome + "/share/hadoop/hdfs",
-                hadoopHome + "/share/hadoop/mapreduce"
+                hadoopHome + "/share/hadoop/mapreduce",
+                userLibs
             );
 
             if (res.valid())
@@ -143,7 +152,8 @@ public class HadoopClasspathUtils {
                 hadoopHome,
                 hadoopHome,
                 hadoopHome + "/../hadoop-hdfs",
-                hadoopHome + "/../hadoop-mapreduce"
+                hadoopHome + "/../hadoop-mapreduce",
+                userLibs
             );
 
             if (res.valid())
@@ -154,7 +164,8 @@ public class HadoopClasspathUtils {
                 hadoopHome,
                 hadoopHome,
                 hadoopHome + "/../hadoop-hdfs-client",
-                hadoopHome + "/../hadoop-mapreduce-client"
+                hadoopHome + "/../hadoop-mapreduce-client",
+                userLibs
             );
 
             if (res.valid())
@@ -182,17 +193,64 @@ public class HadoopClasspathUtils {
 
         Collection<SearchDirectory> res = new ArrayList<>();
 
-        res.add(new SearchDirectory(new File(loc.common(), "lib"), null));
-        res.add(new SearchDirectory(new File(loc.hdfs(), "lib"), null));
-        res.add(new SearchDirectory(new File(loc.mapred(), "lib"), null));
+        res.add(new SearchDirectory(new File(loc.common(), "lib"), AcceptAllDirectoryFilter.INSTANCE));
+        res.add(new SearchDirectory(new File(loc.hdfs(), "lib"), AcceptAllDirectoryFilter.INSTANCE));
+        res.add(new SearchDirectory(new File(loc.mapred(), "lib"), AcceptAllDirectoryFilter.INSTANCE));
+
+        res.add(new SearchDirectory(new File(loc.common()), new PrefixDirectoryFilter("hadoop-common-")));
+        res.add(new SearchDirectory(new File(loc.common()), new PrefixDirectoryFilter("hadoop-auth-")));
 
-        res.add(new SearchDirectory(new File(loc.common()), "hadoop-common-"));
-        res.add(new SearchDirectory(new File(loc.common()), "hadoop-auth-"));
+        res.add(new SearchDirectory(new File(loc.hdfs()), new PrefixDirectoryFilter("hadoop-hdfs-")));
 
-        res.add(new SearchDirectory(new File(loc.hdfs()), "hadoop-hdfs-"));
+        res.add(new SearchDirectory(new File(loc.mapred()),
+            new PrefixDirectoryFilter("hadoop-mapreduce-client-common")));
+        res.add(new SearchDirectory(new File(loc.mapred()),
+            new PrefixDirectoryFilter("hadoop-mapreduce-client-core")));
 
-        res.add(new SearchDirectory(new File(loc.mapred()), "hadoop-mapreduce-client-common"));
-        res.add(new SearchDirectory(new File(loc.mapred()), "hadoop-mapreduce-client-core"));
+        res.addAll(parseUserLibs(loc.userLibs()));
+
+        return res;
+    }
+
+    /**
+     * Parse user libs.
+     *
+     * @param str Original string.
+     * @return Parsed libs search patterns.
+     * @throws IOException If failed.
+     */
+    private static Collection<SearchDirectory> parseUserLibs(String str) throws IOException {
+        Collection<SearchDirectory> res = new LinkedList<>();
+
+        if (!isEmpty(str)) {
+            String[] tokens = normalize(str).split(File.pathSeparator);
+
+            for (String token : tokens) {
+                // Skip empty tokens.
+                if (isEmpty(token))
+                    continue;
+
+                if (token.endsWith("*")) {
+                    // Wildcard.
+                    File dir = new File(token.substring(0, token.length() - 1)).getParentFile();
+
+                    assert dir != null;
+
+                    res.add(new SearchDirectory(dir, AcceptAllDirectoryFilter.INSTANCE, false));
+                }
+                else {
+                    // Exact file.
+                    File file = new File(token);
+                    File dir = file.getParentFile();
+
+                    // Met "/" or "C:\" pattern - nothing to do with it.
+                    if (dir == null)
+                        continue;
+
+                    res.add(new SearchDirectory(dir, new ExactDirectoryFilter(file.getName()), false));
+                }
+            }
+        }
 
         return res;
     }
@@ -239,26 +297,55 @@ public class HadoopClasspathUtils {
     }
 
     /**
+     * Normalize the string.
+     *
+     * @param str String.
+     * @return Normalized string.
+     */
+    private static String normalize(String str) {
+        assert str != null;
+
+        return str.trim().toLowerCase();
+    }
+
+    /**
      * Simple pair-like structure to hold directory name and a mask assigned to it.
      */
     private static class SearchDirectory {
         /** File. */
         private final File dir;
 
-        /** The mask. */
-        private final String filter;
+        /** Filter. */
+        private final DirectoryFilter filter;
+
+        /** Whether directory must exist. */
+        private final boolean strict;
+
+        /**
+         * Constructor for directory search with strict rule.
+         *
+         * @param dir Directory.
+         * @param filter Filter.
+         * @throws IOException If failed.
+         */
+        private SearchDirectory(File dir, DirectoryFilter filter) throws IOException {
+            this(dir, filter, true);
+        }
 
         /**
          * Constructor.
          *
          * @param dir Directory.
          * @param filter Filter.
+         * @param strict Whether directory must exist.
+         * @throws IOException If failed.
          */
-        private SearchDirectory(File dir, String filter) throws IOException {
+        private SearchDirectory(File dir, DirectoryFilter filter, boolean strict) throws IOException {
             this.dir = dir;
             this.filter = filter;
+            this.strict = strict;
 
-            if (!exists(dir.getAbsolutePath()))
+            if (strict && !exists(dir.getAbsolutePath()))
                 throw new IOException("Directory cannot be read: " + dir.getAbsolutePath());
         }
 
@@ -275,21 +362,97 @@ public class HadoopClasspathUtils {
         private File[] files() throws IOException {
             File[] files = dir.listFiles(new FilenameFilter() {
                 @Override public boolean accept(File dir, String name) {
-                    return filter == null || name.startsWith(filter);
+                    return filter.test(name);
                 }
             });
 
-            if (files == null)
-                throw new IOException("Path is not a directory. [dir=" + dir + ']');
+            if (files == null) {
+                if (strict)
+                    throw new IOException("Failed to get directory files [dir=" + dir + ']');
+                else
+                    return new File[0];
+            }
+            else
+                return files;
+        }
+
+        /**
+         * @return {@code True} if wildcard can be used.
+         */
+        private boolean useWildcard() {
+            return filter instanceof AcceptAllDirectoryFilter;
+        }
+    }
+
+    /**
+     * Directory filter interface.
+     */
+    private static interface DirectoryFilter {
+        /**
+         * Test if file with this name should be included.
+         *
+         * @param name File name.
+         * @return {@code True} if passed.
+         */
+        public boolean test(String name);
+    }
+
+    /**
+     * Filter to accept all files.
+     */
+    public static class AcceptAllDirectoryFilter implements DirectoryFilter {
+        /** Singleton instance. */
+        public static final AcceptAllDirectoryFilter INSTANCE = new AcceptAllDirectoryFilter();
+
+        /** {@inheritDoc} */
+        @Override public boolean test(String name) {
+            return true;
+        }
+    }
+
+    /**
+     * Filter which uses prefix to filter files.
+     */
+    public static class PrefixDirectoryFilter implements DirectoryFilter {
+        /** Prefix. */
+        private final String prefix;
+
+        /**
+         * Constructor.
+         *
+         * @param prefix Prefix.
+         */
+        public PrefixDirectoryFilter(String prefix) {
+            assert prefix != null;
 
-            return files;
+            this.prefix = normalize(prefix);
         }
 
+        /** {@inheritDoc} */
+        @Override public boolean test(String name) {
+            return normalize(name).startsWith(prefix);
+        }
+    }
+
+    /**
+     * Filter which uses exact comparison.
+     */
+    public static class ExactDirectoryFilter implements DirectoryFilter {
+        /** Name. */
+        private final String name;
+
         /**
-         * @return {@code True} if filter exists.
+         * Constructor.
+         *
+         * @param name Name.
          */
-        private boolean hasFilter() {
-            return filter != null;
+        public ExactDirectoryFilter(String name) {
+            this.name = normalize(name);
+        }
+
+        /** {@inheritDoc} */
+        @Override public boolean test(String name) {
+            return normalize(name).equals(this.name);
         }
     }
 }
\ No newline at end of file
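
To make the parsing rules in parseUserLibs(...) above concrete, here is a small
standalone sketch mirroring its token handling; the class name and paths are
illustrative only and are not part of the patch.

    import java.io.File;

    public class UserLibsTokenDemo {
        public static void main(String[] args) {
            // Same format as HADOOP_USER_LIBS: tokens separated by the platform
            // path separator; empty tokens are skipped.
            String userLibs = "/opt/libs/a.jar" + File.pathSeparator + "/opt/libs/extra/*";

            for (String token : userLibs.trim().toLowerCase().split(File.pathSeparator)) {
                if (token.isEmpty())
                    continue;

                if (token.endsWith("*")) {
                    // Wildcard token: strip the '*' and search the parent directory of
                    // what remains with an accept-all filter; the directory is non-strict,
                    // so a missing directory simply contributes no classpath entries.
                    File dir = new File(token.substring(0, token.length() - 1)).getParentFile();

                    System.out.println("wildcard search in: " + dir);
                }
                else {
                    // Exact token: search the parent directory with an exact-name filter;
                    // bare roots like "/" or "C:\" have no parent and are ignored.
                    File file = new File(token);
                    File dir = file.getParentFile();

                    if (dir == null)
                        continue;

                    System.out.println("exact match for '" + file.getName() + "' in: " + dir);
                }
            }
        }
    }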

http://git-wip-us.apache.org/repos/asf/ignite/blob/268392de/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java
index a90007f..066cf7c 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java
@@ -42,6 +42,9 @@ public class HadoopLocations {
     /** Whether mapred home exists. */
     private final boolean mapredExists;
 
+    /** User libs. */
+    private final String userLibs;
+
     /**
      * Constructor.
      *
@@ -49,14 +52,16 @@ public class HadoopLocations {
      * @param common Common home.
      * @param hdfs HDFS home.
      * @param mapred Mapred home.
+     * @param userLibs User libs.
      */
-    public HadoopLocations(String home, String common, String hdfs, String mapred) {
+    public HadoopLocations(String home, String common, String hdfs, String mapred, String userLibs) {
         assert common != null && hdfs != null && mapred != null;
 
         this.home = home;
         this.common = common;
         this.hdfs = hdfs;
         this.mapred = mapred;
+        this.userLibs = userLibs;
 
         commonExists = HadoopClasspathUtils.exists(common);
         hdfsExists = HadoopClasspathUtils.exists(hdfs);
@@ -92,6 +97,13 @@ public class HadoopLocations {
     }
 
     /**
+     * @return User libs.
+     */
+    public String userLibs() {
+        return userLibs;
+    }
+
+    /**
      * @return Whether common home exists.
      */
     public boolean commonExists() {
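
For completeness, a minimal sketch of the extended HadoopLocations constructor and
the new accessor; the layout below is hypothetical and simply mirrors the
"/share/hadoop/..." probe used in HadoopClasspathUtils, and the userLibs argument
may be null since the value is optional.

    import org.apache.ignite.internal.processors.hadoop.HadoopLocations;

    public class LocationsSketch {
        public static void main(String[] args) {
            String home = "/usr/lib/hadoop";

            HadoopLocations loc = new HadoopLocations(
                home,
                home + "/share/hadoop/common",
                home + "/share/hadoop/hdfs",
                home + "/share/hadoop/mapreduce",
                System.getenv("HADOOP_USER_LIBS") // may be null; user libs are optional
            );

            System.out.println("User libs: " + loc.userLibs());
        }
    }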
