Reverting.

Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/64e39ece
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/64e39ece
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/64e39ece

Branch: refs/heads/ignite-3929-1
Commit: 64e39ece8c827c0239211f5f040c53ab82c9404b
Parents: 04dd6f2
Author: vozerov-gridgain <voze...@gridgain.com>
Authored: Tue Sep 20 16:09:11 2016 +0300
Committer: vozerov-gridgain <voze...@gridgain.com>
Committed: Tue Sep 20 16:09:11 2016 +0300

----------------------------------------------------------------------
 .../processors/hadoop/HadoopClassLoader.java    | 246 ++++---------------
 .../processors/hadoop/HadoopSnappyTest.java     |   6 +-
 .../processors/hadoop/HadoopHelperImpl.java     |   2 +-
 .../internal/processors/hadoop/LoadHelper.java  |  47 ----
 .../ignite/internal/processors/hadoop/XXX.java  |   7 -
 5 files changed, 47 insertions(+), 261 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/64e39ece/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
----------------------------------------------------------------------
diff --git 
a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
 
b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
index 0ca480c..5297cea 100644
--- 
a/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
+++ 
b/modules/core/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
@@ -17,11 +17,7 @@
 
 package org.apache.ignite.internal.processors.hadoop;
 
-import java.util.Collections;
-import java.util.LinkedHashSet;
-import java.util.List;
 import org.apache.ignite.IgniteCheckedException;
-import org.apache.ignite.IgniteException;
 import org.apache.ignite.internal.util.ClassCache;
 import org.apache.ignite.internal.util.typedef.F;
 import org.apache.ignite.internal.util.typedef.internal.S;
@@ -65,7 +61,8 @@ public class HadoopClassLoader extends URLClassLoader 
implements ClassCache {
     public static final String CLS_DAEMON_REPLACE = 
"org.apache.ignite.internal.processors.hadoop.v2.HadoopDaemon";
 
     /** Hadoop class name: ShutdownHookManager replacement. */
-    public static final String CLS_SHUTDOWN_HOOK_MANAGER_REPLACE = 
"org.apache.ignite.internal.processors.hadoop.v2.HadoopShutdownHookManager";
+    public static final String CLS_SHUTDOWN_HOOK_MANAGER_REPLACE =
+        
"org.apache.ignite.internal.processors.hadoop.v2.HadoopShutdownHookManager";
 
     /** Name of libhadoop library. */
     private static final String LIBHADOOP = "hadoop.";
@@ -86,17 +83,17 @@ public class HadoopClassLoader extends URLClassLoader 
implements ClassCache {
     private final ConcurrentMap<String, Class> cacheMap = new 
ConcurrentHashMap<>();
 
     /** Diagnostic name of this class loader. */
-    @SuppressWarnings({"FieldCanBeLocal", "UnusedDeclaration"}) private final 
String name;
+    @SuppressWarnings({"FieldCanBeLocal", "UnusedDeclaration"})
+    private final String name;
 
     /** Native library names. */
-    private String[] loadedLibNames;
+    private final String[] libNames;
 
     /** Igfs Helper. */
     private final HadoopHelper helper;
 
     /**
      * Gets name for Job class loader. The name is specific for local node id.
-     *
      * @param locNodeId The local node id.
      * @return The class loader name.
      */
@@ -106,7 +103,6 @@ public class HadoopClassLoader extends URLClassLoader 
implements ClassCache {
 
     /**
      * Gets name for the task class loader. Task class loader
-     *
      * @param info The task info.
      * @param prefix Get only prefix (without task type and number)
      * @return The class loader name.
@@ -118,8 +114,6 @@ public class HadoopClassLoader extends URLClassLoader 
implements ClassCache {
             return "hadoop-task-" + info.jobId() + "-" + info.type() + "-" + 
info.taskNumber();
     }
 
-    private static final String[] DEFAULT_OPTIONAL_LIBS_TO_BE_LOADED = { 
"hadoop" };
-
     /**
      * Constructor.
      *
@@ -131,223 +125,71 @@ public class HadoopClassLoader extends URLClassLoader 
implements ClassCache {
         super(addHadoopUrls(urls), APP_CLS_LDR);
 
         assert !(getParent() instanceof HadoopClassLoader);
-        assert getClass().getClassLoader() == APP_CLS_LDR; // by definition, 
app cls loader created in such way.
 
         this.name = name;
+        this.libNames = libNames;
         this.helper = helper;
 
-        // TODO: for POC:
-        if (libNames == null)
-            libNames = new String[] { "hadoop" };
-
-        setNativeLibrariesToBeInjectedIfNeeded(this, libNames);
-    }
-
-    //    /**
-    //     * Workaround to load native Hadoop libraries. Java doesn't allow 
native libraries to be loaded from different
-    //     * classloaders. But we load Hadoop classes many times and one of 
these classes - {@code NativeCodeLoader} - tries
-
-    //     * to load the same native library over and over again.
-    //     * <p>
-    //     * To fix the problem, we force native library load in parent class 
loader and then "link" handle to this native
-    //     * library to our class loader. As a result, our class loader will 
think that the library is already loaded and will
-    //     * be able to link native methods.
-    //     *
-    //     * @see <a 
href="http://docs.oracle.com/javase/1.5.0/docs/guide/jni/spec/invocation.html#library_version">
-    //     *     JNI specification</a>
-    //     */
-    //    private void initializeNativeLibraries() {
-    //        try {
-    //            // This must trigger native library load.
-    //            // TODO: Do not delegate to APP LDR
-    //            Class.forName(CLS_NATIVE_CODE_LOADER, true, this);
-    //
-    //            final Vector<Object> curVector = U.field(this, 
"nativeLibraries");
-    //
-    //            // TODO: Do not delegate to APP LDR
-    //            ClassLoader ldr = APP_CLS_LDR;
-    //
-    //            while (ldr != null) {
-    //                Vector vector = U.field(ldr, "nativeLibraries");
-    //
-    //                for (Object lib : vector) {
-    //                    String name = U.field(lib, "name");
-    //
-    //                    boolean add = name.contains(LIBHADOOP);
-    //
-    //                    if (!add && libNames != null) {
-    //                        for (String libName : libNames) {
-    //                            if (libName != null && 
name.contains(libName)) {
-    //                                add = true;
-    //
-    //                                break;
-    //                            }
-    //                        }
-    //                    }
-    //
-    //                    if (add) {
-    //                        curVector.add(lib);
-    //
-    //                        return;
-    //                    }
-    //                }
-    //
-    //                ldr = ldr.getParent();
-    //            }
-    //        }
-    //        catch (Exception e) {
-    //            U.quietAndWarn(null, "Failed to initialize Hadoop native 
library " +
-    //                "(native Hadoop methods might not work properly): " + e);
-    //        }
-    //    }
-
-    /** */
-    private static volatile Collection<Object> nativeLibrariesToBeInjected;
-
-    /**
-     * This method will be invoked for each created instance of 
HadoopClassLoader, but the list of native libraries will
-     * be loaded only once.
-     */
-    private static void 
setNativeLibrariesToBeInjectedIfNeeded(HadoopClassLoader instance, String[] 
libs) {
-        System.out.println("### Loading libs: " + libs);
-
-        if (libs == null)
-            return;
-//
-//        boolean created = false;
-
-        // 1. If needed, init the native lib data collection:
-        if (nativeLibrariesToBeInjected == null) {
-            synchronized (HadoopClassLoader.class) {
-                if (nativeLibrariesToBeInjected == null) {
-                    LinkedHashSet<Object> natives0 = 
getNativeLibraries(APP_CLS_LDR);
-
-                    instance.runLoadingCode(libs);
-
-                    LinkedHashSet<Object> natives1 = 
getNativeLibraries(APP_CLS_LDR);
-
-                    natives1.removeAll(natives0);
-
-                    nativeLibrariesToBeInjected = 
Collections.unmodifiableCollection(natives1);
-
-                    System.out.println("### Collected loaded libraries: " + 
nativeLibrariesToBeInjected);
-//
-//                    created = true;
-                }
-            }
-        }
-
-        assert nativeLibrariesToBeInjected != null;
-
-        // 2. Inject libraries:
-        //if (!created)
-            // This is an instance that did not load the libs:
-        instance.injectNatives();
+        initializeNativeLibraries();
     }
 
     /**
+     * Workaround to load native Hadoop libraries. Java doesn't allow native 
libraries to be loaded from different
+     * classloaders. But we load Hadoop classes many times and one of these 
classes - {@code NativeCodeLoader} - tries
+     * to load the same native library over and over again.
+     * <p>
+     * To fix the problem, we force native library load in parent class loader 
and then "link" handle to this native
+     * library to our class loader. As a result, our class loader will think 
that the library is already loaded and will
+     * be able to link native methods.
      *
-     * @return
-     */
-    private static LinkedHashSet<Object> getNativeLibraries(ClassLoader cl) {
-        Vector<Object> curVector = U.field(cl, "nativeLibraries");
-
-        return new LinkedHashSet<>(curVector);
-    }
-
-    /**
-     * Injects previously
+     * @see <a 
href="http://docs.oracle.com/javase/1.5.0/docs/guide/jni/spec/invocation.html#library_version">
+     *     JNI specification</a>
      */
-    private void injectNatives() {
+    private void initializeNativeLibraries() {
         try {
-            // 2. Init this instance with the natives:
-            final Vector<Object> curVector = U.field(this, "nativeLibraries");
+            // This must trigger native library load.
+            // TODO: Do not delegate to APP LDR
+            Class.forName(CLS_NATIVE_CODE_LOADER, true, APP_CLS_LDR);
 
-            curVector.addAll(nativeLibrariesToBeInjected);
-        }
-        catch (Exception e) {
-            U.quietAndWarn(null, "Failed to initialize Hadoop native library " 
+
-                 "(native Hadoop methods might not work properly): " + e);
-        }
-    }
+            final Vector<Object> curVector = U.field(this, "nativeLibraries");
 
-    /**
-     *
-     * @return
-     */
-    private Collection<Object> collectNativeLibraries() {
-        List<Object> target = new ArrayList<>();
+            // TODO: Do not delegate to APP LDR
+            ClassLoader ldr = APP_CLS_LDR;
 
-        ClassLoader ldr = APP_CLS_LDR;
+            while (ldr != null) {
+                Vector vector = U.field(ldr, "nativeLibraries");
 
-        while (ldr != null) {
-            collectNativeLibrariesFromLoader(ldr, target);
+                for (Object lib : vector) {
+                    String name = U.field(lib, "name");
 
-            ldr = ldr.getParent();
-        }
+                    boolean add = name.contains(LIBHADOOP);
 
-        return Collections.unmodifiableList(target);
-    }
+                    if (!add && libNames != null) {
+                        for (String libName : libNames) {
+                            if (libName != null && name.contains(libName)) {
+                                add = true;
 
-    /**
-     * Run default or user code to force native libs loading:
-     */
-    private void runLoadingCode(String[] libs) {
-        try {
-//            // TODO: "XXX" is a special class loaded by Hadoop class loader 
(simulating Hadoop class).
-//            // NB: this sample class must *not* cause loading of any natives.
-//            Class<?> sampleCls = this.loadClass(XXX.class.getName(), true);
-//
-//            assert sampleCls != null;
-//            assert sampleCls.getClassLoader() == this;
+                                break;
+                            }
+                        }
+                    }
 
-            Collection<String> loadedLibs = new ArrayList<>();
+                    if (add) {
+                        curVector.add(lib);
 
-            for (String lib: libs) {
-                boolean ok = LoadHelper.tryLoad(null, lib);
+                        return;
+                    }
+                }
 
-                if (ok)
-                    loadedLibs.add(lib);
+                ldr = ldr.getParent();
             }
-
-            loadedLibNames = loadedLibs.toArray(new String[loadedLibs.size()]);
         }
         catch (Exception e) {
-            throw new IgniteException(e);
-        }
-
-    }
-
-    /**
-     *
-     * @param ldr
-     * @param target
-     */
-    private void collectNativeLibrariesFromLoader(ClassLoader ldr, 
Collection<Object> target) {
-        final Vector vector = U.field(ldr, "nativeLibraries");
-
-        for (Object lib : vector) {
-            String name = U.field(lib, "name");
-
-            // TODO: LIBHADOOP should be added implicitly into "libNames"
-            boolean addLib = false; //name.contains(LIBHADOOP);
-
-            if (loadedLibNames != null) {
-                for (String libName : loadedLibNames) {
-                    if (libName != null && name.contains(libName)) {
-                        addLib = true;
-
-                        break;
-                    }
-                }
-            }
-
-            if (addLib)
-                target.add(lib);
+            U.quietAndWarn(null, "Failed to initialize Hadoop native library " 
+
+                "(native Hadoop methods might not work properly): " + e);
         }
     }
 
-
     /** {@inheritDoc} */
     @Override protected Class<?> loadClass(String name, boolean resolve) 
throws ClassNotFoundException {
         try {

http://git-wip-us.apache.org/repos/asf/ignite/blob/64e39ece/modules/hadoop-impl/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSnappyTest.java
----------------------------------------------------------------------
diff --git 
a/modules/hadoop-impl/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSnappyTest.java
 
b/modules/hadoop-impl/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSnappyTest.java
index 5d4de6f..656ba66 100644
--- 
a/modules/hadoop-impl/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSnappyTest.java
+++ 
b/modules/hadoop-impl/src/test/java/org/apache/ignite/internal/processors/hadoop/HadoopSnappyTest.java
@@ -45,10 +45,10 @@ public class HadoopSnappyTest extends 
GridCommonAbstractTest {
      */
     public void testSnappy() throws Throwable {
         // Run Snappy test in default class loader:
-        //checkSnappy();
+        checkSnappy();
 
         // Run the same in several more class loaders simulating jobs and 
tasks:
-        for (int i = 0; i < 3; i++) {
+        for (int i = 0; i < 2; i++) {
             ClassLoader hadoopClsLdr = new HadoopClassLoader(null, "cl-" + i, 
null, new HadoopHelperImpl());
 
             Class<?> cls = 
(Class)Class.forName(HadoopSnappyTest.class.getName(), true, hadoopClsLdr);
@@ -57,8 +57,6 @@ public class HadoopSnappyTest extends GridCommonAbstractTest {
 
             U.invoke(cls, null, "checkSnappy");
         }
-
-        checkSnappy();
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/ignite/blob/64e39ece/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelperImpl.java
----------------------------------------------------------------------
diff --git 
a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelperImpl.java
 
b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelperImpl.java
index 282e223..59a37e9 100644
--- 
a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelperImpl.java
+++ 
b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopHelperImpl.java
@@ -89,7 +89,7 @@ public class HadoopHelperImpl implements HadoopHelper {
      * @return {@code true} If this is Hadoop class.
      */
     @Override public boolean isHadoop(String cls) {
-        return cls.startsWith("org.apache.hadoop.") || 
cls.equals(XXX.class.getName());
+        return cls.startsWith("org.apache.hadoop.");
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/ignite/blob/64e39ece/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/LoadHelper.java
----------------------------------------------------------------------
diff --git 
a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/LoadHelper.java
 
b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/LoadHelper.java
deleted file mode 100644
index 223855b..0000000
--- 
a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/LoadHelper.java
+++ /dev/null
@@ -1,47 +0,0 @@
-package org.apache.ignite.internal.processors.hadoop;
-
-import java.lang.reflect.Method;
-
-/**
- * Should be loaded with the
- */
-public class LoadHelper {
-
-//    private static Method method;
-//
-//    static {
-//        try {
-//            method = ClassLoader.class.getDeclaredMethod("loadLibrary",
-//                new Class[] {Class.class, String.class, boolean.class});
-//
-//            method.setAccessible(true);
-//        } catch (Exception e) {
-//            e.printStackTrace();
-//        }
-//    }
-
-    /**
-     * Utility method that loads given class by name with the given "caller" 
class.
-     *
-     * @return 'true' on success.
-     */
-    public static boolean tryLoad(Class caller, String libName) {
-        try {
-            System.loadLibrary(libName);
-
-            return true;
-        }
-        catch (Throwable t) {
-            // TODO:
-            t.printStackTrace();
-
-            return false;
-        }
-    }
-
-
-    public static void main(String[] args) {
-        System.load("/home/ivan/hadoop-2.6.0/lib/native/libhadoop.so");
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/ignite/blob/64e39ece/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/XXX.java
----------------------------------------------------------------------
diff --git 
a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/XXX.java
 
b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/XXX.java
deleted file mode 100644
index b230636..0000000
--- 
a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/XXX.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package org.apache.ignite.internal.processors.hadoop;
-
-/**
- * Created by ivan on 19.09.16.
- */
-public class XXX {
-}

Reply via email to