Repository: oozie
Updated Branches:
  refs/heads/master 962d650bd -> e0016c93a


OOZIE-2658 --driver-class-path can overwrite the classpath in SparkMain (gezapeti via rkanter)


Project: http://git-wip-us.apache.org/repos/asf/oozie/repo
Commit: http://git-wip-us.apache.org/repos/asf/oozie/commit/e0016c93
Tree: http://git-wip-us.apache.org/repos/asf/oozie/tree/e0016c93
Diff: http://git-wip-us.apache.org/repos/asf/oozie/diff/e0016c93

Branch: refs/heads/master
Commit: e0016c93ad903bdee07fa63b9265382f1c6e3a62
Parents: 962d650
Author: Robert Kanter <rkan...@cloudera.com>
Authored: Mon Oct 10 21:02:47 2016 -0700
Committer: Robert Kanter <rkan...@cloudera.com>
Committed: Mon Oct 10 21:02:47 2016 -0700

----------------------------------------------------------------------
 release-log.txt                                 |  1 +
 .../apache/oozie/action/hadoop/SparkMain.java   | 64 +++++++++++---------
 2 files changed, 37 insertions(+), 28 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/oozie/blob/e0016c93/release-log.txt
----------------------------------------------------------------------
diff --git a/release-log.txt b/release-log.txt
index e855a66..a34525e 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -5,6 +5,7 @@ OOZIE-2634 Queue dump command message is confusing when the queue is empty (andr
 
 -- Oozie 4.3.0 release
 
+OOZIE-2658 --driver-class-path can overwrite the classpath in SparkMain (gezapeti via rkanter)
 OOZIE-1814 Oozie should mask any passwords in logs and REST interfaces (andras.piros via rkanter)
 OOZIE-2622 ExtJS 2.2 is no longer available (rkanter)
 OOZIE-2606 Set spark.yarn.jars to fix Spark 2.0 with Oozie (satishsaley via rohini)

http://git-wip-us.apache.org/repos/asf/oozie/blob/e0016c93/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
----------------------------------------------------------------------
diff --git a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
index 539fb5c..19a39a9 100644
--- a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
+++ b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkMain.java
@@ -49,6 +49,7 @@ public class SparkMain extends LauncherMain {
     private static final String JOB_NAME_OPTION = "--name";
     private static final String CLASS_NAME_OPTION = "--class";
     private static final String VERBOSE_OPTION = "--verbose";
+    private static final String DRIVER_CLASSPATH_OPTION = "--driver-class-path";
     private static final String EXECUTOR_CLASSPATH = "spark.executor.extraClassPath=";
     private static final String DRIVER_CLASSPATH = "spark.driver.extraClassPath=";
     private static final String HIVE_SECURITY_TOKEN = "spark.yarn.security.tokens.hive.enabled";
@@ -116,29 +117,31 @@ public class SparkMain extends LauncherMain {
         if(jarPath!=null && jarPath.endsWith(".py")){
             isPyspark = true;
         }
-        boolean addedExecutorClasspath = false;
-        boolean addedDriverClasspath = false;
         boolean addedHiveSecurityToken = false;
         boolean addedHBaseSecurityToken = false;
+        StringBuilder driverClassPath = new StringBuilder();
+        StringBuilder executorClassPath = new StringBuilder();
         String sparkOpts = actionConf.get(SparkActionExecutor.SPARK_OPTS);
         if (StringUtils.isNotEmpty(sparkOpts)) {
             List<String> sparkOptions = splitSparkOpts(sparkOpts);
             for (int i = 0; i < sparkOptions.size(); i++) {
                 String opt = sparkOptions.get(i);
+                boolean addToSparkArgs = true;
                 if (yarnClusterMode || yarnClientMode) {
                     if (opt.startsWith(EXECUTOR_CLASSPATH)) {
-                        // Include the current working directory (of executor
-                        // container) in executor classpath, because it will contain
-                        // localized files
-                        opt = opt + File.pathSeparator + PWD;
-                        addedExecutorClasspath = true;
+                        appendWithPathSeparator(opt.substring(EXECUTOR_CLASSPATH.length()), executorClassPath);
+                        addToSparkArgs = false;
                     }
                     if (opt.startsWith(DRIVER_CLASSPATH)) {
-                        // Include the current working directory (of driver
-                        // container) in executor classpath, because it will contain
-                        // localized files
-                        opt = opt + File.pathSeparator + PWD;
-                        addedDriverClasspath = true;
+                        appendWithPathSeparator(opt.substring(DRIVER_CLASSPATH.length()), driverClassPath);
+                        addToSparkArgs = false;
+                    }
+                    if (opt.equals(DRIVER_CLASSPATH_OPTION)) {
+                        // we need the next element after this option
+                        appendWithPathSeparator(sparkOptions.get(i + 1), driverClassPath);
+                        // increase i to skip the next element.
+                        i++;
+                        addToSparkArgs = false;
                     }
                 }
                 if (opt.startsWith(HIVE_SECURITY_TOKEN)) {
@@ -147,25 +150,24 @@ public class SparkMain extends LauncherMain {
                 if (opt.startsWith(HBASE_SECURITY_TOKEN)) {
                     addedHBaseSecurityToken = true;
                 }
-                sparkArgs.add(opt);
+                if(addToSparkArgs) {
+                    sparkArgs.add(opt);
+                }
             }
         }
 
         if ((yarnClusterMode || yarnClientMode)) {
-            if (!addedExecutorClasspath) {
-                // Include the current working directory (of executor container)
-                // in executor classpath, because it will contain localized
-                // files
-                sparkArgs.add("--conf");
-                sparkArgs.add(EXECUTOR_CLASSPATH + PWD);
-            }
-            if (!addedDriverClasspath) {
-                // Include the current working directory (of driver container)
-                // in executor classpath, because it will contain localized
-                // files
-                sparkArgs.add("--conf");
-                sparkArgs.add(DRIVER_CLASSPATH + PWD);
-            }
+            // Include the current working directory (of executor container)
+            // in executor classpath, because it will contain localized
+            // files
+            appendWithPathSeparator(PWD, executorClassPath);
+            appendWithPathSeparator(PWD, driverClassPath);
+
+            sparkArgs.add("--conf");
+            sparkArgs.add(EXECUTOR_CLASSPATH + executorClassPath.toString());
+
+            sparkArgs.add("--conf");
+            sparkArgs.add(DRIVER_CLASSPATH + driverClassPath.toString());
         }
         sparkArgs.add("--conf");
         sparkArgs.add("spark.executor.extraJavaOptions=-Dlog4j.configuration=" 
+ SPARK_LOG4J_PROPS);
@@ -492,4 +494,10 @@ public class SparkMain extends LauncherMain {
         return manifest.getMainAttributes().getValue("Specification-Version");
     }
 
-}
\ No newline at end of file
+    private void appendWithPathSeparator(String what, StringBuilder to){
+        if(to.length() > 0){
+            to.append(File.pathSeparator);
+        }
+        to.append(what);
+    }
+}
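
For readers skimming the patch, below is a small standalone sketch (not part of the commit; the class name, sample jar path, and the "." stand-in for SparkMain's PWD constant are made up for illustration) of the merging behaviour the new appendWithPathSeparator helper enables: a user-supplied --driver-class-path value is now appended alongside the entries SparkMain adds, instead of one overwriting the other.

import java.io.File;

// Hypothetical illustration only: mirrors the shape of the appendWithPathSeparator
// helper added in this commit, to show how user-supplied and Oozie-provided
// classpath entries are merged into a single spark.driver.extraClassPath value.
public class ClasspathMergeSketch {

    private static void appendWithPathSeparator(String what, StringBuilder to) {
        if (to.length() > 0) {
            to.append(File.pathSeparator);
        }
        to.append(what);
    }

    public static void main(String[] args) {
        StringBuilder driverClassPath = new StringBuilder();
        // A value a user might pass via --driver-class-path in <spark-opts>
        // (path is invented for the example).
        appendWithPathSeparator("/opt/custom/jars/hbase-client.jar", driverClassPath);
        // SparkMain appends the container's current working directory last,
        // because it holds the localized files; "." stands in for its PWD constant here.
        appendWithPathSeparator(".", driverClassPath);
        // On Linux this prints:
        // spark.driver.extraClassPath=/opt/custom/jars/hbase-client.jar:.
        System.out.println("spark.driver.extraClassPath=" + driverClassPath);
    }
}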
