This is an automated email from the ASF dual-hosted git repository.

gezapeti pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/oozie.git


The following commit(s) were added to refs/heads/master by this push:
     new 90b9f10  OOZIE-3551 Spark shouldn't obtain tokens within the Spark action (Liz Szilagyi via gezapeti)
90b9f10 is described below

commit 90b9f1077d63ce9dc255f8db170b68dcb8935910
Author: Gezapeti Cseh <gezap...@apache.org>
AuthorDate: Thu Feb 20 12:07:04 2020 +0100

    OOZIE-3551 Spark shouldn't obtain tokens within the Spark action (Liz Szilagyi via gezapeti)
---
 release-log.txt                                    |  1 +
 .../oozie/action/hadoop/SparkArgsExtractor.java    | 18 +++++++++++
 .../action/hadoop/TestSparkArgsExtractor.java      | 35 ++++++++++++++++------
 3 files changed, 45 insertions(+), 9 deletions(-)

diff --git a/release-log.txt b/release-log.txt
index 31eeda3..ebe0f73 100644
--- a/release-log.txt
+++ b/release-log.txt
@@ -1,5 +1,6 @@
 -- Oozie 5.3.0 release (trunk - unreleased)
 
+OOZIE-3551 Spark shouldn't obtain tokens within the Spark action (Liz Szilagyi via gezapeti)
 OOZIE-3574 JavaAction create incorrect fileSystem instance in addActionLibs method (zuston via asalamon74)
 OOZIE-3569 SSH Action should add checking success file (zuston via asalamon74)
 OOZIE-3305 Prometheus /metrics http endpoint for monitoring integration (qsbao via gezapeti)
diff --git a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
index eece462..034a173 100644
--- a/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
+++ b/sharelib/spark/src/main/java/org/apache/oozie/action/hadoop/SparkArgsExtractor.java
@@ -57,6 +57,8 @@ class SparkArgsExtractor {
     private static final String LOG4J_CONFIGURATION_JAVA_OPTION = "-Dlog4j.configuration=";
     private static final String SECURITY_TOKENS_HADOOPFS = "spark.yarn.security.tokens.hadoopfs.enabled";
     private static final String SECURITY_TOKENS_HIVE = "spark.yarn.security.tokens.hive.enabled";
+    private static final String SECURITY_TOKENS_HIVESERVER2 = "spark.yarn.security.tokens.hiveserver2.enabled";
+    private static final String SECURITY_TOKENS_HIVESTREAMING = "spark.yarn.security.tokens.hivestreaming.enabled";
     private static final String SECURITY_TOKENS_HBASE = "spark.yarn.security.tokens.hbase.enabled";
     private static final String SECURITY_CREDENTIALS_HADOOPFS = "spark.yarn.security.credentials.hadoopfs.enabled";
     private static final String SECURITY_CREDENTIALS_HIVE = "spark.yarn.security.credentials.hive.enabled";
@@ -139,6 +141,8 @@ class SparkArgsExtractor {
 
         boolean addedSecurityTokensHadoopFS = false;
         boolean addedSecurityTokensHive = false;
+        boolean addedSecurityTokensHiveserver2 = false;
+        boolean addedSecurityTokensHivestreaming = false;
         boolean addedSecurityTokensHBase = false;
 
         boolean addedSecurityCredentialsHadoopFS = false;
@@ -182,6 +186,12 @@ class SparkArgsExtractor {
                 if (opt.startsWith(SECURITY_TOKENS_HIVE)) {
                     addedSecurityTokensHive = true;
                 }
+                if (opt.startsWith(SECURITY_TOKENS_HIVESERVER2)) {
+                    addedSecurityTokensHiveserver2 = true;
+                }
+                if (opt.startsWith(SECURITY_TOKENS_HIVESTREAMING)) {
+                    addedSecurityTokensHivestreaming = true;
+                }
                 if (opt.startsWith(SECURITY_TOKENS_HBASE)) {
                     addedSecurityTokensHBase = true;
                 }
@@ -279,6 +289,14 @@ class SparkArgsExtractor {
             sparkArgs.add(CONF_OPTION);
             sparkArgs.add(SECURITY_TOKENS_HIVE + OPT_SEPARATOR + Boolean.toString(false));
         }
+        if (!addedSecurityTokensHiveserver2) {
+            sparkArgs.add(CONF_OPTION);
+            sparkArgs.add(SECURITY_TOKENS_HIVESERVER2 + OPT_SEPARATOR + Boolean.toString(false));
+        }
+        if (!addedSecurityTokensHivestreaming) {
+            sparkArgs.add(CONF_OPTION);
+            sparkArgs.add(SECURITY_TOKENS_HIVESTREAMING + OPT_SEPARATOR + Boolean.toString(false));
+        }
         if (!addedSecurityTokensHBase) {
             sparkArgs.add(CONF_OPTION);
             sparkArgs.add(SECURITY_TOKENS_HBASE + OPT_SEPARATOR + Boolean.toString(false));
diff --git a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java
index 496805b..0daae59 100644
--- a/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java
+++ b/sharelib/spark/src/test/java/org/apache/oozie/action/hadoop/TestSparkArgsExtractor.java
@@ -96,6 +96,8 @@ public class TestSparkArgsExtractor {
                                 "-XX:HeapDumpPath=/tmp 
-Dlog4j.configuration=spark-log4j.properties",
                         "--conf", 
"spark.yarn.security.tokens.hadoopfs.enabled=false",
                         "--conf", 
"spark.yarn.security.tokens.hive.enabled=false",
+                        "--conf", 
"spark.yarn.security.tokens.hiveserver2.enabled=false",
+                        "--conf", 
"spark.yarn.security.tokens.hivestreaming.enabled=false",
                         "--conf", 
"spark.yarn.security.tokens.hbase.enabled=false",
                         "--conf", 
"spark.yarn.security.credentials.hadoopfs.enabled=false",
                         "--conf", 
"spark.yarn.security.credentials.hive.enabled=false",
@@ -140,6 +142,8 @@ public class TestSparkArgsExtractor {
                         "--conf", "spark.driver.extraClassPath=ccc:$PWD/*",
                         "--conf", 
"spark.yarn.security.tokens.hadoopfs.enabled=false",
                         "--conf", 
"spark.yarn.security.tokens.hive.enabled=false",
+                        "--conf", 
"spark.yarn.security.tokens.hiveserver2.enabled=false",
+                        "--conf", 
"spark.yarn.security.tokens.hivestreaming.enabled=false",
                         "--conf", 
"spark.yarn.security.tokens.hbase.enabled=false",
                         "--conf", 
"spark.yarn.security.credentials.hadoopfs.enabled=false",
                         "--conf", 
"spark.yarn.security.credentials.hive.enabled=false",
@@ -179,6 +183,8 @@ public class TestSparkArgsExtractor {
                         "--conf", "spark.driver.extraClassPath=$PWD/*",
                         "--conf", 
"spark.yarn.security.tokens.hadoopfs.enabled=false",
                         "--conf", 
"spark.yarn.security.tokens.hive.enabled=false",
+                        "--conf", 
"spark.yarn.security.tokens.hiveserver2.enabled=false",
+                        "--conf", 
"spark.yarn.security.tokens.hivestreaming.enabled=false",
                         "--conf", 
"spark.yarn.security.tokens.hbase.enabled=false",
                         "--conf", 
"spark.yarn.security.credentials.hadoopfs.enabled=false",
                         "--conf", 
"spark.yarn.security.credentials.hive.enabled=false",
@@ -217,6 +223,8 @@ public class TestSparkArgsExtractor {
                         "--conf", "spark.driver.extraClassPath=aaa:$PWD/*",
                         "--conf", 
"spark.yarn.security.tokens.hadoopfs.enabled=false",
                         "--conf", 
"spark.yarn.security.tokens.hive.enabled=false",
+                        "--conf", 
"spark.yarn.security.tokens.hiveserver2.enabled=false",
+                        "--conf", 
"spark.yarn.security.tokens.hivestreaming.enabled=false",
                         "--conf", 
"spark.yarn.security.tokens.hbase.enabled=false",
                         "--conf", 
"spark.yarn.security.credentials.hadoopfs.enabled=false",
                         "--conf", 
"spark.yarn.security.credentials.hive.enabled=false",
@@ -260,6 +268,8 @@ public class TestSparkArgsExtractor {
                         "--conf", "spark.driver.extraClassPath=$PWD/*",
                         "--conf", 
"spark.yarn.security.tokens.hadoopfs.enabled=false",
                         "--conf", 
"spark.yarn.security.tokens.hive.enabled=false",
+                        "--conf", 
"spark.yarn.security.tokens.hiveserver2.enabled=false",
+                        "--conf", 
"spark.yarn.security.tokens.hivestreaming.enabled=false",
                         "--conf", 
"spark.yarn.security.tokens.hbase.enabled=false",
                         "--conf", 
"spark.yarn.security.credentials.hadoopfs.enabled=false",
                         "--conf", 
"spark.yarn.security.credentials.hive.enabled=false",
@@ -306,6 +316,8 @@ public class TestSparkArgsExtractor {
                         "--conf", "spark.driver.extraClassPath=$PWD/*",
                         "--conf", 
"spark.yarn.security.tokens.hadoopfs.enabled=false",
                         "--conf", 
"spark.yarn.security.tokens.hive.enabled=false",
+                        "--conf", 
"spark.yarn.security.tokens.hiveserver2.enabled=false",
+                        "--conf", 
"spark.yarn.security.tokens.hivestreaming.enabled=false",
                         "--conf", 
"spark.yarn.security.tokens.hbase.enabled=false",
                         "--conf", 
"spark.yarn.security.credentials.hadoopfs.enabled=false",
                         "--conf", 
"spark.yarn.security.credentials.hive.enabled=false",
@@ -354,16 +366,21 @@ public class TestSparkArgsExtractor {
         assertEquals("Spark args mismatch",
                 Arrays.asList("--master", "yarn", "--deploy-mode", "client", 
"--name", "Spark Copy File",
                         "--class", "org.apache.oozie.example.SparkFileCopy", 
"--conf",
-                        "spark.driver.extraJavaOptions=-Xmx234m 
-Dlog4j.configuration=spark-log4j.properties", "--conf",
-                        "spark.executor.extraClassPath=$PWD/*", "--conf", 
"spark.driver.extraClassPath=$PWD/*", "--conf",
-                        "spark.yarn.security.tokens.hadoopfs.enabled=false", 
"--conf",
-                        "spark.yarn.security.tokens.hive.enabled=false", 
"--conf", "spark.yarn.security.tokens.hbase.enabled=false",
-                        "--conf", 
"spark.yarn.security.credentials.hadoopfs.enabled=false", "--conf",
-                        "spark.yarn.security.credentials.hive.enabled=false", 
"--conf",
-                        "spark.yarn.security.credentials.hbase.enabled=false", 
"--conf",
-                        
"spark.executor.extraJavaOptions=-Dlog4j.configuration=spark-log4j.properties",
+                        "spark.driver.extraJavaOptions=-Xmx234m 
-Dlog4j.configuration=spark-log4j.properties",
+                        "--conf", "spark.executor.extraClassPath=$PWD/*",
+                        "--conf", "spark.driver.extraClassPath=$PWD/*",
+                        "--conf", 
"spark.yarn.security.tokens.hadoopfs.enabled=false",
+                        "--conf", 
"spark.yarn.security.tokens.hive.enabled=false",
+                        "--conf", 
"spark.yarn.security.tokens.hiveserver2.enabled=false",
+                        "--conf", 
"spark.yarn.security.tokens.hivestreaming.enabled=false",
+                        "--conf", 
"spark.yarn.security.tokens.hbase.enabled=false",
+                        "--conf", 
"spark.yarn.security.credentials.hadoopfs.enabled=false",
+                        "--conf", 
"spark.yarn.security.credentials.hive.enabled=false",
+                        "--conf", 
"spark.yarn.security.credentials.hbase.enabled=false",
+                        "--conf", 
"spark.executor.extraJavaOptions=-Dlog4j.configuration=spark-log4j.properties",
                         "--properties-file", 
"spark-defaults-oozie-generated.properties", "--files",
-                        "spark-log4j.properties,hive-site.xml", "--conf", 
"spark.yarn.jar=null", "--verbose", "/lib/test.jar",
+                        "spark-log4j.properties,hive-site.xml",
+                        "--conf", "spark.yarn.jar=null", "--verbose", 
"/lib/test.jar",
                         "arg0", "arg1"),
                 sparkArgs);
     }
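
For context, the patch keeps Spark from fetching its own delegation tokens inside the action: every spark.yarn.security.tokens.* switch that the user did not set explicitly in spark-opts is appended to the spark-submit arguments as "<switch>=false", and OOZIE-3551 adds the hiveserver2 and hivestreaming switches to that list, so the action relies on the tokens Oozie already obtained. Below is a minimal, standalone Java sketch of that pattern; the class and method names are hypothetical and it is not Oozie code.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Standalone sketch (hypothetical class, not Oozie code) of the pattern SparkArgsExtractor
// follows: any token switch the user did not configure explicitly is disabled by default.
public class TokenOptDefaultsSketch {

    private static final String CONF_OPTION = "--conf";
    private static final List<String> TOKEN_SWITCHES = Arrays.asList(
            "spark.yarn.security.tokens.hadoopfs.enabled",
            "spark.yarn.security.tokens.hive.enabled",
            "spark.yarn.security.tokens.hiveserver2.enabled",   // added by OOZIE-3551
            "spark.yarn.security.tokens.hivestreaming.enabled", // added by OOZIE-3551
            "spark.yarn.security.tokens.hbase.enabled");

    static List<String> appendTokenDisablingDefaults(final List<String> userOpts) {
        final List<String> sparkArgs = new ArrayList<>(userOpts);
        for (final String tokenSwitch : TOKEN_SWITCHES) {
            // Mirrors the startsWith() check in the diff above: only append a default
            // when the user did not already set this switch in spark-opts.
            final boolean alreadySet = userOpts.stream().anyMatch(opt -> opt.startsWith(tokenSwitch));
            if (!alreadySet) {
                sparkArgs.add(CONF_OPTION);
                sparkArgs.add(tokenSwitch + "=" + Boolean.toString(false));
            }
        }
        return sparkArgs;
    }

    public static void main(final String[] args) {
        // Only the hive switch was set explicitly; the other four get "=false" defaults.
        final List<String> userOpts = Arrays.asList("spark.yarn.security.tokens.hive.enabled=true");
        appendTokenDisablingDefaults(userOpts).forEach(System.out::println);
    }
}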
