HIVE-12616: NullPointerException when a Spark session is reused to run a mapjoin
(Nemon Lou, via Szehon)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d469e611
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d469e611
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d469e611

Branch: refs/heads/llap
Commit: d469e61108a1844fcc173674bfb2cd9f7ad01c18
Parents: 219d352
Author: Szehon Ho <sze...@cloudera.com>
Authored: Thu Mar 24 11:12:08 2016 -0700
Committer: Szehon Ho <sze...@cloudera.com>
Committed: Thu Mar 24 11:12:50 2016 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java | 4 ++++
 1 file changed, 4 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/d469e611/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
index 1798622..2427321 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
@@ -28,6 +28,7 @@ import java.util.Set;
 
 import org.apache.commons.compress.utils.CharsetNames;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -120,6 +121,9 @@ public class HiveSparkClientFactory {
       sparkMaster = sparkConf.get("spark.master");
       hiveConf.set("spark.master", sparkMaster);
     }
+    if (SessionState.get() != null && SessionState.get().getConf() != null) {
+      SessionState.get().getConf().set("spark.master", sparkMaster);
+    }
     if (sparkMaster.equals("yarn-cluster")) {
       sparkConf.put("spark.yarn.maxAppAttempts", "1");
     }

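For context, below is a minimal, self-contained sketch of the null-guarded config propagation the patch adds. The Conf and Session classes here are hypothetical stand-ins for Hive's HiveConf and SessionState (only the guard pattern mirrors the diff); the point is that checking both the session and its conf for null before writing spark.master avoids a NullPointerException when no session is active.

    import java.util.HashMap;
    import java.util.Map;

    public class SparkMasterPropagationSketch {

      // Hypothetical stand-in for HiveConf: a simple string-keyed config map.
      static class Conf {
        private final Map<String, String> props = new HashMap<>();
        void set(String key, String value) { props.put(key, value); }
        String get(String key) { return props.get(key); }
      }

      // Hypothetical stand-in for SessionState, with a thread-local
      // "current session" the way Hive exposes SessionState.get().
      static class Session {
        private static final ThreadLocal<Session> CURRENT = new ThreadLocal<>();
        private final Conf conf;
        Session(Conf conf) { this.conf = conf; }
        static Session get() { return CURRENT.get(); }
        static void start(Session s) { CURRENT.set(s); }
        Conf getConf() { return conf; }
      }

      public static void main(String[] args) {
        String sparkMaster = "yarn-cluster";

        // The guard from the patch: only touch the session conf when both
        // the session and its conf are non-null, so the absence of an
        // active session cannot trigger a NullPointerException here.
        if (Session.get() != null && Session.get().getConf() != null) {
          Session.get().getConf().set("spark.master", sparkMaster);
        }
        // With no session started yet, the block above is a safe no-op.

        // Once a session exists, the same guarded write propagates the
        // resolved spark.master into the session-level configuration.
        Session.start(new Session(new Conf()));
        if (Session.get() != null && Session.get().getConf() != null) {
          Session.get().getConf().set("spark.master", sparkMaster);
        }
        System.out.println("spark.master = "
            + Session.get().getConf().get("spark.master"));
      }
    }

In the real patch the guarded write keeps the session's HiveConf in sync with the spark.master value resolved by HiveSparkClientFactory, so a reused session sees the correct master setting on subsequent queries.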