HIVE-12708: Hive on Spark doesn't work with Kerberized HBase [Spark Branch] (reviewed by Szehon)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/bb5ad573 Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/bb5ad573 Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/bb5ad573 Branch: refs/heads/master Commit: bb5ad5733d0d5dfc2489deb658beea609405a29c Parents: 9f57569 Author: Xuefu Zhang <[email protected]> Authored: Fri Dec 18 14:37:03 2015 -0800 Committer: Rui Li <[email protected]> Committed: Thu Jan 28 14:50:43 2016 +0800 ---------------------------------------------------------------------- .../hive/ql/exec/spark/HiveSparkClientFactory.java | 11 +++++++++++ 1 file changed, 11 insertions(+) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/hive/blob/bb5ad573/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java ---------------------------------------------------------------------- diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java index ec0fdea..9b2dce3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java @@ -30,6 +30,7 @@ import org.apache.commons.compress.utils.CharsetNames; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.io.HiveKey; @@ -67,6 +68,7 @@ public class HiveSparkClientFactory { public static Map<String, String> initiateSparkConf(HiveConf hiveConf) { Map<String, String> sparkConf = new HashMap<String, String>(); + HBaseConfiguration.addHbaseResources(hiveConf); // set default spark configurations. 
sparkConf.put("spark.master", SPARK_DEFAULT_MASTER); @@ -139,7 +141,16 @@ public class HiveSparkClientFactory { if (value != null && !value.isEmpty()) { sparkConf.put("spark.hadoop." + propertyName, value); } + } else if (propertyName.startsWith("hbase")) { + // Add HBase related configuration to Spark because in security mode, Spark needs it + // to generate hbase delegation token for Spark. This is a temp solution to deal with + // Spark problem. + String value = hiveConf.get(propertyName); + sparkConf.put("spark.hadoop." + propertyName, value); + LOG.info(String.format( + "load HBase configuration (%s -> %s).", propertyName, value)); } + if (RpcConfiguration.HIVE_SPARK_RSC_CONFIGS.contains(propertyName)) { String value = RpcConfiguration.getValue(hiveConf, propertyName); sparkConf.put(propertyName, value);
