HIVE-12611: Make sure spark.yarn.queue is effective and takes the value from mapreduce.job.queuename if given [Spark Branch] (Rui reviewed by Xuefu)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e1a7503b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e1a7503b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e1a7503b

Branch: refs/heads/master
Commit: e1a7503b81a5509e391940709c8f9a6f552646a7
Parents: 34b41e3
Author: Rui Li <[email protected]>
Authored: Mon Jan 18 09:14:56 2016 +0800
Committer: Rui Li <[email protected]>
Committed: Thu Jan 28 14:52:38 2016 +0800

----------------------------------------------------------------------
 .../hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java   | 9 +++++++++
 1 file changed, 9 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/e1a7503b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
index a832bf6..993d02b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
@@ -174,6 +174,15 @@ public class HiveSparkClientFactory {
     classes.add(HiveKey.class.getName());
     sparkConf.put("spark.kryo.classesToRegister", Joiner.on(",").join(classes));
 
+    // set yarn queue name
+    final String sparkQueueNameKey = "spark.yarn.queue";
+    if (sparkMaster.startsWith("yarn") && hiveConf.get(sparkQueueNameKey) == null) {
+      String queueName = hiveConf.get("mapreduce.job.queuename");
+      if (queueName != null) {
+        sparkConf.put(sparkQueueNameKey, queueName);
+      }
+    }
+
     return sparkConf;
   }
 

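For context, a minimal standalone sketch (not part of the commit; the class, method, and queue names below are illustrative) of the precedence this change establishes: a spark.yarn.queue set explicitly in the Hive configuration is left untouched, and only on a YARN master does mapreduce.job.queuename fill in the Spark queue.

----------------------------------------------------------------------
import java.util.HashMap;
import java.util.Map;

// Illustrative sketch of the queue precedence added in HIVE-12611.
// Plain Maps stand in for HiveConf and the Spark configuration.
public class QueuePrecedenceSketch {

  static Map<String, String> buildSparkConf(String sparkMaster, Map<String, String> hiveConf) {
    Map<String, String> sparkConf = new HashMap<>();
    final String sparkQueueNameKey = "spark.yarn.queue";
    String explicitQueue = hiveConf.get(sparkQueueNameKey);
    if (explicitQueue != null) {
      // An explicitly configured Spark queue wins; in the real factory the
      // spark.* properties are propagated from HiveConf separately.
      sparkConf.put(sparkQueueNameKey, explicitQueue);
    } else if (sparkMaster.startsWith("yarn")) {
      // No explicit Spark queue on a YARN master: fall back to the
      // MapReduce queue name, if one is given.
      String queueName = hiveConf.get("mapreduce.job.queuename");
      if (queueName != null) {
        sparkConf.put(sparkQueueNameKey, queueName);
      }
    }
    return sparkConf;
  }

  public static void main(String[] args) {
    Map<String, String> conf = new HashMap<>();
    conf.put("mapreduce.job.queuename", "etl");
    // MapReduce queue is picked up on YARN when spark.yarn.queue is unset.
    System.out.println(buildSparkConf("yarn-cluster", conf)); // {spark.yarn.queue=etl}
    // A non-YARN master leaves the Spark configuration untouched.
    System.out.println(buildSparkConf("local", conf));        // {}
    // An explicit spark.yarn.queue still takes precedence.
    conf.put("spark.yarn.queue", "adhoc");
    System.out.println(buildSparkConf("yarn-cluster", conf)); // {spark.yarn.queue=adhoc}
  }
}
----------------------------------------------------------------------

In practice this means "set mapreduce.job.queuename=etl;" in a Hive session is enough to route the Spark application to that YARN queue, while an explicit "set spark.yarn.queue=...;" continues to take precedence.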