Repository: hive Updated Branches: refs/heads/master 62ebd1abb -> f4d7471ec
HIVE-15485: Investigate the DoAs failure in HoS (Chaoyu Tang, reviewed by Xuefu Zhang and Jimmy Xiang) Project: http://git-wip-us.apache.org/repos/asf/hive/repo Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/803e1e0b Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/803e1e0b Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/803e1e0b Branch: refs/heads/master Commit: 803e1e0b89140350c40a7ec5c42f217532550495 Parents: 62ebd1a Author: Chaoyu Tang <ct...@cloudera.com> Authored: Mon Jan 30 11:26:06 2017 -0500 Committer: Chaoyu Tang <ct...@cloudera.com> Committed: Mon Jan 30 11:26:06 2017 -0500 ---------------------------------------------------------------------- .../hive/spark/client/SparkClientImpl.java | 40 ++++++++++++++------ 1 file changed, 29 insertions(+), 11 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/hive/blob/803e1e0b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java ---------------------------------------------------------------------- diff --git a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java index 0da40dd..d4b63f0 100644 --- a/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java +++ b/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java @@ -335,7 +335,7 @@ class SparkClientImpl implements SparkClient { Preconditions.checkArgument(master != null, "spark.master is not defined."); String deployMode = conf.get("spark.submit.deployMode"); - List<String> argv = Lists.newArrayList(); + List<String> argv = Lists.newLinkedList(); if (sparkHome != null) { argv.add(new File(sparkHome, "bin/spark-submit").getAbsolutePath()); @@ -376,16 +376,6 @@ class SparkClientImpl implements SparkClient { argv.add("org.apache.spark.deploy.SparkSubmit"); } - if 
("kerberos".equals(hiveConf.get(HADOOP_SECURITY_AUTHENTICATION))) { - String principal = SecurityUtil.getServerPrincipal(hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL), - "0.0.0.0"); - String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB); - argv.add("--principal"); - argv.add(principal); - argv.add("--keytab"); - argv.add(keyTabFile); - } - if (SparkClientUtilities.isYarnClusterMode(master, deployMode)) { String executorCores = conf.get("spark.executor.cores"); if (executorCores != null) { @@ -405,6 +395,34 @@ class SparkClientImpl implements SparkClient { argv.add(numOfExecutors); } } + // The options --principal/--keytab do not work with --proxy-user in spark-submit.sh + // (see HIVE-15485, SPARK-5493, SPARK-19143), so Hive could only support doAs or + // delegation token renewal, but not both. Since doAs is a more common case, if both + // are needed, we choose to favor doAs. So when doAs is enabled, we use the kinit command, + // otherwise, we pass the principal/keytab to spark to support the token renewal for + // long-running applications. 
+ if ("kerberos".equals(hiveConf.get(HADOOP_SECURITY_AUTHENTICATION))) { + String principal = SecurityUtil.getServerPrincipal(hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL), + "0.0.0.0"); + String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB); + if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) { + List<String> kinitArgv = Lists.newLinkedList(); + kinitArgv.add("kinit"); + kinitArgv.add(principal); + kinitArgv.add("-k"); + kinitArgv.add("-t"); + kinitArgv.add(keyTabFile + ";"); + kinitArgv.addAll(argv); + argv = kinitArgv; + } else { + // if doAs is not enabled, we pass the principal/keytab to spark-submit in order to + // support the possible delegation token renewal in Spark + argv.add("--principal"); + argv.add(principal); + argv.add("--keytab"); + argv.add(keyTabFile); + } + } if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) { try { String currentUser = Utils.getUGI().getShortUserName();