Author: brock
Date: Mon Feb 23 02:44:47 2015
New Revision: 1661599
URL: http://svn.apache.org/r1661599
Log:
HIVE-9671 - Support Impersonation [Spark Branch] (Brock via Xuefu)
Modified:
hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
hive/branches/spark/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
Modified:
hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1661599&r1=1661598&r2=1661599&view=diff
==============================================================================
--- hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/branches/spark/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java Mon Feb 23 02:44:47 2015
@@ -913,13 +913,15 @@ public class QTestUtil {
long endTime = System.currentTimeMillis() + 240000;
while (sparkSession.getMemoryAndCores().getSecond() <= 1) {
if (System.currentTimeMillis() >= endTime) {
- LOG.error("Timed out waiting for Spark cluster to init");
- break;
+ String msg = "Timed out waiting for Spark cluster to init";
+ throw new IllegalStateException(msg);
}
Thread.sleep(100);
}
} catch (Exception e) {
- LOG.error(e);
+ String msg = "Error trying to obtain executor info: " + e;
+ LOG.error(msg, e);
+ throw new IllegalStateException(msg, e);
}
}
}
Modified:
hive/branches/spark/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1661599&r1=1661598&r2=1661599&view=diff
==============================================================================
--- hive/branches/spark/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/branches/spark/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Mon Feb 23 02:44:47 2015
@@ -416,6 +416,18 @@ public class Hadoop23Shims extends Hadoo
}
}
+ private void configureImpersonation(Configuration conf) {
+ String user;
+ try {
+ user = Utils.getUGI().getShortUserName();
+ } catch (Exception e) {
+ String msg = "Cannot obtain username: " + e;
+ throw new IllegalStateException(msg, e);
+ }
+ conf.set("hadoop.proxyuser." + user + ".groups", "*");
+ conf.set("hadoop.proxyuser." + user + ".hosts", "*");
+ }
+
/**
* Returns a shim to wrap MiniSparkOnYARNCluster
*/
@@ -435,10 +447,10 @@ public class Hadoop23Shims extends Hadoo
public MiniSparkShim(Configuration conf, int numberOfTaskTrackers,
String nameNode, int numDir) throws IOException {
-
mr = new MiniSparkOnYARNCluster("sparkOnYarn");
conf.set("fs.defaultFS", nameNode);
conf.set("yarn.resourcemanager.scheduler.class",
"org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler");
+ configureImpersonation(conf);
mr.init(conf);
mr.start();
this.conf = mr.getConfig();
@@ -493,6 +505,7 @@ public class Hadoop23Shims extends Hadoo
int numDataNodes,
boolean format,
String[] racks) throws IOException {
+ configureImpersonation(conf);
MiniDFSCluster miniDFSCluster = new MiniDFSCluster(conf, numDataNodes,
format, racks);
// Need to set the client's KeyProvider to the NN's for JKS,
Modified:
hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java?rev=1661599&r1=1661598&r2=1661599&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java Mon Feb 23 02:44:47 2015
@@ -166,7 +166,7 @@ public class RemoteDriver {
jcLock.notifyAll();
}
} catch (Exception e) {
- LOG.error("Failed to start SparkContext.", e);
+ LOG.error("Failed to start SparkContext: " + e, e);
shutdown(e);
synchronized (jcLock) {
jcLock.notifyAll();
@@ -203,7 +203,11 @@ public class RemoteDriver {
private synchronized void shutdown(Throwable error) {
if (running) {
- LOG.info("Shutting down remote driver.");
+ if (error == null) {
+ LOG.info("Shutting down remote driver.");
+ } else {
+ LOG.error("Shutting down remote driver due to error: " + error, error);
+ }
running = false;
for (JobWrapper<?> job : activeJobs.values()) {
cancelJob(job);
Modified:
hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java?rev=1661599&r1=1661598&r2=1661599&view=diff
==============================================================================
--- hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java (original)
+++ hive/branches/spark/spark-client/src/main/java/org/apache/hive/spark/client/SparkClientImpl.java Mon Feb 23 02:44:47 2015
@@ -47,6 +47,7 @@ import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.shims.Utils;
import org.apache.hive.spark.client.rpc.Rpc;
import org.apache.hive.spark.client.rpc.RpcConfiguration;
import org.apache.hive.spark.client.rpc.RpcServer;
@@ -350,6 +351,16 @@ class SparkClientImpl implements SparkCl
}
}
+ if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
+ argv.add("--proxy-user");
+ try {
+ argv.add(Utils.getUGI().getShortUserName());
+ } catch (Exception e) {
+ String msg = "Cannot obtain username: " + e;
+ throw new IllegalStateException(msg, e);
+ }
+ }
+
argv.add("--properties-file");
argv.add(properties.getAbsolutePath());
argv.add("--class");