This is an automated email from the ASF dual-hosted git repository. ravipesala pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/carbondata.git
The following commit(s) were added to refs/heads/master by this push: new bf096e1 [CARBONDATA-3400] Support IndexServer for Spark-Shell in secure mode (Kerberos) bf096e1 is described below commit bf096e128f35865c7cd46cd5a5058c8e5227d773 Author: BJangir <babulaljangir...@gmail.com> AuthorDate: Mon May 27 15:26:21 2019 +0530 [CARBONDATA-3400] Support IndexServer for Spark-Shell in secure mode (Kerberos) Problem: In spark-shell or spark-submit mode, the application user and the IndexServer user are different. The application user is based on the kinit user or on the spark.yarn.principal user, whereas the IndexServer user is based on spark.carbon.indexserver.principal. It is possible that the two differ, since the IndexServer should have its own authentication principal and should not depend on the application's principal, so that any application's query (Thriftserver, Spark-shell, Spark-sql, spark-submit) can be served by the IndexServer. Solution: Authenticate the IndexServer with its own principal and keytab. The keytab is required so that long-running applications (client and IndexServer) are not impacted by token expiry. Note: the spark-defaults.conf used by the Thriftserver (beeline), spark-submit, and spark-sql should contain both spark.carbon.indexserver.principal and spark.carbon.indexserver.keytab. 
This closes #3240 --- .../scala/org/apache/carbondata/indexserver/IndexServer.scala | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/integration/spark2/src/main/scala/org/apache/carbondata/indexserver/IndexServer.scala b/integration/spark2/src/main/scala/org/apache/carbondata/indexserver/IndexServer.scala index e738fb3..f066095 100644 --- a/integration/spark2/src/main/scala/org/apache/carbondata/indexserver/IndexServer.scala +++ b/integration/spark2/src/main/scala/org/apache/carbondata/indexserver/IndexServer.scala @@ -167,9 +167,16 @@ object IndexServer extends ServerInterface { */ def getClient: ServerInterface = { import org.apache.hadoop.ipc.RPC + val indexServerUser = sparkSession.sparkContext.getConf + .get("spark.carbon.indexserver.principal", "") + val indexServerKeyTab = sparkSession.sparkContext.getConf + .get("spark.carbon.indexserver.keytab", "") + val ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(indexServerUser, + indexServerKeyTab) + LOGGER.info("Login successful for user " + indexServerUser); RPC.getProxy(classOf[ServerInterface], RPC.getProtocolVersion(classOf[ServerInterface]), - new InetSocketAddress(serverIp, serverPort), UserGroupInformation.getLoginUser, + new InetSocketAddress(serverIp, serverPort), ugi, FileFactory.getConfiguration, NetUtils.getDefaultSocketFactory(FileFactory.getConfiguration)) } }