This is an automated email from the ASF dual-hosted git repository.
ulyssesyou pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new 413ba2e [KYUUBI #1009][FOLLOWUP] Hive delegation token sent from
Kyuubi server is ignored by SQL engine
413ba2e is described below
commit 413ba2eb24d3d11f3b732abd887614a9e74ac2aa
Author: zhouyifan279 <[email protected]>
AuthorDate: Tue Sep 28 11:10:08 2021 +0800
[KYUUBI #1009][FOLLOWUP] Hive delegation token sent from Kyuubi server is
ignored by SQL engine
### _Why are the changes needed?_
Hive delegation token sent from the Kyuubi server is ignored by the SQL engine
because the token alias does not match `hive.metastore.uris`.
```
21/09/27 14:22:39 INFO operation.ExecuteStatement: Processing morty's
query[5711be2b-1d2f-412a-a2e0-b52a5b0e0445]: RUNNING_STATE -> FINISHED_STATE,
statement: show databases, time taken: 0.056 seconds
21/09/27 14:22:39 INFO spark.SparkThriftBinaryFrontendService: Ignore Hive
token as engine has no Hive token ever before
21/09/27 14:22:39 INFO deploy.SparkHadoopUtil: Updating delegation tokens
for current user.
```
### _How was this patch tested?_
- [x] Add some test cases that check the changes thoroughly including
negative and positive cases if possible
- [x] Add screenshots for manual tests if appropriate
```
21/09/27 20:51:20 INFO operation.ExecuteStatement: Processing morty's
query[652a9f74-4813-406c-a842-6fc8b61baaf1]: RUNNING_STATE -> FINISHED_STATE,
statement: show databases, time taken: 0.05 seconds
21/09/27 20:51:20 INFO deploy.SparkHadoopUtil: Updating delegation tokens
for current user.
```
- [x] [Run
test](https://kyuubi.readthedocs.io/en/latest/develop_tools/testing.html#running-tests)
locally before making a pull request
Closes #1168 from zhouyifan279/#1009.
Closes #1009
fd326347 [zhouyifan279] [KYUUBI #1009][FOLLOWUP] Hive delegation token is
ignored by SQL engine
3302ee4b [zhouyifan279] [KYUUBI #1009][FOLLOWUP] Hive delegation token is
ignored by SQL engine
Authored-by: zhouyifan279 <[email protected]>
Signed-off-by: ulysses-you <[email protected]>
---
.../engine/spark/SparkThriftBinaryFrontendService.scala | 5 +++--
.../kyuubi/credentials/HiveDelegationTokenProvider.scala | 5 +++--
.../credentials/HiveDelegationTokenProviderSuite.scala | 13 ++++++++-----
3 files changed, 14 insertions(+), 9 deletions(-)
diff --git
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkThriftBinaryFrontendService.scala
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkThriftBinaryFrontendService.scala
index 28b5222..28cc145 100644
---
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkThriftBinaryFrontendService.scala
+++
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkThriftBinaryFrontendService.scala
@@ -104,9 +104,10 @@ class SparkThriftBinaryFrontendService(
warn(s"No matching Hive token found for engine metastore uris
$metastoreUris")
}
} else if (metastoreUris.isEmpty) {
- info(s"Ignore Hive token as engine metastore uris are empty")
+ info(s"Ignore Hive token as hive.metastore.uris are empty")
} else {
- info(s"Ignore Hive token as engine has not Hive token ever before")
+ // Either because Hive metastore is not secured or because engine is
launched with keytab
+ info(s"Ignore Hive token as engine does not need it")
}
}
diff --git
a/kyuubi-server/src/main/scala/org/apache/kyuubi/credentials/HiveDelegationTokenProvider.scala
b/kyuubi-server/src/main/scala/org/apache/kyuubi/credentials/HiveDelegationTokenProvider.scala
index e1236ff..b401cf2 100644
---
a/kyuubi-server/src/main/scala/org/apache/kyuubi/credentials/HiveDelegationTokenProvider.scala
+++
b/kyuubi-server/src/main/scala/org/apache/kyuubi/credentials/HiveDelegationTokenProvider.scala
@@ -33,12 +33,15 @@ class HiveDelegationTokenProvider extends
HadoopDelegationTokenProvider with Log
private var client: Option[IMetaStoreClient] = None
private var principal: String = _
+ private var tokenAlias: Text = _
override def serviceName: String = "hive"
override def initialize(hadoopConf: Configuration, kyuubiConf: KyuubiConf):
Unit = {
val conf = new HiveConf(hadoopConf, classOf[HiveConf])
val metastoreUris = conf.getTrimmed("hive.metastore.uris", "")
+ // SQL engine requires token alias to be `hive.metastore.uris`
+ tokenAlias = new Text(metastoreUris)
if (SecurityUtil.getAuthenticationMethod(hadoopConf) !=
AuthenticationMethod.SIMPLE
&& metastoreUris.nonEmpty
@@ -67,6 +70,4 @@ class HiveDelegationTokenProvider extends
HadoopDelegationTokenProvider with Log
}
override def close(): Unit = client.foreach(_.close())
-
- private def tokenAlias: Text = new Text("hive.server2.delegation.token")
}
diff --git
a/kyuubi-server/src/test/scala/org/apache/kyuubi/credentials/HiveDelegationTokenProviderSuite.scala
b/kyuubi-server/src/test/scala/org/apache/kyuubi/credentials/HiveDelegationTokenProviderSuite.scala
index d600a64..5f811fa 100644
---
a/kyuubi-server/src/test/scala/org/apache/kyuubi/credentials/HiveDelegationTokenProviderSuite.scala
+++
b/kyuubi-server/src/test/scala/org/apache/kyuubi/credentials/HiveDelegationTokenProviderSuite.scala
@@ -104,11 +104,14 @@ class HiveDelegationTokenProviderSuite extends
KerberizedTestHelper {
val credentials = new Credentials
provider.obtainDelegationTokens(owner, credentials)
- val token = credentials.getAllTokens.asScala
- .filter(_.getKind == DelegationTokenIdentifier.HIVE_DELEGATION_KIND)
- .head
- assert(token != null)
-
+ val aliasAndToken =
+ credentials.getTokenMap.asScala
+ .filter(_._2.getKind ==
DelegationTokenIdentifier.HIVE_DELEGATION_KIND)
+ .head
+ assert(aliasAndToken._1 == new
Text(hiveConf.getTrimmed("hive.metastore.uris")))
+ assert(aliasAndToken._2 != null)
+
+ val token = aliasAndToken._2
val tokenIdent =
token.decodeIdentifier().asInstanceOf[DelegationTokenIdentifier]
assertResult(DelegationTokenIdentifier.HIVE_DELEGATION_KIND)(token.getKind)
assertResult(new Text(owner))(tokenIdent.getOwner)