zxl-333 commented on issue #6890:
URL: https://github.com/apache/kyuubi/issues/6890#issuecomment-2592446070

   When I use the following configuration, the local Iceberg table can be 
queried, but for the remote Iceberg table the **metadata** cannot be 
queried, even though the log output looks normal
   
   spark.kerberos.access.hadoopFileSystems  hdfs://myns,hdfs://mynsbackup
   spark.sql.catalog.hive_catalog     
org.apache.kyuubi.spark.connector.hive.HiveTableCatalog
   spark.sql.catalog.hive_catalog.hive.metastore.uris     
thrift://bigdata-1734405115-lhalh:9083,thrift://bigdata-1734405115-0xt70:9083
   
spark.sql.catalog.hive_catalog.hive.metastore.token.signature=thrift://bigdata-1734405115-lhalh:9083,thrift://bigdata-1734405115-0xt70:9083
   
   
   #local cluster metastore
   spark.sql.catalog.spark_catalog=org.apache.iceberg.spark.SparkSessionCatalog
   #spark.sql.catalog.spark_catalog=org.apache.iceberg.spark.SparkCatalog
   spark.sql.catalog.spark_catalog.type=hive
   
spark.sql.catalog.spark_catalog.uri=thrift://bigdata-1734358521-u7gjy:9083,thrift://bigdata-1734358521-gsy9x:9083
   
   # another cluster metastore (remote metastore)
   spark.sql.catalog.spark_catalog_ky=org.apache.iceberg.spark.SparkCatalog
   spark.sql.catalog.spark_catalog_ky.type=hive
   
spark.sql.catalog.spark_catalog_ky.uri=thrift://bigdata-1734405115-lhalh:9083,thrift://bigdata-1734405115-0xt70:9083
   
   
spark.sql.catalog.spark_catalog_ky=org.apache.kyuubi.spark.connector.hive.HiveTableCatalog
   
spark.sql.catalog.spark_catalog_ky.hive.metastore.uris=thrift://bigdata-1734405115-lhalh:9083,thrift://bigdata-1734405115-0xt70:9083
   
spark.sql.catalog.spark_catalog_ky.hive.metastore.kerberos.principal=hive/_h...@mr.0c20903122394b4293a44ead5cd1a27e.yun.cn
   spark.sql.catalog.spark_catalog_ky.hive.metastore.sasl.enabled=true
   
spark.sql.catalog.spark_catalog_ky.hive.metastore.token.signature=thrift://bigdata-1734405115-lhalh:9083,thrift://bigdata-1734405115-0xt70:9083
   
   
   
   I see the following information in Kyuubi's engine log:
   
   25/01/15 19:03:49 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of 
name hive.privilege.synchronizer does not exist
   25/01/15 19:03:49 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of 
name hive.server2.webui.cors.allowed.headers does not exist
   25/01/15 19:03:49 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of 
name hive.hook.proto.base-directory does not exist
   25/01/15 19:03:49 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of 
name hive.service.metrics.codahale.reporter.classes does not exist
   25/01/15 19:03:49 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of 
name hive.metastore.db.type does not exist
   25/01/15 19:03:49 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of 
name hive.metastore.warehouse.external.dir does not exist
   25/01/15 19:03:49 WARN org.apache.hadoop.hive.conf.HiveConf: HiveConf of 
name hive.server2.webui.enable.cors does not exist
   25/01/15 19:03:49 INFO 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider:
 Getting Hive delegation token for hdfs against 
hive/_h...@mr.0c20903122394b4293a44ead5cd1a27e.ctyun.cn at 
thrift://bigdata-1734405115-lhalh:9083,thrift://bigdata-1734405115-0xt70:9083
   25/01/15 19:03:49 DEBUG org.apache.hadoop.security.UserGroupInformation: 
PrivilegedAction [as: 
hive/bigdata-1734358521-cp...@mr.733e690fe0a842a2a587a467c9a50520.yun.cn 
(auth:KERBEROS)][action: 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider$$anon$1@674da77b]
   java.lang.Exception
           at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1875)
           at 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider.doAsRealUser(KyuubiHiveConnectorDelegationTokenProvider.scala:188)
           at 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider.$anonfun$obtainDelegationTokens$7(KyuubiHiveConnectorDelegationTokenProvider.scala:153)
           at 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider.$anonfun$obtainDelegationTokens$7$adapted(KyuubiHiveConnectorDelegationTokenProvider.scala:136)
           at scala.Option.foreach(Option.scala:407)
           at 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider.$anonfun$obtainDelegationTokens$6(KyuubiHiveConnectorDelegationTokenProvider.scala:136)
           at org.apache.spark.util.Utils$.tryLogNonFatalError(Utils.scala:1484)
           at 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider.$anonfun$obtainDelegationTokens$5(KyuubiHiveConnectorDelegationTokenProvider.scala:136)
           at 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider.$anonfun$obtainDelegationTokens$5$adapted(KyuubiHiveConnectorDelegationTokenProvider.scala:133)
           at scala.collection.immutable.Set$Set2.foreach(Set.scala:181)
           at 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider.obtainDelegationTokens(KyuubiHiveConnectorDelegationTokenProvider.scala:133)
           at 
org.apache.spark.deploy.security.HadoopDelegationTokenManager.$anonfun$obtainDelegationTokens$2(HadoopDelegationTokenManager.scala:164)
           at 
scala.collection.TraversableLike.$anonfun$flatMap$1(TraversableLike.scala:293)
           at scala.collection.Iterator.foreach(Iterator.scala:943)
           at scala.collection.Iterator.foreach$(Iterator.scala:943)
           at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
           at 
scala.collection.MapLike$DefaultValuesIterable.foreach(MapLike.scala:214)
           at 
scala.collection.TraversableLike.flatMap(TraversableLike.scala:293)
           at 
scala.collection.TraversableLike.flatMap$(TraversableLike.scala:290)
           at 
scala.collection.AbstractTraversable.flatMap(Traversable.scala:108)
           at 
org.apache.spark.deploy.security.HadoopDelegationTokenManager.org$apache$spark$deploy$security$HadoopDelegationTokenManager$$obtainDelegationTokens(HadoopDelegationTokenManager.scala:162)
           at 
org.apache.spark.deploy.security.HadoopDelegationTokenManager$$anon$2.run(HadoopDelegationTokenManager.scala:148)
           at 
org.apache.spark.deploy.security.HadoopDelegationTokenManager$$anon$2.run(HadoopDelegationTokenManager.scala:146)
           at java.security.AccessController.doPrivileged(Native Method)
           at javax.security.auth.Subject.doAs(Subject.java:422)
           at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
           at 
org.apache.spark.deploy.security.HadoopDelegationTokenManager.obtainDelegationTokens(HadoopDelegationTokenManager.scala:146)
           at 
org.apache.spark.deploy.yarn.Client.setupSecurityToken(Client.scala:352)
           at 
org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:1140)
           at 
org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:220)
           at org.apache.spark.deploy.yarn.Client.run(Client.scala:1327)
           at 
org.apache.spark.deploy.yarn.YarnClusterApplication.start(Client.scala:1764)
           at 
org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:984)
           at 
org.apache.spark.deploy.SparkSubmit$$anon$1.run(SparkSubmit.scala:175)
           at 
org.apache.spark.deploy.SparkSubmit$$anon$1.run(SparkSubmit.scala:173)
           at java.security.AccessController.doPrivileged(Native Method)
           at javax.security.auth.Subject.doAs(Subject.java:422)
           at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
           at 
org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:173)
           at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:214)
           at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
           at 
org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1072)
           at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1081)
           at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
   25/01/15 19:03:49 INFO hive.metastore: Trying to connect to metastore with 
URI thrift://bigdata-1734405115-0xt70:9083
   25/01/15 19:03:49 DEBUG org.apache.hadoop.security.UserGroupInformation: 
PrivilegedAction [as: 
hive/bigdata-1734358521-cp...@mr.733e690fe0a842a2a587a467c9a50520.yun.cn 
(auth:KERBEROS)][action: 
org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge$Client$1@5934ca1e]
   java.lang.Exception
           at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1875)
           at 
org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge$Client.createClientTransport(HadoopThriftAuthBridge.java:208)
           at 
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:432)
           at 
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:245)
           at 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider.$anonfun$obtainDelegationTokens$10(KyuubiHiveConnectorDelegationTokenProvider.scala:154)
           at 
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
           at 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider$$anon$1.run(KyuubiHiveConnectorDelegationTokenProvider.scala:189)
           at java.security.AccessController.doPrivileged(Native Method)
           at javax.security.auth.Subject.doAs(Subject.java:422)
           at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
           at 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider.doAsRealUser(KyuubiHiveConnectorDelegationTokenProvider.scala:188)
           at 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider.$anonfun$obtainDelegationTokens$7(KyuubiHiveConnectorDelegationTokenProvider.scala:153)
           at 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider.$anonfun$obtainDelegationTokens$7$adapted(KyuubiHiveConnectorDelegationTokenProvider.scala:136)
           at scala.Option.foreach(Option.scala:407)
           at 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider.$anonfun$obtainDelegationTokens$6(KyuubiHiveConnectorDelegationTokenProvider.scala:136)
           at org.apache.spark.util.Utils$.tryLogNonFatalError(Utils.scala:1484)
           at 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider.$anonfun$obtainDelegationTokens$5(KyuubiHiveConnectorDelegationTokenProvider.scala:136)
           at 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider.$anonfun$obtainDelegationTokens$5$adapted(KyuubiHiveConnectorDelegationTokenProvider.scala:133)
           at scala.collection.immutable.Set$Set2.foreach(Set.scala:181)
           at 
org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorDelegationTokenProvider.obtainDelegationTokens(KyuubiHiveConnectorDelegationTokenProvider.scala:133)
           at 
org.apache.spark.deploy.security.HadoopDelegationTokenManager.$anonfun$obtainDelegationTokens$2(HadoopDelegationTokenManager.scala:164)
           at 
scala.collection.TraversableLike.$anonfun$flatMap$1(TraversableLike.scala:293)
           at scala.collection.Iterator.foreach(Iterator.scala:943)
           at scala.collection.Iterator.foreach$(Iterator.scala:943)
           at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
           at 
scala.collection.MapLike$DefaultValuesIterable.foreach(MapLike.scala:214)
           at 
scala.collection.TraversableLike.flatMap(TraversableLike.scala:293)
           at 
scala.collection.TraversableLike.flatMap$(TraversableLike.scala:290)
           at 
scala.collection.AbstractTraversable.flatMap(Traversable.scala:108)
           at 
org.apache.spark.deploy.security.HadoopDelegationTokenManager.org$apache$spark$deploy$security$HadoopDelegationTokenManager$$obtainDelegationTokens(HadoopDelegationTokenManager.scala:162)
           at 
org.apache.spark.deploy.security.HadoopDelegationTokenManager$$anon$2.run(HadoopDelegationTokenManager.scala:148)
           at 
org.apache.spark.deploy.security.HadoopDelegationTokenManager$$anon$2.run(HadoopDelegationTokenManager.scala:146)
           at java.security.AccessController.doPrivileged(Native Method)
           at javax.security.auth.Subject.doAs(Subject.java:422)
           at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
           at 
org.apache.spark.deploy.security.HadoopDelegationTokenManager.obtainDelegationTokens(HadoopDelegationTokenManager.scala:146)
           at 
org.apache.spark.deploy.yarn.Client.setupSecurityToken(Client.scala:352)
           at 
org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:1140)
           at 
org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:220)
           at org.apache.spark.deploy.yarn.Client.run(Client.scala:1327)
           at 
org.apache.spark.deploy.yarn.YarnClusterApplication.start(Client.scala:1764)
           at 
org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:984)
           at 
org.apache.spark.deploy.SparkSubmit$$anon$1.run(SparkSubmit.scala:175)
           at 
org.apache.spark.deploy.SparkSubmit$$anon$1.run(SparkSubmit.scala:173)
           at java.security.AccessController.doPrivileged(Native Method)
           at javax.security.auth.Subject.doAs(Subject.java:422)
           at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
           at 
org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:173)
           at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:214)
           at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
           at 
org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1072)
           at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1081)
           at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
   


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: notifications-unsubscr...@kyuubi.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: notifications-unsubscr...@kyuubi.apache.org
For additional commands, e-mail: notifications-h...@kyuubi.apache.org

Reply via email to