[ https://issues.apache.org/jira/browse/HIVE-27816?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17778587#comment-17778587 ]
xiongyinke commented on HIVE-27816:
-----------------------------------
https://github.com/apache/hive/pull/4821
> Hive Cli insert iceberg table exception of GSS initiate failed
> --------------------------------------------------------------
>
> Key: HIVE-27816
> URL: https://issues.apache.org/jira/browse/HIVE-27816
> Project: Hive
> Issue Type: Improvement
> Affects Versions: 2.3.8, 3.1.3, 4.0.0-alpha-2
> Reporter: xiongyinke
> Assignee: xiongyinke
> Priority: Major
>
> Hive 2.3.8 + Iceberg 1.4.0 + MR
> Similar to [https://github.com/apache/iceberg/issues/3127]: when I submit a
> Hive CLI job that inserts into an Iceberg table against a Kerberos-secured HMS,
> the job fails with the exception below.
> {code:java}
> 2023-10-19 20:12:16,898 ERROR [CommitterEvent Processor #1] org.apache.thrift.transport.TSaslTransport: SASL negotiation failure
> javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)]
>     at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211)
>     at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
>     at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
>     at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
>     at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:52)
>     at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:49)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1845)
>     at org.apache.hadoop.hive.thrift.client.TUGIAssumingTransport.open(TUGIAssumingTransport.java:49)
>     at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:492)
>     at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:270)
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>     at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>     at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
>     at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1718)
>     at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
>     at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
>     at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
>     at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:97)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:498)
>     at org.apache.iceberg.common.DynMethods$UnboundMethod.invokeChecked(DynMethods.java:60)
>     at org.apache.iceberg.common.DynMethods$UnboundMethod.invoke(DynMethods.java:72)
>     at org.apache.iceberg.common.DynMethods$StaticMethod.invoke(DynMethods.java:185)
>     at org.apache.iceberg.hive.HiveClientPool.newClient(HiveClientPool.java:63)
>     at org.apache.iceberg.hive.HiveClientPool.newClient(HiveClientPool.java:34)
>     at org.apache.iceberg.ClientPoolImpl.get(ClientPoolImpl.java:125)
>     at org.apache.iceberg.ClientPoolImpl.run(ClientPoolImpl.java:56)
>     at org.apache.iceberg.ClientPoolImpl.run(ClientPoolImpl.java:51)
>     at org.apache.iceberg.hive.CachedClientPool.run(CachedClientPool.java:122)
>     at org.apache.iceberg.hive.HiveTableOperations.doRefresh(HiveTableOperations.java:158)
>     at org.apache.iceberg.BaseMetastoreTableOperations.refresh(BaseMetastoreTableOperations.java:97)
>     at org.apache.iceberg.BaseMetastoreTableOperations.current(BaseMetastoreTableOperations.java:80)
>     at org.apache.iceberg.BaseMetastoreCatalog.loadTable(BaseMetastoreCatalog.java:47)
>     at org.apache.iceberg.mr.Catalogs.loadTable(Catalogs.java:124)
>     at org.apache.iceberg.mr.Catalogs.loadTable(Catalogs.java:111)
>     at org.apache.iceberg.mr.hive.HiveIcebergOutputCommitter.commitTable(HiveIcebergOutputCommitter.java:320)
>     at org.apache.iceberg.mr.hive.HiveIcebergOutputCommitter.lambda$commitJob$2(HiveIcebergOutputCommitter.java:214)
>     at org.apache.iceberg.util.Tasks$Builder.runTaskWithRetry(Tasks.java:413)
>     at org.apache.iceberg.util.Tasks$Builder.runSingleThreaded(Tasks.java:219)
>     at org.apache.iceberg.util.Tasks$Builder.run(Tasks.java:203)
>     at org.apache.iceberg.util.Tasks$Builder.run(Tasks.java:196)
>     at org.apache.iceberg.mr.hive.HiveIcebergOutputCommitter.commitJob(HiveIcebergOutputCommitter.java:207)
>     at org.apache.hadoop.mapred.OutputCommitter.commitJob(OutputCommitter.java:291)
>     at org.apache.hadoop.mapreduce.v2.app.commit.CommitterEventHandler$EventProcessor.handleJobCommit(CommitterEventHandler.java:286)
>     at org.apache.hadoop.mapreduce.v2.app.commit.CommitterEventHandler$EventProcessor.run(CommitterEventHandler.java:238)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>     at java.lang.Thread.run(Thread.java:748)
> Caused by: GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
>     at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147)
>     at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
>     at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187)
>     at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
>     at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
>     at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
>     at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
>     ... 52 more{code}
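
In the trace above, the HMS connection is opened from the MR ApplicationMaster's CommitterEvent processor thread (HiveIcebergOutputCommitter.commitJob -> HiveClientPool -> HiveMetaStoreClient), where no Kerberos TGT from the submitting user's kinit is available, so the GSSAPI handshake fails. A common workaround for this class of failure is to obtain an HMS delegation token on the client side, while the TGT is still present, and ship it with the job credentials so commit-time HMS calls authenticate with the token instead of a ticket. The sketch below is only a hypothetical illustration of that pattern; it is not taken from the linked pull request, and the class name, token alias, and the point at which it would be invoked are assumptions.
{code:java}
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;

/**
 * Hypothetical sketch: fetch an HMS delegation token on the client (where the
 * Kerberos TGT exists) and attach it to the job credentials so that the
 * committer running in the ApplicationMaster can authenticate without a TGT.
 */
public class HmsTokenShippingSketch {

  // Arbitrary alias used to store and later look up the token.
  private static final Text HMS_TOKEN_ALIAS = new Text("HiveMetastoreDelegationToken");

  public static void attachHmsDelegationToken(Job job, HiveConf hiveConf) throws Exception {
    if (!UserGroupInformation.isSecurityEnabled()) {
      return; // nothing to do on an unsecured cluster
    }

    String user = UserGroupInformation.getCurrentUser().getUserName();

    // This call runs on the client, where the kinit credentials are valid.
    IMetaStoreClient client = new HiveMetaStoreClient(hiveConf);
    try {
      // Owner and renewer are both the submitting user in this simplified example.
      String tokenStr = client.getDelegationToken(user, user);

      Token<AbstractDelegationTokenIdentifier> token = new Token<>();
      token.decodeFromUrlString(tokenStr);
      token.setService(HMS_TOKEN_ALIAS);

      // Ship the token with the job so committer-side HMS clients can find it.
      job.getCredentials().addToken(HMS_TOKEN_ALIAS, token);

      // Existing Hive property telling the client which token service to select.
      job.getConfiguration().set("hive.metastore.token.signature", HMS_TOKEN_ALIAS.toString());
    } finally {
      client.close();
    }
  }
}
{code}
Whether the actual change in https://github.com/apache/hive/pull/4821 follows this token-shipping pattern or instead re-authenticates inside the committer is not shown here; the sketch only illustrates the general approach to avoiding "Failed to find any Kerberos tgt" in commit-time HMS calls.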