[
https://issues.apache.org/jira/browse/HIVE-26739?focusedWorklogId=839812&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-839812
]
ASF GitHub Bot logged work on HIVE-26739:
-----------------------------------------
Author: ASF GitHub Bot
Created on: 18/Jan/23 03:17
Start Date: 18/Jan/23 03:17
Worklog Time Spent: 10m
Work Description: xiuzhu9527 commented on PR #3764:
URL: https://github.com/apache/hive/pull/3764#issuecomment-1386425591
thx!
Issue Time Tracking
-------------------
Worklog Id: (was: 839812)
Time Spent: 50m (was: 40m)
> When Kerberos is enabled, HiveServer2 fails to connect to the metastore: No
> valid credentials provided
> -----------------------------------------------------------------------------------------------
>
> Key: HIVE-26739
> URL: https://issues.apache.org/jira/browse/HIVE-26739
> Project: Hive
> Issue Type: Bug
> Affects Versions: 2.0.0, 3.0.0
> Reporter: weiliang hao
> Priority: Major
> Labels: pull-request-available
> Time Spent: 50m
> Remaining Estimate: 0h
>
> If the environment variable HADOOP_USER_NAME is set, HiveServer2 fails to
> connect to the metastore with "No valid credentials provided".
> The problem is in the getUGI() method of org.apache.hadoop.hive.shims.Utils,
> which obtains the UGI. It should first check
> UserGroupInformation.isSecurityEnabled(): if true, return
> UserGroupInformation.getCurrentUser(); if false, create the UGI from the user
> name in the HADOOP_USER_NAME environment variable (see the sketch below).
>
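> A minimal sketch of that check, assuming the existing Utils.getUGI() signature
> (illustrative only, not the committed patch; createRemoteUser is one possible
> way to build the UGI from the environment variable). The stack trace that
> follows shows the failure when the check is missing:
>
> {code:java}
> // imports assumed for this method
> import java.io.IOException;
> import javax.security.auth.login.LoginException;
> import org.apache.hadoop.security.UserGroupInformation;
>
> public static UserGroupInformation getUGI() throws LoginException, IOException {
>   if (UserGroupInformation.isSecurityEnabled()) {
>     // With Kerberos enabled, keep the logged-in (keytab/TGT) user so the
>     // SASL/GSSAPI handshake with the metastore has valid Kerberos credentials.
>     return UserGroupInformation.getCurrentUser();
>   }
>   String doAs = System.getenv("HADOOP_USER_NAME");
>   if (doAs != null && doAs.length() > 0) {
>     // Fall back to the HADOOP_USER_NAME user only when security is disabled.
>     return UserGroupInformation.createRemoteUser(doAs);
>   }
>   return UserGroupInformation.getCurrentUser();
> }
> {code}
>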
> {code:java}
> 2022-11-15T15:41:06,971 ERROR [HiveServer2-Background-Pool: Thread-36] transport.TSaslTransport: SASL negotiation failure
> javax.security.sasl.SaslException: GSS initiate failed
>     at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211) ~[?:1.8.0_144]
>     at org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.metastore.security.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:51) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.metastore.security.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:48) ~[hive-exec-3.1.3.jar:3.1.3]
>     at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_144]
>     at javax.security.auth.Subject.doAs(Subject.java:422) ~[?:1.8.0_144]
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730) ~[hadoop-common-3.2.1.jar:?]
>     at org.apache.hadoop.hive.metastore.security.TUGIAssumingTransport.open(TUGIAssumingTransport.java:48) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:516) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:224) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:94) ~[hive-exec-3.1.3.jar:3.1.3]
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[?:1.8.0_144]
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[?:1.8.0_144]
>     at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[?:1.8.0_144]
>     at java.lang.reflect.Constructor.newInstance(Constructor.java:423) ~[?:1.8.0_144]
>     at org.apache.hadoop.hive.metastore.utils.JavaUtils.newInstance(JavaUtils.java:84) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:95) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:148) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:119) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:4306) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:4374) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:4354) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.ql.metadata.Hive.getDatabase(Hive.java:1662) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.ql.metadata.Hive.databaseExists(Hive.java:1651) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.ql.exec.DDLTask.showTablesOrViews(DDLTask.java:2824) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:509) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2664) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2335) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:2011) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1709) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1703) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) ~[hive-exec-3.1.3.jar:3.1.3]
>     at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:224) ~[hive-service-3.1.3.jar:3.1.3]
>     at org.apache.hive.service.cli.operation.SQLOperation.access$700(SQLOperation.java:87) ~[hive-service-3.1.3.jar:3.1.3]
>     at org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork$1.run(SQLOperation.java:316) ~[hive-service-3.1.3.jar:3.1.3]
>     at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_144]
>     at javax.security.auth.Subject.doAs(Subject.java:422) ~[?:1.8.0_144]
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1730) ~[hadoop-common-3.2.1.jar:?]
>     at org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork.run(SQLOperation.java:329) ~[hive-service-3.1.3.jar:3.1.3]
>     at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) ~[?:1.8.0_144]
>     at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[?:1.8.0_144]
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) ~[?:1.8.0_144]
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) ~[?:1.8.0_144]
>     at java.lang.Thread.run(Thread.java:748) [?:1.8.0_144]
> Caused by: org.ietf.jgss.GSSException: No valid credentials provided (Mechanism level: Failed to find any Kerberos tgt)
>     at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:147) ~[?:1.8.0_144]
>     at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122) ~[?:1.8.0_144]
>     at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187) ~[?:1.8.0_144]
>     at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224) ~[?:1.8.0_144]
>     at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212) ~[?:1.8.0_144]
>     at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179) ~[?:1.8.0_144]
>     at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192) ~[?:1.8.0_144]
>     ... 47 more {code}