[ https://issues.apache.org/jira/browse/HIVE-24359?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17229536#comment-17229536 ]
Rajkumar Singh commented on HIVE-24359:
---------------------------------------
even without HIVE-24089 this will fail or hang if doAs is enabled, because we create the proxy user here
https://github.com/apache/hive/blob/master/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java#L539
whenever the table/sds path owner is different from the hive user.
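A rough sketch of that doAs path (my own reconstruction for illustration, not the actual Worker.java code; the class and method names are made up except for the Hadoop UGI and FileSystem APIs): when the table/sds directory owner differs from the hive user, the compaction runs inside a proxy UGI that carries no Kerberos credentials of its own, which is what later triggers the stdin prompt described in the issue below.
{code}
// Hypothetical sketch of the doAs wrapping around Worker.java#L539 (not the real code):
// the compactor resolves the owner of the table/partition directory and, if it is not
// the hive user, runs the compaction inside a proxy UGI created for that owner.
import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class CompactionDoAsSketch {
  static void runCompactionAs(Configuration conf, Path tablePath, Runnable compaction)
      throws Exception {
    FileSystem fs = tablePath.getFileSystem(conf);
    String dirOwner = fs.getFileStatus(tablePath).getOwner();
    UserGroupInformation loginUser = UserGroupInformation.getLoginUser(); // hive keytab login

    if (dirOwner.equals(loginUser.getShortUserName())) {
      compaction.run(); // same user, no impersonation needed
    } else {
      // Proxy UGI for the directory owner: it has no Kerberos credentials of its own,
      // so any SASL/GSSAPI connection opened inside this doAs block finds no TGT in the
      // Subject and (with useSubjectCredsOnly=false) falls back to an interactive login.
      UserGroupInformation proxy =
          UserGroupInformation.createProxyUser(dirOwner, loginUser);
      proxy.doAs((PrivilegedExceptionAction<Void>) () -> {
        compaction.run();
        return null;
      });
    }
  }
}
{code}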
> Hive Compaction hangs because of doAs when worker set to HS2
> ------------------------------------------------------------
>
> Key: HIVE-24359
> URL: https://issues.apache.org/jira/browse/HIVE-24359
> Project: Hive
> Issue Type: Bug
> Components: HiveServer2, Transactions
> Reporter: Chiran Ravani
> Priority: Critical
>
> When creating a managed table and inserting data using Impala, with the
> compaction worker set to HiveServer2, in a secured environment (Kerberized
> cluster), the worker thread hangs indefinitely, waiting for the user to
> provide Kerberos credentials on STDIN.
> The problem appears to be that no login context is passed from HS2 to HMS as
> part of QueryCompactor, and the HS2 JVM has the property
> javax.security.auth.useSubjectCredsOnly set to false, which causes it to
> prompt for a login via stdin. Setting it to true does not help either, as the
> context does not seem to be passed in any case.
> Below is the relevant HS2 jstack output. Note that the thread is waiting on
> stdin in "com.sun.security.auth.module.Krb5LoginModule.promptForName":
> {code}
> "c570-node2.abc.host.com-44_executor" #47 daemon prio=1 os_prio=0
> tid=0x0000000001506000 nid=0x1348 runnable [0x00007f1beea95000]
> java.lang.Thread.State: RUNNABLE
> at java.io.FileInputStream.readBytes(Native Method)
> at java.io.FileInputStream.read(FileInputStream.java:255)
> at java.io.BufferedInputStream.read1(BufferedInputStream.java:284)
> at java.io.BufferedInputStream.read(BufferedInputStream.java:345)
> - locked <0x000000009fa38c90> (a java.io.BufferedInputStream)
> at sun.nio.cs.StreamDecoder.readBytes(StreamDecoder.java:284)
> at sun.nio.cs.StreamDecoder.implRead(StreamDecoder.java:326)
> at sun.nio.cs.StreamDecoder.read(StreamDecoder.java:178)
> - locked <0x000000008c7d5010> (a java.io.InputStreamReader)
> at java.io.InputStreamReader.read(InputStreamReader.java:184)
> at java.io.BufferedReader.fill(BufferedReader.java:161)
> at java.io.BufferedReader.readLine(BufferedReader.java:324)
> - locked <0x000000008c7d5010> (a java.io.InputStreamReader)
> at java.io.BufferedReader.readLine(BufferedReader.java:389)
> at
> com.sun.security.auth.callback.TextCallbackHandler.readLine(TextCallbackHandler.java:153)
> at
> com.sun.security.auth.callback.TextCallbackHandler.handle(TextCallbackHandler.java:120)
> at
> com.sun.security.auth.module.Krb5LoginModule.promptForName(Krb5LoginModule.java:862)
> at
> com.sun.security.auth.module.Krb5LoginModule.attemptAuthentication(Krb5LoginModule.java:708)
> at
> com.sun.security.auth.module.Krb5LoginModule.login(Krb5LoginModule.java:617)
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> at
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> at
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> at java.lang.reflect.Method.invoke(Method.java:498)
> at
> javax.security.auth.login.LoginContext.invoke(LoginContext.java:755)
> at
> javax.security.auth.login.LoginContext.access$000(LoginContext.java:195)
> at javax.security.auth.login.LoginContext$4.run(LoginContext.java:682)
> at javax.security.auth.login.LoginContext$4.run(LoginContext.java:680)
> at java.security.AccessController.doPrivileged(Native Method)
> at
> javax.security.auth.login.LoginContext.invokePriv(LoginContext.java:680)
> at javax.security.auth.login.LoginContext.login(LoginContext.java:587)
> at sun.security.jgss.GSSUtil.login(GSSUtil.java:258)
> at sun.security.jgss.krb5.Krb5Util.getInitialTicket(Krb5Util.java:175)
> at
> sun.security.jgss.krb5.Krb5InitCredential$1.run(Krb5InitCredential.java:341)
> at
> sun.security.jgss.krb5.Krb5InitCredential$1.run(Krb5InitCredential.java:337)
> at java.security.AccessController.doPrivileged(Native Method)
> at
> sun.security.jgss.krb5.Krb5InitCredential.getTgt(Krb5InitCredential.java:336)
> at
> sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:146)
> at
> sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122)
> at
> sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:189)
> at
> sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224)
> at
> sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212)
> at
> sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
> at
> com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192)
> at
> org.apache.thrift.transport.TSaslClientTransport.handleSaslStartMessage(TSaslClientTransport.java:94)
> at
> org.apache.thrift.transport.TSaslTransport.open(TSaslTransport.java:271)
> at
> org.apache.thrift.transport.TSaslClientTransport.open(TSaslClientTransport.java:37)
> at
> org.apache.hadoop.hive.metastore.security.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:51)
> at
> org.apache.hadoop.hive.metastore.security.TUGIAssumingTransport$1.run(TUGIAssumingTransport.java:48)
> at java.security.AccessController.doPrivileged(Native Method)
> at javax.security.auth.Subject.doAs(Subject.java:422)
> at
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1898)
> at
> org.apache.hadoop.hive.metastore.security.TUGIAssumingTransport.open(TUGIAssumingTransport.java:48)
> at
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:631)
> at
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:241)
> at
> org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:118)
> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> Method)
> at
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> at
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
> at
> org.apache.hadoop.hive.metastore.utils.JavaUtils.newInstance(JavaUtils.java:84)
> at
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:96)
> at
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:149)
> at
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:120)
> at
> org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:5092)
> at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:5160)
> - locked <0x000000008c6b2118> (a
> org.apache.hadoop.hive.ql.metadata.Hive)
> at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:5140)
> - locked <0x000000008c6b2118> (a
> org.apache.hadoop.hive.ql.metadata.Hive)
> at org.apache.hadoop.hive.ql.Compiler.analyze(Compiler.java:190)
> at org.apache.hadoop.hive.ql.Compiler.compile(Compiler.java:104)
> at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:194)
> at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:605)
> at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:663)
> at org.apache.hadoop.hive.ql.Driver.run(Driver.java:495)
> at org.apache.hadoop.hive.ql.Driver.run(Driver.java:484)
> at
> org.apache.hadoop.hive.ql.DriverUtils.runOnDriver(DriverUtils.java:70)
> at
> org.apache.hadoop.hive.ql.DriverUtils.runOnDriver(DriverUtils.java:50)
> at
> org.apache.hadoop.hive.ql.txn.compactor.QueryCompactor.runCompactionQueries(QueryCompactor.java:122)
> at
> org.apache.hadoop.hive.ql.txn.compactor.MmMajorQueryCompactor.runCompaction(MmMajorQueryCompactor.java:68)
> at
> org.apache.hadoop.hive.ql.txn.compactor.CompactorMR.run(CompactorMR.java:234)
> at
> org.apache.hadoop.hive.ql.txn.compactor.Worker$1.run(Worker.java:542)
> at java.security.AccessController.doPrivileged(Native Method)
> at javax.security.auth.Subject.doAs(Subject.java:422)
> at
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1898)
> at
> org.apache.hadoop.hive.ql.txn.compactor.Worker.findNextCompactionAndExecute(Worker.java:539)
> at
> org.apache.hadoop.hive.ql.txn.compactor.Worker.lambda$run$0(Worker.java:105)
> at
> org.apache.hadoop.hive.ql.txn.compactor.Worker$$Lambda$109/1712344484.call(Unknown
> Source)
> at java.util.concurrent.FutureTask.run(FutureTask.java:266)
> at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
> at java.lang.Thread.run(Thread.java:748)
> {code}
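> As a minimal illustration of the behaviour described above (my own standalone
> sketch, not part of Hive; the JAAS entry name "KrbLogin" is hypothetical), the
> following shows how a JVM with no Kerberos credentials in the current Subject
> ends up blocked on stdin in Krb5LoginModule.promptForName:
> {code}
> // Standalone sketch (illustration only, not Hive code). The hypothetical JAAS entry
> // "KrbLogin" must map to com.sun.security.auth.module.Krb5LoginModule in the JVM's
> // JAAS configuration. javax.security.auth.useSubjectCredsOnly=false is what lets JGSS
> // fall back to a JAAS Kerberos login of its own when the current Subject carries no
> // credentials; this sketch performs that login directly to show the effect: with no
> // keytab or ticket cache, Krb5LoginModule asks TextCallbackHandler for the principal
> // name, which blocks reading stdin -- the promptForName/readLine frames seen above.
> import javax.security.auth.login.LoginContext;
> import com.sun.security.auth.callback.TextCallbackHandler;
>
> public class StdinPromptSketch {
>     public static void main(String[] args) throws Exception {
>         // TextCallbackHandler is the same JDK handler seen in the jstack; a
>         // non-interactive HS2 worker thread has nothing to feed it, so it hangs here.
>         LoginContext lc = new LoginContext("KrbLogin", new TextCallbackHandler());
>         lc.login();
>     }
> }
> {code}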
> The problem becomes even worse: it ends up holding all compilation threads
> (whose number is restricted by, say,
> hive.driver.parallel.compilation.global.limit=3) indefinitely, and on top of
> that the compaction process does not run, causing too many small files and
> performance issues.
> The issue appears to have started with the change in HIVE-24089.