[
https://issues.apache.org/jira/browse/HIVE-24159?focusedWorklogId=486885&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-486885
]
ASF GitHub Bot logged work on HIVE-24159:
-----------------------------------------
Author: ASF GitHub Bot
Created on: 21/Sep/20 12:16
Start Date: 21/Sep/20 12:16
Worklog Time Spent: 10m
Work Description: abstractdog merged pull request #1495:
URL: https://github.com/apache/hive/pull/1495
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
Issue Time Tracking
-------------------
Worklog Id: (was: 486885)
Time Spent: 40m (was: 0.5h)
> Kafka storage handler broken in secure environment pt2: short-circuit on
> non-secure environment
> -----------------------------------------------------------------------------------------------
>
> Key: HIVE-24159
> URL: https://issues.apache.org/jira/browse/HIVE-24159
> Project: Hive
> Issue Type: Improvement
> Reporter: László Bodor
> Assignee: László Bodor
> Priority: Major
> Labels: pull-request-available
> Time Spent: 40m
> Remaining Estimate: 0h
>
> As kafka_storage_handler.q was disabled by HIVE-23985, I hadn't realized
> upstream that the Kafka qtest fails. Instead of setting up a kerberized
> environment in qtest (which doesn't seem to be a usual use case; e.g. I haven't
> seen hive.server2.authentication.kerberos.principal used in any *.q files), I
> managed to make the test pass with a simple
> UserGroupInformation.isSecurityEnabled() check, which can also be useful in
> any non-secure environment.
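> For illustration, a minimal sketch of such a short-circuit (the class and
> method names below are hypothetical, not the committed patch; the actual
> guard would sit where DagUtils acquires the Kafka delegation tokens):
> {code}
> import org.apache.hadoop.conf.Configuration;
> import org.apache.hadoop.security.Credentials;
> import org.apache.hadoop.security.UserGroupInformation;
>
> // Hypothetical helper: skip Kafka delegation token handling entirely when
> // Kerberos security is off, so no SASL/GSSAPI login is ever attempted.
> public final class KafkaTokenGuard {
>   public static void addKafkaCredentials(Configuration conf, Credentials credentials) {
>     if (!UserGroupInformation.isSecurityEnabled()) {
>       // Non-secure environment: nothing to negotiate with the brokers.
>       return;
>     }
>     // ... build a KafkaAdminClient from conf and add the obtained delegation
>     // tokens for the configured brokers to credentials ...
>   }
> }
> {code}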
> For reference, the exception was:
> {code}
> 2020-09-14T03:30:01,217 ERROR [a42ef4c6-190c-47a6-86ad-8bf13b8a2dc1 main] tez.TezTask: Failed to execute tez graph.
> org.apache.kafka.common.KafkaException: Failed to create new KafkaAdminClient
>     at org.apache.kafka.clients.admin.KafkaAdminClient.createInternal(KafkaAdminClient.java:451) ~[kafka-clients-2.4.1.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.kafka.clients.admin.Admin.create(Admin.java:59) ~[kafka-clients-2.4.1.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.kafka.clients.admin.AdminClient.create(AdminClient.java:39) ~[kafka-clients-2.4.1.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.hadoop.hive.ql.exec.tez.DagUtils.getKafkaDelegationTokenForBrokers(DagUtils.java:333) ~[hive-exec-3.1.3000.7.1.4.0-SNAPSHOT.jar:3.1.3000.7.1.4.0-SNAPSHOT]
>     at org.apache.hadoop.hive.ql.exec.tez.DagUtils.getKafkaCredentials(DagUtils.java:301) ~[hive-exec-3.1.3000.7.1.4.0-SNAPSHOT.jar:3.1.3000.7.1.4.0-SNAPSHOT]
>     at org.apache.hadoop.hive.ql.exec.tez.DagUtils.addCredentials(DagUtils.java:282) ~[hive-exec-3.1.3000.7.1.4.0-SNAPSHOT.jar:3.1.3000.7.1.4.0-SNAPSHOT]
>     at org.apache.hadoop.hive.ql.exec.tez.TezTask.build(TezTask.java:516) ~[hive-exec-3.1.3000.7.1.4.0-SNAPSHOT.jar:3.1.3000.7.1.4.0-SNAPSHOT]
>     at org.apache.hadoop.hive.ql.exec.tez.TezTask.execute(TezTask.java:223) [hive-exec-3.1.3000.7.1.4.0-SNAPSHOT.jar:3.1.3000.7.1.4.0-SNAPSHOT]
>     at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:213) [hive-exec-3.1.3000.7.1.4.0-SNAPSHOT.jar:3.1.3000.7.1.4.0-SNAPSHOT]
>     at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:105) [hive-exec-3.1.3000.7.1.4.0-SNAPSHOT.jar:3.1.3000.7.1.4.0-SNAPSHOT]
>     at org.apache.hadoop.hive.ql.Executor.launchTask(Executor.java:357) [hive-exec-3.1.3000.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.hadoop.hive.ql.Executor.launchTasks(Executor.java:330) [hive-exec-3.1.3000.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.hadoop.hive.ql.Executor.runTasks(Executor.java:246) [hive-exec-3.1.3000.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.hadoop.hive.ql.Executor.execute(Executor.java:109) [hive-exec-3.1.3000.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:721) [hive-exec-3.1.3000.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:488) [hive-exec-3.1.3000.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:482) [hive-exec-3.1.3000.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:166) [hive-exec-3.1.3000.7.1.4.0-SNAPSHOT.jar:3.1.3000.7.1.4.0-SNAPSHOT]
>     at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:232) [hive-exec-3.1.3000.7.1.4.0-SNAPSHOT.jar:3.1.3000.7.1.4.0-SNAPSHOT]
>     at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:247) [hive-cli-3.1.3000.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:193) [hive-cli-3.1.3000.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:412) [hive-cli-3.1.3000.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:343) [hive-cli-3.1.3000.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1465) [classes/:?]
>     at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1438) [classes/:?]
>     at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:194) [classes/:?]
>     at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [classes/:?]
>     at org.apache.hadoop.hive.cli.TestMiniHiveKafkaCliDriver.testCliDriver(TestMiniHiveKafkaCliDriver.java:60) [test-classes/:?]
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_151]
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_151]
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_151]
>     at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_151]
>     at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) [junit-4.11.jar:?]
>     at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) [junit-4.11.jar:?]
>     at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) [junit-4.11.jar:?]
>     at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) [junit-4.11.jar:?]
>     at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92) [classes/:?]
>     at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
>     at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) [junit-4.11.jar:?]
>     at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) [junit-4.11.jar:?]
>     at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) [junit-4.11.jar:?]
>     at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
>     at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
>     at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
>     at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
>     at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
>     at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
>     at org.junit.runners.Suite.runChild(Suite.java:127) [junit-4.11.jar:?]
>     at org.junit.runners.Suite.runChild(Suite.java:26) [junit-4.11.jar:?]
>     at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) [junit-4.11.jar:?]
>     at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) [junit-4.11.jar:?]
>     at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) [junit-4.11.jar:?]
>     at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) [junit-4.11.jar:?]
>     at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) [junit-4.11.jar:?]
>     at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73) [classes/:?]
>     at org.junit.rules.RunRules.evaluate(RunRules.java:20) [junit-4.11.jar:?]
>     at org.junit.runners.ParentRunner.run(ParentRunner.java:309) [junit-4.11.jar:?]
>     at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) [surefire-junit4-2.21.0.jar:2.21.0]
>     at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) [surefire-junit4-2.21.0.jar:2.21.0]
>     at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) [surefire-junit4-2.21.0.jar:2.21.0]
>     at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) [surefire-junit4-2.21.0.jar:2.21.0]
>     at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379) [surefire-booter-2.21.0.jar:2.21.0]
>     at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340) [surefire-booter-2.21.0.jar:2.21.0]
>     at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125) [surefire-booter-2.21.0.jar:2.21.0]
>     at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413) [surefire-booter-2.21.0.jar:2.21.0]
> Caused by: org.apache.kafka.common.KafkaException: javax.security.auth.login.LoginException: java.lang.IllegalArgumentException: Empty nameString not allowed
>     at sun.security.krb5.PrincipalName.validateNameStrings(PrincipalName.java:174)
>     at sun.security.krb5.PrincipalName.<init>(PrincipalName.java:397)
>     at sun.security.krb5.PrincipalName.<init>(PrincipalName.java:468)
>     at com.sun.security.auth.module.Krb5LoginModule.attemptAuthentication(Krb5LoginModule.java:650)
>     at com.sun.security.auth.module.Krb5LoginModule.login(Krb5LoginModule.java:617)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:498)
>     at javax.security.auth.login.LoginContext.invoke(LoginContext.java:755)
>     at javax.security.auth.login.LoginContext.access$000(LoginContext.java:195)
>     at javax.security.auth.login.LoginContext$4.run(LoginContext.java:682)
>     at javax.security.auth.login.LoginContext$4.run(LoginContext.java:680)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.login.LoginContext.invokePriv(LoginContext.java:680)
>     at javax.security.auth.login.LoginContext.login(LoginContext.java:587)
>     at org.apache.kafka.common.security.authenticator.AbstractLogin.login(AbstractLogin.java:60)
>     at org.apache.kafka.common.security.kerberos.KerberosLogin.login(KerberosLogin.java:103)
>     at org.apache.kafka.common.security.authenticator.LoginManager.<init>(LoginManager.java:62)
>     at org.apache.kafka.common.security.authenticator.LoginManager.acquireLoginManager(LoginManager.java:105)
>     at org.apache.kafka.common.network.SaslChannelBuilder.configure(SaslChannelBuilder.java:147)
>     at org.apache.kafka.common.network.ChannelBuilders.create(ChannelBuilders.java:146)
>     at org.apache.kafka.common.network.ChannelBuilders.clientChannelBuilder(ChannelBuilders.java:67)
>     at org.apache.kafka.clients.ClientUtils.createChannelBuilder(ClientUtils.java:99)
>     at org.apache.kafka.clients.admin.KafkaAdminClient.createInternal(KafkaAdminClient.java:426)
>     at org.apache.kafka.clients.admin.Admin.create(Admin.java:59)
>     at org.apache.kafka.clients.admin.AdminClient.create(AdminClient.java:39)
>     at org.apache.hadoop.hive.ql.exec.tez.DagUtils.getKafkaDelegationTokenForBrokers(DagUtils.java:333)
>     at org.apache.hadoop.hive.ql.exec.tez.DagUtils.getKafkaCredentials(DagUtils.java:301)
>     at org.apache.hadoop.hive.ql.exec.tez.DagUtils.addCredentials(DagUtils.java:282)
>     at org.apache.hadoop.hive.ql.exec.tez.TezTask.build(TezTask.java:516)
>     at org.apache.hadoop.hive.ql.exec.tez.TezTask.execute(TezTask.java:223)
>     at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:213)
>     at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:105)
>     at org.apache.hadoop.hive.ql.Executor.launchTask(Executor.java:357)
>     at org.apache.hadoop.hive.ql.Executor.launchTasks(Executor.java:330)
>     at org.apache.hadoop.hive.ql.Executor.runTasks(Executor.java:246)
>     at org.apache.hadoop.hive.ql.Executor.execute(Executor.java:109)
>     at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:721)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:488)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:482)
>     at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:166)
>     at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:232)
>     at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:247)
>     at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:193)
>     at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:412)
>     at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:343)
>     at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1465)
>     at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1438)
>     at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:194)
>     at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
>     at org.apache.hadoop.hive.cli.TestMiniHiveKafkaCliDriver.testCliDriver(TestMiniHiveKafkaCliDriver.java:60)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:498)
>     at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
>     at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>     at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
>     at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
>     at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
>     at org.junit.rules.RunRules.evaluate(RunRules.java:20)
>     at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
>     at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
>     at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
>     at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
>     at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
>     at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
>     at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
>     at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
>     at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
>     at org.junit.runners.Suite.runChild(Suite.java:127)
>     at org.junit.runners.Suite.runChild(Suite.java:26)
>     at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
>     at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
>     at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
>     at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
>     at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
>     at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
>     at org.junit.rules.RunRules.evaluate(RunRules.java:20)
>     at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
>     at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365)
>     at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273)
>     at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238)
>     at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159)
>     at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379)
>     at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340)
>     at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125)
>     at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413)
>     at org.apache.kafka.common.network.SaslChannelBuilder.configure(SaslChannelBuilder.java:158) ~[kafka-clients-2.4.1.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.kafka.common.network.ChannelBuilders.create(ChannelBuilders.java:146) ~[kafka-clients-2.4.1.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.kafka.common.network.ChannelBuilders.clientChannelBuilder(ChannelBuilders.java:67) ~[kafka-clients-2.4.1.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.kafka.clients.ClientUtils.createChannelBuilder(ClientUtils.java:99) ~[kafka-clients-2.4.1.7.1.4.0-SNAPSHOT.jar:?]
>     at org.apache.kafka.clients.admin.KafkaAdminClient.createInternal(KafkaAdminClient.java:426) ~[kafka-clients-2.4.1.7.1.4.0-SNAPSHOT.jar:?]
>     ... 64 more
> {code}
--
This message was sent by Atlassian Jira
(v8.3.4#803005)