[
https://issues.apache.org/jira/browse/HIVE-27407?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
Stamatis Zampetakis resolved HIVE-27407.
----------------------------------------
Fix Version/s: Not Applicable
Resolution: Not A Problem
The Kafka version was downgraded in HIVE-27475, so this is no longer a problem.
The changes proposed in the PR were incorporated into HIVE-29238 as part of the
upgrade to Kafka version 3.9.1.
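For context, the failure in the report below comes from HiveKafkaProducer reflectively reading a private field of the Kafka client's transaction manager; the field name is hard-coded, so a rename across Kafka releases surfaces as NoSuchFieldException. The following is only a minimal sketch of that reflective pattern, not the actual HiveKafkaProducer code; the class and stand-in object are illustrative.
{code:java}
import java.lang.reflect.Field;

public class ReflectiveLookupSketch {

  // Mirrors the pattern visible in the stack trace (HiveKafkaProducer.getValue):
  // look up a private field by name and rethrow any reflective failure as
  // "Incompatible KafkaProducer version".
  static Object getValue(Object target, String fieldName) {
    try {
      // Throws NoSuchFieldException if the field was renamed or removed
      Field field = target.getClass().getDeclaredField(fieldName);
      field.setAccessible(true);
      return field.get(target);
    } catch (ReflectiveOperationException e) {
      throw new RuntimeException("Incompatible KafkaProducer version", e);
    }
  }

  public static void main(String[] args) {
    // Stand-in for the Kafka client's transaction manager; it has no field named
    // "topicPartitionBookkeeper", so the lookup fails just as it does against
    // the Kafka 3.4 client in this report.
    Object transactionManagerStandIn = new Object();
    try {
      getValue(transactionManagerStandIn, "topicPartitionBookkeeper");
    } catch (RuntimeException e) {
      System.out.println(e.getMessage() + " caused by " + e.getCause());
    }
  }
}
{code}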
> INSERT INTO external Kafka table fails with NoSuchFieldException
> ----------------------------------------------------------------
>
> Key: HIVE-27407
> URL: https://issues.apache.org/jira/browse/HIVE-27407
> Project: Hive
> Issue Type: Bug
> Components: HiveServer2, kafka integration
> Reporter: Stamatis Zampetakis
> Assignee: Stamatis Zampetakis
> Priority: Major
> Labels: pull-request-available
> Fix For: Not Applicable
>
>
> INSERT INTO statements involving an external Kafka table currently fail with NoSuchFieldException during execution.
> The problem can be reproduced by running kafka_storage_handler.q (after enabling it, since the test is currently disabled):
> {code}
> mvn -pl itests/qtest -Pitests test -Dtest=TestMiniHiveKafkaCliDriver -Dqfile=kafka_storage_handler.q
> {code}
> {noformat}
> java.lang.RuntimeException: Incompatible KafkaProducer version
> at org.apache.hadoop.hive.kafka.HiveKafkaProducer.getValue(HiveKafkaProducer.java:245) ~[kafka-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.kafka.HiveKafkaProducer.getValue(HiveKafkaProducer.java:236) ~[kafka-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.kafka.HiveKafkaProducer.resumeTransaction(HiveKafkaProducer.java:141) ~[kafka-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.kafka.KafkaStorageHandler$2.lambda$perform$0(KafkaStorageHandler.java:302) ~[kafka-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at com.google.common.collect.RegularImmutableMap.forEach(RegularImmutableMap.java:153) ~[guava-22.0.jar:?]
> at org.apache.hadoop.hive.kafka.KafkaStorageHandler$2.perform(KafkaStorageHandler.java:298) ~[kafka-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.kafka.KafkaStorageHandler$2.perform(KafkaStorageHandler.java:295) ~[kafka-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.kafka.RetryUtils.retry(RetryUtils.java:93) ~[kafka-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.kafka.RetryUtils.retry(RetryUtils.java:124) ~[kafka-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.kafka.KafkaStorageHandler.commitInsertTable(KafkaStorageHandler.java:321) ~[kafka-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.ql.ddl.misc.hooks.InsertCommitHookOperation.execute(InsertCommitHookOperation.java:45) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.ql.ddl.DDLTask.execute(DDLTask.java:84) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:214) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:105) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.ql.Executor.launchTask(Executor.java:354) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.ql.Executor.launchTasks(Executor.java:327) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.ql.Executor.runTasks(Executor.java:244) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.ql.Executor.execute(Executor.java:105) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:367) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:205) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.ql.Driver.run(Driver.java:154) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.ql.Driver.run(Driver.java:149) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:185) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:228) ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:257) ~[hive-cli-4.0.0-SNAPSHOT.jar:?]
> at org.apache.hadoop.hive.cli.CliDriver.processCmd1(CliDriver.java:201) ~[hive-cli-4.0.0-SNAPSHOT.jar:?]
> at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:127) ~[hive-cli-4.0.0-SNAPSHOT.jar:?]
> at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:425) ~[hive-cli-4.0.0-SNAPSHOT.jar:?]
> at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:356) ~[hive-cli-4.0.0-SNAPSHOT.jar:?]
> at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:733) ~[hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:703) ~[hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:115) ~[hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:157) ~[hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.apache.hadoop.hive.cli.TestMiniHiveKafkaCliDriver.testCliDriver(TestMiniHiveKafkaCliDriver.java:60) ~[test-classes/:?]
> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_261]
> at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_261]
> at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_261]
> at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_261]
> at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) ~[junit-4.13.2.jar:4.13.2]
> at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:135) ~[hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.ParentRunner.run(ParentRunner.java:413) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.Suite.runChild(Suite.java:128) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.Suite.runChild(Suite.java:27) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) ~[junit-4.13.2.jar:4.13.2]
> at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:95) ~[hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> at org.junit.rules.RunRules.evaluate(RunRules.java:20) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) ~[junit-4.13.2.jar:4.13.2]
> at org.junit.runners.ParentRunner.run(ParentRunner.java:413) ~[junit-4.13.2.jar:4.13.2]
> at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365) ~[surefire-junit4-3.0.0-M4.jar:3.0.0-M4]
> at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273) ~[surefire-junit4-3.0.0-M4.jar:3.0.0-M4]
> at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238) ~[surefire-junit4-3.0.0-M4.jar:3.0.0-M4]
> at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159) ~[surefire-junit4-3.0.0-M4.jar:3.0.0-M4]
> at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:377) ~[surefire-booter-3.0.0-M4.jar:3.0.0-M4]
> at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:138) ~[surefire-booter-3.0.0-M4.jar:3.0.0-M4]
> at org.apache.maven.surefire.booter.ForkedBooter.run(ForkedBooter.java:465) ~[surefire-booter-3.0.0-M4.jar:3.0.0-M4]
> at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:451) ~[surefire-booter-3.0.0-M4.jar:3.0.0-M4]
> Caused by: java.lang.NoSuchFieldException: topicPartitionBookkeeper
> at java.lang.Class.getDeclaredField(Class.java:2070) ~[?:1.8.0_261]
> at org.apache.hadoop.hive.kafka.HiveKafkaProducer.getValue(HiveKafkaProducer.java:241) ~[kafka-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
> ... 72 more
> {noformat}
> Most likely the problem was caused by HIVE-27105, which upgraded the Kafka version from 2.5.0 to 3.4.0. Since the kafka_storage_handler.q test was disabled, the failure went unnoticed.
--
This message was sent by Atlassian Jira
(v8.20.10#820010)