[ 
https://issues.apache.org/jira/browse/HIVE-22019?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Kishen Das updated HIVE-22019:
------------------------------
    Summary: [ Test-fix ] 
alter_table_update_status/alter_table_update_status_disable_bitvector/alter_partition_update_status
 fail when DbNotificationListener is installed  (was: 
alter_table_update_status/alter_table_update_status_disable_bitvector/alter_partition_update_status
 fail when DbNotificationListener is installed)

> [ Test-fix ] 
> alter_table_update_status/alter_table_update_status_disable_bitvector/alter_partition_update_status
>  fail when DbNotificationListener is installed
> --------------------------------------------------------------------------------------------------------------------------------------------------------------
>
>                 Key: HIVE-22019
>                 URL: https://issues.apache.org/jira/browse/HIVE-22019
>             Project: Hive
>          Issue Type: Sub-task
>            Reporter: Daniel Dai
>            Priority: Major
>
> Statement like:
> ALTER TABLE src_stat_n0 UPDATE STATISTICS for column key SET 
> ('numDVs'='1111','avgColLen'='1.111')
> fail when DbNotificationListener is installed with the message:
> {code}
> See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, 
> or check ./ql/target/surefire-reports or 
> ./itests/qtest/target/surefire-reports/ for specific test cases logs.
>  org.apache.hadoop.hive.ql.metadata.HiveException: 
> java.lang.IllegalArgumentException: Could not serialize 
> JSONUpdateTableColumnStatMessage : 
>  at 
> org.apache.hadoop.hive.ql.metadata.Hive.setPartitionColumnStatistics(Hive.java:5350)
>  at 
> org.apache.hadoop.hive.ql.exec.ColumnStatsUpdateTask.persistColumnStats(ColumnStatsUpdateTask.java:339)
>  at 
> org.apache.hadoop.hive.ql.exec.ColumnStatsUpdateTask.execute(ColumnStatsUpdateTask.java:347)
>  at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:212)
>  at 
> org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:103)
>  at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2343)
>  at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1995)
>  at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1662)
>  at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1422)
>  at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1416)
>  at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:162)
>  at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:223)
>  at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:242)
>  at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:189)
>  at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:408)
>  at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:340)
>  at 
> org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:680)
>  at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:651)
>  at 
> org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:182)
>  at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104)
>  at 
> org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver(TestCliDriver.java:59)
>  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>  at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>  at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  at java.lang.reflect.Method.invoke(Method.java:498)
>  at 
> org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
>  at 
> org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>  at 
> org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
>  at 
> org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
>  at 
> org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:92)
>  at org.junit.rules.RunRules.evaluate(RunRules.java:20)
>  at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
>  at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
>  at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
>  at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
>  at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
>  at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
>  at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
>  at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
>  at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
>  at org.junit.runners.Suite.runChild(Suite.java:127)
>  at org.junit.runners.Suite.runChild(Suite.java:26)
>  at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
>  at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
>  at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
>  at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
>  at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
>  at 
> org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:73)
>  at org.junit.rules.RunRules.evaluate(RunRules.java:20)
>  at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
>  at 
> org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365)
>  at 
> org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273)
>  at 
> org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238)
>  at 
> org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159)
>  at 
> org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:379)
>  at 
> org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:340)
>  at 
> org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:125)
>  at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:413)
> Caused by: java.lang.IllegalArgumentException: Could not serialize 
> JSONUpdateTableColumnStatMessage : 
>  at 
> org.apache.hadoop.hive.metastore.messaging.json.JSONUpdateTableColumnStatMessage.<init>(JSONUpdateTableColumnStatMessage.java:71)
>  at 
> org.apache.hadoop.hive.metastore.messaging.MessageBuilder.buildUpdateTableColumnStatMessage(MessageBuilder.java:293)
>  at 
> org.apache.hive.hcatalog.listener.DbNotificationListener.onUpdateTableColumnStat(DbNotificationListener.java:760)
>  at 
> org.apache.hadoop.hive.metastore.MetaStoreListenerNotifier.lambda$static$8(MetaStoreListenerNotifier.java:232)
>  at 
> org.apache.hadoop.hive.metastore.MetaStoreListenerNotifier.notifyEvent(MetaStoreListenerNotifier.java:285)
>  at 
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.updateTableColumnStatsInternal(HiveMetaStore.java:6209)
>  at 
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.set_aggr_stats_for(HiveMetaStore.java:8145)
>  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>  at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>  at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  at java.lang.reflect.Method.invoke(Method.java:498)
>  at 
> org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:147)
>  at 
> org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:108)
>  at com.sun.proxy.$Proxy40.set_aggr_stats_for(Unknown Source)
>  at 
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient.setPartitionColumnStatistics(HiveMetaStoreClient.java:2397)
>  at 
> org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.setPartitionColumnStatistics(SessionHiveMetaStoreClient.java:477)
>  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>  at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>  at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  at java.lang.reflect.Method.invoke(Method.java:498)
>  at 
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212)
>  at com.sun.proxy.$Proxy41.setPartitionColumnStatistics(Unknown Source)
>  at 
> org.apache.hadoop.hive.ql.metadata.Hive.setPartitionColumnStatistics(Hive.java:5347)
>  ... 57 more
> Caused by: org.apache.thrift.protocol.TProtocolException: Required field 
> 'maxColLen' is unset! Struct:StringColumnStatsData(maxColLen:0, 
> avgColLen:1.111, numNulls:0, numDVs:1111)
>  at 
> org.apache.hadoop.hive.metastore.api.StringColumnStatsData.validate(StringColumnStatsData.java:614)
>  at 
> org.apache.hadoop.hive.metastore.api.StringColumnStatsData$StringColumnStatsDataStandardScheme.write(StringColumnStatsData.java:718)
>  at 
> org.apache.hadoop.hive.metastore.api.StringColumnStatsData$StringColumnStatsDataStandardScheme.write(StringColumnStatsData.java:656)
>  at 
> org.apache.hadoop.hive.metastore.api.StringColumnStatsData.write(StringColumnStatsData.java:574)
>  at 
> org.apache.hadoop.hive.metastore.api.ColumnStatisticsData.standardSchemeWriteValue(ColumnStatisticsData.java:346)
>  at org.apache.thrift.TUnion$TUnionStandardScheme.write(TUnion.java:244)
>  at org.apache.thrift.TUnion$TUnionStandardScheme.write(TUnion.java:213)
>  at org.apache.thrift.TUnion.write(TUnion.java:152)
>  at 
> org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj$ColumnStatisticsObjStandardScheme.write(ColumnStatisticsObj.java:554)
>  at 
> org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj$ColumnStatisticsObjStandardScheme.write(ColumnStatisticsObj.java:492)
>  at 
> org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj.write(ColumnStatisticsObj.java:418)
>  at 
> org.apache.hadoop.hive.metastore.api.ColumnStatistics$ColumnStatisticsStandardScheme.write(ColumnStatistics.java:583)
>  at 
> org.apache.hadoop.hive.metastore.api.ColumnStatistics$ColumnStatisticsStandardScheme.write(ColumnStatistics.java:511)
>  at 
> org.apache.hadoop.hive.metastore.api.ColumnStatistics.write(ColumnStatistics.java:438)
>  at org.apache.thrift.TSerializer.serialize(TSerializer.java:79)
>  at org.apache.thrift.TSerializer.toString(TSerializer.java:93)
>  at 
> org.apache.hadoop.hive.metastore.messaging.MessageBuilder.createTableColumnStatJson(MessageBuilder.java:381)
>  at 
> org.apache.hadoop.hive.metastore.messaging.json.JSONUpdateTableColumnStatMessage.<init>(JSONUpdateTableColumnStatMessage.java:68)
>  ... 79 more
> {code}
> This might be just a test fix, since changing only some fields of a complete 
> set of stats does not sound like a valid request.



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

Reply via email to