[
https://issues.apache.org/jira/browse/HIVE-19917?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
Eugene Koifman resolved HIVE-19917.
-----------------------------------
Resolution: Fixed
Fix Version/s: 3.1.0
Committed to branch-3 and master.
Thanks, Jason, for the review.
> Export of full CRUD transactional table fails if table is not in default database
> ----------------------------------------------------------------------------------
>
> Key: HIVE-19917
> URL: https://issues.apache.org/jira/browse/HIVE-19917
> Project: Hive
> Issue Type: Bug
> Components: Transactions
> Affects Versions: 3.0.0
> Reporter: Eugene Koifman
> Assignee: Eugene Koifman
> Priority: Major
> Fix For: 3.1.0
>
> Attachments: HIVE-19917.01.patch
>
>
> The actual issue is fixed by HIVE-19861.
> This is a follow-up to add a test case.
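> For reference, a minimal sketch of the scenario being covered (the database, table, and export path names below are illustrative, not taken from the patch):
> {noformat}
> -- Full CRUD (ACID) table in a non-default database
> CREATE DATABASE testdb;
> USE testdb;
> CREATE TABLE acid_tbl (a INT, b STRING)
>   STORED AS ORC
>   TBLPROPERTIES ('transactional'='true');
> INSERT INTO acid_tbl VALUES (1, 'one'), (2, 'two');
> -- Without the fix from HIVE-19861, the EXPORT below failed with
> -- "Can not create a Path from a null string"
> EXPORT TABLE acid_tbl TO '/tmp/acid_tbl_export';
> {noformat}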
> Issue:
> {noformat}
> org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.IllegalArgumentException: Can not create a Path from a null string
> at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:940) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:945) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hadoop.hive.ql.exec.DDLTask.createTableLike(DDLTask.java:5099) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:433) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hadoop.hive.ql.parse.UpdateDeleteSemanticAnalyzer.analyzeAcidExport(UpdateDeleteSemanticAnalyzer.java:195) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hadoop.hive.ql.parse.UpdateDeleteSemanticAnalyzer.analyzeInternal(UpdateDeleteSemanticAnalyzer.java:106) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:288) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:658) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1813) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:1760) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:1755) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hadoop.hive.ql.reexec.ReExecDriver.compileAndRespond(ReExecDriver.java:126) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hive.service.cli.operation.SQLOperation.prepare(SQLOperation.java:194) ~[hive-service-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hive.service.cli.operation.SQLOperation.runInternal(SQLOperation.java:257) ~[hive-service-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hive.service.cli.operation.Operation.run(Operation.java:243) ~[hive-service-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementInternal(HiveSessionImpl.java:541) ~[hive-service-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementAsync(HiveSessionImpl.java:527) ~[hive-service-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hive.service.cli.CLIService.executeStatementAsync(CLIService.java:312) ~[hive-service-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hive.service.cli.thrift.ThriftCLIService.ExecuteStatement(ThriftCLIService.java:562) ~[hive-service-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hive.service.rpc.thrift.TCLIService$Processor$ExecuteStatement.getResult(TCLIService.java:1557) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hive.service.rpc.thrift.TCLIService$Processor$ExecuteStatement.getResult(TCLIService.java:1542) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge$Server$TUGIAssumingProcessor.process(HadoopThriftAuthBridge.java:647) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:286) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) ~[?:1.8.0_112]
> at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) ~[?:1.8.0_112]
> at java.lang.Thread.run(Thread.java:745) [?:1.8.0_112]
> Caused by: java.lang.IllegalArgumentException: Can not create a Path from a null string
> at org.apache.hadoop.fs.Path.checkPathArg(Path.java:164) ~[hadoop-common-3.0.0.3.0.0.0-1485.jar:?]
> at org.apache.hadoop.fs.Path.<init>(Path.java:180) ~[hadoop-common-3.0.0.3.0.0.0-1485.jar:?]
> at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.createTempTable(SessionHiveMetaStoreClient.java:459) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.create_table_with_environment_context(SessionHiveMetaStoreClient.java:117) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:831) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:816) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at sun.reflect.GeneratedMethodAccessor124.invoke(Unknown Source) ~[?:?]
> at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
> at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
> at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:212) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at com.sun.proxy.$Proxy55.createTable(Unknown Source) ~[?:?]
> at sun.reflect.GeneratedMethodAccessor124.invoke(Unknown Source) ~[?:?]
> at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_112]
> at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
> at org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:2768) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> at com.sun.proxy.$Proxy55.createTable(Unknown Source) ~[?:?]
> at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:929) ~[hive-exec-3.0.0.3.0.0.0-1485.jar:3.0.0.3.0.0.0-1485]
> ... 27 more
> 2018-06-14T17:53:32,112 ERROR [07758225-f4e7-4fc2-a9e5-c6ed19e9fcfd HiveServer2-Handler-Pool: Thread-143]: metadata.Hive (:()) - Table tpch.tbl_export_05da5215_6695_420e_99e9_24a9bb5d1a39 not found: hive.tpch.tbl_export_05da5215_6695_420e_99e9_24a9bb5d1a39 table not found
> 2018-06-14T17:53:32,113 ERROR [07758225-f4e7-4fc2-a9e5-c6ed19e9fcfd HiveServer2-Handler-Pool: Thread-143]: ql.Driver (:()) - FAILED: SemanticException org.apache.hadoop.hive.ql.metadata.InvalidTableException: Table not found tbl_export_05da5215_6695_420e_99e9_24a9bb5d1a39
> org.apache.hadoop.hive.ql.parse.SemanticException: org.apache.hadoop.hive.ql.metadata.InvalidTableException: Table not found tbl_export_05da5215_6695_420e_99e9_24a9bb5d1a39
> at org.apache.hadoop.hive.ql.parse.UpdateDeleteSemanticAnalyzer.analyzeAcidExport(UpdateDeleteSemanticAnalyzer.java:198)
> at org.apache.hadoop.hive.ql.parse.UpdateDeleteSemanticAnalyzer.analyzeInternal(UpdateDeleteSemanticAnalyzer.java:106)
> at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:288)
> at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:658)
> at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1813)
> at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:1760)
> at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:1755)
> at org.apache.hadoop.hive.ql.reexec.ReExecDriver.compileAndRespond(ReExecDriver.java:126)
> at org.apache.hive.service.cli.operation.SQLOperation.prepare(SQLOperation.java:194)
> at org.apache.hive.service.cli.operation.SQLOperation.runInternal(SQLOperation.java:257)
> at org.apache.hive.service.cli.operation.Operation.run(Operation.java:243)
> at org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementInternal(HiveSessionImpl.java:541)
> at org.apache.hive.service.cli.session.HiveSessionImpl.executeStatementAsync(HiveSessionImpl.java:527)
> at org.apache.hive.service.cli.CLIService.executeStatementAsync(CLIService.java:312)
> at org.apache.hive.service.cli.thrift.ThriftCLIService.ExecuteStatement(ThriftCLIService.java:562)
> at org.apache.hive.service.rpc.thrift.TCLIService$Processor$ExecuteStatement.getResult(TCLIService.java:1557)
> at org.apache.hive.service.rpc.thrift.TCLIService$Processor$ExecuteStatement.getResult(TCLIService.java:1542)
> at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
> at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:39)
> at org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge$Server$TUGIAssumingProcessor.process(HadoopThriftAuthBridge.java:647)
> at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:286)
> at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
> at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: org.apache.hadoop.hive.ql.metadata.InvalidTableException: Table not found tbl_export_05da5215_6695_420e_99e9_24a9bb5d1a39
> at org.apache.hadoop.hive.ql.metadata.Hive.getTable(Hive.java:1141)
> at org.apache.hadoop.hive.ql.metadata.Hive.getTable(Hive.java:1092)
> at org.apache.hadoop.hive.ql.metadata.Hive.getTable(Hive.java:1079)
> at org.apache.hadoop.hive.ql.parse.UpdateDeleteSemanticAnalyzer.analyzeAcidExport(UpdateDeleteSemanticAnalyzer.java:196)
> ... 23 more
> {noformat}
--
This message was sent by Atlassian JIRA
(v7.6.3#76005)