[ https://issues.apache.org/jira/browse/HIVE-17886?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Zoltan Haindrich updated HIVE-17886:
------------------------------------
    Attachment: repl-tc.hive.log

Attached hive.log.

> Fix failure of TestReplicationScenarios.testConstraints
> -------------------------------------------------------
>
>                 Key: HIVE-17886
>                 URL: https://issues.apache.org/jira/browse/HIVE-17886
>             Project: Hive
>          Issue Type: Bug
>            Reporter: Zoltan Haindrich
>         Attachments: repl-tc.hive.log
>
>
> After HIVE-16603, this test started failing:
> {code}
> 2017-10-24T10:52:17,024 DEBUG [main] metastore.HiveMetaStoreClient: Unable to shutdown metastore client. Will try closing transport directly.
> org.apache.thrift.transport.TTransportException: Cannot write to null outputStream
>         at org.apache.thrift.transport.TIOStreamTransport.write(TIOStreamTransport.java:142) ~[libthrift-0.9.3.jar:0.9.3]
>         at org.apache.thrift.protocol.TBinaryProtocol.writeI32(TBinaryProtocol.java:178) ~[libthrift-0.9.3.jar:0.9.3]
>         at org.apache.thrift.protocol.TBinaryProtocol.writeMessageBegin(TBinaryProtocol.java:106) ~[libthrift-0.9.3.jar:0.9.3]
>         at org.apache.thrift.TServiceClient.sendBase(TServiceClient.java:70) ~[libthrift-0.9.3.jar:0.9.3]
>         at org.apache.thrift.TServiceClient.sendBaseOneway(TServiceClient.java:66) ~[libthrift-0.9.3.jar:0.9.3]
>         at com.facebook.fb303.FacebookService$Client.send_shutdown(FacebookService.java:436) ~[libfb303-0.9.3.jar:?]
>         at com.facebook.fb303.FacebookService$Client.shutdown(FacebookService.java:430) ~[libfb303-0.9.3.jar:?]
>         at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.close(HiveMetaStoreClient.java:569) [hive-metastore-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at sun.reflect.GeneratedMethodAccessor45.invoke(Unknown Source) ~[?:?]
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_131]
>         at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_131]
>         at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:173) [hive-metastore-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at com.sun.proxy.$Proxy38.close(Unknown Source) [?:?]
>         at sun.reflect.GeneratedMethodAccessor45.invoke(Unknown Source) ~[?:?]
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_131]
>         at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_131]
>         at org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:2413) [hive-metastore-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at com.sun.proxy.$Proxy38.close(Unknown Source) [?:?]
>         at org.apache.hadoop.hive.metastore.SynchronizedMetaStoreClient.close(SynchronizedMetaStoreClient.java:112) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.metadata.Hive.close(Hive.java:425) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.metadata.Hive.access$000(Hive.java:181) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.metadata.Hive$1.remove(Hive.java:202) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.metadata.Hive.closeCurrent(Hive.java:388) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.metadata.Hive.create(Hive.java:339) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.metadata.Hive.getInternal(Hive.java:324) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.metadata.Hive.getWithFastCheck(Hive.java:316) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.metadata.Hive.getWithFastCheck(Hive.java:308) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Task.getHive(Task.java:186) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.repl.bootstrap.ReplLoadTask.execute(ReplLoadTask.java:73) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:206) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2276) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1906) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1623) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1362) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1352) [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.parse.TestReplicationScenarios.run(TestReplicationScenarios.java:3578) [test-classes/:?]
>         at org.apache.hadoop.hive.ql.parse.TestReplicationScenarios.run(TestReplicationScenarios.java:3567) [test-classes/:?]
>         at org.apache.hadoop.hive.ql.parse.TestReplicationScenarios.testConstraints(TestReplicationScenarios.java:2946) [test-classes/:?]
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_131]
> {code}
> After those, the log contains errors like the following:
> {code}
> 2017-10-24T10:52:17,263 ERROR [pool-8-thread-8] metastore.RetryingHMSHandler: MetaException(message:Trying to define foreign key but there are no primary keys or unique keys for referenced table)
>         at org.apache.hadoop.hive.metastore.ObjectStore.addForeignKeys(ObjectStore.java:4020)
>         at org.apache.hadoop.hive.metastore.ObjectStore.addForeignKeys(ObjectStore.java:3886)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:498)
>         at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:97)
>         at com.sun.proxy.$Proxy31.addForeignKeys(Unknown Source)
>         at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.add_foreign_key(HiveMetaStore.java:1770)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:498)
>         at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:148)
>         at org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:107)
>         at com.sun.proxy.$Proxy33.add_foreign_key(Unknown Source)
>         at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Processor$add_foreign_key.getResult(ThriftHiveMetastore.java:12224)
>         at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Processor$add_foreign_key.getResult(ThriftHiveMetastore.java:12208)
>         at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:39)
>         at org.apache.hadoop.hive.metastore.TUGIBasedProcessor$1.run(TUGIBasedProcessor.java:110)
>         at org.apache.hadoop.hive.metastore.TUGIBasedProcessor$1.run(TUGIBasedProcessor.java:106)
>         at java.security.AccessController.doPrivileged(Native Method)
>         at javax.security.auth.Subject.doAs(Subject.java:422)
>         at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1807)
>         at org.apache.hadoop.hive.metastore.TUGIBasedProcessor.process(TUGIBasedProcessor.java:118)
>         at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:286)
>         at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>         at java.lang.Thread.run(Thread.java:748)
> {code}
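> For context, the MetaException above comes from the metastore check that rejects a foreign key whose referenced table has no primary or unique key. A minimal HiveQL sketch of that situation (table and constraint names are hypothetical, not the test's own DDL):
> {code}
> -- Referenced table created WITHOUT a primary or unique key
> CREATE TABLE parent (id INT);
>
> -- Defining a foreign key against it fails in ObjectStore.addForeignKeys with
> -- "Trying to define foreign key but there are no primary keys or unique keys
> -- for referenced table"
> CREATE TABLE child (
>   pid INT,
>   CONSTRAINT fk_child_parent FOREIGN KEY (pid) REFERENCES parent(id) DISABLE NOVALIDATE
> );
> {code}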
> This test failure seems to be machine dependent; why is not yet known. On an affected system, running the following reproduces the problem deterministically:
> {code}
> mvn install -Pitests -pl itests/hive-unit -DskipSparkTests '-Dtest=TestReplicationScenarios#testConstraints' -q -T9 -Dmaven.surefire.plugin.version=2.20.1
> {code}


