[ https://issues.apache.org/jira/browse/PHOENIX-1939?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14520447#comment-14520447 ]

Alicia Ying Shu commented on PHOENIX-1939:
------------------------------------------

The unit test failures are related to UserDefinedFunction; they are not related to this patch.
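
For context, the quoted trace below fails while BaseTest sets up ATABLE: the CREATE TABLE call reaches the MetaDataEndpointImpl coprocessor, PTableImpl.toProto throws an NPE, and ServerUtil.createIOException wraps it so the client sees DoNotRetryIOException: ATABLE: null. A minimal, hypothetical client-side sketch of that same path is shown here, assuming a local HBase/ZooKeeper quorum with the Phoenix server jar deployed; the JDBC URL and the simplified DDL are illustrative stand-ins, not the actual BaseTest schema.

{code:java}
// Hypothetical sketch only: reproduces the client-side call path in the trace
// (PhoenixStatement.execute -> MetaDataClient.createTable -> MetaDataEndpointImpl
// coprocessor), not the exact ATABLE definition used by BaseTest.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class ATableCreateSketch {
    public static void main(String[] args) throws Exception {
        // Assumes ZooKeeper on localhost:2181 and Phoenix deployed to the region servers.
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost:2181");
             Statement stmt = conn.createStatement()) {
            // Simplified stand-in for the ATABLE schema created by BaseTest.ensureTableCreated().
            stmt.execute("CREATE TABLE IF NOT EXISTS ATABLE ("
                    + " ORGANIZATION_ID CHAR(15) NOT NULL,"
                    + " ENTITY_ID CHAR(15) NOT NULL,"
                    + " A_STRING VARCHAR,"
                    + " B_STRING VARCHAR,"
                    + " A_INTEGER INTEGER,"
                    + " A_DATE DATE"
                    + " CONSTRAINT PK PRIMARY KEY (ORGANIZATION_ID, ENTITY_ID))");
            // The failing parameterized run also creates this index (taken from the test name).
            stmt.execute("CREATE INDEX IF NOT EXISTS ATABLE_IDX ON ATABLE (A_INTEGER DESC)"
                    + " INCLUDE (A_STRING, B_STRING, A_DATE)");
        }
    }
}
{code}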

> Test are failing with DoNotRetryIOException: ATABLE: null
> ---------------------------------------------------------
>
>                 Key: PHOENIX-1939
>                 URL: https://issues.apache.org/jira/browse/PHOENIX-1939
>             Project: Phoenix
>          Issue Type: Bug
>            Reporter: Alicia Ying Shu
>            Assignee: Alicia Ying Shu
>         Attachments: Phoenix-1939.patch
>
>
> Some Phoenix tests are failing with the following message:
> {noformat}
> 1) testIsNull[CREATE INDEX ATABLE_IDX ON aTable (a_integer DESC) INCLUDE (    A_STRING,     B_STRING,     A_DATE)](org.apache.phoenix.end2end.QueryIT)
> org.apache.phoenix.exception.PhoenixIOException: org.apache.hadoop.hbase.DoNotRetryIOException: ATABLE: null
>       at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:83)
>       at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.createTable(MetaDataEndpointImpl.java:812)
>       at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:7771)
>       at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:6830)
>       at org.apache.hadoop.hbase.regionserver.HRegionServer.execServiceOnRegion(HRegionServer.java:3415)
>       at org.apache.hadoop.hbase.regionserver.HRegionServer.execService(HRegionServer.java:3397)
>       at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:29998)
>       at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2078)
>       at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:108)
>       at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:114)
>       at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:94)
>       at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.NullPointerException
>       at org.apache.phoenix.schema.PTableImpl.toProto(PTableImpl.java:934)
>       at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.createTable(MetaDataEndpointImpl.java:777)
>       ... 10 more
>       at org.apache.phoenix.util.ServerUtil.parseServerException(ServerUtil.java:107)
>       at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:938)
>       at org.apache.phoenix.query.ConnectionQueryServicesImpl.createTable(ConnectionQueryServicesImpl.java:1139)
>       at org.apache.phoenix.query.DelegateConnectionQueryServices.createTable(DelegateConnectionQueryServices.java:110)
>       at org.apache.phoenix.schema.MetaDataClient.createTableInternal(MetaDataClient.java:1527)
>       at org.apache.phoenix.schema.MetaDataClient.createTable(MetaDataClient.java:535)
>       at org.apache.phoenix.compile.CreateTableCompiler$2.execute(CreateTableCompiler.java:184)
>       at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:260)
>       at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:252)
>       at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
>       at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:250)
>       at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1037)
>       at org.apache.phoenix.query.BaseTest.createTestTable(BaseTest.java:737)
>       at org.apache.phoenix.query.BaseTest.createTestTable(BaseTest.java:711)
>       at org.apache.phoenix.query.BaseTest.ensureTableCreated(BaseTest.java:703)
>       at org.apache.phoenix.query.BaseTest.initATableValues(BaseTest.java:894)
>       at org.apache.phoenix.query.BaseTest.initATableValues(BaseTest.java:1097)
>       at org.apache.phoenix.end2end.BaseQueryIT.initTable(BaseQueryIT.java:94)
>       at sun.reflect.GeneratedMethodAccessor5.invoke(Unknown Source)
>       at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:606)
>       at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
>       at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>       at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
>       at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:24)
>       at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
>       at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
>       at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
>       at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
>       at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
>       at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
>       at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
>       at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
>       at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
>       at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
>       at org.junit.runners.Suite.runChild(Suite.java:127)
>       at org.junit.runners.Suite.runChild(Suite.java:26)
>       at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
>       at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
>       at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
>       at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
>       at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
>       at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
>       at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
>       at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
>       at org.junit.runners.Suite.runChild(Suite.java:127)
>       at org.junit.runners.Suite.runChild(Suite.java:26)
>       at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
>       at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
>       at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
>       at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
>       at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
>       at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
>       at org.junit.runner.JUnitCore.run(JUnitCore.java:160)
>       at org.junit.runner.JUnitCore.run(JUnitCore.java:138)
>       at org.junit.runner.JUnitCore.run(JUnitCore.java:117)
>       at org.apache.phoenix.end2end.End2EndTestDriver.doWork(End2EndTestDriver.java:198)
>       at org.apache.hadoop.hbase.util.AbstractHBaseTool.run(AbstractHBaseTool.java:112)
>       at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
>       at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:84)
>       at org.apache.phoenix.end2end.End2EndTestDriver.main(End2EndTestDriver.java:58)
> Caused by: org.apache.hadoop.hbase.DoNotRetryIOException: org.apache.hadoop.hbase.DoNotRetryIOException: ATABLE: null
>       at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:83)
>       at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.createTable(MetaDataEndpointImpl.java:812)
>       at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:7771)
>       at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:6830)
>       at org.apache.hadoop.hbase.regionserver.HRegionServer.execServiceOnRegion(HRegionServer.java:3415)
>       at org.apache.hadoop.hbase.regionserver.HRegionServer.execService(HRegionServer.java:3397)
>       at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:29998)
>       at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2078)
>       at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:108)
>       at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:114)
>       at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:94)
>       at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.NullPointerException
>       at org.apache.phoenix.schema.PTableImpl.toProto(PTableImpl.java:934)
>       at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.createTable(MetaDataEndpointImpl.java:777)
>       ... 10 more
>       at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>       at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
>       at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>       at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
>       at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
>       at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
>       at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:306)
>       at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1667)
>       at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:93)
>       at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:90)
>       at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:123)
>       at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:92)
>       at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:96)
>       at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:51)
>       at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.createTable(MetaDataProtos.java:7914)
>       at org.apache.phoenix.query.ConnectionQueryServicesImpl$4.call(ConnectionQueryServicesImpl.java:1151)
>       at org.apache.phoenix.query.ConnectionQueryServicesImpl$4.call(ConnectionQueryServicesImpl.java:1140)
>       at org.apache.hadoop.hbase.client.HTable$16.call(HTable.java:1646)
>       at java.util.concurrent.FutureTask.run(FutureTask.java:262)
>       at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
>       at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>       at java.lang.Thread.run(Thread.java:745)
> Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.DoNotRetryIOException): org.apache.hadoop.hbase.DoNotRetryIOException: ATABLE: null
>       at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:83)
>       at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.createTable(MetaDataEndpointImpl.java:812)
>       at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:7771)
>       at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:6830)
>       at org.apache.hadoop.hbase.regionserver.HRegionServer.execServiceOnRegion(HRegionServer.java:3415)
>       at org.apache.hadoop.hbase.regionserver.HRegionServer.execService(HRegionServer.java:3397)
>       at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:29998)
>       at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2078)
>       at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:108)
>       at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:114)
>       at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:94)
>       at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.NullPointerException
>       at org.apache.phoenix.schema.PTableImpl.toProto(PTableImpl.java:934)
>       at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.createTable(MetaDataEndpointImpl.java:777)
>       ... 10 more
>       at org.apache.hadoop.hbase.ipc.RpcClient.call(RpcClient.java:1538)
>       at org.apache.hadoop.hbase.ipc.RpcClient.callBlockingMethod(RpcClient.java:1724)
>       at org.apache.hadoop.hbase.ipc.RpcClient$BlockingRpcChannelImplementation.callBlockingMethod(RpcClient.java:1777)
>       at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:30421)
>       at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1663)
>       ... 14 more
> {noformat}



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)
