[ https://issues.apache.org/jira/browse/PHOENIX-2790?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
saurabh agarwal updated PHOENIX-2790:
-------------------------------------
Description:
A Phoenix table becomes inaccessible after trying to create a global index that
contains duplicate column names. The only way to resolve it is to drop the
SYSTEM.CATALOG table and recreate the table.
Here are the details to reproduce the issue.
>create table "WeatherData"
(
"domain" varchar not null,
"source" varchar not null,
"model" varchar not null,
"type" varchar not null,
"frequency" varchar not null,
"locationIdentifier" varchar not null,
"publicationDate" time not null,
"targetDate" time not null,
"maxTemperature" double,
"minTemperature" double,
"meanTemperature" double,
"solarRadiation" double,
constraint pk primary key( "domain", "source", "model", "type", "frequency",
"locationIdentifier", "publicationDate", "targetDate" )
);
> select count(*) from "WeatherData";
+------------------------------------------+
| COUNT(1) |
+------------------------------------------+
| 0 |
+------------------------------------------+
1 row selected (0.57 seconds)
>create index idx on "WeatherData"("maxTemperature") include( "domain",
>"source", "model", "type", "frequency", "locationIdentifier",
>"publicationDate", "targetDate", "minTemperature", "meanTemperature",
>"maxTemperature", "solarRadiation", "solarRadiation");
Error: ERROR 514 (42892): A duplicate column name was detected in the object definition or ALTER TABLE statement. columnName=IDX.0:solarRadiation (state=42892,code=514)
org.apache.phoenix.schema.ColumnAlreadyExistsException: ERROR 514 (42892): A duplicate column name was detected in the object definition or ALTER TABLE statement. columnName=IDX.0:solarRadiation
at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:366)
at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:253)
at org.apache.phoenix.schema.MetaDataClient.createTableInternal(MetaDataClient.java:1928)
at org.apache.phoenix.schema.MetaDataClient.createIndex(MetaDataClient.java:1281)
at org.apache.phoenix.compile.CreateIndexCompiler$1.execute(CreateIndexCompiler.java:95)
at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:304)
at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:296)
at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:294)
at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1254)
at sqlline.Commands.execute(Commands.java:822)
at sqlline.Commands.sql(Commands.java:732)
at sqlline.SqlLine.dispatch(SqlLine.java:808)
at sqlline.SqlLine.begin(SqlLine.java:681)
at sqlline.SqlLine.start(SqlLine.java:398)
at sqlline.SqlLine.main(SqlLine.java:292)
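For comparison, the failure above is triggered purely by "solarRadiation" appearing twice in the INCLUDE list. A de-duplicated sketch of the intended statement (which also omits the indexed "maxTemperature" and the primary-key columns, since Phoenix carries the row key into a global index automatically) would be:
create index idx on "WeatherData"("maxTemperature")
include( "minTemperature", "meanTemperature", "solarRadiation" );
The bug is that the rejected statement does not roll back cleanly: it appears to leave partial index metadata in SYSTEM.CATALOG, which is what breaks every later read of the table.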
After that, the table becomes inaccessible. Any operation on the table throws the
exception below.
> select count(*) from "WeatherData";
16/03/11 17:37:32 WARN ipc.CoprocessorRpcChannel: Call failed on IOException
org.apache.hadoop.hbase.DoNotRetryIOException: org.apache.hadoop.hbase.DoNotRetryIOException: com.bloomberg.ds.WeatherSmallSalt: 35
at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:10505)
at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1875)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java:526)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
... 10 more
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:325)
at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1622)
at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:92)
at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:89)
at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126)
at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:95)
at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:56)
at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.getTable(MetaDataProtos.java:10665)
at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1292)
at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1279)
at org.apache.hadoop.hbase.client.HTable$16.call(HTable.java:1751)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.DoNotRetryIOException): org.apache.hadoop.hbase.DoNotRetryIOException: WeatherData: 35
at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:10505)
at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1875)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java:526)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
... 10 more
at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1226)
at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:213)
at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:287)
at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:32675)
at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1618)
... 13 more
16/03/11 17:37:32 WARN client.HTable: Error calling coprocessor service org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService for row \x00\x00WeatherData
java.util.concurrent.ExecutionException: org.apache.hadoop.hbase.DoNotRetryIOException: org.apache.hadoop.hbase.DoNotRetryIOException: WeatherData: 35
at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:10505)
at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1875)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java:526)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
... 10 more
at java.util.concurrent.FutureTask.report(FutureTask.java:122)
at java.util.concurrent.FutureTask.get(FutureTask.java:192)
at org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1763)
at org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1719)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1026)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1006)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1278)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:415)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:358)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:354)
at org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:413)
at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:288)
at org.apache.phoenix.compile.FromCompiler.getResolverForQuery(FromCompiler.java:189)
at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:358)
at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:339)
at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:247)
at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:242)
at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
at org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:241)
at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1257)
at sqlline.Commands.execute(Commands.java:822)
at sqlline.Commands.sql(Commands.java:732)
at sqlline.SqlLine.dispatch(SqlLine.java:808)
at sqlline.SqlLine.begin(SqlLine.java:681)
at sqlline.SqlLine.start(SqlLine.java:398)
at sqlline.SqlLine.main(SqlLine.java:292)
Caused by: org.apache.hadoop.hbase.DoNotRetryIOException: org.apache.hadoop.hbase.DoNotRetryIOException: WeatherData: 35
at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:10505)
at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1875)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java:526)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
... 10 more
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:325)
at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1622)
at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:92)
at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:89)
at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126)
at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:95)
at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:56)
at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.getTable(MetaDataProtos.java:10665)
at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1292)
at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1279)
at org.apache.hadoop.hbase.client.HTable$16.call(HTable.java:1751)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.DoNotRetryIOException): org.apache.hadoop.hbase.DoNotRetryIOException: WeatherData: 35
at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:10505)
at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1875)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java:526)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
... 10 more
at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1226)
at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:213)
at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:287)
at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:32675)
at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1618)
... 13 more
Error: org.apache.hadoop.hbase.DoNotRetryIOException: WeatherData: 35
at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:10505)
at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1875)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java:526)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
... 10 more (state=08000,code=101)
org.apache.phoenix.exception.PhoenixIOException: org.apache.hadoop.hbase.DoNotRetryIOException: com.bloomberg.ds.WeatherSmallSalt: 35
at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:10505)
at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1875)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java:526)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
... 10 more
at org.apache.phoenix.util.ServerUtil.parseServerException(ServerUtil.java:108)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1043)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1006)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1278)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:415)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:358)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:354)
at org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:413)
at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:288)
at org.apache.phoenix.compile.FromCompiler.getResolverForQuery(FromCompiler.java:189)
at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:358)
at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:339)
at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:247)
at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:242)
at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
at org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:241)
at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1257)
at sqlline.Commands.execute(Commands.java:822)
at sqlline.Commands.sql(Commands.java:732)
at sqlline.SqlLine.dispatch(SqlLine.java:808)
at sqlline.SqlLine.begin(SqlLine.java:681)
at sqlline.SqlLine.start(SqlLine.java:398)
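A diagnostic sketch, assuming the standard Phoenix 4.x SYSTEM.CATALOG layout: the leftover metadata for the failed index can be inspected directly, e.g.
select TABLE_NAME, COLUMN_NAME, COLUMN_FAMILY, COLUMN_COUNT
from SYSTEM.CATALOG where TABLE_NAME = 'IDX';
The ArrayIndexOutOfBoundsException: 35 in the server traces suggests that the COLUMN_COUNT recorded for the index disagrees with the column rows actually stored, so every read of the parent table (which rebuilds the index definition via addIndexToTable) walks off the end of its column array in PTableImpl.init.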
was:
Phoenix table become inaccessible after trying to create global index that
contain duplicate column names. The only way to resolve it is to drop
SYSTEM.CATALOG table and recreate table.
Here is detail to reproduce the issue.
>create table "WeatherData"
(
"domain" varchar not null,
"source" varchar not null,
"model" varchar not null,
"type" varchar not null,
"frequency" varchar not null,
"locationIdentifier" varchar not null,
"publicationDate" time not null,
"targetDate" time not null,
"maxTemperature" double,
"minTemperature" double,
"meanTemperature" double,
"solarRadiation" double,
constraint pk primary key( "domain", "source", "model", "type", "frequency",
"locationIdentifier", "publicationDate", "targetDate" )
);
> select count(*) from "WeatherData";
+------------------------------------------+
| COUNT(1) |
+------------------------------------------+
| 0 |
+------------------------------------------+
1 row selected (0.57 seconds)
>create index idx on "WeatherData"("maxTemperature") include( "domain",
>"source", "model", "type", "frequency", "locationIdentifier",
>"publicationDate", "targetDate", "minTemperature", "meanTemperature",
>"maxTemperature", "solarRadiation", "solarRadiation");
Error: ERROR 514 (42892): A duplicate column name was detected in the object
definition or ALTER TABLE statement. columnName=IDX.0:solarRadiation
(state=42892,code=514)
org.apache.phoenix.schema.ColumnAlreadyExistsException: ERROR 514 (42892): A
duplicate column name was detected in the object definition or ALTER TABLE
statement. columnName=IDX.0:solarRadiation
at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:366)
at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:253)
at
org.apache.phoenix.schema.MetaDataClient.createTableInternal(MetaDataClient.java:1928)
at
org.apache.phoenix.schema.MetaDataClient.createIndex(MetaDataClient.java:1281)
at
org.apache.phoenix.compile.CreateIndexCompiler$1.execute(CreateIndexCompiler.java:95)
at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:304)
at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:296)
at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
at
org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:294)
at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1254)
at sqlline.Commands.execute(Commands.java:822)
at sqlline.Commands.sql(Commands.java:732)
at sqlline.SqlLine.dispatch(SqlLine.java:808)
at sqlline.SqlLine.begin(SqlLine.java:681)
at sqlline.SqlLine.start(SqlLine.java:398)
at sqlline.SqlLine.main(SqlLine.java:292)
After that the table become inaccessible. Any operation on the table throw the
exception below.
> select count(*) from "WeatherData";
16/03/11 17:37:32 WARN ipc.CoprocessorRpcChannel: Call failed on IOException
org.apache.hadoop.hbase.DoNotRetryIOException:
org.apache.hadoop.hbase.DoNotRetryIOException: com.bloomb
erg.ds.WeatherSmallSalt: 35
at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
at
org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataPr
otos.java:10505)
at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
at
org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:187
5)
at
org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
at
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(Cl
ientProtos.java:32209)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java
:526)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
... 10 more
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.j
ava:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
at
org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
at
org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
at
org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:325)
at
org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1622)
at
org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.ja
va:92)
at
org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.ja
va:89)
at
org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126)
at
org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcC
hannel.java:95)
at
org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:56)
at
org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.getTable(MetaDat
aProtos.java:10665)
at
org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:
1292)
at
org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:
1279)
at org.apache.hadoop.hbase.client.HTable$16.call(HTable.java:1751)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Caused by:
org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.DoNotRetryIOExc
eption): org.apache.hadoop.hbase.DoNotRetryIOException:
WeatherData: 35
at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
at
org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataPr
otos.java:10505)
at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
at
org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:187
5)
at
org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
at
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(Cl
ientProtos.java:32209)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java
:526)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
... 10 more
at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1226)
at
org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:213)
at
org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMe
thod(AbstractRpcClient.java:287)
at
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execServic
e(ClientProtos.java:32675)
at
org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1618)
... 13 more
16/03/11 17:37:32 WARN client.HTable: Error calling coprocessor service
org.apache.phoenix.coprocessor.g enerated.MetaDataProtos$MetaDataService for
row \x00\x00WeatherData
java.util.concurrent.ExecutionException:
org.apache.hadoop.hbase.DoNotRetryIOException: org.apache.hadoo
p.hbase.DoNotRetryIOException: WeatherData: 35
at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
at
org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataPr
otos.java:10505)
at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
at
org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:187
5)
at
org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
at
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(Cl
ientProtos.java:32209)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java
:526)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
... 10 more
at java.util.concurrent.FutureTask.report(FutureTask.java:122)
at java.util.concurrent.FutureTask.get(FutureTask.java:192)
at org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1763)
at org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1719)
at
org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryS
ervicesImpl.java:1026)
at
org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryS
ervicesImpl.java:1006)
at
org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.jav
a:1278)
at
org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:415)
at
org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:358)
at
org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:354)
at
org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:4
13)
at
org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:28
8)
at
org.apache.phoenix.compile.FromCompiler.getResolverForQuery(FromCompiler.java:189)
at
org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStateme
nt.java:358)
at
org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStateme
nt.java:339)
at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:247)
at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:242)
at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
at
org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:241)
at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1257)
at sqlline.Commands.execute(Commands.java:822)
at sqlline.Commands.sql(Commands.java:732)
at sqlline.SqlLine.dispatch(SqlLine.java:808)
at sqlline.SqlLine.begin(SqlLine.java:681)
at sqlline.SqlLine.start(SqlLine.java:398)
at sqlline.SqlLine.main(SqlLine.java:292)
Caused by: org.apache.hadoop.hbase.DoNotRetryIOException:
org.apache.hadoop.hbase.DoNotRetryIOException: WeatherData: 35
at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
at
org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataPr
otos.java:10505)
at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
at
org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:187
5)
at
org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
at
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(Cl
ientProtos.java:32209)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java
:526)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
... 10 more
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.j
ava:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
at
org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
at
org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
at
org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:325)
at
org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1622)
at
org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.ja
va:92)
at
org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.ja
va:89)
at
org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126)
at
org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcC
hannel.java:95)
at
org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:56)
at
org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.getTable(MetaDat
aProtos.java:10665)
at
org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:
1292)
at
org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:
1279)
at org.apache.hadoop.hbase.client.HTable$16.call(HTable.java:1751)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Caused by:
org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.DoNotRetryIOExc
eption): org.apache.hadoop.hbase.DoNotRetryIOException: WeatherData: 35
at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
at
org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataPr
otos.java:10505)
at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
at
org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:187
5)
at
org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
at
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(Cl
ientProtos.java:32209)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java
:526)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
... 10 more
at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1226)
at
org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:213)
at
org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMe
thod(AbstractRpcClient.java:287)
at
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execServic
e(ClientProtos.java:32675)
at
org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1618)
... 13 more
Error: org.apache.hadoop.hbase.DoNotRetryIOException: WeatherData: 35
at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
at
org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataPr
otos.java:10505)
at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
at
org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:187
5)
at
org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
at
org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(Cl
ientProtos.java:32209)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java
:526)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643
)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
... 10 more (state=08000,code=101)
org.apache.phoenix.exception.PhoenixIOException:
org.apache.hadoop.hbase.DoNotRetryIOException: com.bloo
mberg.ds.WeatherSmallSalt: 35
at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
at
org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
at
org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataPr
otos.java:10505)
at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
at
org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1875)
at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java:526)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
... 10 more
at org.apache.phoenix.util.ServerUtil.parseServerException(ServerUtil.java:108)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1043)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1006)
at org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1278)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:415)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:358)
at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:354)
at org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:413)
at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:288)
at org.apache.phoenix.compile.FromCompiler.getResolverForQuery(FromCompiler.java:189)
at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:358)
at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:339)
at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:247)
at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:242)
at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
at org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:241)
at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1257)
at sqlline.Commands.execute(Commands.java:822)
at sqlline.Commands.sql(Commands.java:732)
at sqlline.SqlLine.dispatch(SqlLine.java:808)
at sqlline.SqlLine.begin(SqlLine.java:681)
at sqlline.SqlLine.start(SqlLine.java:398)
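For reference, a minimal rework of the failing statement with the duplicate
INCLUDE entry removed (a sketch of the presumed intent, not a verified fix
for the corruption). In a Phoenix global index, the data table's primary key
columns and the indexed column itself already form part of the index row
key, so only the remaining value columns need to be covered:
>create index idx on "WeatherData"("maxTemperature") include(
>"minTemperature", "meanTemperature", "solarRadiation" );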
> Phoenix table becomes inaccessible after trying to create a global index
> that contains duplicate column names.
> -----------------------------------------------------------------------------------------------------------
>
> Key: PHOENIX-2790
> URL: https://issues.apache.org/jira/browse/PHOENIX-2790
> Project: Phoenix
> Issue Type: Bug
> Affects Versions: 4.4.0
> Reporter: saurabh agarwal
>
> The Phoenix table becomes inaccessible after trying to create a global
> index that contains duplicate column names. The only way to resolve it is
> to drop the SYSTEM.CATALOG table and recreate the table.
> Here are the details to reproduce the issue.
> >create table "WeatherData"
> (
> "domain" varchar not null,
> "source" varchar not null,
> "model" varchar not null,
> "type" varchar not null,
> "frequency" varchar not null,
> "locationIdentifier" varchar not null,
> "publicationDate" time not null,
> "targetDate" time not null,
> "maxTemperature" double,
> "minTemperature" double,
> "meanTemperature" double,
> "solarRadiation" double,
> constraint pk primary key( "domain", "source", "model", "type", "frequency",
> "locationIdentifier", "publicationDate", "targetDate" )
> );
> > select count(*) from "WeatherData";
> +------------------------------------------+
> | COUNT(1) |
> +------------------------------------------+
> | 0 |
> +------------------------------------------+
> 1 row selected (0.57 seconds)
> >create index idx on "WeatherData"("maxTemperature") include( "domain",
> >"source", "model", "type", "frequency", "locationIdentifier",
> >"publicationDate", "targetDate", "minTemperature", "meanTemperature",
> >"maxTemperature", "solarRadiation", "solarRadiation");
> Error: ERROR 514 (42892): A duplicate column name was detected in the object
> definition or ALTER TABLE statement. columnName=IDX.0:solarRadiation
> (state=42892,code=514)
> org.apache.phoenix.schema.ColumnAlreadyExistsException: ERROR 514 (42892): A
> duplicate column name was detected in the object definition or ALTER TABLE
> statement. columnName=IDX.0:solarRadiation
> at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:366)
> at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
> at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:253)
> at org.apache.phoenix.schema.MetaDataClient.createTableInternal(MetaDataClient.java:1928)
> at org.apache.phoenix.schema.MetaDataClient.createIndex(MetaDataClient.java:1281)
> at org.apache.phoenix.compile.CreateIndexCompiler$1.execute(CreateIndexCompiler.java:95)
> at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:304)
> at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:296)
> at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
> at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:294)
> at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1254)
> at sqlline.Commands.execute(Commands.java:822)
> at sqlline.Commands.sql(Commands.java:732)
> at sqlline.SqlLine.dispatch(SqlLine.java:808)
> at sqlline.SqlLine.begin(SqlLine.java:681)
> at sqlline.SqlLine.start(SqlLine.java:398)
> at sqlline.SqlLine.main(SqlLine.java:292)
> After that, the table becomes inaccessible. Any operation on the table
> throws the exception below.
> > select count(*) from "WeatherData";
> 16/03/11 17:37:32 WARN ipc.CoprocessorRpcChannel: Call failed on IOException
> org.apache.hadoop.hbase.DoNotRetryIOException:
> org.apache.hadoop.hbase.DoNotRetryIOException: com.bloomberg.ds.WeatherSmallSalt: 35
> at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
> at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:10505)
> at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
> at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1875)
> at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
> at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
> at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
> at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
> at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
> at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
> at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
> at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
> at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java:526)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
> ... 10 more
> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
> at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
> at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
> at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:325)
> at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1622)
> at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:92)
> at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:89)
> at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126)
> at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:95)
> at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:56)
> at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.getTable(MetaDataProtos.java:10665)
> at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1292)
> at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1279)
> at org.apache.hadoop.hbase.client.HTable$16.call(HTable.java:1751)
> at java.util.concurrent.FutureTask.run(FutureTask.java:266)
> at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
> at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.DoNotRetryIOException):
> org.apache.hadoop.hbase.DoNotRetryIOException: WeatherData: 35
> at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
> at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:10505)
> at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
> at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1875)
> at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
> at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
> at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
> at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
> at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
> at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
> at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
> at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
> at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java:526)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
> ... 10 more
> at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1226)
> at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:213)
> at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:287)
> at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:32675)
> at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1618)
> ... 13 more
> 16/03/11 17:37:32 WARN client.HTable: Error calling coprocessor service
> org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService for
> row \x00\x00WeatherData
> java.util.concurrent.ExecutionException:
> org.apache.hadoop.hbase.DoNotRetryIOException:
> org.apache.hadoop.hbase.DoNotRetryIOException: WeatherData: 35
> at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
> at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:10505)
> at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
> at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1875)
> at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
> at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
> at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
> at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
> at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
> at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
> at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
> at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
> at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java:526)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
> ... 10 more
> at java.util.concurrent.FutureTask.report(FutureTask.java:122)
> at java.util.concurrent.FutureTask.get(FutureTask.java:192)
> at org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1763)
> at org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1719)
> at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1026)
> at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1006)
> at org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1278)
> at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:415)
> at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:358)
> at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:354)
> at org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:413)
> at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:288)
> at org.apache.phoenix.compile.FromCompiler.getResolverForQuery(FromCompiler.java:189)
> at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:358)
> at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:339)
> at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:247)
> at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:242)
> at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
> at org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:241)
> at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1257)
> at sqlline.Commands.execute(Commands.java:822)
> at sqlline.Commands.sql(Commands.java:732)
> at sqlline.SqlLine.dispatch(SqlLine.java:808)
> at sqlline.SqlLine.begin(SqlLine.java:681)
> at sqlline.SqlLine.start(SqlLine.java:398)
> at sqlline.SqlLine.main(SqlLine.java:292)
> Caused by: org.apache.hadoop.hbase.DoNotRetryIOException:
> org.apache.hadoop.hbase.DoNotRetryIOException: WeatherData: 35
> at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
> at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:10505)
> at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
> at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1875)
> at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
> at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
> at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
> at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
> at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
> at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
> at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
> at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
> at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java:526)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
> ... 10 more
> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
> at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
> at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
> at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:325)
> at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1622)
> at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:92)
> at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:89)
> at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126)
> at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:95)
> at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:56)
> at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.getTable(MetaDataProtos.java:10665)
> at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1292)
> at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1279)
> at org.apache.hadoop.hbase.client.HTable$16.call(HTable.java:1751)
> at java.util.concurrent.FutureTask.run(FutureTask.java:266)
> at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
> at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.DoNotRetryIOException):
> org.apache.hadoop.hbase.DoNotRetryIOException: WeatherData: 35
> at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
> at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:10505)
> at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
> at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1875)
> at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
> at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
> at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
> at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
> at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
> at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
> at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
> at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
> at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java:526)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
> ... 10 more
> at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1226)
> at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:213)
> at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:287)
> at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:32675)
> at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1618)
> ... 13 more
> Error: org.apache.hadoop.hbase.DoNotRetryIOException: WeatherData: 35
> at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
> at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:10505)
> at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
> at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1875)
> at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
> at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
> at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
> at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
> at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
> at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
> at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
> at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
> at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java:526)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
> ... 10 more (state=08000,code=101)
> org.apache.phoenix.exception.PhoenixIOException:
> org.apache.hadoop.hbase.DoNotRetryIOException: com.bloomberg.ds.WeatherSmallSalt: 35
> at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:447)
> at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:10505)
> at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7435)
> at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1875)
> at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1857)
> at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
> at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2114)
> at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
> at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
> at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.ArrayIndexOutOfBoundsException: 35
> at org.apache.phoenix.schema.PTableImpl.init(PTableImpl.java:354)
> at org.apache.phoenix.schema.PTableImpl.<init>(PTableImpl.java:276)
> at org.apache.phoenix.schema.PTableImpl.makePTable(PTableImpl.java:265)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:826)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.addIndexToTable(MetaDataEndpointImpl.java:526)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:803)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.buildTable(MetaDataEndpointImpl.java:462)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1696)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:1643)
> at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:430)
> ... 10 more
> at org.apache.phoenix.util.ServerUtil.parseServerException(ServerUtil.java:108)
> at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1043)
> at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1006)
> at org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1278)
> at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:415)
> at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:358)
> at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:354)
> at org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:413)
> at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:288)
> at org.apache.phoenix.compile.FromCompiler.getResolverForQuery(FromCompiler.java:189)
> at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:358)
> at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:339)
> at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:247)
> at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:242)
> at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
> at org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:241)
> at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1257)
> at sqlline.Commands.execute(Commands.java:822)
> at sqlline.Commands.sql(Commands.java:732)
> at sqlline.SqlLine.dispatch(SqlLine.java:808)
> at sqlline.SqlLine.begin(SqlLine.java:681)
> at sqlline.SqlLine.start(SqlLine.java:398)
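Since the only recovery found so far is dropping SYSTEM.CATALOG and
recreating the table, one way to confirm the corruption before doing that is
to query the catalog directly for the column rows Phoenix recorded for the
index. A hedged reading of the ArrayIndexOutOfBoundsException: 35 above is
that the stored column count includes the duplicate INCLUDE entry while only
the distinct column rows survive, so rebuilding the PTable walks past the
end of its column array. The following diagnostic is a sketch that assumes
the default schema and the standard SYSTEM.CATALOG layout:
>select TABLE_NAME, COLUMN_NAME, COLUMN_FAMILY, ORDINAL_POSITION
>from SYSTEM.CATALOG
>where TABLE_NAME = 'IDX' and COLUMN_NAME is not null
>order by ORDINAL_POSITION;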
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)