[
https://issues.apache.org/jira/browse/CARBONDATA-3045?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
U Shaw updated CARBONDATA-3045:
-------------------------------
Docs Text:
2018-10-26T12:43:02.090+0800 DEBUG query-execution-30
com.facebook.presto.execution.QueryStateMachine Query
20181026_044301_00071_zyny6 failed
java.lang.RuntimeException: Unable to get the Query Model
at
org.apache.carbondata.presto.CarbondataPageSourceProvider.createReader(CarbondataPageSourceProvider.java:114)
at
org.apache.carbondata.presto.CarbondataPageSourceProvider.createPageSource(CarbondataPageSourceProvider.java:85)
at
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorPageSourceProvider.createPageSource(ClassLoaderSafeConnectorPageSourceProvider.java:44)
at
com.facebook.presto.split.PageSourceManager.createPageSource(PageSourceManager.java:56)
at
com.facebook.presto.operator.TableScanOperator.getOutput(TableScanOperator.java:239)
at com.facebook.presto.operator.Driver.processInternal(Driver.java:373)
at
com.facebook.presto.operator.Driver.lambda$processFor$8(Driver.java:282)
at com.facebook.presto.operator.Driver.tryWithLock(Driver.java:672)
at com.facebook.presto.operator.Driver.processFor(Driver.java:276)
at
com.facebook.presto.execution.SqlTaskExecution$DriverSplitRunner.processFor(SqlTaskExecution.java:1053)
at
com.facebook.presto.execution.executor.PrioritizedSplitRunner.process(PrioritizedSplitRunner.java:162)
at
com.facebook.presto.execution.executor.TaskExecutor$TaskRunner.run(TaskExecutor.java:477)
at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.FileNotFoundException: File does not exist:
/user/carbondata/CarbonStore/tpcds_1/store_returns/Fact/Part0/Segment_0/part-0-0_batchno0-0-0-1540489806155.carbondata
at
org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:71)
at
org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:61)
at
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:1828)
at
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1799)
at
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1712)
at
org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:587)
at
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:365)
at
org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at
org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
at sun.reflect.GeneratedConstructorAccessor119.newInstance(Unknown
Source)
at
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at
org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
at
org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
at
org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1228)
at
org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1213)
at
org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1201)
at
org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:306)
at
org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:272)
at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:264)
at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1526)
at
org.apache.hadoop.hdfs.DistributedFileSystem$3.doCall(DistributedFileSystem.java:304)
at
org.apache.hadoop.hdfs.DistributedFileSystem$3.doCall(DistributedFileSystem.java:299)
at
org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
at
org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:312)
at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:769)
at
org.apache.carbondata.core.datastore.filesystem.AbstractDFSCarbonFile.getDataInputStream(AbstractDFSCarbonFile.java:320)
at
org.apache.carbondata.core.datastore.filesystem.AbstractDFSCarbonFile.getDataInputStream(AbstractDFSCarbonFile.java:284)
at
org.apache.carbondata.core.datastore.impl.FileFactory.getDataInputStream(FileFactory.java:125)
at
org.apache.carbondata.core.datastore.impl.FileFactory.getDataInputStream(FileFactory.java:116)
at
org.apache.carbondata.core.reader.ThriftReader.open(ThriftReader.java:101)
at
org.apache.carbondata.core.reader.CarbonHeaderReader.readHeader(CarbonHeaderReader.java:60)
at
org.apache.carbondata.core.util.DataFileFooterConverterV3.readDataFileFooter(DataFileFooterConverterV3.java:63)
at
org.apache.carbondata.core.util.CarbonUtil.getDataFileFooter(CarbonUtil.java:964)
at
org.apache.carbondata.core.util.CarbonUtil.readMetadatFile(CarbonUtil.java:940)
at
org.apache.carbondata.core.scan.executor.impl.AbstractQueryExecutor.getDataBlocks(AbstractQueryExecutor.java:220)
at
org.apache.carbondata.core.scan.executor.impl.AbstractQueryExecutor.initQuery(AbstractQueryExecutor.java:141)
at
org.apache.carbondata.core.scan.executor.impl.AbstractQueryExecutor.getBlockExecutionInfos(AbstractQueryExecutor.java:401)
at
org.apache.carbondata.core.scan.executor.impl.VectorDetailQueryExecutor.execute(VectorDetailQueryExecutor.java:44)
at
org.apache.carbondata.presto.CarbondataPageSourceProvider.createReader(CarbondataPageSourceProvider.java:106)
... 14 more
Caused by: org.apache.hadoop.ipc.RemoteException: File does not exist:
/user/carbondata/CarbonStore/tpcds_1/store_returns/Fact/Part0/Segment_0/part-0-0_batchno0-0-0-1540489806155.carbondata
at
org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:71)
at
org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:61)
at
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:1828)
at
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1799)
at
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1712)
at
org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:587)
at
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:365)
at
org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at
org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
at org.apache.hadoop.ipc.Client.call(Client.java:1475)
at org.apache.hadoop.ipc.Client.call(Client.java:1412)
at
org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
at com.sun.proxy.$Proxy198.getBlockLocations(Unknown Source)
at
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:255)
at sun.reflect.GeneratedMethodAccessor349.invoke(Unknown Source)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at
org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
at
org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
at com.sun.proxy.$Proxy199.getBlockLocations(Unknown Source)
at
org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1226)
... 39 more
2018-10-26T17:48:29.240+0800 DEBUG query-execution-3
com.facebook.presto.event.query.QueryMonitor Error creating explain plan
com.facebook.presto.spi.PrestoException: Unknown transaction ID:
26e028da-fe9a-41e8-964d-c778d8759a77. Possibly expired? Commands ignored until
end of transaction block
at
com.facebook.presto.transaction.InMemoryTransactionManager.unknownTransactionError(InMemoryTransactionManager.java:280)
at
com.facebook.presto.transaction.InMemoryTransactionManager.getTransactionMetadata(InMemoryTransactionManager.java:259)
at
com.facebook.presto.transaction.InMemoryTransactionManager.getCatalogMetadata(InMemoryTransactionManager.java:201)
at
com.facebook.presto.metadata.MetadataManager.getCatalogMetadata(MetadataManager.java:952)
at
com.facebook.presto.metadata.MetadataManager.getMetadata(MetadataManager.java:967)
at
com.facebook.presto.metadata.MetadataManager.getTableStatistics(MetadataManager.java:400)
at
com.facebook.presto.cost.TableScanStatsRule.doCalculate(TableScanStatsRule.java:66)
at
com.facebook.presto.cost.TableScanStatsRule.doCalculate(TableScanStatsRule.java:41)
at
com.facebook.presto.cost.SimpleStatsRule.calculate(SimpleStatsRule.java:39)
at
com.facebook.presto.cost.ComposableStatsCalculator.calculateStats(ComposableStatsCalculator.java:80)
at
com.facebook.presto.cost.ComposableStatsCalculator.calculateStats(ComposableStatsCalculator.java:70)
at
com.facebook.presto.cost.FragmentedPlanStatsCalculator.calculateStats(FragmentedPlanStatsCalculator.java:50)
at
com.facebook.presto.cost.CachingStatsProvider.getStats(CachingStatsProvider.java:70)
at
com.facebook.presto.cost.FragmentedPlanStatsCalculator.calculateRemoteSourceStats(FragmentedPlanStatsCalculator.java:57)
at
com.facebook.presto.cost.FragmentedPlanStatsCalculator.calculateStats(FragmentedPlanStatsCalculator.java:48)
at
com.facebook.presto.cost.CachingStatsProvider.getStats(CachingStatsProvider.java:70)
at
com.facebook.presto.cost.OutputStatsRule.calculate(OutputStatsRule.java:41)
at
com.facebook.presto.cost.OutputStatsRule.calculate(OutputStatsRule.java:27)
at
com.facebook.presto.cost.ComposableStatsCalculator.calculateStats(ComposableStatsCalculator.java:80)
at
com.facebook.presto.cost.ComposableStatsCalculator.calculateStats(ComposableStatsCalculator.java:70)
at
com.facebook.presto.cost.FragmentedPlanStatsCalculator.calculateStats(FragmentedPlanStatsCalculator.java:50)
at
com.facebook.presto.cost.CachingStatsProvider.getStats(CachingStatsProvider.java:70)
at
com.facebook.presto.sql.planner.planPrinter.PlanPrinter$Visitor.isKnownPlanNodeStatsOrCost(PlanPrinter.java:1394)
at java.util.stream.MatchOps$1MatchSink.accept(MatchOps.java:90)
at
java.util.Spliterators$ArraySpliterator.tryAdvance(Spliterators.java:958)
at
java.util.stream.ReferencePipeline.forEachWithCancel(ReferencePipeline.java:126)
at
java.util.stream.AbstractPipeline.copyIntoWithCancel(AbstractPipeline.java:498)
at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:485)
at
java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:471)
at
java.util.stream.MatchOps$MatchOp.evaluateSequential(MatchOps.java:230)
at
java.util.stream.MatchOps$MatchOp.evaluateSequential(MatchOps.java:196)
at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
at
java.util.stream.ReferencePipeline.anyMatch(ReferencePipeline.java:449)
at
com.facebook.presto.sql.planner.planPrinter.PlanPrinter$Visitor.printPlanNodesStatsAndCost(PlanPrinter.java:1384)
at
com.facebook.presto.sql.planner.planPrinter.PlanPrinter$Visitor.visitOutput(PlanPrinter.java:1011)
at
com.facebook.presto.sql.planner.planPrinter.PlanPrinter$Visitor.visitOutput(PlanPrinter.java:517)
at
com.facebook.presto.sql.planner.plan.OutputNode.accept(OutputNode.java:82)
at
com.facebook.presto.sql.planner.planPrinter.PlanPrinter.<init>(PlanPrinter.java:180)
at
com.facebook.presto.sql.planner.planPrinter.PlanPrinter.textLogicalPlan(PlanPrinter.java:203)
at
com.facebook.presto.sql.planner.planPrinter.PlanPrinter.formatFragment(PlanPrinter.java:294)
at
com.facebook.presto.sql.planner.planPrinter.PlanPrinter.textDistributedPlan(PlanPrinter.java:215)
at
com.facebook.presto.event.query.QueryMonitor.queryCompletedEvent(QueryMonitor.java:216)
at
com.facebook.presto.execution.SqlQueryManager.lambda$createQueryInternal$4(SqlQueryManager.java:496)
at
com.facebook.presto.execution.QueryStateMachine.lambda$addQueryInfoStateChangeListener$10(QueryStateMachine.java:791)
at
com.facebook.presto.execution.StateMachine.lambda$fireStateChanged$0(StateMachine.java:222)
at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
was:
2018-10-26T12:43:02.090+0800 DEBUG query-execution-30
com.facebook.presto.execution.QueryStateMachine Query
20181026_044301_00071_zyny6 failed
java.lang.RuntimeException: Unable to get the Query Model
at
org.apache.carbondata.presto.CarbondataPageSourceProvider.createReader(CarbondataPageSourceProvider.java:114)
at
org.apache.carbondata.presto.CarbondataPageSourceProvider.createPageSource(CarbondataPageSourceProvider.java:85)
at
com.facebook.presto.spi.connector.classloader.ClassLoaderSafeConnectorPageSourceProvider.createPageSource(ClassLoaderSafeConnectorPageSourceProvider.java:44)
at
com.facebook.presto.split.PageSourceManager.createPageSource(PageSourceManager.java:56)
at
com.facebook.presto.operator.TableScanOperator.getOutput(TableScanOperator.java:239)
at com.facebook.presto.operator.Driver.processInternal(Driver.java:373)
at
com.facebook.presto.operator.Driver.lambda$processFor$8(Driver.java:282)
at com.facebook.presto.operator.Driver.tryWithLock(Driver.java:672)
at com.facebook.presto.operator.Driver.processFor(Driver.java:276)
at
com.facebook.presto.execution.SqlTaskExecution$DriverSplitRunner.processFor(SqlTaskExecution.java:1053)
at
com.facebook.presto.execution.executor.PrioritizedSplitRunner.process(PrioritizedSplitRunner.java:162)
at
com.facebook.presto.execution.executor.TaskExecutor$TaskRunner.run(TaskExecutor.java:477)
at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.FileNotFoundException: File does not exist:
/user/carbondata/CarbonStore/tpcds_1/store_returns/Fact/Part0/Segment_0/part-0-0_batchno0-0-0-1540489806155.carbondata
at
org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:71)
at
org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:61)
at
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:1828)
at
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1799)
at
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1712)
at
org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:587)
at
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:365)
at
org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at
org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
at sun.reflect.GeneratedConstructorAccessor119.newInstance(Unknown
Source)
at
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at
org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
at
org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
at
org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1228)
at
org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1213)
at
org.apache.hadoop.hdfs.DFSClient.getLocatedBlocks(DFSClient.java:1201)
at
org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:306)
at
org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:272)
at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:264)
at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1526)
at
org.apache.hadoop.hdfs.DistributedFileSystem$3.doCall(DistributedFileSystem.java:304)
at
org.apache.hadoop.hdfs.DistributedFileSystem$3.doCall(DistributedFileSystem.java:299)
at
org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
at
org.apache.hadoop.hdfs.DistributedFileSystem.open(DistributedFileSystem.java:312)
at org.apache.hadoop.fs.FileSystem.open(FileSystem.java:769)
at
org.apache.carbondata.core.datastore.filesystem.AbstractDFSCarbonFile.getDataInputStream(AbstractDFSCarbonFile.java:320)
at
org.apache.carbondata.core.datastore.filesystem.AbstractDFSCarbonFile.getDataInputStream(AbstractDFSCarbonFile.java:284)
at
org.apache.carbondata.core.datastore.impl.FileFactory.getDataInputStream(FileFactory.java:125)
at
org.apache.carbondata.core.datastore.impl.FileFactory.getDataInputStream(FileFactory.java:116)
at
org.apache.carbondata.core.reader.ThriftReader.open(ThriftReader.java:101)
at
org.apache.carbondata.core.reader.CarbonHeaderReader.readHeader(CarbonHeaderReader.java:60)
at
org.apache.carbondata.core.util.DataFileFooterConverterV3.readDataFileFooter(DataFileFooterConverterV3.java:63)
at
org.apache.carbondata.core.util.CarbonUtil.getDataFileFooter(CarbonUtil.java:964)
at
org.apache.carbondata.core.util.CarbonUtil.readMetadatFile(CarbonUtil.java:940)
at
org.apache.carbondata.core.scan.executor.impl.AbstractQueryExecutor.getDataBlocks(AbstractQueryExecutor.java:220)
at
org.apache.carbondata.core.scan.executor.impl.AbstractQueryExecutor.initQuery(AbstractQueryExecutor.java:141)
at
org.apache.carbondata.core.scan.executor.impl.AbstractQueryExecutor.getBlockExecutionInfos(AbstractQueryExecutor.java:401)
at
org.apache.carbondata.core.scan.executor.impl.VectorDetailQueryExecutor.execute(VectorDetailQueryExecutor.java:44)
at
org.apache.carbondata.presto.CarbondataPageSourceProvider.createReader(CarbondataPageSourceProvider.java:106)
... 14 more
Caused by: org.apache.hadoop.ipc.RemoteException: File does not exist:
/user/carbondata/CarbonStore/tpcds_1/store_returns/Fact/Part0/Segment_0/part-0-0_batchno0-0-0-1540489806155.carbondata
at
org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:71)
at
org.apache.hadoop.hdfs.server.namenode.INodeFile.valueOf(INodeFile.java:61)
at
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocationsInt(FSNamesystem.java:1828)
at
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1799)
at
org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getBlockLocations(FSNamesystem.java:1712)
at
org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getBlockLocations(NameNodeRpcServer.java:587)
at
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getBlockLocations(ClientNamenodeProtocolServerSideTranslatorPB.java:365)
at
org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at
org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
at org.apache.hadoop.ipc.Client.call(Client.java:1475)
at org.apache.hadoop.ipc.Client.call(Client.java:1412)
at
org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
at com.sun.proxy.$Proxy198.getBlockLocations(Unknown Source)
at
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getBlockLocations(ClientNamenodeProtocolTranslatorPB.java:255)
at sun.reflect.GeneratedMethodAccessor349.invoke(Unknown Source)
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at
org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
at
org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
at com.sun.proxy.$Proxy199.getBlockLocations(Unknown Source)
at
org.apache.hadoop.hdfs.DFSClient.callGetBlockLocations(DFSClient.java:1226)
... 39 more
> query failed: Unable to get the Query Model
> -------------------------------------------
>
> Key: CARBONDATA-3045
> URL: https://issues.apache.org/jira/browse/CARBONDATA-3045
> Project: CarbonData
> Issue Type: Bug
> Components: presto-integration
> Affects Versions: 1.5.0
> Environment: presto-0.210 hadoop-2.7.2
> Reporter: U Shaw
> Priority: Major
> Fix For: 1.5.0
>
>
> When I restart Presto, the first query works normally. If I then query another
> table, it reports this error. The same query executes normally under spark-shell.
> Create Table SQL Script just like : carbon.sql("create table call_center(
> cc_call_center_sk int, cc_call_center_id string, cc_rec_start_date
> string, cc_rec_end_date string, cc_closed_date_sk int, cc_open_date_sk int,
> cc_name string, cc_class string, cc_employees int, cc_sq_ft int, cc_hours
> string, cc_manager string, cc_mkt_id int, cc_mkt_class string, cc_mkt_desc
> string, cc_market_manager string, cc_division int, cc_division_name
> string, cc_company int, cc_company_name string, cc_street_number string,
> cc_street_name string, cc_street_type string, cc_suite_number string,
> cc_city string, cc_county string, cc_state string, cc_zip string,
> cc_country string, cc_gmt_offset double, cc_tax_percentage double)
> STORED BY 'org.apache.carbondata.format' TBLPROPERTIES
> ('table_blocksize'='300','SORT_COLUMNS'='')")
--
This message was sent by Atlassian JIRA
(v7.6.3#76005)