[
https://issues.apache.org/jira/browse/DRILL-6623?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16551400#comment-16551400
]
Robert Hou commented on DRILL-6623:
-----------------------------------
A similar error message occurs when running this query on MapR Drill 1.13.
[#14] Query failed:
oadd.org.apache.drill.common.exceptions.UserRemoteException: SYSTEM ERROR:
IndexOutOfBoundsException: index: -8373248, length: 0 (expected: range(0,
32768))
Fragment 0:0
[Error Id: b3a702b8-1a46-4e8c-8d86-a7d4bda88acd on qa-node186.qa.lab:31010]
(java.lang.IndexOutOfBoundsException) index: -8373248, length: 0 (expected:
range(0, 32768))
io.netty.buffer.AbstractByteBuf.checkIndex0():1125
io.netty.buffer.AbstractByteBuf.checkIndex():1120
io.netty.buffer.UnsafeByteBufUtil.setBytes():349
io.netty.buffer.PooledUnsafeDirectByteBuf.setBytes():199
io.netty.buffer.WrappedByteBuf.setBytes():397
io.netty.buffer.UnsafeDirectLittleEndian.setBytes():37
io.netty.buffer.DrillBuf.setBytes():767
org.apache.drill.exec.vector.VarCharVector$Mutator.setSafe():577
org.apache.drill.exec.vector.NullableVarCharVector$MutatorImpl.fillEmpties():929
org.apache.drill.exec.vector.NullableVarCharVector$MutatorImpl.setValueCount():1057
org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setValueCount():272
org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.doWork():205
org.apache.drill.exec.record.AbstractSingleRecordBatch.innerNext():97
org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.innerNext():134
org.apache.drill.exec.record.AbstractRecordBatch.next():164
org.apache.drill.exec.record.AbstractRecordBatch.next():119
org.apache.drill.exec.record.AbstractRecordBatch.next():109
org.apache.drill.exec.record.AbstractSingleRecordBatch.innerNext():51
org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.innerNext():134
org.apache.drill.exec.record.AbstractRecordBatch.next():164
org.apache.drill.exec.physical.impl.BaseRootExec.next():105
org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.innerNext():83
org.apache.drill.exec.physical.impl.BaseRootExec.next():95
org.apache.drill.exec.work.fragment.FragmentExecutor$1.run():233
org.apache.drill.exec.work.fragment.FragmentExecutor$1.run():226
java.security.AccessController.doPrivileged():-2
javax.security.auth.Subject.doAs():422
org.apache.hadoop.security.UserGroupInformation.doAs():1633
org.apache.drill.exec.work.fragment.FragmentExecutor.run():226
org.apache.drill.common.SelfCleaningRunnable.run():38
java.util.concurrent.ThreadPoolExecutor.runWorker():1149
java.util.concurrent.ThreadPoolExecutor$Worker.run():624
java.lang.Thread.run():748
at
oadd.org.apache.drill.exec.rpc.user.QueryResultHandler.resultArrived(QueryResultHandler.java:123)
at
oadd.org.apache.drill.exec.rpc.user.UserClient.handle(UserClient.java:422)
at
oadd.org.apache.drill.exec.rpc.user.UserClient.handle(UserClient.java:96)
at
oadd.org.apache.drill.exec.rpc.RpcBus$InboundHandler.decode(RpcBus.java:274)
at
oadd.org.apache.drill.exec.rpc.RpcBus$InboundHandler.decode(RpcBus.java:244)
at
oadd.io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:88)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:356)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:342)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:335)
at
oadd.io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:356)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:342)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:335)
at
oadd.io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:356)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:342)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:335)
at
oadd.io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:312)
at
oadd.io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:286)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:356)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:342)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:335)
at
oadd.io.netty.channel.ChannelInboundHandlerAdapter.channelRead(ChannelInboundHandlerAdapter.java:86)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:356)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:342)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:335)
at
oadd.io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1294)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:356)
at
oadd.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:342)
at
oadd.io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:911)
at
oadd.io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
at
oadd.io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:645)
at
oadd.io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:580)
at
oadd.io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:497)
at oadd.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:459)
at
oadd.io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:131)
at java.lang.Thread.run(Thread.java:748)
> Drill encounters exception IndexOutOfBoundsException: writerIndex: -8373248
> (expected: readerIndex(0) <= writerIndex <= capacity(32768))
> ----------------------------------------------------------------------------------------------------------------------------------------
>
> Key: DRILL-6623
> URL: https://issues.apache.org/jira/browse/DRILL-6623
> Project: Apache Drill
> Issue Type: Bug
> Components: Execution - Relational Operators
> Affects Versions: 1.14.0
> Reporter: Robert Hou
> Assignee: Pritesh Maker
> Priority: Major
> Attachments: 24aedae9-d1f3-8e12-2e1f-0479915c61b1.sys.drill,
> drillbit.log.61b1
>
>
> This is the query:
> alter session set `planner.width.max_per_node` = 1;
> alter session set `planner.width.max_per_query` = 1;
> select * from (
> select
> split_part(CharacterValuea, '8', 1) CharacterValuea,
> split_part(CharacterValueb, '8', 1) CharacterValueb,
> split_part(CharacterValuec, '8', 2) CharacterValuec,
> split_part(CharacterValued, '8', 3) CharacterValued,
> split_part(CharacterValuee, 'b', 1) CharacterValuee
> from (select * from
> dfs.`/drill/testdata/batch_memory/character5_1MB_1GB.parquet` order by
> CharacterValuea) d where d.CharacterValuea = '1234567890123110');
> The query works with a smaller table.
> This is the stack trace:
> {noformat}
> 2018-07-19 16:59:48,803 [24aedae9-d1f3-8e12-2e1f-0479915c61b1:frag:0:0] ERROR
> o.a.d.e.w.fragment.FragmentExecutor - SYSTEM ERROR:
> IndexOutOfBoundsException: writerIndex: -8373248 (expected: readerIndex(0) <=
> writerIndex <= capacity(32768))
> Fragment 0:0
> [Error Id: edc75560-41ca-4fdd-907f-060be1795786 on qa-node186.qa.lab:31010]
> org.apache.drill.common.exceptions.UserException: SYSTEM ERROR:
> IndexOutOfBoundsException: writerIndex: -8373248 (expected: readerIndex(0) <=
> writerIndex <= capacity(32768))
> Fragment 0:0
> [Error Id: edc75560-41ca-4fdd-907f-060be1795786 on qa-node186.qa.lab:31010]
> at
> org.apache.drill.common.exceptions.UserException$Builder.build(UserException.java:633)
> ~[drill-common-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.work.fragment.FragmentExecutor.sendFinalState(FragmentExecutor.java:361)
> [drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.work.fragment.FragmentExecutor.cleanup(FragmentExecutor.java:216)
> [drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:327)
> [drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.common.SelfCleaningRunnable.run(SelfCleaningRunnable.java:38)
> [drill-common-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
> [na:1.8.0_161]
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
> [na:1.8.0_161]
> at java.lang.Thread.run(Thread.java:748) [na:1.8.0_161]
> Caused by: java.lang.IndexOutOfBoundsException: writerIndex: -8373248
> (expected: readerIndex(0) <= writerIndex <= capacity(32768))
> at
> io.netty.buffer.AbstractByteBuf.writerIndex(AbstractByteBuf.java:104)
> ~[netty-buffer-4.0.48.Final.jar:4.0.48.Final]
> at
> org.apache.drill.exec.vector.VarCharVector$Mutator.setValueCount(VarCharVector.java:810)
> ~[vector-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.vector.NullableVarCharVector$Mutator.setValueCount(NullableVarCharVector.java:641)
> ~[vector-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.setValueCount(ProjectRecordBatch.java:329)
> ~[drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.doWork(ProjectRecordBatch.java:242)
> ~[drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.record.AbstractUnaryRecordBatch.innerNext(AbstractUnaryRecordBatch.java:117)
> ~[drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.innerNext(ProjectRecordBatch.java:142)
> ~[drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.record.AbstractRecordBatch.next(AbstractRecordBatch.java:172)
> ~[drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.record.AbstractRecordBatch.next(AbstractRecordBatch.java:119)
> ~[drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.record.AbstractRecordBatch.next(AbstractRecordBatch.java:109)
> ~[drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.record.AbstractUnaryRecordBatch.innerNext(AbstractUnaryRecordBatch.java:63)
> ~[drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.physical.impl.project.ProjectRecordBatch.innerNext(ProjectRecordBatch.java:142)
> ~[drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.record.AbstractRecordBatch.next(AbstractRecordBatch.java:172)
> ~[drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.physical.impl.BaseRootExec.next(BaseRootExec.java:103)
> ~[drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.physical.impl.ScreenCreator$ScreenRoot.innerNext(ScreenCreator.java:83)
> ~[drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.physical.impl.BaseRootExec.next(BaseRootExec.java:93)
> ~[drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.work.fragment.FragmentExecutor$1.run(FragmentExecutor.java:294)
> ~[drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at
> org.apache.drill.exec.work.fragment.FragmentExecutor$1.run(FragmentExecutor.java:281)
> ~[drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> at java.security.AccessController.doPrivileged(Native Method)
> ~[na:1.8.0_161]
> at javax.security.auth.Subject.doAs(Subject.java:422) ~[na:1.8.0_161]
> at
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1595)
> ~[hadoop-common-2.7.0-mapr-1707.jar:na]
> at
> org.apache.drill.exec.work.fragment.FragmentExecutor.run(FragmentExecutor.java:281)
> [drill-java-exec-1.14.0-SNAPSHOT.jar:1.14.0-SNAPSHOT]
> ... 4 common frames omitted
> {noformat}
> This is the explain plan:
> {noformat}
> | 00-00 Screen : rowType = RecordType(ANY CharacterValuea, ANY
> CharacterValueb, ANY CharacterValuec, ANY CharacterValued, ANY
> CharacterValuee): rowcount = 9216000.0, cumulative cost = {5.815296E8 rows,
> 8.786270178575306E9 cpu, 0.0 io, 1.00663296E12 network, 9.8304E8 memory}, id
> = 3374
> 00-01 ProjectAllowDup(CharacterValuea=[$0], CharacterValueb=[$1],
> CharacterValuec=[$2], CharacterValued=[$3], CharacterValuee=[$4]) : rowType =
> RecordType(ANY CharacterValuea, ANY CharacterValueb, ANY CharacterValuec, ANY
> CharacterValued, ANY CharacterValuee): rowcount = 9216000.0, cumulative cost
> = {5.80608E8 rows, 8.785348578575306E9 cpu, 0.0 io, 1.00663296E12 network,
> 9.8304E8 memory}, id = 3373
> 00-02 Project(CharacterValuea=[SPLIT_PART(ITEM($0, 'CharacterValuea'),
> '8', 1)], CharacterValueb=[SPLIT_PART(ITEM($0, 'CharacterValueb'), '8', 1)],
> CharacterValuec=[SPLIT_PART(ITEM($0, 'CharacterValuec'), '8', 2)],
> CharacterValued=[SPLIT_PART(ITEM($0, 'CharacterValued'), '8', 3)],
> CharacterValuee=[SPLIT_PART(ITEM($0, 'CharacterValuee'), 'b', 1)]) : rowType
> = RecordType(ANY CharacterValuea, ANY CharacterValueb, ANY CharacterValuec,
> ANY CharacterValued, ANY CharacterValuee): rowcount = 9216000.0, cumulative
> cost = {5.71392E8 rows, 8.739268578575306E9 cpu, 0.0 io, 1.00663296E12
> network, 9.8304E8 memory}, id = 3372
> 00-03 SelectionVectorRemover : rowType = RecordType(DYNAMIC_STAR
> T3¦¦**): rowcount = 9216000.0, cumulative cost = {5.62176E8 rows,
> 8.554948578575305E9 cpu, 0.0 io, 1.00663296E12 network, 9.8304E8 memory}, id
> = 3371
> 00-04 Filter(condition=[=(ITEM($0, 'CharacterValuea'),
> '1234567890123110')]) : rowType = RecordType(DYNAMIC_STAR T3¦¦**): rowcount =
> 9216000.0, cumulative cost = {5.5296E8 rows, 8.545732578575305E9 cpu, 0.0 io,
> 1.00663296E12 network, 9.8304E8 memory}, id = 3370
> 00-05 Project(T3¦¦**=[$0]) : rowType = RecordType(DYNAMIC_STAR
> T3¦¦**): rowcount = 6.144E7, cumulative cost = {4.9152E8 rows,
> 8.263108578575305E9 cpu, 0.0 io, 1.00663296E12 network, 9.8304E8 memory}, id
> = 3369
> 00-06 SingleMergeExchange(sort0=[1]) : rowType =
> RecordType(DYNAMIC_STAR T3¦¦**, ANY CharacterValuea): rowcount = 6.144E7,
> cumulative cost = {4.3008E8 rows, 8.201668578575305E9 cpu, 0.0 io,
> 1.00663296E12 network, 9.8304E8 memory}, id = 3368
> 01-01 OrderedMuxExchange(sort0=[1]) : rowType =
> RecordType(DYNAMIC_STAR T3¦¦**, ANY CharacterValuea): rowcount = 6.144E7,
> cumulative cost = {3.6864E8 rows, 7.710148578575305E9 cpu, 0.0 io,
> 5.0331648E11 network, 9.8304E8 memory}, id = 3367
> 02-01 SelectionVectorRemover : rowType =
> RecordType(DYNAMIC_STAR T3¦¦**, ANY CharacterValuea): rowcount = 6.144E7,
> cumulative cost = {3.072E8 rows, 7.648708578575305E9 cpu, 0.0 io,
> 5.0331648E11 network, 9.8304E8 memory}, id = 3366
> 02-02 Sort(sort0=[$1], dir0=[ASC]) : rowType =
> RecordType(DYNAMIC_STAR T3¦¦**, ANY CharacterValuea): rowcount = 6.144E7,
> cumulative cost = {2.4576E8 rows, 7.587268578575305E9 cpu, 0.0 io,
> 5.0331648E11 network, 9.8304E8 memory}, id = 3365
> 02-03 HashToRandomExchange(dist0=[[$1]]) : rowType =
> RecordType(DYNAMIC_STAR T3¦¦**, ANY CharacterValuea): rowcount = 6.144E7,
> cumulative cost = {1.8432E8 rows, 1.2288E9 cpu, 0.0 io, 5.0331648E11 network,
> 0.0 memory}, id = 3364
> 03-01 Project(T3¦¦**=[$0], CharacterValuea=[$1]) :
> rowType = RecordType(DYNAMIC_STAR T3¦¦**, ANY CharacterValuea): rowcount =
> 6.144E7, cumulative cost = {1.2288E8 rows, 2.4576E8 cpu, 0.0 io, 0.0 network,
> 0.0 memory}, id = 3363
> 03-02 Scan(groupscan=[ParquetGroupScan
> [entries=[ReadEntryWithPath
> [path=maprfs:///drill/testdata/batch_memory/character5_1MB_1GB.parquet]],
> selectionRoot=maprfs:/drill/testdata/batch_memory/character5_1MB_1GB.parquet,
> numFiles=1, numRowGroups=25, usedMetadataFile=false, columns=[`**`]]]) :
> rowType = RecordType(DYNAMIC_STAR **, ANY CharacterValuea): rowcount =
> 6.144E7, cumulative cost = {6.144E7 rows, 1.2288E8 cpu, 0.0 io, 0.0 network,
> 0.0 memory}, id = 3362
> {noformat}
> The table can be found in /home/MAPRTECH/qa/rhou/drill6623.
> I have attached the profile and the drillbit.log.
> This was encountered on the Apache Drill release with the latest code as of
> July 19. This is the commit id:
> | 1.14.0-SNAPSHOT | 85344abd1ddb73448bdf67cdc6883cb98795a910 | DRILL-6614:
> Allow usage of MapRDBFormatPlugin for HiveStoragePlugin | 19.07.2018 @
> 10:39:36 PDT | [email protected] | 19.07.2018 @ 15:44:52 PDT |
--
This message was sent by Atlassian JIRA
(v7.6.3#76005)