Hello -- an Avro error like that usually indicates a mismatch between your record and the schema the bolt is expecting. (Note also that your stack trace shows storm-hdfs-1.0.1.jar even though you mention 1.0.2 — worth double-checking which version is actually on the classpath.) Can you share your schema and the record that produces this error?
On Fri, Nov 25, 2016 at 5:08 PM Yi Jiang <yi.ji...@ubisoft.com> wrote: > Hello, Guys > > I am recently using the storm-hdfs bolt 1.0.2, our storm-core is also > 1.0.2, and when I try to send the generic record to the bolt, it gives me > the following error. It is from the library, I am not sure who can answer > me how to fix that. FYI, the storm-hdfs is generating the perfect csv > format. Thank you so much. > > > > 20024 > [Thread-103-dl_raw_ubiservices_tracking.context_stop_inc_avro_bolt-executor[22 > 22]] ERROR o.a.s.util - Async loop died! > > java.lang.RuntimeException: > org.apache.avro.file.DataFileWriter$AppendWriteException: > java.lang.ArrayIndexOutOfBoundsException: 15 > > at > org.apache.storm.utils.DisruptorQueue.consumeBatchToCursor(DisruptorQueue.java:464) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.utils.DisruptorQueue.consumeBatchWhenAvailable(DisruptorQueue.java:430) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.disruptor$consume_batch_when_available.invoke(disruptor.clj:73) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.daemon.executor$fn__8058$fn__8071$fn__8124.invoke(executor.clj:850) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.util$async_loop$fn__624.invoke(util.clj:484) > [storm-core-1.0.2.jar:1.0.2] > > at clojure.lang.AFn.run(AFn.java:22) [clojure-1.7.0.jar:?] 
> > at java.lang.Thread.run(Thread.java:745) [?:1.7.0_79] > > Caused by: org.apache.avro.file.DataFileWriter$AppendWriteException: > java.lang.ArrayIndexOutOfBoundsException: 15 > > at > org.apache.avro.file.DataFileWriter.append(DataFileWriter.java:308) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.storm.hdfs.bolt.AvroGenericRecordBolt.writeTuple(AvroGenericRecordBolt.java:105) > ~[storm-hdfs-1.0.1.jar:1.0.1] > > at > org.apache.storm.hdfs.bolt.AbstractHdfsBolt.execute(AbstractHdfsBolt.java:154) > ~[storm-hdfs-1.0.1.jar:1.0.1] > > at > org.apache.storm.daemon.executor$fn__8058$tuple_action_fn__8060.invoke(executor.clj:731) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.daemon.executor$mk_task_receiver$fn__7979.invoke(executor.clj:464) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.disruptor$clojure_handler$reify__7492.onEvent(disruptor.clj:40) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.utils.DisruptorQueue.consumeBatchToCursor(DisruptorQueue.java:451) > ~[storm-core-1.0.2.jar:1.0.2] > > ... 
6 more > > Caused by: java.lang.ArrayIndexOutOfBoundsException: 15 > > at > org.apache.avro.generic.GenericData$Record.get(GenericData.java:210) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.avro.generic.GenericData.getField(GenericData.java:658) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.avro.generic.GenericData.getField(GenericData.java:673) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.avro.generic.GenericDatumWriter.writeField(GenericDatumWriter.java:151) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.avro.generic.GenericDatumWriter.writeRecord(GenericDatumWriter.java:143) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.avro.generic.GenericDatumWriter.writeWithoutConversion(GenericDatumWriter.java:105) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.avro.generic.GenericDatumWriter.write(GenericDatumWriter.java:73) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.avro.generic.GenericDatumWriter.write(GenericDatumWriter.java:60) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.avro.file.DataFileWriter.append(DataFileWriter.java:302) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.storm.hdfs.bolt.AvroGenericRecordBolt.writeTuple(AvroGenericRecordBolt.java:105) > ~[storm-hdfs-1.0.1.jar:1.0.1] > > at > org.apache.storm.hdfs.bolt.AbstractHdfsBolt.execute(AbstractHdfsBolt.java:154) > ~[storm-hdfs-1.0.1.jar:1.0.1] > > at > org.apache.storm.daemon.executor$fn__8058$tuple_action_fn__8060.invoke(executor.clj:731) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.daemon.executor$mk_task_receiver$fn__7979.invoke(executor.clj:464) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.disruptor$clojure_handler$reify__7492.onEvent(disruptor.clj:40) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.utils.DisruptorQueue.consumeBatchToCursor(DisruptorQueue.java:451) > ~[storm-core-1.0.2.jar:1.0.2] > > ... 
6 more > > 20026 > [Thread-103-dl_raw_ubiservices_tracking.context_stop_inc_avro_bolt-executor[22 > 22]] ERROR o.a.s.d.executor - > > java.lang.RuntimeException: > org.apache.avro.file.DataFileWriter$AppendWriteException: > java.lang.ArrayIndexOutOfBoundsException: 15 > > at > org.apache.storm.utils.DisruptorQueue.consumeBatchToCursor(DisruptorQueue.java:464) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.utils.DisruptorQueue.consumeBatchWhenAvailable(DisruptorQueue.java:430) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.disruptor$consume_batch_when_available.invoke(disruptor.clj:73) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.daemon.executor$fn__8058$fn__8071$fn__8124.invoke(executor.clj:850) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.util$async_loop$fn__624.invoke(util.clj:484) > [storm-core-1.0.2.jar:1.0.2] > > at clojure.lang.AFn.run(AFn.java:22) [clojure-1.7.0.jar:?] > > at java.lang.Thread.run(Thread.java:745) [?:1.7.0_79] > > Caused by: org.apache.avro.file.DataFileWriter$AppendWriteException: > java.lang.ArrayIndexOutOfBoundsException: 15 > > at > org.apache.avro.file.DataFileWriter.append(DataFileWriter.java:308) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.storm.hdfs.bolt.AvroGenericRecordBolt.writeTuple(AvroGenericRecordBolt.java:105) > ~[storm-hdfs-1.0.1.jar:1.0.1] > > at > org.apache.storm.hdfs.bolt.AbstractHdfsBolt.execute(AbstractHdfsBolt.java:154) > ~[storm-hdfs-1.0.1.jar:1.0.1] > > at > org.apache.storm.daemon.executor$fn__8058$tuple_action_fn__8060.invoke(executor.clj:731) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.daemon.executor$mk_task_receiver$fn__7979.invoke(executor.clj:464) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.disruptor$clojure_handler$reify__7492.onEvent(disruptor.clj:40) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.utils.DisruptorQueue.consumeBatchToCursor(DisruptorQueue.java:451) > ~[storm-core-1.0.2.jar:1.0.2] > > ... 
6 more > > Caused by: java.lang.ArrayIndexOutOfBoundsException: 15 > > at > org.apache.avro.generic.GenericData$Record.get(GenericData.java:210) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.avro.generic.GenericData.getField(GenericData.java:658) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.avro.generic.GenericData.getField(GenericData.java:673) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.avro.generic.GenericDatumWriter.writeField(GenericDatumWriter.java:151) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.avro.generic.GenericDatumWriter.writeRecord(GenericDatumWriter.java:143) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.avro.generic.GenericDatumWriter.writeWithoutConversion(GenericDatumWriter.java:105) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.avro.generic.GenericDatumWriter.write(GenericDatumWriter.java:73) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.avro.generic.GenericDatumWriter.write(GenericDatumWriter.java:60) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.avro.file.DataFileWriter.append(DataFileWriter.java:302) > ~[avro-1.7.6-cdh5.8.2.jar:1.7.6-cdh5.8.2] > > at > org.apache.storm.hdfs.bolt.AvroGenericRecordBolt.writeTuple(AvroGenericRecordBolt.java:105) > ~[storm-hdfs-1.0.1.jar:1.0.1] > > at > org.apache.storm.hdfs.bolt.AbstractHdfsBolt.execute(AbstractHdfsBolt.java:154) > ~[storm-hdfs-1.0.1.jar:1.0.1] > > at > org.apache.storm.daemon.executor$fn__8058$tuple_action_fn__8060.invoke(executor.clj:731) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.daemon.executor$mk_task_receiver$fn__7979.invoke(executor.clj:464) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.disruptor$clojure_handler$reify__7492.onEvent(disruptor.clj:40) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.utils.DisruptorQueue.consumeBatchToCursor(DisruptorQueue.java:451) > ~[storm-core-1.0.2.jar:1.0.2] > > ... 
6 more > > 20028 > [Thread-87-dl_raw_ubiservices_tracking.context_stop_inc_avro_bolt-executor[25 > 25]] ERROR o.a.s.util - Async loop died! > > java.lang.RuntimeException: > org.apache.avro.file.DataFileWriter$AppendWriteException: > java.lang.ArrayIndexOutOfBoundsException: 15 > > at > org.apache.storm.utils.DisruptorQueue.consumeBatchToCursor(DisruptorQueue.java:464) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.utils.DisruptorQueue.consumeBatchWhenAvailable(DisruptorQueue.java:430) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.disruptor$consume_batch_when_available.invoke(disruptor.clj:73) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.daemon.executor$fn__8058$fn__8071$fn__8124.invoke(executor.clj:850) > ~[storm-core-1.0.2.jar:1.0.2] > > at > org.apache.storm.util$async_loop$fn__624.invoke(util.clj:484) > [storm-core-1.0.2.jar:1.0.2] > > at clojure.lang.AFn.run(AFn.java:22) [clojure-1.7.0.jar:?] > > at java.lang.Thread.run(Thread.java:745) [?:1.7.0_79] > > > > Yi >