[
https://issues.apache.org/jira/browse/FLINK-18354?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
Yangyingbo updated FLINK-18354:
-------------------------------
Description:
{code:java}
{code}
when I use ParquetAvroWriters.forGenericRecord(Schema schema) to write data to
parquet, an error occurs:
my code:
{code:java}
//
//transfor 2 dataStream
// TupleTypeInfo tupleTypeInfo = new TupleTypeInfo(GenericData.Record.class,
BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
TupleTypeInfo tupleTypeInfo = new
TupleTypeInfo(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
DataStream testDataStream = flinkTableEnv.toAppendStream(test, tupleTypeInfo);
testDataStream.print().setParallelism(1);
ArrayList<org.apache.avro.Schema.Field> fields = new
ArrayList<org.apache.avro.Schema.Field>();
fields.add(new org.apache.avro.Schema.Field("id",
org.apache.avro.Schema.create(org.apache.avro.Schema.Type.STRING), "id",
JsonProperties.NULL_VALUE));
fields.add(new org.apache.avro.Schema.Field("time",
org.apache.avro.Schema.create(org.apache.avro.Schema.Type.STRING), "time",
JsonProperties.NULL_VALUE));
org.apache.avro.Schema parquetSinkSchema =
org.apache.avro.Schema.createRecord("pi", "flinkParquetSink", "flink.parquet",
true, fields);
String fileSinkPath = "./xxx.text/rs6/";
StreamingFileSink<GenericRecord> parquetSink = StreamingFileSink.
forBulkFormat(new Path(fileSinkPath),
ParquetAvroWriters.forGenericRecord(parquetSinkSchema))
.withRollingPolicy(OnCheckpointRollingPolicy.build())
.build();
testDataStream.addSink(parquetSink).setParallelism(1);
flinkTableEnv.execute("ReadFromKafkaConnectorWriteToLocalFileJava");
{code}
and this is the error:
{code:java}
// code placeholder
09:29:50,283 INFO org.apache.flink.runtime.taskmanager.Task
- Sink: Unnamed (1/1) (79505cb6ab2df38886663fd99461315a) switched from RUNNING
to FAILED.09:29:50,283 INFO org.apache.flink.runtime.taskmanager.Task
- Sink: Unnamed (1/1) (79505cb6ab2df38886663fd99461315a) switched
from RUNNING to FAILED.java.lang.ClassCastException:
org.apache.flink.api.java.tuple.Tuple2 cannot be cast to
org.apache.avro.generic.IndexedRecord at
org.apache.avro.generic.GenericData.getField(GenericData.java:697) at
org.apache.parquet.avro.AvroWriteSupport.writeRecordFields(AvroWriteSupport.java:188)
at org.apache.parquet.avro.AvroWriteSupport.write(AvroWriteSupport.java:165)
at
org.apache.parquet.hadoop.InternalParquetRecordWriter.write(InternalParquetRecordWriter.java:128)
at org.apache.parquet.hadoop.ParquetWriter.write(ParquetWriter.java:299) at
org.apache.flink.formats.parquet.ParquetBulkWriter.addElement(ParquetBulkWriter.java:52)
at
org.apache.flink.streaming.api.functions.sink.filesystem.BulkPartWriter.write(BulkPartWriter.java:50)
at
org.apache.flink.streaming.api.functions.sink.filesystem.Bucket.write(Bucket.java:214)
at
org.apache.flink.streaming.api.functions.sink.filesystem.Buckets.onElement(Buckets.java:274)
at
org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink.invoke(StreamingFileSink.java:445)
at
org.apache.flink.streaming.api.operators.StreamSink.processElement(StreamSink.java:56)
at
org.apache.flink.streaming.runtime.tasks.OneInputStreamTask$StreamTaskNetworkOutput.emitRecord(OneInputStreamTask.java:173)
at
org.apache.flink.streaming.runtime.io.StreamTaskNetworkInput.processElement(StreamTaskNetworkInput.java:151)
at
org.apache.flink.streaming.runtime.io.StreamTaskNetworkInput.emitNext(StreamTaskNetworkInput.java:128)
at
org.apache.flink.streaming.runtime.io.StreamOneInputProcessor.processInput(StreamOneInputProcessor.java:69)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.processInput(StreamTask.java:311)
at
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:187)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:487)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:470)
at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:707) at
org.apache.flink.runtime.taskmanager.Task.run(Task.java:532) at
java.lang.Thread.run(Thread.java:748)09:29:50,284 INFO
org.apache.flink.runtime.taskmanager.Task - Freeing task
resources for Sink: Unnamed (1/1)
(79505cb6ab2df38886663fd99461315a).09:29:50,285 INFO
org.apache.flink.runtime.taskmanager.Task - Ensuring all
FileSystem streams are closed for task Sink: Unnamed (1/1)
(79505cb6ab2df38886663fd99461315a) [FAILED]09:29:50,289 INFO
org.apache.flink.runtime.taskexecutor.TaskExecutor - Un-registering
task and sending final execution state FAILED to JobManager for task Sink:
Unnamed (1/1) 79505cb6ab2df38886663fd99461315a.09:29:50,293 INFO
org.apache.flink.runtime.executiongraph.ExecutionGraph - Sink: Unnamed
(1/1) (79505cb6ab2df38886663fd99461315a) switched from RUNNING to
FAILED.java.lang.ClassCastException: org.apache.flink.api.java.tuple.Tuple2
cannot be cast to org.apache.avro.generic.IndexedRecord at
org.apache.avro.generic.GenericData.getField(GenericData.java:697) at
org.apache.parquet.avro.AvroWriteSupport.writeRecordFields(AvroWriteSupport.java:188)
at org.apache.parquet.avro.AvroWriteSupport.write(AvroWriteSupport.java:165)
{code}
was:
{code:java}
{code}
when I use ParquetAvroWriters.forGenericRecord(Schema schema) to write data to
parquet, an error occurs:
my code:
{code:java}
//
//transfor 2 dataStream
// TupleTypeInfo tupleTypeInfo = new TupleTypeInfo(GenericData.Record.class,
BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
TupleTypeInfo tupleTypeInfo = new
TupleTypeInfo(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
DataStream testDataStream = flinkTableEnv.toAppendStream(test, tupleTypeInfo);
testDataStream.print().setParallelism(1);
ArrayList<org.apache.avro.Schema.Field> fields = new
ArrayList<org.apache.avro.Schema.Field>();
fields.add(new org.apache.avro.Schema.Field("id",
org.apache.avro.Schema.create(org.apache.avro.Schema.Type.STRING), "id",
JsonProperties.NULL_VALUE));
fields.add(new org.apache.avro.Schema.Field("time",
org.apache.avro.Schema.create(org.apache.avro.Schema.Type.STRING), "time",
JsonProperties.NULL_VALUE));
org.apache.avro.Schema parquetSinkSchema =
org.apache.avro.Schema.createRecord("pi", "flinkParquetSink", "flink.parquet",
true, fields);
String fileSinkPath = "./xxx.text/rs6/";
StreamingFileSink<GenericRecord> parquetSink = StreamingFileSink.
forBulkFormat(new Path(fileSinkPath),
ParquetAvroWriters.forGenericRecord(parquetSinkSchema))
.withRollingPolicy(OnCheckpointRollingPolicy.build())
.build();
testDataStream.addSink(parquetSink).setParallelism(1);
flinkTableEnv.execute("ReadFromKafkaConnectorWriteToLocalFileJava");
{code}
and this is the error:
```
09:29:50,283 INFO org.apache.flink.runtime.taskmanager.Task
- Sink: Unnamed (1/1) (79505cb6ab2df38886663fd99461315a) switched from RUNNING
to FAILED.09:29:50,283 INFO org.apache.flink.runtime.taskmanager.Task
- Sink: Unnamed (1/1) (79505cb6ab2df38886663fd99461315a) switched
from RUNNING to FAILED.java.lang.ClassCastException:
org.apache.flink.api.java.tuple.Tuple2 cannot be cast to
org.apache.avro.generic.IndexedRecord at
org.apache.avro.generic.GenericData.getField(GenericData.java:697) at
org.apache.parquet.avro.AvroWriteSupport.writeRecordFields(AvroWriteSupport.java:188)
at org.apache.parquet.avro.AvroWriteSupport.write(AvroWriteSupport.java:165)
at
org.apache.parquet.hadoop.InternalParquetRecordWriter.write(InternalParquetRecordWriter.java:128)
at org.apache.parquet.hadoop.ParquetWriter.write(ParquetWriter.java:299) at
org.apache.flink.formats.parquet.ParquetBulkWriter.addElement(ParquetBulkWriter.java:52)
at
org.apache.flink.streaming.api.functions.sink.filesystem.BulkPartWriter.write(BulkPartWriter.java:50)
at
org.apache.flink.streaming.api.functions.sink.filesystem.Bucket.write(Bucket.java:214)
at
org.apache.flink.streaming.api.functions.sink.filesystem.Buckets.onElement(Buckets.java:274)
at
org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink.invoke(StreamingFileSink.java:445)
at
org.apache.flink.streaming.api.operators.StreamSink.processElement(StreamSink.java:56)
at
org.apache.flink.streaming.runtime.tasks.OneInputStreamTask$StreamTaskNetworkOutput.emitRecord(OneInputStreamTask.java:173)
at
org.apache.flink.streaming.runtime.io.StreamTaskNetworkInput.processElement(StreamTaskNetworkInput.java:151)
at
org.apache.flink.streaming.runtime.io.StreamTaskNetworkInput.emitNext(StreamTaskNetworkInput.java:128)
at
org.apache.flink.streaming.runtime.io.StreamOneInputProcessor.processInput(StreamOneInputProcessor.java:69)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.processInput(StreamTask.java:311)
at
org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:187)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:487)
at
org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:470)
at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:707) at
org.apache.flink.runtime.taskmanager.Task.run(Task.java:532) at
java.lang.Thread.run(Thread.java:748)09:29:50,284 INFO
org.apache.flink.runtime.taskmanager.Task - Freeing task
resources for Sink: Unnamed (1/1)
(79505cb6ab2df38886663fd99461315a).09:29:50,285 INFO
org.apache.flink.runtime.taskmanager.Task - Ensuring all
FileSystem streams are closed for task Sink: Unnamed (1/1)
(79505cb6ab2df38886663fd99461315a) [FAILED]09:29:50,289 INFO
org.apache.flink.runtime.taskexecutor.TaskExecutor - Un-registering
task and sending final execution state FAILED to JobManager for task Sink:
Unnamed (1/1) 79505cb6ab2df38886663fd99461315a.09:29:50,293 INFO
org.apache.flink.runtime.executiongraph.ExecutionGraph - Sink: Unnamed
(1/1) (79505cb6ab2df38886663fd99461315a) switched from RUNNING to
FAILED.java.lang.ClassCastException: org.apache.flink.api.java.tuple.Tuple2
cannot be cast to org.apache.avro.generic.IndexedRecord at
org.apache.avro.generic.GenericData.getField(GenericData.java:697) at
org.apache.parquet.avro.AvroWriteSupport.writeRecordFields(AvroWriteSupport.java:188)
at org.apache.parquet.avro.AvroWriteSupport.write(AvroWriteSupport.java:165)
```
> when use ParquetAvroWriters.forGenericRecord(Schema schema) error
> java.lang.ClassCastException: org.apache.flink.api.java.tuple.Tuple2 cannot
> be cast to org.apache.avro.generic.IndexedRecord
> -----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>
> Key: FLINK-18354
> URL: https://issues.apache.org/jira/browse/FLINK-18354
> Project: Flink
> Issue Type: Bug
> Components: API / DataStream, Formats (JSON, Avro, Parquet, ORC,
> SequenceFile)
> Affects Versions: 1.10.0
> Reporter: Yangyingbo
> Priority: Major
>
> {code:java}
> {code}
> when I use ParquetAvroWriters.forGenericRecord(Schema schema) to write data to
> parquet, an error occurs:
> my code:
>
> {code:java}
> //
> //transfor 2 dataStream
> // TupleTypeInfo tupleTypeInfo = new TupleTypeInfo(GenericData.Record.class,
> BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
> TupleTypeInfo tupleTypeInfo = new
> TupleTypeInfo(BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.STRING_TYPE_INFO);
> DataStream testDataStream = flinkTableEnv.toAppendStream(test,
> tupleTypeInfo);
> testDataStream.print().setParallelism(1);
> ArrayList<org.apache.avro.Schema.Field> fields = new
> ArrayList<org.apache.avro.Schema.Field>();
> fields.add(new org.apache.avro.Schema.Field("id",
> org.apache.avro.Schema.create(org.apache.avro.Schema.Type.STRING), "id",
> JsonProperties.NULL_VALUE));
> fields.add(new org.apache.avro.Schema.Field("time",
> org.apache.avro.Schema.create(org.apache.avro.Schema.Type.STRING), "time",
> JsonProperties.NULL_VALUE));
> org.apache.avro.Schema parquetSinkSchema =
> org.apache.avro.Schema.createRecord("pi", "flinkParquetSink",
> "flink.parquet", true, fields);
> String fileSinkPath = "./xxx.text/rs6/";
> StreamingFileSink<GenericRecord> parquetSink = StreamingFileSink.
> forBulkFormat(new Path(fileSinkPath),
> ParquetAvroWriters.forGenericRecord(parquetSinkSchema))
> .withRollingPolicy(OnCheckpointRollingPolicy.build())
> .build();
> testDataStream.addSink(parquetSink).setParallelism(1);
> flinkTableEnv.execute("ReadFromKafkaConnectorWriteToLocalFileJava");
> {code}
> and this is the error:
> {code:java}
> // code placeholder
> 09:29:50,283 INFO org.apache.flink.runtime.taskmanager.Task
> - Sink: Unnamed (1/1) (79505cb6ab2df38886663fd99461315a) switched from
> RUNNING to FAILED.09:29:50,283 INFO
> org.apache.flink.runtime.taskmanager.Task - Sink: Unnamed
> (1/1) (79505cb6ab2df38886663fd99461315a) switched from RUNNING to
> FAILED.java.lang.ClassCastException: org.apache.flink.api.java.tuple.Tuple2
> cannot be cast to org.apache.avro.generic.IndexedRecord at
> org.apache.avro.generic.GenericData.getField(GenericData.java:697) at
> org.apache.parquet.avro.AvroWriteSupport.writeRecordFields(AvroWriteSupport.java:188)
> at org.apache.parquet.avro.AvroWriteSupport.write(AvroWriteSupport.java:165)
> at
> org.apache.parquet.hadoop.InternalParquetRecordWriter.write(InternalParquetRecordWriter.java:128)
> at org.apache.parquet.hadoop.ParquetWriter.write(ParquetWriter.java:299) at
> org.apache.flink.formats.parquet.ParquetBulkWriter.addElement(ParquetBulkWriter.java:52)
> at
> org.apache.flink.streaming.api.functions.sink.filesystem.BulkPartWriter.write(BulkPartWriter.java:50)
> at
> org.apache.flink.streaming.api.functions.sink.filesystem.Bucket.write(Bucket.java:214)
> at
> org.apache.flink.streaming.api.functions.sink.filesystem.Buckets.onElement(Buckets.java:274)
> at
> org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink.invoke(StreamingFileSink.java:445)
> at
> org.apache.flink.streaming.api.operators.StreamSink.processElement(StreamSink.java:56)
> at
> org.apache.flink.streaming.runtime.tasks.OneInputStreamTask$StreamTaskNetworkOutput.emitRecord(OneInputStreamTask.java:173)
> at
> org.apache.flink.streaming.runtime.io.StreamTaskNetworkInput.processElement(StreamTaskNetworkInput.java:151)
> at
> org.apache.flink.streaming.runtime.io.StreamTaskNetworkInput.emitNext(StreamTaskNetworkInput.java:128)
> at
> org.apache.flink.streaming.runtime.io.StreamOneInputProcessor.processInput(StreamOneInputProcessor.java:69)
> at
> org.apache.flink.streaming.runtime.tasks.StreamTask.processInput(StreamTask.java:311)
> at
> org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:187)
> at
> org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:487)
> at
> org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:470)
> at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:707) at
> org.apache.flink.runtime.taskmanager.Task.run(Task.java:532) at
> java.lang.Thread.run(Thread.java:748)09:29:50,284 INFO
> org.apache.flink.runtime.taskmanager.Task - Freeing task
> resources for Sink: Unnamed (1/1)
> (79505cb6ab2df38886663fd99461315a).09:29:50,285 INFO
> org.apache.flink.runtime.taskmanager.Task - Ensuring all
> FileSystem streams are closed for task Sink: Unnamed (1/1)
> (79505cb6ab2df38886663fd99461315a) [FAILED]09:29:50,289 INFO
> org.apache.flink.runtime.taskexecutor.TaskExecutor -
> Un-registering task and sending final execution state FAILED to JobManager
> for task Sink: Unnamed (1/1) 79505cb6ab2df38886663fd99461315a.09:29:50,293
> INFO org.apache.flink.runtime.executiongraph.ExecutionGraph - Sink:
> Unnamed (1/1) (79505cb6ab2df38886663fd99461315a) switched from RUNNING to
> FAILED.java.lang.ClassCastException: org.apache.flink.api.java.tuple.Tuple2
> cannot be cast to org.apache.avro.generic.IndexedRecord at
> org.apache.avro.generic.GenericData.getField(GenericData.java:697) at
> org.apache.parquet.avro.AvroWriteSupport.writeRecordFields(AvroWriteSupport.java:188)
> at org.apache.parquet.avro.AvroWriteSupport.write(AvroWriteSupport.java:165)
> {code}
>
--
This message was sent by Atlassian Jira
(v8.3.4#803005)