AngersZhuuuu edited a comment on pull request #30850:
URL: https://github.com/apache/spark/pull/30850#issuecomment-748435343
I changed the UT `SPARK-25313 Insert overwrite directory should output correct schema` to:
```
test("SPARK-25313 Insert overwrite directory should output correct schema") {
withSQLConf(CONVERT_METASTORE_PARQUET.key -> "false") {
withTable("tbl") {
withView("view1") {
spark.sql("CREATE TABLE tbl(id long)")
spark.sql("INSERT OVERWRITE TABLE tbl VALUES 4")
spark.sql("CREATE VIEW view1 AS SELECT id FROM tbl")
spark.sql("SELECT ID, IF(1=1,1,0) FROM view1").show()
withTempPath { path =>
spark.sql(s"INSERT OVERWRITE LOCAL DIRECTORY
'${path.getCanonicalPath}' " +
"STORED AS PARQUET SELECT IF(1=1,1,0)")
val expectedSchema = StructType(Seq(StructField("ID", LongType,
true)))
assert(spark.read.parquet(path.toString).schema ==
expectedSchema)
checkAnswer(spark.read.parquet(path.toString), Seq(Row(4)))
}
}
}
}
}
```
It failed with:
```
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.IllegalArgumentException: field ended by ';': expected ';' but got 'IF' at line 1: optional int32 (IF
  at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveRecordWriter(HiveFileFormatUtils.java:274)
  at org.apache.spark.sql.hive.execution.HiveOutputWriter.<init>(HiveFileFormat.scala:131)
  at org.apache.spark.sql.hive.execution.HiveFileFormat$$anon$1.newInstance(HiveFileFormat.scala:104)
  at org.apache.spark.sql.execution.datasources.SingleDirectoryDataWriter.newOutputWriter(FileFormatDataWriter.scala:126)
  at org.apache.spark.sql.execution.datasources.SingleDirectoryDataWriter.<init>(FileFormatDataWriter.scala:111)
  at org.apache.spark.sql.execution.datasources.FileFormatWriter$.executeTask(FileFormatWriter.scala:269)
  at org.apache.spark.sql.execution.datasources.FileFormatWriter$.$anonfun$write$15(FileFormatWriter.scala:210)
  at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:90)
  at org.apache.spark.scheduler.Task.run(Task.scala:131)
  at org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:497)
  at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1439)
  at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:500)
  ... 3 more
Caused by: java.lang.IllegalArgumentException: field ended by ';': expected ';' but got 'IF' at line 1: optional int32 (IF
  at org.apache.parquet.schema.MessageTypeParser.check(MessageTypeParser.java:214)
  at org.apache.parquet.schema.MessageTypeParser.addPrimitiveType(MessageTypeParser.java:187)
  at org.apache.parquet.schema.MessageTypeParser.addType(MessageTypeParser.java:111)
  at org.apache.parquet.schema.MessageTypeParser.addGroupTypeFields(MessageTypeParser.java:99)
  at org.apache.parquet.schema.MessageTypeParser.parse(MessageTypeParser.java:92)
  at org.apache.parquet.schema.MessageTypeParser.parseMessageType(MessageTypeParser.java:82)
  at org.apache.hadoop.hive.ql.io.parquet.write.DataWritableWriteSupport.getSchema(DataWritableWriteSupport.java:43)
  at org.apache.hadoop.hive.ql.io.parquet.write.DataWritableWriteSupport.init(DataWritableWriteSupport.java:48)
  at org.apache.parquet.hadoop.ParquetOutputFormat.getRecordWriter(ParquetOutputFormat.java:388)
  at org.apache.parquet.hadoop.ParquetOutputFormat.getRecordWriter(ParquetOutputFormat.java:354)
  at org.apache.hadoop.hive.ql.io.parquet.write.ParquetRecordWriterWrapper.<init>(ParquetRecordWriterWrapper.java:70)
  at org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat.getParquerRecordWriterWrapper(MapredParquetOutputFormat.java:137)
  at org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat.getHiveRecordWriter(MapredParquetOutputFormat.java:126)
  at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getRecordWriter(HiveFileFormatUtils.java:286)
```
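For context, the trace shows where Parquet fails: `DataWritableWriteSupport.getSchema` re-parses the schema string Hive built for the writer with parquet's `MessageTypeParser`, and that parser cannot tokenize a field name containing parentheses. A minimal stand-alone sketch of the same parse failure (my own repro, assuming only `parquet-column` on the classpath; the literal field name below is inferred from the error message and may not be exactly what Hive generated):
```
import org.apache.parquet.schema.MessageTypeParser

// The parser consumes the '(' as the field name, then expects ';' but sees
// 'IF', producing the same "field ended by ';'" IllegalArgumentException
// as in the trace above.
val schemaString = "message hive_schema { optional int32 (IF((1 = 1), 1, 0)); }"
MessageTypeParser.parseMessageType(schemaString) // throws IllegalArgumentException
```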
Changing the format to ORC works well.
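Not a fix for the write path itself, but a workaround sketch of my own: aliasing the expression makes the generated Parquet field name a plain identifier, so the schema string should parse.
```
// Hypothetical workaround (not from this PR): give the expression an alias so
// the Parquet field name is `c0` rather than the raw text `IF(1=1, 1, 0)`.
spark.sql(s"INSERT OVERWRITE LOCAL DIRECTORY '${path.getCanonicalPath}' " +
  "STORED AS PARQUET SELECT IF(1=1,1,0) AS c0")
```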