Github user cloud-fan commented on a diff in the pull request:
https://github.com/apache/spark/pull/19702#discussion_r149924096
--- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFileFormat.scala ---
@@ -428,15 +417,9 @@ object ParquetFileFormat extends Logging {
   private[parquet] def readSchema(
       footers: Seq[Footer], sparkSession: SparkSession): Option[StructType] = {
-    def parseParquetSchema(schema: MessageType): StructType = {
-      val converter = new ParquetSchemaConverter(
-        sparkSession.sessionState.conf.isParquetBinaryAsString,
-        sparkSession.sessionState.conf.isParquetBinaryAsString,
-        sparkSession.sessionState.conf.writeLegacyParquetFormat,
-        sparkSession.sessionState.conf.isParquetINT64AsTimestampMillis)
-
-      converter.convert(schema)
-    }
+    val converter = new ParquetToSparkSchemaConverter(
+      sparkSession.sessionState.conf.isParquetBinaryAsString,
+      sparkSession.sessionState.conf.isParquetBinaryAsString)
--- End diff ---
Good catch! It's an existing typo ...
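
For context, the issue flagged here is that `isParquetBinaryAsString` is passed twice to the converter. A minimal sketch of what the corrected call would presumably look like, assuming the converter's second constructor parameter is the INT96-as-timestamp flag (as the parameter names in ParquetToSparkSchemaConverter suggest; this fix is not part of the diff above):

    // Hedged sketch: pass the INT96-as-timestamp conf entry instead of
    // repeating the binary-as-string one (assumption, not from the diff).
    val converter = new ParquetToSparkSchemaConverter(
      sparkSession.sessionState.conf.isParquetBinaryAsString,
      sparkSession.sessionState.conf.isParquetINT96AsTimestamp)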
---