Github user mallman commented on a diff in the pull request:
https://github.com/apache/spark/pull/21320#discussion_r190485768
--- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/DataSourceScanExec.scala ---
@@ -286,7 +286,19 @@ case class FileSourceScanExec(
       } getOrElse {
         metadata
       }
-    withOptPartitionCount
+    val withOptColumnCount = relation.fileFormat match {
+      case columnar: ColumnarFileFormat =>
+        SparkSession
+          .getActiveSession
+          .map { sparkSession =>
+            val columnCount = columnar.columnCountForSchema(sparkSession, requiredSchema)
+            withOptPartitionCount + ("ColumnCount" -> columnCount.toString)
--- End diff ---
Replied above.
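
For readers following the thread, here is a minimal sketch of the ColumnarFileFormat contract the hunk above appears to rely on. The trait and method names are taken from the diff itself; the parameter and return types are inferred from the call site and are assumptions, not the PR's actual definition.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.StructType

// Sketch only: a file format that can report how many physical columns
// a scan would read for a given required schema.
trait ColumnarFileFormat {
  def columnCountForSchema(sparkSession: SparkSession, schema: StructType): Int
}

With such a trait in place, the match in the hunk adds a human-readable "ColumnCount" entry to the scan's metadata map whenever the underlying format is columnar.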
---