cloud-fan commented on code in PR #36327:
URL: https://github.com/apache/spark/pull/36327#discussion_r857501073
##########
sql/core/src/main/scala/org/apache/spark/sql/execution/DataSourceScanExec.scala:
##########
@@ -230,17 +214,28 @@ case class FileSourceScanExec(
vectorTypes ++
Seq.fill(metadataColumns.size)(classOf[ConstantColumnVector].getName)
}
-  private lazy val driverMetrics: HashMap[String, Long] = HashMap.empty
+  lazy val driverMetrics = Map(
+    "numFiles" -> SQLMetrics.createMetric(sparkContext, "number of files read"),
+    "metadataTime" -> SQLMetrics.createTimingMetric(sparkContext, "metadata time"),
+    "filesSize" -> SQLMetrics.createSizeMetric(sparkContext, "size of files read")
+  ) ++ {
+    if (relation.partitionSchema.nonEmpty) {
+      Map(
+        "numPartitions" -> SQLMetrics.createMetric(sparkContext, "number of partitions read"),
+        "pruningTime" ->
+          SQLMetrics.createTimingMetric(sparkContext, "dynamic partition pruning time"))
+    } else {
+      Map.empty[String, SQLMetric]
+    }
+  } ++ staticMetrics

  /**
   * Send the driver-side metrics. Before calling this function, selectedPartitions has
   * been initialized. See SPARK-26327 for more details.
   */
-  private def sendDriverMetrics(): Unit = {
-    driverMetrics.foreach(e => metrics(e._1).add(e._2))
Review Comment:
Previously `driverMetrics` was not a `Map[String, SQLMetric]` but just a
`Map[String, Long]`, so we had to copy the staged values into the real
`SQLMetric`s here. Now the refactor makes `driverMetrics` hold the real
`SQLMetric` instances directly, so this copy step is no longer needed.
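
For anyone following along, here is a minimal, self-contained sketch of the shape
change described above. `SQLMetricStub` is a stand-in, not the real
`org.apache.spark.sql.execution.metric.SQLMetric` API, and the metric names and
values are illustrative only:

```scala
import scala.collection.mutable

// Stand-in that only mimics the accumulate-on-driver behaviour of SQLMetric.
class SQLMetricStub(val name: String) {
  private var _value = 0L
  def add(v: Long): Unit = _value += v
  def value: Long = _value
}

object Before {
  // Old shape: driver-side values were staged as plain Longs...
  val driverMetrics: mutable.HashMap[String, Long] = mutable.HashMap.empty
  val metrics: Map[String, SQLMetricStub] =
    Map("numFiles" -> new SQLMetricStub("numFiles"))

  // ...so sendDriverMetrics had to copy each staged Long into the real metric.
  def sendDriverMetrics(): Unit =
    driverMetrics.foreach { case (name, v) => metrics(name).add(v) }
}

object After {
  // New shape: driverMetrics already holds the metric objects themselves,
  // so driver-side code updates them in place and no copy step is required.
  val driverMetrics: Map[String, SQLMetricStub] =
    Map("numFiles" -> new SQLMetricStub("numFiles"))

  driverMetrics("numFiles").add(42L)
}
```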
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]