xupefei commented on code in PR #48920:
URL: https://github.com/apache/spark/pull/48920#discussion_r1852456594
##########
connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala:
##########
@@ -409,11 +405,26 @@ class SparkSession private[sql] (
val plan = proto.Plan.newBuilder().setCommand(command).build()
// .toSeq forces that the iterator is consumed and closed. On top, ignore all
// progress messages.
- client.execute(plan).filter(!_.hasExecutionProgress).toSeq
+ execute(plan).filter(!_.hasExecutionProgress).toSeq
}
- private[sql] def execute(plan: proto.Plan): CloseableIterator[ExecutePlanResponse] =
- client.execute(plan)
+ /**
+ * The real `execute` method that calls into `SparkConnectClient`.
+ *
+ * Here we inject a lazy map to process registered observed metrics, so consumers of the
+ * returned iterator do not need to worry about it.
+ *
+ * Please make sure all `execute` overrides call this method.
+ */
+ private[sql] def execute(plan: proto.Plan): CloseableIterator[ExecutePlanResponse] = {
Review Comment:
Good idea. Done!
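
For readers following the discussion, here is a minimal, self-contained Scala sketch of the pattern the new doc comment describes: wrapping the client's iterator in a lazy map so that per-response processing (handling observed metrics) happens only as the consumer pulls elements. The `CloseableIterator` trait, the `Response` type, and the metric handling below are simplified stand-ins for illustration, not the actual Spark Connect classes used in this PR.

```scala
// A minimal sketch of the "lazy map over a CloseableIterator" pattern.
// CloseableIterator, Response, and the metric handling are simplified stand-ins,
// not the real Spark Connect types.
object LazyMetricsMapSketch {

  // Stand-in for the client's CloseableIterator (Iterator that can be closed).
  trait CloseableIterator[E] extends Iterator[E] with AutoCloseable

  // Stand-in for proto.ExecutePlanResponse; only the fields needed for the sketch.
  final case class Response(body: String, observedMetrics: Map[String, Long])

  // Wraps `underlying` so `process` runs on each element only when the consumer
  // pulls it; close() is still forwarded to the wrapped iterator.
  def mapLazily[E](underlying: CloseableIterator[E])(process: E => E): CloseableIterator[E] =
    new CloseableIterator[E] {
      override def hasNext: Boolean = underlying.hasNext
      override def next(): E = process(underlying.next())
      override def close(): Unit = underlying.close()
    }

  def main(args: Array[String]): Unit = {
    // Fake "server" responses backed by a plain in-memory iterator.
    val raw: CloseableIterator[Response] = new CloseableIterator[Response] {
      private val it = Iterator(
        Response("result-batch-1", Map.empty),
        Response("metrics-only", Map("my_observation" -> 42L)))
      override def hasNext: Boolean = it.hasNext
      override def next(): Response = it.next()
      override def close(): Unit = ()
    }

    // In the PR this step would update the session's registered Observation objects;
    // here it simply logs any observed metrics it sees.
    val wrapped = mapLazily(raw) { r =>
      if (r.observedMetrics.nonEmpty) println(s"observed metrics: ${r.observedMetrics}")
      r
    }

    // Consumers simply iterate; metric handling happens transparently.
    wrapped.foreach(r => println(r.body))
    wrapped.close()
  }
}
```

Because the map is lazy, no processing happens until the returned iterator is actually consumed, and `close()` is still forwarded to the underlying iterator, so the resource semantics of `client.execute(plan)` are preserved for callers.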
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]