Github user JoshRosen commented on a diff in the pull request:
https://github.com/apache/spark/pull/10835#discussion_r50805064
--- Diff: core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala ---
@@ -230,86 +297,119 @@ class TaskMetrics extends Serializable {
*/
def shuffleWriteMetrics: Option[ShuffleWriteMetrics] = _shuffleWriteMetrics
- @deprecated("setting ShuffleWriteMetrics is for internal use only", "2.0.0")
- def shuffleWriteMetrics_=(swm: Option[ShuffleWriteMetrics]): Unit = {
- _shuffleWriteMetrics = swm
- }
-
/**
* Get or create a new [[ShuffleWriteMetrics]] associated with this task.
*/
private[spark] def registerShuffleWriteMetrics(): ShuffleWriteMetrics = synchronized {
_shuffleWriteMetrics.getOrElse {
- val metrics = new ShuffleWriteMetrics
+ val metrics = new ShuffleWriteMetrics(initialAccumsMap)
_shuffleWriteMetrics = Some(metrics)
metrics
}
}
- private var _updatedBlockStatuses: Seq[(BlockId, BlockStatus)] =
- Seq.empty[(BlockId, BlockStatus)]
-
- /**
- * Storage statuses of any blocks that have been updated as a result of this task.
- */
- def updatedBlockStatuses: Seq[(BlockId, BlockStatus)] = _updatedBlockStatuses
- @deprecated("setting updated blocks is for internal use only", "2.0.0")
- def updatedBlocks_=(ub: Option[Seq[(BlockId, BlockStatus)]]): Unit = {
- _updatedBlockStatuses = ub.getOrElse(Seq.empty[(BlockId, BlockStatus)])
- }
+ /* ========================== *
+ | OTHER THINGS |
+ * ========================== */
- private[spark] def incUpdatedBlockStatuses(v: Seq[(BlockId, BlockStatus)]): Unit = {
- _updatedBlockStatuses ++= v
+ private[spark] def registerAccumulator(a: Accumulable[_, _]): Unit = {
+ accums += a
}
- private[spark] def setUpdatedBlockStatuses(v: Seq[(BlockId, BlockStatus)]): Unit = {
- _updatedBlockStatuses = v
+ /**
+ * Return the latest updates of accumulators in this task.
+ */
+ def accumulatorUpdates(): Seq[AccumulableInfo] = accums.map { a =>
+ new AccumulableInfo(
+ a.id, a.name.orNull, Some(a.localValue), None, a.isInternal, a.countFailedValues)
}
- @deprecated("use updatedBlockStatuses instead", "2.0.0")
- def updatedBlocks: Option[Seq[(BlockId, BlockStatus)]] = {
- if (_updatedBlockStatuses.nonEmpty) Some(_updatedBlockStatuses) else None
+ // If we are reconstructing this TaskMetrics on the driver, some metrics may already be set.
+ // If so, initialize all relevant metrics classes so listeners can access them downstream.
+ {
--- End diff ---
Tossing these braces in here so that these variables don't escape their scope is an
idiom we don't use super often in Spark, but it's fine by me in this sort of
circumstance. I suppose this could have gone into an `init()` method that is
immediately called after its definition, but this also seems fine.
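For anyone unfamiliar with the idiom, here's a minimal sketch (names made up, not
from TaskMetrics) of what the bare braces buy you: the block runs as part of the
primary constructor, and anything declared inside it stays local instead of
becoming a field.

```scala
class Example {
  private var cached: Option[String] = None

  // Constructor-time initialization block: `tmp` is visible only inside
  // the braces, so it never leaks into the rest of the class.
  {
    val tmp = sys.env.get("EXAMPLE_VALUE") // hypothetical source of the value
    cached = tmp.map(_.trim)
  }

  // Roughly equivalent alternative: a private init() defined and then
  // immediately called, which names the step but adds a method.
  // private def init(): Unit = { ... }
  // init()
}
```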