Ngone51 commented on a change in pull request #28895:
URL: https://github.com/apache/spark/pull/28895#discussion_r444037964
##########
File path: core/src/main/scala/org/apache/spark/MapOutputTracker.scala
##########
@@ -800,35 +770,20 @@ private[spark] class MapOutputTrackerWorker(conf:
SparkConf) extends MapOutputTr
// Get blocks sizes by executor Id. Note that zero-sized blocks are excluded
in the result.
override def getMapSizesByExecutorId(
- shuffleId: Int,
- startPartition: Int,
- endPartition: Int)
- : Iterator[(BlockManagerId, Seq[(BlockId, Long, Int)])] = {
- logDebug(s"Fetching outputs for shuffle $shuffleId, partitions
$startPartition-$endPartition")
- val statuses = getStatuses(shuffleId, conf)
- try {
- MapOutputTracker.convertMapStatuses(
- shuffleId, startPartition, endPartition, statuses, 0, statuses.length)
- } catch {
- case e: MetadataFetchFailedException =>
- // We experienced a fetch failure so our mapStatuses cache is
outdated; clear it:
- mapStatuses.clear()
- throw e
- }
- }
-
- override def getMapSizesByRange(
shuffleId: Int,
startMapIndex: Int,
endMapIndex: Int,
startPartition: Int,
- endPartition: Int): Iterator[(BlockManagerId, Seq[(BlockId, Long,
Int)])] = {
- logDebug(s"Fetching outputs for shuffle $shuffleId, mappers
$startMapIndex-$endMapIndex" +
- s"partitions $startPartition-$endPartition")
+ endPartition: Int)
+ : Iterator[(BlockManagerId, Seq[(BlockId, Long, Int)])] = {
Review comment:
This signature is actually just an extension of `getMapSizesByExecutorId`
above (the shared parameter lines are reused); nothing else changes.
---------------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]