Ngone51 commented on a change in pull request #28895:
URL: https://github.com/apache/spark/pull/28895#discussion_r444037747
##########
File path: core/src/main/scala/org/apache/spark/MapOutputTracker.scala
##########
@@ -737,35 +721,21 @@ private[spark] class MapOutputTrackerMaster(
   // Get blocks sizes by executor Id. Note that zero-sized blocks are excluded in the result.
   // This method is only called in local-mode.
   def getMapSizesByExecutorId(
-      shuffleId: Int,
-      startPartition: Int,
-      endPartition: Int)
-      : Iterator[(BlockManagerId, Seq[(BlockId, Long, Int)])] = {
-    logDebug(s"Fetching outputs for shuffle $shuffleId, partitions $startPartition-$endPartition")
-    shuffleStatuses.get(shuffleId) match {
-      case Some (shuffleStatus) =>
-        shuffleStatus.withMapStatuses { statuses =>
-          MapOutputTracker.convertMapStatuses(
-            shuffleId, startPartition, endPartition, statuses, 0, shuffleStatus.mapStatuses.length)
-        }
-      case None =>
-        Iterator.empty
-    }
-  }
-
-  override def getMapSizesByRange(
       shuffleId: Int,
       startMapIndex: Int,
       endMapIndex: Int,
       startPartition: Int,
-      endPartition: Int): Iterator[(BlockManagerId, Seq[(BlockId, Long, Int)])] = {
-    logDebug(s"Fetching outputs for shuffle $shuffleId, mappers $startMapIndex-$endMapIndex" +
-      s"partitions $startPartition-$endPartition")
+      endPartition: Int)
+    : Iterator[(BlockManagerId, Seq[(BlockId, Long, Int)])] = {
Review comment:
This line actually carries over from `getMapSizesByExecutorId` above; only its indentation was wrong previously.
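
For readers following the merge, here is a minimal, self-contained sketch of how a single `getMapSizesByExecutorId(shuffleId, startMapIndex, endMapIndex, startPartition, endPartition)` signature can serve both former call paths. The `MergedSignatureSketch` object, the `DummyTracker` stub, the stand-in case classes, and the `Int.MaxValue` "all mappers" convention are illustrative assumptions, not code from this PR:

```scala
// Minimal sketch (stub types, not Spark's real classes) of the merged signature:
// one method that covers both the old getMapSizesByExecutorId path and the old
// getMapSizesByRange path.
object MergedSignatureSketch {

  // Hypothetical stand-ins for Spark's BlockManagerId and BlockId.
  final case class BlockManagerId(executorId: String, host: String, port: Int)
  final case class BlockId(name: String)

  class DummyTracker {
    def getMapSizesByExecutorId(
        shuffleId: Int,
        startMapIndex: Int,
        endMapIndex: Int,
        startPartition: Int,
        endPartition: Int)
      : Iterator[(BlockManagerId, Seq[(BlockId, Long, Int)])] = {
      // The real method would look up shuffleStatuses and delegate to
      // MapOutputTracker.convertMapStatuses; elided in this sketch.
      println(s"shuffle=$shuffleId mappers=[$startMapIndex, $endMapIndex) " +
        s"partitions=[$startPartition, $endPartition)")
      Iterator.empty
    }
  }

  def main(args: Array[String]): Unit = {
    val tracker = new DummyTracker
    // Old getMapSizesByExecutorId behaviour: all map outputs for partitions 0-10.
    // (Int.MaxValue as "all mappers" is an assumed caller-side convention.)
    tracker.getMapSizesByExecutorId(1, 0, Int.MaxValue, 0, 10)
    // Old getMapSizesByRange behaviour: only mappers 2 to 5.
    tracker.getMapSizesByExecutorId(1, 2, 5, 0, 10)
  }
}
```

Under these assumptions, call sites that previously went through `getMapSizesByRange` simply pass an explicit mapper range, while the old behaviour corresponds to requesting the full range.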