LuciferYang commented on code in PR #38427:
URL: https://github.com/apache/spark/pull/38427#discussion_r1008675452


##########
core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala:
##########
@@ -530,7 +530,7 @@ final class ShuffleBlockFetcherIterator(
           case _ => (doBatchFetch, false)
         }
       }
-      createFetchRequests(curBlocks.toSeq, address, isLast = true, 
collectedRemoteRequests,
+      createFetchRequests(curBlocks.toIndexedSeq, address, isLast = true, 
collectedRemoteRequests,

Review Comment:
   ditto



##########
core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala:
##########
@@ -502,20 +502,20 @@ final class ShuffleBlockFetcherIterator(
         case ShuffleBlockChunkId(_, _, _, _) =>
           if (curRequestSize >= targetRemoteRequestSize ||
             curBlocks.size >= maxBlocksInFlightPerAddress) {
-            curBlocks = createFetchRequests(curBlocks.toSeq, address, isLast = 
false,
+            curBlocks = createFetchRequests(curBlocks.toIndexedSeq, address, 
isLast = false,
               collectedRemoteRequests, enableBatchFetch = false)
             curRequestSize = curBlocks.map(_.size).sum
           }
         case ShuffleMergedBlockId(_, _, _) =>
           if (curBlocks.size >= maxBlocksInFlightPerAddress) {
-            curBlocks = createFetchRequests(curBlocks.toSeq, address, isLast = 
false,
+            curBlocks = createFetchRequests(curBlocks.toIndexedSeq, address, 
isLast = false,

Review Comment:
   ditto



##########
core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala:
##########
@@ -502,20 +502,20 @@ final class ShuffleBlockFetcherIterator(
         case ShuffleBlockChunkId(_, _, _, _) =>
           if (curRequestSize >= targetRemoteRequestSize ||
             curBlocks.size >= maxBlocksInFlightPerAddress) {
-            curBlocks = createFetchRequests(curBlocks.toSeq, address, isLast = 
false,
+            curBlocks = createFetchRequests(curBlocks.toIndexedSeq, address, 
isLast = false,
               collectedRemoteRequests, enableBatchFetch = false)
             curRequestSize = curBlocks.map(_.size).sum
           }
         case ShuffleMergedBlockId(_, _, _) =>
           if (curBlocks.size >= maxBlocksInFlightPerAddress) {
-            curBlocks = createFetchRequests(curBlocks.toSeq, address, isLast = 
false,
+            curBlocks = createFetchRequests(curBlocks.toIndexedSeq, address, 
isLast = false,
               collectedRemoteRequests, enableBatchFetch = false, 
forMergedMetas = true)
           }
         case _ =>
           // For batch fetch, the actual block in flight should count for 
merged block.
           val mayExceedsMaxBlocks = !doBatchFetch && curBlocks.size >= 
maxBlocksInFlightPerAddress
           if (curRequestSize >= targetRemoteRequestSize || 
mayExceedsMaxBlocks) {
-            curBlocks = createFetchRequests(curBlocks.toSeq, address, isLast = 
false,
+            curBlocks = createFetchRequests(curBlocks.toIndexedSeq, address, 
isLast = false,

Review Comment:
   ditto



##########
core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala:
##########
@@ -502,20 +502,20 @@ final class ShuffleBlockFetcherIterator(
         case ShuffleBlockChunkId(_, _, _, _) =>
           if (curRequestSize >= targetRemoteRequestSize ||
             curBlocks.size >= maxBlocksInFlightPerAddress) {
-            curBlocks = createFetchRequests(curBlocks.toSeq, address, isLast = 
false,
+            curBlocks = createFetchRequests(curBlocks.toIndexedSeq, address, 
isLast = false,

Review Comment:
   ditto



##########
core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala:
##########
@@ -276,7 +276,7 @@ final class ShuffleBlockFetcherIterator(
           val (size, mapIndex) = infoMap(blockId)
           FetchBlockInfo(BlockId(blockId), size, mapIndex)
         }
-        results.put(DeferFetchRequestResult(FetchRequest(address, 
blocks.toSeq)))
+        results.put(DeferFetchRequestResult(FetchRequest(address, 
blocks.toIndexedSeq)))

Review Comment:
   When using Scala 2.12, `blocks.toSeq` is a redundant operation that does 
not cause any memory copy. However, `blocks.toIndexedSeq` will cause a memory 
copy. This change may be beneficial for Scala 2.13, but it will harm the 
performance of Scala 2.12. 
   
   The current default version is still Scala 2.12, so I suggest leaving this 
line as it is and adding a TODO to facilitate the change when Scala 2.13 
becomes the default version.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to