redsanket commented on a change in pull request #23677: [SPARK-26755] : 
Optimize Spark Scheduler to dequeue speculative tasks…
URL: https://github.com/apache/spark/pull/23677#discussion_r251871222
 
 

 ##########
 File path: core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
 ##########
 @@ -245,6 +257,28 @@ private[spark] class TaskSetManager(
    allPendingTasks += index  // No point scanning this whole list to find the old task there
   }
 
+  private[spark] def addPendingSpeculativeTask(index: Int) {
+    for (loc <- tasks(index).preferredLocations) {
+      loc match {
+        case e: ExecutorCacheTaskLocation =>
+          pendingSpeculatableTasksForExecutor.getOrElseUpdate(
+            e.executorId, new ArrayBuffer) += index
+        case _ =>
+      }
 +      pendingSpeculatableTasksForHost.getOrElseUpdate(loc.host, new ArrayBuffer) += index
+      for (rack <- sched.getRackForHost(loc.host)) {
 +        pendingSpeculatableTasksForRack.getOrElseUpdate(rack, new ArrayBuffer) += index
+      }
+    }
+
+    if (tasks(index).preferredLocations == Nil) {
+        pendingSpeculatableTasksWithNoPrefs += index
+    }
+
+    // No point scanning this whole list to find the old task there
+    allPendingSpeculatableTasks += index
 
 Review comment:
   Quick question: does this need to hold all tasks, or only those with a TaskLocality.ANY preference? It seems wasteful to scan every task in this list just to find the non-local ones. I could be wrong in my understanding, however — thanks.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to