srowen commented on a change in pull request #23540: [SPARK-26615][Core] Fixing transport server/client resource leaks in the core unittests
URL: https://github.com/apache/spark/pull/23540#discussion_r247926574
 
 

 ##########
 File path: core/src/test/scala/org/apache/spark/SparkContextSchedulerCreationSuite.scala
 ##########
 @@ -23,110 +23,129 @@ import org.apache.spark.internal.Logging
 import org.apache.spark.scheduler.{SchedulerBackend, TaskScheduler, TaskSchedulerImpl}
 import org.apache.spark.scheduler.cluster.StandaloneSchedulerBackend
 import org.apache.spark.scheduler.local.LocalSchedulerBackend
+import org.apache.spark.util.Utils
 
 
 class SparkContextSchedulerCreationSuite
   extends SparkFunSuite with LocalSparkContext with PrivateMethodTester with Logging {
 
-  def createTaskScheduler(master: String): TaskSchedulerImpl =
-    createTaskScheduler(master, "client")
+  def createTaskScheduler(master: String)(body: TaskSchedulerImpl => Unit): Unit =
+    createTaskScheduler(master, "client")(body)
 
-  def createTaskScheduler(master: String, deployMode: String): TaskSchedulerImpl =
-    createTaskScheduler(master, deployMode, new SparkConf())
+  def createTaskScheduler(master: String, deployMode: String)(
+      body: TaskSchedulerImpl => Unit): Unit =
+    createTaskScheduler(master, deployMode, new SparkConf())(body)
 
   def createTaskScheduler(
       master: String,
       deployMode: String,
-      conf: SparkConf): TaskSchedulerImpl = {
+      conf: SparkConf)(body: TaskSchedulerImpl => Unit): Unit = {
     // Create local SparkContext to setup a SparkEnv. We don't actually want to start() the
     // real schedulers, so we don't want to create a full SparkContext with the desired scheduler.
     sc = new SparkContext("local", "test", conf)
     val createTaskSchedulerMethod =
       PrivateMethod[Tuple2[SchedulerBackend, TaskScheduler]]('createTaskScheduler)
-    val (_, sched) = SparkContext invokePrivate createTaskSchedulerMethod(sc, master, deployMode)
-    sched.asInstanceOf[TaskSchedulerImpl]
+    val (_, sched) =
+      SparkContext invokePrivate createTaskSchedulerMethod(sc, master, deployMode)
+    try {
+      body(sched.asInstanceOf[TaskSchedulerImpl])
+    } finally {
+      Utils.tryLogNonFatalError {
+        sched.stop()
+      }
+    }
   }
 
+  def noOp(taskSchedulerImpl: TaskSchedulerImpl): Unit = {}
 
 Review comment:
   Could you just make a no-op block the default value of the argument?
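   A minimal sketch of what that might look like, assuming the helper keeps its curried shape (the no-op default and the call site below are illustrative, not part of this patch):

      import org.apache.spark.SparkConf
      import org.apache.spark.scheduler.TaskSchedulerImpl

      // Hypothetical variant: the curried `body` parameter gets a no-op default,
      // so tests that only need the scheduler created (and stopped) can omit it.
      def createTaskScheduler(
          master: String,
          deployMode: String,
          conf: SparkConf)(
          body: TaskSchedulerImpl => Unit = _ => ()): Unit = {
        // Same as in the diff above: create the scheduler via the private
        // SparkContext.createTaskScheduler, run `body`, and stop the scheduler
        // in a finally block.
      }

      // Call sites that currently pass `noOp` would then read:
      //   createTaskScheduler("local", "client", new SparkConf())()

   That would also make the separate `noOp` helper unnecessary.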

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org
