Ngone51 commented on a change in pull request #29228:
URL: https://github.com/apache/spark/pull/29228#discussion_r471247984



##########
File path: 
core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
##########
@@ -295,7 +298,20 @@ class DAGSchedulerSuite extends SparkFunSuite with 
LocalSparkContext with TimeLi
 
   override def beforeEach(): Unit = {
     super.beforeEach()
-    init(new SparkConf())
+  }
+
+  override protected def test(testName: String, testTags: Tag*)(testFun: => 
Any)
+      (implicit pos: Position): Unit = {
+    testWithSparkConf(testName, testTags: _*)()(testFun)(pos)
+  }
+
+  private def testWithSparkConf(testName: String, testTags: Tag*)

Review comment:
       I tend to agree with @jiangxb1987. There are only 7 places using 
`testWithSparkConf()` compared to the other 81 tests within the 
`DAGSchedulerSuite`. We could just call `init` inside each test, to ease the other 
tests that need to call `afterEach` inside the `test`.
   
   And I have another idea for the whole thing. That is, we could probably 
initialize the `SparkContext` like this:
   
   
   ```scala
   trait LocalSparkContext ... {
     @transient private var _sc: SparkContext = _
      private val conf = new SparkConf()
   
     def sc: SparkContext = {
         if (_sc == null) {
           _sc = new SparkContext(conf)
         }
         _sc
     }
   
     def withConf(pairs: (String, String)*) = {
       if (_sc != null) { 
          // probably, log warning when SparkContext already initialized
          // since configurations won't take effect in this case
        }
       pairs.foreach { case (k, v) => conf.set(k, v) }
     }
   
     override def afterEach(): Unit = {
       try {
         resetSparkContext()
       } finally {
         super.afterEach()
       }
     }
   
     def resetSparkContext(): Unit = {
       LocalSparkContext.stop(sc)
       ResourceProfile.clearDefaultProfile()
       _sc = null
     }
   }
   ```
   
   @beliefer @jiangxb1987 WDYT?
   
   
   
   

##########
File path: 
core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
##########
@@ -295,7 +298,20 @@ class DAGSchedulerSuite extends SparkFunSuite with 
LocalSparkContext with TimeLi
 
   override def beforeEach(): Unit = {
     super.beforeEach()
-    init(new SparkConf())
+  }
+
+  override protected def test(testName: String, testTags: Tag*)(testFun: => 
Any)
+      (implicit pos: Position): Unit = {
+    testWithSparkConf(testName, testTags: _*)()(testFun)(pos)
+  }
+
+  private def testWithSparkConf(testName: String, testTags: Tag*)

Review comment:
       I tend to agree with @jiangxb1987. There are only 7 places using 
`testWithSparkConf()` compared to the other 81 tests within the 
`DAGSchedulerSuite`. We could just call `init` inside each test, to ease the other 
tests that need to call `afterEach` inside the `test`.
   
   And I have another idea for the whole thing. That is, we could probably 
initialize the `SparkContext` like this:
   
   
   ```scala
   trait LocalSparkContext ... {
     @transient private var _sc: SparkContext = _
      private val conf = new SparkConf()
   
     def sc: SparkContext = {
         if (_sc == null) {
           _sc = new SparkContext(conf)
         }
         _sc
     }
   
     def withConf(pairs: (String, String)*) = {
       if (_sc != null) { 
          // probably, log warning when SparkContext already initialized
          // since configurations won't take effect in this case
        }
       pairs.foreach { case (k, v) => conf.set(k, v) }
     }
   
     override def afterEach(): Unit = {
       try {
         resetSparkContext()
       } finally {
         super.afterEach()
       }
     }
   
     def resetSparkContext(): Unit = {
       LocalSparkContext.stop(sc)
       ResourceProfile.clearDefaultProfile()
       _sc = null
     }
   }
   ```
   
   @beliefer @jiangxb1987 WDYT?
   
   
   
   




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to