Ngone51 commented on a change in pull request #29228:
URL: https://github.com/apache/spark/pull/29228#discussion_r473734385



##########
File path: core/src/test/scala/org/apache/spark/LocalSparkContext.scala
##########
@@ -22,12 +22,37 @@ import org.scalatest.BeforeAndAfterAll
 import org.scalatest.BeforeAndAfterEach
 import org.scalatest.Suite
 
+import org.apache.spark.internal.Logging
 import org.apache.spark.resource.ResourceProfile
 
/** Manages a local `sc` `SparkContext` variable, correctly stopping it after each test. */
-trait LocalSparkContext extends BeforeAndAfterEach with BeforeAndAfterAll { self: Suite =>
+trait LocalSparkContext extends Logging
+  with BeforeAndAfterEach with BeforeAndAfterAll { self: Suite =>
+
+  private var _conf: SparkConf = new SparkConf()
 
   @transient var sc: SparkContext = _
+  @transient private var _sc: SparkContext = _
+
+  /**
+   * Currently, we are focusing on the reconstruction of LocalSparkContext, so this method
+   * was created temporarily. When the migration work is completed, this method will be
+   * renamed to `sc` and the variable `sc` will be deleted.
+   */
+  def sparkCtx: SparkContext = {
+    if (_sc == null) {
+      _sc = new SparkContext(_conf)
+    }
+    _sc
+  }
+
+  def setConf(pairs: (String, String)*): Unit = {
+    if (_sc != null) {
+      logWarning("Because SparkContext already initialized, " +
+        "since configurations won't take effect in this case.")

Review comment:
       nit: `These configurations ${pairs.mkString(", ")} won't take effect since the SparkContext has already been initialized.`
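
       For illustration, a rough sketch of `setConf` in `LocalSparkContext` with that wording; the branch that stashes the pairs on `_conf` is an assumption, since the diff is truncated at the warning:

```scala
def setConf(pairs: (String, String)*): Unit = {
  if (_sc != null) {
    // Suggested wording: name the ignored pairs and the reason they are ignored.
    logWarning(s"These configurations ${pairs.mkString(", ")} won't take effect " +
      "since the SparkContext has already been initialized.")
  } else {
    // Assumed behaviour: stash the pairs on the shared conf for the next context.
    _conf.setAll(pairs)
  }
}
```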

##########
File path: core/src/test/scala/org/apache/spark/LocalSparkContext.scala
##########
@@ -22,12 +22,37 @@ import org.scalatest.BeforeAndAfterAll
 import org.scalatest.BeforeAndAfterEach
 import org.scalatest.Suite
 
+import org.apache.spark.internal.Logging
 import org.apache.spark.resource.ResourceProfile
 
/** Manages a local `sc` `SparkContext` variable, correctly stopping it after each test. */
-trait LocalSparkContext extends BeforeAndAfterEach with BeforeAndAfterAll { self: Suite =>
+trait LocalSparkContext extends Logging

Review comment:
       I think we usually put `Logging` at the end.
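
       Roughly, a sketch of that ordering:

```scala
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}

import org.apache.spark.internal.Logging

// Mix Logging in last, after the ScalaTest traits.
trait LocalSparkContext extends BeforeAndAfterEach with BeforeAndAfterAll
  with Logging { self: Suite =>
  // ... body unchanged ...
}
```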

##########
File path: core/src/test/scala/org/apache/spark/LocalSparkContext.scala
##########
@@ -22,12 +22,37 @@ import org.scalatest.BeforeAndAfterAll
 import org.scalatest.BeforeAndAfterEach
 import org.scalatest.Suite
 
+import org.apache.spark.internal.Logging
 import org.apache.spark.resource.ResourceProfile
 
/** Manages a local `sc` `SparkContext` variable, correctly stopping it after each test. */
-trait LocalSparkContext extends BeforeAndAfterEach with BeforeAndAfterAll { self: Suite =>
+trait LocalSparkContext extends Logging
+  with BeforeAndAfterEach with BeforeAndAfterAll { self: Suite =>
+
+  private var _conf: SparkConf = new SparkConf()

Review comment:
       I think `SparkConf` should have default values for `master` and `appName`, so test suites that extend it could use the SparkContext directly without any specific configurations when the test doesn't really care.
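
       A minimal sketch of that; the particular defaults below (`local[2]` and the suite's class name) are illustrative, not taken from the PR:

```scala
// Give the shared conf test-friendly defaults so suites that don't care
// about configuration can use the SparkContext directly.
private var _conf: SparkConf = new SparkConf()
  .setMaster("local[2]")
  .setAppName(this.getClass.getSimpleName)
```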

##########
File path: core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
##########
@@ -295,11 +295,20 @@ class DAGSchedulerSuite extends SparkFunSuite with LocalSparkContext with TimeLi
 
   override def beforeEach(): Unit = {
     super.beforeEach()
-    init(new SparkConf())
+    firstInit = true
+    setConf("spark.master" -> "local[2]", "spark.app.name" -> 
"DAGSchedulerSuite")

Review comment:
       We'd better expose the `conf` via a function and use that to set the conf here; `setConf` is designed to be used by the tests only.
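
       One possible shape of that, stripped down to the relevant bits; the `conf` hook name and the `protected` modifier are assumptions, not part of the PR as posted:

```scala
import org.apache.spark.SparkConf

trait LocalSparkContext { // simplified for the sketch
  // Overridable hook for a suite's base configuration.
  protected def conf: SparkConf = new SparkConf()
}

class DAGSchedulerSuite extends LocalSparkContext { // simplified for the sketch
  // Declare the base conf once instead of calling setConf from beforeEach;
  // setConf stays available for per-test tweaks inside individual tests.
  override protected def conf: SparkConf =
    super.conf.setMaster("local[2]").setAppName("DAGSchedulerSuite")
}
```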



