This is an automated email from the ASF dual-hosted git repository.

meng pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new d98a5ce  [SPARK-28115][CORE][TEST] Fix flaky test: SparkContextSuite.test resource scheduling under local-cluster mode
d98a5ce is described below

commit d98a5ce34d6b4b098d30c26c89a9d65d931f930d
Author: Xingbo Jiang <xingbo.ji...@databricks.com>
AuthorDate: Thu Jun 20 13:23:29 2019 -0700

    [SPARK-28115][CORE][TEST] Fix flaky test: SparkContextSuite.test resource scheduling under local-cluster mode
    
    ## What changes were proposed in this pull request?
    
    The test `SparkContextSuite.test resource scheduling under local-cluster
    mode` has been flaky because it expects the size of
    `sc.statusTracker.getExecutorInfos` to be the same as the number of
    executors, while the returned list contains both the driver and the
    executors.
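
    As a minimal sketch of the race, using the one-executor case from the
    tests below: `getExecutorInfos` grows from one entry (the driver alone)
    to two (driver plus executor), so the old wait could return before any
    executor had registered:

        // Old, flaky wait: also satisfied while only the driver is registered.
        eventually(timeout(10.seconds)) {
          assert(sc.statusTracker.getExecutorInfos.size == 1)
        }

        // New wait: blocks until one executor (besides the driver) is up,
        // or fails after 10000 ms.
        TestUtils.waitUntilExecutorsUp(sc, 1, 10000)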
    
    ## How was this patch tested?
    
    Updated existing tests.
    
    Closes #24917 from jiangxb1987/getExecutorInfos.
    
    Authored-by: Xingbo Jiang <xingbo.ji...@databricks.com>
    Signed-off-by: Xiangrui Meng <m...@databricks.com>
---
 .../src/main/scala/org/apache/spark/SparkStatusTracker.scala |  1 +
 core/src/test/scala/org/apache/spark/SparkContextSuite.scala | 12 +++---------
 2 files changed, 4 insertions(+), 9 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkStatusTracker.scala b/core/src/main/scala/org/apache/spark/SparkStatusTracker.scala
index 815237e..555c085 100644
--- a/core/src/main/scala/org/apache/spark/SparkStatusTracker.scala
+++ b/core/src/main/scala/org/apache/spark/SparkStatusTracker.scala
@@ -99,6 +99,7 @@ class SparkStatusTracker private[spark] (sc: SparkContext, store: AppStatusStore
   /**
    * Returns information of all known executors, including host, port, cacheSize, numRunningTasks
    * and memory metrics.
+   * Note this includes information for both the driver and the executors.
    */
   def getExecutorInfos: Array[SparkExecutorInfo] = {
     store.executorList(true).map { exec =>
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index fa2c4bd..628ac60 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -750,9 +750,7 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
       sc = new SparkContext(conf)
 
      // Ensure all executors have started
-      eventually(timeout(10.seconds)) {
-        assert(sc.statusTracker.getExecutorInfos.size == 1)
-      }
+      TestUtils.waitUntilExecutorsUp(sc, 1, 10000)
       assert(sc.resources.size === 1)
       assert(sc.resources.get(GPU).get.addresses === Array("5", "6"))
       assert(sc.resources.get(GPU).get.name === "gpu")
@@ -780,9 +778,7 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
       sc = new SparkContext(conf)
 
      // Ensure all executors have started
-      eventually(timeout(10.seconds)) {
-        assert(sc.statusTracker.getExecutorInfos.size == 1)
-      }
+      TestUtils.waitUntilExecutorsUp(sc, 1, 10000)
       // driver gpu resources file should take precedence over the script
       assert(sc.resources.size === 1)
       assert(sc.resources.get(GPU).get.addresses === Array("0", "1", "8"))
@@ -855,9 +851,7 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
       sc = new SparkContext(conf)
 
      // Ensure all executors have started
-      eventually(timeout(60.seconds)) {
-        assert(sc.statusTracker.getExecutorInfos.size == 3)
-      }
+      TestUtils.waitUntilExecutorsUp(sc, 3, 60000)
 
       val rdd = sc.makeRDD(1 to 10, 9).mapPartitions { it =>
         val context = TaskContext.get()

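For reference, a minimal sketch of what `TestUtils.waitUntilExecutorsUp`
likely does (the polling loop below is an illustrative assumption, not the
verbatim helper; the real one lives in core's `org.apache.spark.TestUtils`):

    import java.util.concurrent.{TimeUnit, TimeoutException}

    import org.apache.spark.SparkContext

    object ExecutorWait {
      // Hypothetical re-implementation for illustration only.
      def waitUntilExecutorsUp(sc: SparkContext, numExecutors: Int, timeoutMillis: Long): Unit = {
        val deadline = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(timeoutMillis)
        while (System.nanoTime() < deadline) {
          // getExecutorInfos includes the driver, so require strictly more
          // entries than the requested number of executors.
          if (sc.statusTracker.getExecutorInfos.length > numExecutors) {
            return
          }
          Thread.sleep(10)
        }
        throw new TimeoutException(
          s"Unable to get $numExecutors executors up before $timeoutMillis ms")
      }
    }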
