This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b8d6669  [SPARK-26082][MESOS][FOLLOWUP] Fix Scala-2.11 build
b8d6669 is described below

commit b8d666940b9186d2118c70b218b66ef038479b0e
Author: Jungtaek Lim (HeartSaVioR) <kabh...@gmail.com>
AuthorDate: Sat Feb 9 13:46:52 2019 -0800

    [SPARK-26082][MESOS][FOLLOWUP] Fix Scala-2.11 build
    
    ## What changes were proposed in this pull request?
    
    #23744 added a UT to prevent a future regression. However, it breaks the
Scala-2.11 build. This fixes that.
    
    ## How was this patch tested?
    
    Manual test with Scala-2.11 profile.
    
    Closes #23755 from HeartSaVioR/SPARK-26082-FOLLOW-UP-V2.
    
    Authored-by: Jungtaek Lim (HeartSaVioR) <kabh...@gmail.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 .../spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala    | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git 
a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala
 
b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala
index ff63987..858146b 100644
--- 
a/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala
+++ 
b/resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterSchedulerSuite.scala
@@ -276,7 +276,7 @@ class MesosClusterSchedulerSuite extends SparkFunSuite with 
LocalSparkContext wi
 
     val launchedTasks = Utils.verifyTaskLaunched(driver, "o1")
     val uris = launchedTasks.head.getCommand.getUrisList
-    assert(uris.stream().allMatch(_.getCache))
+    assert(uris.asScala.forall(_.getCache))
   }
 
   test("supports disabling fetcher cache") {
@@ -300,7 +300,7 @@ class MesosClusterSchedulerSuite extends SparkFunSuite with 
LocalSparkContext wi
 
     val launchedTasks = Utils.verifyTaskLaunched(driver, "o1")
     val uris = launchedTasks.head.getCommand.getUrisList
-    assert(uris.stream().allMatch(!_.getCache))
+    assert(uris.asScala.forall(!_.getCache))
   }
 
   test("accept/decline offers with driver constraints") {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to