tgravescs commented on a change in pull request #24374: [SPARK-27366][CORE] 
Support GPU Resources in Spark job scheduling
URL: https://github.com/apache/spark/pull/24374#discussion_r289434975
 
 

 ##########
 File path: core/src/test/scala/org/apache/spark/SparkContextSuite.scala
 ##########
 @@ -822,13 +823,66 @@ class SparkContextSuite extends SparkFunSuite with 
LocalSparkContext with Eventu
       "spark.task.resource.gpu.count = 2"))
   }
 
+  test("Parse resources executor config not the same multiple numbers of the 
task requirements") {
+    val conf = new SparkConf()
+      .set(SPARK_TASK_RESOURCE_PREFIX + "gpu" + SPARK_RESOURCE_COUNT_SUFFIX, 
"2")
+      .set(SPARK_EXECUTOR_RESOURCE_PREFIX + "gpu" + 
SPARK_RESOURCE_COUNT_SUFFIX, "4")
+      .setMaster("local-cluster[1, 1, 1024]")
+      .setAppName("test-cluster")
+
+    var error = intercept[SparkException] {
+      sc = new SparkContext(conf)
+    }.getMessage()
+
+    assert(error.contains("The value of executor resource config: " +
+      "spark.executor.resource.gpu.count = 4 has to be 1 times the number of 
the task config: " +
+      "spark.task.resource.gpu.count = 2"))
+  }
+
   def mockDiscoveryScript(file: File, result: String): String = {
     Files.write(s"echo $result", file, StandardCharsets.UTF_8)
     JavaFiles.setPosixFilePermissions(file.toPath(),
       EnumSet.of(OWNER_READ, OWNER_EXECUTE, OWNER_WRITE))
     file.getPath()
   }
 
+  test("test resource scheduling under local-cluster mode") {
+    import org.apache.spark.TestUtils._
+
 
 Review comment:
   Add `assume(!(Utils.isWindows))` so the test is skipped on Windows.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to