ramaddepally closed pull request #23504: [SPARK-26585][K8S] Add additional
integration tests for K8s Scheduler Backend
URL: https://github.com/apache/spark/pull/23504
This is a PR merged from a forked repository.
As GitHub hides the original diff once a foreign (fork-based) pull request
is merged, the diff is reproduced below for the sake of provenance:
diff --git
a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/BasicTestsSuite.scala
b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/BasicTestsSuite.scala
index 4e749c40563dc..e11b2b808c1d4 100644
---
a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/BasicTestsSuite.scala
+++
b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/BasicTestsSuite.scala
@@ -93,6 +93,95 @@ private[spark] trait BasicTestsSuite { k8sSuite:
KubernetesSuite =>
.set("spark.files", REMOTE_PAGE_RANK_DATA_FILE)
runSparkRemoteCheckAndVerifyCompletion(appArgs =
Array(REMOTE_PAGE_RANK_FILE_NAME))
}
+
+ test("Run SparkPi with driver and executor image specified independently",
k8sTestTag) {
+ sparkAppConf.remove("spark.kubernetes.container.image")
+ sparkAppConf.set("spark.kubernetes.driver.container.image", image)
+ sparkAppConf.set("spark.kubernetes.executor.container.image", image)
+ runSparkPiAndVerifyCompletion()
+ }
+
+ test("Run SparkPi with custom cpu requirements", k8sTestTag) {
+ sparkAppConf.set("spark.kubernetes.driver.limit.cores", "2")
+ sparkAppConf.set("spark.kubernetes.executor.limit.cores", "2")
+ sparkAppConf.set("spark.driver.cores", ".8")
+ sparkAppConf.set("spark.kubernetes.executor.request.cores", ".5")
+
+ runSparkPiAndVerifyCompletion(
+ driverPodChecker = (driverPod: Pod) => {
+
assert(driverPod.getSpec.getContainers.get(0).getResources.getLimits.get("cpu")
+ .getAmount === "2")
+
assert(driverPod.getSpec.getContainers.get(0).getResources.getRequests.get("cpu")
+ .getAmount === "800m")
+ },
+ executorPodChecker = (executorPod: Pod) => {
+
assert(executorPod.getSpec.getContainers.get(0).getResources.getLimits.get("cpu")
+ .getAmount === "2")
+
assert(executorPod.getSpec.getContainers.get(0).getResources.getRequests.get("cpu")
+ .getAmount === "500m")
+ }
+ )
+ }
+
+ test("Run SparkPi with custom memory and memory overhead factor
requirements", k8sTestTag) {
+ val memDriver = 512
+ val memExecutor = 256
+ val memOverheadConstant = 0.8
+ val minMemoryOverhead = 384
+ sparkAppConf.set("spark.driver.memory", s"${memDriver}m")
+ sparkAppConf.set("spark.executor.memory", s"${memExecutor}m")
+ sparkAppConf.set("spark.kubernetes.memoryOverheadFactor",
s"$memOverheadConstant")
+
+
+ runSparkPiAndVerifyCompletion(
+ driverPodChecker = (driverPod: Pod) => {
+ val memory = s"${(memDriver + math.max(memDriver * memOverheadConstant,
+ minMemoryOverhead)).toInt}Mi"
+
assert(driverPod.getSpec.getContainers.get(0).getResources.getRequests.get("memory")
+ .getAmount === memory)
+
assert(driverPod.getSpec.getContainers.get(0).getResources.getLimits.get("memory")
+ .getAmount === memory)
+ },
+ executorPodChecker = (executorPod: Pod) => {
+ val memory = s"${(memExecutor + math.max(memExecutor *
memOverheadConstant,
+ minMemoryOverhead)).toInt}Mi"
+
assert(executorPod.getSpec.getContainers.get(0).getResources.getRequests.get("memory")
+ .getAmount === memory)
+
assert(executorPod.getSpec.getContainers.get(0).getResources.getLimits.get("memory")
+ .getAmount === memory)
+ }
+ )
+ }
+
+ test("Run SparkPi with custom memory and memory overhead requirements",
k8sTestTag) {
+ val memDriver = 512
+ val memExecutor = 256
+ val memOverheadConstant = 0.8
+ val memDriverOverhead = 200
+ val memExecutorOverhead = 100
+ sparkAppConf.set("spark.driver.memory", s"${memDriver}m")
+ sparkAppConf.set("spark.executor.memory", s"${memExecutor}m")
+ sparkAppConf.set("spark.kubernetes.memoryOverheadFactor",
s"$memOverheadConstant")
+ sparkAppConf.set("spark.driver.memoryOverhead", "200")
+ sparkAppConf.set("spark.executor.memoryOverhead", "100")
+
+ runSparkPiAndVerifyCompletion(
+ driverPodChecker = (driverPod: Pod) => {
+ val memory = s"${memDriver + memDriverOverhead}Mi"
+
assert(driverPod.getSpec.getContainers.get(0).getResources.getRequests.get("memory")
+ .getAmount === memory)
+
assert(driverPod.getSpec.getContainers.get(0).getResources.getLimits.get("memory")
+ .getAmount === memory)
+ },
+ executorPodChecker = (executorPod: Pod) => {
+ val memory = s"${memExecutor + memExecutorOverhead}Mi"
+
assert(executorPod.getSpec.getContainers.get(0).getResources.getRequests.get("memory")
+ .getAmount === memory)
+
assert(executorPod.getSpec.getContainers.get(0).getResources.getLimits.get("memory")
+ .getAmount === memory)
+ }
+ )
+ }
}
private[spark] object BasicTestsSuite {
diff --git
a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesTestComponents.scala
b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesTestComponents.scala
index 250eaab9e9808..ad3dbe2cdb0f5 100644
---
a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesTestComponents.scala
+++
b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesTestComponents.scala
@@ -90,6 +90,8 @@ private[spark] class SparkAppConf {
override def toString: String = map.toString
def toStringArray: Iterable[String] = map.toList.flatMap(t => List("--conf",
s"${t._1}=${t._2}"))
+
+ def remove(key: String): Unit = map.remove(key)
}
private[spark] case class SparkAppArguments(
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]