This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 78ec6f783b54 [SPARK-55173][K8S][TESTS] Improve K8s IT to use `TestConstants` consistently and `match` syntax for extensibility
78ec6f783b54 is described below
commit 78ec6f783b5404ec7289f24dfb6c8919a0fabd7d
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Mon Jan 26 09:32:34 2026 +0900
[SPARK-55173][K8S][TESTS] Improve K8s IT to use `TestConstants` consistently and `match` syntax for extensibility
### What changes were proposed in this pull request?
This PR aims to improve K8s integration tests by refactoring:
- To re-use `TestConstants` consistently, e.g., `"minikube"` -> `BACKEND_MINIKUBE` and `"docker-desktop"` -> `BACKEND_DOCKER_DESKTOP`.
- To use `match` syntax (instead of `if` statements) for extensibility.
```scala
- val storageClassName = if (testBackend == MinikubeTestBackend) "standard" else "hostpath"
- val hostname = if (testBackend == MinikubeTestBackend) "minikube" else "docker-desktop"
+ val (storageClassName, hostname) = testBackend match {
+ case MinikubeTestBackend => ("standard", BACKEND_MINIKUBE)
+ case DockerForDesktopBackend => ("hostpath", BACKEND_DOCKER_DESKTOP)
+ case _ => ("hostpath", BACKEND_DOCKER_DESKTOP)
+ }
```
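For reference, a minimal sketch of the two backend constants this refactoring relies on; the values follow from the substitutions above and the `git grep` output below, while the real `TestConstants` object in the repository defines additional constants beyond these:
```scala
// Sketch only: the constants assumed by the refactoring above.
object TestConstants {
  val BACKEND_MINIKUBE = "minikube"
  val BACKEND_DOCKER_DESKTOP = "docker-desktop"
}
```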
### Why are the changes needed?
This helps us keep the test backend handling consistent and makes it easier to add a new backend.
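As a hypothetical illustration of that extensibility (the `KindTestBackend` and `BACKEND_KIND` names are invented for this sketch and are not part of the PR), a new backend only needs one extra `case` in the `match`, whereas every `if (testBackend == MinikubeTestBackend)` chain would have had to be edited separately:
```scala
val (storageClassName, hostname) = testBackend match {
  case MinikubeTestBackend => ("standard", BACKEND_MINIKUBE)
  case DockerForDesktopBackend => ("hostpath", BACKEND_DOCKER_DESKTOP)
  // case KindTestBackend => ("standard", BACKEND_KIND)  // hypothetical future backend
  case _ => ("hostpath", BACKEND_DOCKER_DESKTOP)
}
```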
### Does this PR introduce _any_ user-facing change?
No, this is a test-only change.
### How was this patch tested?
Pass the CIs with the existing test cases.
Manually check the previous code patterns like the following.
**BEFORE**
```
$ git grep 'testBackend ==' | wc -l
4
```
```
$ git grep "docker-desktop" | grep scala
resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PVTestsSuite.scala:    val hostname = if (testBackend == MinikubeTestBackend) "minikube" else "docker-desktop"
resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/TestConstants.scala:  val BACKEND_DOCKER_DESKTOP = "docker-desktop"
```
**AFTER**
```
$ git grep 'testBackend ==' | wc -l
0
```
```
$ git grep "docker-desktop" | grep scala
resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/TestConstants.scala:  val BACKEND_DOCKER_DESKTOP = "docker-desktop"
```
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #53956 from dongjoon-hyun/SPARK-55173.
Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../apache/spark/deploy/k8s/integrationtest/PVTestsSuite.scala | 9 +++++++--
.../apache/spark/deploy/k8s/integrationtest/VolumeSuite.scala | 10 ++++++++--
.../deploy/k8s/integrationtest/backend/minikube/Minikube.scala | 3 ++-
3 files changed, 17 insertions(+), 5 deletions(-)
diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PVTestsSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PVTestsSuite.scala
index 216441418aea..17c5c1055d4d 100644
--- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PVTestsSuite.scala
+++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PVTestsSuite.scala
@@ -23,14 +23,19 @@ import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
import org.scalatest.time.{Milliseconds, Span}
import org.apache.spark.deploy.k8s.integrationtest.KubernetesSuite._
+import org.apache.spark.deploy.k8s.integrationtest.TestConstants._
+import org.apache.spark.deploy.k8s.integrationtest.backend.docker.DockerForDesktopBackend
import org.apache.spark.deploy.k8s.integrationtest.backend.minikube.MinikubeTestBackend
private[spark] trait PVTestsSuite { k8sSuite: KubernetesSuite =>
import PVTestsSuite._
private def setupLocalStorage(): Unit = {
- val storageClassName = if (testBackend == MinikubeTestBackend) "standard" else "hostpath"
- val hostname = if (testBackend == MinikubeTestBackend) "minikube" else "docker-desktop"
+ val (storageClassName, hostname) = testBackend match {
+ case MinikubeTestBackend => ("standard", BACKEND_MINIKUBE)
+ case DockerForDesktopBackend => ("hostpath", BACKEND_DOCKER_DESKTOP)
+ case _ => ("hostpath", BACKEND_DOCKER_DESKTOP)
+ }
val pvBuilder = new PersistentVolumeBuilder()
.withKind("PersistentVolume")
.withApiVersion("v1")
diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/VolumeSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/VolumeSuite.scala
index c57e4b4578d6..745e294598ef 100644
--- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/VolumeSuite.scala
+++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/VolumeSuite.scala
@@ -99,7 +99,10 @@ private[spark] trait VolumeSuite { k8sSuite: KubernetesSuite =>
}
test("A driver-only Spark job with an OnDemand PVC volume", k8sTestTag) {
- val storageClassName = if (testBackend == MinikubeTestBackend) "standard" else "hostpath"
+ val storageClassName = testBackend match {
+ case MinikubeTestBackend => "standard"
+ case _ => "hostpath"
+ }
val DRIVER_PREFIX = "spark.kubernetes.driver.volumes.persistentVolumeClaim"
sparkAppConf
.set("spark.kubernetes.driver.master", "local[10]")
@@ -148,7 +151,10 @@ private[spark] trait VolumeSuite { k8sSuite: KubernetesSuite =>
}
test("A Spark job with two executors with OnDemand PVC volumes", k8sTestTag)
{
- val storageClassName = if (testBackend == MinikubeTestBackend) "standard" else "hostpath"
+ val storageClassName = testBackend match {
+ case MinikubeTestBackend => "standard"
+ case _ => "hostpath"
+ }
val EXECUTOR_PREFIX = "spark.kubernetes.executor.volumes.persistentVolumeClaim"
sparkAppConf
.set("spark.executor.instances", "2")
diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/backend/minikube/Minikube.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/backend/minikube/Minikube.scala
index 1d2f27fe7b98..acfa237228ad 100644
--- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/backend/minikube/Minikube.scala
+++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/backend/minikube/Minikube.scala
@@ -19,6 +19,7 @@ package org.apache.spark.deploy.k8s.integrationtest.backend.minikube
import io.fabric8.kubernetes.client.{Config, KubernetesClient, KubernetesClientBuilder}
import org.apache.spark.deploy.k8s.integrationtest.ProcessUtils
+import org.apache.spark.deploy.k8s.integrationtest.TestConstants._
import org.apache.spark.internal.Logging
// TODO support windows
@@ -57,7 +58,7 @@ private[spark] object Minikube extends Logging {
"non-numeric suffix is intentionally dropped)")
}
- new KubernetesClientBuilder().withConfig(Config.autoConfigure("minikube")).build()
+ new KubernetesClientBuilder().withConfig(Config.autoConfigure(BACKEND_MINIKUBE)).build()
}
def getMinikubeStatus(): MinikubeStatus.Value = {
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]