This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new d114e262ba29 [SPARK-44495][INFRA][K8S] Use the latest minikube in K8s IT
d114e262ba29 is described below

commit d114e262ba295995bb6a85035c1717cd353a526a
Author: Dongjoon Hyun <dh...@apple.com>
AuthorDate: Sun Jan 21 08:53:16 2024 +0800

    [SPARK-44495][INFRA][K8S] Use the latest minikube in K8s IT
    
    ### What changes were proposed in this pull request?
    
    This PR aims to recover the GitHub Action K8s IT to use the latest Minikube and to make sure that the Apache Spark K8s module is tested with the latest Minikube without any issues.
    
    **BEFORE**
    - Minikube: v1.30.1
    - K8s: v1.26.3
    
    **AFTER**
    - Minikube: v1.32.0
    - K8s: v1.28.3
    
    ### Why are the changes needed?
    
    - Previously, the Minikube version was pinned due to a CI failure.
    - After this PR, we will always track the latest Minikube and K8s versions.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #44813 from dongjoon-hyun/SPARK-44495.
    
    Authored-by: Dongjoon Hyun <dh...@apple.com>
    Signed-off-by: yangjie01 <yangji...@baidu.com>
---
 .github/workflows/build_and_test.yml                              | 8 +++-----
 .../deploy/k8s/integrationtest/KubernetesTestComponents.scala     | 2 ++
 .../apache/spark/deploy/k8s/integrationtest/PVTestsSuite.scala    | 3 ++-
 .../spark/deploy/k8s/integrationtest/VolcanoTestsSuite.scala      | 4 ++--
 4 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index 99bb2b12e083..69636629ca9d 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -1063,9 +1063,7 @@ jobs:
       - name: start minikube
         run: |
           # See more in "Installation" https://minikube.sigs.k8s.io/docs/start/
-          # curl -LO https://storage.googleapis.com/minikube/releases/latest/minikube-linux-amd64
-          # TODO(SPARK-44495): Resume to use the latest minikube for k8s-integration-tests.
-          curl -LO https://storage.googleapis.com/minikube/releases/v1.30.1/minikube-linux-amd64
+          curl -LO https://storage.googleapis.com/minikube/releases/latest/minikube-linux-amd64
           sudo install minikube-linux-amd64 /usr/local/bin/minikube
           rm minikube-linux-amd64
           # Github Action limit cpu:2, memory: 6947MB, limit to 2U6G for better resource statistic
@@ -1074,7 +1072,7 @@ jobs:
         run: |
           kubectl get pods -A
           kubectl describe node
-      - name: Run Spark on K8S integration test (With driver cpu 0.5, executor cpu 0.2 limited)
+      - name: Run Spark on K8S integration test
         run: |
           # Prepare PV test
           PVC_TMP_DIR=$(mktemp -d)
@@ -1084,7 +1082,7 @@ jobs:
           kubectl create clusterrolebinding serviceaccounts-cluster-admin --clusterrole=cluster-admin --group=system:serviceaccounts || true
           kubectl apply -f https://raw.githubusercontent.com/volcano-sh/volcano/v1.8.2/installer/volcano-development.yaml || true
           eval $(minikube docker-env)
-          build/sbt -Phadoop-3 -Psparkr -Pkubernetes -Pvolcano -Pkubernetes-integration-tests -Dspark.kubernetes.test.driverRequestCores=0.5 -Dspark.kubernetes.test.executorRequestCores=0.2 -Dspark.kubernetes.test.volcanoMaxConcurrencyJobNum=1 -Dtest.exclude.tags=local "kubernetes-integration-tests/test"
+          build/sbt -Phadoop-3 -Psparkr -Pkubernetes -Pvolcano -Pkubernetes-integration-tests -Dspark.kubernetes.test.volcanoMaxConcurrencyJobNum=1 -Dtest.exclude.tags=local "kubernetes-integration-tests/test"
       - name: Upload Spark on K8S integration tests log files
         if: ${{ !success() }}
         uses: actions/upload-artifact@v4
diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesTestComponents.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesTestComponents.scala
index 3762c31538dc..9581a78619dd 100644
--- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesTestComponents.scala
+++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesTestComponents.scala
@@ -75,6 +75,8 @@ private[spark] class KubernetesTestComponents(val kubernetesClient: KubernetesCl
       .set(UI_ENABLED.key, "true")
       .set("spark.kubernetes.submission.waitAppCompletion", "false")
       .set("spark.kubernetes.authenticate.driver.serviceAccountName", 
serviceAccountName)
+      .set("spark.kubernetes.driver.request.cores", "0.2")
+      .set("spark.kubernetes.executor.request.cores", "0.2")
   }
 }
 
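The two `.set(...)` lines added above make the fractional cpu requests part of the integration-test defaults, which is why the driver/executor request-core flags could be dropped from the sbt command in the workflow. A minimal sketch of the resulting conf, assuming only a plain SparkConf and a placeholder service account name rather than the suite's real wiring:

    import org.apache.spark.SparkConf

    // Sketch: the integration-test conf now carries the 0.2-core requests by default.
    object RequestCoresSketch {
      def baseTestConf(serviceAccountName: String): SparkConf = {
        new SparkConf(false) // do not load system properties
          .set("spark.kubernetes.submission.waitAppCompletion", "false")
          .set("spark.kubernetes.authenticate.driver.serviceAccountName", serviceAccountName)
          .set("spark.kubernetes.driver.request.cores", "0.2")
          .set("spark.kubernetes.executor.request.cores", "0.2")
      }
    }
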
diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PVTestsSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PVTestsSuite.scala
index 3ffe0f9dc5c6..1b5a25306671 100644
--- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PVTestsSuite.scala
+++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PVTestsSuite.scala
@@ -30,6 +30,7 @@ private[spark] trait PVTestsSuite { k8sSuite: KubernetesSuite =>
 
   private def setupLocalStorage(): Unit = {
     val storageClassName = if (testBackend == MinikubeTestBackend) "standard" else "hostpath"
+    val hostname = if (testBackend == MinikubeTestBackend) "minikube" else "docker-desktop"
     val pvBuilder = new PersistentVolumeBuilder()
       .withKind("PersistentVolume")
       .withApiVersion("v1")
@@ -48,7 +49,7 @@ private[spark] trait PVTestsSuite { k8sSuite: KubernetesSuite =>
                 .withMatchExpressions(new NodeSelectorRequirementBuilder()
                   .withKey("kubernetes.io/hostname")
                   .withOperator("In")
-                  .withValues("minikube", "m01", "docker-desktop")
+                  .withValues(hostname)
                   .build()).build())
             .endRequired()
           .endNodeAffinity()
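The hunk above replaces the fixed hostname list ("minikube", "m01", "docker-desktop") with a single hostname chosen from the test backend. A minimal sketch of that selection, using stand-in backend objects rather than the suite's real ones:

    // Sketch: hostname used in the PV's kubernetes.io/hostname match expression.
    object PvHostnameSketch {
      sealed trait TestBackend
      case object MinikubeTestBackend extends TestBackend
      case object DockerDesktopTestBackend extends TestBackend

      def pvNodeHostname(testBackend: TestBackend): String =
        if (testBackend == MinikubeTestBackend) "minikube" else "docker-desktop"
    }
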
diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/VolcanoTestsSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/VolcanoTestsSuite.scala
index 0cfe65b75e6b..2f414b72ee19 100644
--- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/VolcanoTestsSuite.scala
+++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/VolcanoTestsSuite.scala
@@ -496,8 +496,8 @@ private[spark] object VolcanoTestsSuite extends SparkFunSuite {
   val DRIVER_PG_TEMPLATE_MEMORY_3G = new File(
     getClass.getResource("/volcano/driver-podgroup-template-memory-3g.yml").getFile
   ).getAbsolutePath
-  val DRIVER_REQUEST_CORES = sys.props.get(CONFIG_DRIVER_REQUEST_CORES).getOrElse("1")
-  val EXECUTOR_REQUEST_CORES = sys.props.get(CONFIG_EXECUTOR_REQUEST_CORES).getOrElse("1")
+  val DRIVER_REQUEST_CORES = sys.props.get(CONFIG_DRIVER_REQUEST_CORES).getOrElse("0.2")
+  val EXECUTOR_REQUEST_CORES = sys.props.get(CONFIG_EXECUTOR_REQUEST_CORES).getOrElse("0.2")
   val VOLCANO_MAX_JOB_NUM = sys.props.get(CONFIG_KEY_VOLCANO_MAX_JOB_NUM).getOrElse("2")
   val TEMP_DIR = "/tmp/"
 }
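With the defaults above changed to "0.2", the Volcano suite now requests fractional cpus even when the -Dspark.kubernetes.test.driverRequestCores / -Dspark.kubernetes.test.executorRequestCores overrides are not passed, matching the flags removed from the workflow. A minimal sketch of the lookup, assuming the property keys are the same strings used on the old sbt command line:

    // Sketch: system-property lookup with the new fractional-core defaults.
    object VolcanoRequestCoresSketch {
      val CONFIG_DRIVER_REQUEST_CORES = "spark.kubernetes.test.driverRequestCores"
      val CONFIG_EXECUTOR_REQUEST_CORES = "spark.kubernetes.test.executorRequestCores"

      val DRIVER_REQUEST_CORES: String =
        sys.props.get(CONFIG_DRIVER_REQUEST_CORES).getOrElse("0.2")
      val EXECUTOR_REQUEST_CORES: String =
        sys.props.get(CONFIG_EXECUTOR_REQUEST_CORES).getOrElse("0.2")
    }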

