This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-kubernetes-operator.git


The following commit(s) were added to refs/heads/main by this push:
     new 1741985  [SPARK-49468] Add Hot Properties Reload Test
1741985 is described below

commit 17419850f5d7842ca07bf012240ce23dce7bdb3e
Author: Qi Tan <[email protected]>
AuthorDate: Sat Aug 31 11:47:10 2024 -0700

    [SPARK-49468] Add Hot Properties Reload Test
    
    ### What changes were proposed in this pull request?
    Add one more e2e test to validate the hot properties reload.
    
    ### Why are the changes needed?
    1. It will verify that the Spark Operator reloads the properties from the dynamic config map
    2. It will verify that the watched namespaces work as expected
    
    ### Does this PR introduce _any_ user-facing change?
    no
    
    ### How was this patch tested?
    Tested via a personal GitHub Actions workflow
    
    ### Was this patch authored or co-authored using generative AI tooling?
    no
    
    Closes #112, #114
    
    Closes #115 from TQJADE/hot-properties-test.
    
    Authored-by: Qi Tan <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .github/workflows/build_and_test.yml               | 19 +++++-
 .../helm/spark-kubernetes-operator/values.yaml     |  2 +-
 .../spark-application/spark-state-transition.yaml  |  2 +-
 .../dynamic-config-values.yaml}                    | 36 +++++------
 tests/e2e/state-transition/chainsaw-test.yaml      |  3 +
 tests/e2e/watched-namespaces/chainsaw-test.yaml    | 74 ++++++++++++++++++++++
 .../spark-example.yaml}                            | 22 +++----
 .../spark-operator-dynamic-config-1.yaml}          | 30 +++++----
 .../spark-operator-dynamic-config-2.yaml}          | 30 +++++----
 9 files changed, 157 insertions(+), 61 deletions(-)

diff --git a/.github/workflows/build_and_test.yml 
b/.github/workflows/build_and_test.yml
index 144b2d6..4cf6814 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -73,6 +73,8 @@ jobs:
           - spark-versions
           - python
           - state-transition
+        dynamic_config_test_group:
+          - watched-namespaces
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
@@ -98,7 +100,7 @@ jobs:
         run: |
           kubectl get pods -A
           kubectl describe node
-      - name: Run Spark K8s Operator on K8S
+      - name: Run Spark K8s Operator on K8S with Dynamic Configuration Disabled
         run: |
           kubectl create clusterrolebinding serviceaccounts-cluster-admin 
--clusterrole=cluster-admin --group=system:serviceaccounts || true
           eval $(minikube docker-env)
@@ -108,9 +110,22 @@ jobs:
           helm test spark-kubernetes-operator
           # Use remote host' s docker image
           minikube docker-env --unset
-      - name: Run E2E Test
+      - name: Run E2E Test with Dynamic Configuration Disabled
         run: |
           chainsaw test --test-dir ./tests/e2e/${{ matrix.test_group }} 
--parallel 2
+      - name: Run Spark K8s Operator on K8S with Dynamic Configuration Enabled
+        run: |
+          helm uninstall spark-kubernetes-operator
+          eval $(minikube docker-env)
+          helm install spark-kubernetes-operator --create-namespace -f \
+          build-tools/helm/spark-kubernetes-operator/values.yaml -f \
+          tests/e2e/helm/dynamic-config-values.yaml \
+          build-tools/helm/spark-kubernetes-operator/
+          minikube docker-env --unset
+      - name: Run E2E Test with Dynamic Configuration Enabled
+        run: |
+          chainsaw test --test-dir ./tests/e2e/${{ 
matrix.dynamic_config_test_group }} --parallel 2
+
   lint:
     name: "Linter and documentation"
     runs-on: ubuntu-latest
diff --git a/build-tools/helm/spark-kubernetes-operator/values.yaml 
b/build-tools/helm/spark-kubernetes-operator/values.yaml
index a379ac7..9f78ffe 100644
--- a/build-tools/helm/spark-kubernetes-operator/values.yaml
+++ b/build-tools/helm/spark-kubernetes-operator/values.yaml
@@ -188,4 +188,4 @@ operatorConfiguration:
       "helm.sh/resource-policy": keep
     data:
       # Spark Operator Config Runtime Properties Overrides. e.g.
-      spark.kubernetes.operator.reconciler.intervalSeconds: 60
+      spark.kubernetes.operator.reconciler.intervalSeconds: "60"
diff --git a/tests/e2e/assertions/spark-application/spark-state-transition.yaml 
b/tests/e2e/assertions/spark-application/spark-state-transition.yaml
index fc330cd..c28f7a6 100644
--- a/tests/e2e/assertions/spark-application/spark-state-transition.yaml
+++ b/tests/e2e/assertions/spark-application/spark-state-transition.yaml
@@ -19,7 +19,7 @@ apiVersion: spark.apache.org/v1alpha1
 kind: SparkApplication
 metadata:
   name: spark-job-succeeded-test
-  namespace: default
+  namespace: ($SPARK_APP_NAMESPACE)
 status:
   stateTransitionHistory:
     (*.currentStateSummary):
diff --git a/tests/e2e/assertions/spark-application/spark-state-transition.yaml 
b/tests/e2e/helm/dynamic-config-values.yaml
similarity index 64%
copy from tests/e2e/assertions/spark-application/spark-state-transition.yaml
copy to tests/e2e/helm/dynamic-config-values.yaml
index fc330cd..1fea7ae 100644
--- a/tests/e2e/assertions/spark-application/spark-state-transition.yaml
+++ b/tests/e2e/helm/dynamic-config-values.yaml
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
 # this work for additional information regarding copyright ownership.
@@ -6,27 +5,28 @@
 # (the "License"); you may not use this file except in compliance with
 # the License.  You may obtain a copy of the License at
 #
-#    http://www.apache.org/licenses/LICENSE-2.0
+#     http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
 
-apiVersion: spark.apache.org/v1alpha1
-kind: SparkApplication
-metadata:
-  name: spark-job-succeeded-test
-  namespace: default
-status:
-  stateTransitionHistory:
-    (*.currentStateSummary):
-      - "Submitted"
-      - "DriverRequested"
-      - "DriverStarted"
-      - "DriverReady"
-      - "RunningHealthy"
-      - "Succeeded"
-      - "ResourceReleased"
\ No newline at end of file
+appResources:
+  namespaces:
+    overrideWatchedNamespaces: false
+    data:
+      - "spark-1"
+      - "spark-2"
+  role:
+    create: true
+  roleBinding:
+    create: true
+
+operatorConfiguration:
+  dynamicConfig:
+    enable: true
+    create: true
+    data:
+      spark.kubernetes.operator.watchedNamespaces: "default"
\ No newline at end of file
diff --git a/tests/e2e/state-transition/chainsaw-test.yaml 
b/tests/e2e/state-transition/chainsaw-test.yaml
index 01b9ba1..46f5c1f 100644
--- a/tests/e2e/state-transition/chainsaw-test.yaml
+++ b/tests/e2e/state-transition/chainsaw-test.yaml
@@ -36,6 +36,9 @@ spec:
             value: ($FILE_NAME)
         content: kubectl apply -f $FILE_NAME
     - assert:
+        bindings:
+          - name: SPARK_APP_NAMESPACE
+            value: default
         timeout: 60s
         file: "../assertions/spark-application/spark-state-transition.yaml"
     catch:
diff --git a/tests/e2e/watched-namespaces/chainsaw-test.yaml 
b/tests/e2e/watched-namespaces/chainsaw-test.yaml
new file mode 100644
index 0000000..82ed409
--- /dev/null
+++ b/tests/e2e/watched-namespaces/chainsaw-test.yaml
@@ -0,0 +1,74 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+apiVersion: chainsaw.kyverno.io/v1alpha1
+kind: Test
+metadata:
+  name: spark-operator-dynamic-configuration-validation
+spec:
+  steps:
+  - try:
+      - apply:
+          file: spark-operator-dynamic-config-1.yaml
+      - sleep:
+          duration: 30s
+      - script:
+          content:
+            kubectl logs -n default $(kubectl get pods -o=name -l 
app.kubernetes.io/component=operator-deployment,app.kubernetes.io/name=spark-kubernetes-operator)
+          check:
+            (contains($stdout, 'Updating operator namespaces to [default, 
spark-1]')): true
+      - apply:
+          bindings:
+            - name: SPARK_APP_NAMESPACE
+              value: spark-1
+          file: spark-example.yaml
+      - assert:
+          bindings:
+          - name: SPARK_APP_NAMESPACE
+            value: spark-1
+          timeout: 60s
+          file: "../assertions/spark-application/spark-state-transition.yaml"
+      - apply:
+          bindings:
+            - name: SPARK_APP_NAMESPACE
+              value: spark-2
+          file: spark-example.yaml
+      - sleep:
+          duration: 60s
+      - script:
+          content:
+            kubectl get sparkapplication spark-job-succeeded-test -n spark-2 
-o json | jq ".status"
+          check:
+            (contains($stdout, 'null')): true
+    catch:
+      - podLogs:
+          namespace: default
+          selector: 
app.kubernetes.io/component=operator-deployment,app.kubernetes.io/name=spark-kubernetes-operator
+      - describe:
+          apiVersion: spark.apache.org/v1alpha1
+          kind: SparkApplication
+          namespace: spark-1
+      - describe:
+          apiVersion: spark.apache.org/v1alpha1
+          kind: SparkApplication
+          namespace: spark-2
+    finally:
+      - script:
+          content: |
+            kubectl delete sparkapplication spark-job-succeeded-test -n 
spark-1 --ignore-not-found=true
+            kubectl delete sparkapplication spark-job-succeeded-test -n 
spark-2 --ignore-not-found=true
+            kubectl replace -f spark-operator-dynamic-config-2.yaml
\ No newline at end of file
diff --git a/tests/e2e/assertions/spark-application/spark-state-transition.yaml 
b/tests/e2e/watched-namespaces/spark-example.yaml
similarity index 66%
copy from tests/e2e/assertions/spark-application/spark-state-transition.yaml
copy to tests/e2e/watched-namespaces/spark-example.yaml
index fc330cd..dba59ab 100644
--- a/tests/e2e/assertions/spark-application/spark-state-transition.yaml
+++ b/tests/e2e/watched-namespaces/spark-example.yaml
@@ -19,14 +19,14 @@ apiVersion: spark.apache.org/v1alpha1
 kind: SparkApplication
 metadata:
   name: spark-job-succeeded-test
-  namespace: default
-status:
-  stateTransitionHistory:
-    (*.currentStateSummary):
-      - "Submitted"
-      - "DriverRequested"
-      - "DriverStarted"
-      - "DriverReady"
-      - "RunningHealthy"
-      - "Succeeded"
-      - "ResourceReleased"
\ No newline at end of file
+  namespace: ($SPARK_APP_NAMESPACE)
+spec:
+  mainClass: "org.apache.spark.examples.SparkPi"
+  jars: 
"local:///opt/spark/examples/jars/spark-examples_2.13-4.0.0-preview1.jar"
+  sparkConf:
+    spark.executor.instances: "1"
+    spark.kubernetes.container.image: 
"spark:4.0.0-preview1-scala2.13-java17-ubuntu"
+    spark.kubernetes.authenticate.driver.serviceAccountName: "spark"
+  runtimeVersions:
+    sparkVersion: 4.0.0-preview1
+    scalaVersion: "2.13"
\ No newline at end of file
diff --git a/tests/e2e/assertions/spark-application/spark-state-transition.yaml 
b/tests/e2e/watched-namespaces/spark-operator-dynamic-config-1.yaml
similarity index 56%
copy from tests/e2e/assertions/spark-application/spark-state-transition.yaml
copy to tests/e2e/watched-namespaces/spark-operator-dynamic-config-1.yaml
index fc330cd..5ac86fb 100644
--- a/tests/e2e/assertions/spark-application/spark-state-transition.yaml
+++ b/tests/e2e/watched-namespaces/spark-operator-dynamic-config-1.yaml
@@ -15,18 +15,20 @@
 # limitations under the License.
 #
 
-apiVersion: spark.apache.org/v1alpha1
-kind: SparkApplication
+apiVersion: v1
+data:
+  spark.kubernetes.operator.watchedNamespaces: default, spark-1
+kind: ConfigMap
 metadata:
-  name: spark-job-succeeded-test
-  namespace: default
-status:
-  stateTransitionHistory:
-    (*.currentStateSummary):
-      - "Submitted"
-      - "DriverRequested"
-      - "DriverStarted"
-      - "DriverReady"
-      - "RunningHealthy"
-      - "Succeeded"
-      - "ResourceReleased"
\ No newline at end of file
+  annotations:
+    helm.sh/resource-policy: keep
+    meta.helm.sh/release-name: spark-kubernetes-operator
+    meta.helm.sh/release-namespace: default
+  labels:
+    app.kubernetes.io/component: operator-dynamic-config-overrides
+    app.kubernetes.io/managed-by: Helm
+    app.kubernetes.io/name: spark-kubernetes-operator
+    app.kubernetes.io/version: 0.1.0-SNAPSHOT
+    helm.sh/chart: spark-kubernetes-operator-0.1.0-SNAPSHOT
+  name: spark-kubernetes-operator-dynamic-configuration
+  namespace: default
\ No newline at end of file
diff --git a/tests/e2e/assertions/spark-application/spark-state-transition.yaml 
b/tests/e2e/watched-namespaces/spark-operator-dynamic-config-2.yaml
similarity index 57%
copy from tests/e2e/assertions/spark-application/spark-state-transition.yaml
copy to tests/e2e/watched-namespaces/spark-operator-dynamic-config-2.yaml
index fc330cd..a2b89a6 100644
--- a/tests/e2e/assertions/spark-application/spark-state-transition.yaml
+++ b/tests/e2e/watched-namespaces/spark-operator-dynamic-config-2.yaml
@@ -15,18 +15,20 @@
 # limitations under the License.
 #
 
-apiVersion: spark.apache.org/v1alpha1
-kind: SparkApplication
+apiVersion: v1
+data:
+  spark.kubernetes.operator.watchedNamespaces: default
+kind: ConfigMap
 metadata:
-  name: spark-job-succeeded-test
-  namespace: default
-status:
-  stateTransitionHistory:
-    (*.currentStateSummary):
-      - "Submitted"
-      - "DriverRequested"
-      - "DriverStarted"
-      - "DriverReady"
-      - "RunningHealthy"
-      - "Succeeded"
-      - "ResourceReleased"
\ No newline at end of file
+  annotations:
+    helm.sh/resource-policy: keep
+    meta.helm.sh/release-name: spark-kubernetes-operator
+    meta.helm.sh/release-namespace: default
+  labels:
+    app.kubernetes.io/component: operator-dynamic-config-overrides
+    app.kubernetes.io/managed-by: Helm
+    app.kubernetes.io/name: spark-kubernetes-operator
+    app.kubernetes.io/version: 0.1.0-SNAPSHOT
+    helm.sh/chart: spark-kubernetes-operator-0.1.0-SNAPSHOT
+  name: spark-kubernetes-operator-dynamic-configuration
+  namespace: default
\ No newline at end of file


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to