This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 6297651d8 [KYUUBI #6163] Set default Spark version to 3.5
6297651d8 is described below

commit 6297651d83a38f2c7dcf0f65fb4676bbc2420ce0
Author: Cheng Pan <[email protected]>
AuthorDate: Tue Mar 12 16:22:37 2024 +0800

    [KYUUBI #6163] Set default Spark version to 3.5
    
    # :mag: Description
    ## Issue References 🔗
    
    Kyuubi now fully supports Spark 3.5, so this pull request sets the
    default Spark version to 3.5 for Kyuubi 1.9.
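
    As a quick sketch of the effect (commands and profile names as they appear
    in docs/contributing/code/building.md below), a build without an explicit
    Spark profile now targets Spark 3.5, while Spark 3.4 remains selectable:

    ```shell
    # default build now compiles against Spark 3.5
    build/mvn clean package -DskipTests

    # pin the previous default explicitly
    build/mvn clean package -DskipTests -Pspark-3.4
    ```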
    
    ## Types of changes :bookmark:
    
    - [ ] Bugfix (non-breaking change which fixes an issue)
    - [ ] New feature (non-breaking change which adds functionality)
    - [ ] Breaking change (fix or feature that would cause existing functionality to change)
    
    ## Test Plan 🧪
    
    Pass GA.
    
    ---
    
    # Checklist 📝
    
    - [x] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)
    
    **Be nice. Be informative.**
    
    Closes #6163 from pan3793/spark-3.5-default.
    
    Closes #6163
    
    f386aeb7a [Cheng Pan] Set default Spark version to 3.5
    
    Authored-by: Cheng Pan <[email protected]>
    Signed-off-by: Cheng Pan <[email protected]>
---
 .github/workflows/master.yml                       | 22 +++++++++++-----------
 .github/workflows/nightly.yml                      |  2 +-
 build/release/release.sh                           | 14 +++++++-------
 docs/contributing/code/building.md                 |  4 ++--
 docs/extensions/engines/spark/lineage.md           |  7 ++++---
 docs/extensions/engines/spark/rules.md             |  2 +-
 .../org/apache/spark/sql/ZorderCoreBenchmark.scala |  4 ++--
 extensions/spark/kyuubi-spark-authz/README.md      |  4 ++--
 extensions/spark/kyuubi-spark-lineage/README.md    |  5 +++--
 .../deployment/KyuubiOnKubernetesTestsSuite.scala  |  2 +-
 .../test/spark/SparkOnKubernetesTestsSuite.scala   |  2 +-
 pom.xml                                            | 12 ++++++------
 12 files changed, 41 insertions(+), 39 deletions(-)

diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index bc82025ed..ad76dfd4e 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -57,25 +57,25 @@ jobs:
         comment: ["normal"]
         include:
           - java: 8
-            spark: '3.4'
+            spark: '3.5'
             spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.1.3 -Dspark.archive.name=spark-3.1.3-bin-hadoop3.2.tgz -Pzookeeper-3.6'
             exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
             comment: 'verify-on-spark-3.1-binary'
           - java: 8
-            spark: '3.4'
+            spark: '3.5'
             spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.2.4 -Dspark.archive.name=spark-3.2.4-bin-hadoop3.2.tgz -Pzookeeper-3.6'
             exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
             comment: 'verify-on-spark-3.2-binary'
           - java: 8
-            spark: '3.4'
+            spark: '3.5'
             spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.3.3 -Dspark.archive.name=spark-3.3.3-bin-hadoop3.tgz -Pzookeeper-3.6'
             exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
             comment: 'verify-on-spark-3.3-binary'
           - java: 8
-            spark: '3.4'
-            spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.5.1 -Dspark.archive.name=spark-3.5.1-bin-hadoop3.tgz -Pzookeeper-3.6'
+            spark: '3.5'
+            spark-archive: '-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.4.2 -Dspark.archive.name=spark-3.4.2-bin-hadoop3.tgz -Pzookeeper-3.6'
             exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
-            comment: 'verify-on-spark-3.5-binary'
+            comment: 'verify-on-spark-3.4-binary'
         exclude:
           # SPARK-33772: Spark supports JDK 17 since 3.3.0
           - java: 17
@@ -105,7 +105,7 @@ jobs:
           python-version: '3.9'
       - name: Build and test Kyuubi and Spark with maven w/o linters
         run: |
-          if [[ "${{ matrix.java }}" == "8" && "${{ matrix.spark }}" == "3.4" && "${{ matrix.spark-archive }}" == "" ]]; then
+          if [[ "${{ matrix.java }}" == "8" && "${{ matrix.spark }}" == "3.5" && "${{ matrix.spark-archive }}" == "" ]]; then
             MVN_OPT="${MVN_OPT} -Pcodecov"
           fi
           TEST_MODULES="dev/kyuubi-codecov"
@@ -114,7 +114,7 @@ jobs:
       - name: Code coverage
         if: |
           matrix.java == 8 &&
-          matrix.spark == '3.4' &&
+          matrix.spark == '3.5' &&
           matrix.spark-archive == ''
         uses: codecov/codecov-action@v3
         with:
@@ -140,7 +140,7 @@ jobs:
         java:
           - '8'
         spark:
-          - '3.4'
+          - '3.5'
     steps:
       - uses: actions/checkout@v4
       - name: Free up disk space
@@ -374,8 +374,8 @@ jobs:
           # https://minikube.sigs.k8s.io/docs/handbook/pushing/#7-loading-directly-to-in-cluster-container-runtime
           minikube image load apache/kyuubi:latest
           # pre-install spark into minikube
-          docker pull apache/spark:3.4.2
-          minikube image load apache/spark:3.4.2
+          docker pull apache/spark:3.5.1
+          minikube image load apache/spark:3.5.1
       - name: kubectl pre-check
         run: |
           kubectl get nodes
diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
index 1ba696bbe..728c3ad5c 100644
--- a/.github/workflows/nightly.yml
+++ b/.github/workflows/nightly.yml
@@ -29,7 +29,7 @@ jobs:
     strategy:
       matrix:
         profiles:
-          - '-Pspark-master -pl externals/kyuubi-spark-sql-engine -am'
+          - '-Pscala-2.13 -Pspark-master -pl externals/kyuubi-spark-sql-engine -am'
     env:
       SPARK_LOCAL_IP: localhost
     steps:
diff --git a/build/release/release.sh b/build/release/release.sh
index de32a492e..e1aaae83f 100755
--- a/build/release/release.sh
+++ b/build/release/release.sh
@@ -120,18 +120,18 @@ upload_nexus_staging() {
     -s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
     -pl extensions/spark/kyuubi-extension-spark-3-3 -am
 
-  # Spark Extension Plugin for Spark 3.5
-  ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.5 \
+  # Spark Extension Plugin for Spark 3.4
+  ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.4 \
     -s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
-    -pl extensions/spark/kyuubi-extension-spark-3-5 -am
+    -pl extensions/spark/kyuubi-extension-spark-3-4 -am
 
-  # Spark Hive/TPC-DS/TPC-H Connector built with default Spark version (3.4) and Scala 2.13
-  ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.4,scala-2.13 \
+  # Spark Hive/TPC-DS/TPC-H Connector built with default Spark version (3.5) and Scala 2.13
+  ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.5,scala-2.13 \
     -s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
     -pl extensions/spark/kyuubi-spark-connector-hive,extensions/spark/kyuubi-spark-connector-tpcds,extensions/spark/kyuubi-spark-connector-tpch -am
 
-  # All modules including Spark Extension Plugin and Connectors built with default Spark version (3.4) and default Scala version (2.12)
-  ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.4 \
+  # All modules including Spark Extension Plugin and Connectors built with default Spark version (3.5) and default Scala version (2.12)
+  ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.5 \
     -s "${KYUUBI_DIR}/build/release/asf-settings.xml"
 }
 
diff --git a/docs/contributing/code/building.md b/docs/contributing/code/building.md
index 82409fc9e..502925874 100644
--- a/docs/contributing/code/building.md
+++ b/docs/contributing/code/building.md
@@ -65,8 +65,8 @@ Since v1.1.0, Kyuubi support building with different Spark profiles,
 |-------------|---------|-------|
 | -Pspark-3.2 |         | 1.4.0 |
 | -Pspark-3.3 |         | 1.6.0 |
-| -Pspark-3.4 | ✓       | 1.8.0 |
-| -Pspark-3.5 |         | 1.8.0 |
+| -Pspark-3.4 |         | 1.8.0 |
+| -Pspark-3.5 | ✓       | 1.8.0 |
 
 ## Building Kyuubi Against Different Scala Versions
 
diff --git a/docs/extensions/engines/spark/lineage.md b/docs/extensions/engines/spark/lineage.md
index 2dbb2a026..b634473d4 100644
--- a/docs/extensions/engines/spark/lineage.md
+++ b/docs/extensions/engines/spark/lineage.md
@@ -117,7 +117,7 @@ Sometimes, it may be incompatible with other Spark distributions, then you may n
 For example,
 
 ```shell
-build/mvn clean package -pl :kyuubi-spark-lineage_2.12 -am -DskipTests -Dspark.version=3.1.2
+build/mvn clean package -pl :kyuubi-spark-lineage_2.12 -am -DskipTests -Dspark.version=3.5.1
 ```
 
 The available `spark.version`s are shown in the following table.
@@ -125,11 +125,12 @@ The available `spark.version`s are shown in the following table.
 | Spark Version | Supported | Remark |
 |:-------------:|:---------:|:------:|
 |    master     |     √     |   -    |
+|     3.5.x     |     √     |   -    |
 |     3.4.x     |     √     |   -    |
 |     3.3.x     |     √     |   -    |
 |     3.2.x     |     √     |   -    |
-|     3.1.x     |     √     |   -    |
-|     3.0.x     |     √     |   -    |
+|     3.1.x     |     x     |   -    |
+|     3.0.x     |     x     |   -    |
 |     2.4.x     |     x     |   -    |
 
 Currently, Spark released with Scala 2.12 are supported.
diff --git a/docs/extensions/engines/spark/rules.md b/docs/extensions/engines/spark/rules.md
index c8bd6b4d5..986fda14c 100644
--- a/docs/extensions/engines/spark/rules.md
+++ b/docs/extensions/engines/spark/rules.md
@@ -49,7 +49,7 @@ And don't worry, Kyuubi will support the new Apache Spark version in the future.
 | kyuubi-extension-spark-3-2 | 3.2.x                      | 1.4.0-incubating | N/A   | 1.4.0-incubating                  | spark-3.2     |
 | kyuubi-extension-spark-3-3 | 3.3.x                      | 1.6.0-incubating | N/A   | 1.6.0-incubating                  | spark-3.3     |
 | kyuubi-extension-spark-3-4 | 3.4.x                      | 1.8.0            | N/A   | 1.8.0                             | spark-3.4     |
-| kyuubi-extension-spark-3-4 | 3.5.x                      | 1.8.0            | N/A   | N/A                               | spark-3.5     |
+| kyuubi-extension-spark-3-5 | 3.5.x                      | 1.8.0            | N/A   | 1.9.0                             | spark-3.5     |
 
 1. Check the matrix that if you are using the supported Spark version, and find the corresponding Kyuubi Spark SQL Extension jar
 2. Get the Kyuubi Spark SQL Extension jar
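
As step 2 above suggests, the jar can also be packaged from source; a minimal sketch reusing the module path and Maven profile that appear elsewhere in this patch (the released artifact works just as well):

```shell
# package the Kyuubi Spark SQL extension for the now-default Spark 3.5 line
build/mvn clean package -DskipTests -Pspark-3.5 \
  -pl extensions/spark/kyuubi-extension-spark-3-5 -am
```
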
diff --git a/extensions/spark/kyuubi-extension-spark-3-4/src/test/scala/org/apache/spark/sql/ZorderCoreBenchmark.scala b/extensions/spark/kyuubi-extension-spark-3-4/src/test/scala/org/apache/spark/sql/ZorderCoreBenchmark.scala
index b2ce305e4..7af1ca048 100644
--- a/extensions/spark/kyuubi-extension-spark-3-4/src/test/scala/org/apache/spark/sql/ZorderCoreBenchmark.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-4/src/test/scala/org/apache/spark/sql/ZorderCoreBenchmark.scala
@@ -29,8 +29,8 @@ import org.apache.kyuubi.sql.zorder.ZorderBytesUtils
  *
  * {{{
  *   RUN_BENCHMARK=1 ./build/mvn clean test \
- *   -pl extensions/spark/kyuubi-extension-spark-3-4 -am \
- *   -Pspark-3.4,kyuubi-extension-spark-3-4 \
+ *   -pl extensions/spark/kyuubi-extension-spark-3-5 -am \
+ *   -Pspark-3.5,kyuubi-extension-spark-3-5 \
  *   -Dtest=none -DwildcardSuites=org.apache.spark.sql.ZorderCoreBenchmark
  * }}}
  */
diff --git a/extensions/spark/kyuubi-spark-authz/README.md b/extensions/spark/kyuubi-spark-authz/README.md
index 43ee45b09..eb295c68c 100644
--- a/extensions/spark/kyuubi-spark-authz/README.md
+++ b/extensions/spark/kyuubi-spark-authz/README.md
@@ -34,8 +34,8 @@ build/mvn clean package -DskipTests -pl :kyuubi-spark-authz_2.12 -am -Dspark.ver
 `-Dspark.version=`
 
 - [x] master
-- [x] 3.5.x
-- [x] 3.4.x (default)
+- [x] 3.5.x (default)
+- [x] 3.4.x
 - [x] 3.3.x
 - [x] 3.2.x
 - [x] 3.1.x
diff --git a/extensions/spark/kyuubi-spark-lineage/README.md b/extensions/spark/kyuubi-spark-lineage/README.md
index 1c42d3736..3f24cd173 100644
--- a/extensions/spark/kyuubi-spark-lineage/README.md
+++ b/extensions/spark/kyuubi-spark-lineage/README.md
@@ -34,8 +34,9 @@ build/mvn clean package -DskipTests -pl :kyuubi-spark-lineage_2.12 -am -Dspark.v
 `-Dspark.version=`
 
 - [x] master
-- [ ] 3.4.x
-- [x] 3.3.x (default)
+- [x] 3.5.x (default)
+- [x] 3.4.x
+- [x] 3.3.x
 - [x] 3.2.x
 - [x] 3.1.x
 
diff --git a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala
index 9d47ab998..1256687ae 100644
--- a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala
+++ b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/deployment/KyuubiOnKubernetesTestsSuite.scala
@@ -55,7 +55,7 @@ class KyuubiOnKubernetesWithSparkTestsBase extends WithKyuubiServerOnKubernetes
       Map(
         "spark.master" -> s"k8s://$miniKubeApiMaster",
         // We should update spark docker image in ./github/workflows/master.yml at the same time
-        "spark.kubernetes.container.image" -> "apache/spark:3.4.2",
+        "spark.kubernetes.container.image" -> "apache/spark:3.5.1",
         "spark.kubernetes.container.image.pullPolicy" -> "IfNotPresent",
         "spark.executor.memory" -> "512M",
         "spark.driver.memory" -> "1024M",
diff --git a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala
index a32a45d6c..cf4b3ff3b 100644
--- a/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala
+++ b/integration-tests/kyuubi-kubernetes-it/src/test/scala/org/apache/kyuubi/kubernetes/test/spark/SparkOnKubernetesTestsSuite.scala
@@ -50,7 +50,7 @@ abstract class SparkOnKubernetesSuiteBase
     // TODO Support more Spark version
     // Spark official docker image: https://hub.docker.com/r/apache/spark/tags
     KyuubiConf().set("spark.master", s"k8s://$apiServerAddress")
-      .set("spark.kubernetes.container.image", "apache/spark:3.4.2")
+      .set("spark.kubernetes.container.image", "apache/spark:3.5.1")
       .set("spark.kubernetes.container.image.pullPolicy", "IfNotPresent")
       .set("spark.executor.instances", "1")
       .set("spark.executor.memory", "512M")
diff --git a/pom.xml b/pom.xml
index 85b2e0232..7e1d0748d 100644
--- a/pom.xml
+++ b/pom.xml
@@ -133,8 +133,8 @@
         <commons-io.version>2.11.0</commons-io.version>
         <commons-lang.version>2.6</commons-lang.version>
         <commons-lang3.version>3.13.0</commons-lang3.version>
-        <delta.artifact>delta-core</delta.artifact>
-        <delta.version>2.4.0</delta.version>
+        <delta.artifact>delta-spark</delta.artifact>
+        <delta.version>3.1.0</delta.version>
         <failsafe.verion>3.3.2</failsafe.verion>
         <fb303.version>0.9.3</fb303.version>
         <flexmark.version>0.62.2</flexmark.version>
@@ -198,10 +198,10 @@
         <snakeyaml.version>2.2</snakeyaml.version>
         <!--
           DO NOT forget to change the following properties when change the minor version of Spark:
-          `delta.version`, `maven.plugin.scalatest.exclude.tags`
+          `delta.version`, `delta.artifact`, `maven.plugin.scalatest.exclude.tags`
           -->
-        <spark.version>3.4.2</spark.version>
-        <spark.binary.version>3.4</spark.binary.version>
+        <spark.version>3.5.1</spark.version>
+        <spark.binary.version>3.5</spark.binary.version>
         <spark.archive.scala.suffix></spark.archive.scala.suffix>
         <spark.archive.name>spark-${spark.version}-bin-hadoop3${spark.archive.scala.suffix}.tgz</spark.archive.name>
         <spark.archive.mirror>${apache.archive.dist}/spark/spark-${spark.version}</spark.archive.mirror>
@@ -235,7 +235,7 @@
         <maven.plugin.frontend.version>1.12.1</maven.plugin.frontend.version>
         <maven.plugin.scala.version>4.8.0</maven.plugin.scala.version>
         <maven.plugin.scalatest.version>2.2.0</maven.plugin.scalatest.version>
-        <maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
+        <maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow</maven.plugin.scalatest.exclude.tags>
         <maven.plugin.scalatest.include.tags></maven.plugin.scalatest.include.tags>
         <maven.plugin.scalatest.debug.enabled>false</maven.plugin.scalatest.debug.enabled>
         <maven.plugin.spotless.version>2.30.0</maven.plugin.spotless.version>
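
With `spark.version` now defaulting to 3.5.1, a different Spark release can still be pinned per build from the command line, as the lineage documentation above illustrates; a minimal sketch (3.5.0 is only an example value):

```shell
# override the default spark.version for a single module build
build/mvn clean package -pl :kyuubi-spark-lineage_2.12 -am -DskipTests -Dspark.version=3.5.0
```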
