This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 8baaba56b2 [KYUUBI #7254] Initial support for Spark 4.1
8baaba56b2 is described below

commit 8baaba56b2ecba6a8021cb3588a3c7589b6ee9e2
Author: Cheng Pan <[email protected]>
AuthorDate: Wed Dec 17 14:41:44 2025 +0800

    [KYUUBI #7254] Initial support for Spark 4.1
    
    ### Why are the changes needed?
    
    - Engine - compiles and passes all UTs except one case.
    - Server - passes all UTs and the Spark engine integration tests.
    - Lineage plugin - compiles and passes all UTs.
    - AuthZ plugin - compiles.
    - TPC-H/TPC-DS connectors - compile and pass all UTs.
    - KSHC - compiles.
    - Spark extension - TBD.
    
    Switch to Python 3.11 for Spark 4 in GHA, because Python 3.9 is EOL and its support is dropped by Spark 4.1.
    
    ### How was this patch tested?
    
    GHA.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #7254 from pan3793/spark-4.1.
    
    Closes #7254
    
    716c3fb6f [Fei Wang] Update externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkArrowbasedOperationSuite.scala
    7d3854900 [Cheng Pan] update docs and dev scripts
    d0463f322 [Cheng Pan] fix build
    1d1ab46b1 [Cheng Pan] Spark 4.1.0
    1becdf886 [Cheng Pan] disable arrow-based query metrics for spark 4.1
    50d2b573b [Cheng Pan] enable spark lineage plugin test for spark 4.1
    5e9bacb07 [Cheng Pan] use python 3.11 for spark 4
    de40ae588 [Cheng Pan] relax error message match pattern
    e29deed73 [Cheng Pan] adapt to SPARK-54259
    d27a7735f [Cheng Pan] Adapt SPARK-52065
    af309ea1c [Cheng Pan] fix
    4273ec392 [Cheng Pan] adapt SPARK-53459
    ef3a426f0 [Cheng Pan] release:17
    e901bd2ee [Cheng Pan] Test with Spark 4.1 preview4
    
    Lead-authored-by: Cheng Pan <[email protected]>
    Co-authored-by: Fei Wang <[email protected]>
    Signed-off-by: Cheng Pan <[email protected]>
---
 .github/workflows/license.yml                      |  2 +-
 .github/workflows/master.yml                       | 20 ++++++++++-
 .github/workflows/style.yml                        |  1 +
 build/release/release.sh                           | 14 ++++++--
 dev/kyuubi-codecov/pom.xml                         | 13 +++++++
 dev/reformat                                       |  2 +-
 docs/quick_start/quick_start.rst                   | 32 ++++++++---------
 extensions/spark/kyuubi-spark-authz/README.md      |  4 ++-
 extensions/spark/kyuubi-spark-lineage/README.md    |  1 +
 .../helper/SparkSQLLineageParseHelper.scala        | 10 +++---
 .../engine/spark/operation/PlanOnlyStatement.scala | 42 +++++++++++++++++++---
 .../org/apache/spark/kyuubi/SparkUtilsHelper.scala | 13 ++++++-
 .../operation/SparkArrowbasedOperationSuite.scala  |  2 ++
 .../engine/spark/SparkProcessBuilderSuite.scala    |  4 +--
 .../kyuubi/server/rest/client/BatchCliSuite.scala  |  6 ++--
 pom.xml                                            | 23 ++++++++++++
 16 files changed, 151 insertions(+), 38 deletions(-)

diff --git a/.github/workflows/license.yml b/.github/workflows/license.yml
index 84db7cb01f..a0fb02777c 100644
--- a/.github/workflows/license.yml
+++ b/.github/workflows/license.yml
@@ -44,7 +44,7 @@ jobs:
       - run: |
           build/mvn org.apache.rat:apache-rat-plugin:check \
             -Ptpcds -Pkubernetes-it \
-            -Pspark-3.3 -Pspark-3.4 -Pspark-3.5 -Pspark-4.0
+            -Pspark-3.3 -Pspark-3.4 -Pspark-3.5 -Pspark-4.0 -Pspark-4.1
       - name: Upload rat report
         if: failure()
         uses: actions/upload-artifact@v4
diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 804918a63b..242e54456f 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -51,6 +51,8 @@ jobs:
         java:
           - 8
           - 17
+        python:
+          - '3.9'
         spark:
           - '3.3'
           - '3.4'
@@ -60,25 +62,41 @@ jobs:
         comment: ["normal"]
         include:
           - java: 21
+            python: '3.11'
             spark: '4.0'
             spark-archive: '-Pscala-2.13'
             exclude-tags: ''
             comment: 'normal'
+          - java: 21
+            python: '3.11'
+            spark: '4.1'
+            spark-archive: '-Pscala-2.13'
+            exclude-tags: ''
+            comment: 'normal'
           - java: 8
+            python: '3.9'
             spark: '3.5'
            spark-archive: '-Dspark.archive.mirror=https://www.apache.org/dyn/closer.lua/spark/spark-3.3.3 -Dspark.archive.name=spark-3.3.3-bin-hadoop3.tgz -Pzookeeper-3.6'
            exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
             comment: 'verify-on-spark-3.3-binary'
           - java: 8
+            python: '3.9'
             spark: '3.5'
            spark-archive: '-Dspark.archive.mirror=https://www.apache.org/dyn/closer.lua/spark/spark-3.4.3 -Dspark.archive.name=spark-3.4.3-bin-hadoop3.tgz -Pzookeeper-3.6'
            exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
             comment: 'verify-on-spark-3.4-binary'
           - java: 17
+            python: '3.11'
             spark: '3.5'
            spark-archive: '-Pscala-2.13 -Dspark.archive.mirror=https://www.apache.org/dyn/closer.lua/spark/spark-4.0.1 -Dspark.archive.name=spark-4.0.1-bin-hadoop3.tgz'
            exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
             comment: 'verify-on-spark-4.0-binary'
+          - java: 17
+            python: '3.11'
+            spark: '3.5'
+            spark-archive: '-Pscala-2.13 -Dspark.archive.mirror=https://www.apache.org/dyn/closer.lua/spark/spark-4.1.0 -Dspark.archive.name=spark-4.1.0-bin-hadoop3.tgz'
+            exclude-tags: '-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.HudiTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
+            comment: 'verify-on-spark-4.1-binary'
     env:
       SPARK_LOCAL_IP: localhost
     steps:
@@ -100,7 +118,7 @@ jobs:
       - name: Setup Python
         uses: actions/setup-python@v5
         with:
-          python-version: '3.9'
+          python-version: ${{ matrix.python }}
       - name: Build and test Kyuubi and Spark with maven w/o linters
         run: |
           if [[ "${{ matrix.java }}" == "8" && "${{ matrix.spark }}" == "3.5" 
&& "${{ matrix.spark-archive }}" == "" ]]; then
diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml
index 369b650b74..11eba10233 100644
--- a/.github/workflows/style.yml
+++ b/.github/workflows/style.yml
@@ -69,6 +69,7 @@ jobs:
          build/mvn clean install -pl extensions/spark/kyuubi-extension-spark-3-4 -Pspark-3.4
          build/mvn clean install -pl extensions/spark/kyuubi-extension-spark-3-5,extensions/spark/kyuubi-spark-connector-hive -Pspark-3.5
          build/mvn clean install -pl extensions/spark/kyuubi-extension-spark-4-0 -Pspark-4.0 -Pscala-2.13
+          build/mvn clean install -pl extensions/spark/kyuubi-extension-spark-4-1 -Pspark-4.1 -Pscala-2.13
 
       - name: Scalastyle with maven
         id: scalastyle-check
diff --git a/build/release/release.sh b/build/release/release.sh
index 121e3df098..b80173fd52 100755
--- a/build/release/release.sh
+++ b/build/release/release.sh
@@ -110,16 +110,26 @@ upload_svn_staging() {
 }
 
 upload_nexus_staging() {
-  # Spark Extension Plugin for Spark 3.3
+  # Spark Extension Plugin for Spark 3.3 and Scala 2.12
  ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.3 \
     -s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
     -pl extensions/spark/kyuubi-extension-spark-3-3 -am
 
-  # Spark Extension Plugin for Spark 3.4
+  # Spark Extension Plugin for Spark 3.4 and Scala 2.12
  ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.4 \
     -s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
     -pl extensions/spark/kyuubi-extension-spark-3-4 -am
 
+  # Spark Extension Plugin for Spark 4.0 and Scala 2.13
+  ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-4.0,scala-2.13 \
+    -s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
+    -pl extensions/spark/kyuubi-extension-spark-4-0 -am
+
+  # Spark Extension Plugin for Spark 4.1 and Scala 2.13
+  ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-4.1,scala-2.13 \
+    -s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
+    -pl extensions/spark/kyuubi-extension-spark-4-1 -am
+
  # Spark Hive/TPC-DS/TPC-H Connector built with default Spark version (3.5) and Scala 2.13
  ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests -Papache-release,flink-provided,spark-provided,hive-provided,spark-3.5,scala-2.13 \
     -s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
diff --git a/dev/kyuubi-codecov/pom.xml b/dev/kyuubi-codecov/pom.xml
index d6f92ad97b..a5841c58d1 100644
--- a/dev/kyuubi-codecov/pom.xml
+++ b/dev/kyuubi-codecov/pom.xml
@@ -251,6 +251,19 @@
                 </dependency>
             </dependencies>
         </profile>
+        <profile>
+            <id>spark-4.1</id>
+            <dependencies>
+                <!-- TODO: support Spark extension -->
+                <!-- TODO: support KSHC -->
+                <!-- TODO: support authz -->
+                <dependency>
+                    <groupId>org.apache.kyuubi</groupId>
+                    <artifactId>kyuubi-spark-lineage_${scala.binary.version}</artifactId>
+                    <version>${project.version}</version>
+                </dependency>
+            </dependencies>
+        </profile>
         <profile>
             <id>codecov</id>
             <build>
diff --git a/dev/reformat b/dev/reformat
index 2b4e77314d..a2deea54a3 100755
--- a/dev/reformat
+++ b/dev/reformat
@@ -20,7 +20,7 @@ set -x
 
 KYUUBI_HOME="$(cd "`dirname "$0"`/.."; pwd)"
 
-PROFILES="-Pflink-provided,hive-provided,spark-provided,spark-4.0,spark-3.5,spark-3.4,spark-3.3,tpcds,kubernetes-it"
+PROFILES="-Pflink-provided,hive-provided,spark-provided,spark-4.1,spark-4.0,spark-3.5,spark-3.4,spark-3.3,tpcds,kubernetes-it"
 
 # python style checks rely on `black` in path
 if ! command -v black &> /dev/null
diff --git a/docs/quick_start/quick_start.rst b/docs/quick_start/quick_start.rst
index 8cb1162d00..d09659c95c 100644
--- a/docs/quick_start/quick_start.rst
+++ b/docs/quick_start/quick_start.rst
@@ -36,23 +36,23 @@ For quick start deployment, we need to prepare the following stuffs:
 These essential components are JVM-based applications. So, the JRE needs to be
 pre-installed and the ``JAVA_HOME`` is correctly set to each component.
 
- ================ ============ ==================== =======================================================
-  Component        Role         Version              Remarks
- ================ ============ ==================== =======================================================
-  **Java**         JRE          8/11/17              Officially released against JDK8
-  **Kyuubi**       Gateway      \ |release| \        - Kyuubi Server
-                   Engine lib                        - Kyuubi Engine
-                   Beeline                           - Kyuubi Beeline
-  **Spark**        Engine       3.3 to 3.5, 4.0      A Spark distribution
-  **Flink**        Engine       1.17 to 1.20         A Flink distribution
-  **Trino**        Engine       N/A                  A Trino cluster allows to access via trino-client v411
-  **Doris**        Engine       N/A                  A Doris cluster
-  **Hive**         Engine       - 2.1-cdh6/2.3/3.1   - A Hive distribution
-                   Metastore    - N/A                - An optional and external metadata store,
-                                                       whose version is decided by engines
+ ================ ============ ======================== =======================================================
+  Component        Role         Version                  Remarks
+ ================ ============ ======================== =======================================================
+  **Java**         JRE          8, 11, 17                Officially released against JDK8
+  **Kyuubi**       Gateway      \ |release| \            - Kyuubi Server
+                   Engine lib                            - Kyuubi Engine
+                   Beeline                               - Kyuubi Beeline
+  **Spark**        Engine       3.3 to 3.5, 4.0 to 4.1   A Spark distribution
+  **Flink**        Engine       1.17 to 1.20             A Flink distribution
+  **Trino**        Engine       N/A                      A Trino cluster allows to access via trino-client v411
+  **Doris**        Engine       N/A                      A Doris cluster
+  **Hive**         Engine       - 2.1-cdh6, 2.3, 3.1     - A Hive distribution
+                   Metastore    - N/A                    - An optional and external metadata store,
+                                                           whose version is decided by engines
  **Zookeeper**    HA           >=3.4.x
-  **Disk**         Storage      N/A                  N/A
- ================ ============ ==================== =======================================================
+  **Disk**         Storage      N/A                      N/A
+ ================ ============ ======================== =======================================================
 
 The other internal or external parts listed in the above sheet can be used individually
 or all together. For example, you can use Kyuubi, Spark and Flink to build a streaming
diff --git a/extensions/spark/kyuubi-spark-authz/README.md b/extensions/spark/kyuubi-spark-authz/README.md
index c63da2fcf4..3f34f6f181 100644
--- a/extensions/spark/kyuubi-spark-authz/README.md
+++ b/extensions/spark/kyuubi-spark-authz/README.md
@@ -33,7 +33,8 @@ build/mvn clean package -DskipTests -pl :kyuubi-spark-authz_2.12 -am -Dspark.ver
 
 `-Dspark.version=`
 
-- [x] master
+- [ ] 4.1.x
+- [ ] 4.0.x
 - [x] 3.5.x (default)
 - [x] 3.4.x
 - [x] 3.3.x
@@ -46,6 +47,7 @@ build/mvn clean package -DskipTests -pl :kyuubi-spark-authz_2.12 -am -Dspark.ver
 
 `-Dranger.version=`
 
+- [ ] 2.7.x
 - [x] 2.6.x (default)
 - [x] 2.5.x
 - [x] 2.4.x
diff --git a/extensions/spark/kyuubi-spark-lineage/README.md b/extensions/spark/kyuubi-spark-lineage/README.md
index db6b867fee..12d2766431 100644
--- a/extensions/spark/kyuubi-spark-lineage/README.md
+++ b/extensions/spark/kyuubi-spark-lineage/README.md
@@ -33,6 +33,7 @@ build/mvn clean package -DskipTests -pl :kyuubi-spark-lineage_2.12 -am -Dspark.v
 
 `-Dspark.version=`
 
+- [x] 4.1.x
 - [x] 4.0.x
 - [x] 3.5.x (default)
 - [x] 3.4.x
diff --git a/extensions/spark/kyuubi-spark-lineage/src/main/scala/org/apache/kyuubi/plugin/lineage/helper/SparkSQLLineageParseHelper.scala b/extensions/spark/kyuubi-spark-lineage/src/main/scala/org/apache/kyuubi/plugin/lineage/helper/SparkSQLLineageParseHelper.scala
index dc6673d4cf..7f7248669f 100644
--- a/extensions/spark/kyuubi-spark-lineage/src/main/scala/org/apache/kyuubi/plugin/lineage/helper/SparkSQLLineageParseHelper.scala
+++ b/extensions/spark/kyuubi-spark-lineage/src/main/scala/org/apache/kyuubi/plugin/lineage/helper/SparkSQLLineageParseHelper.scala
@@ -367,10 +367,10 @@ trait LineageParser {
         val matchedActions = getField[Seq[MergeAction]](plan, "matchedActions")
        val notMatchedActions = getField[Seq[MergeAction]](plan, "notMatchedActions")
         val allAssignments = (matchedActions ++ notMatchedActions).collect {
-          case UpdateAction(_, assignments) => assignments
-          case InsertAction(_, assignments) => assignments
+          case ua: UpdateAction => ua.assignments
+          case ia: InsertAction => ia.assignments
         }.flatten
-        val nextColumnsLlineage = ListMap(allAssignments.map { assignment =>
+        val nextColumnsLineage = ListMap(allAssignments.map { assignment =>
           (
             assignment.key.asInstanceOf[Attribute],
             assignment.value.references)
@@ -379,11 +379,11 @@ trait LineageParser {
         val sourceTable = getField[LogicalPlan](plan, "sourceTable")
         val targetColumnsLineage = extractColumnsLineage(
           targetTable,
-          nextColumnsLlineage.map { case (k, _) => (k, AttributeSet(k)) },
+          nextColumnsLineage.map { case (k, _) => (k, AttributeSet(k)) },
           inputTablesByPlan)
         val sourceColumnsLineage = extractColumnsLineage(
           sourceTable,
-          nextColumnsLlineage,
+          nextColumnsLineage,
           inputTablesByPlan)
        val targetColumnsWithTargetTable = targetColumnsLineage.values.flatten.map { column =>
          val unquotedQualifiedName = (column.qualifier :+ column.name).mkString(".")
diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/operation/PlanOnlyStatement.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/operation/PlanOnlyStatement.scala
index 3c8d670c05..b48863e3c8 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/operation/PlanOnlyStatement.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/operation/PlanOnlyStatement.scala
@@ -17,11 +17,14 @@
 
 package org.apache.kyuubi.engine.spark.operation
 
+import java.lang.{Boolean => JBoolean}
+
 import com.fasterxml.jackson.databind.ObjectMapper
 import com.fasterxml.jackson.module.scala.DefaultScalaModule
 import org.apache.spark.kyuubi.SparkUtilsHelper
 import org.apache.spark.sql.{Row, SparkSession}
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.catalyst.trees.TreeNode
 import org.apache.spark.sql.execution.CommandExecutionMode
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.StructType
@@ -29,11 +32,13 @@ import org.apache.spark.sql.types.StructType
 import org.apache.kyuubi.KyuubiSQLException
 import org.apache.kyuubi.config.KyuubiConf.{LINEAGE_PARSER_PLUGIN_PROVIDER, OPERATION_PLAN_ONLY_EXCLUDES, OPERATION_PLAN_ONLY_OUT_STYLE}
 import org.apache.kyuubi.engine.spark.KyuubiSparkUtil.getSessionConf
+import org.apache.kyuubi.engine.spark.operation.PlanOnlyStatement._
 import org.apache.kyuubi.operation.{AnalyzeMode, ArrayFetchIterator, ExecutionMode, IterableFetchIterator, JsonStyle, LineageMode, OperationHandle, OptimizeMode, OptimizeWithStatsMode, ParseMode, PhysicalMode, PlainStyle, PlanOnlyMode, PlanOnlyStyle, UnknownMode, UnknownStyle}
 import org.apache.kyuubi.operation.PlanOnlyMode.{notSupportedModeError, unknownModeError}
 import org.apache.kyuubi.operation.PlanOnlyStyle.{notSupportedStyleError, unknownStyleError}
 import org.apache.kyuubi.operation.log.OperationLog
 import org.apache.kyuubi.session.Session
+import org.apache.kyuubi.util.reflect.DynMethods
 
 /**
  * Perform the statement parsing, analyzing or optimizing only without executing it
@@ -110,11 +115,8 @@ class PlanOnlyStatement(
         spark.sessionState.analyzer.checkAnalysis(analyzed)
         val optimized = spark.sessionState.optimizer.execute(analyzed)
         optimized.stats
-        iter = new IterableFetchIterator(Seq(Row(optimized.treeString(
-          verbose = true,
-          addSuffix = true,
-          SQLConf.get.maxToStringFields,
-          printOperatorId = false))))
+        iter = new IterableFetchIterator(
+          Seq(Row(treeString(optimized, verbose = true, addSuffix = true))))
       case PhysicalMode =>
        val physical = spark.sessionState.executePlan(plan, CommandExecutionMode.SKIP).sparkPlan
         iter = new IterableFetchIterator(Seq(Row(physical.toString())))
@@ -184,3 +186,33 @@ class PlanOnlyStatement(
   }
 
 }
+
+object PlanOnlyStatement {
+
+  private val uboundTreeStringMehod = DynMethods.builder("treeString")
+    .impl( // SPARK-52065 (4.1.0)
+      classOf[TreeNode[_]],
+      classOf[Boolean],
+      classOf[Boolean],
+      classOf[Int],
+      classOf[Boolean],
+      classOf[Boolean])
+    .impl(
+      classOf[TreeNode[_]],
+      classOf[Boolean],
+      classOf[Boolean],
+      classOf[Int],
+      classOf[Boolean])
+    .build()
+
+  def treeString(
+      tree: TreeNode[_],
+      verbose: JBoolean,
+      addSuffix: JBoolean = false,
+      maxFields: Integer = SQLConf.get.maxToStringFields,
+      printOperatorId: JBoolean = false,
+      printOutputColumns: JBoolean = false): String = {
+    uboundTreeStringMehod.bind(tree)
+      .invoke(verbose, addSuffix, maxFields, printOperatorId, printOutputColumns)
+  }
+}
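
For context, the reflective helper added above prefers the `treeString` overload with the extra `printOutputColumns` flag (SPARK-52065, Spark 4.1) and falls back to the pre-4.1 signature. A minimal usage sketch; the SparkSession `spark` and the query are illustrative assumptions, not part of this patch:

    // Illustrative only: render an optimized logical plan through the helper,
    // which binds to whichever treeString overload the running Spark provides.
    import org.apache.kyuubi.engine.spark.operation.PlanOnlyStatement

    val optimized = spark.sql("SELECT 1 AS id").queryExecution.optimizedPlan
    val rendered = PlanOnlyStatement.treeString(optimized, verbose = true, addSuffix = true)
    println(rendered)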
diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/kyuubi/SparkUtilsHelper.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/kyuubi/SparkUtilsHelper.scala
index 106be3fc78..0cf8327867 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/kyuubi/SparkUtilsHelper.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/kyuubi/SparkUtilsHelper.scala
@@ -23,6 +23,7 @@ import org.apache.spark.SparkConf
 import org.apache.spark.util.Utils
 
 import org.apache.kyuubi.Logging
+import org.apache.kyuubi.util.reflect.{DynClasses, DynMethods}
 
 /**
  * A place to invoke non-public APIs of [[Utils]], anything to be added here need to
@@ -37,11 +38,21 @@ object SparkUtilsHelper extends Logging {
     Utils.redact(regex, text)
   }
 
+  private val readOnlySparkConfCls = DynClasses.builder()
+    .impl("org.apache.spark.ReadOnlySparkConf")
+    .orNull()
+    .build()
+
+  private val getLocalDirMethod = DynMethods.builder("getLocalDir")
+    .impl(Utils.getClass, readOnlySparkConfCls) // SPARK-53459 (4.1.0)
+    .impl(Utils.getClass, classOf[SparkConf])
+    .build(Utils)
+
   /**
    * Get the path of a temporary directory.
    */
   def getLocalDir(conf: SparkConf): String = {
-    Utils.getLocalDir(conf)
+    getLocalDirMethod.invoke(conf)
   }
 
   def classesArePresent(className: String): Boolean = {
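
The hunk above applies the same reflective pattern to SPARK-53459, where `Utils.getLocalDir` takes a `ReadOnlySparkConf` starting with Spark 4.1. A hedged sketch of how the class probe degrades gracefully on older Spark versions (the printed messages are illustrative only):

    // Illustrative only: probe for the Spark 4.1 ReadOnlySparkConf class and
    // report which Utils.getLocalDir overload would be bound at runtime.
    import org.apache.kyuubi.util.reflect.DynClasses

    val readOnlyConfCls = DynClasses.builder()
      .impl("org.apache.spark.ReadOnlySparkConf") // only present since SPARK-53459 (4.1.0)
      .orNull()
      .build()

    if (readOnlyConfCls != null) {
      println("Spark 4.1+: Utils.getLocalDir(ReadOnlySparkConf) will be bound")
    } else {
      println("pre-4.1 Spark: falling back to Utils.getLocalDir(SparkConf)")
    }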
diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkArrowbasedOperationSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkArrowbasedOperationSuite.scala
index 14f900d90f..0993c78236 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkArrowbasedOperationSuite.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkArrowbasedOperationSuite.scala
@@ -133,6 +133,8 @@ class SparkArrowbasedOperationSuite extends WithSparkSQLEngine with SparkDataTyp
   }
 
   test("arrow-based query metrics") {
+    // TODO: the issue is being investigated by Fu Chen
+    assume(SPARK_ENGINE_RUNTIME_VERSION < "4.1")
     val listener = new SQLMetricsListener
     withJdbcStatement() { statement =>
       withSparkListener(listener) {
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
index 3975b3780d..84e297bf94 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
@@ -64,8 +64,8 @@ class SparkProcessBuilderSuite extends KerberizedTestHelper with MockitoSugar {
     processBuilder.start
     eventually(timeout(90.seconds), interval(500.milliseconds)) {
       val error = processBuilder.getError
-      assert(error.getMessage.contains(
-        "java.lang.IllegalArgumentException: spark.ui.port should be int, but 
was abc"))
+      assert(error.getMessage.contains("spark.ui.port should be int") ||
+        error.getMessage.contains("INVALID_CONF_VALUE.TYPE_MISMATCH"))
       assert(error.isInstanceOf[KyuubiSQLException])
     }
 
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/BatchCliSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/BatchCliSuite.scala
index 7167ce230f..4574d8b8c2 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/BatchCliSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/BatchCliSuite.scala
@@ -259,7 +259,7 @@ class BatchCliSuite extends RestClientTestHelper with TestPrematureExit with Bat
       "--forward")
     result = testPrematureExitForControlCli(logArgs, "")
     assert(result.contains(s"Submitted application: $sparkBatchTestAppName"))
-    assert(result.contains("Shutdown hook called"))
+    assert(result.contains("Successfully stopped SparkContext"))
   }
 
   test("submit batch test") {
@@ -272,7 +272,7 @@ class BatchCliSuite extends RestClientTestHelper with TestPrematureExit with Bat
       ldapUserPasswd)
     val result = testPrematureExitForControlCli(submitArgs, "")
     assert(result.contains(s"Submitted application: $sparkBatchTestAppName"))
-    assert(result.contains("Shutdown hook called"))
+    assert(result.contains("Successfully stopped SparkContext"))
   }
 
   test("submit batch test with waitCompletion=false") {
@@ -289,7 +289,7 @@ class BatchCliSuite extends RestClientTestHelper with TestPrematureExit with Bat
       s"${CtlConf.CTL_BATCH_LOG_QUERY_INTERVAL.key}=100")
     val result = testPrematureExitForControlCli(submitArgs, "")
     assert(result.contains("bin/spark-submit"))
-    assert(!result.contains("Shutdown hook called"))
+    assert(!result.contains("Successfully stopped SparkContext"))
   }
 
   test("list batch test") {
diff --git a/pom.xml b/pom.xml
index 9414f1fca7..111a652870 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2059,6 +2059,29 @@
             </properties>
         </profile>
 
+        <profile>
+            <id>spark-4.1</id>
+            <modules>
+                <module>extensions/spark/kyuubi-spark-connector-hive</module>
+            </modules>
+            <properties>
+                <maven.compiler.release>17</maven.compiler.release>
+                <enforcer.maxJdkVersion>17</enforcer.maxJdkVersion>
+                <spark.version>4.1.0</spark.version>
+                <spark.binary.version>4.0</spark.binary.version>
+                <antlr4.version>4.13.1</antlr4.version>
+                <delta.version>4.0.0</delta.version>
+                <delta.artifact>delta-spark_${scala.binary.version}</delta.artifact>
+                <!-- TODO: update once Hudi support Spark 4.1 -->
+                <hudi.artifact>hudi-spark3.5-bundle_${scala.binary.version}</hudi.artifact>
+                <!-- TODO: update once Paimon support Spark 4.1.
+                           paimon-spark-3.5 contains Scala 2.12 classes cause conflicts with Scala 2.13 -->
+                <paimon.artifact>paimon-common</paimon.artifact>
+                <maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.HudiTest</maven.plugin.scalatest.exclude.tags>
+                <spark.archive.name>spark-${spark.version}-bin-hadoop3.tgz</spark.archive.name>
+            </properties>
+        </profile>
+
         <profile>
             <id>spark-master</id>
             <properties>
