This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 063a192c7 [KYUUBI #6545] Deprecate and remove building support for 
Spark 3.2
063a192c7 is described below

commit 063a192c7a67d5588de7f2a1ce5c5e6d8d5869e0
Author: Cheng Pan <[email protected]>
AuthorDate: Mon Jul 22 11:59:34 2024 +0800

    [KYUUBI #6545] Deprecate and remove building support for Spark 3.2
    
    # :mag: Description
    
    This pull request aims to remove building support for Spark 3.2, while 
still keeping the engine support for Spark 3.2.
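
    The Spark SQL engine can still run on a Spark 3.2 distribution at deployment time. A minimal sketch, assuming a locally unpacked Spark 3.2.x binary (the path below is a hypothetical example):

    ```bash
    # conf/kyuubi-env.sh: point the engine launcher at a Spark 3.2 distribution
    export SPARK_HOME=/opt/spark-3.2.4-bin-hadoop3.2
    ```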
    
    Mailing list discussion: 
https://lists.apache.org/thread/l74n5zl1w7s0bmr5ovxmxq58yqy8hqzc
    
    - Remove the Maven profile `spark-3.2`, and its references in docs, release scripts, etc. (see the build sketch after this list)
    - Keep the cross-version verification to ensure that the Spark SQL engine built against the default Spark version (3.5) still works well on the Spark 3.2 runtime.
    - Merge `kyuubi-extension-spark-common` into `kyuubi-extension-spark-3-3`
    - Remove `log4j.properties` test resources, as Spark has moved to Log4j2 since 3.3 (SPARK-37814)
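
    With the `spark-3.2` profile gone, a typical local build simply drops it. A sketch based on the Maven commands kept in `.github/workflows/style.yml` by this patch:

    ```bash
    # Build everything against the default Spark profile (3.5)
    build/mvn clean install -DskipTests -Pflink-provided,hive-provided,spark-provided,tpcds
    # Build the Spark 3.3 extension explicitly with its matching profile
    build/mvn clean install -DskipTests -pl extensions/spark/kyuubi-extension-spark-3-3 -Pspark-3.3
    ```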
    
    ## Types of changes :bookmark:
    
    - [ ] Bugfix (non-breaking change which fixes an issue)
    - [ ] New feature (non-breaking change which adds functionality)
    - [x] Breaking change (fix or feature that would cause existing 
functionality to change)
    
    ## Test Plan 🧪
    
    Pass GHA.
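
    The GHA matrix keeps a cross-version job that runs the engine built with the default profile against a downloaded Spark binary. Roughly, it amounts to the sketch below; the archive coordinates are illustrative only, the real values live in `.github/workflows/master.yml`:

    ```bash
    # Sketch: run engine tests against a pre-built Spark 3.2 binary
    build/mvn clean install -Pspark-3.5 \
      -Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-3.2.4 \
      -Dspark.archive.name=spark-3.2.4-bin-hadoop3.2.tgz \
      -Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow
    ```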
    
    ---
    
    # Checklist 📝
    
    - [x] This patch was not authored or co-authored using [Generative 
Tooling](https://www.apache.org/legal/generative-tooling.html)
    
    **Be nice. Be informative.**
    
    Closes #6545 from pan3793/deprecate-spark-3.2.
    
    Closes #6545
    
    54c172528 [Cheng Pan] fix
    f4602e805 [Cheng Pan] Deprecate and remove building support for Spark 3.2
    2e083f89f [Cheng Pan] fix style
    458a92c53 [Cheng Pan] nit
    929e1df36 [Cheng Pan] Deprecate and remove building support for Spark 3.2
    
    Authored-by: Cheng Pan <[email protected]>
    Signed-off-by: Cheng Pan <[email protected]>
---
 .github/workflows/license.yml                      |   2 +-
 .github/workflows/master.yml                       |   5 -
 .github/workflows/publish-snapshot-nexus.yml       |  12 +-
 .github/workflows/style.yml                        |   8 +-
 build/release/release.sh                           |   5 -
 dev/kyuubi-codecov/pom.xml                         |  20 +-
 dev/reformat                                       |   2 +-
 docs/connector/spark/hudi.rst                      |   9 +-
 docs/contributing/code/building.md                 |   1 -
 docs/deployment/migration-guide.md                 |  13 +-
 docs/deployment/spark/gluten.md                    |   4 +-
 docs/extensions/engines/spark/rules.md             |   2 +-
 docs/extensions/engines/spark/z-order.md           |   2 -
 .../spark/kyuubi-extension-spark-3-2/pom.xml       | 157 ----------------
 .../apache/kyuubi/sql/DropIgnoreNonexistent.scala  |  54 ------
 .../kyuubi/sql/InsertShuffleNodeBeforeJoin.scala   |  90 ---------
 .../kyuubi/sql/KyuubiSparkSQLCommonExtension.scala |  50 -----
 .../kyuubi/sql/KyuubiSparkSQLExtension.scala       |  44 -----
 .../apache/kyuubi/sql/KyuubiSparkSQLParser.scala   | 132 -------------
 .../apache/kyuubi/sql/RebalanceBeforeWriting.scala |  59 ------
 .../sql/watchdog/ForcedMaxOutputRowsRule.scala     |  46 -----
 .../src/test/resources/log4j.properties            |  42 -----
 .../spark/sql/DropIgnoreNonexistentSuite.scala     |  54 ------
 .../spark/sql/FinalStageConfigIsolationSuite.scala | 207 ---------------------
 .../sql/InsertShuffleNodeBeforeJoinSuite.scala     |  19 --
 .../spark/sql/RebalanceBeforeWritingSuite.scala    | 190 -------------------
 .../scala/org/apache/spark/sql/WatchDogSuite.scala |  20 --
 .../scala/org/apache/spark/sql/ZorderSuite.scala   |  36 ----
 .../benchmarks/ZorderCoreBenchmark-results.txt     |   0
 .../spark/kyuubi-extension-spark-3-3/pom.xml       |  61 +++---
 .../antlr4/org/apache/kyuubi/sql/KyuubiSparkSQL.g4 |   0
 .../kyuubi/sql/KyuubiEnsureRequirements.scala      |   0
 .../kyuubi/sql/KyuubiQueryStagePreparation.scala   |   0
 .../org/apache/kyuubi/sql/KyuubiSQLConf.scala      |   0
 .../kyuubi/sql/KyuubiSQLExtensionException.scala   |   0
 .../kyuubi/sql/KyuubiSparkSQLAstBuilder.scala      |   0
 .../kyuubi/sql/MarkNumOutputColumnsRule.scala      |   0
 .../kyuubi/sql/RepartitionBeforeWritingBase.scala  |   0
 .../sql/watchdog/ForcedMaxOutputRowsBase.scala     |   0
 .../KyuubiUnsupportedOperationsCheck.scala         |   0
 .../sql/watchdog/KyuubiWatchDogException.scala     |   0
 .../kyuubi/sql/watchdog/MaxScanStrategy.scala      |   0
 .../sql/zorder/InsertZorderBeforeWritingBase.scala |   0
 .../sql/zorder/OptimizeZorderCommandBase.scala     |   0
 .../sql/zorder/OptimizeZorderStatementBase.scala   |   0
 .../kyuubi/sql/zorder/ResolveZorderBase.scala      |   0
 .../org/apache/kyuubi/sql/zorder/ZorderBase.scala  |   0
 .../kyuubi/sql/zorder/ZorderBytesUtils.scala       |   0
 .../spark/sql/PruneFileSourcePartitionHelper.scala |   0
 .../sql/InsertShuffleNodeBeforeJoinSuiteBase.scala |   0
 .../spark/sql/KyuubiSparkSQLExtensionTest.scala    |   0
 .../org/apache/spark/sql/WatchDogSuiteBase.scala   |   0
 .../org/apache/spark/sql/ZorderCoreBenchmark.scala |   0
 .../org/apache/spark/sql/ZorderSuiteBase.scala     |   0
 .../spark/sql/benchmark/KyuubiBenchmarkBase.scala  |   0
 .../spark/kyuubi-extension-spark-common/pom.xml    | 152 ---------------
 .../src/test/resources/log4j2-test.xml             |  43 -----
 .../src/test/scala/resources/log4j.properties      |  42 -----
 .../src/test/resources/log4j.properties            |  42 -----
 .../src/test/resources/log4j.properties            |  42 -----
 .../src/test/resources/log4j.properties            |  42 -----
 .../src/test/resources/log4j.properties            |  42 -----
 .../src/test/resources/log4j.properties            |  42 -----
 .../kyuubi/engine/spark/SparkSQLEngine.scala       |   3 +
 pom.xml                                            |  21 ---
 65 files changed, 62 insertions(+), 1755 deletions(-)

diff --git a/.github/workflows/license.yml b/.github/workflows/license.yml
index 6b894dbc3..ad1330c51 100644
--- a/.github/workflows/license.yml
+++ b/.github/workflows/license.yml
@@ -45,7 +45,7 @@ jobs:
       - run: >-
           build/mvn org.apache.rat:apache-rat-plugin:check
           -Ptpcds -Pkubernetes-it
-          -Pspark-3.2 -Pspark-3.3 -Pspark-3.4 -Pspark-3.5
+          -Pspark-3.3 -Pspark-3.4 -Pspark-3.5
       - name: Upload rat report
         if: failure()
         uses: actions/upload-artifact@v3
diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 6c4ee314b..4e2d329b4 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -48,7 +48,6 @@ jobs:
           - 8
           - 17
         spark:
-          - '3.2'
           - '3.3'
           - '3.4'
           - '3.5'
@@ -81,10 +80,6 @@ jobs:
             spark-archive: '-Pscala-2.13 
-Dspark.archive.mirror=https://archive.apache.org/dist/spark/spark-4.0.0-preview1
 -Dspark.archive.name=spark-4.0.0-preview1-bin-hadoop3.tgz'
             exclude-tags: 
'-Dmaven.plugin.scalatest.exclude.tags=org.scalatest.tags.Slow,org.apache.kyuubi.tags.DeltaTest,org.apache.kyuubi.tags.IcebergTest,org.apache.kyuubi.tags.PaimonTest,org.apache.kyuubi.tags.SparkLocalClusterTest'
             comment: 'verify-on-spark-4.0-binary'
-        exclude:
-          # SPARK-33772: Spark supports JDK 17 since 3.3.0
-          - java: 17
-            spark: '3.2'
     env:
       SPARK_LOCAL_IP: localhost
     steps:
diff --git a/.github/workflows/publish-snapshot-nexus.yml 
b/.github/workflows/publish-snapshot-nexus.yml
index 4af12e94c..4713d0887 100644
--- a/.github/workflows/publish-snapshot-nexus.yml
+++ b/.github/workflows/publish-snapshot-nexus.yml
@@ -30,19 +30,15 @@ jobs:
       matrix:
         branch:
           - master
-          - branch-1.7
           - branch-1.8
+          - branch-1.9
         profiles:
-          - -Pflink-provided,spark-provided,hive-provided,spark-3.2
-          - -Pflink-provided,spark-provided,hive-provided,spark-3.3,tpcds
+          - -Pflink-provided,spark-provided,hive-provided,spark-3.3
+          - -Pflink-provided,spark-provided,hive-provided,spark-3.4,tpcds
         include:
-          - branch: master
-            profiles: -Pflink-provided,spark-provided,hive-provided,spark-3.4
           - branch: master
             profiles: -Pflink-provided,spark-provided,hive-provided,spark-3.5
-          - branch: branch-1.8
-            profiles: -Pflink-provided,spark-provided,hive-provided,spark-3.4
-          - branch: branch-1.8
+          - branch: branch-1.9
             profiles: -Pflink-provided,spark-provided,hive-provided,spark-3.5
     steps:
       - uses: actions/checkout@v4
diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml
index 4f9d1a3c2..90437579f 100644
--- a/.github/workflows/style.yml
+++ b/.github/workflows/style.yml
@@ -34,7 +34,7 @@ jobs:
     strategy:
       matrix:
         profiles:
-          - 
'-Pflink-provided,hive-provided,spark-provided,spark-3.5,spark-3.4,spark-3.3,spark-3.2,tpcds,kubernetes-it'
+          - 
'-Pflink-provided,hive-provided,spark-provided,spark-3.5,spark-3.4,spark-3.3,tpcds,kubernetes-it'
 
     steps:
       - uses: actions/checkout@v4
@@ -65,10 +65,10 @@ jobs:
         if: steps.modules-check.conclusion == 'success' && 
steps.modules-check.outcome == 'failure'
         run: |
           MVN_OPT="-DskipTests -Dorg.slf4j.simpleLogger.defaultLogLevel=warn 
-Dmaven.javadoc.skip=true -Drat.skip=true -Dscalastyle.skip=true 
-Dspotless.check.skip"
-          build/mvn clean install ${MVN_OPT} 
-Pflink-provided,hive-provided,spark-provided,spark-3.2,tpcds
-          build/mvn clean install ${MVN_OPT} -pl 
extensions/spark/kyuubi-extension-spark-3-3,extensions/spark/kyuubi-spark-connector-hive
 -Pspark-3.3
+          build/mvn clean install ${MVN_OPT} 
-Pflink-provided,hive-provided,spark-provided,tpcds
+          build/mvn clean install ${MVN_OPT} -pl 
extensions/spark/kyuubi-extension-spark-3-3 -Pspark-3.3
           build/mvn clean install ${MVN_OPT} -pl 
extensions/spark/kyuubi-extension-spark-3-4 -Pspark-3.4
-          build/mvn clean install ${MVN_OPT} -pl 
extensions/spark/kyuubi-extension-spark-3-5 -Pspark-3.5
+          build/mvn clean install ${MVN_OPT} -pl 
extensions/spark/kyuubi-extension-spark-3-5,extensions/spark/kyuubi-spark-connector-hive
 -Pspark-3.5
 
       - name: Scalastyle with maven
         id: scalastyle-check
diff --git a/build/release/release.sh b/build/release/release.sh
index e1aaae83f..121e3df09 100755
--- a/build/release/release.sh
+++ b/build/release/release.sh
@@ -110,11 +110,6 @@ upload_svn_staging() {
 }
 
 upload_nexus_staging() {
-  # Spark Extension Plugin for Spark 3.2
-  ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests 
-Papache-release,flink-provided,spark-provided,hive-provided,spark-3.2 \
-    -s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
-    -pl extensions/spark/kyuubi-extension-spark-3-2 -am
-
   # Spark Extension Plugin for Spark 3.3
   ${KYUUBI_DIR}/build/mvn clean deploy -DskipTests 
-Papache-release,flink-provided,spark-provided,hive-provided,spark-3.3 \
     -s "${KYUUBI_DIR}/build/release/asf-settings.xml" \
diff --git a/dev/kyuubi-codecov/pom.xml b/dev/kyuubi-codecov/pom.xml
index f224b8cff..4e3a0b81f 100644
--- a/dev/kyuubi-codecov/pom.xml
+++ b/dev/kyuubi-codecov/pom.xml
@@ -155,21 +155,6 @@
     </build>
 
     <profiles>
-        <profile>
-            <id>spark-3.2</id>
-            <dependencies>
-                <dependency>
-                    <groupId>org.apache.kyuubi</groupId>
-                    
<artifactId>kyuubi-extension-spark-3-2_${scala.binary.version}</artifactId>
-                    <version>${project.version}</version>
-                </dependency>
-                <dependency>
-                    <groupId>org.apache.kyuubi</groupId>
-                    
<artifactId>kyuubi-spark-authz_${scala.binary.version}</artifactId>
-                    <version>${project.version}</version>
-                </dependency>
-            </dependencies>
-        </profile>
         <profile>
             <id>spark-3.3</id>
             <dependencies>
@@ -203,6 +188,11 @@
                     
<artifactId>kyuubi-spark-connector-hive_${scala.binary.version}</artifactId>
                     <version>${project.version}</version>
                 </dependency>
+                <dependency>
+                    <groupId>org.apache.kyuubi</groupId>
+                    
<artifactId>kyuubi-spark-authz_${scala.binary.version}</artifactId>
+                    <version>${project.version}</version>
+                </dependency>
             </dependencies>
         </profile>
         <profile>
diff --git a/dev/reformat b/dev/reformat
index eca7b9f5d..121098704 100755
--- a/dev/reformat
+++ b/dev/reformat
@@ -20,7 +20,7 @@ set -x
 
 KYUUBI_HOME="$(cd "`dirname "$0"`/.."; pwd)"
 
-PROFILES="-Pflink-provided,hive-provided,spark-provided,spark-3.5,spark-3.4,spark-3.3,spark-3.2,tpcds,kubernetes-it"
+PROFILES="-Pflink-provided,hive-provided,spark-provided,spark-3.5,spark-3.4,spark-3.3,tpcds,kubernetes-it"
 
 # python style checks rely on `black` in path
 if ! command -v black &> /dev/null
diff --git a/docs/connector/spark/hudi.rst b/docs/connector/spark/hudi.rst
index b46c30e4c..0222c959f 100644
--- a/docs/connector/spark/hudi.rst
+++ b/docs/connector/spark/hudi.rst
@@ -45,7 +45,7 @@ The **classpath** of Kyuubi Spark SQL engine with Hudi 
supported consists of
 
 1. kyuubi-spark-sql-engine-\ |release|\ _2.12.jar, the engine jar deployed 
with a Kyuubi distribution
 2. a copy of Spark distribution
-3. hudi-spark<spark.version>-bundle_<scala.version>-<hudi.version>.jar 
(example: hudi-spark3.2-bundle_2.12-0.11.1.jar), which can be found in the 
`Maven Central`_
+3. hudi-spark<spark.version>-bundle_<scala.version>-<hudi.version>.jar (example: hudi-spark3.5-bundle_2.12-0.15.0.jar), which can be found in the `Maven Central`_
 
 In order to make the Hudi packages visible for the runtime classpath of 
engines, we can use one of these methods:
 
@@ -61,15 +61,12 @@ To activate functionality of Hudi, we can set the following 
configurations:
 
 .. code-block:: properties
 
-   # Spark 3.2
+   # Spark 3.2 to 3.5
    spark.serializer=org.apache.spark.serializer.KryoSerializer
+   spark.kryo.registrator=org.apache.spark.HoodieSparkKryoRegistrar
    spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension
    
spark.sql.catalog.spark_catalog=org.apache.spark.sql.hudi.catalog.HoodieCatalog
 
-   # Spark 3.1
-   spark.serializer=org.apache.spark.serializer.KryoSerializer
-   spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension
-
 Hudi Operations
 ---------------
 
diff --git a/docs/contributing/code/building.md 
b/docs/contributing/code/building.md
index 502925874..bc507bd3f 100644
--- a/docs/contributing/code/building.md
+++ b/docs/contributing/code/building.md
@@ -63,7 +63,6 @@ Since v1.1.0, Kyuubi support building with different Spark 
profiles,
 
 |   Profile   | Default | Since |
 |-------------|---------|-------|
-| -Pspark-3.2 |         | 1.4.0 |
 | -Pspark-3.3 |         | 1.6.0 |
 | -Pspark-3.4 |         | 1.8.0 |
 | -Pspark-3.5 | โœ“       | 1.8.0 |
diff --git a/docs/deployment/migration-guide.md 
b/docs/deployment/migration-guide.md
index 37e1c9a9d..6899359c8 100644
--- a/docs/deployment/migration-guide.md
+++ b/docs/deployment/migration-guide.md
@@ -21,16 +21,17 @@
 
 * Since Kyuubi 1.10, `beeline` is deprecated and will be removed in the 
future, please use `kyuubi-beeline` instead.
 * Since Kyuubi 1.10, the support of Spark engine for Spark 3.1 is removed.
+* Since Kyuubi 1.10, the support of Spark engine for Spark 3.2 is deprecated, 
and will be removed in the future.
 * Since Kyuubi 1.10, the support of Flink engine for Flink 1.16 is removed.
 
 ## Upgrading from Kyuubi 1.8 to 1.9
 
-* Since Kyuubi 1.9.0, `kyuubi.session.conf.advisor` can be set as a sequence, 
Kyuubi supported chaining SessionConfAdvisors.
-* Since Kyuubi 1.9.0, the support of Derby is removal for Kyuubi metastore.
-* Since Kyuubi 1.9.0, the support of Spark SQL engine for Spark 3.1 is 
deprecated, and will be removed in the future.
-* Since Kyuubi 1.9.0, the support of Spark extensions for Spark 3.1 is 
removed, please use Spark 3.2 or higher versions.
-* Since Kyuubi 1.9.0, `kyuubi.frontend.login.timeout`, 
`kyuubi.frontend.thrift.login.timeout`, `kyuubi.frontend.backoff.slot.length`, 
`kyuubi.frontend.thrift.backoff.slot.length` are removed.
-* Since Kyuubi 1.9.0, the support of Flink engine for Flink 1.16 is 
deprecated, and will be removed in the future.
+* Since Kyuubi 1.9, `kyuubi.session.conf.advisor` can be set as a sequence, and Kyuubi supports chaining SessionConfAdvisors.
+* Since Kyuubi 1.9, the support of Derby for the Kyuubi metastore is removed.
+* Since Kyuubi 1.9, the support of Spark SQL engine for Spark 3.1 is 
deprecated, and will be removed in the future.
+* Since Kyuubi 1.9, the support of Spark extensions for Spark 3.1 is removed, 
please use Spark 3.2 or higher versions.
+* Since Kyuubi 1.9, `kyuubi.frontend.login.timeout`, 
`kyuubi.frontend.thrift.login.timeout`, `kyuubi.frontend.backoff.slot.length`, 
`kyuubi.frontend.thrift.backoff.slot.length` are removed.
+* Since Kyuubi 1.9, the support of Flink engine for Flink 1.16 is deprecated, 
and will be removed in the future.
 
 ## Upgrading from Kyuubi 1.8.0 to 1.8.1
 
diff --git a/docs/deployment/spark/gluten.md b/docs/deployment/spark/gluten.md
index 371b74edf..1a45180d2 100644
--- a/docs/deployment/spark/gluten.md
+++ b/docs/deployment/spark/gluten.md
@@ -18,7 +18,7 @@
 
 # Gluten
 
-[Gluten](https://oap-project.github.io/gluten/) is a Spark plugin developed by 
Intel, designed to accelerate Apache Spark with native libraries. Currently, 
only CentOS 7/8 and Ubuntu 20.04/22.04, along with Spark 3.2/3.3/3.4, are 
supported. Users can employ the following methods to utilize the Gluten with 
Velox native libraries.
+[Gluten](https://oap-project.github.io/gluten/) is a Spark plugin developed by 
Intel, designed to accelerate Apache Spark with native libraries. Currently, 
only CentOS 7/8 and Ubuntu 20.04/22.04, along with Spark 3.3/3.4, are 
supported. Users can employ the following methods to utilize the Gluten with 
Velox native libraries.
 
 ## Building(with velox Backend)
 
@@ -30,7 +30,7 @@ Git clone gluten project, use gluten build script 
`buildbundle-veloxbe.sh`, and
 git clone https://github.com/oap-project/gluten.git
 cd /path/to/gluten
 
-## The script builds two jars for spark 3.2.x, 3.3.x, and 3.4.x.
+## The script builds two jars for spark 3.3.x and 3.4.x.
 ./dev/buildbundle-veloxbe.sh
 ```
 
diff --git a/docs/extensions/engines/spark/rules.md 
b/docs/extensions/engines/spark/rules.md
index bb6ee559c..bb46174c7 100644
--- a/docs/extensions/engines/spark/rules.md
+++ b/docs/extensions/engines/spark/rules.md
@@ -46,7 +46,7 @@ And don't worry, Kyuubi will support the new Apache Spark 
version in the future.
 | Kyuubi Spark SQL extension | Supported Spark version(s) | Available since  | 
 EOL  | Bundled in Binary release tarball | Maven profile |
 
|----------------------------|----------------------------|------------------|-------|-----------------------------------|---------------|
 | kyuubi-extension-spark-3-1 | 3.1.x                      | 1.3.0-incubating | 
1.8.0 | 1.3.0-incubating                  | spark-3.1     |
-| kyuubi-extension-spark-3-2 | 3.2.x                      | 1.4.0-incubating | 
N/A   | 1.4.0-incubating                  | spark-3.2     |
+| kyuubi-extension-spark-3-2 | 3.2.x                      | 1.4.0-incubating | 
1.9.0 | 1.4.0-incubating                  | spark-3.2     |
 | kyuubi-extension-spark-3-3 | 3.3.x                      | 1.6.0-incubating | 
N/A   | 1.6.0-incubating                  | spark-3.3     |
 | kyuubi-extension-spark-3-4 | 3.4.x                      | 1.8.0            | 
N/A   | 1.8.0                             | spark-3.4     |
 | kyuubi-extension-spark-3-5 | 3.5.x                      | 1.8.0            | 
N/A   | 1.9.0                             | spark-3.5     |
diff --git a/docs/extensions/engines/spark/z-order.md 
b/docs/extensions/engines/spark/z-order.md
index c7e6e049b..80e85cdf4 100644
--- a/docs/extensions/engines/spark/z-order.md
+++ b/docs/extensions/engines/spark/z-order.md
@@ -78,8 +78,6 @@ This feature is inside Kyuubi extension, so you should apply 
the extension to Sp
 - add extension jar: `copy $KYUUBI_HOME/extension/kyuubi-extension-spark-3-5* 
$SPARK_HOME/jars/`
 - add config into `spark-defaults.conf`: 
`spark.sql.extensions=org.apache.kyuubi.sql.KyuubiSparkSQLExtension`
 
-Due to the extension, z-order only works with Spark 3.2 and higher version.
-
 ### Optimize history data
 
 If you want to optimize the history data of a table, the `OPTIMIZE ...` syntax 
is good to go. Due to Spark SQL doesn't support read and overwrite same 
datasource table, the syntax can only support to optimize Hive table.
diff --git a/extensions/spark/kyuubi-extension-spark-3-2/pom.xml 
b/extensions/spark/kyuubi-extension-spark-3-2/pom.xml
deleted file mode 100644
index a5abb9133..000000000
--- a/extensions/spark/kyuubi-extension-spark-3-2/pom.xml
+++ /dev/null
@@ -1,157 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~    http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-<project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.kyuubi</groupId>
-        <artifactId>kyuubi-parent</artifactId>
-        <version>1.10.0-SNAPSHOT</version>
-        <relativePath>../../../pom.xml</relativePath>
-    </parent>
-
-    <artifactId>kyuubi-extension-spark-3-2_${scala.binary.version}</artifactId>
-    <packaging>jar</packaging>
-    <name>Kyuubi Dev Spark Extensions (for Spark 3.2)</name>
-    <url>https://kyuubi.apache.org/</url>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.kyuubi</groupId>
-            
<artifactId>kyuubi-extension-spark-common_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.kyuubi</groupId>
-            
<artifactId>kyuubi-extension-spark-common_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-library</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-sql_${scala.binary.version}</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-hive_${scala.binary.version}</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client-api</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-core_${scala.binary.version}</artifactId>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-catalyst_${scala.binary.version}</artifactId>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatestplus</groupId>
-            <artifactId>scalacheck-1-17_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-sql_${scala.binary.version}</artifactId>
-            <version>${spark.version}</version>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client-runtime</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>javax.servlet</groupId>
-            <artifactId>javax.servlet-api</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>jakarta.servlet</groupId>
-            <artifactId>jakarta.servlet-api</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.logging.log4j</groupId>
-            <artifactId>log4j-1.2-api</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.logging.log4j</groupId>
-            <artifactId>log4j-slf4j-impl</artifactId>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-shade-plugin</artifactId>
-                <configuration>
-                    <shadedArtifactAttached>false</shadedArtifactAttached>
-                    <artifactSet>
-                        <includes>
-                            <include>org.apache.kyuubi:*</include>
-                        </includes>
-                    </artifactSet>
-                </configuration>
-                <executions>
-                    <execution>
-                        <goals>
-                            <goal>shade</goal>
-                        </goals>
-                        <phase>package</phase>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-        
<outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-        
<testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-    </build>
-</project>
diff --git 
a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/DropIgnoreNonexistent.scala
 
b/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/DropIgnoreNonexistent.scala
deleted file mode 100644
index 6e07df09f..000000000
--- 
a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/DropIgnoreNonexistent.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.kyuubi.sql
-
-import org.apache.spark.sql.SparkSession
-import org.apache.spark.sql.catalyst.analysis.{UnresolvedRelation, 
UnresolvedTableOrView, UnresolvedView}
-import org.apache.spark.sql.catalyst.plans.logical.{DropTable, DropView, 
LogicalPlan, NoopCommand, UncacheTable}
-import org.apache.spark.sql.catalyst.rules.Rule
-import org.apache.spark.sql.execution.command.{AlterTableDropPartitionCommand, 
DropDatabaseCommand, DropFunctionCommand, DropTableCommand}
-
-import org.apache.kyuubi.sql.KyuubiSQLConf._
-
-case class DropIgnoreNonexistent(session: SparkSession) extends 
Rule[LogicalPlan] {
-
-  override def apply(plan: LogicalPlan): LogicalPlan = {
-    if (conf.getConf(DROP_IGNORE_NONEXISTENT)) {
-      plan match {
-        case i @ AlterTableDropPartitionCommand(_, _, false, _, _) =>
-          i.copy(ifExists = true)
-        case i @ DropTableCommand(_, false, _, _) =>
-          i.copy(ifExists = true)
-        case i @ DropDatabaseCommand(_, false, _) =>
-          i.copy(ifExists = true)
-        case i @ DropFunctionCommand(_, _, false, _) =>
-          i.copy(ifExists = true)
-        // like: 
org.apache.spark.sql.catalyst.analysis.ResolveCommandsWithIfExists
-        case DropTable(u: UnresolvedTableOrView, false, _) =>
-          NoopCommand("DROP TABLE", u.multipartIdentifier)
-        case DropView(u: UnresolvedView, false) =>
-          NoopCommand("DROP VIEW", u.multipartIdentifier)
-        case UncacheTable(u: UnresolvedRelation, false, _) =>
-          NoopCommand("UNCACHE TABLE", u.multipartIdentifier)
-        case _ => plan
-      }
-    } else {
-      plan
-    }
-  }
-
-}
diff --git 
a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/InsertShuffleNodeBeforeJoin.scala
 
b/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/InsertShuffleNodeBeforeJoin.scala
deleted file mode 100644
index 247647771..000000000
--- 
a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/InsertShuffleNodeBeforeJoin.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kyuubi.sql
-
-import org.apache.spark.sql.catalyst.plans.physical.Distribution
-import org.apache.spark.sql.catalyst.rules.Rule
-import org.apache.spark.sql.execution.{SortExec, SparkPlan}
-import org.apache.spark.sql.execution.adaptive.QueryStageExec
-import org.apache.spark.sql.execution.aggregate.BaseAggregateExec
-import org.apache.spark.sql.execution.exchange.{Exchange, ShuffleExchangeExec}
-import org.apache.spark.sql.execution.joins.{ShuffledHashJoinExec, 
SortMergeJoinExec}
-import org.apache.spark.sql.internal.SQLConf
-
-import org.apache.kyuubi.sql.KyuubiSQLConf._
-
-/**
- * Insert shuffle node before join if it doesn't exist to make 
`OptimizeSkewedJoin` works.
- */
-object InsertShuffleNodeBeforeJoin extends Rule[SparkPlan] {
-
-  override def apply(plan: SparkPlan): SparkPlan = {
-    // this rule has no meaning without AQE
-    if (!conf.getConf(FORCE_SHUFFLE_BEFORE_JOIN) ||
-      !conf.getConf(SQLConf.ADAPTIVE_EXECUTION_ENABLED)) {
-      return plan
-    }
-
-    val newPlan = insertShuffleBeforeJoin(plan)
-    if (plan.fastEquals(newPlan)) {
-      plan
-    } else {
-      // make sure the output partitioning and ordering will not be broken.
-      KyuubiEnsureRequirements.apply(newPlan)
-    }
-  }
-
-  private def insertShuffleBeforeJoin(plan: SparkPlan): SparkPlan = plan 
transformUp {
-    case smj @ SortMergeJoinExec(_, _, _, _, l, r, _) =>
-      
smj.withNewChildren(checkAndInsertShuffle(smj.requiredChildDistribution.head, 
l) ::
-        checkAndInsertShuffle(smj.requiredChildDistribution(1), r) :: Nil)
-
-    case shj: ShuffledHashJoinExec =>
-      if (!shj.left.isInstanceOf[Exchange] && 
!shj.right.isInstanceOf[Exchange]) {
-        
shj.withNewChildren(withShuffleExec(shj.requiredChildDistribution.head, 
shj.left) ::
-          withShuffleExec(shj.requiredChildDistribution(1), shj.right) :: Nil)
-      } else if (!shj.left.isInstanceOf[Exchange]) {
-        shj.withNewChildren(
-          withShuffleExec(shj.requiredChildDistribution.head, shj.left) :: 
shj.right :: Nil)
-      } else if (!shj.right.isInstanceOf[Exchange]) {
-        shj.withNewChildren(
-          shj.left :: withShuffleExec(shj.requiredChildDistribution(1), 
shj.right) :: Nil)
-      } else {
-        shj
-      }
-  }
-
-  private def checkAndInsertShuffle(
-      distribution: Distribution,
-      child: SparkPlan): SparkPlan = child match {
-    case SortExec(_, _, _: Exchange, _) =>
-      child
-    case SortExec(_, _, _: QueryStageExec, _) =>
-      child
-    case sort @ SortExec(_, _, agg: BaseAggregateExec, _) =>
-      sort.withNewChildren(withShuffleExec(distribution, agg) :: Nil)
-    case _ =>
-      withShuffleExec(distribution, child)
-  }
-
-  private def withShuffleExec(distribution: Distribution, child: SparkPlan): 
SparkPlan = {
-    val numPartitions = distribution.requiredNumPartitions
-      .getOrElse(conf.numShufflePartitions)
-    ShuffleExchangeExec(distribution.createPartitioning(numPartitions), child)
-  }
-}
diff --git 
a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLCommonExtension.scala
 
b/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLCommonExtension.scala
deleted file mode 100644
index 62aa88b98..000000000
--- 
a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLCommonExtension.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kyuubi.sql
-
-import org.apache.spark.sql.SparkSessionExtensions
-
-import org.apache.kyuubi.sql.zorder.{InsertZorderBeforeWritingDatasource, 
InsertZorderBeforeWritingHive, ResolveZorder}
-
-class KyuubiSparkSQLCommonExtension extends (SparkSessionExtensions => Unit) {
-  override def apply(extensions: SparkSessionExtensions): Unit = {
-    KyuubiSparkSQLCommonExtension.injectCommonExtensions(extensions)
-  }
-}
-
-object KyuubiSparkSQLCommonExtension {
-  def injectCommonExtensions(extensions: SparkSessionExtensions): Unit = {
-    // inject zorder parser and related rules
-    extensions.injectParser { case (_, parser) => new 
SparkKyuubiSparkSQLParser(parser) }
-    extensions.injectResolutionRule(ResolveZorder)
-
-    // Note that:
-    // InsertZorderBeforeWritingDatasource and InsertZorderBeforeWritingHive
-    // should be applied before
-    // RepartitionBeforeWriting and RebalanceBeforeWriting
-    // because we can only apply one of them (i.e. Global Sort or 
Repartition/Rebalance)
-    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingDatasource)
-    extensions.injectPostHocResolutionRule(InsertZorderBeforeWritingHive)
-    extensions.injectPostHocResolutionRule(FinalStageConfigIsolationCleanRule)
-
-    extensions.injectQueryStagePrepRule(_ => InsertShuffleNodeBeforeJoin)
-
-    extensions.injectPostHocResolutionRule(session => 
MarkNumOutputColumnsRule(session))
-    extensions.injectQueryStagePrepRule(FinalStageConfigIsolation(_))
-  }
-}
diff --git 
a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
 
b/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
deleted file mode 100644
index 9a0f5b1bb..000000000
--- 
a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLExtension.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kyuubi.sql
-
-import org.apache.spark.sql.SparkSessionExtensions
-
-import org.apache.kyuubi.sql.watchdog.{ForcedMaxOutputRowsRule, 
KyuubiUnsupportedOperationsCheck, MaxScanStrategy}
-
-// scalastyle:off line.size.limit
-/**
- * Depend on Spark SQL Extension framework, we can use this extension follow 
steps
- *   1. move this jar into $SPARK_HOME/jars
- *   2. add config into `spark-defaults.conf`: 
`spark.sql.extensions=org.apache.kyuubi.sql.KyuubiSparkSQLExtension`
- */
-// scalastyle:on line.size.limit
-class KyuubiSparkSQLExtension extends (SparkSessionExtensions => Unit) {
-  override def apply(extensions: SparkSessionExtensions): Unit = {
-    KyuubiSparkSQLCommonExtension.injectCommonExtensions(extensions)
-
-    extensions.injectPostHocResolutionRule(RebalanceBeforeWritingDatasource)
-    extensions.injectPostHocResolutionRule(RebalanceBeforeWritingHive)
-    extensions.injectPostHocResolutionRule(DropIgnoreNonexistent)
-
-    // watchdog extension
-    extensions.injectCheckRule(_ => KyuubiUnsupportedOperationsCheck)
-    extensions.injectOptimizerRule(ForcedMaxOutputRowsRule)
-    extensions.injectPlannerStrategy(MaxScanStrategy)
-  }
-}
diff --git 
a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLParser.scala
 
b/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLParser.scala
deleted file mode 100644
index 87c10bc34..000000000
--- 
a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLParser.scala
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kyuubi.sql
-
-import org.antlr.v4.runtime._
-import org.antlr.v4.runtime.atn.PredictionMode
-import org.antlr.v4.runtime.misc.{Interval, ParseCancellationException}
-import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.{FunctionIdentifier, SQLConfHelper, 
TableIdentifier}
-import org.apache.spark.sql.catalyst.expressions.Expression
-import org.apache.spark.sql.catalyst.parser.{ParseErrorListener, 
ParseException, ParserInterface, PostProcessor}
-import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.catalyst.trees.Origin
-import org.apache.spark.sql.types.{DataType, StructType}
-
-abstract class KyuubiSparkSQLParserBase extends ParserInterface with 
SQLConfHelper {
-  def delegate: ParserInterface
-  def astBuilder: KyuubiSparkSQLAstBuilder
-
-  override def parsePlan(sqlText: String): LogicalPlan = parse(sqlText) { 
parser =>
-    astBuilder.visit(parser.singleStatement()) match {
-      case optimize: UnparsedPredicateOptimize =>
-        astBuilder.buildOptimizeStatement(optimize, delegate.parseExpression)
-      case plan: LogicalPlan => plan
-      case _ => delegate.parsePlan(sqlText)
-    }
-  }
-
-  protected def parse[T](command: String)(toResult: KyuubiSparkSQLParser => 
T): T = {
-    val lexer = new KyuubiSparkSQLLexer(
-      new UpperCaseCharStream(CharStreams.fromString(command)))
-    lexer.removeErrorListeners()
-    lexer.addErrorListener(ParseErrorListener)
-
-    val tokenStream = new CommonTokenStream(lexer)
-    val parser = new KyuubiSparkSQLParser(tokenStream)
-    parser.addParseListener(PostProcessor)
-    parser.removeErrorListeners()
-    parser.addErrorListener(ParseErrorListener)
-
-    try {
-      try {
-        // first, try parsing with potentially faster SLL mode
-        parser.getInterpreter.setPredictionMode(PredictionMode.SLL)
-        toResult(parser)
-      } catch {
-        case _: ParseCancellationException =>
-          // if we fail, parse with LL mode
-          tokenStream.seek(0) // rewind input stream
-          parser.reset()
-
-          // Try Again.
-          parser.getInterpreter.setPredictionMode(PredictionMode.LL)
-          toResult(parser)
-      }
-    } catch {
-      case e: ParseException if e.command.isDefined =>
-        throw e
-      case e: ParseException =>
-        throw e.withCommand(command)
-      case e: AnalysisException =>
-        val position = Origin(e.line, e.startPosition)
-        throw new ParseException(Option(command), e.message, position, 
position)
-    }
-  }
-
-  override def parseExpression(sqlText: String): Expression = {
-    delegate.parseExpression(sqlText)
-  }
-
-  override def parseTableIdentifier(sqlText: String): TableIdentifier = {
-    delegate.parseTableIdentifier(sqlText)
-  }
-
-  override def parseFunctionIdentifier(sqlText: String): FunctionIdentifier = {
-    delegate.parseFunctionIdentifier(sqlText)
-  }
-
-  override def parseMultipartIdentifier(sqlText: String): Seq[String] = {
-    delegate.parseMultipartIdentifier(sqlText)
-  }
-
-  override def parseTableSchema(sqlText: String): StructType = {
-    delegate.parseTableSchema(sqlText)
-  }
-
-  override def parseDataType(sqlText: String): DataType = {
-    delegate.parseDataType(sqlText)
-  }
-}
-
-class SparkKyuubiSparkSQLParser(
-    override val delegate: ParserInterface)
-  extends KyuubiSparkSQLParserBase {
-  def astBuilder: KyuubiSparkSQLAstBuilder = new KyuubiSparkSQLAstBuilder
-}
-
-/* Copied from Apache Spark's to avoid dependency on Spark Internals */
-class UpperCaseCharStream(wrapped: CodePointCharStream) extends CharStream {
-  override def consume(): Unit = wrapped.consume()
-  override def getSourceName(): String = wrapped.getSourceName
-  override def index(): Int = wrapped.index
-  override def mark(): Int = wrapped.mark
-  override def release(marker: Int): Unit = wrapped.release(marker)
-  override def seek(where: Int): Unit = wrapped.seek(where)
-  override def size(): Int = wrapped.size
-
-  override def getText(interval: Interval): String = wrapped.getText(interval)
-
-  // scalastyle:off
-  override def LA(i: Int): Int = {
-    val la = wrapped.LA(i)
-    if (la == 0 || la == IntStream.EOF) la
-    else Character.toUpperCase(la)
-  }
-  // scalastyle:on
-}
diff --git 
a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/RebalanceBeforeWriting.scala
 
b/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/RebalanceBeforeWriting.scala
deleted file mode 100644
index 8f7f17c4a..000000000
--- 
a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/RebalanceBeforeWriting.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kyuubi.sql
-
-import org.apache.spark.sql.SparkSession
-import org.apache.spark.sql.catalyst.expressions.Attribute
-import org.apache.spark.sql.catalyst.plans.logical._
-
-trait RepartitionBuilderWithRebalance extends RepartitionBuilder {
-  override def buildRepartition(
-      dynamicPartitionColumns: Seq[Attribute],
-      query: LogicalPlan): LogicalPlan = {
-    RebalancePartitions(dynamicPartitionColumns, query)
-  }
-
-  override def canInsertRepartitionByExpression(plan: LogicalPlan): Boolean = {
-    super.canInsertRepartitionByExpression(plan) && {
-      plan match {
-        case _: RebalancePartitions => false
-        case _ => true
-      }
-    }
-  }
-}
-
-/**
- * For datasource table, there two commands can write data to table
- * 1. InsertIntoHadoopFsRelationCommand
- * 2. CreateDataSourceTableAsSelectCommand
- * This rule add a RebalancePartitions node between write and query
- */
-case class RebalanceBeforeWritingDatasource(session: SparkSession)
-  extends RepartitionBeforeWritingDatasourceBase
-  with RepartitionBuilderWithRebalance {}
-
-/**
- * For Hive table, there two commands can write data to table
- * 1. InsertIntoHiveTable
- * 2. CreateHiveTableAsSelectCommand
- * This rule add a RebalancePartitions node between write and query
- */
-case class RebalanceBeforeWritingHive(session: SparkSession)
-  extends RepartitionBeforeWritingHiveBase
-  with RepartitionBuilderWithRebalance {}
diff --git 
a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/watchdog/ForcedMaxOutputRowsRule.scala
 
b/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/watchdog/ForcedMaxOutputRowsRule.scala
deleted file mode 100644
index a3d990b10..000000000
--- 
a/extensions/spark/kyuubi-extension-spark-3-2/src/main/scala/org/apache/kyuubi/sql/watchdog/ForcedMaxOutputRowsRule.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kyuubi.sql.watchdog
-
-import org.apache.spark.sql.SparkSession
-import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, CommandResult, 
LogicalPlan, Union, WithCTE}
-import org.apache.spark.sql.execution.command.DataWritingCommand
-
-case class ForcedMaxOutputRowsRule(sparkSession: SparkSession) extends 
ForcedMaxOutputRowsBase {
-
-  override protected def isChildAggregate(a: Aggregate): Boolean = false
-
-  override protected def canInsertLimitInner(p: LogicalPlan): Boolean = p 
match {
-    case WithCTE(plan, _) => this.canInsertLimitInner(plan)
-    case plan: LogicalPlan => plan match {
-        case Union(children, _, _) => !children.exists {
-            case _: DataWritingCommand => true
-            case p: CommandResult if 
p.commandLogicalPlan.isInstanceOf[DataWritingCommand] => true
-            case _ => false
-          }
-        case _ => super.canInsertLimitInner(plan)
-      }
-  }
-
-  override protected def canInsertLimit(p: LogicalPlan, maxOutputRowsOpt: 
Option[Int]): Boolean = {
-    p match {
-      case WithCTE(plan, _) => this.canInsertLimit(plan, maxOutputRowsOpt)
-      case _ => super.canInsertLimit(p, maxOutputRowsOpt)
-    }
-  }
-}
diff --git 
a/extensions/spark/kyuubi-extension-spark-3-2/src/test/resources/log4j.properties
 
b/extensions/spark/kyuubi-extension-spark-3-2/src/test/resources/log4j.properties
deleted file mode 100644
index ce168ae48..000000000
--- 
a/extensions/spark/kyuubi-extension-spark-3-2/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Set everything to be logged to the file target/unit-tests.log
-log4j.rootLogger=INFO, CA, FA
-
-# Console Appender
-log4j.appender.CA=org.apache.log4j.ConsoleAppender
-log4j.appender.CA.layout=org.apache.log4j.PatternLayout
-log4j.appender.CA.layout.ConversionPattern=%d{HH:mm:ss.SSS} %p %c: %m%n
-log4j.appender.CA.Threshold = FATAL
-
-# File Appender
-log4j.appender.FA=org.apache.log4j.FileAppender
-log4j.appender.FA.append=false
-log4j.appender.FA.file=target/unit-tests.log
-log4j.appender.FA.layout=org.apache.log4j.PatternLayout
-log4j.appender.FA.layout.ConversionPattern=%d{HH:mm:ss.SSS} %t %p %c{2}: %m%n
-log4j.appender.FA.Threshold = DEBUG
-
-# SPARK-34128: Suppress undesirable TTransportException warnings involved in 
THRIFT-4805
-log4j.appender.CA.filter.1=org.apache.log4j.varia.StringMatchFilter
-log4j.appender.CA.filter.1.StringToMatch=Thrift error occurred during 
processing of message
-log4j.appender.CA.filter.1.AcceptOnMatch=false
-
-log4j.appender.FA.filter.1=org.apache.log4j.varia.StringMatchFilter
-log4j.appender.FA.filter.1.StringToMatch=Thrift error occurred during 
processing of message
-log4j.appender.FA.filter.1.AcceptOnMatch=false
diff --git 
a/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/DropIgnoreNonexistentSuite.scala
 
b/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/DropIgnoreNonexistentSuite.scala
deleted file mode 100644
index 9fc4522b4..000000000
--- 
a/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/DropIgnoreNonexistentSuite.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.spark.sql
-
-import org.apache.spark.sql.catalyst.plans.logical.NoopCommand
-import org.apache.spark.sql.execution.command._
-
-import org.apache.kyuubi.sql.KyuubiSQLConf
-
-class DropIgnoreNonexistentSuite extends KyuubiSparkSQLExtensionTest {
-
-  test("drop ignore nonexistent") {
-    withSQLConf(KyuubiSQLConf.DROP_IGNORE_NONEXISTENT.key -> "true") {
-      // drop nonexistent database
-      val df1 = sql("DROP DATABASE nonexistent_database")
-      
assert(df1.queryExecution.analyzed.asInstanceOf[DropDatabaseCommand].ifExists 
== true)
-
-      // drop nonexistent table
-      val df2 = sql("DROP TABLE nonexistent_table")
-      assert(df2.queryExecution.analyzed.isInstanceOf[NoopCommand])
-
-      // drop nonexistent view
-      val df3 = sql("DROP VIEW nonexistent_view")
-      assert(df3.queryExecution.analyzed.isInstanceOf[NoopCommand])
-
-      // drop nonexistent function
-      val df4 = sql("DROP FUNCTION nonexistent_function")
-      
assert(df4.queryExecution.analyzed.asInstanceOf[DropFunctionCommand].ifExists 
== true)
-
-      // drop nonexistent PARTITION
-      withTable("test") {
-        sql("CREATE TABLE IF NOT EXISTS test(i int) PARTITIONED BY (p int)")
-        val df5 = sql("ALTER TABLE test DROP PARTITION (p = 1)")
-        assert(df5.queryExecution.analyzed
-          .asInstanceOf[AlterTableDropPartitionCommand].ifExists == true)
-      }
-    }
-  }
-
-}
diff --git 
a/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/FinalStageConfigIsolationSuite.scala
 
b/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/FinalStageConfigIsolationSuite.scala
deleted file mode 100644
index e1ad1b2ca..000000000
--- 
a/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/FinalStageConfigIsolationSuite.scala
+++ /dev/null
@@ -1,207 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql
-
-import org.apache.spark.sql.execution.adaptive.{AQEShuffleReadExec, QueryStageExec}
-import org.apache.spark.sql.internal.SQLConf
-
-import org.apache.kyuubi.sql.{FinalStageConfigIsolation, KyuubiSQLConf, MarkNumOutputColumnsRule}
-
-class FinalStageConfigIsolationSuite extends KyuubiSparkSQLExtensionTest {
-  override protected def beforeAll(): Unit = {
-    super.beforeAll()
-    setupData()
-  }
-
-  test("final stage config set reset check") {
-    withSQLConf(
-      KyuubiSQLConf.FINAL_STAGE_CONFIG_ISOLATION.key -> "true",
-      KyuubiSQLConf.FINAL_STAGE_CONFIG_ISOLATION_WRITE_ONLY.key -> "false",
-      "spark.sql.finalStage.adaptive.coalescePartitions.minPartitionNum" -> 
"1",
-      "spark.sql.finalStage.adaptive.advisoryPartitionSizeInBytes" -> "100") {
-      // use loop to double check final stage config doesn't affect the sql 
query each other
-      (1 to 3).foreach { _ =>
-        sql("SELECT COUNT(*) FROM VALUES(1) as t(c)").collect()
-        assert(spark.sessionState.conf.getConfString(
-          
"spark.sql.previousStage.adaptive.coalescePartitions.minPartitionNum") ===
-          FinalStageConfigIsolation.INTERNAL_UNSET_CONFIG_TAG)
-        assert(spark.sessionState.conf.getConfString(
-          "spark.sql.adaptive.coalescePartitions.minPartitionNum") ===
-          "1")
-        assert(spark.sessionState.conf.getConfString(
-          "spark.sql.finalStage.adaptive.coalescePartitions.minPartitionNum") 
===
-          "1")
-
-        // 64MB
-        assert(spark.sessionState.conf.getConfString(
-          "spark.sql.previousStage.adaptive.advisoryPartitionSizeInBytes") ===
-          "67108864b")
-        assert(spark.sessionState.conf.getConfString(
-          "spark.sql.adaptive.advisoryPartitionSizeInBytes") ===
-          "100")
-        assert(spark.sessionState.conf.getConfString(
-          "spark.sql.finalStage.adaptive.advisoryPartitionSizeInBytes") ===
-          "100")
-      }
-
-      sql("SET spark.sql.adaptive.advisoryPartitionSizeInBytes=1")
-      assert(spark.sessionState.conf.getConfString(
-        "spark.sql.adaptive.advisoryPartitionSizeInBytes") ===
-        "1")
-      assert(!spark.sessionState.conf.contains(
-        "spark.sql.previousStage.adaptive.advisoryPartitionSizeInBytes"))
-
-      sql("SET a=1")
-      assert(spark.sessionState.conf.getConfString("a") === "1")
-
-      sql("RESET spark.sql.adaptive.coalescePartitions.minPartitionNum")
-      assert(!spark.sessionState.conf.contains(
-        "spark.sql.adaptive.coalescePartitions.minPartitionNum"))
-      assert(!spark.sessionState.conf.contains(
-        "spark.sql.previousStage.adaptive.coalescePartitions.minPartitionNum"))
-
-      sql("RESET a")
-      assert(!spark.sessionState.conf.contains("a"))
-    }
-  }
-
-  test("final stage config isolation") {
-    def checkPartitionNum(
-        sqlString: String,
-        previousPartitionNum: Int,
-        finalPartitionNum: Int): Unit = {
-      val df = sql(sqlString)
-      df.collect()
-      val shuffleReaders = collect(df.queryExecution.executedPlan) {
-        case customShuffleReader: AQEShuffleReadExec => customShuffleReader
-      }
-      assert(shuffleReaders.nonEmpty)
-      // reorder stage by stage id to ensure we get the right stage
-      val sortedShuffleReaders = shuffleReaders.sortWith {
-        case (s1, s2) =>
-          s1.child.asInstanceOf[QueryStageExec].id < s2.child.asInstanceOf[QueryStageExec].id
-      }
-      if (sortedShuffleReaders.length > 1) {
-        assert(sortedShuffleReaders.head.partitionSpecs.length === previousPartitionNum)
-      }
-      assert(sortedShuffleReaders.last.partitionSpecs.length === finalPartitionNum)
-      assert(df.rdd.partitions.length === finalPartitionNum)
-      assert(MarkNumOutputColumnsRule.numOutputColumns(spark).isEmpty)
-    }
-
-    withSQLConf(
-      SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1",
-      SQLConf.COALESCE_PARTITIONS_MIN_PARTITION_NUM.key -> "1",
-      SQLConf.SHUFFLE_PARTITIONS.key -> "3",
-      KyuubiSQLConf.FINAL_STAGE_CONFIG_ISOLATION.key -> "true",
-      KyuubiSQLConf.FINAL_STAGE_CONFIG_ISOLATION_WRITE_ONLY.key -> "false",
-      "spark.sql.adaptive.advisoryPartitionSizeInBytes" -> "1",
-      "spark.sql.adaptive.coalescePartitions.minPartitionSize" -> "1",
-      "spark.sql.finalStage.adaptive.advisoryPartitionSizeInBytes" -> 
"10000000") {
-
-      // use loop to double check final stage config doesn't affect the sql 
query each other
-      (1 to 3).foreach { _ =>
-        checkPartitionNum(
-          "SELECT c1, count(*) FROM t1 GROUP BY c1",
-          1,
-          1)
-
-        checkPartitionNum(
-          "SELECT c2, count(*) FROM (SELECT c1, count(*) as c2 FROM t1 GROUP 
BY c1) GROUP BY c2",
-          3,
-          1)
-
-        checkPartitionNum(
-          "SELECT t1.c1, count(*) FROM t1 JOIN t2 ON t1.c2 = t2.c2 GROUP BY 
t1.c1",
-          3,
-          1)
-
-        checkPartitionNum(
-          """
-            | SELECT /*+ REPARTITION */
-            | t1.c1, count(*) FROM t1
-            | JOIN t2 ON t1.c2 = t2.c2
-            | JOIN t3 ON t1.c1 = t3.c1
-            | GROUP BY t1.c1
-            |""".stripMargin,
-          3,
-          1)
-
-        // one shuffle reader
-        checkPartitionNum(
-          """
-            | SELECT /*+ BROADCAST(t1) */
-            | t1.c1, t2.c2 FROM t1
-            | JOIN t2 ON t1.c2 = t2.c2
-            | DISTRIBUTE BY c1
-            |""".stripMargin,
-          1,
-          1)
-
-        // test ReusedExchange
-        checkPartitionNum(
-          """
-            |SELECT /*+ REPARTITION */ t0.c2 FROM (
-            |SELECT t1.c1, (count(*) + c1) as c2 FROM t1 GROUP BY t1.c1
-            |) t0 JOIN (
-            |SELECT t1.c1, (count(*) + c1) as c2 FROM t1 GROUP BY t1.c1
-            |) t1 ON t0.c2 = t1.c2
-            |""".stripMargin,
-          3,
-          1)
-
-        // one shuffle reader
-        checkPartitionNum(
-          """
-            |SELECT t0.c1 FROM (
-            |SELECT t1.c1 FROM t1 GROUP BY t1.c1
-            |) t0 JOIN (
-            |SELECT t1.c1 FROM t1 GROUP BY t1.c1
-            |) t1 ON t0.c1 = t1.c1
-            |""".stripMargin,
-          1,
-          1)
-      }
-    }
-  }
-
-  test("final stage config isolation write only") {
-    withSQLConf(
-      KyuubiSQLConf.FINAL_STAGE_CONFIG_ISOLATION.key -> "true",
-      KyuubiSQLConf.FINAL_STAGE_CONFIG_ISOLATION_WRITE_ONLY.key -> "true",
-      "spark.sql.finalStage.adaptive.advisoryPartitionSizeInBytes" -> "7") {
-      sql("set spark.sql.adaptive.advisoryPartitionSizeInBytes=5")
-      sql("SELECT * FROM t1").count()
-      assert(MarkNumOutputColumnsRule.numOutputColumns(spark).isEmpty)
-      assert(spark.conf.getOption("spark.sql.adaptive.advisoryPartitionSizeInBytes")
-        .contains("5"))
-
-      withTable("tmp") {
-        sql("CREATE TABLE t1 USING PARQUET SELECT /*+ repartition */ 1 AS c1, 
'a' AS c2")
-        assert(MarkNumOutputColumnsRule.numOutputColumns(spark).contains("2"))
-        
assert(spark.conf.getOption("spark.sql.adaptive.advisoryPartitionSizeInBytes")
-          .contains("7"))
-      }
-
-      sql("SELECT * FROM t1").count()
-      assert(MarkNumOutputColumnsRule.numOutputColumns(spark).isEmpty)
-      assert(spark.conf.getOption("spark.sql.adaptive.advisoryPartitionSizeInBytes")
-        .contains("5"))
-    }
-  }
-}
diff --git 
a/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/InsertShuffleNodeBeforeJoinSuite.scala
 
b/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/InsertShuffleNodeBeforeJoinSuite.scala
deleted file mode 100644
index f0d384657..000000000
--- 
a/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/InsertShuffleNodeBeforeJoinSuite.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.spark.sql
-
-class InsertShuffleNodeBeforeJoinSuite extends InsertShuffleNodeBeforeJoinSuiteBase
diff --git 
a/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/RebalanceBeforeWritingSuite.scala
 
b/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/RebalanceBeforeWritingSuite.scala
deleted file mode 100644
index 2edf34808..000000000
--- 
a/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/RebalanceBeforeWritingSuite.scala
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql
-
-import org.apache.spark.sql.catalyst.expressions.Attribute
-import org.apache.spark.sql.catalyst.plans.logical.RebalancePartitions
-import org.apache.spark.sql.hive.HiveUtils
-import org.apache.spark.sql.hive.execution.OptimizedCreateHiveTableAsSelectCommand
-
-import org.apache.kyuubi.sql.KyuubiSQLConf
-
-class RebalanceBeforeWritingSuite extends KyuubiSparkSQLExtensionTest {
-  test("check rebalance exists") {
-    def check(df: => DataFrame, expectedRebalanceNum: Int = 1): Unit = {
-      withSQLConf(KyuubiSQLConf.INSERT_REPARTITION_BEFORE_WRITE_IF_NO_SHUFFLE.key -> "true") {
-        assert(
-          df.queryExecution.analyzed.collect {
-            case r: RebalancePartitions => r
-          }.size == expectedRebalanceNum)
-      }
-      withSQLConf(KyuubiSQLConf.INSERT_REPARTITION_BEFORE_WRITE_IF_NO_SHUFFLE.key -> "false") {
-        assert(
-          df.queryExecution.analyzed.collect {
-            case r: RebalancePartitions => r
-          }.isEmpty)
-      }
-    }
-
-    // It's better to set config explicitly in case of we change the default value.
-    withSQLConf(KyuubiSQLConf.INSERT_REPARTITION_BEFORE_WRITE.key -> "true") {
-      Seq("USING PARQUET", "").foreach { storage =>
-        withTable("tmp1") {
-          sql(s"CREATE TABLE tmp1 (c1 int) $storage PARTITIONED BY (c2 
string)")
-          check(sql("INSERT INTO TABLE tmp1 PARTITION(c2='a') " +
-            "SELECT * FROM VALUES(1),(2) AS t(c1)"))
-        }
-
-        withTable("tmp1", "tmp2") {
-          sql(s"CREATE TABLE tmp1 (c1 int) $storage PARTITIONED BY (c2 
string)")
-          sql(s"CREATE TABLE tmp2 (c1 int) $storage PARTITIONED BY (c2 
string)")
-          check(
-            sql(
-              """FROM VALUES(1),(2)
-                |INSERT INTO TABLE tmp1 PARTITION(c2='a') SELECT *
-                |INSERT INTO TABLE tmp2 PARTITION(c2='a') SELECT *
-                |""".stripMargin),
-            2)
-        }
-
-        withTable("tmp1") {
-          sql(s"CREATE TABLE tmp1 (c1 int) $storage")
-          check(sql("INSERT INTO TABLE tmp1 SELECT * FROM VALUES(1),(2),(3) AS 
t(c1)"))
-        }
-
-        withTable("tmp1", "tmp2") {
-          sql(s"CREATE TABLE tmp1 (c1 int) $storage")
-          sql(s"CREATE TABLE tmp2 (c1 int) $storage")
-          check(
-            sql(
-              """FROM VALUES(1),(2),(3)
-                |INSERT INTO TABLE tmp1 SELECT *
-                |INSERT INTO TABLE tmp2 SELECT *
-                |""".stripMargin),
-            2)
-        }
-
-        withTable("tmp1") {
-          sql(s"CREATE TABLE tmp1 $storage AS SELECT * FROM VALUES(1),(2),(3) 
AS t(c1)")
-        }
-
-        withTable("tmp1") {
-          sql(s"CREATE TABLE tmp1 $storage PARTITIONED BY(c2) AS " +
-            s"SELECT * FROM VALUES(1, 'a'),(2, 'b') AS t(c1, c2)")
-        }
-      }
-    }
-  }
-
-  test("check rebalance does not exists") {
-    def check(df: DataFrame): Unit = {
-      assert(
-        df.queryExecution.analyzed.collect {
-          case r: RebalancePartitions => r
-        }.isEmpty)
-    }
-
-    withSQLConf(
-      KyuubiSQLConf.INSERT_REPARTITION_BEFORE_WRITE.key -> "true",
-      KyuubiSQLConf.INSERT_REPARTITION_BEFORE_WRITE_IF_NO_SHUFFLE.key -> "true") {
-      // test no write command
-      check(sql("SELECT * FROM VALUES(1, 'a'),(2, 'b') AS t(c1, c2)"))
-      check(sql("SELECT count(*) FROM VALUES(1, 'a'),(2, 'b') AS t(c1, c2)"))
-
-      // test not supported plan
-      withTable("tmp1") {
-        sql(s"CREATE TABLE tmp1 (c1 int) PARTITIONED BY (c2 string)")
-        check(sql("INSERT INTO TABLE tmp1 PARTITION(c2) " +
-          "SELECT /*+ repartition(10) */ * FROM VALUES(1, 'a'),(2, 'b') AS 
t(c1, c2)"))
-        check(sql("INSERT INTO TABLE tmp1 PARTITION(c2) " +
-          "SELECT * FROM VALUES(1, 'a'),(2, 'b') AS t(c1, c2) ORDER BY c1"))
-        check(sql("INSERT INTO TABLE tmp1 PARTITION(c2) " +
-          "SELECT * FROM VALUES(1, 'a'),(2, 'b') AS t(c1, c2) LIMIT 10"))
-      }
-    }
-
-    withSQLConf(KyuubiSQLConf.INSERT_REPARTITION_BEFORE_WRITE.key -> "false") {
-      Seq("USING PARQUET", "").foreach { storage =>
-        withTable("tmp1") {
-          sql(s"CREATE TABLE tmp1 (c1 int) $storage PARTITIONED BY (c2 
string)")
-          check(sql("INSERT INTO TABLE tmp1 PARTITION(c2) " +
-            "SELECT * FROM VALUES(1, 'a'),(2, 'b') AS t(c1, c2)"))
-        }
-
-        withTable("tmp1") {
-          sql(s"CREATE TABLE tmp1 (c1 int) $storage")
-          check(sql("INSERT INTO TABLE tmp1 SELECT * FROM VALUES(1),(2),(3) AS 
t(c1)"))
-        }
-
-        withTable("tmp1") {
-          sql(s"CREATE TABLE tmp1 $storage AS SELECT * FROM VALUES(1),(2),(3) 
AS t(c1)")
-        }
-
-        withTable("tmp1") {
-          sql(s"CREATE TABLE tmp1 $storage PARTITIONED BY(c2) AS " +
-            s"SELECT * FROM VALUES(1, 'a'),(2, 'b') AS t(c1, c2)")
-        }
-      }
-    }
-  }
-
-  test("test dynamic partition write") {
-    def checkRepartitionExpression(df: DataFrame): Unit = {
-      assert(df.queryExecution.analyzed.collect {
-        case r: RebalancePartitions if r.partitionExpressions.size == 1 =>
-          assert(r.partitionExpressions.head.asInstanceOf[Attribute].name === "c2")
-          r
-      }.size == 1)
-    }
-
-    withSQLConf(
-      KyuubiSQLConf.INSERT_REPARTITION_BEFORE_WRITE.key -> "true",
-      KyuubiSQLConf.INSERT_REPARTITION_BEFORE_WRITE_IF_NO_SHUFFLE.key -> "true") {
-      Seq("USING PARQUET", "").foreach { storage =>
-        withTable("tmp1") {
-          sql(s"CREATE TABLE tmp1 (c1 int) $storage PARTITIONED BY (c2 
string)")
-          checkRepartitionExpression(sql("INSERT INTO TABLE tmp1 SELECT 1 as 
c1, 'a' as c2 "))
-        }
-
-        withTable("tmp1") {
-          checkRepartitionExpression(
-            sql("CREATE TABLE tmp1 PARTITIONED BY(C2) SELECT 1 as c1, 'a' as 
c2 "))
-        }
-      }
-    }
-  }
-
-  test("OptimizedCreateHiveTableAsSelectCommand") {
-    withSQLConf(
-      HiveUtils.CONVERT_METASTORE_PARQUET.key -> "true",
-      HiveUtils.CONVERT_METASTORE_CTAS.key -> "true",
-      KyuubiSQLConf.INSERT_REPARTITION_BEFORE_WRITE_IF_NO_SHUFFLE.key -> "true") {
-      withTable("t") {
-        val df = sql(s"CREATE TABLE t STORED AS parquet AS SELECT 1 as a")
-        val ctas = df.queryExecution.analyzed.collect {
-          case _: OptimizedCreateHiveTableAsSelectCommand => true
-        }
-        assert(ctas.size == 1)
-        val repartition = df.queryExecution.analyzed.collect {
-          case _: RebalancePartitions => true
-        }
-        assert(repartition.size == 1)
-      }
-    }
-  }
-}
diff --git 
a/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/WatchDogSuite.scala
 
b/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/WatchDogSuite.scala
deleted file mode 100644
index 957089340..000000000
--- 
a/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/WatchDogSuite.scala
+++ /dev/null
@@ -1,20 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql
-
-class WatchDogSuite extends WatchDogSuiteBase {}
diff --git 
a/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/ZorderSuite.scala
 
b/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/ZorderSuite.scala
deleted file mode 100644
index 29a166abf..000000000
--- 
a/extensions/spark/kyuubi-extension-spark-3-2/src/test/scala/org/apache/spark/sql/ZorderSuite.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.sql
-
-import org.apache.spark.sql.catalyst.parser.ParserInterface
-
-import org.apache.kyuubi.sql.SparkKyuubiSparkSQLParser
-
-trait ParserSuite { self: ZorderSuiteBase =>
-  override def createParser: ParserInterface = {
-    new SparkKyuubiSparkSQLParser(spark.sessionState.sqlParser)
-  }
-}
-
-class ZorderWithCodegenEnabledSuite
-  extends ZorderWithCodegenEnabledSuiteBase
-  with ParserSuite {}
-
-class ZorderWithCodegenDisabledSuite
-  extends ZorderWithCodegenDisabledSuiteBase
-  with ParserSuite {}
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/benchmarks/ZorderCoreBenchmark-results.txt
 
b/extensions/spark/kyuubi-extension-spark-3-3/benchmarks/ZorderCoreBenchmark-results.txt
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/benchmarks/ZorderCoreBenchmark-results.txt
rename to 
extensions/spark/kyuubi-extension-spark-3-3/benchmarks/ZorderCoreBenchmark-results.txt
diff --git a/extensions/spark/kyuubi-extension-spark-3-3/pom.xml 
b/extensions/spark/kyuubi-extension-spark-3-3/pom.xml
index c918de9c0..de7fe9bde 100644
--- a/extensions/spark/kyuubi-extension-spark-3-3/pom.xml
+++ b/extensions/spark/kyuubi-extension-spark-3-3/pom.xml
@@ -31,36 +31,6 @@
     <url>https://kyuubi.apache.org/</url>
 
     <dependencies>
-        <dependency>
-            <groupId>org.apache.kyuubi</groupId>
-            <artifactId>kyuubi-extension-spark-common_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.kyuubi</groupId>
-            <artifactId>kyuubi-download</artifactId>
-            <version>${project.version}</version>
-            <type>pom</type>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.kyuubi</groupId>
-            <artifactId>kyuubi-extension-spark-common_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.kyuubi</groupId>
-            <artifactId>kyuubi-util-scala_${scala.binary.version}</artifactId>
-            <version>${project.version}</version>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-
         <dependency>
             <groupId>org.scala-lang</groupId>
             <artifactId>scala-library</artifactId>
@@ -85,6 +55,22 @@
             <scope>provided</scope>
         </dependency>
 
+        <dependency>
+            <groupId>org.apache.kyuubi</groupId>
+            <artifactId>kyuubi-download</artifactId>
+            <version>${project.version}</version>
+            <type>pom</type>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.kyuubi</groupId>
+            <artifactId>kyuubi-util-scala_${scala.binary.version}</artifactId>
+            <version>${project.version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+
         <dependency>
             <groupId>org.apache.spark</groupId>
             <artifactId>spark-core_${scala.binary.version}</artifactId>
@@ -130,6 +116,12 @@
             <artifactId>jakarta.servlet-api</artifactId>
             <scope>test</scope>
         </dependency>
+
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-slf4j-impl</artifactId>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
     <build>
@@ -167,6 +159,15 @@
                     </environmentVariables>
                 </configuration>
             </plugin>
+            <plugin>
+                <groupId>org.antlr</groupId>
+                <artifactId>antlr4-maven-plugin</artifactId>
+                <configuration>
+                    <visitor>true</visitor>
+                    <sourceDirectory>${project.basedir}/src/main/antlr4</sourceDirectory>
+                </configuration>
+            </plugin>
+
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-shade-plugin</artifactId>
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/antlr4/org/apache/kyuubi/sql/KyuubiSparkSQL.g4
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/antlr4/org/apache/kyuubi/sql/KyuubiSparkSQL.g4
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/antlr4/org/apache/kyuubi/sql/KyuubiSparkSQL.g4
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/antlr4/org/apache/kyuubi/sql/KyuubiSparkSQL.g4
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiEnsureRequirements.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiEnsureRequirements.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiEnsureRequirements.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiEnsureRequirements.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiQueryStagePreparation.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiQueryStagePreparation.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiQueryStagePreparation.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiQueryStagePreparation.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiSQLConf.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSQLConf.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiSQLConf.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSQLConf.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiSQLExtensionException.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSQLExtensionException.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiSQLExtensionException.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSQLExtensionException.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLAstBuilder.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLAstBuilder.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLAstBuilder.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLAstBuilder.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/MarkNumOutputColumnsRule.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/MarkNumOutputColumnsRule.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/MarkNumOutputColumnsRule.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/MarkNumOutputColumnsRule.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/RepartitionBeforeWritingBase.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/RepartitionBeforeWritingBase.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/RepartitionBeforeWritingBase.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/RepartitionBeforeWritingBase.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/watchdog/ForcedMaxOutputRowsBase.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/watchdog/ForcedMaxOutputRowsBase.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/watchdog/ForcedMaxOutputRowsBase.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/watchdog/ForcedMaxOutputRowsBase.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiUnsupportedOperationsCheck.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiUnsupportedOperationsCheck.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiUnsupportedOperationsCheck.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiUnsupportedOperationsCheck.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiWatchDogException.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiWatchDogException.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiWatchDogException.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/watchdog/KyuubiWatchDogException.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/watchdog/MaxScanStrategy.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/watchdog/MaxScanStrategy.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/watchdog/MaxScanStrategy.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/watchdog/MaxScanStrategy.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/InsertZorderBeforeWritingBase.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/zorder/InsertZorderBeforeWritingBase.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/InsertZorderBeforeWritingBase.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/zorder/InsertZorderBeforeWritingBase.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/OptimizeZorderCommandBase.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/zorder/OptimizeZorderCommandBase.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/OptimizeZorderCommandBase.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/zorder/OptimizeZorderCommandBase.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/OptimizeZorderStatementBase.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/zorder/OptimizeZorderStatementBase.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/OptimizeZorderStatementBase.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/zorder/OptimizeZorderStatementBase.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/ResolveZorderBase.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/zorder/ResolveZorderBase.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/ResolveZorderBase.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/zorder/ResolveZorderBase.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/ZorderBase.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/zorder/ZorderBase.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/ZorderBase.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/zorder/ZorderBase.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/ZorderBytesUtils.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/zorder/ZorderBytesUtils.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/zorder/ZorderBytesUtils.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/kyuubi/sql/zorder/ZorderBytesUtils.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/spark/sql/PruneFileSourcePartitionHelper.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/spark/sql/PruneFileSourcePartitionHelper.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/spark/sql/PruneFileSourcePartitionHelper.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/main/scala/org/apache/spark/sql/PruneFileSourcePartitionHelper.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/InsertShuffleNodeBeforeJoinSuiteBase.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/InsertShuffleNodeBeforeJoinSuiteBase.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/InsertShuffleNodeBeforeJoinSuiteBase.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/InsertShuffleNodeBeforeJoinSuiteBase.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/KyuubiSparkSQLExtensionTest.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/KyuubiSparkSQLExtensionTest.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/KyuubiSparkSQLExtensionTest.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/KyuubiSparkSQLExtensionTest.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/WatchDogSuiteBase.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/WatchDogSuiteBase.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/WatchDogSuiteBase.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/WatchDogSuiteBase.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/ZorderCoreBenchmark.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/ZorderCoreBenchmark.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/ZorderCoreBenchmark.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/ZorderCoreBenchmark.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/benchmark/KyuubiBenchmarkBase.scala
 
b/extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/benchmark/KyuubiBenchmarkBase.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/benchmark/KyuubiBenchmarkBase.scala
rename to 
extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/benchmark/KyuubiBenchmarkBase.scala
diff --git a/extensions/spark/kyuubi-extension-spark-common/pom.xml 
b/extensions/spark/kyuubi-extension-spark-common/pom.xml
deleted file mode 100644
index c621fe409..000000000
--- a/extensions/spark/kyuubi-extension-spark-common/pom.xml
+++ /dev/null
@@ -1,152 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~    http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-<project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.kyuubi</groupId>
-        <artifactId>kyuubi-parent</artifactId>
-        <version>1.10.0-SNAPSHOT</version>
-        <relativePath>../../../pom.xml</relativePath>
-    </parent>
-
-    <artifactId>kyuubi-extension-spark-common_${scala.binary.version}</artifactId>
-    <packaging>jar</packaging>
-    <name>Kyuubi Dev Spark Extensions Common (for Spark 3)</name>
-    <url>https://kyuubi.apache.org/</url>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-library</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-sql_${scala.binary.version}</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-hive_${scala.binary.version}</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client-api</artifactId>
-            <scope>provided</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-core_${scala.binary.version}</artifactId>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-catalyst_${scala.binary.version}</artifactId>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatestplus</groupId>
-            <artifactId>scalacheck-1-17_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-sql_${scala.binary.version}</artifactId>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-client-runtime</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>javax.servlet</groupId>
-            <artifactId>javax.servlet-api</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>jakarta.servlet</groupId>
-            <artifactId>jakarta.servlet-api</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.logging.log4j</groupId>
-            <artifactId>log4j-1.2-api</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.logging.log4j</groupId>
-            <artifactId>log4j-slf4j-impl</artifactId>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.antlr</groupId>
-                <artifactId>antlr4-maven-plugin</artifactId>
-                <configuration>
-                    <visitor>true</visitor>
-                    <sourceDirectory>${project.basedir}/src/main/antlr4</sourceDirectory>
-                </configuration>
-            </plugin>
-
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-dependency-plugin</artifactId>
-                <configuration>
-                    <skip>true</skip>
-                </configuration>
-            </plugin>
-
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-jar-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <id>prepare-test-jar</id>
-                        <goals>
-                            <goal>test-jar</goal>
-                        </goals>
-                        <phase>test-compile</phase>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-        <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
-        <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
-    </build>
-</project>
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/test/resources/log4j2-test.xml
 
b/extensions/spark/kyuubi-extension-spark-common/src/test/resources/log4j2-test.xml
deleted file mode 100644
index 3110216c1..000000000
--- 
a/extensions/spark/kyuubi-extension-spark-common/src/test/resources/log4j2-test.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<!-- Extra logging related to initialization of Log4j. 
- Set to debug or trace if log4j initialization is failing. -->
-<Configuration status="WARN">
-    <Appenders>
-        <Console name="stdout" target="SYSTEM_OUT">
-            <PatternLayout pattern="%d{HH:mm:ss.SSS} %p %c: %m%n%ex"/>
-            <Filters>
-                <ThresholdFilter level="FATAL"/>
-                <RegexFilter regex=".*Thrift error occurred during processing 
of message.*" onMatch="DENY" onMismatch="NEUTRAL"/>
-            </Filters>
-        </Console>
-        <File name="file" fileName="target/unit-tests.log">
-            <PatternLayout pattern="%d{HH:mm:ss.SSS} %t %p %c{1}: %m%n%ex"/>
-            <Filters>
-                <RegexFilter regex=".*Thrift error occurred during processing 
of message.*" onMatch="DENY" onMismatch="NEUTRAL"/>
-            </Filters>
-        </File>
-    </Appenders>
-    <Loggers>
-        <Root level="INFO">
-            <AppenderRef ref="stdout"/>
-            <AppenderRef ref="file"/>
-        </Root>
-    </Loggers>
-</Configuration>
diff --git 
a/extensions/spark/kyuubi-extension-spark-jdbc-dialect/src/test/scala/resources/log4j.properties
 
b/extensions/spark/kyuubi-extension-spark-jdbc-dialect/src/test/scala/resources/log4j.properties
deleted file mode 100644
index ce168ae48..000000000
--- 
a/extensions/spark/kyuubi-extension-spark-jdbc-dialect/src/test/scala/resources/log4j.properties
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Set everything to be logged to the file target/unit-tests.log
-log4j.rootLogger=INFO, CA, FA
-
-# Console Appender
-log4j.appender.CA=org.apache.log4j.ConsoleAppender
-log4j.appender.CA.layout=org.apache.log4j.PatternLayout
-log4j.appender.CA.layout.ConversionPattern=%d{HH:mm:ss.SSS} %p %c: %m%n
-log4j.appender.CA.Threshold = FATAL
-
-# File Appender
-log4j.appender.FA=org.apache.log4j.FileAppender
-log4j.appender.FA.append=false
-log4j.appender.FA.file=target/unit-tests.log
-log4j.appender.FA.layout=org.apache.log4j.PatternLayout
-log4j.appender.FA.layout.ConversionPattern=%d{HH:mm:ss.SSS} %t %p %c{2}: %m%n
-log4j.appender.FA.Threshold = DEBUG
-
-# SPARK-34128: Suppress undesirable TTransportException warnings involved in THRIFT-4805
-log4j.appender.CA.filter.1=org.apache.log4j.varia.StringMatchFilter
-log4j.appender.CA.filter.1.StringToMatch=Thrift error occurred during processing of message
-log4j.appender.CA.filter.1.AcceptOnMatch=false
-
-log4j.appender.FA.filter.1=org.apache.log4j.varia.StringMatchFilter
-log4j.appender.FA.filter.1.StringToMatch=Thrift error occurred during processing of message
-log4j.appender.FA.filter.1.AcceptOnMatch=false
diff --git 
a/extensions/spark/kyuubi-spark-authz/src/test/resources/log4j.properties 
b/extensions/spark/kyuubi-spark-authz/src/test/resources/log4j.properties
deleted file mode 100644
index ce168ae48..000000000
--- a/extensions/spark/kyuubi-spark-authz/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Set everything to be logged to the file target/unit-tests.log
-log4j.rootLogger=INFO, CA, FA
-
-# Console Appender
-log4j.appender.CA=org.apache.log4j.ConsoleAppender
-log4j.appender.CA.layout=org.apache.log4j.PatternLayout
-log4j.appender.CA.layout.ConversionPattern=%d{HH:mm:ss.SSS} %p %c: %m%n
-log4j.appender.CA.Threshold = FATAL
-
-# File Appender
-log4j.appender.FA=org.apache.log4j.FileAppender
-log4j.appender.FA.append=false
-log4j.appender.FA.file=target/unit-tests.log
-log4j.appender.FA.layout=org.apache.log4j.PatternLayout
-log4j.appender.FA.layout.ConversionPattern=%d{HH:mm:ss.SSS} %t %p %c{2}: %m%n
-log4j.appender.FA.Threshold = DEBUG
-
-# SPARK-34128: Suppress undesirable TTransportException warnings involved in THRIFT-4805
-log4j.appender.CA.filter.1=org.apache.log4j.varia.StringMatchFilter
-log4j.appender.CA.filter.1.StringToMatch=Thrift error occurred during processing of message
-log4j.appender.CA.filter.1.AcceptOnMatch=false
-
-log4j.appender.FA.filter.1=org.apache.log4j.varia.StringMatchFilter
-log4j.appender.FA.filter.1.StringToMatch=Thrift error occurred during processing of message
-log4j.appender.FA.filter.1.AcceptOnMatch=false
diff --git 
a/extensions/spark/kyuubi-spark-connector-common/src/test/resources/log4j.properties
 
b/extensions/spark/kyuubi-spark-connector-common/src/test/resources/log4j.properties
deleted file mode 100644
index ce168ae48..000000000
--- 
a/extensions/spark/kyuubi-spark-connector-common/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Set everything to be logged to the file target/unit-tests.log
-log4j.rootLogger=INFO, CA, FA
-
-# Console Appender
-log4j.appender.CA=org.apache.log4j.ConsoleAppender
-log4j.appender.CA.layout=org.apache.log4j.PatternLayout
-log4j.appender.CA.layout.ConversionPattern=%d{HH:mm:ss.SSS} %p %c: %m%n
-log4j.appender.CA.Threshold = FATAL
-
-# File Appender
-log4j.appender.FA=org.apache.log4j.FileAppender
-log4j.appender.FA.append=false
-log4j.appender.FA.file=target/unit-tests.log
-log4j.appender.FA.layout=org.apache.log4j.PatternLayout
-log4j.appender.FA.layout.ConversionPattern=%d{HH:mm:ss.SSS} %t %p %c{2}: %m%n
-log4j.appender.FA.Threshold = DEBUG
-
-# SPARK-34128: Suppress undesirable TTransportException warnings involved in THRIFT-4805
-log4j.appender.CA.filter.1=org.apache.log4j.varia.StringMatchFilter
-log4j.appender.CA.filter.1.StringToMatch=Thrift error occurred during processing of message
-log4j.appender.CA.filter.1.AcceptOnMatch=false
-
-log4j.appender.FA.filter.1=org.apache.log4j.varia.StringMatchFilter
-log4j.appender.FA.filter.1.StringToMatch=Thrift error occurred during processing of message
-log4j.appender.FA.filter.1.AcceptOnMatch=false
diff --git 
a/extensions/spark/kyuubi-spark-connector-tpcds/src/test/resources/log4j.properties
 
b/extensions/spark/kyuubi-spark-connector-tpcds/src/test/resources/log4j.properties
deleted file mode 100644
index ce168ae48..000000000
--- 
a/extensions/spark/kyuubi-spark-connector-tpcds/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Set everything to be logged to the file target/unit-tests.log
-log4j.rootLogger=INFO, CA, FA
-
-# Console Appender
-log4j.appender.CA=org.apache.log4j.ConsoleAppender
-log4j.appender.CA.layout=org.apache.log4j.PatternLayout
-log4j.appender.CA.layout.ConversionPattern=%d{HH:mm:ss.SSS} %p %c: %m%n
-log4j.appender.CA.Threshold = FATAL
-
-# File Appender
-log4j.appender.FA=org.apache.log4j.FileAppender
-log4j.appender.FA.append=false
-log4j.appender.FA.file=target/unit-tests.log
-log4j.appender.FA.layout=org.apache.log4j.PatternLayout
-log4j.appender.FA.layout.ConversionPattern=%d{HH:mm:ss.SSS} %t %p %c{2}: %m%n
-log4j.appender.FA.Threshold = DEBUG
-
-# SPARK-34128: Suppress undesirable TTransportException warnings involved in THRIFT-4805
-log4j.appender.CA.filter.1=org.apache.log4j.varia.StringMatchFilter
-log4j.appender.CA.filter.1.StringToMatch=Thrift error occurred during processing of message
-log4j.appender.CA.filter.1.AcceptOnMatch=false
-
-log4j.appender.FA.filter.1=org.apache.log4j.varia.StringMatchFilter
-log4j.appender.FA.filter.1.StringToMatch=Thrift error occurred during processing of message
-log4j.appender.FA.filter.1.AcceptOnMatch=false
diff --git 
a/extensions/spark/kyuubi-spark-connector-tpch/src/test/resources/log4j.properties
 
b/extensions/spark/kyuubi-spark-connector-tpch/src/test/resources/log4j.properties
deleted file mode 100644
index ce168ae48..000000000
--- 
a/extensions/spark/kyuubi-spark-connector-tpch/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Set everything to be logged to the file target/unit-tests.log
-log4j.rootLogger=INFO, CA, FA
-
-# Console Appender
-log4j.appender.CA=org.apache.log4j.ConsoleAppender
-log4j.appender.CA.layout=org.apache.log4j.PatternLayout
-log4j.appender.CA.layout.ConversionPattern=%d{HH:mm:ss.SSS} %p %c: %m%n
-log4j.appender.CA.Threshold = FATAL
-
-# File Appender
-log4j.appender.FA=org.apache.log4j.FileAppender
-log4j.appender.FA.append=false
-log4j.appender.FA.file=target/unit-tests.log
-log4j.appender.FA.layout=org.apache.log4j.PatternLayout
-log4j.appender.FA.layout.ConversionPattern=%d{HH:mm:ss.SSS} %t %p %c{2}: %m%n
-log4j.appender.FA.Threshold = DEBUG
-
-# SPARK-34128: Suppress undesirable TTransportException warnings involved in THRIFT-4805
-log4j.appender.CA.filter.1=org.apache.log4j.varia.StringMatchFilter
-log4j.appender.CA.filter.1.StringToMatch=Thrift error occurred during processing of message
-log4j.appender.CA.filter.1.AcceptOnMatch=false
-
-log4j.appender.FA.filter.1=org.apache.log4j.varia.StringMatchFilter
-log4j.appender.FA.filter.1.StringToMatch=Thrift error occurred during processing of message
-log4j.appender.FA.filter.1.AcceptOnMatch=false
diff --git 
a/extensions/spark/kyuubi-spark-lineage/src/test/resources/log4j.properties 
b/extensions/spark/kyuubi-spark-lineage/src/test/resources/log4j.properties
deleted file mode 100644
index ce168ae48..000000000
--- a/extensions/spark/kyuubi-spark-lineage/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Set everything to be logged to the file target/unit-tests.log
-log4j.rootLogger=INFO, CA, FA
-
-# Console Appender
-log4j.appender.CA=org.apache.log4j.ConsoleAppender
-log4j.appender.CA.layout=org.apache.log4j.PatternLayout
-log4j.appender.CA.layout.ConversionPattern=%d{HH:mm:ss.SSS} %p %c: %m%n
-log4j.appender.CA.Threshold = FATAL
-
-# File Appender
-log4j.appender.FA=org.apache.log4j.FileAppender
-log4j.appender.FA.append=false
-log4j.appender.FA.file=target/unit-tests.log
-log4j.appender.FA.layout=org.apache.log4j.PatternLayout
-log4j.appender.FA.layout.ConversionPattern=%d{HH:mm:ss.SSS} %t %p %c{2}: %m%n
-log4j.appender.FA.Threshold = DEBUG
-
-# SPARK-34128: Suppress undesirable TTransportException warnings involved in THRIFT-4805
-log4j.appender.CA.filter.1=org.apache.log4j.varia.StringMatchFilter
-log4j.appender.CA.filter.1.StringToMatch=Thrift error occurred during processing of message
-log4j.appender.CA.filter.1.AcceptOnMatch=false
-
-log4j.appender.FA.filter.1=org.apache.log4j.varia.StringMatchFilter
-log4j.appender.FA.filter.1.StringToMatch=Thrift error occurred during processing of message
-log4j.appender.FA.filter.1.AcceptOnMatch=false
diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala
index a78b13c1b..d2e205d31 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala
@@ -383,6 +383,9 @@ object SparkSQLEngine extends Logging {
   }
 
   def main(args: Array[String]): Unit = {
+    if (KyuubiSparkUtil.SPARK_ENGINE_RUNTIME_VERSION === "3.2") {
+      warn("The support for Spark 3.2 is deprecated, and will be removed in 
the next version.")
+    }
     val startedTime = System.currentTimeMillis()
     val submitTime = kyuubiConf.getOption(KYUUBI_ENGINE_SUBMIT_TIME_KEY) match {
       case Some(t) => t.toLong
diff --git a/pom.xml b/pom.xml
index e44a8e48a..450b137ff 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1442,7 +1442,6 @@
                         </environmentVariables>
                         <systemProperties>
                             <log4j.ignoreTCL>true</log4j.ignoreTCL>
-                            <log4j.configuration>file:src/test/resources/log4j.properties</log4j.configuration>
                             <log4j2.configurationFile>src/test/resources/log4j2-test.xml</log4j2.configurationFile>
                             <java.io.tmpdir>${project.build.directory}/tmp</java.io.tmpdir>
                             <spark.driver.memory>1g</spark.driver.memory>
@@ -1470,7 +1469,6 @@
                         </environmentVariables>
                         <systemProperties>
                             <log4j.ignoreTCL>true</log4j.ignoreTCL>
-                            <log4j.configuration>file:src/test/resources/log4j.properties</log4j.configuration>
                             <log4j2.configurationFile>src/test/resources/log4j2-test.xml</log4j2.configurationFile>
                             <java.io.tmpdir>${project.build.directory}/tmp</java.io.tmpdir>
                             <spark.driver.memory>1g</spark.driver.memory>
@@ -1978,28 +1976,9 @@
             </build>
         </profile>
 
-        <profile>
-            <id>spark-3.2</id>
-            <modules>
-                <module>extensions/spark/kyuubi-extension-spark-common</module>
-                <module>extensions/spark/kyuubi-extension-spark-3-2</module>
-            </modules>
-            <properties>
-                <spark.version>3.2.4</spark.version>
-                <spark.binary.version>3.2</spark.binary.version>
-                <delta.artifact>delta-core_${scala.binary.version}</delta.artifact>
-                <delta.version>2.0.2</delta.version>
-                <!-- Iceberg 1.5.0 removed support for Spark 3.2. apache/iceberg#9295 -->
-                <iceberg.version>1.4.3</iceberg.version>
-                <spark.archive.name>spark-${spark.version}-bin-hadoop3.2${spark.archive.scala.suffix}.tgz</spark.archive.name>
-                <maven.plugin.scalatest.exclude.tags>org.scalatest.tags.Slow</maven.plugin.scalatest.exclude.tags>
-            </properties>
-        </profile>
-
         <profile>
             <id>spark-3.3</id>
             <modules>
-                <module>extensions/spark/kyuubi-extension-spark-common</module>
                 <module>extensions/spark/kyuubi-extension-spark-3-3</module>
                 <module>extensions/spark/kyuubi-spark-connector-hive</module>
             </modules>
