This is an automated email from the ASF dual-hosted git repository.

csy pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/auron.git


The following commit(s) were added to refs/heads/master by this push:
     new 9b079f8a [AURON #1404] Support for Spark 4.0.2 Compatibility in Auron. (#1990)
9b079f8a is described below

commit 9b079f8ac3466f48ad5e72c478e722cb8da37ba0
Author: slfan1989 <[email protected]>
AuthorDate: Mon Feb 9 15:36:09 2026 +0800

    [AURON #1404] Support for Spark 4.0.2 Compatibility in Auron. (#1990)
    
    ### Which issue does this PR close?
    
    Closes #1404.
    
    ### Rationale for this change
    
    To support **Spark 4**, Auron needs to be adapted accordingly. `Celeborn`
    already supports **Spark 4.0**, and `Iceberg` has supported **Spark 4.0**
    for some time; the Iceberg community has also voted to deprecate **Spark
    3.4** support, and it will be removed soon.
    
    ### What changes are included in this PR?
    
    This PR adds a `spark-4.0` build profile (with enforcer rules requiring
    JDK 17+ and Scala 2.13), extends the TPC-DS CI matrix, and resolves three
    compilation issues encountered when building against Spark 4.0:
    
    - `NativeShuffleExchangeExec#ShuffleWriteProcessor`: SPARK-44605
    restructured the `write` method of this API, so the partition and RDD
    handling was refactored to retrieve both from the shuffle dependency,
    keeping compatibility with the other interfaces. In the future we should
    switch to the new interface and adjust nativeRssShuffleWrite /
    nativeShuffleWrite accordingly (see the sketch after this list).
    
    - `NativeBroadcastExchangeBase#getBroadcastTimeout`: in Spark 4.0, the
    broadcast timeout has to be fetched via getActiveSession (also sketched
    below).
    
    - `NativeBroadcastExchangeBase#getRelationFuture`: in Spark 4.0, the
    concrete type of SparkSession changed to
    org.apache.spark.sql.classic.SparkSession, so the way the session is
    accessed was adjusted accordingly (also sketched below).
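
    As an illustration, here is a minimal sketch of the `ShuffleWriteProcessor`
    shim described above. Only the `inputs` / `dep` parameters and the call to
    `internalWrite` come from the actual change; the remaining parameter names,
    the `MapStatus` return type, and the partition lookup are assumptions:

    ```scala
    // Hypothetical sketch: Spark 4.x (SPARK-44605) passes the row iterator
    // instead of (rdd, partition), so the shim recovers both from the
    // dependency and keeps calling the pre-4.x native write path.
    @sparkver("4.0 / 4.1")
    override def write(
        inputs: Iterator[_],
        dep: ShuffleDependency[_, _, _],
        mapId: Long, // assumed parameter
        partitionId: Int, // assumed parameter
        context: TaskContext): MapStatus = {
      // AuronShuffleDependency.getInputRdd exposes the dependency's _rdd on 4.x.
      val rdd = dep.asInstanceOf[AuronShuffleDependency[_, _, _]].getInputRdd
      val partition = rdd.partitions(partitionId)
      internalWrite(rdd, dep, mapId, context, partition)
    }
    ```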
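
    Likewise, a hedged sketch of the two `NativeBroadcastExchangeBase` shims;
    the `getActiveSession` / `broadcastTimeout` lookup and the executor handle
    are assumptions, while the `session.sqlContext.sparkSession` access is
    taken from the actual change:

    ```scala
    // Hypothetical sketch: read the broadcast timeout from the active session.
    @sparkver("4.0 / 4.1")
    protected def getBroadcastTimeout: Long =
      org.apache.spark.sql.SparkSession.getActiveSession
        .map(_.sessionState.conf.broadcastTimeout) // assumed conf accessor
        .getOrElse(throw new IllegalStateException("no active SparkSession"))

    @sparkver("4.0 / 4.1")
    private def getRelationFuture = {
      SQLExecution.withThreadLocalCaptured[Broadcast[Any]](
        // On Spark 4.0+, sqlContext.sparkSession yields the concrete
        // org.apache.spark.sql.classic.SparkSession.
        this.session.sqlContext.sparkSession,
        executionContext) { // assumed executor handle
        doBuildBroadcast() // hypothetical body producing Broadcast[Any]
      }
    }
    ```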
    
    
    ### Are there any user-facing changes?
    
    No.
    
    ### How was this patch tested?
    
    CI.
    
    ---------
    
    Signed-off-by: slfan1989 <[email protected]>
---
 .github/workflows/tpcds-reusable.yml               |  5 ++-
 .github/workflows/tpcds.yml                        |  9 +++++
 auron-build.sh                                     |  2 +-
 .../sql/execution/ui/AuronAllExecutionsPage.scala  |  2 +-
 dev/auron-it/pom.xml                               | 39 +++++++++++++++++++
 pom.xml                                            | 42 +++++++++++++++++++++
 .../sql/auron/InterceptedValidateSparkPlan.scala   |  4 +-
 .../org/apache/spark/sql/auron/ShimsImpl.scala     | 44 +++++++++++-----------
 .../execution/auron/plan/ConvertToNativeExec.scala |  2 +-
 .../sql/execution/auron/plan/NativeAggExec.scala   | 10 ++---
 .../auron/plan/NativeBroadcastExchangeExec.scala   |  2 +-
 .../auron/plan/NativeCollectLimitExec.scala        |  2 +-
 .../execution/auron/plan/NativeExpandExec.scala    |  2 +-
 .../execution/auron/plan/NativeFilterExec.scala    |  2 +-
 .../execution/auron/plan/NativeGenerateExec.scala  |  2 +-
 .../auron/plan/NativeGlobalLimitExec.scala         |  2 +-
 .../auron/plan/NativeLocalLimitExec.scala          |  2 +-
 .../NativeParquetInsertIntoHiveTableExec.scala     |  6 +--
 .../auron/plan/NativeParquetSinkExec.scala         |  2 +-
 .../auron/plan/NativePartialTakeOrderedExec.scala  |  2 +-
 .../auron/plan/NativeProjectExecProvider.scala     |  2 +-
 .../plan/NativeRenameColumnsExecProvider.scala     |  2 +-
 .../auron/plan/NativeShuffleExchangeExec.scala     | 10 ++---
 .../sql/execution/auron/plan/NativeSortExec.scala  |  2 +-
 .../auron/plan/NativeTakeOrderedExec.scala         |  2 +-
 .../sql/execution/auron/plan/NativeUnionExec.scala |  2 +-
 .../execution/auron/plan/NativeWindowExec.scala    |  2 +-
 .../shuffle/AuronBlockStoreShuffleReader.scala     |  2 +-
 .../auron/shuffle/AuronRssShuffleManagerBase.scala |  2 +-
 .../auron/shuffle/AuronShuffleManager.scala        |  4 +-
 .../auron/shuffle/AuronShuffleWriter.scala         |  2 +-
 .../joins/auron/plan/NativeBroadcastJoinExec.scala | 12 +++---
 .../plan/NativeShuffledHashJoinExecProvider.scala  |  2 +-
 .../plan/NativeSortMergeJoinExecProvider.scala     |  2 +-
 .../execution/AuronAdaptiveQueryExecSuite.scala    |  2 +-
 .../auron/columnar/AuronColumnarArray.scala        |  2 +-
 .../auron/columnar/AuronColumnarBatchRow.scala     |  2 +-
 .../auron/columnar/AuronColumnarStruct.scala       |  2 +-
 .../auron/plan/NativeBroadcastExchangeBase.scala   |  2 +-
 .../auron/shuffle/AuronShuffleDependency.scala     |  2 +-
 .../auron/plan/NativeHiveTableScanBase.scala       |  2 +-
 41 files changed, 169 insertions(+), 76 deletions(-)

diff --git a/.github/workflows/tpcds-reusable.yml b/.github/workflows/tpcds-reusable.yml
index fe6c73d1..d3c264a4 100644
--- a/.github/workflows/tpcds-reusable.yml
+++ b/.github/workflows/tpcds-reusable.yml
@@ -226,7 +226,7 @@ jobs:
         if: steps.cache-spark-bin.outputs.cache-hit != 'true'
         run: |
          SPARK_PATH="spark/spark-${{ steps.get-dependency-version.outputs.sparkversion }}"
-          if [ ${{ inputs.scalaver }} = "2.13" && "${{ inputs.sparkver }}" != "spark-4.1" ]; then
+          if [ ${{ inputs.scalaver }} = "2.13" && "${{ inputs.sparkver }}" != "spark-4.0" && "${{ inputs.sparkver }}" != "spark-4.1" ]; then
            SPARK_FILE="spark-${{ steps.get-dependency-version.outputs.sparkversion }}-bin-${{ inputs.hadoop-profile }}-scala${{ inputs.scalaver }}.tgz"
          else
            SPARK_FILE="spark-${{ steps.get-dependency-version.outputs.sparkversion }}-bin-${{ inputs.hadoop-profile }}.tgz"
@@ -393,7 +393,8 @@ jobs:
         uses: actions/upload-artifact@v6
         with:
           name: >
-            rss-log${{ inputs.celebornver && format('-{0}', inputs.celebornver) || ''
+            rss-log-${{ inputs.sparkver }}_${{ inputs.scalaver }}-jdk-${{ inputs.javaver
+            }}${{ inputs.celebornver && format('-{0}', inputs.celebornver) || ''
             }}${{ inputs.unifflever && format('-{0}', inputs.unifflever) || '' }}
           path: |
             /tmp/rss/logs/*
diff --git a/.github/workflows/tpcds.yml b/.github/workflows/tpcds.yml
index e68e79db..f4c9c2cd 100644
--- a/.github/workflows/tpcds.yml
+++ b/.github/workflows/tpcds.yml
@@ -88,6 +88,15 @@ jobs:
       scalaver: '2.13'
       hadoop-profile: 'hadoop3'
 
+  test-spark-40-jdk21-scala-2-13:
+    name: Test spark-4.0 JDK21 Scala-2.13
+    uses: ./.github/workflows/tpcds-reusable.yml
+    with:
+      sparkver: spark-4.0
+      javaver: '21'
+      scalaver: '2.13'
+      hadoop-profile: 'hadoop3'
+
   test-spark-41-jdk21-scala-2-13:
     name: Test spark-4.1 JDK21 Scala-2.13
     uses: ./.github/workflows/tpcds-reusable.yml
diff --git a/auron-build.sh b/auron-build.sh
index e949021b..47e4a5ea 100755
--- a/auron-build.sh
+++ b/auron-build.sh
@@ -30,7 +30,7 @@
 #   Define constants for supported component versions
 # -----------------------------------------------------------------------------
 SUPPORTED_OS_IMAGES=("centos7" "ubuntu24" "rockylinux8" "debian11" "azurelinux3")
-SUPPORTED_SPARK_VERSIONS=("3.0" "3.1" "3.2" "3.3" "3.4" "3.5" "4.1")
+SUPPORTED_SPARK_VERSIONS=("3.0" "3.1" "3.2" "3.3" "3.4" "3.5" "4.0" "4.1")
 SUPPORTED_SCALA_VERSIONS=("2.12" "2.13")
 SUPPORTED_CELEBORN_VERSIONS=("0.5" "0.6")
 # Currently only one supported version, but kept plural for consistency
diff --git a/auron-spark-ui/src/main/scala/org/apache/spark/sql/execution/ui/AuronAllExecutionsPage.scala b/auron-spark-ui/src/main/scala/org/apache/spark/sql/execution/ui/AuronAllExecutionsPage.scala
index 9b4630af..56f96d95 100644
--- a/auron-spark-ui/src/main/scala/org/apache/spark/sql/execution/ui/AuronAllExecutionsPage.scala
+++ b/auron-spark-ui/src/main/scala/org/apache/spark/sql/execution/ui/AuronAllExecutionsPage.scala
@@ -52,7 +52,7 @@ private[ui] class AuronAllExecutionsPage(parent: AuronSQLTab) extends WebUIPage(
     UIUtils.headerSparkPage(request, "Auron", summary, parent)
   }
 
-  @sparkver("4.1")
+  @sparkver("4.0 / 4.1")
   override def render(request: jakarta.servlet.http.HttpServletRequest): Seq[Node] = {
     val buildInfo = sqlStore.buildInfo()
     val infos =
diff --git a/dev/auron-it/pom.xml b/dev/auron-it/pom.xml
index 37d4f3bc..657f0863 100644
--- a/dev/auron-it/pom.xml
+++ b/dev/auron-it/pom.xml
@@ -331,6 +331,45 @@
       </properties>
     </profile>
 
+    <profile>
+      <id>spark-4.0</id>
+      <properties>
+        <shimName>spark-4.0</shimName>
+        <sparkVersion>4.0.2</sparkVersion>
+      </properties>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-enforcer-plugin</artifactId>
+            <version>${maven-enforcer-plugin.version}</version>
+            <executions>
+              <execution>
+                <id>spark40-enforce-java-scala-version</id>
+                <goals>
+                  <goal>enforce</goal>
+                </goals>
+                <configuration>
+                  <rules>
+                    <!-- Spark 4.0 requires JDK 17+ and Scala 2.13.x -->
+                    <requireJavaVersion>
+                      <version>[17,)</version>
+                      <message>Spark 4.0 requires JDK 17 or higher. Current: ${java.version}</message>
+                    </requireJavaVersion>
+                    <requireProperty>
+                      <property>scalaLongVersion</property>
+                      <regex>2\.13\.\d+</regex>
+                      <regexMessage>Spark 4.0 requires Scala 2.13.x. Current: ${scalaLongVersion}</regexMessage>
+                    </requireProperty>
+                  </rules>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+
     <profile>
       <id>spark-4.1</id>
       <properties>
diff --git a/pom.xml b/pom.xml
index 3419a9e8..faccbd3b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -836,6 +836,48 @@
       </properties>
     </profile>
 
+    <profile>
+      <id>spark-4.0</id>
+      <properties>
+        <shimName>spark-4.0</shimName>
+        <scalaTestVersion>3.2.9</scalaTestVersion>
+        <sparkVersion>4.0.2</sparkVersion>
+        <shortSparkVersion>4.0</shortSparkVersion>
+        <nettyVersion>4.1.111.Final</nettyVersion>
+      </properties>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-enforcer-plugin</artifactId>
+            <version>${maven-enforcer-plugin.version}</version>
+            <executions>
+              <execution>
+                <id>spark40-enforce-java-scala-version</id>
+                <goals>
+                  <goal>enforce</goal>
+                </goals>
+                <configuration>
+                  <rules>
+                    <!-- Spark 4.0 requires JDK 17+ and Scala 2.13.x -->
+                    <requireJavaVersion>
+                      <version>[17,)</version>
+                      <message>Spark 4.0 requires JDK 17 or higher. Current: ${java.version}</message>
+                    </requireJavaVersion>
+                    <requireProperty>
+                      <property>scalaLongVersion</property>
+                      <regex>2\.13\.\d+</regex>
+                      <regexMessage>Spark 4.0 requires Scala 2.13.x. Current: ${scalaLongVersion}</regexMessage>
+                    </requireProperty>
+                  </rules>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+
     <profile>
       <id>spark-4.1</id>
       <properties>
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/auron/InterceptedValidateSparkPlan.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/auron/InterceptedValidateSparkPlan.scala
index d8b574b3..b61dba4e 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/auron/InterceptedValidateSparkPlan.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/auron/InterceptedValidateSparkPlan.scala
@@ -25,7 +25,7 @@ import org.apache.auron.sparkver
 
 object InterceptedValidateSparkPlan extends Logging {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   def validate(plan: SparkPlan): Unit = {
     import org.apache.spark.sql.execution.adaptive.BroadcastQueryStageExec
     import org.apache.spark.sql.execution.auron.plan.NativeRenameColumnsBase
@@ -79,7 +79,7 @@ object InterceptedValidateSparkPlan extends Logging {
     throw new UnsupportedOperationException("validate is not supported in spark 3.0.3 or 3.1.3")
   }
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   private def errorOnInvalidBroadcastQueryStage(plan: SparkPlan): Unit = {
     import org.apache.spark.sql.execution.adaptive.InvalidAQEPlanException
     throw InvalidAQEPlanException("Invalid broadcast query stage", plan)
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/auron/ShimsImpl.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/auron/ShimsImpl.scala
index b1339ee4..cc176314 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/auron/ShimsImpl.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/auron/ShimsImpl.scala
@@ -125,10 +125,12 @@ class ShimsImpl extends Shims with Logging {
   override def shimVersion: String = "spark-3.4"
   @sparkver("3.5")
   override def shimVersion: String = "spark-3.5"
+  @sparkver("4.0")
+  override def shimVersion: String = "spark-4.0"
   @sparkver("4.1")
   override def shimVersion: String = "spark-4.1"
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def initExtension(): Unit = {
     ValidateSparkPlanInjector.inject()
 
@@ -288,16 +290,16 @@ class ShimsImpl extends Shims with Logging {
       child: SparkPlan): NativeGenerateBase =
     NativeGenerateExec(generator, requiredChildOutput, outer, generatorOutput, child)
 
-  @sparkver("3.4 / 3.5 / 4.1")
+  @sparkver("3.4 / 3.5 / 4.0 / 4.1")
   private def effectiveLimit(rawLimit: Int): Int =
     if (rawLimit == -1) Int.MaxValue else rawLimit
 
-  @sparkver("3.4 / 3.5 / 4.1")
+  @sparkver("3.4 / 3.5 / 4.0 / 4.1")
   override def getLimitAndOffset(plan: GlobalLimitExec): (Int, Int) = {
     (effectiveLimit(plan.limit), plan.offset)
   }
 
-  @sparkver("3.4 / 3.5 / 4.1")
+  @sparkver("3.4 / 3.5 / 4.0 / 4.1")
   override def getLimitAndOffset(plan: TakeOrderedAndProjectExec): (Int, Int) = {
     (effectiveLimit(plan.limit), plan.offset)
   }
@@ -311,7 +313,7 @@ class ShimsImpl extends Shims with Logging {
   override def createNativeLocalLimitExec(limit: Int, child: SparkPlan): NativeLocalLimitBase =
     NativeLocalLimitExec(limit, child)
 
-  @sparkver("3.4 / 3.5 / 4.1")
+  @sparkver("3.4 / 3.5 / 4.0 / 4.1")
   override def getLimitAndOffset(plan: CollectLimitExec): (Int, Int) = {
     (effectiveLimit(plan.limit), plan.offset)
   }
@@ -459,7 +461,7 @@ class ShimsImpl extends Shims with Logging {
       length: Long,
       numRecords: Long): FileSegment = new FileSegment(file, offset, length)
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def commit(
       dep: ShuffleDependency[_, _, _],
       shuffleBlockResolver: IndexShuffleBlockResolver,
@@ -634,7 +636,7 @@ class ShimsImpl extends Shims with Logging {
     expr.asInstanceOf[AggregateExpression].filter
   }
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   private def isAQEShuffleRead(exec: SparkPlan): Boolean = {
     import org.apache.spark.sql.execution.adaptive.AQEShuffleReadExec
     exec.isInstanceOf[AQEShuffleReadExec]
@@ -646,7 +648,7 @@ class ShimsImpl extends Shims with Logging {
     exec.isInstanceOf[CustomShuffleReaderExec]
   }
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   private def executeNativeAQEShuffleReader(exec: SparkPlan): NativeRDD = {
     import org.apache.spark.sql.execution.adaptive.AQEShuffleReadExec
     import org.apache.spark.sql.execution.CoalescedMapperPartitionSpec
@@ -946,7 +948,7 @@ class ShimsImpl extends Shims with Logging {
     }
   }
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def getSqlContext(sparkPlan: SparkPlan): SQLContext =
     sparkPlan.session.sqlContext
 
@@ -968,7 +970,7 @@ class ShimsImpl extends Shims with Logging {
       size: Long): PartitionedFile =
     PartitionedFile(partitionValues, filePath, offset, size)
 
-  @sparkver("3.4 / 3.5 / 4.1")
+  @sparkver("3.4 / 3.5 / 4.0 / 4.1")
   override def getPartitionedFile(
       partitionValues: InternalRow,
       filePath: String,
@@ -979,7 +981,7 @@ class ShimsImpl extends Shims with Logging {
     PartitionedFile(partitionValues, SparkPath.fromPath(new Path(filePath)), offset, size)
   }
 
-  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def getMinPartitionNum(sparkSession: SparkSession): Int =
     sparkSession.sessionState.conf.filesMinPartitionNum
       .getOrElse(sparkSession.sparkContext.defaultParallelism)
@@ -1002,13 +1004,13 @@ class ShimsImpl extends Shims with Logging {
   }
 
   @nowarn("cat=unused") // Some params temporarily unused
-  @sparkver("3.4 / 3.5 / 4.1")
+  @sparkver("3.4 / 3.5 / 4.0 / 4.1")
   private def convertPromotePrecision(
       e: Expression,
       isPruningExpr: Boolean,
       fallback: Expression => pb.PhysicalExprNode): Option[pb.PhysicalExprNode] = None
 
-  @sparkver("3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   private def convertBloomFilterAgg(agg: AggregateFunction): Option[pb.PhysicalAggExprNode] = {
     import org.apache.spark.sql.catalyst.expressions.aggregate.BloomFilterAggregate
     agg match {
@@ -1038,7 +1040,7 @@ class ShimsImpl extends Shims with Logging {
   @sparkver("3.0 / 3.1 / 3.2")
   private def convertBloomFilterAgg(agg: AggregateFunction): Option[pb.PhysicalAggExprNode] = None
 
-  @sparkver("3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   private def convertBloomFilterMightContain(
       e: Expression,
       isPruningExpr: Boolean,
@@ -1073,7 +1075,7 @@ class ShimsImpl extends Shims with Logging {
     exec.initialPlan
   }
 
-  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def getAdaptiveInputPlan(exec: AdaptiveSparkPlanExec): SparkPlan = {
     exec.inputPlan
   }
@@ -1103,7 +1105,7 @@ class ShimsImpl extends Shims with Logging {
       })
   }
 
-  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def getJoinBuildSide(exec: SparkPlan): JoinBuildSide = {
     import org.apache.spark.sql.catalyst.optimizer.BuildLeft
     convertJoinBuildSide(
@@ -1114,19 +1116,19 @@ class ShimsImpl extends Shims with Logging {
       })
   }
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def getIsSkewJoinFromSHJ(exec: ShuffledHashJoinExec): Boolean = exec.isSkewJoin
 
   @sparkver("3.0 / 3.1")
   override def getIsSkewJoinFromSHJ(exec: ShuffledHashJoinExec): Boolean = false
 
-  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def getShuffleOrigin(exec: ShuffleExchangeExec): Option[Any] = Some(exec.shuffleOrigin)
 
   @sparkver("3.0")
   override def getShuffleOrigin(exec: ShuffleExchangeExec): Option[Any] = None
 
-  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def isNullAwareAntiJoin(exec: BroadcastHashJoinExec): Boolean =
     exec.isNullAwareAntiJoin
 
@@ -1137,7 +1139,7 @@ class ShimsImpl extends Shims with Logging {
 case class ForceNativeExecutionWrapper(override val child: SparkPlan)
     extends ForceNativeExecutionWrapperBase(child) {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
@@ -1152,6 +1154,6 @@ case class NativeExprWrapper(
     override val nullable: Boolean)
     extends NativeExprWrapperBase(nativeExpr, dataType, nullable) {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def withNewChildrenInternal(newChildren: IndexedSeq[Expression]): Expression = copy()
 }
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/ConvertToNativeExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/ConvertToNativeExec.scala
index 2028ac1f..ead37c9f 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/ConvertToNativeExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/ConvertToNativeExec.scala
@@ -22,7 +22,7 @@ import org.apache.auron.sparkver
 
 case class ConvertToNativeExec(override val child: SparkPlan) extends ConvertToNativeBase(child) {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeAggExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeAggExec.scala
index f9623bd2..c72569d5 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeAggExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeAggExec.scala
@@ -44,22 +44,22 @@ case class NativeAggExec(
       child)
     with BaseAggregateExec {
 
-  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override val requiredChildDistributionExpressions: Option[Seq[Expression]] =
     theRequiredChildDistributionExpressions
 
-  @sparkver("3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override val initialInputBufferOffset: Int = theInitialInputBufferOffset
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def isStreaming: Boolean = false
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def numShufflePartitions: Option[Int] = None
 
   override def resultExpressions: Seq[NamedExpression] = outputAttributes
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeBroadcastExchangeExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeBroadcastExchangeExec.scala
index 3a9ad1a9..1f0434ce 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeBroadcastExchangeExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeBroadcastExchangeExec.scala
@@ -43,7 +43,7 @@ case class NativeBroadcastExchangeExec(mode: BroadcastMode, override val child:
     relationFuturePromise.future
   }
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeCollectLimitExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeCollectLimitExec.scala
index 4af2597f..476db05e 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeCollectLimitExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeCollectLimitExec.scala
@@ -23,7 +23,7 @@ import org.apache.auron.sparkver
 case class NativeCollectLimitExec(limit: Int, offset: Int, override val child: SparkPlan)
     extends NativeCollectLimitBase(limit, offset, child) {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeExpandExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeExpandExec.scala
index d83a1b1f..4b3d221f 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeExpandExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeExpandExec.scala
@@ -28,7 +28,7 @@ case class NativeExpandExec(
     override val child: SparkPlan)
     extends NativeExpandBase(projections, output, child) {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeFilterExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeFilterExec.scala
index 0b51523f..d5335101 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeFilterExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeFilterExec.scala
@@ -24,7 +24,7 @@ import org.apache.auron.sparkver
 case class NativeFilterExec(condition: Expression, override val child: SparkPlan)
     extends NativeFilterBase(condition, child) {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeGenerateExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeGenerateExec.scala
index 3d2a1510..04ad22db 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeGenerateExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeGenerateExec.scala
@@ -30,7 +30,7 @@ case class NativeGenerateExec(
     override val child: SparkPlan)
     extends NativeGenerateBase(generator, requiredChildOutput, outer, generatorOutput, child) {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeGlobalLimitExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeGlobalLimitExec.scala
index 4b077812..4a1407ca 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeGlobalLimitExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeGlobalLimitExec.scala
@@ -23,7 +23,7 @@ import org.apache.auron.sparkver
 case class NativeGlobalLimitExec(limit: Int, offset: Int, override val child: SparkPlan)
     extends NativeGlobalLimitBase(limit, offset, child) {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeLocalLimitExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeLocalLimitExec.scala
index 4b44aca9..bb84b180 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeLocalLimitExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeLocalLimitExec.scala
@@ -23,7 +23,7 @@ import org.apache.auron.sparkver
 case class NativeLocalLimitExec(limit: Int, override val child: SparkPlan)
     extends NativeLocalLimitBase(limit, child) {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeParquetInsertIntoHiveTableExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeParquetInsertIntoHiveTableExec.scala
index e13f1346..d7311d44 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeParquetInsertIntoHiveTableExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeParquetInsertIntoHiveTableExec.scala
@@ -69,7 +69,7 @@ case class NativeParquetInsertIntoHiveTableExec(
       metrics)
   }
 
-  @sparkver("4.1")
+  @sparkver("4.0 / 4.1")
   override protected def getInsertIntoHiveTableCommand(
       table: CatalogTable,
       partition: Map[String, Option[String]],
@@ -88,7 +88,7 @@ case class NativeParquetInsertIntoHiveTableExec(
       metrics)
   }
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
@@ -295,7 +295,7 @@ case class NativeParquetInsertIntoHiveTableExec(
     }
   }
 
-  @sparkver("4.1")
+  @sparkver("4.0 / 4.1")
   class AuronInsertIntoHiveTable41(
       table: CatalogTable,
       partition: Map[String, Option[String]],
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeParquetSinkExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeParquetSinkExec.scala
index 5b548eda..78056cbd 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeParquetSinkExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeParquetSinkExec.scala
@@ -31,7 +31,7 @@ case class NativeParquetSinkExec(
     override val metrics: Map[String, SQLMetric])
     extends NativeParquetSinkBase(sparkSession, table, partition, child, metrics) {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativePartialTakeOrderedExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativePartialTakeOrderedExec.scala
index eafb355c..ec1563c8 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativePartialTakeOrderedExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativePartialTakeOrderedExec.scala
@@ -29,7 +29,7 @@ case class NativePartialTakeOrderedExec(
     override val metrics: Map[String, SQLMetric])
     extends NativePartialTakeOrderedBase(limit, sortOrder, child, metrics) {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeProjectExecProvider.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeProjectExecProvider.scala
index 6902c9f5..e341dffd 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeProjectExecProvider.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeProjectExecProvider.scala
@@ -22,7 +22,7 @@ import org.apache.spark.sql.execution.SparkPlan
 import org.apache.auron.sparkver
 
 case object NativeProjectExecProvider {
-  @sparkver("3.4 / 3.5 / 4.1")
+  @sparkver("3.4 / 3.5 / 4.0 / 4.1")
   def provide(projectList: Seq[NamedExpression], child: SparkPlan): NativeProjectBase = {
     import org.apache.spark.sql.execution.OrderPreservingUnaryExecNode
     import org.apache.spark.sql.execution.PartitioningPreservingUnaryExecNode
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeRenameColumnsExecProvider.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeRenameColumnsExecProvider.scala
index b3278248..6e62ba14 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeRenameColumnsExecProvider.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeRenameColumnsExecProvider.scala
@@ -21,7 +21,7 @@ import org.apache.spark.sql.execution.SparkPlan
 import org.apache.auron.sparkver
 
 case object NativeRenameColumnsExecProvider {
-  @sparkver("3.4 / 3.5 / 4.1")
+  @sparkver("3.4 / 3.5 / 4.0 / 4.1")
   def provide(child: SparkPlan, renamedColumnNames: Seq[String]): NativeRenameColumnsBase = {
     import org.apache.spark.sql.catalyst.expressions.NamedExpression
     import org.apache.spark.sql.catalyst.expressions.SortOrder
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeShuffleExchangeExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeShuffleExchangeExec.scala
index d23e07c3..ae828bf2 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeShuffleExchangeExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeShuffleExchangeExec.scala
@@ -131,7 +131,7 @@ case class NativeShuffleExchangeExec(
         internalWrite(rdd, dep, mapId, context, partition)
       }
 
-      @sparkver("4.1")
+      @sparkver("4.0 / 4.1")
       override def write(
           inputs: Iterator[_],
           dep: ShuffleDependency[_, _, _],
@@ -194,7 +194,7 @@ case class NativeShuffleExchangeExec(
   // for databricks testing
   val causedBroadcastJoinBuildOOM = false
 
-  @sparkver("3.5 / 4.1")
+  @sparkver("3.5 / 4.0 / 4.1")
   override def advisoryPartitionSize: Option[Long] = None
 
   // If users specify the num partitions via APIs like `repartition`, we shouldn't change it.
@@ -203,13 +203,13 @@ case class NativeShuffleExchangeExec(
   override def canChangeNumPartitions: Boolean =
     outputPartitioning != SinglePartition
 
-  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def shuffleOrigin: org.apache.spark.sql.execution.exchange.ShuffleOrigin = {
     import org.apache.spark.sql.execution.exchange.ShuffleOrigin;
     _shuffleOrigin.get.asInstanceOf[ShuffleOrigin]
   }
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
@@ -217,7 +217,7 @@ case class NativeShuffleExchangeExec(
   override def withNewChildren(newChildren: Seq[SparkPlan]): SparkPlan =
     copy(child = newChildren.head)
 
-  @sparkver("4.1")
+  @sparkver("4.0 / 4.1")
   override def shuffleId: Int = {
     shuffleDependency.shuffleId
   }
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeSortExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeSortExec.scala
index 1e1896f2..6d47afb8 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeSortExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeSortExec.scala
@@ -27,7 +27,7 @@ case class NativeSortExec(
     override val child: SparkPlan)
     extends NativeSortBase(sortOrder, global, child) {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeTakeOrderedExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeTakeOrderedExec.scala
index 310d22f9..2c939c4f 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeTakeOrderedExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeTakeOrderedExec.scala
@@ -28,7 +28,7 @@ case class NativeTakeOrderedExec(
     override val child: SparkPlan)
     extends NativeTakeOrderedBase(limit, offset, sortOrder, child) {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeUnionExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeUnionExec.scala
index 8406f0fc..f4d4ad44 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeUnionExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeUnionExec.scala
@@ -26,7 +26,7 @@ case class NativeUnionExec(
     override val output: Seq[Attribute])
     extends NativeUnionBase(children, output) {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildrenInternal(newChildren: IndexedSeq[SparkPlan]): SparkPlan =
     copy(children = newChildren)
 
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeWindowExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeWindowExec.scala
index 7f2b2ff6..028c41a2 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeWindowExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeWindowExec.scala
@@ -31,7 +31,7 @@ case class NativeWindowExec(
     override val child: SparkPlan)
     extends NativeWindowBase(windowExpression, partitionSpec, orderSpec, groupLimit, child) {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildInternal(newChild: SparkPlan): SparkPlan =
     copy(child = newChild)
 
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronBlockStoreShuffleReader.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronBlockStoreShuffleReader.scala
index 64d239bd..cd8a0ba0 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronBlockStoreShuffleReader.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronBlockStoreShuffleReader.scala
@@ -41,7 +41,7 @@ class AuronBlockStoreShuffleReader[K, C](
   private val _ = mapOutputTracker
 
   override def readBlocks(): Iterator[InputStream] = {
-    @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+    @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
     def fetchIterator = new ShuffleBlockFetcherIterator(
       context,
       blockManager.blockStoreClient,
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronRssShuffleManagerBase.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronRssShuffleManagerBase.scala
index e9bf42e5..f83a1636 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronRssShuffleManagerBase.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronRssShuffleManagerBase.scala
@@ -77,7 +77,7 @@ abstract class AuronRssShuffleManagerBase(_conf: SparkConf) extends ShuffleManag
       context: TaskContext,
       metrics: ShuffleWriteMetricsReporter): ShuffleWriter[K, V]
 
-  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def getReader[K, C](
       handle: ShuffleHandle,
       startMapIndex: Int,
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronShuffleManager.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronShuffleManager.scala
index 5a73cd77..eba8b15f 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronShuffleManager.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronShuffleManager.scala
@@ -52,7 +52,7 @@ class AuronShuffleManager(conf: SparkConf) extends ShuffleManager with Logging {
     sortShuffleManager.registerShuffle(shuffleId, dependency)
   }
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def getReader[K, C](
       handle: ShuffleHandle,
       startMapIndex: Int,
@@ -67,7 +67,7 @@ class AuronShuffleManager(conf: SparkConf) extends ShuffleManager with Logging {
 
       @sparkver("3.2")
       def shuffleMergeFinalized = baseShuffleHandle.dependency.shuffleMergeFinalized
-      @sparkver("3.3 / 3.4 / 3.5 / 4.1")
+      @sparkver("3.3 / 3.4 / 3.5 / 4.0 / 4.1")
       def shuffleMergeFinalized = baseShuffleHandle.dependency.isShuffleMergeFinalizedMarked
 
       val (blocksByAddress, canEnableBatchFetch) =
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronShuffleWriter.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronShuffleWriter.scala
index a6d57df1..7e670b71 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronShuffleWriter.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronShuffleWriter.scala
@@ -23,6 +23,6 @@ import org.apache.auron.sparkver
 class AuronShuffleWriter[K, V](metrics: ShuffleWriteMetricsReporter)
     extends AuronShuffleWriterBase[K, V](metrics) {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def getPartitionLengths(): Array[Long] = partitionLengths
 }
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeBroadcastJoinExec.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeBroadcastJoinExec.scala
index 4244642e..f04770f0 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeBroadcastJoinExec.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeBroadcastJoinExec.scala
@@ -48,7 +48,7 @@ case class NativeBroadcastJoinExec(
 
   override val condition: Option[Expression] = None
 
-  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def buildSide: org.apache.spark.sql.catalyst.optimizer.BuildSide =
     broadcastSide match {
       case JoinBuildLeft => org.apache.spark.sql.catalyst.optimizer.BuildLeft
@@ -61,7 +61,7 @@ case class NativeBroadcastJoinExec(
     case JoinBuildRight => org.apache.spark.sql.execution.joins.BuildRight
   }
 
-  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def requiredChildDistribution
       : List[org.apache.spark.sql.catalyst.plans.physical.Distribution] = {
     import org.apache.spark.sql.catalyst.plans.physical.BroadcastDistribution
@@ -80,22 +80,22 @@ case class NativeBroadcastJoinExec(
   override def rewriteKeyExprToLong(exprs: Seq[Expression]): Seq[Expression] =
     HashJoin.rewriteKeyExpr(exprs)
 
-  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def supportCodegen: Boolean = false
 
-  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def inputRDDs(): Nothing = {
     throw new NotImplementedError("NativeBroadcastJoin dose not support codegen")
   }
 
-  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def prepareRelation(
       ctx: org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext)
       : org.apache.spark.sql.execution.joins.HashedRelationInfo = {
     throw new NotImplementedError("NativeBroadcastJoin dose not support codegen")
   }
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override protected def withNewChildrenInternal(
       newLeft: SparkPlan,
       newRight: SparkPlan): SparkPlan =
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeShuffledHashJoinExecProvider.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeShuffledHashJoinExecProvider.scala
index 44db7391..1e68e8ba 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeShuffledHashJoinExecProvider.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeShuffledHashJoinExecProvider.scala
@@ -29,7 +29,7 @@ import org.apache.auron.sparkver
 
 case object NativeShuffledHashJoinExecProvider {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   def provide(
       left: SparkPlan,
       right: SparkPlan,
diff --git a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeSortMergeJoinExecProvider.scala b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeSortMergeJoinExecProvider.scala
index e77ec6a3..d815d56a 100644
--- a/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeSortMergeJoinExecProvider.scala
+++ b/spark-extension-shims-spark/src/main/scala/org/apache/spark/sql/execution/joins/auron/plan/NativeSortMergeJoinExecProvider.scala
@@ -25,7 +25,7 @@ import org.apache.auron.sparkver
 
 case object NativeSortMergeJoinExecProvider {
 
-  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   def provide(
       left: SparkPlan,
       right: SparkPlan,
diff --git a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/execution/AuronAdaptiveQueryExecSuite.scala b/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/execution/AuronAdaptiveQueryExecSuite.scala
index dc4f9ff5..eb1ca490 100644
--- a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/execution/AuronAdaptiveQueryExecSuite.scala
+++ b/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/execution/AuronAdaptiveQueryExecSuite.scala
@@ -20,7 +20,7 @@ import org.apache.spark.sql.AuronQueryTest
 
 import org.apache.auron.{sparkverEnableMembers, BaseAuronSQLSuite}
 
-@sparkverEnableMembers("3.5 / 4.1")
+@sparkverEnableMembers("3.5 / 4.0 / 4.1")
 class AuronAdaptiveQueryExecSuite extends AuronQueryTest with BaseAuronSQLSuite {
 
   import org.apache.spark.scheduler.{SparkListener, SparkListenerEvent}
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronColumnarArray.scala b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronColumnarArray.scala
index 4930f991..7d998b0a 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronColumnarArray.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronColumnarArray.scala
@@ -166,7 +166,7 @@ class AuronColumnarArray(data: AuronColumnVector, offset: Int, length: Int) exte
     throw new UnsupportedOperationException
   }
 
-  @sparkver("4.1")
+  @sparkver("4.0 / 4.1")
   override def getVariant(i: Int): org.apache.spark.unsafe.types.VariantVal = {
     throw new UnsupportedOperationException
   }
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronColumnarBatchRow.scala b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronColumnarBatchRow.scala
index 62c6ed96..9908b346 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronColumnarBatchRow.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronColumnarBatchRow.scala
@@ -146,7 +146,7 @@ class AuronColumnarBatchRow(columns: Array[AuronColumnVector], var rowId: Int =
     throw new UnsupportedOperationException
   }
 
-  @sparkver("4.1")
+  @sparkver("4.0 / 4.1")
   override def getVariant(i: Int): org.apache.spark.unsafe.types.VariantVal = {
     throw new UnsupportedOperationException
   }
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronColumnarStruct.scala b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronColumnarStruct.scala
index 75842e6e..8cee4d67 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronColumnarStruct.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/columnar/AuronColumnarStruct.scala
@@ -155,7 +155,7 @@ class AuronColumnarStruct(data: AuronColumnVector, rowId: Int) extends InternalR
     throw new UnsupportedOperationException
   }
 
-  @sparkver("4.1")
+  @sparkver("4.0 / 4.1")
   override def getVariant(i: Int): org.apache.spark.unsafe.types.VariantVal = {
     throw new UnsupportedOperationException
   }
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeBroadcastExchangeBase.scala b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeBroadcastExchangeBase.scala
index d51e0830..f3fb5f25 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeBroadcastExchangeBase.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/plan/NativeBroadcastExchangeBase.scala
@@ -268,7 +268,7 @@ abstract class NativeBroadcastExchangeBase(mode: BroadcastMode, override val chi
     }
   }
 
-  @sparkver("4.1")
+  @sparkver("4.0 / 4.1")
   private def getRelationFuture = {
     SQLExecution.withThreadLocalCaptured[Broadcast[Any]](
       this.session.sqlContext.sparkSession,
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronShuffleDependency.scala b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronShuffleDependency.scala
index 5243958d..cc1c0d5e 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronShuffleDependency.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/execution/auron/shuffle/AuronShuffleDependency.scala
@@ -53,7 +53,7 @@ class AuronShuffleDependency[K: ClassTag, V: ClassTag, C: ClassTag](
   def getInputRdd: RDD[_ <: Product2[K, V]] = null
 
   // For Spark 4+ compatibility: _rdd is required to create NativeRDD.ShuffleWrite in ShuffleWriteProcessor.write
-  @sparkver("4.1")
+  @sparkver("4.0 / 4.1")
   def getInputRdd: RDD[_ <: Product2[K, V]] = _rdd
 }
 
diff --git a/spark-extension/src/main/scala/org/apache/spark/sql/hive/execution/auron/plan/NativeHiveTableScanBase.scala b/spark-extension/src/main/scala/org/apache/spark/sql/hive/execution/auron/plan/NativeHiveTableScanBase.scala
index 2e57ba6e..9b2c0273 100644
--- a/spark-extension/src/main/scala/org/apache/spark/sql/hive/execution/auron/plan/NativeHiveTableScanBase.scala
+++ b/spark-extension/src/main/scala/org/apache/spark/sql/hive/execution/auron/plan/NativeHiveTableScanBase.scala
@@ -143,7 +143,7 @@ abstract class NativeHiveTableScanBase(basedHiveScan: HiveTableScanExec)
 
   override protected def doCanonicalize(): SparkPlan = basedHiveScan.canonicalized
 
-  @sparkver("3.0 / 3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.1")
+  @sparkver("3.0 / 3.1 / 3.2 / 3.3 / 3.4 / 3.5 / 4.0 / 4.1")
   override def simpleString(maxFields: Int): String =
     s"$nodeName (${basedHiveScan.simpleString(maxFields)})"
 }
