This is an automated email from the ASF dual-hosted git repository.

richox pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/auron.git


The following commit(s) were added to refs/heads/master by this push:
     new 843f7abc [AURON #1654] Add AuronQueryTest base class for SQL testing 
(#1655)
843f7abc is described below

commit 843f7abc5185d559ce9d697316d6eb38b46ff48e
Author: Thomas <[email protected]>
AuthorDate: Tue Nov 25 12:44:29 2025 +0800

    [AURON #1654] Add AuronQueryTest base class for SQL testing (#1655)
    
    * Introduce AuronQueryTest
    
    * fixup
    
    * fixup
    
    * fixup
    
    * fixup
    
    * fixup
    
    * fixup
    
    * fixup
    
    * fixup
    
    * fixup
    
    * fixup
    
    * format code style
---
 .../AuronCheckConvertBroadcastExchangeSuite.scala  |  10 +-
 .../AuronCheckConvertShuffleExchangeSuite.scala    |  10 +-
 .../AuronEmptyNativeRddSuite.scala                 |   6 +-
 .../AuronFunctionSuite.scala                       | 247 +++++----------------
 .../AuronQuerySuite.scala                          | 184 +++++++--------
 .../AuronSQLTestHelper.scala                       |   2 +-
 .../BaseAuronSQLSuite.scala                        |   2 +-
 .../EmptyNativeRddSuite.scala                      |  10 +-
 .../NativeConvertersSuite.scala                    |  10 +-
 .../org/apache/spark/sql/AuronQueryTest.scala      |  90 ++++++++
 .../spark/sql/auron/BuildInfoAuronSQLSuite.scala   |  38 ----
 .../AuronAdaptiveQueryExecSuite.scala              |  12 +-
 .../BuildInfoInSparkUISuite.scala}                 |  10 +-
 13 files changed, 258 insertions(+), 373 deletions(-)

diff --git 
a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronCheckConvertBroadcastExchangeSuite.scala
 
b/spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronCheckConvertBroadcastExchangeSuite.scala
similarity index 97%
rename from 
spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronCheckConvertBroadcastExchangeSuite.scala
rename to 
spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronCheckConvertBroadcastExchangeSuite.scala
index e04d4bea..73ba99cc 100644
--- 
a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronCheckConvertBroadcastExchangeSuite.scala
+++ 
b/spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronCheckConvertBroadcastExchangeSuite.scala
@@ -14,19 +14,19 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.spark.sql.auron
+package org.apache.auron
 
-import org.apache.spark.sql.{QueryTest, Row, SparkSession}
+import org.apache.spark.sql.{AuronQueryTest, Row, SparkSession}
+import org.apache.spark.sql.auron.AuronConverters
 import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec
 import org.apache.spark.sql.execution.auron.plan.NativeBroadcastExchangeExec
 import org.apache.spark.sql.execution.exchange.BroadcastExchangeExec
 import org.apache.spark.sql.test.SharedSparkSession
 
 class AuronCheckConvertBroadcastExchangeSuite
-    extends QueryTest
+    extends AuronQueryTest
     with SharedSparkSession
-    with AuronSQLTestHelper
-    with org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper {
+    with AuronSQLTestHelper {
   import testImplicits._
 
   test(
diff --git 
a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronCheckConvertShuffleExchangeSuite.scala
 
b/spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronCheckConvertShuffleExchangeSuite.scala
similarity index 94%
rename from 
spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronCheckConvertShuffleExchangeSuite.scala
rename to 
spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronCheckConvertShuffleExchangeSuite.scala
index 1d303064..39721ce9 100644
--- 
a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronCheckConvertShuffleExchangeSuite.scala
+++ 
b/spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronCheckConvertShuffleExchangeSuite.scala
@@ -14,18 +14,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.spark.sql.auron
+package org.apache.auron
 
-import org.apache.spark.sql.{QueryTest, Row, SparkSession}
+import org.apache.spark.sql.{AuronQueryTest, Row, SparkSession}
+import org.apache.spark.sql.auron.AuronConverters
 import org.apache.spark.sql.execution.auron.plan.NativeShuffleExchangeExec
 import org.apache.spark.sql.execution.exchange.ShuffleExchangeExec
 import org.apache.spark.sql.test.SharedSparkSession
 
 class AuronCheckConvertShuffleExchangeSuite
-    extends QueryTest
+    extends AuronQueryTest
     with SharedSparkSession
-    with AuronSQLTestHelper
-    with org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper {
+    with AuronSQLTestHelper {
 
   test(
     "test set auron shuffle manager convert to native shuffle exchange where 
set spark.auron.enable is true") {
diff --git 
a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronEmptyNativeRddSuite.scala
 
b/spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronEmptyNativeRddSuite.scala
similarity index 95%
rename from 
spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronEmptyNativeRddSuite.scala
rename to 
spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronEmptyNativeRddSuite.scala
index 6de32bd7..7998ac7b 100644
--- 
a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronEmptyNativeRddSuite.scala
+++ 
b/spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronEmptyNativeRddSuite.scala
@@ -14,13 +14,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.spark.sql.auron
+package org.apache.auron
 
+import org.apache.spark.sql.AuronQueryTest
+import org.apache.spark.sql.auron.{AuronConverters, EmptyNativeRDD, NativeRDD}
 import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec
 import org.apache.spark.sql.execution.auron.plan.{NativeOrcScanExec, 
NativeParquetScanExec}
 
 class AuronEmptyNativeRddSuite
-    extends org.apache.spark.sql.QueryTest
+    extends AuronQueryTest
     with BaseAuronSQLSuite
     with AuronSQLTestHelper {
 
diff --git 
a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronFunctionSuite.scala
 
b/spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronFunctionSuite.scala
similarity index 64%
rename from 
spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronFunctionSuite.scala
rename to 
spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronFunctionSuite.scala
index 7fbb5d3d..21fbf1cd 100644
--- 
a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronFunctionSuite.scala
+++ 
b/spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronFunctionSuite.scala
@@ -14,28 +14,21 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.spark.sql.auron
+package org.apache.auron
 
 import java.text.SimpleDateFormat
 
-import scala.collection.mutable.ArrayBuffer
-
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
+import org.apache.spark.sql.AuronQueryTest
 
 import org.apache.auron.util.AuronTestUtils
 
-class AuronFunctionSuite
-    extends org.apache.spark.sql.QueryTest
-    with BaseAuronSQLSuite
-    with AdaptiveSparkPlanHelper {
+class AuronFunctionSuite extends AuronQueryTest with BaseAuronSQLSuite {
 
   test("sum function with float input") {
     if (AuronTestUtils.isSparkV31OrGreater) {
       withTable("t1") {
         sql("create table t1 using parquet as select 1.0f as c1")
-        val df = sql("select sum(c1) from t1")
-        checkAnswer(df, Seq(Row(1.0)))
+        checkSparkAnswerAndOperator("select sum(c1) from t1")
       }
     }
   }
@@ -53,16 +46,7 @@ class AuronFunctionSuite
           |  sha2(concat(c1, version), 512) as sha512
           |from t1
           |""".stripMargin
-      val df = sql(functions)
-      checkAnswer(
-        df,
-        Seq(
-          Row(
-            "562d20689257f3f3a04ee9afb86d0ece2af106cf6c6e5e7d266043088ce5fbc0",
-            "562d20689257f3f3a04ee9afb86d0ece2af106cf6c6e5e7d266043088ce5fbc0",
-            "d0c8e9ccd5c7b3fdbacd2cfd6b4d65ca8489983b5e8c7c64cd77b634",
-            
"77c1199808053619c29e9af2656e1ad2614772f6ea605d5757894d6aec2dfaf34ff6fd662def3b79e429e9ae5ecbfed1",
-            
"c4e27d35517ca62243c1f322d7922dac175830be4668e8a1cf3befdcd287bb5b6f8c5f041c9d89e4609c8cfa242008c7c7133af1685f57bac9052c1212f1d089")))
+      checkSparkAnswerAndOperator(functions)
     }
   }
 
@@ -78,8 +62,7 @@ class AuronFunctionSuite
           |  select md5(concat(c1, version)) as md5 from t1
           |) b on md5(concat(a.c1, a.version)) = b.md5
           |""".stripMargin
-      val df = sql(functions)
-      checkAnswer(df, Seq(Row("9ff36a3857e29335d03cf6bef2147119")))
+      checkSparkAnswerAndOperator(functions)
     }
   }
 
@@ -90,8 +73,7 @@ class AuronFunctionSuite
         """
           |select hash(arr) from t1
           |""".stripMargin
-      val df = sql(functions)
-      checkAnswer(df, Seq(Row(-222940379)))
+      checkSparkAnswerAndOperator(functions)
     }
   }
 
@@ -99,8 +81,7 @@ class AuronFunctionSuite
     withTable("t1") {
       sql("create table t1(c1 double) using parquet")
       sql("insert into t1 values(0.0), (1.1), (2.2)")
-      val df = sql("select expm1(c1) from t1")
-      checkAnswer(df, Seq(Row(0.0), Row(2.0041660239464334), 
Row(8.025013499434122)))
+      checkSparkAnswerAndOperator("select expm1(c1) from t1")
     }
   }
 
@@ -108,8 +89,7 @@ class AuronFunctionSuite
     withTable("t1") {
       sql("create table t1(c1 int) using parquet")
       sql("insert into t1 values(5)")
-      val df = sql("select factorial(c1) from t1")
-      checkAnswer(df, Seq(Row(120)))
+      checkSparkAnswerAndOperator("select factorial(c1) from t1")
     }
   }
 
@@ -117,8 +97,7 @@ class AuronFunctionSuite
     withTable("t1") {
       sql("create table t1(c1 int, c2 string) using parquet")
       sql("insert into t1 values(17, 'Spark SQL')")
-      val df = sql("select hex(c1), hex(c2) from t1")
-      checkAnswer(df, Seq(Row("11", "537061726B2053514C")))
+      checkSparkAnswerAndOperator("select hex(c1), hex(c2) from t1")
     }
   }
 
@@ -127,8 +106,7 @@ class AuronFunctionSuite
       withTable("t1") {
         sql("create table t1(c1 double) using parquet")
         sql("insert into t1 values(10.0), (20.0), (30.0), (31.0), (null)")
-        val df = sql("select stddev_samp(c1) from t1")
-        checkAnswer(df, Seq(Row(9.844626283748239)))
+        checkSparkAnswerAndOperator("select stddev_samp(c1) from t1")
       }
     }
   }
@@ -137,8 +115,7 @@ class AuronFunctionSuite
     withTable("t1") {
       sql("create table t1(c1 string) using parquet")
       sql("insert into t1 values('Auron Spark SQL')")
-      val df = sql("select regexp_extract(c1, '^A(.*)L$', 1) from t1")
-      checkAnswer(df, Seq(Row("uron Spark SQ")))
+      checkSparkAnswerAndOperator("select regexp_extract(c1, '^A(.*)L$', 1) 
from t1")
     }
   }
 
@@ -150,25 +127,9 @@ class AuronFunctionSuite
       sql(s"INSERT INTO t2 VALUES($intPi)")
 
       val scales = -6 to 6
-      val expectedResults = Map(
-        -6 -> 314000000,
-        -5 -> 314200000,
-        -4 -> 314160000,
-        -3 -> 314159000,
-        -2 -> 314159300,
-        -1 -> 314159270,
-        0 -> 314159265,
-        1 -> 314159265,
-        2 -> 314159265,
-        3 -> 314159265,
-        4 -> 314159265,
-        5 -> 314159265,
-        6 -> 314159265)
 
       scales.foreach { scale =>
-        val df = sql(s"SELECT round(c1, $scale) AS xx FROM t2")
-        val expected = expectedResults(scale)
-        checkAnswer(df, Seq(Row(expected)))
+        checkSparkAnswerAndOperator(s"SELECT round(c1, $scale) AS xx FROM t2")
       }
     }
   }
@@ -180,25 +141,9 @@ class AuronFunctionSuite
       val doublePi: Double = math.Pi
       sql(s"insert into t1 values($doublePi)")
       val scales = -6 to 6
-      val expectedResults = Map(
-        -6 -> 0.0,
-        -5 -> 0.0,
-        -4 -> 0.0,
-        -3 -> 0.0,
-        -2 -> 0.0,
-        -1 -> 0.0,
-        0 -> 3.0,
-        1 -> 3.1,
-        2 -> 3.14,
-        3 -> 3.142,
-        4 -> 3.1416,
-        5 -> 3.14159,
-        6 -> 3.141593)
 
       scales.foreach { scale =>
-        val df = sql(s"select round(c1, $scale) from t1")
-        val expected = expectedResults(scale)
-        checkAnswer(df, Seq(Row(expected)))
+        checkSparkAnswerAndOperator(s"select round(c1, $scale) from t1")
       }
     }
   }
@@ -207,29 +152,13 @@ class AuronFunctionSuite
     withTable("t1") {
       sql("CREATE TABLE t1 (c1 FLOAT) USING parquet")
 
-      val floatPi: Float = 3.1415f
+      val floatPi: Float = math.Pi.toFloat
       sql(s"INSERT INTO t1 VALUES($floatPi)")
 
       val scales = -6 to 6
-      val expectedResults = Map(
-        -6 -> 0.0f,
-        -5 -> 0.0f,
-        -4 -> 0.0f,
-        -3 -> 0.0f,
-        -2 -> 0.0f,
-        -1 -> 0.0f,
-        0 -> 3.0f,
-        1 -> 3.1f,
-        2 -> 3.14f,
-        3 -> 3.142f,
-        4 -> 3.1415f,
-        5 -> 3.1415f,
-        6 -> 3.1415f)
 
       scales.foreach { scale =>
-        val df = sql(s"select round(c1, $scale) from t1")
-        val expected = expectedResults(scale)
-        checkAnswer(df, Seq(Row(expected)))
+        checkSparkAnswerAndOperator(s"select round(c1, $scale) from t1")
       }
     }
   }
@@ -242,25 +171,9 @@ class AuronFunctionSuite
       sql(s"INSERT INTO t1 VALUES($shortPi)")
 
       val scales = -6 to 6
-      val expectedResults = Map(
-        -6 -> 0.toShort,
-        -5 -> 0.toShort,
-        -4 -> 30000.toShort,
-        -3 -> 31000.toShort,
-        -2 -> 31400.toShort,
-        -1 -> 31420.toShort,
-        0 -> 31415.toShort,
-        1 -> 31415.toShort,
-        2 -> 31415.toShort,
-        3 -> 31415.toShort,
-        4 -> 31415.toShort,
-        5 -> 31415.toShort,
-        6 -> 31415.toShort)
 
       scales.foreach { scale =>
-        val df = sql(s"SELECT round(c1, $scale) FROM t1")
-        val expected = expectedResults(scale)
-        checkAnswer(df, Seq(Row(expected)))
+        checkSparkAnswerAndOperator(s"SELECT round(c1, $scale) FROM t1")
       }
     }
   }
@@ -273,25 +186,8 @@ class AuronFunctionSuite
       sql(s"INSERT INTO t1 VALUES($longPi)")
 
       val scales = -6 to 6
-      val expectedResults = Map(
-        -6 -> 31415926536000000L,
-        -5 -> 31415926535900000L,
-        -4 -> 31415926535900000L,
-        -3 -> 31415926535898000L,
-        -2 -> 31415926535897900L,
-        -1 -> 31415926535897930L,
-        0 -> 31415926535897932L,
-        1 -> 31415926535897932L,
-        2 -> 31415926535897932L,
-        3 -> 31415926535897932L,
-        4 -> 31415926535897932L,
-        5 -> 31415926535897932L,
-        6 -> 31415926535897932L)
-
       scales.foreach { scale =>
-        val df = sql(s"SELECT round(c1, $scale) FROM t1")
-        val expected = expectedResults(scale)
-        checkAnswer(df, Seq(Row(expected)))
+        checkSparkAnswerAndOperator(s"SELECT round(c1, $scale) FROM t1")
       }
     }
   }
@@ -308,37 +204,48 @@ class AuronFunctionSuite
           |from t1
             """.stripMargin
 
-      val df = sql(functions)
-
-      checkAnswer(df, Seq(Row(8.0, 8.0)))
+      checkSparkAnswerAndOperator(functions)
     }
   }
 
   test("pow/power should accept mixed numeric types and return double") {
-    val df = sql("select pow(2, 3.0), pow(2.0, 3), power(1.5, 2)")
-    checkAnswer(df, Seq(Row(8.0, 8.0, 2.25)))
+    withTable("t1") {
+      sql("create table t1(c1 double, c2 double) using parquet")
+      sql("insert into t1 values(2, 3.0), (2.0, 3), (1.5, 2)")
+      checkSparkAnswerAndOperator("select pow(c1, c2) from t1")
+    }
   }
 
   test("pow: zero base with negative exponent yields +infinity") {
-    val df = sql("select pow(0.0, -2.5), power(0.0, -3)")
-    // Spark prints Infinity as Double.PositiveInfinity
-    checkAnswer(df, Seq(Row(Double.PositiveInfinity, Double.PositiveInfinity)))
+    withTable("t1") {
+      sql("create table t1(c1 double, c2 double) using parquet")
+      sql("insert into t1 values(0.0, -2.5), (0.0, -3)")
+      checkSparkAnswerAndOperator("select pow(c1, c2) from t1")
+    }
   }
 
   test("pow: zero to the zero equals one") {
-    val df = sql("select pow(0.0, 0.0)")
-    checkAnswer(df, Seq(Row(1.0)))
+    withTable("t1") {
+      sql("create table t1(c1 double, c2 double) using parquet")
+      sql("insert into t1 values(0.0, 0.0)")
+      checkSparkAnswerAndOperator("select pow(c1, c2) from t1")
+    }
   }
 
   test("pow: negative base with fractional exponent is NaN") {
-    val df = sql("select pow(-2, 0.5)")
-    assert(df.collect().head.getDouble(0).isNaN)
+    withTable("t1") {
+      sql("create table t1(c1 double, c2 double) using parquet")
+      sql("insert into t1 values(-2, 0.5)")
+      checkSparkAnswerAndOperator("select pow(c1, c2) from t1")
+    }
   }
 
   test("pow null propagation") {
-    val df = sql("select pow(null, 2), power(2, null), pow(null, null)")
-    val row = df.collect().head
-    assert(row.isNullAt(0) && row.isNullAt(1) && row.isNullAt(2))
+    withTable("t1") {
+      sql("create table t1(c1 double, c2 double) using parquet")
+      sql("insert into t1 values(null, 2),(2, null),(null, null)")
+      checkSparkAnswerAndOperator("select pow(c1, c2) from t1")
+    }
   }
 
   test("test function least") {
@@ -380,27 +287,7 @@ class AuronFunctionSuite
           |    test_least
         """.stripMargin
 
-      val df = sql(functions)
-
-      checkAnswer(
-        df,
-        Seq(
-          Row(
-            "a",
-            1,
-            -1,
-            "a",
-            null,
-            "a",
-            -1.0,
-            -1.0,
-            -1.0f,
-            1,
-            "aaaa",
-            false,
-            date,
-            dateTimeStampMin,
-            minValue)))
+      checkSparkAnswerAndOperator(functions)
     }
   }
 
@@ -449,28 +336,7 @@ class AuronFunctionSuite
           |    t1
         """.stripMargin
 
-      val df = sql(functions)
-      checkAnswer(
-        df,
-        Seq(
-          Row(
-            "c",
-            2,
-            2,
-            "ccc",
-            null,
-            "c",
-            2.5,
-            2,
-            2.5f,
-            longMax,
-            2,
-            2,
-            "abc",
-            true,
-            date,
-            dateTimeStampMax)))
-
+      checkSparkAnswerAndOperator(functions)
     }
   }
 
@@ -494,9 +360,7 @@ class AuronFunctionSuite
           |from t1_find_in_set
         """.stripMargin
 
-      val df = sql(functions)
-      df.show()
-      checkAnswer(df, Seq(Row(1, 2, 3, 1, 4, 1, 4, 6, null)))
+      checkSparkAnswerAndOperator(functions)
     }
   }
 
@@ -516,9 +380,7 @@ class AuronFunctionSuite
           |    test_is_nan
         """.stripMargin
 
-      val df = sql(functions)
-      df.show()
-      checkAnswer(df, Seq(Row(true, true, false, false, false)))
+      checkSparkAnswerAndOperator(functions)
     }
   }
 
@@ -532,15 +394,15 @@ class AuronFunctionSuite
              |CAST(NULL AS INT)        AS null_int
              |""".stripMargin)
 
-      val df = sql(s"""SELECT
+      val sqlStr = s"""SELECT
                       |nvl2(null_int, int_val, 999)          AS int_only,
                       |nvl2(1,  str_val, int_val)            AS has_str,
                       |nvl2(null_int, int_val, str_val)      AS str_in_false,
                       |nvl2(1,  arr_val, array(888))         AS has_array,
                       |nvl2(null_int, null_str,  null_str)   AS all_null
-                      |FROM  t1""".stripMargin)
+                      |FROM  t1""".stripMargin
 
-      checkAnswer(df, Row(999, "X", "X", ArrayBuffer(1, 2, 3), null))
+      checkSparkAnswerAndOperator(sqlStr)
     }
   }
 
@@ -556,8 +418,7 @@ class AuronFunctionSuite
           |from t1
                     """.stripMargin
 
-      val df = sql(functions)
-      checkAnswer(df, Seq(Row("base", "base", 4)))
+      checkSparkAnswerAndOperator(functions)
     }
   }
 
@@ -579,9 +440,7 @@ class AuronFunctionSuite
           |from test_levenshtein
         """.stripMargin
 
-      val df = sql(functions)
-      df.show()
-      checkAnswer(df, Seq(Row(null, null, 0, 0, 3, 1, 3, 4)))
+      checkSparkAnswerAndOperator(functions)
     }
   }
 }
diff --git 
a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronQuerySuite.scala
 
b/spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronQuerySuite.scala
similarity index 72%
rename from 
spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronQuerySuite.scala
rename to 
spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronQuerySuite.scala
index 1d368493..9151eec8 100644
--- 
a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronQuerySuite.scala
+++ 
b/spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronQuerySuite.scala
@@ -14,26 +14,21 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.spark.sql.auron
+package org.apache.auron
 
-import scala.collection.mutable.ArrayBuffer
-
-import org.apache.spark.sql.Row
+import org.apache.spark.sql.{AuronQueryTest, Row}
+import org.apache.spark.sql.auron.AuronConf
 
 import org.apache.auron.util.AuronTestUtils
 
-class AuronQuerySuite
-    extends org.apache.spark.sql.QueryTest
-    with BaseAuronSQLSuite
-    with AuronSQLTestHelper {
+class AuronQuerySuite extends AuronQueryTest with BaseAuronSQLSuite with 
AuronSQLTestHelper {
   import testImplicits._
 
   test("test partition path has url encoded character") {
     withTable("t1") {
       sql(
         "create table t1 using parquet PARTITIONED BY (part) as select 1 as 
c1, 2 as c2, 'test test' as part")
-      val df = sql("select * from t1")
-      checkAnswer(df, Seq(Row(1, 2, "test test")))
+      checkSparkAnswerAndOperator("select * from t1")
     }
   }
 
@@ -41,29 +36,26 @@ class AuronQuerySuite
     withTable("t1", "t2") {
       sql("create table t1 using parquet as select 1 as c1, 2 as c2")
       sql("create table t2 using parquet as select 1 as c1, 3 as c3")
-      val df = sql("select 1 from t1 left join t2")
-      checkAnswer(df, Seq(Row(1)))
+      checkSparkAnswerAndOperator("select 1 from t1 left join t2")
     }
   }
 
   test("test filter with year function") {
     withTable("t1") {
       sql("create table t1 using parquet as select '2024-12-18' as event_time")
-      checkAnswer(
-        sql("""
+      checkSparkAnswerAndOperator(s"""
             |select year, count(*)
             |from (select event_time, year(event_time) as year from t1) t
             |where year <= 2024
             |group by year
-            |""".stripMargin),
-        Seq(Row(2024, 1)))
+            |""".stripMargin)
     }
   }
 
   test("test select multiple spark ext functions with the same signature") {
     withTable("t1") {
       sql("create table t1 using parquet as select '2024-12-18' as event_time")
-      checkAnswer(sql("select year(event_time), month(event_time) from t1"), 
Seq(Row(2024, 12)))
+      checkSparkAnswerAndOperator("select year(event_time), month(event_time) 
from t1")
     }
   }
 
@@ -82,9 +74,7 @@ class AuronQuerySuite
         sql(createTableStatement(format))
         sql(
           "insert into test_with_complex_type select 1 as id, map('zero', '0', 
'one', '1') as m, array('test','auron') as l, 'auron' as s")
-        checkAnswer(
-          sql("select id,l,m from test_with_complex_type"),
-          Seq(Row(1, ArrayBuffer("test", "auron"), Map("one" -> "1", "zero" -> 
"0"))))
+        checkSparkAnswerAndOperator("select id,l,m from 
test_with_complex_type")
       })
   }
 
@@ -92,16 +82,14 @@ class AuronQuerySuite
     withTable("t1", "t2") {
       sql("create table t1(c1 binary, c2 int) using parquet")
       sql("insert into t1 values (cast('test1' as binary), 1), (cast('test2' 
as binary), 2)")
-      val df = sql("select c2 from t1 order by c1")
-      checkAnswer(df, Seq(Row(1), Row(2)))
+      checkSparkAnswerAndOperator("select c2 from t1 order by c1")
     }
   }
 
   test("repartition over MapType") {
     withTable("t_map") {
       sql("create table t_map using parquet as select map('a', '1', 'b', '2') 
as data_map")
-      val df = sql("SELECT /*+ repartition(10) */ data_map FROM t_map")
-      checkAnswer(df, Seq(Row(Map("a" -> "1", "b" -> "2"))))
+      checkSparkAnswerAndOperator("SELECT /*+ repartition(10) */ data_map FROM 
t_map")
     }
   }
 
@@ -109,8 +97,7 @@ class AuronQuerySuite
     withTable("t_map_struct") {
       sql(
         "create table t_map_struct using parquet as select named_struct('m', 
map('x', '1')) as data_struct")
-      val df = sql("SELECT /*+ repartition(10) */ data_struct FROM 
t_map_struct")
-      checkAnswer(df, Seq(Row(Row(Map("x" -> "1")))))
+      checkSparkAnswerAndOperator("SELECT /*+ repartition(10) */ data_struct 
FROM t_map_struct")
     }
   }
 
@@ -120,8 +107,7 @@ class AuronQuerySuite
           |create table t_array_map using parquet as
           |select array(map('k1', 1, 'k2', 2), map('k3', 3)) as array_of_map
           |""".stripMargin)
-      val df = sql("SELECT /*+ repartition(10) */ array_of_map FROM 
t_array_map")
-      checkAnswer(df, Seq(Row(Seq(Map("k1" -> 1, "k2" -> 2), Map("k3" -> 3)))))
+      checkSparkAnswerAndOperator("SELECT /*+ repartition(10) */ array_of_map 
FROM t_array_map")
     }
   }
 
@@ -131,8 +117,7 @@ class AuronQuerySuite
           |create table t_struct_map using parquet as
           |select named_struct('id', 101, 'metrics', map('ctr', 0.123d, 'cvr', 
0.045d)) as user_metrics
           |""".stripMargin)
-      val df = sql("SELECT /*+ repartition(10) */ user_metrics FROM 
t_struct_map")
-      checkAnswer(df, Seq(Row(Row(101, Map("ctr" -> 0.123, "cvr" -> 0.045)))))
+      checkSparkAnswerAndOperator("SELECT /*+ repartition(10) */ user_metrics 
FROM t_struct_map")
     }
   }
 
@@ -145,8 +130,8 @@ class AuronQuerySuite
           |  'item2', named_struct('count', 7, 'score', 9.1d)
           |) as map_struct_value
           |""".stripMargin)
-      val df = sql("SELECT /*+ repartition(10) */ map_struct_value FROM 
t_map_struct_value")
-      checkAnswer(df, Seq(Row(Map("item1" -> Row(3, 4.5), "item2" -> Row(7, 
9.1)))))
+      checkSparkAnswerAndOperator(
+        "SELECT /*+ repartition(10) */ map_struct_value FROM 
t_map_struct_value")
     }
   }
 
@@ -159,11 +144,7 @@ class AuronQuerySuite
           |  'outer2', map('inner3', 30)
           |) as nested_map
           |""".stripMargin)
-      val df = sql("SELECT /*+ repartition(10) */ nested_map FROM 
t_nested_map")
-      checkAnswer(
-        df,
-        Seq(Row(
-          Map("outer1" -> Map("inner1" -> 10, "inner2" -> 20), "outer2" -> 
Map("inner3" -> 30)))))
+      checkSparkAnswerAndOperator("SELECT /*+ repartition(10) */ nested_map 
FROM t_nested_map")
     }
   }
 
@@ -176,28 +157,22 @@ class AuronQuerySuite
           |  named_struct('name', 'user2', 'features', map('f3', 3.5d))
           |) as user_feature_array
           |""".stripMargin)
-      val df = sql("SELECT /*+ repartition(10) */ user_feature_array FROM 
t_array_struct_map")
-      checkAnswer(
-        df,
-        Seq(
-          Row(
-            Seq(Row("user1", Map("f1" -> 1.0f, "f2" -> 2.0f)), Row("user2", 
Map("f3" -> 3.5f))))))
+      checkSparkAnswerAndOperator(
+        "SELECT /*+ repartition(10) */ user_feature_array FROM 
t_array_struct_map")
     }
   }
 
   test("log function with negative input") {
     withTable("t1") {
       sql("create table t1 using parquet as select -1 as c1")
-      val df = sql("select ln(c1) from t1")
-      checkAnswer(df, Seq(Row(null)))
+      checkSparkAnswerAndOperator("select ln(c1) from t1")
     }
   }
 
   test("floor function with long input") {
     withTable("t1") {
       sql("create table t1 using parquet as select 1L as c1, 2.2 as c2")
-      val df = sql("select floor(c1), floor(c2) from t1")
-      checkAnswer(df, Seq(Row(1, 2)))
+      checkSparkAnswerAndOperator("select floor(c1), floor(c2) from t1")
     }
   }
 
@@ -213,9 +188,7 @@ class AuronQuerySuite
            | test_hive_orc_impl
            | VALUES(9, '12', 2020)
                """.stripMargin)
-
-      val df = spark.sql("SELECT _col2 FROM test_hive_orc_impl")
-      checkAnswer(df, Row("12"))
+      checkSparkAnswerAndOperator("SELECT _col2 FROM test_hive_orc_impl")
     }
   }
 
@@ -231,7 +204,7 @@ class AuronQuerySuite
               .write
               .orc(path)
             val correctAnswer = Seq(Row(1, 2), Row(3, 4), Row(5, 6), Row(null, 
null))
-            checkAnswer(spark.read.orc(path), correctAnswer)
+            checkSparkAnswerAndOperator(() => spark.read.orc(path))
 
             withTable("t") {
               sql(s"CREATE EXTERNAL TABLE t(c3 INT, c2 INT) USING ORC LOCATION 
'$path'")
@@ -242,7 +215,7 @@ class AuronQuerySuite
                 Seq(Row(null, 2), Row(null, 4), Row(null, 6), Row(null, null))
               }
 
-              checkAnswer(spark.table("t"), expected)
+              checkSparkAnswerAndOperator(() => spark.table("t"))
             }
           }
         }
@@ -263,7 +236,7 @@ class AuronQuerySuite
               .partitionBy("p")
               .orc(path)
             val correctAnswer = Seq(Row(1, 2, 1), Row(3, 4, 2), Row(5, 6, 3), 
Row(null, null, 4))
-            checkAnswer(spark.read.orc(path), correctAnswer)
+            checkSparkAnswerAndOperator(() => spark.read.orc(path))
 
             withTable("t") {
               sql(s"""
@@ -279,7 +252,7 @@ class AuronQuerySuite
                 Seq(Row(null, 2, 1), Row(null, 4, 2), Row(null, 6, 3), 
Row(null, null, 4))
               }
 
-              checkAnswer(spark.table("t"), expected)
+              checkSparkAnswerAndOperator(() => spark.table("t"))
             }
           }
         }
@@ -297,15 +270,13 @@ class AuronQuerySuite
           |union all select '2024-12-18'
           |""".stripMargin)
 
-      checkAnswer(
-        sql("""
+      checkSparkAnswerAndOperator("""
             |select q, count(*)
             |from (select event_time, quarter(event_time) as q from t1) t
             |where q <= 3
             |group by q
             |order by q
-            |""".stripMargin),
-        Seq(Row(1, 1), Row(2, 1), Row(3, 1)))
+            |""".stripMargin)
     }
   }
 
@@ -374,26 +345,25 @@ class AuronQuerySuite
         checkAnswer(sql(q), Seq(expected))
     }
   }
-
   test("test filter with hour function") {
     withEnvConf("spark.auron.datetime.extract.enabled" -> "true") {
       withTable("t_hour") {
         sql("""
-            |create table t_hour using parquet as
-            |select to_timestamp('2024-12-18 01:23:45') as event_time union all
-            |select to_timestamp('2024-12-18 08:00:00') union all
-            |select to_timestamp('2024-12-18 08:59:59')
-            |""".stripMargin)
+              |create table t_hour using parquet as
+              |select to_timestamp('2024-12-18 01:23:45') as event_time union 
all
+              |select to_timestamp('2024-12-18 08:00:00') union all
+              |select to_timestamp('2024-12-18 08:59:59')
+              |""".stripMargin)
 
         // Keep rows where HOUR >= 8, then group by hour
         checkAnswer(
           sql("""
-              |select h, count(*)
-              |from (select hour(event_time) as h from t_hour) t
-              |where h >= 8
-              |group by h
-              |order by h
-              |""".stripMargin),
+                |select h, count(*)
+                |from (select hour(event_time) as h from t_hour) t
+                |where h >= 8
+                |group by h
+                |order by h
+                |""".stripMargin),
           Seq(Row(8, 2)))
       }
     }
@@ -403,20 +373,20 @@ class AuronQuerySuite
     withEnvConf("spark.auron.datetime.extract.enabled" -> "true") {
       withTable("t_minute") {
         sql("""
-            |create table t_minute using parquet as
-            |select to_timestamp('2024-12-18 00:00:00') as event_time union all
-            |select to_timestamp('2024-12-18 00:30:00') union all
-            |select to_timestamp('2024-12-18 12:30:59')
-            |""".stripMargin)
+              |create table t_minute using parquet as
+              |select to_timestamp('2024-12-18 00:00:00') as event_time union all
+              |select to_timestamp('2024-12-18 00:30:00') union all
+              |select to_timestamp('2024-12-18 12:30:59')
+              |""".stripMargin)
 
         // Keep rows where MINUTE = 30, then group by minute
         checkAnswer(
           sql("""
-              |select m, count(*)
-              |from (select minute(event_time) as m from t_minute) t
-              |where m = 30
-              |group by m
-              |""".stripMargin),
+                |select m, count(*)
+                |from (select minute(event_time) as m from t_minute) t
+                |where m = 30
+                |group by m
+                |""".stripMargin),
           Seq(Row(30, 2)))
       }
     }
@@ -426,20 +396,20 @@ class AuronQuerySuite
     withEnvConf("spark.auron.datetime.extract.enabled" -> "true") {
       withTable("t_second") {
         sql("""
-            |create table t_second using parquet as
-            |select to_timestamp('2024-12-18 00:00:00') as event_time union all
-            |select to_timestamp('2024-12-18 01:23:00') union all
-            |select to_timestamp('2024-12-18 23:59:45')
-            |""".stripMargin)
+              |create table t_second using parquet as
+              |select to_timestamp('2024-12-18 00:00:00') as event_time union all
+              |select to_timestamp('2024-12-18 01:23:00') union all
+              |select to_timestamp('2024-12-18 23:59:45')
+              |""".stripMargin)
 
         // Keep rows where SECOND = 0, then group by second
         checkAnswer(
           sql("""
-              |select s, count(*)
-              |from (select second(event_time) as s from t_second) t
-              |where s = 0
-              |group by s
-              |""".stripMargin),
+                |select s, count(*)
+                |from (select second(event_time) as s from t_second) t
+                |where s = 0
+                |group by s
+                |""".stripMargin),
           Seq(Row(0, 2)))
       }
     }
@@ -453,13 +423,13 @@ class AuronQuerySuite
          "create table t_date_parts using parquet as select date'2024-12-18' as d union all select date'2024-12-19'")
         checkAnswer(
           sql("""
-              |select
-              |  hour(d)   as h,
-              |  minute(d) as m,
-              |  second(d) as s
-              |from t_date_parts
-              |order by d
-              |""".stripMargin),
+                |select
+                |  hour(d)   as h,
+                |  minute(d) as m,
+                |  second(d) as s
+                |from t_date_parts
+                |order by d
+                |""".stripMargin),
           Seq(Row(0, 0, 0), Row(0, 0, 0)))
       }
     }
@@ -470,15 +440,15 @@ class AuronQuerySuite
       withTable("t_tz") {
         // Construct: UTC 1970-01-01 00:00:00 → Asia/Shanghai => local 08:00:00
         sql("""
-            |create table t_tz using parquet as
-            |select from_utc_timestamp(to_timestamp('1970-01-01 00:00:00'), 'Asia/Shanghai') as ts
-            |""".stripMargin)
+              |create table t_tz using parquet as
+              |select from_utc_timestamp(to_timestamp('1970-01-01 00:00:00'), 'Asia/Shanghai') as ts
+              |""".stripMargin)
 
         checkAnswer(
           sql("""
-              |select hour(ts), minute(ts), second(ts)
-              |from t_tz
-              |""".stripMargin),
+                |select hour(ts), minute(ts), second(ts)
+                |from t_tz
+                |""".stripMargin),
           Seq(Row(8, 0, 0)))
       }
     }
@@ -488,10 +458,10 @@ class AuronQuerySuite
     withEnvConf("spark.auron.datetime.extract.enabled" -> "true") {
       withTable("t_tz2") {
         sql("""
-            |create table t_tz2 using parquet as
-            |select from_utc_timestamp(to_timestamp('2000-01-01 00:00:00'), 'Asia/Kolkata')   as ts1,  -- +05:30
-            |       from_utc_timestamp(to_timestamp('2000-01-01 00:00:00'), 'Asia/Kathmandu') as ts2   -- +05:45
-            |""".stripMargin)
+              |create table t_tz2 using parquet as
+              |select from_utc_timestamp(to_timestamp('2000-01-01 00:00:00'), 'Asia/Kolkata')   as ts1,  -- +05:30
+              |       from_utc_timestamp(to_timestamp('2000-01-01 00:00:00'), 'Asia/Kathmandu') as ts2   -- +05:45
+              |""".stripMargin)
 
         // Kolkata -> 05:30:00; Kathmandu -> 05:45:00
         checkAnswer(
diff --git a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronSQLTestHelper.scala b/spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronSQLTestHelper.scala
similarity index 97%
rename from spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronSQLTestHelper.scala
rename to spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronSQLTestHelper.scala
index 45b63c3f..cef12dbd 100644
--- a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronSQLTestHelper.scala
+++ b/spark-extension-shims-spark/src/test/scala/org.apache.auron/AuronSQLTestHelper.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.spark.sql.auron
+package org.apache.auron
 
 import org.apache.spark.sql.internal.SQLConf
 
diff --git a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/BaseAuronSQLSuite.scala b/spark-extension-shims-spark/src/test/scala/org.apache.auron/BaseAuronSQLSuite.scala
similarity index 97%
rename from spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/BaseAuronSQLSuite.scala
rename to spark-extension-shims-spark/src/test/scala/org.apache.auron/BaseAuronSQLSuite.scala
index 6e30c960..27105064 100644
--- a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/BaseAuronSQLSuite.scala
+++ b/spark-extension-shims-spark/src/test/scala/org.apache.auron/BaseAuronSQLSuite.scala
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.spark.sql.auron
+package org.apache.auron
 
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.test.SharedSparkSession
diff --git a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/EmptyNativeRddSuite.scala b/spark-extension-shims-spark/src/test/scala/org.apache.auron/EmptyNativeRddSuite.scala
similarity index 85%
rename from spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/EmptyNativeRddSuite.scala
rename to spark-extension-shims-spark/src/test/scala/org.apache.auron/EmptyNativeRddSuite.scala
index 59263544..6a6716e0 100644
--- a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/EmptyNativeRddSuite.scala
+++ b/spark-extension-shims-spark/src/test/scala/org.apache.auron/EmptyNativeRddSuite.scala
@@ -14,12 +14,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.spark.sql.auron
+package org.apache.auron
 
-class EmptyNativeRddSuite
-    extends org.apache.spark.sql.QueryTest
-    with BaseAuronSQLSuite
-    with AuronSQLTestHelper {
+import org.apache.spark.sql.AuronQueryTest
+import org.apache.spark.sql.auron.EmptyNativeRDD
+
+class EmptyNativeRddSuite extends AuronQueryTest with BaseAuronSQLSuite {
 
   test("test empty native rdd") {
     val sc = spark.sparkContext
diff --git a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/NativeConvertersSuite.scala b/spark-extension-shims-spark/src/test/scala/org.apache.auron/NativeConvertersSuite.scala
similarity index 92%
rename from spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/NativeConvertersSuite.scala
rename to spark-extension-shims-spark/src/test/scala/org.apache.auron/NativeConvertersSuite.scala
index d5a1a60c..0a574dfb 100644
--- a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/NativeConvertersSuite.scala
+++ b/spark-extension-shims-spark/src/test/scala/org.apache.auron/NativeConvertersSuite.scala
@@ -14,16 +14,20 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.spark.sql.auron
+package org.apache.auron
 
-import org.apache.spark.sql.QueryTest
+import org.apache.spark.sql.AuronQueryTest
+import org.apache.spark.sql.auron.{AuronConf, NativeConverters}
 import org.apache.spark.sql.catalyst.expressions.Cast
 import org.apache.spark.sql.catalyst.expressions.Literal
 import org.apache.spark.sql.types.{BooleanType, DataType, IntegerType, StringType}
 
 import org.apache.auron.protobuf.ScalarFunction
 
-class NativeConvertersSuite extends QueryTest with BaseAuronSQLSuite with AuronSQLTestHelper {
+class NativeConvertersSuite
+    extends AuronQueryTest
+    with BaseAuronSQLSuite
+    with AuronSQLTestHelper {
 
   private def assertTrimmedCast(rawValue: String, targetType: DataType): Unit = {
     val expr = Cast(Literal.create(rawValue, StringType), targetType)
diff --git a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/AuronQueryTest.scala b/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/AuronQueryTest.scala
new file mode 100644
index 00000000..7bd17b62
--- /dev/null
+++ b/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/AuronQueryTest.scala
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql
+
+import org.apache.spark.sql.auron.NativeSupports
+import org.apache.spark.sql.execution.{LeafExecNode, SparkPlan, UnaryExecNode}
+import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
+import org.apache.spark.sql.test.SQLTestUtils
+import org.scalatest.BeforeAndAfterEach
+
+/**
+ * Base test class under org.apache.spark.sql to use package-private [[SQLTestUtils]]; extends
+ * [[QueryTest]] for comparisons and checks.
+ */
+abstract class AuronQueryTest
+    extends QueryTest
+    with SQLTestUtils
+    with BeforeAndAfterEach
+    with AdaptiveSparkPlanHelper {
+
+  /**
+   * Assert results match vanilla Spark, skip operator checks.
+   */
+  protected def checkSparkAnswer(sqlStr: String): DataFrame = {
+    checkSparkAnswerAndOperator(() => sql(sqlStr), requireNative = false)
+  }
+
+  /**
+   * Assert results match vanilla Spark, fail if any operator is not native.
+   */
+  protected def checkSparkAnswerAndOperator(sqlStr: String): DataFrame = {
+    checkSparkAnswerAndOperator(() => sql(sqlStr), requireNative = true)
+  }
+
+  /**
+   * Assert results match vanilla Spark, fail if any operator is not native.
+   */
+  protected def checkSparkAnswerAndOperator(
+      dataframe: () => DataFrame,
+      requireNative: Boolean = true): DataFrame = {
+
+    var expected: Seq[Row] = null
+    withSQLConf("spark.auron.enable" -> "false") {
+      val dfSpark = dataframe()
+      expected = dfSpark.collect()
+    }
+
+    val dfAuron = dataframe()
+    checkAnswer(dfAuron, expected)
+
+    if (requireNative) {
+      val plan = stripAQEPlan(dfAuron.queryExecution.executedPlan)
+      plan
+        .collectFirst { case op if !isNativeOrPassThrough(op) => op }
+        .foreach { op: SparkPlan =>
+          fail(s"""
+               |Found non-native operator: ${op.nodeName}
+               |plan: ${plan}""".stripMargin)
+        }
+    }
+
+    dfAuron
+  }
+
+  protected def isNativeOrPassThrough(op: SparkPlan): Boolean = op match {
+    case _: NativeSupports => true
+    case e: UnaryExecNode
+        if Seq("QueryStage", "InputAdapter", "CustomShuffleRead", "AQEShuffleRead")
+          .exists(e.nodeName.contains) || e.nodeName.startsWith("WholeStageCodegen") =>
+      true
+    case e: LeafExecNode
+        if Seq("ShuffleQueryStage", "BroadcastQueryStage").exists(e.nodeName.contains) =>
+      true
+    case _ => false
+  }
+}
diff --git a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/BuildInfoAuronSQLSuite.scala b/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/BuildInfoAuronSQLSuite.scala
deleted file mode 100644
index 5ef9055c..00000000
--- a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/BuildInfoAuronSQLSuite.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.spark.sql.auron
-
-import org.apache.spark.SparkConf
-import org.apache.spark.sql.test.SharedSparkSession
-
-trait BuildInfoAuronSQLSuite extends SharedSparkSession {
-
-  override protected def sparkConf: SparkConf = {
-    super.sparkConf
-      .set("spark.sql.extensions", "org.apache.spark.sql.auron.AuronSparkSessionExtension")
-      .set(
-        "spark.shuffle.manager",
-        "org.apache.spark.sql.execution.auron.shuffle.AuronShuffleManager")
-      .set("spark.memory.offHeap.enabled", "false")
-      .set("spark.eventLog.enabled", "true")
-      .set("spark.ui.enabled", "true")
-      .set("spark.auron.ui.enabled", "true")
-      .set("spark.ui.port", "4040")
-      .set("spark.auron.enable", "true")
-  }
-
-}
diff --git a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronAdaptiveQueryExecSuite.scala b/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/execution/AuronAdaptiveQueryExecSuite.scala
similarity index 94%
rename from spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronAdaptiveQueryExecSuite.scala
rename to spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/execution/AuronAdaptiveQueryExecSuite.scala
index c27f47a5..fc4757b8 100644
--- a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/AuronAdaptiveQueryExecSuite.scala
+++ b/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/execution/AuronAdaptiveQueryExecSuite.scala
@@ -14,18 +14,16 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.spark.sql.auron
+package org.apache.spark.sql.execution
 
-import org.apache.auron.sparkverEnableMembers
+import org.apache.spark.sql.AuronQueryTest
+
+import org.apache.auron.{sparkverEnableMembers, BaseAuronSQLSuite}
 
 @sparkverEnableMembers("3.5")
-class AuronAdaptiveQueryExecSuite
-    extends org.apache.spark.sql.QueryTest
-    with BaseAuronSQLSuite
-    with org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper {
+class AuronAdaptiveQueryExecSuite extends AuronQueryTest with BaseAuronSQLSuite {
 
   import org.apache.spark.scheduler.{SparkListener, SparkListenerEvent}
-  import org.apache.spark.sql.execution.{PartialReducerPartitionSpec, SparkPlan}
   import org.apache.spark.sql.execution.adaptive.{AdaptiveSparkPlanExec, AQEShuffleReadExec}
   import org.apache.spark.sql.execution.columnar.InMemoryTableScanExec
   import org.apache.spark.sql.execution.exchange.Exchange
diff --git a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/BuildinfoInSparkUISuite.scala b/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/execution/BuildInfoInSparkUISuite.scala
similarity index 88%
rename from spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/BuildinfoInSparkUISuite.scala
rename to spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/execution/BuildInfoInSparkUISuite.scala
index 46365cc5..e030f895 100644
--- a/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/auron/BuildinfoInSparkUISuite.scala
+++ b/spark-extension-shims-spark/src/test/scala/org/apache/spark/sql/execution/BuildInfoInSparkUISuite.scala
@@ -14,18 +14,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.spark.sql.auron
+package org.apache.spark.sql.execution
 
 import java.io.File
 
 import org.apache.spark.SparkConf
+import org.apache.spark.sql.AuronQueryTest
 import org.apache.spark.sql.execution.ui.AuronSQLAppStatusListener
 import org.apache.spark.util.Utils
 
-class BuildinfoInSparkUISuite
-    extends org.apache.spark.sql.QueryTest
-    with BuildInfoAuronSQLSuite
-    with AuronSQLTestHelper {
+import org.apache.auron.BaseAuronSQLSuite
+
+class BuildInfoInSparkUISuite extends AuronQueryTest with BaseAuronSQLSuite {
 
   var testDir: File = _
 

Reply via email to