This is an automated email from the ASF dual-hosted git repository.
yuanzhou pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-gluten.git
The following commit(s) were added to refs/heads/main by this push:
new b8a11149cd [GLUTEN-11088][VL] Fix MiscOperatorSuite in Spark-4.0 (#11147)
b8a11149cd is described below
commit b8a11149cddb47f9026f62346dfd6ff4b93c5c91
Author: Rong Ma <[email protected]>
AuthorDate: Fri Nov 21 14:36:12 2025 +0000
[GLUTEN-11088][VL] Fix MiscOperatorSuite in Spark-4.0 (#11147)
* fix MiscOperatorSuite
---
.../scala/org/apache/gluten/execution/MiscOperatorSuite.scala | 10 ++++------
.../src/test/scala/org/apache/spark/sql/GlutenQueryTest.scala | 10 ++--------
2 files changed, 6 insertions(+), 14 deletions(-)
diff --git a/backends-velox/src/test/scala/org/apache/gluten/execution/MiscOperatorSuite.scala b/backends-velox/src/test/scala/org/apache/gluten/execution/MiscOperatorSuite.scala
index f67ec53ec0..ad54cf8a49 100644
--- a/backends-velox/src/test/scala/org/apache/gluten/execution/MiscOperatorSuite.scala
+++ b/backends-velox/src/test/scala/org/apache/gluten/execution/MiscOperatorSuite.scala
@@ -135,8 +135,7 @@ class MiscOperatorSuite extends VeloxWholeStageTransformerSuite with AdaptiveSpa
checkLengthAndPlan(df, 2)
}
- // TODO: fix on spark-4.0
- testWithMaxSparkVersion("is_not_null", "3.5") {
+ test("is_not_null") {
val df = runQueryAndCompare(
"select l_orderkey from lineitem where l_comment is not null " +
"and l_orderkey = 1") { _ => }
@@ -177,10 +176,10 @@ class MiscOperatorSuite extends VeloxWholeStageTransformerSuite with AdaptiveSpa
}
// TODO: fix on spark-4.0
- testWithMaxSparkVersion("and pushdown", "3.5") {
+ test("and pushdown") {
val df = runQueryAndCompare(
"select l_orderkey from lineitem where l_orderkey > 2 " +
- "and l_orderkey = 1") { _ => }
+ "and l_orderkey < 2") { _ => }
assert(df.isEmpty)
checkLengthAndPlan(df, 0)
}
@@ -353,8 +352,7 @@ class MiscOperatorSuite extends VeloxWholeStageTransformerSuite with AdaptiveSpa
checkLengthAndPlan(df, 7)
}
- // TODO: fix on spark-4.0
- testWithMaxSparkVersion("window expression", "3.5") {
+ test("window expression") {
runQueryAndCompare(
"select max(l_partkey) over" +
" (partition by l_suppkey order by l_commitdate" +
diff --git a/gluten-substrait/src/test/scala/org/apache/spark/sql/GlutenQueryTest.scala b/gluten-substrait/src/test/scala/org/apache/spark/sql/GlutenQueryTest.scala
index ff08f0171b..559bf2e7b7 100644
--- a/gluten-substrait/src/test/scala/org/apache/spark/sql/GlutenQueryTest.scala
+++ b/gluten-substrait/src/test/scala/org/apache/spark/sql/GlutenQueryTest.scala
@@ -35,7 +35,6 @@ import org.apache.spark.sql.execution.adaptive.{AdaptiveSparkPlanExec, ShuffleQu
import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
import org.apache.spark.sql.execution.columnar.InMemoryRelation
import org.apache.spark.storage.StorageLevel
-import org.apache.spark.util.SparkVersionUtil
import org.junit.Assert
import org.scalatest.Assertions
@@ -357,14 +356,9 @@ abstract class GlutenQueryTest extends PlanTest with AdaptiveSparkPlanHelper {
}
private def getExecutedPlan(plan: SparkPlan): Seq[SparkPlan] = {
- val stripPlan = if (SparkVersionUtil.gteSpark40) {
- stripAQEPlan(plan)
- } else {
- plan
- }
- val subTree = stripPlan match {
+ val subTree = plan match {
case exec: AdaptiveSparkPlanExec =>
- getExecutedPlan(exec.executedPlan)
+ getExecutedPlan(stripAQEPlan(exec))
case cmd: CommandResultExec =>
getExecutedPlan(cmd.commandPhysicalPlan)
case s: ShuffleQueryStageExec =>
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]