This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a commit to branch release-1.3
in repository https://gitbox.apache.org/repos/asf/paimon.git

commit 867629ec23eab03fc4e8002a9649dcbd52e5b766
Author: Zouxxyy <[email protected]>
AuthorDate: Mon Oct 13 21:01:14 2025 +0800

    [spark] Ensure compatibility of resolveFilter in lower version spark3.4 (#6387)
---
 .../analysis/expressions/ExpressionHelper.scala    | 23 -------------
 .../analysis/expressions/ExpressionHelper.scala    | 38 +++++++++++-----------
 2 files changed, 19 insertions(+), 42 deletions(-)

diff --git a/paimon-spark/paimon-spark-3.2/src/main/scala/org/apache/paimon/spark/catalyst/analysis/expressions/ExpressionHelper.scala b/paimon-spark/paimon-spark-3.2/src/main/scala/org/apache/paimon/spark/catalyst/analysis/expressions/ExpressionHelper.scala
index 6cc1ce4794..56223c36cd 100644
--- a/paimon-spark/paimon-spark-3.2/src/main/scala/org/apache/paimon/spark/catalyst/analysis/expressions/ExpressionHelper.scala
+++ b/paimon-spark/paimon-spark-3.2/src/main/scala/org/apache/paimon/spark/catalyst/analysis/expressions/ExpressionHelper.scala
@@ -23,11 +23,7 @@ import org.apache.paimon.spark.SparkFilterConverter
 import org.apache.paimon.types.RowType
 
 import org.apache.spark.sql.PaimonUtils.{normalizeExprs, translateFilter}
-import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression}
-import org.apache.spark.sql.catalyst.optimizer.ConstantFolding
-import org.apache.spark.sql.catalyst.plans.logical.Filter
-import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation
 
 trait ExpressionHelper extends ExpressionHelperBase {
 
@@ -56,23 +52,4 @@ trait ExpressionHelper extends ExpressionHelperBase {
       Some(PredicateBuilder.and(predicates: _*))
     }
   }
-
-  def resolveFilter(
-      spark: SparkSession,
-      relation: DataSourceV2Relation,
-      conditionSql: String): Expression = {
-    val unResolvedExpression = spark.sessionState.sqlParser.parseExpression(conditionSql)
-    val filter = Filter(unResolvedExpression, relation)
-    spark.sessionState.analyzer.execute(filter) match {
-      case filter: Filter =>
-        try {
-          ConstantFolding.apply(filter).asInstanceOf[Filter].condition
-        } catch {
-          case _: Throwable => filter.condition
-        }
-      case _ =>
-        throw new RuntimeException(
-          s"Could not resolve expression $conditionSql in relation: $relation")
-    }
-  }
 }
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/expressions/ExpressionHelper.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/expressions/ExpressionHelper.scala
index fbb1aa7d8f..8bc976b866 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/expressions/ExpressionHelper.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalyst/analysis/expressions/ExpressionHelper.scala
@@ -69,25 +69,6 @@ trait ExpressionHelper extends ExpressionHelperBase {
       Some(PredicateBuilder.and(predicates: _*))
     }
   }
-
-  def resolveFilter(
-      spark: SparkSession,
-      relation: DataSourceV2Relation,
-      conditionSql: String): Expression = {
-    val unResolvedExpression = spark.sessionState.sqlParser.parseExpression(conditionSql)
-    val filter = Filter(unResolvedExpression, relation)
-    spark.sessionState.analyzer.executeAndCheck(filter, new QueryPlanningTracker) match {
-      case filter: Filter =>
-        try {
-          ConstantFolding.apply(filter).asInstanceOf[Filter].condition
-        } catch {
-          case _: Throwable => filter.condition
-        }
-      case _ =>
-        throw new RuntimeException(
-          s"Could not resolve expression $conditionSql in relation: $relation")
-    }
-  }
 }
 
 trait ExpressionHelperBase extends PredicateHelper {
@@ -210,6 +191,25 @@ trait ExpressionHelperBase extends PredicateHelper {
         s"Unsupported update expression: $other, only support update with 
PrimitiveType and StructType.")
   }
 
+  def resolveFilter(
+      spark: SparkSession,
+      relation: DataSourceV2Relation,
+      conditionSql: String): Expression = {
+    val unResolvedExpression = spark.sessionState.sqlParser.parseExpression(conditionSql)
+    val filter = Filter(unResolvedExpression, relation)
+    spark.sessionState.analyzer.execute(filter) match {
+      case filter: Filter =>
+        try {
+          ConstantFolding.apply(filter).asInstanceOf[Filter].condition
+        } catch {
+          case _: Throwable => filter.condition
+        }
+      case _ =>
+        throw new RuntimeException(
+          s"Could not resolve expression $conditionSql in relation: $relation")
+    }
+  }
+
   def splitPruePartitionAndOtherPredicates(
       condition: Expression,
       partitionColumns: Seq[String],

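A minimal usage sketch of the relocated helper (not part of the patch above): it assumes the ExpressionHelper trait from paimon-spark-common can be mixed in directly and that a Paimon table is already registered in the session; the object name, method name, table lookup, and condition below are illustrative placeholders, only the resolveFilter signature comes from the diff.

import org.apache.paimon.spark.catalyst.analysis.expressions.ExpressionHelper
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation

object ResolveFilterSketch extends ExpressionHelper {

  // Resolves conditionSql against the DataSourceV2Relation behind `table`,
  // using the resolveFilter now defined once on ExpressionHelperBase.
  def resolveAgainstTable(spark: SparkSession, table: String, conditionSql: String): Expression = {
    // Pull the V2 relation out of the analyzed plan of the (placeholder) table.
    val relation = spark.table(table).queryExecution.analyzed
      .collectFirst { case r: DataSourceV2Relation => r }
      .getOrElse(throw new IllegalStateException(s"$table is not backed by a DataSourceV2Relation"))
    // Parses the SQL, analyzes Filter(parsed, relation), and constant-folds
    // the result before returning the resolved condition.
    resolveFilter(spark, relation, conditionSql)
  }
}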