This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch branch-4.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-4.0 by this push:
     new d007615ac254 [SPARK-52014][SQL] Support FoldableUnevaluable in 
HiveGenericUDFEvaluator
d007615ac254 is described below

commit d007615ac2547cb1637fceeb1b362fde9616aa98
Author: Kun Wan <wanku...@163.com>
AuthorDate: Tue May 13 17:20:23 2025 +0200

    [SPARK-52014][SQL] Support FoldableUnevaluable in HiveGenericUDFEvaluator
    
    ### What changes were proposed in this pull request?
    
    FoldableUnevaluable expressions will throw an exception in 
HiveGenericUDFEvaluator; we should skip eval() for them.
    
    
https://github.com/apache/spark/blob/aff3a33eef551e6f5f4b8bd601dbbb3f0ccf4ba1/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala#L928
    
    ### Why are the changes needed?
    
    Bug fix for HiveGenericUDFEvaluator
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    Add UT
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #50800 from wankunde/SPARK-52014.
    
    Authored-by: Kun Wan <wanku...@163.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
    (cherry picked from commit 897150a4ffd0c5d275f6e2dffd2c56dff3cd9be0)
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../main/scala/org/apache/spark/sql/hive/HiveInspectors.scala |  4 ++++
 .../org/apache/spark/sql/hive/execution/HiveUDFSuite.scala    | 11 +++++++++++
 2 files changed, 15 insertions(+)

diff --git 
a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala 
b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
index 9f1954cbf686..c0169bcbb879 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveInspectors.scala
@@ -919,6 +919,10 @@ private[hive] trait HiveInspectors {
     // We will enumerate all of the possible constant expressions, throw 
exception if we missed
     case Literal(_, dt) =>
       throw SparkException.internalError(s"Hive doesn't support the constant 
type [$dt].")
+    // FoldableUnevaluable will be replaced with a foldable value in 
FinishAnalysis rule,
+    // skip eval() for them.
+    case _ if expr.collectFirst { case e: FoldableUnevaluable => e }.isDefined 
=>
+      toInspector(expr.dataType)
     // ideally, we don't test the foldable here(but in optimizer), however, 
some of the
     // Hive UDF / UDAF requires its argument to be constant objectinspector, 
we do it eagerly.
     case _ if expr.foldable => toInspector(Literal.create(expr.eval(), 
expr.dataType))
diff --git 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
 
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
index 9a047d808c3e..b7c466f3ed6a 100644
--- 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
+++ 
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
@@ -826,6 +826,17 @@ class HiveUDFSuite extends QueryTest with 
TestHiveSingleton with SQLTestUtils {
       }
     }
   }
+
+  test("SPARK-52014: Support FoldableUnevaluable in HiveGenericUDFEvaluator") {
+    withUserDefinedFunction("hive_concat" -> true) {
+      sql(s"CREATE TEMPORARY FUNCTION hive_concat AS 
'${classOf[GenericUDFConcat].getName}'")
+      assert(sql(
+        s"""SELECT hive_concat(
+           |         date_format(CAST(CURRENT_DATE() AS DATE), 'yyyyMMdd'),
+           |         now())""".stripMargin).collect().length == 1)
+    }
+    hiveContext.reset()
+  }
 }
 
 class TestPair(x: Int, y: Int) extends Writable with Serializable {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to