This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 513532966302 [SPARK-51119][SQL][FOLLOW-UP] Readers on executors 
resolving EXISTS_DEFAULT should not call catalogs
513532966302 is described below

commit 5135329663020d3a0df6ec0443c9447ea22ce2c5
Author: Szehon Ho <[email protected]>
AuthorDate: Tue Feb 11 16:23:26 2025 +0800

    [SPARK-51119][SQL][FOLLOW-UP] Readers on executors resolving EXISTS_DEFAULT 
should not call catalogs
    
    ### What changes were proposed in this pull request?
     Code cleanup for https://github.com/apache/spark/pull/49840/.  
Literal#fromSQL should be the inverse of Literal#sql.  The cast handling is an 
artifact of the calling ResolveDefaultColumns object that adds the cast when 
making EXISTS_DEFAULT, so its handling is moved to ResolveDefaultColumns as 
well.
    
    ### Why are the changes needed?
    Code cleanup
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Existing tests
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #49881 from szehon-ho/SPARK-51119-follow.
    
    Authored-by: Szehon Ho <[email protected]>
    Signed-off-by: Wenchen Fan <[email protected]>
---
 .../org/apache/spark/sql/catalyst/expressions/literals.scala |  7 +++----
 .../spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala  | 12 +++++++++---
 2 files changed, 12 insertions(+), 7 deletions(-)

diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index 3f9fba185ca4..960f0ee7a729 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -268,6 +268,9 @@ object Literal {
       s"but class ${Utils.getSimpleName(value.getClass)} found.")
   }
 
+  /**
+   * Inverse of [[Literal.sql]]
+   */
   def fromSQL(sql: String): Expression = {
     CatalystSqlParser.parseExpression(sql).transformUp {
       case u: UnresolvedFunction =>
@@ -278,10 +281,6 @@ object Literal {
         assert(u.orderingWithinGroup.isEmpty)
         assert(!u.isInternal)
         
FunctionRegistry.builtin.lookupFunction(FunctionIdentifier(u.nameParts.head), 
u.arguments)
-    } match {
-      case c: Cast if c.needsTimeZone =>
-        c.withTimeZone(SQLConf.get.sessionLocalTimeZone)
-      case e: Expression => e
     }
   }
 }
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala
index 6499e5c40049..323f715ede2c 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala
@@ -320,14 +320,20 @@ object ResolveDefaultColumns extends QueryErrorsBase
   }
 
   /**
-   * Analyze EXISTS_DEFAULT value.  This skips some steps of analyze as most 
of the
-   * analysis has been done before.
+   * Analyze EXISTS_DEFAULT value.  EXISTS_DEFAULT value was created from 
CURRENT_DEFAULT
+   * via [[analyze]] and thus this can skip most of those steps.
    */
   private def analyzeExistenceDefaultValue(field: StructField): Expression = {
     val defaultSQL = 
field.metadata.getString(EXISTS_DEFAULT_COLUMN_METADATA_KEY)
 
     // Parse the expression.
-    val expr = Literal.fromSQL(defaultSQL)
+    val expr = Literal.fromSQL(defaultSQL) match {
+      // EXISTS_DEFAULT will have a cast from analyze() due to 
coerceDefaultValue
+      // hence we need to add the timezone to the cast if necessary
+      case c: Cast if c.needsTimeZone =>
+        c.withTimeZone(SQLConf.get.sessionLocalTimeZone)
+      case e: Expression => e
+    }
 
     // Check invariants
     if (expr.containsPattern(PLAN_EXPRESSION)) {


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to