cloud-fan commented on a change in pull request #35304:
URL: https://github.com/apache/spark/pull/35304#discussion_r791431162



##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
##########
@@ -2114,6 +2126,14 @@ class Analyzer(override val catalogManager: 
CatalogManager)
       }
     }
 
+    def lookupBuiltinOrTempTableFunction(name: Seq[String]): 
Option[ExpressionInfo] = {

Review comment:
       We also need to call this method when resolving `UnresolvedFunc`

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
##########
@@ -2006,21 +2006,33 @@ class Analyzer(override val catalogManager: 
CatalogManager)
     override def apply(plan: LogicalPlan): LogicalPlan = {
       val externalFunctionNameSet = new mutable.HashSet[Seq[String]]()
 
-      
plan.resolveExpressionsWithPruning(_.containsAnyPattern(UNRESOLVED_FUNCTION)) {
-        case f @ UnresolvedFunction(nameParts, _, _, _, _) =>
-          if 
(ResolveFunctions.lookupBuiltinOrTempFunction(nameParts).isDefined) {
+      def lookupFunction[T <: TreeNode[_]](
+          f: T,
+          nameParts: Seq[String],
+          lookupBuiltinOrTempFunc: Seq[String] => Option[ExpressionInfo]): T = 
{
+        if (lookupBuiltinOrTempFunc(nameParts).isDefined) {
+          f
+        } else {
+          val CatalogAndIdentifier(catalog, ident) = 
expandIdentifier(nameParts)
+          val fullName = normalizeFuncName(catalog.name +: ident.namespace :+ 
ident.name)
+          if (externalFunctionNameSet.contains(fullName)) {
+            f
+          } else if (catalog.asFunctionCatalog.functionExists(ident)) {

Review comment:
      this checks both scalar and table functions; shall we differentiate them?
   
   One problem is: only the v1 catalog supports table functions, so there is no 
v2 API for it. The logic will be a bit complicated
   ```
   if (isSessionCatalog) {
     sessionCatalog.tableFunctionExist(...)
   } else {
     false
   }
   ```

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
##########
@@ -1553,18 +1553,23 @@ class SessionCatalog(
 
   /**
    * Look up the `ExpressionInfo` of the given function by name if it's a 
built-in or temp function.
-   * This supports both scalar and table functions.
+   * This only supports scalar functions.
    */
   def lookupBuiltinOrTempFunction(name: String): Option[ExpressionInfo] = {
     
FunctionRegistry.builtinOperators.get(name.toLowerCase(Locale.ROOT)).orElse {
-      def lookup(ident: FunctionIdentifier): Option[ExpressionInfo] = {
-        functionRegistry.lookupFunction(ident).orElse(
-          tableFunctionRegistry.lookupFunction(ident))
-      }
-      synchronized(lookupTempFuncWithViewContext(name, isBuiltinFunction, 
lookup))
+      synchronized(
+        lookupTempFuncWithViewContext(name, isBuiltinFunction, 
functionRegistry.lookupFunction))
     }
   }
 
+  /**
+   * Look up the `ExpressionInfo` of the given table function by name if it's a
+   * built-in or temp function.
+   */
+  def lookupBuiltinOrTempTableFunction(name: String): Option[ExpressionInfo] = 
synchronized {
+    lookupTempFuncWithViewContext(name, isBuiltinFunction, 
tableFunctionRegistry.lookupFunction)

Review comment:
      shall we pass the correct `isBuiltinFunction` function so that it only 
checks table functions?




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to