cloud-fan commented on a change in pull request #29054:
URL: https://github.com/apache/spark/pull/29054#discussion_r496703373



##########
File path: sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
##########
@@ -69,49 +119,18 @@ private[sql] class HiveSessionCatalog(
     // Current thread context classloader may not be the one that loaded the class. Need to
     // switch the context classloader to initialize the instance properly.
     Utils.withContextClassLoader(clazz.getClassLoader) {
-      Try(super.makeFunctionExpression(name, clazz, input)).getOrElse {
-        var udfExpr: Option[Expression] = None
-        try {
-          // When we instantiate hive UDF wrapper class, we may throw exception if the input
-          // expressions don't satisfy the hive UDF, such as type mismatch, input number
-          // mismatch, etc. Here we catch the exception and throw AnalysisException instead.
-          if (classOf[UDF].isAssignableFrom(clazz)) {
-            udfExpr = Some(HiveSimpleUDF(name, new HiveFunctionWrapper(clazz.getName), input))
-            udfExpr.get.dataType // Force it to check input data types.
-          } else if (classOf[GenericUDF].isAssignableFrom(clazz)) {
-            udfExpr = Some(HiveGenericUDF(name, new HiveFunctionWrapper(clazz.getName), input))
-            udfExpr.get.dataType // Force it to check input data types.
-          } else if (classOf[AbstractGenericUDAFResolver].isAssignableFrom(clazz)) {
-            udfExpr = Some(HiveUDAFFunction(name, new HiveFunctionWrapper(clazz.getName), input))
-            udfExpr.get.dataType // Force it to check input data types.
-          } else if (classOf[UDAF].isAssignableFrom(clazz)) {
-            udfExpr = Some(HiveUDAFFunction(
-              name,
-              new HiveFunctionWrapper(clazz.getName),
-              input,
-              isUDAFBridgeRequired = true))
-            udfExpr.get.dataType // Force it to check input data types.
-          } else if (classOf[GenericUDTF].isAssignableFrom(clazz)) {
-            udfExpr = Some(HiveGenericUDTF(name, new HiveFunctionWrapper(clazz.getName), input))
-            udfExpr.get.asInstanceOf[HiveGenericUDTF].elementSchema // Force it to check data types.
-          }
-        } catch {
-          case NonFatal(e) =>
-            val noHandlerMsg = s"No handler for UDF/UDAF/UDTF '${clazz.getCanonicalName}': $e"
-            val errorMsg =
-              if (classOf[GenericUDTF].isAssignableFrom(clazz)) {
-                s"$noHandlerMsg\nPlease make sure your function overrides " +
-                  "`public StructObjectInspector initialize(ObjectInspector[] args)`."
-              } else {
-                noHandlerMsg
-              }
-            val analysisException = new AnalysisException(errorMsg)
-            analysisException.setStackTrace(e.getStackTrace)
-            throw analysisException
-        }
-        udfExpr.getOrElse {
-          throw new AnalysisException(s"No handler for UDF/UDAF/UDTF '${clazz.getCanonicalName}'")
-        }
+      try {
+        super.makeFunctionExpression(name, clazz, input)
+      } catch {
+        case _: InvalidUDFClassException =>
+          // If `super.makeFunctionExpression` throws `InvalidUDFClassException`, we construct
+          // Hive UDF/UDAF/UDTF with the function definition instead.
+          makeHiveFunctionExpression(name, clazz, input)
+        case e: AnalysisException =>

Review comment:
       nit: `case e => throw e`
   
   The only exception that needs special handling is `InvalidUDFClassException`, where we fall back to the Hive UDF classes. Any other exception should just be re-thrown.
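   
   For illustration only, here is a self-contained sketch of the control flow this nit suggests. Every name below (`FallbackSketch`, `resolveFunction`, the stand-in exception and builders) is a hypothetical stand-in, not Spark's actual API; the real change lives in `HiveSessionCatalog.makeFunctionExpression`.
   
   ```scala
   // Hypothetical stand-ins showing the try / fallback / re-throw shape of the nit.
   object FallbackSketch {
     final class InvalidUDFClassException(msg: String) extends RuntimeException(msg)
   
     // Stand-in for super.makeFunctionExpression: fails for non-built-in classes.
     def makeFunctionExpression(name: String): String =
       throw new InvalidUDFClassException(s"$name is not a built-in function")
   
     // Stand-in for makeHiveFunctionExpression: builds the Hive wrapper instead.
     def makeHiveFunctionExpression(name: String): String = s"HiveUDF($name)"
   
     def resolveFunction(name: String): String =
       try {
         makeFunctionExpression(name)
       } catch {
         // Only this specific failure means "try the Hive UDF/UDAF/UDTF path".
         case _: InvalidUDFClassException => makeHiveFunctionExpression(name)
         // Everything else is simply re-thrown, as the nit suggests.
         case e => throw e
       }
   
     def main(args: Array[String]): Unit =
       println(resolveFunction("my_udf")) // prints: HiveUDF(my_udf)
   }
   ```
   
   The catch-all re-throw is behaviorally a no-op; its value is readability: it makes explicit that only `InvalidUDFClassException` gets the fallback, while every other failure propagates unchanged.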



