davidm-db commented on code in PR #47423:
URL: https://github.com/apache/spark/pull/47423#discussion_r1713789002


##########
sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreter.scala:
##########
@@ -58,44 +61,108 @@ case class SqlScriptingInterpreter() {
       case _ => None
     }
 
+  private def transformBodyIntoExec(
+      compoundBody: CompoundBody,
+      isExitHandler: Boolean = false,
+      label: String = ""): CompoundBodyExec = {
+    val variables = compoundBody.collection.flatMap {
+      case st: SingleStatement => getDeclareVarNameFromPlan(st.parsedPlan)
+      case _ => None
+    }
+    val dropVariables = variables
+      .map(varName => DropVariable(varName, ifExists = true))
+      .map(new SingleStatementExec(_, Origin(), isInternal = true))
+      .reverse
+
+    val conditionHandlerMap = mutable.HashMap[String, ErrorHandlerExec]()
+    val handlers = ListBuffer[ErrorHandlerExec]()
+    compoundBody.handlers.foreach(handler => {
+      val handlerBodyExec =
+        transformBodyIntoExec(handler.body,
+          handler.handlerType == HandlerType.EXIT,
+          compoundBody.label.get)
+      val handlerExec = new ErrorHandlerExec(handlerBodyExec)
+
+      handler.conditions.foreach(condition => {
+        val conditionValue = compoundBody.conditions.getOrElse(condition, 
condition)
+        conditionHandlerMap.get(conditionValue) match {
+          case Some(_) =>
+            throw SqlScriptingErrors.duplicateHandlerForSameSqlState(
+              CurrentOrigin.get, conditionValue)
+          case None => conditionHandlerMap.put(conditionValue, handlerExec)
+        }
+      })
+
+      handlers += handlerExec
+    })
+
+    if (isExitHandler) {
+      val leave = new LeaveStatementExec(label)
+      val statements = compoundBody.collection.map(st => 
transformTreeIntoExecutable(st)) ++
+        dropVariables :+ leave
+
+      return new CompoundBodyExec(
+        compoundBody.label,
+        statements,
+        conditionHandlerMap,
+        session)
+    }
+
+    new CompoundBodyExec(
+      compoundBody.label,
+      compoundBody.collection.map(st => transformTreeIntoExecutable(st)) ++ 
dropVariables,
+      conditionHandlerMap,
+      session)
+  }
+
   /**
    * Transform the parsed tree to the executable node.
    *
    * @param node
    *   Root node of the parsed tree.
-   * @param session
-   *   Spark session that SQL script is executed within.
    * @return
    *   Executable statement.
    */
-  private def transformTreeIntoExecutable(
-      node: CompoundPlanStatement, session: SparkSession): 
CompoundStatementExec =
+  private def transformTreeIntoExecutable(node: CompoundPlanStatement): 
CompoundStatementExec =
     node match {
       case body: CompoundBody =>
         // TODO [SPARK-48530]: Current logic doesn't support scoped variables 
and shadowing.
-        val variables = body.collection.flatMap {
-          case st: SingleStatement => getDeclareVarNameFromPlan(st.parsedPlan)
-          case _ => None
-        }
-        val dropVariables = variables
-          .map(varName => DropVariable(varName, ifExists = true))
-          .map(new SingleStatementExec(_, Origin(), isInternal = true))
-          .reverse
-        new CompoundBodyExec(
-          body.collection.map(st => transformTreeIntoExecutable(st, session)) 
++ dropVariables)
+        transformBodyIntoExec(body)

Review Comment:
   Should we pass the compound body's label here as well (as is done for the handler bodies via `compoundBody.label.get`), so that nested bodies are created with the correct label?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to