This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 54dee4a4b59c [SPARK-53537][CORE] Adding Support for Parsing CONTINUE HANDLER
54dee4a4b59c is described below

commit 54dee4a4b59cf3817f15da46d35f42a81a3b1c07
Author: Teodor Djelic <130703036+teodordje...@users.noreply.github.com>
AuthorDate: Sat Sep 13 21:34:09 2025 +0800

    [SPARK-53537][CORE] Adding Support for Parsing CONTINUE HANDLER
    
    ### What changes were proposed in this pull request?
    
    This PR lays the foundation for implementing support for `CONTINUE HANDLER`: declaring a `CONTINUE HANDLER` no longer throws a "not supported" exception, and for now it behaves like an `EXIT HANDLER`.
    
    The feature is gated behind a new feature switch, `spark.sql.scripting.continueHandlerEnabled`, defined in `SQLConf.scala`.
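    
    For illustration only, a minimal sketch of the gated behaviour (not part of this patch): it assumes a context where `SQLConf` is reachable and `parsePlan` comes from `CatalystSqlParser`, as in the parser suite touched below, and it reuses the script from the tests.
    
    ```scala
    import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
    import org.apache.spark.sql.internal.SQLConf
    
    // Enable the internal switch; with the default (false) the script below
    // fails to parse with UNSUPPORTED_FEATURE.CONTINUE_EXCEPTION_HANDLER.
    SQLConf.get.setConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED, true)
    
    val script =
      """
        |BEGIN
        |  DECLARE OR REPLACE flag INT = -1;
        |  DECLARE CONTINUE HANDLER FOR SQLSTATE '22012'
        |  BEGIN
        |    SET flag = 1;
        |  END;
        |  SELECT 1/0;
        |  SELECT flag;
        |END
        |""".stripMargin
    
    // With the switch on, the handler parses and is typed as a CONTINUE handler.
    val plan = parsePlan(script)
    ```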
    
    ### Why are the changes needed?
    
    We want to add support for executing `CONTINUE HANDLER`. The feature is described in the documentation but is not currently supported in SQL scripts. This change enables parsing of `CONTINUE HANDLER`.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Tests were added to `SqlScriptingInterpreterSuite.scala` and `SqlScriptingParserSuite.scala` to cover `CONTINUE HANDLER`.
    
    Both test suites set the `spark.sql.scripting.continueHandlerEnabled` config entry to `true` before all tests and unset it after all tests have run.
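    
    As a sketch, the suite-level setup mirrors the `beforeAll`/`afterAll` overrides added in the diff below (`conf` is assumed to be the suite's `SQLConf` handle):
    
    ```scala
    protected override def beforeAll(): Unit = {
      super.beforeAll()
      // Opt the whole suite into parsing CONTINUE HANDLER.
      conf.setConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED, true)
    }
    
    protected override def afterAll(): Unit = {
      // Restore the default (false) so other suites are unaffected.
      conf.unsetConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED.key)
      super.afterAll()
    }
    ```
    
    The negative parser test instead scopes the flag to `false` for just its body via `withSQLConf`, so the "not supported" error path stays covered.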
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #52298 from TeodorDjelic/parse-continue-handlers.
    
    Authored-by: Teodor Djelic <130703036+teodordje...@users.noreply.github.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../spark/sql/catalyst/parser/AstBuilder.scala     |  10 +-
 .../org/apache/spark/sql/internal/SQLConf.scala    |  11 +
 .../catalyst/parser/SqlScriptingParserSuite.scala  | 353 +++++++++++++++++++--
 .../scripting/SqlScriptingInterpreterSuite.scala   | 149 ++++++++-
 4 files changed, 498 insertions(+), 25 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index b42cba86e0fb..dd768b7356f2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -281,11 +281,15 @@ class AstBuilder extends DataTypeAstBuilder
       parsingCtx: SqlScriptingParsingContext): ExceptionHandler = {
     val exceptionHandlerTriggers = visitConditionValuesImpl(ctx.conditionValues())
 
-    if (Option(ctx.CONTINUE()).isDefined) {
-      throw SqlScriptingErrors.continueHandlerNotSupported(CurrentOrigin.get)
+    val handlerType = if (Option(ctx.CONTINUE()).isDefined) {
+      if (!conf.getConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED)) {
+        throw SqlScriptingErrors.continueHandlerNotSupported(CurrentOrigin.get)
+      }
+      ExceptionHandlerType.CONTINUE
+    } else {
+      ExceptionHandlerType.EXIT
     }
 
-    val handlerType = ExceptionHandlerType.EXIT
     val body = if (Option(ctx.beginEndCompoundBlock()).isDefined) {
       visitBeginEndCompoundBlockImpl(
         ctx.beginEndCompoundBlock(),
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 2466bd654f31..17b8dd493cf8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -4160,6 +4160,17 @@ object SQLConf {
       .booleanConf
       .createWithDefault(true)
 
+  val SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED =
+    buildConf("spark.sql.scripting.continueHandlerEnabled")
+      .internal()
+      .doc("EXPERIMENTAL FEATURE/WORK IN PROGRESS: SQL Scripting CONTINUE 
HANDLER feature " +
+        "is under development and still not working as intended. This feature 
switch is intended " +
+        "to be used internally for development and testing, not by end users. 
" +
+        "YOU ARE ADVISED AGAINST USING THIS FEATURE AS ITS NOT FINISHED.")
+      .version("4.1.0")
+      .booleanConf
+      .createWithDefault(false)
+
   val CONCAT_BINARY_AS_STRING = buildConf("spark.sql.function.concatBinaryAsString")
     .doc("When this option is set to false and all inputs are binary, `functions.concat` returns " +
       "an output as binary. Otherwise, it returns as a string.")
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala
index 9902374ce8e9..298329db1ee3 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala
@@ -23,10 +23,21 @@ import org.apache.spark.sql.catalyst.plans.SQLHelper
 import org.apache.spark.sql.catalyst.plans.logical.{CompoundBody, CreateVariable, ExceptionHandler, ForStatement, IfElseStatement, IterateStatement, LeaveStatement, LoopStatement, Project, RepeatStatement, SearchedCaseStatement, SetVariable, SimpleCaseStatement, SingleStatement, WhileStatement}
 import org.apache.spark.sql.errors.DataTypeErrors.toSQLId
 import org.apache.spark.sql.exceptions.SqlScriptingException
+import org.apache.spark.sql.internal.SQLConf
 
 class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
   import CatalystSqlParser._
 
+  protected override def beforeAll(): Unit = {
+    super.beforeAll()
+    conf.setConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED, true)
+  }
+
+  protected override def afterAll(): Unit = {
+    conf.unsetConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED.key)
+    super.afterAll()
+  }
+
   // Tests
   test("single select") {
     val sqlScriptText = "SELECT 1;"
@@ -2843,11 +2854,35 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
   }
 
   test("continue handler not supported") {
+    withSQLConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED.key -> "false") {
+      val sqlScript =
+        """
+          |BEGIN
+          |  DECLARE OR REPLACE flag INT = -1;
+          |  DECLARE CONTINUE HANDLER FOR SQLSTATE '22012'
+          |  BEGIN
+          |    SET flag = 1;
+          |  END;
+          |  SELECT 1/0;
+          |  SELECT flag;
+          |END
+          |""".stripMargin
+
+      checkError(
+        exception = intercept[SqlScriptingException] {
+          parsePlan(sqlScript)
+        },
+        condition = "UNSUPPORTED_FEATURE.CONTINUE_EXCEPTION_HANDLER",
+        parameters = Map.empty)
+    }
+  }
+
+  test("declare exit handler for qualified condition name that is not 
supported") {
     val sqlScript =
       """
         |BEGIN
         |  DECLARE OR REPLACE flag INT = -1;
-        |  DECLARE CONTINUE HANDLER FOR SQLSTATE '22012'
+        |  DECLARE EXIT HANDLER FOR qualified.condition.name
         |  BEGIN
         |    SET flag = 1;
         |  END;
@@ -2859,16 +2894,16 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
       exception = intercept[SqlScriptingException] {
         parsePlan(sqlScript)
       },
-      condition = "UNSUPPORTED_FEATURE.CONTINUE_EXCEPTION_HANDLER",
-      parameters = Map.empty)
+      condition = "INVALID_HANDLER_DECLARATION.CONDITION_NOT_FOUND",
+      parameters = Map("condition" -> "QUALIFIED.CONDITION.NAME"))
   }
 
-  test("declare handler for qualified condition name that is not supported") {
+  test("declare continue handler for qualified condition name that is not 
supported") {
     val sqlScript =
       """
         |BEGIN
         |  DECLARE OR REPLACE flag INT = -1;
-        |  DECLARE EXIT HANDLER FOR qualified.condition.name
+        |  DECLARE CONTINUE HANDLER FOR qualified.condition.name
         |  BEGIN
         |    SET flag = 1;
         |  END;
@@ -2884,7 +2919,7 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
       parameters = Map("condition" -> "QUALIFIED.CONDITION.NAME"))
   }
 
-  test("declare handler for undefined condition") {
+  test("declare exit handler for undefined condition") {
     val sqlScriptText =
       """
         |BEGIN
@@ -2901,7 +2936,24 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
     assert(exception.origin.line.contains(2))
   }
 
-  test("declare handler in wrong place") {
+  test("declare continue handler for undefined condition") {
+    val sqlScriptText =
+      """
+        |BEGIN
+        |  DECLARE CONTINUE HANDLER FOR undefined_condition BEGIN SELECT 1; END;
+        |  SELECT 1;
+        |END""".stripMargin
+    val exception = intercept[SqlScriptingException] {
+      parsePlan(sqlScriptText)
+    }
+    checkError(
+      exception = exception,
+      condition = "INVALID_HANDLER_DECLARATION.CONDITION_NOT_FOUND",
+      parameters = Map("condition" -> "UNDEFINED_CONDITION"))
+    assert(exception.origin.line.contains(2))
+  }
+
+  test("declare exit handler in wrong place") {
     val sqlScriptText =
       """
         |BEGIN
@@ -2918,7 +2970,24 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
     assert(exception.origin.line.contains(2))
   }
 
-  test("duplicate condition in handler declaration") {
+  test("declare continue handler in wrong place") {
+    val sqlScriptText =
+      """
+        |BEGIN
+        |  SELECT 1;
+        |  DECLARE CONTINUE HANDLER FOR DIVIDE_BY_ZERO BEGIN SELECT 1; END;
+        |END""".stripMargin
+    val exception = intercept[SqlScriptingException] {
+      parsePlan(sqlScriptText)
+    }
+    checkError(
+      exception = exception,
+      condition = "INVALID_HANDLER_DECLARATION.WRONG_PLACE_OF_DECLARATION",
+      parameters = Map.empty)
+    assert(exception.origin.line.contains(2))
+  }
+
+  test("duplicate condition in exit handler declaration") {
     val sqlScript =
       """
         |BEGIN
@@ -2940,7 +3009,29 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
       parameters = Map("condition" -> "DUPLICATE_CONDITION"))
   }
 
-  test("duplicate sqlState in handler declaration") {
+  test("duplicate condition in continue handler declaration") {
+    val sqlScript =
+      """
+        |BEGIN
+        |  DECLARE OR REPLACE flag INT = -1;
+        |  DECLARE DUPLICATE_CONDITION CONDITION FOR SQLSTATE '12345';
+        |  DECLARE CONTINUE HANDLER FOR duplicate_condition, duplicate_condition
+        |  BEGIN
+        |    SET flag = 1;
+        |  END;
+        |  SELECT 1/0;
+        |  SELECT flag;
+        |END
+        |""".stripMargin
+    checkError(
+      exception = intercept[SqlScriptingException] {
+        parsePlan(sqlScript)
+      },
+      condition = "INVALID_HANDLER_DECLARATION.DUPLICATE_CONDITION_IN_HANDLER_DECLARATION",
+      parameters = Map("condition" -> "DUPLICATE_CONDITION"))
+  }
+
+  test("duplicate sqlState in exit handler declaration") {
     val sqlScript =
       """
         |BEGIN
@@ -2961,7 +3052,28 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
       parameters = Map("sqlState" -> "12345"))
   }
 
-  test("invalid condition combination in handler declaration") {
+  test("duplicate sqlState in continue handler declaration") {
+    val sqlScript =
+      """
+        |BEGIN
+        |  DECLARE OR REPLACE flag INT = -1;
+        |  DECLARE CONTINUE HANDLER FOR SQLSTATE '12345', SQLSTATE '12345'
+        |  BEGIN
+        |    SET flag = 1;
+        |  END;
+        |  SELECT 1/0;
+        |  SELECT flag;
+        |END
+        |""".stripMargin
+    checkError(
+      exception = intercept[SqlScriptingException] {
+        parsePlan(sqlScript)
+      },
+      condition = "INVALID_HANDLER_DECLARATION.DUPLICATE_SQLSTATE_IN_HANDLER_DECLARATION",
+      parameters = Map("sqlState" -> "12345"))
+  }
+
+  test("invalid condition combination in exit handler declaration") {
     val sqlScript =
       """
         |BEGIN
@@ -2982,7 +3094,28 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
       parameters = Map.empty)
   }
 
-  test("declare handler with compound body") {
+  test("invalid condition combination in continue handler declaration") {
+    val sqlScript =
+      """
+        |BEGIN
+        |  DECLARE OR REPLACE flag INT = -1;
+        |  DECLARE CONTINUE HANDLER FOR SQLEXCEPTION, SQLSTATE '12345'
+        |  BEGIN
+        |    SET flag = 1;
+        |  END;
+        |  SELECT 1/0;
+        |  SELECT flag;
+        |END
+        |""".stripMargin
+    checkError(
+      exception = intercept[SqlScriptingException] {
+        parsePlan(sqlScript)
+      },
+      condition = "INVALID_HANDLER_DECLARATION.INVALID_CONDITION_COMBINATION",
+      parameters = Map.empty)
+  }
+
+  test("declare exit handler with compound body") {
     val sqlScriptText =
       """
         |BEGIN
@@ -2999,9 +3132,26 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
       .parsedPlan.isInstanceOf[Project])
   }
 
+  test("declare continue handler with compound body") {
+    val sqlScriptText =
+      """
+        |BEGIN
+        |  DECLARE CONTINUE HANDLER FOR DIVIDE_BY_ZERO BEGIN SELECT 1; END;
+        |END""".stripMargin
+    val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody]
+    assert(tree.handlers.length == 1)
+    assert(tree.handlers.head.isInstanceOf[ExceptionHandler])
+    assert(tree.handlers.head.exceptionHandlerTriggers.conditions.size == 1)
+    assert(tree.handlers.head.exceptionHandlerTriggers.conditions.contains("DIVIDE_BY_ZERO"))
+    assert(tree.handlers.head.body.collection.size == 1)
+    assert(tree.handlers.head.body.collection.head.isInstanceOf[SingleStatement])
+    assert(tree.handlers.head.body.collection.head.asInstanceOf[SingleStatement]
+      .parsedPlan.isInstanceOf[Project])
+  }
+
   // This test works because END is not keyword here but a part of the statement.
   // It represents the name of the column in returned dataframe.
-  test("declare handler single statement with END") {
+  test("declare exit handler single statement with END") {
     val sqlScriptText =
       """
         |BEGIN
@@ -3018,7 +3168,24 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
       .parsedPlan.isInstanceOf[Project])
   }
 
-  test("declare handler single statement") {
+  test("declare continue handler single statement with END") {
+    val sqlScriptText =
+      """
+        |BEGIN
+        |  DECLARE CONTINUE HANDLER FOR DIVIDE_BY_ZERO SELECT 1 END;
+        |END""".stripMargin
+    val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody]
+    assert(tree.handlers.length == 1)
+    assert(tree.handlers.head.isInstanceOf[ExceptionHandler])
+    assert(tree.handlers.head.exceptionHandlerTriggers.conditions.size == 1)
+    assert(tree.handlers.head.exceptionHandlerTriggers.conditions.contains("DIVIDE_BY_ZERO"))
+    assert(tree.handlers.head.body.collection.size == 1)
+    assert(tree.handlers.head.body.collection.head.isInstanceOf[SingleStatement])
+    assert(tree.handlers.head.body.collection.head.asInstanceOf[SingleStatement]
+      .parsedPlan.isInstanceOf[Project])
+  }
+
+  test("declare exit handler single statement") {
     val sqlScriptText =
       """
         |BEGIN
@@ -3035,7 +3202,24 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
       .parsedPlan.isInstanceOf[Project])
   }
 
-  test("declare handler set statement") {
+  test("declare continue handler single statement") {
+    val sqlScriptText =
+      """
+        |BEGIN
+        |  DECLARE CONTINUE HANDLER FOR DIVIDE_BY_ZERO SELECT 1;
+        |END""".stripMargin
+    val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody]
+    assert(tree.handlers.length == 1)
+    assert(tree.handlers.head.isInstanceOf[ExceptionHandler])
+    assert(tree.handlers.head.exceptionHandlerTriggers.conditions.size == 1)
+    assert(tree.handlers.head.exceptionHandlerTriggers.conditions.contains("DIVIDE_BY_ZERO"))
+    assert(tree.handlers.head.body.collection.size == 1)
+    assert(tree.handlers.head.body.collection.head.isInstanceOf[SingleStatement])
+    assert(tree.handlers.head.body.collection.head.asInstanceOf[SingleStatement]
+      .parsedPlan.isInstanceOf[Project])
+  }
+
+  test("declare exit handler set statement") {
     val sqlScriptText =
       """
         |BEGIN
@@ -3052,7 +3236,24 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
       .parsedPlan.isInstanceOf[SetVariable])
   }
 
-  test("declare handler with multiple conditions/sqlstates") {
+  test("declare continue handler set statement") {
+    val sqlScriptText =
+      """
+        |BEGIN
+        |  DECLARE CONTINUE HANDLER FOR DIVIDE_BY_ZERO SET test_var = 1;
+        |END""".stripMargin
+    val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody]
+    assert(tree.handlers.length == 1)
+    assert(tree.handlers.head.isInstanceOf[ExceptionHandler])
+    assert(tree.handlers.head.exceptionHandlerTriggers.conditions.size == 1)
+    assert(tree.handlers.head.exceptionHandlerTriggers.conditions.contains("DIVIDE_BY_ZERO"))
+    assert(tree.handlers.head.body.collection.size == 1)
+    assert(tree.handlers.head.body.collection.head.isInstanceOf[SingleStatement])
+    assert(tree.handlers.head.body.collection.head.asInstanceOf[SingleStatement]
+      .parsedPlan.isInstanceOf[SetVariable])
+  }
+
+  test("declare exit handler with multiple conditions/sqlstates") {
     val sqlScriptText =
       """
         |BEGIN
@@ -3075,7 +3276,30 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
       .parsedPlan.isInstanceOf[SetVariable])
   }
 
-  test("declare handler for SQLEXCEPTION") {
+  test("declare continue handler with multiple conditions/sqlstates") {
+    val sqlScriptText =
+      """
+        |BEGIN
+        |  DECLARE TEST_CONDITION_1 CONDITION FOR SQLSTATE '12345';
+        |  DECLARE TEST_CONDITION_2 CONDITION FOR SQLSTATE '54321';
+        |  DECLARE CONTINUE HANDLER FOR SQLSTATE '22012', TEST_CONDITION_1, test_condition_2
+        |    SET test_var = 1;
+        |END""".stripMargin
+    val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody]
+    assert(tree.handlers.length == 1)
+    assert(tree.handlers.head.isInstanceOf[ExceptionHandler])
+    assert(tree.handlers.head.exceptionHandlerTriggers.conditions.size == 2)
+    assert(tree.handlers.head.exceptionHandlerTriggers.conditions.contains("TEST_CONDITION_1"))
+    assert(tree.handlers.head.exceptionHandlerTriggers.conditions.contains("TEST_CONDITION_2"))
+    assert(tree.handlers.head.exceptionHandlerTriggers.sqlStates.size == 1)
+    assert(tree.handlers.head.exceptionHandlerTriggers.sqlStates.contains("22012"))
+    assert(tree.handlers.head.body.collection.size == 1)
+    assert(tree.handlers.head.body.collection.head.isInstanceOf[SingleStatement])
+    assert(tree.handlers.head.body.collection.head.asInstanceOf[SingleStatement]
+      .parsedPlan.isInstanceOf[SetVariable])
+  }
+
+  test("declare exit handler for SQLEXCEPTION") {
     val sqlScriptText =
       """
         |BEGIN
@@ -3091,7 +3315,23 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
     assert(tree.handlers.head.body.collection.size == 1)
   }
 
-  test("declare handler for NOT FOUND") {
+  test("declare continue handler for SQLEXCEPTION") {
+    val sqlScriptText =
+      """
+        |BEGIN
+        |  DECLARE CONTINUE HANDLER FOR SQLEXCEPTION SET test_var = 1;
+        |END""".stripMargin
+    val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody]
+    assert(tree.handlers.length == 1)
+    assert(tree.handlers.head.isInstanceOf[ExceptionHandler])
+    assert(tree.handlers.head.exceptionHandlerTriggers.conditions.isEmpty)
+    assert(tree.handlers.head.exceptionHandlerTriggers.sqlStates.isEmpty)
+    assert(tree.handlers.head.exceptionHandlerTriggers.sqlException) // true
+    assert(!tree.handlers.head.exceptionHandlerTriggers.notFound) // false
+    assert(tree.handlers.head.body.collection.size == 1)
+  }
+
+  test("declare exit handler for NOT FOUND") {
     val sqlScriptText =
       """
         |BEGIN
@@ -3107,7 +3347,23 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
     assert(tree.handlers.head.body.collection.size == 1)
   }
 
-  test("declare handler with condition and sqlstate with same value") {
+  test("declare continue handler for NOT FOUND") {
+    val sqlScriptText =
+      """
+        |BEGIN
+        |  DECLARE CONTINUE HANDLER FOR NOT FOUND SET test_var = 1;
+        |END""".stripMargin
+    val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody]
+    assert(tree.handlers.length == 1)
+    assert(tree.handlers.head.isInstanceOf[ExceptionHandler])
+    assert(tree.handlers.head.exceptionHandlerTriggers.conditions.isEmpty)
+    assert(tree.handlers.head.exceptionHandlerTriggers.sqlStates.isEmpty)
+    assert(!tree.handlers.head.exceptionHandlerTriggers.sqlException) // false
+    assert(tree.handlers.head.exceptionHandlerTriggers.notFound) // true
+    assert(tree.handlers.head.body.collection.size == 1)
+  }
+
+  test("declare exit handler with condition and sqlstate with same value") {
     val sqlScriptText =
       """
         |BEGIN
@@ -3126,7 +3382,26 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
     assert(tree.handlers.head.body.collection.size == 1)
   }
 
-  test("declare handler for condition in parent scope") {
+  test("declare continue handler with condition and sqlstate with same value") 
{
+    val sqlScriptText =
+      """
+        |BEGIN
+        |  DECLARE K2000 CONDITION FOR SQLSTATE '12345';
+        |  DECLARE CONTINUE HANDLER FOR K2000, SQLSTATE VALUE 'K2000' SET test_var = 1;
+        |END""".stripMargin
+    val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody]
+    assert(tree.handlers.length == 1)
+    assert(tree.handlers.head.isInstanceOf[ExceptionHandler])
+    assert(tree.handlers.head.exceptionHandlerTriggers.conditions.size == 1)
+    assert(tree.handlers.head.exceptionHandlerTriggers.conditions.contains("K2000"))
+    assert(tree.handlers.head.exceptionHandlerTriggers.sqlStates.size == 1)
+    assert(tree.handlers.head.exceptionHandlerTriggers.sqlStates.contains("K2000"))
+    assert(!tree.handlers.head.exceptionHandlerTriggers.sqlException) // false
+    assert(!tree.handlers.head.exceptionHandlerTriggers.notFound) // false
+    assert(tree.handlers.head.body.collection.size == 1)
+  }
+
+  test("declare exit handler for condition in parent scope") {
     val sqlScriptText =
       """
         |BEGIN
@@ -3144,7 +3419,25 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
     assert(handlerBody.handlers.head.body.collection.size == 1)
   }
 
-  test("declare nested handler for condition in parent scope of parent 
handler") {
+  test("declare continue handler for condition in parent scope") {
+    val sqlScriptText =
+      """
+        |BEGIN
+        |  DECLARE TEST_CONDITION CONDITION FOR SQLSTATE '12345';
+        |  BEGIN
+        |    DECLARE CONTINUE HANDLER FOR TEST_CONDITION SET test_var = 1;
+        |  END;
+        |END""".stripMargin
+    val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody]
+    val handlerBody = tree.collection.head.asInstanceOf[CompoundBody]
+    assert(handlerBody.handlers.length == 1)
+    assert(handlerBody.handlers.head.isInstanceOf[ExceptionHandler])
+    assert(handlerBody.handlers.head.exceptionHandlerTriggers.conditions.size == 1)
+    assert(handlerBody.handlers.head.exceptionHandlerTriggers.conditions.contains("TEST_CONDITION"))
+    assert(handlerBody.handlers.head.body.collection.size == 1)
+  }
+
+  test("declare nested exit handler for condition in parent scope of parent 
handler") {
     val sqlScriptText =
       """
         |BEGIN
@@ -3164,6 +3457,26 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper {
     assert(handlerBody.exceptionHandlerTriggers.conditions.contains("TEST_CONDITION"))
   }
 
+  test("declare nested continue handler for condition in parent scope of 
parent handler") {
+    val sqlScriptText =
+      """
+        |BEGIN
+        |  DECLARE TEST_CONDITION CONDITION FOR SQLSTATE '12345';
+        |  BEGIN
+        |    DECLARE CONTINUE HANDLER FOR DIVIDE_BY_ZERO
+        |      BEGIN
+        |        DECLARE CONTINUE HANDLER FOR TEST_CONDITION SET test_var = 1;
+        |      END;
+        |  END;
+        |END""".stripMargin
+    val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody]
+    val handlerBody = tree
+      .collection.head.asInstanceOf[CompoundBody]
+      .handlers.head.body.asInstanceOf[CompoundBody]
+      .handlers.head
+    assert(handlerBody.exceptionHandlerTriggers.conditions.contains("TEST_CONDITION"))
+  }
+
   // Helper methods
   def cleanupStatementString(statementStr: String): String = {
     statementStr
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala
index 684a5a72e6d8..6671b52381c2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala
@@ -38,6 +38,16 @@ class SqlScriptingInterpreterSuite
     with SharedSparkSession
     with SqlScriptingTestUtils {
 
+  protected override def beforeAll(): Unit = {
+    super.beforeAll()
+    conf.setConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED, true)
+  }
+
+  protected override def afterAll(): Unit = {
+    conf.unsetConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED.key)
+    super.afterAll()
+  }
+
   // Helpers
   private def runSqlScript(
       sqlText: String,
@@ -3467,7 +3477,7 @@ class SqlScriptingInterpreterSuite
     }
   }
 
-  test("Duplicate SQLEXCEPTION Handler") {
+  test("Duplicate SQLEXCEPTION EXIT/EXIT Handler") {
     val sqlScript =
       """
         |BEGIN
@@ -3490,7 +3500,7 @@ class SqlScriptingInterpreterSuite
     )
   }
 
-  test("Duplicate NOT FOUND Handler") {
+  test("Duplicate NOT FOUND EXIT/EXIT Handler") {
     val sqlScript =
       """
         |BEGIN
@@ -3511,4 +3521,139 @@ class SqlScriptingInterpreterSuite
       parameters = Map("condition" -> "NOT FOUND")
     )
   }
+
+  test("Duplicate SQLEXCEPTION CONTINUE/CONTINUE Handler") {
+    val sqlScript =
+      """
+        |BEGIN
+        |  DECLARE CONTINUE HANDLER FOR SQLEXCEPTION
+        |  BEGIN
+        |    SELECT 1;
+        |  END;
+        |  DECLARE CONTINUE HANDLER FOR SQLEXCEPTION
+        |  BEGIN
+        |    SELECT 2;
+        |  END;
+        |
+        |END""".stripMargin
+    checkError(
+      exception = intercept[SqlScriptingException] {
+        runSqlScript(sqlScript)
+      },
+      condition = "DUPLICATE_EXCEPTION_HANDLER.CONDITION",
+      parameters = Map("condition" -> "SQLEXCEPTION")
+    )
+  }
+
+  test("Duplicate NOT FOUND CONTINUE/CONTINUE Handler") {
+    val sqlScript =
+      """
+        |BEGIN
+        |  DECLARE CONTINUE HANDLER FOR NOT FOUND
+        |  BEGIN
+        |    SELECT 1;
+        |  END;
+        |  DECLARE CONTINUE HANDLER FOR NOT FOUND
+        |  BEGIN
+        |    SELECT 2;
+        |  END;
+        |END""".stripMargin
+    checkError(
+      exception = intercept[SqlScriptingException] {
+        runSqlScript(sqlScript)
+      },
+      condition = "DUPLICATE_EXCEPTION_HANDLER.CONDITION",
+      parameters = Map("condition" -> "NOT FOUND")
+    )
+  }
+
+  test("Duplicate SQLEXCEPTION EXIT/CONTINUE Handler") {
+    val sqlScript =
+      """
+        |BEGIN
+        |  DECLARE EXIT HANDLER FOR SQLEXCEPTION
+        |  BEGIN
+        |    SELECT 1;
+        |  END;
+        |  DECLARE CONTINUE HANDLER FOR SQLEXCEPTION
+        |  BEGIN
+        |    SELECT 2;
+        |  END;
+        |
+        |END""".stripMargin
+    checkError(
+      exception = intercept[SqlScriptingException] {
+        runSqlScript(sqlScript)
+      },
+      condition = "DUPLICATE_EXCEPTION_HANDLER.CONDITION",
+      parameters = Map("condition" -> "SQLEXCEPTION")
+    )
+  }
+
+  test("Duplicate NOT FOUND EXIT/CONTINUE Handler") {
+    val sqlScript =
+      """
+        |BEGIN
+        |  DECLARE EXIT HANDLER FOR NOT FOUND
+        |  BEGIN
+        |    SELECT 1;
+        |  END;
+        |  DECLARE CONTINUE HANDLER FOR NOT FOUND
+        |  BEGIN
+        |    SELECT 2;
+        |  END;
+        |END""".stripMargin
+    checkError(
+      exception = intercept[SqlScriptingException] {
+        runSqlScript(sqlScript)
+      },
+      condition = "DUPLICATE_EXCEPTION_HANDLER.CONDITION",
+      parameters = Map("condition" -> "NOT FOUND")
+    )
+  }
+
+  test("Duplicate SQLEXCEPTION CONTINUE/EXIT Handler") {
+    val sqlScript =
+      """
+        |BEGIN
+        |  DECLARE CONTINUE HANDLER FOR SQLEXCEPTION
+        |  BEGIN
+        |    SELECT 1;
+        |  END;
+        |  DECLARE EXIT HANDLER FOR SQLEXCEPTION
+        |  BEGIN
+        |    SELECT 2;
+        |  END;
+        |
+        |END""".stripMargin
+    checkError(
+      exception = intercept[SqlScriptingException] {
+        runSqlScript(sqlScript)
+      },
+      condition = "DUPLICATE_EXCEPTION_HANDLER.CONDITION",
+      parameters = Map("condition" -> "SQLEXCEPTION")
+    )
+  }
+
+  test("Duplicate NOT FOUND CONTINUE/EXIT Handler") {
+    val sqlScript =
+      """
+        |BEGIN
+        |  DECLARE CONTINUE HANDLER FOR NOT FOUND
+        |  BEGIN
+        |    SELECT 1;
+        |  END;
+        |  DECLARE EXIT HANDLER FOR NOT FOUND
+        |  BEGIN
+        |    SELECT 2;
+        |  END;
+        |END""".stripMargin
+    checkError(
+      exception = intercept[SqlScriptingException] {
+        runSqlScript(sqlScript)
+      },
+      condition = "DUPLICATE_EXCEPTION_HANDLER.CONDITION",
+      parameters = Map("condition" -> "NOT FOUND")
+    )
+  }
 }

