This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f291125ae08 [SPARK-40324][SQL] Provide a query context of 
`ParseException`
f291125ae08 is described below

commit f291125ae08ff98fed9106c4273349ceb7966697
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Tue Sep 6 12:18:08 2022 +0300

    [SPARK-40324][SQL] Provide a query context of `ParseException`
    
    ### What changes were proposed in this pull request?
    In the PR, I propose to extend `ParseException` and add new field 
`queryContext` which contains an array of query contexts. By default, the field 
is initialized by the context of currently processed tree node.
    
    Also I removed the wrapper `validateParsingError()` around `checkError()`
    to use the latter directly.
    
    ### Why are the changes needed?
    To improve user experience with Spark SQL by providing more context of 
parsing errors.
    
    ### Does this PR introduce _any_ user-facing change?
    No, it extends existing functionality.
    
    ### How was this patch tested?
    By running the affected test suites:
    ```
    $ build/sbt "sql/testOnly *QueryParsingErrorsSuite"
    $ build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite"
    $ build/sbt -Phive -Phive-thriftserver "test:testOnly 
*TruncateTableParserSuite"
    $ build/sbt -Phive-2.3 -Phive-thriftserver "test:testOnly 
*ShowPartitionsParserSuite"
    ```
    
    Closes #37794 from MaxGekk/query-context-parse-exception.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../scala/org/apache/spark/SparkFunSuite.scala     |  17 +
 .../spark/sql/catalyst/parser/ParseDriver.scala    |  30 +-
 .../spark/sql/catalyst/trees/SQLQueryContext.scala |   2 +-
 .../resources/sql-tests/results/describe.sql.out   |   9 +-
 .../sql-tests/results/join-lateral.sql.out         |  18 +-
 .../resources/sql-tests/results/transform.sql.out  |  18 +-
 .../resources/sql-tests/results/window.sql.out     |   9 +-
 .../spark/sql/errors/QueryErrorsSuiteBase.scala    |  15 -
 .../spark/sql/errors/QueryParsingErrorsSuite.scala | 468 +++++++++++++--------
 .../command/ShowPartitionsParserSuite.scala        |   9 +-
 .../command/TruncateTableParserSuite.scala         |   6 +-
 11 files changed, 398 insertions(+), 203 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala 
b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
index 6e44645379b..c15ebab511b 100644
--- a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
@@ -362,6 +362,23 @@ abstract class SparkFunSuite
       context: QueryContext): Unit =
     checkError(exception, errorClass, None, None, parameters, false, 
Array(context))
 
+  protected def checkError(
+      exception: SparkThrowable,
+      errorClass: String,
+      errorSubClass: String,
+      sqlState: String,
+      context: QueryContext): Unit =
+    checkError(exception, errorClass, Some(errorSubClass), None, Map.empty, 
false, Array(context))
+
+  protected def checkError(
+      exception: SparkThrowable,
+      errorClass: String,
+      errorSubClass: String,
+      sqlState: String,
+      parameters: Map[String, String],
+      context: QueryContext): Unit =
+    checkError(exception, errorClass, Some(errorSubClass), None, parameters, 
false, Array(context))
+
   class LogAppender(msg: String = "", maxEvents: Int = 1000)
       extends AbstractAppender("logAppender", null, null, true, 
Property.EMPTY_ARRAY) {
     private val _loggingEvents = new ArrayBuffer[LogEvent]()
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala
index 76757f9fc21..5eaef76a747 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala
@@ -21,14 +21,14 @@ import org.antlr.v4.runtime.atn.PredictionMode
 import org.antlr.v4.runtime.misc.{Interval, ParseCancellationException}
 import org.antlr.v4.runtime.tree.TerminalNodeImpl
 
-import org.apache.spark.SparkThrowableHelper
+import org.apache.spark.{QueryContext, SparkThrowableHelper}
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.{FunctionIdentifier, SQLConfHelper, 
TableIdentifier}
 import org.apache.spark.sql.catalyst.expressions.Expression
 import org.apache.spark.sql.catalyst.parser.ParserUtils.withOrigin
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.catalyst.trees.Origin
+import org.apache.spark.sql.catalyst.trees.{CurrentOrigin, Origin}
 import org.apache.spark.sql.errors.QueryParsingErrors
 import org.apache.spark.sql.types.{DataType, StructType}
 
@@ -238,7 +238,8 @@ class ParseException(
     val stop: Origin,
     errorClass: Option[String] = None,
     errorSubClass: Option[String] = None,
-    messageParameters: Array[String] = Array.empty)
+    messageParameters: Array[String] = Array.empty,
+    queryContext: Array[QueryContext] = ParseException.getQueryContext())
   extends AnalysisException(
     message,
     start.line,
@@ -315,12 +316,23 @@ class ParseException(
   }
 
   def withCommand(cmd: String): ParseException = {
-    // PARSE_EMPTY_STATEMENT error class overrides the PARSE_SYNTAX_ERROR when 
cmd is empty
-    if (cmd.trim().isEmpty && errorClass.isDefined && errorClass.get == 
"PARSE_SYNTAX_ERROR") {
-      new ParseException(Option(cmd), start, stop, "PARSE_EMPTY_STATEMENT", 
Array[String]())
-    } else {
-      new ParseException(Option(cmd), message, start, stop, errorClass, None, 
messageParameters)
-    }
+    val (cls, subCls, params) =
+      if (errorClass == Some("PARSE_SYNTAX_ERROR") && cmd.trim().isEmpty) {
+        // PARSE_EMPTY_STATEMENT error class overrides the PARSE_SYNTAX_ERROR 
when cmd is empty
+        (Some("PARSE_EMPTY_STATEMENT"), None, Array[String]())
+      } else {
+        (errorClass, errorSubClass, messageParameters)
+      }
+    new ParseException(Option(cmd), message, start, stop, cls, subCls, params, 
queryContext)
+  }
+
+  override def getQueryContext: Array[QueryContext] = queryContext
+}
+
+object ParseException {
+  def getQueryContext(): Array[QueryContext] = {
+    val context = CurrentOrigin.get.context
+    if (context.isValid) Array(context) else Array.empty
   }
 }
 
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/SQLQueryContext.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/SQLQueryContext.scala
index 2f6b0f885a0..99889cf7dae 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/SQLQueryContext.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/SQLQueryContext.scala
@@ -124,7 +124,7 @@ case class SQLQueryContext(
     }
   }
 
-  private def isValid: Boolean = {
+  def isValid: Boolean = {
     sqlText.isDefined && originStartIndex.isDefined && 
originStopIndex.isDefined &&
       originStartIndex.get >= 0 && originStopIndex.get < sqlText.get.length &&
       originStartIndex.get <= originStopIndex.get
diff --git a/sql/core/src/test/resources/sql-tests/results/describe.sql.out 
b/sql/core/src/test/resources/sql-tests/results/describe.sql.out
index 5cbc80d932e..45df378495c 100644
--- a/sql/core/src/test/resources/sql-tests/results/describe.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/describe.sql.out
@@ -387,7 +387,14 @@ org.apache.spark.sql.catalyst.parser.ParseException
   "sqlState" : "42000",
   "messageParameters" : {
     "inputString" : "PARTITION specification is incomplete: `d`"
-  }
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 28,
+    "fragment" : "DESC t PARTITION (c='Us', d)"
+  } ]
 }
 
 
diff --git a/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out 
b/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out
index 12982778565..eee62aba35f 100644
--- a/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out
@@ -152,7 +152,14 @@ org.apache.spark.sql.catalyst.parser.ParseException
 {
   "errorClass" : "UNSUPPORTED_FEATURE",
   "errorSubClass" : "LATERAL_NATURAL_JOIN",
-  "sqlState" : "0A000"
+  "sqlState" : "0A000",
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 18,
+    "stopIndex" : 60,
+    "fragment" : "NATURAL JOIN LATERAL (SELECT c1 + c2 AS c2)"
+  } ]
 }
 
 
@@ -165,7 +172,14 @@ org.apache.spark.sql.catalyst.parser.ParseException
 {
   "errorClass" : "UNSUPPORTED_FEATURE",
   "errorSubClass" : "LATERAL_JOIN_USING",
-  "sqlState" : "0A000"
+  "sqlState" : "0A000",
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 18,
+    "stopIndex" : 63,
+    "fragment" : "JOIN LATERAL (SELECT c1 + c2 AS c2) USING (c2)"
+  } ]
 }
 
 
diff --git a/sql/core/src/test/resources/sql-tests/results/transform.sql.out 
b/sql/core/src/test/resources/sql-tests/results/transform.sql.out
index 4b6c12e992f..190e8a4cd68 100644
--- a/sql/core/src/test/resources/sql-tests/results/transform.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/transform.sql.out
@@ -718,7 +718,14 @@ org.apache.spark.sql.catalyst.parser.ParseException
 {
   "errorClass" : "UNSUPPORTED_FEATURE",
   "errorSubClass" : "TRANSFORM_DISTINCT_ALL",
-  "sqlState" : "0A000"
+  "sqlState" : "0A000",
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 92,
+    "fragment" : "SELECT TRANSFORM(DISTINCT b, a, c)\n  USING 'cat' AS (a, b, 
c)\nFROM script_trans\nWHERE a <= 4"
+  } ]
 }
 
 
@@ -734,7 +741,14 @@ org.apache.spark.sql.catalyst.parser.ParseException
 {
   "errorClass" : "UNSUPPORTED_FEATURE",
   "errorSubClass" : "TRANSFORM_DISTINCT_ALL",
-  "sqlState" : "0A000"
+  "sqlState" : "0A000",
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 1,
+    "stopIndex" : 87,
+    "fragment" : "SELECT TRANSFORM(ALL b, a, c)\n  USING 'cat' AS (a, b, 
c)\nFROM script_trans\nWHERE a <= 4"
+  } ]
 }
 
 
diff --git a/sql/core/src/test/resources/sql-tests/results/window.sql.out 
b/sql/core/src/test/resources/sql-tests/results/window.sql.out
index dd7a159936e..59345a4773f 100644
--- a/sql/core/src/test/resources/sql-tests/results/window.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/window.sql.out
@@ -915,7 +915,14 @@ org.apache.spark.sql.catalyst.parser.ParseException
   "sqlState" : "42000",
   "messageParameters" : {
     "inputString" : "The definition of window `w` is repetitive."
-  }
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 226,
+    "stopIndex" : 394,
+    "fragment" : "WINDOW\n    w AS (ORDER BY salary DESC ROWS BETWEEN 
UNBOUNDED PRECEDING AND 1 FOLLOWING),\n    w AS (ORDER BY salary DESC ROWS 
BETWEEN UNBOUNDED PRECEDING AND 2 FOLLOWING)"
+  } ]
 }
 
 
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryErrorsSuiteBase.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryErrorsSuiteBase.scala
index 525771f3038..02463f9e65a 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryErrorsSuiteBase.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryErrorsSuiteBase.scala
@@ -18,25 +18,10 @@
 package org.apache.spark.sql.errors
 
 import org.apache.spark.QueryContext
-import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.test.SharedSparkSession
 
 trait QueryErrorsSuiteBase extends SharedSparkSession {
 
-  def validateParsingError(
-      sqlText: String,
-      errorClass: String,
-      errorSubClass: Option[String] = None,
-      sqlState: String,
-      parameters: Map[String, String] = Map.empty): Unit = {
-    checkError(
-      exception = intercept[ParseException](sql(sqlText)),
-      errorClass = errorClass,
-      errorSubClass = errorSubClass,
-      sqlState = Some(sqlState),
-      parameters = parameters)
-  }
-
   case class ExpectedContext(
       objectType: String,
       objectName: String,
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
index e9379b461ec..96ebf3ccf0b 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
@@ -17,287 +17,383 @@
 
 package org.apache.spark.sql.errors
 
+import org.apache.spark.SparkThrowable
 import org.apache.spark.sql.QueryTest
+import org.apache.spark.sql.catalyst.parser.ParseException
 
 // Turn of the length check because most of the tests check entire error 
messages
 // scalastyle:off line.size.limit
 class QueryParsingErrorsSuite extends QueryTest with QueryErrorsSuiteBase {
 
+  private def parseException(sqlText: String): SparkThrowable = {
+    intercept[ParseException](sql(sqlText).collect())
+  }
+
   test("UNSUPPORTED_FEATURE: LATERAL join with NATURAL join not supported") {
-    validateParsingError(
-      sqlText = "SELECT * FROM t1 NATURAL JOIN LATERAL (SELECT c1 + c2 AS c2)",
+    checkError(
+      exception = parseException("SELECT * FROM t1 NATURAL JOIN LATERAL 
(SELECT c1 + c2 AS c2)"),
       errorClass = "UNSUPPORTED_FEATURE",
-      errorSubClass = Some("LATERAL_NATURAL_JOIN"),
-      sqlState = "0A000")
+      errorSubClass = "LATERAL_NATURAL_JOIN",
+      sqlState = "0A000",
+      context = ExpectedContext(
+        fragment = "NATURAL JOIN LATERAL (SELECT c1 + c2 AS c2)",
+        start = 17,
+        stop = 59))
   }
 
   test("UNSUPPORTED_FEATURE: LATERAL join with USING join not supported") {
-    validateParsingError(
-      sqlText = "SELECT * FROM t1 JOIN LATERAL (SELECT c1 + c2 AS c2) USING 
(c2)",
+    checkError(
+      exception = parseException("SELECT * FROM t1 JOIN LATERAL (SELECT c1 + 
c2 AS c2) USING (c2)"),
       errorClass = "UNSUPPORTED_FEATURE",
-      errorSubClass = Some("LATERAL_JOIN_USING"),
-      sqlState = "0A000")
+      errorSubClass = "LATERAL_JOIN_USING",
+      sqlState = "0A000",
+      context = ExpectedContext(
+        fragment = "JOIN LATERAL (SELECT c1 + c2 AS c2) USING (c2)",
+        start = 17,
+        stop = 62))
   }
 
   test("UNSUPPORTED_FEATURE: Unsupported LATERAL join type") {
-    Seq("RIGHT OUTER", "FULL OUTER", "LEFT SEMI", "LEFT ANTI").foreach { 
joinType =>
-      validateParsingError(
-        sqlText = s"SELECT * FROM t1 $joinType JOIN LATERAL (SELECT c1 + c2 AS 
c3) ON c2 = c3",
+    Seq(
+      "RIGHT OUTER" -> (17, 74),
+      "FULL OUTER" -> (17, 73),
+      "LEFT SEMI" -> (17, 72),
+      "LEFT ANTI" -> (17, 72)).foreach { case (joinType, (start, stop)) =>
+      checkError(
+        exception = parseException(s"SELECT * FROM t1 $joinType JOIN LATERAL 
(SELECT c1 + c2 AS c3) ON c2 = c3"),
         errorClass = "UNSUPPORTED_FEATURE",
-        errorSubClass = Some("LATERAL_JOIN_OF_TYPE"),
+        errorSubClass = "LATERAL_JOIN_OF_TYPE",
         sqlState = "0A000",
-        parameters = Map("joinType" -> joinType))
+        parameters = Map("joinType" -> joinType),
+        context = ExpectedContext(
+          fragment = s"$joinType JOIN LATERAL (SELECT c1 + c2 AS c3) ON c2 = 
c3",
+          start = start,
+          stop = stop))
     }
   }
 
   test("INVALID_SQL_SYNTAX: LATERAL can only be used with subquery") {
     Seq(
-      "SELECT * FROM t1, LATERAL t2" -> 26,
-      "SELECT * FROM t1 JOIN LATERAL t2" -> 30,
-      "SELECT * FROM t1, LATERAL (t2 JOIN t3)" -> 26,
-      "SELECT * FROM t1, LATERAL (LATERAL t2)" -> 26,
-      "SELECT * FROM t1, LATERAL VALUES (0, 1)" -> 26,
-      "SELECT * FROM t1, LATERAL RANGE(0, 1)" -> 26
-    ).foreach { case (sqlText, pos) =>
-      validateParsingError(
-        sqlText = sqlText,
+      ", LATERAL t2" -> ("FROM t1, LATERAL t2", 9, 27),
+      " JOIN LATERAL t2" -> ("JOIN LATERAL t2", 17, 31),
+      ", LATERAL (t2 JOIN t3)" -> ("FROM t1, LATERAL (t2 JOIN t3)", 9, 37),
+      ", LATERAL (LATERAL t2)" -> ("FROM t1, LATERAL (LATERAL t2)", 9, 37),
+      ", LATERAL VALUES (0, 1)" -> ("FROM t1, LATERAL VALUES (0, 1)", 9, 38),
+      ", LATERAL RANGE(0, 1)" -> ("FROM t1, LATERAL RANGE(0, 1)", 9, 36)
+    ).foreach { case (sqlText, (fragment, start, stop)) =>
+      checkError(
+        exception = parseException(s"SELECT * FROM t1$sqlText"),
         errorClass = "INVALID_SQL_SYNTAX",
         sqlState = "42000",
-        parameters = Map("inputString" -> "LATERAL can only be used with 
subquery."))
+        parameters = Map("inputString" -> "LATERAL can only be used with 
subquery."),
+        context = ExpectedContext(fragment, start, stop))
     }
   }
 
   test("UNSUPPORTED_FEATURE: NATURAL CROSS JOIN is not supported") {
-    validateParsingError(
-      sqlText = "SELECT * FROM a NATURAL CROSS JOIN b",
+    checkError(
+      exception = parseException("SELECT * FROM a NATURAL CROSS JOIN b"),
       errorClass = "UNSUPPORTED_FEATURE",
-      errorSubClass = Some("NATURAL_CROSS_JOIN"),
-      sqlState = "0A000")
+      errorSubClass = "NATURAL_CROSS_JOIN",
+      sqlState = "0A000",
+      context = ExpectedContext(
+        fragment = "NATURAL CROSS JOIN b",
+        start = 16,
+        stop = 35))
   }
 
   test("INVALID_SQL_SYNTAX: redefine window") {
-    validateParsingError(
-      sqlText = "SELECT min(a) OVER win FROM t1 WINDOW win AS win, win AS 
win2",
+    checkError(
+      exception = parseException("SELECT min(a) OVER win FROM t1 WINDOW win AS 
win, win AS win2"),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
-      parameters = Map("inputString" -> "The definition of window `win` is 
repetitive."))
+      parameters = Map("inputString" -> "The definition of window `win` is 
repetitive."),
+      context = ExpectedContext(
+        fragment = "WINDOW win AS win, win AS win2",
+        start = 31,
+        stop = 60))
   }
 
   test("INVALID_SQL_SYNTAX: invalid window reference") {
-    validateParsingError(
-      sqlText = "SELECT min(a) OVER win FROM t1 WINDOW win AS win",
+    checkError(
+      exception = parseException("SELECT min(a) OVER win FROM t1 WINDOW win AS 
win"),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
-      parameters = Map("inputString" -> "Window reference `win` is not a 
window specification."))
+      parameters = Map("inputString" -> "Window reference `win` is not a 
window specification."),
+      context = ExpectedContext(
+        fragment = "WINDOW win AS win",
+        start = 31,
+        stop = 47))
   }
 
   test("INVALID_SQL_SYNTAX: window reference cannot be resolved") {
-    validateParsingError(
-      sqlText = "SELECT min(a) OVER win FROM t1 WINDOW win AS win2",
+    checkError(
+      exception = parseException("SELECT min(a) OVER win FROM t1 WINDOW win AS 
win2"),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
-      parameters = Map("inputString" -> "Cannot resolve window reference 
`win2`."))
+      parameters = Map("inputString" -> "Cannot resolve window reference 
`win2`."),
+      context = ExpectedContext(
+        fragment = "WINDOW win AS win2",
+        start = 31,
+        stop = 48))
   }
 
   test("UNSUPPORTED_FEATURE: TRANSFORM does not support DISTINCT/ALL") {
-    validateParsingError(
-      sqlText = "SELECT TRANSFORM(DISTINCT a) USING 'a' FROM t",
+    val sqlText = "SELECT TRANSFORM(DISTINCT a) USING 'a' FROM t"
+    checkError(
+      exception = parseException(sqlText),
       errorClass = "UNSUPPORTED_FEATURE",
-      errorSubClass = Some("TRANSFORM_DISTINCT_ALL"),
-      sqlState = "0A000")
+      errorSubClass = "TRANSFORM_DISTINCT_ALL",
+      sqlState = "0A000",
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 44))
   }
 
   test("UNSUPPORTED_FEATURE: In-memory mode does not support TRANSFORM with 
serde") {
-    validateParsingError(
-      sqlText = "SELECT TRANSFORM(a) ROW FORMAT SERDE " +
-        "'org.apache.hadoop.hive.serde2.OpenCSVSerde' USING 'a' FROM t",
+    val sqlText = "SELECT TRANSFORM(a) ROW FORMAT SERDE " +
+      "'org.apache.hadoop.hive.serde2.OpenCSVSerde' USING 'a' FROM t"
+    checkError(
+      exception = parseException(sqlText),
       errorClass = "UNSUPPORTED_FEATURE",
-      errorSubClass = Some("TRANSFORM_NON_HIVE"),
-      sqlState = "0A000")
+      errorSubClass = "TRANSFORM_NON_HIVE",
+      sqlState = "0A000",
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 97))
   }
 
   test("INVALID_SQL_SYNTAX: Too many arguments for transform") {
-    validateParsingError(
-      sqlText = "CREATE TABLE table(col int) PARTITIONED BY (years(col,col))",
+    checkError(
+      exception = parseException("CREATE TABLE table(col int) PARTITIONED BY 
(years(col,col))"),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
-      parameters = Map("inputString" -> "Too many arguments for transform 
`years`"))
+      parameters = Map("inputString" -> "Too many arguments for transform 
`years`"),
+      context = ExpectedContext(
+        fragment = "years(col,col)",
+        start = 44,
+        stop = 57))
   }
 
   test("INVALID_SQL_SYNTAX: Invalid table value function name") {
-    validateParsingError(
-      sqlText = "SELECT * FROM db.func()",
+    checkError(
+      exception = parseException("SELECT * FROM db.func()"),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
-      parameters = Map("inputString" -> "table valued function cannot specify 
database name "))
-
-    validateParsingError(
-      sqlText = "SELECT * FROM ns.db.func()",
+      parameters = Map("inputString" -> "table valued function cannot specify 
database name "),
+      context = ExpectedContext(
+        fragment = "db.func()",
+        start = 14,
+        stop = 22))
+
+    checkError(
+      exception = parseException("SELECT * FROM ns.db.func()"),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
-      parameters = Map("inputString" -> "table valued function cannot specify 
database name "))
+      parameters = Map("inputString" -> "table valued function cannot specify 
database name "),
+      context = ExpectedContext(
+        fragment = "ns.db.func()",
+        start = 14,
+        stop = 25))
   }
 
   test("INVALID_SQL_SYNTAX: Invalid scope in show functions") {
-    validateParsingError(
-      sqlText = "SHOW sys FUNCTIONS",
+    val sqlText = "SHOW sys FUNCTIONS"
+    checkError(
+      exception = parseException(sqlText),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
-      parameters = Map("inputString" -> "SHOW `sys` FUNCTIONS not supported"))
+      parameters = Map("inputString" -> "SHOW `sys` FUNCTIONS not supported"),
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 17))
   }
 
   test("INVALID_SQL_SYNTAX: Invalid pattern in show functions") {
-    validateParsingError(
-      sqlText = "SHOW FUNCTIONS IN db f1",
+    val sqlText1 = "SHOW FUNCTIONS IN db f1"
+    checkError(
+      exception = parseException(sqlText1),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
       parameters = Map("inputString" ->
-        "Invalid pattern in SHOW FUNCTIONS: `f1`. It must be a \"STRING\" 
literal."))
-    validateParsingError(
-      sqlText = "SHOW FUNCTIONS IN db LIKE f1",
+        "Invalid pattern in SHOW FUNCTIONS: `f1`. It must be a \"STRING\" 
literal."),
+      context = ExpectedContext(
+        fragment = sqlText1,
+        start = 0,
+        stop = 22))
+    val sqlText2 = "SHOW FUNCTIONS IN db LIKE f1"
+    checkError(
+      exception = parseException(sqlText2),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
       parameters = Map("inputString" ->
-        "Invalid pattern in SHOW FUNCTIONS: `f1`. It must be a \"STRING\" 
literal."))
+        "Invalid pattern in SHOW FUNCTIONS: `f1`. It must be a \"STRING\" 
literal."),
+      context = ExpectedContext(
+        fragment = sqlText2,
+        start = 0,
+        stop = 27))
   }
 
   test("INVALID_SQL_SYNTAX: Create function with both if not exists and 
replace") {
     val sqlText =
-      """
-        |CREATE OR REPLACE FUNCTION IF NOT EXISTS func1 as
+      """CREATE OR REPLACE FUNCTION IF NOT EXISTS func1 as
         |'com.matthewrathbone.example.SimpleUDFExample' USING JAR 
'/path/to/jar1',
-        |JAR '/path/to/jar2'
-        |""".stripMargin
+        |JAR '/path/to/jar2'""".stripMargin
 
-    validateParsingError(
-      sqlText = sqlText,
+    checkError(
+      exception = parseException(sqlText),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
       parameters = Map("inputString" ->
-        "CREATE FUNCTION with both IF NOT EXISTS and REPLACE is not allowed."))
+        "CREATE FUNCTION with both IF NOT EXISTS and REPLACE is not allowed."),
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 142))
   }
 
   test("INVALID_SQL_SYNTAX: Create temporary function with if not exists") {
     val sqlText =
-      """
-        |CREATE TEMPORARY FUNCTION IF NOT EXISTS func1 as
+      """CREATE TEMPORARY FUNCTION IF NOT EXISTS func1 as
         |'com.matthewrathbone.example.SimpleUDFExample' USING JAR 
'/path/to/jar1',
-        |JAR '/path/to/jar2'
-        |""".stripMargin
+        |JAR '/path/to/jar2'""".stripMargin
 
-    validateParsingError(
-      sqlText = sqlText,
+    checkError(
+      exception = parseException(sqlText),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
       parameters = Map("inputString" ->
-        "It is not allowed to define a TEMPORARY FUNCTION with IF NOT 
EXISTS."))
+        "It is not allowed to define a TEMPORARY FUNCTION with IF NOT 
EXISTS."),
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 141))
   }
 
   test("INVALID_SQL_SYNTAX: Create temporary function with multi-part name") {
     val sqlText =
-      """
-        |CREATE TEMPORARY FUNCTION ns.db.func as
+      """CREATE TEMPORARY FUNCTION ns.db.func as
         |'com.matthewrathbone.example.SimpleUDFExample' USING JAR 
'/path/to/jar1',
-        |JAR '/path/to/jar2'
-        |""".stripMargin
+        |JAR '/path/to/jar2'""".stripMargin
 
-    validateParsingError(
-      sqlText = sqlText,
+    checkError(
+      exception = parseException(sqlText),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
-      parameters = Map("inputString" -> "Unsupported function name 
`ns`.`db`.`func`"))
+      parameters = Map("inputString" -> "Unsupported function name 
`ns`.`db`.`func`"),
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 132))
   }
 
   test("INVALID_SQL_SYNTAX: Specifying database while creating temporary 
function") {
     val sqlText =
-      """
-        |CREATE TEMPORARY FUNCTION db.func as
+      """CREATE TEMPORARY FUNCTION db.func as
         |'com.matthewrathbone.example.SimpleUDFExample' USING JAR 
'/path/to/jar1',
-        |JAR '/path/to/jar2'
-        |""".stripMargin
+        |JAR '/path/to/jar2'""".stripMargin
 
-    validateParsingError(
-      sqlText = sqlText,
+    checkError(
+      exception = parseException(sqlText),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
       parameters = Map("inputString" ->
-        "Specifying a database in CREATE TEMPORARY FUNCTION is not allowed: 
`db`"))
+        "Specifying a database in CREATE TEMPORARY FUNCTION is not allowed: 
`db`"),
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 129))
   }
 
   test("INVALID_SQL_SYNTAX: Drop temporary function requires a single part 
name") {
-    validateParsingError(
-      sqlText = "DROP TEMPORARY FUNCTION db.func",
+    val sqlText = "DROP TEMPORARY FUNCTION db.func"
+    checkError(
+      exception = parseException(sqlText),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
       parameters = Map("inputString" ->
-        "DROP TEMPORARY FUNCTION requires a single part name but got: 
`db`.`func`"))
+        "DROP TEMPORARY FUNCTION requires a single part name but got: 
`db`.`func`"),
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 30))
   }
 
   test("DUPLICATE_KEY: Found duplicate partition keys") {
-    validateParsingError(
-      sqlText = "INSERT OVERWRITE TABLE table PARTITION(p1='1', p1='1') SELECT 
'col1', 'col2'",
+    checkError(
+      exception = parseException("INSERT OVERWRITE TABLE table 
PARTITION(p1='1', p1='1') SELECT 'col1', 'col2'"),
       errorClass = "DUPLICATE_KEY",
       sqlState = "23000",
-      parameters = Map("keyColumn" -> "`p1`"))
+      parameters = Map("keyColumn" -> "`p1`"),
+      context = ExpectedContext(
+        fragment = "PARTITION(p1='1', p1='1')",
+        start = 29,
+        stop = 53))
   }
 
   test("DUPLICATE_KEY: in table properties") {
-    validateParsingError(
-      sqlText = "ALTER TABLE dbx.tab1 SET TBLPROPERTIES ('key1' = '1', 'key1' 
= '2')",
+    checkError(
+      exception = parseException("ALTER TABLE dbx.tab1 SET TBLPROPERTIES 
('key1' = '1', 'key1' = '2')"),
       errorClass = "DUPLICATE_KEY",
       sqlState = "23000",
-      parameters = Map("keyColumn" -> "`key1`"))
+      parameters = Map("keyColumn" -> "`key1`"),
+      context = ExpectedContext(
+        fragment = "('key1' = '1', 'key1' = '2')",
+        start = 39,
+        stop = 66))
   }
 
   test("PARSE_EMPTY_STATEMENT: empty input") {
-    validateParsingError(
-      sqlText = "",
+    checkError(
+      exception = parseException(""),
       errorClass = "PARSE_EMPTY_STATEMENT",
-      sqlState = "42000")
+      sqlState = Some("42000"))
 
-    validateParsingError(
-      sqlText = "   ",
+    checkError(
+      exception = parseException("   "),
       errorClass = "PARSE_EMPTY_STATEMENT",
-      sqlState = "42000")
+      sqlState = Some("42000"))
 
-    validateParsingError(
-      sqlText = " \n",
+    checkError(
+      exception = parseException(" \n"),
       errorClass = "PARSE_EMPTY_STATEMENT",
-      sqlState = "42000")
+      sqlState = Some("42000"))
   }
 
   test("PARSE_SYNTAX_ERROR: no viable input") {
-    val sqlText = "select ((r + 1) "
-    validateParsingError(
-      sqlText = sqlText,
+    checkError(
+      exception = parseException("select ((r + 1) "),
       errorClass = "PARSE_SYNTAX_ERROR",
       sqlState = "42000",
       parameters = Map("error" -> "end of input", "hint" -> ""))
   }
 
   test("PARSE_SYNTAX_ERROR: extraneous input") {
-    validateParsingError(
-      sqlText = "select 1 1",
+    checkError(
+      exception = parseException("select 1 1"),
       errorClass = "PARSE_SYNTAX_ERROR",
       sqlState = "42000",
       parameters = Map("error" -> "'1'", "hint" -> ": extra input '1'"))
 
-    validateParsingError(
-      sqlText = "select *\nfrom r as q t",
+    checkError(
+      exception = parseException("select *\nfrom r as q t"),
       errorClass = "PARSE_SYNTAX_ERROR",
       sqlState = "42000",
       parameters = Map("error" -> "'t'", "hint" -> ": extra input 't'"))
   }
 
   test("PARSE_SYNTAX_ERROR: mismatched input") {
-    validateParsingError(
-      sqlText = "select * from r order by q from t",
+    checkError(
+      exception = parseException("select * from r order by q from t"),
       errorClass = "PARSE_SYNTAX_ERROR",
       sqlState = "42000",
       parameters = Map("error" -> "'from'", "hint" -> ""))
 
-    validateParsingError(
-      sqlText = "select *\nfrom r\norder by q\nfrom t",
+    checkError(
+      exception = parseException("select *\nfrom r\norder by q\nfrom t"),
       errorClass = "PARSE_SYNTAX_ERROR",
       sqlState = "42000",
       parameters = Map("error" -> "'from'", "hint" -> ""))
@@ -305,14 +401,14 @@ class QueryParsingErrorsSuite extends QueryTest with QueryErrorsSuiteBase {
 
   test("PARSE_SYNTAX_ERROR: jargon token substitute to user-facing language") {
     // '<EOF>' -> end of input
-    validateParsingError(
-      sqlText = "select count(*",
+    checkError(
+      exception = parseException("select count(*"),
       errorClass = "PARSE_SYNTAX_ERROR",
       sqlState = "42000",
       parameters = Map("error" -> "end of input", "hint" -> ""))
 
-    validateParsingError(
-      sqlText = "select 1 as a from",
+    checkError(
+      exception = parseException("select 1 as a from"),
       errorClass = "PARSE_SYNTAX_ERROR",
       sqlState = "42000",
       parameters = Map("error" -> "end of input", "hint" -> ""))
@@ -320,108 +416,144 @@ class QueryParsingErrorsSuite extends QueryTest with QueryErrorsSuiteBase {
 
   test("PARSE_SYNTAX_ERROR - SPARK-21136: " +
     "misleading error message due to problematic antlr grammar") {
-    validateParsingError(
-      sqlText = "select * from a left join_ b on a.id = b.id",
+    checkError(
+      exception = parseException("select * from a left join_ b on a.id = b.id"),
       errorClass = "PARSE_SYNTAX_ERROR",
       sqlState = "42000",
       parameters = Map("error" -> "'join_'", "hint" -> ": missing 'JOIN'"))
 
-    validateParsingError(
-      sqlText = "select * from test where test.t is like 'test'",
+    checkError(
+      exception = parseException("select * from test where test.t is like 'test'"),
       errorClass = "PARSE_SYNTAX_ERROR",
       sqlState = "42000",
       parameters = Map("error" -> "'is'", "hint" -> ""))
 
-    validateParsingError(
-      sqlText = "SELECT * FROM test WHERE x NOT NULL",
+    checkError(
+      exception = parseException("SELECT * FROM test WHERE x NOT NULL"),
       errorClass = "PARSE_SYNTAX_ERROR",
       sqlState = "42000",
       parameters = Map("error" -> "'NOT'", "hint" -> ""))
   }
 
   test("INVALID_SQL_SYNTAX: show table partition key must set value") {
-    validateParsingError(
-      sqlText = "SHOW TABLE EXTENDED IN default LIKE 'employee' PARTITION (grade)",
+    checkError(
+      exception = parseException("SHOW TABLE EXTENDED IN default LIKE 'employee' PARTITION (grade)"),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
-      parameters = Map("inputString" -> "Partition key `grade` must set value (can't be empty)."))
+      parameters = Map("inputString" -> "Partition key `grade` must set value (can't be empty)."),
+      context = ExpectedContext(
+        fragment = "PARTITION (grade)",
+        start = 47,
+        stop = 63))
   }
 
   test("INVALID_SQL_SYNTAX: expected a column reference for transform bucket") {
-    validateParsingError(
-      sqlText =
-        "CREATE TABLE my_tab(a INT, b STRING) USING parquet PARTITIONED BY (bucket(32, a, 66))",
+    checkError(
+      exception = parseException("CREATE TABLE my_tab(a INT, b STRING) USING parquet PARTITIONED BY (bucket(32, a, 66))"),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
-      parameters = Map("inputString" -> "Expected a column reference for transform `bucket`: 66"))
+      parameters = Map("inputString" -> "Expected a column reference for transform `bucket`: 66"),
+      context = ExpectedContext(
+        fragment = "bucket(32, a, 66)",
+        start = 67,
+        stop = 83))
   }
 
   test("UNSUPPORTED_FEATURE: DESC TABLE COLUMN for a specific partition") {
-    validateParsingError(
-      sqlText = "DESCRIBE TABLE EXTENDED customer PARTITION (grade = 'A') customer.age",
+    val sqlText = "DESCRIBE TABLE EXTENDED customer PARTITION (grade = 'A') customer.age"
+    checkError(
+      exception = parseException(sqlText),
       errorClass = "UNSUPPORTED_FEATURE",
-      errorSubClass = Some("DESC_TABLE_COLUMN_PARTITION"),
-      sqlState = "0A000")
+      errorSubClass = "DESC_TABLE_COLUMN_PARTITION",
+      sqlState = "0A000",
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 68))
   }
 
   test("INVALID_SQL_SYNTAX: PARTITION specification is incomplete") {
-    validateParsingError(
-      sqlText = "DESCRIBE TABLE EXTENDED customer PARTITION (grade)",
+    val sqlText = "DESCRIBE TABLE EXTENDED customer PARTITION (grade)"
+    checkError(
+      exception = parseException(sqlText),
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
-      parameters = Map("inputString" -> "PARTITION specification is incomplete: `grade`"))
+      parameters = Map("inputString" -> "PARTITION specification is incomplete: `grade`"),
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 49))
   }
 
   test("UNSUPPORTED_FEATURE: cannot set reserved namespace property") {
-    val sql = "CREATE NAMESPACE IF NOT EXISTS a.b.c WITH PROPERTIES ('location'='/home/user/db')"
-    validateParsingError(
-      sqlText = sql,
+    val sqlText = "CREATE NAMESPACE IF NOT EXISTS a.b.c WITH PROPERTIES ('location'='/home/user/db')"
+    checkError(
+      exception = parseException(sqlText),
       errorClass = "UNSUPPORTED_FEATURE",
-      errorSubClass = Some("SET_NAMESPACE_PROPERTY"),
+      errorSubClass = "SET_NAMESPACE_PROPERTY",
       sqlState = "0A000",
       parameters = Map(
         "property" -> "location",
-        "msg" -> "please use the LOCATION clause to specify it"))
+        "msg" -> "please use the LOCATION clause to specify it"),
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 80))
   }
 
   test("UNSUPPORTED_FEATURE: cannot set reserved table property") {
-    val sql = "CREATE TABLE student (id INT, name STRING, age INT) " +
+    val sqlText = "CREATE TABLE student (id INT, name STRING, age INT) " +
       "USING PARQUET TBLPROPERTIES ('provider'='parquet')"
-    validateParsingError(
-      sqlText = sql,
+    checkError(
+      exception = parseException(sqlText),
       errorClass = "UNSUPPORTED_FEATURE",
-      errorSubClass = Some("SET_TABLE_PROPERTY"),
+      errorSubClass = "SET_TABLE_PROPERTY",
       sqlState = "0A000",
       parameters = Map(
         "property" -> "provider",
-        "msg" -> "please use the USING clause to specify it"))
+        "msg" -> "please use the USING clause to specify it"),
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 101))
   }
 
   test("INVALID_PROPERTY_KEY: invalid property key for set quoted configuration") {
-    val sql = "set =`value`"
-    validateParsingError(
-      sqlText = sql,
+    val sqlText = "set =`value`"
+    checkError(
+      exception = parseException(sqlText),
       errorClass = "INVALID_PROPERTY_KEY",
       sqlState = null,
-      parameters = Map("key" -> "\"\"", "value" -> "\"value\""))
+      parameters = Map("key" -> "\"\"", "value" -> "\"value\""),
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 11))
   }
 
   test("INVALID_PROPERTY_VALUE: invalid property value for set quoted configuration") {
-    val sql = "set `key`=1;2;;"
-    validateParsingError(
-      sqlText = sql,
+    checkError(
+      exception = parseException("set `key`=1;2;;"),
       errorClass = "INVALID_PROPERTY_VALUE",
       sqlState = null,
-      parameters = Map("value" -> "\"1;2;;\"", "key" -> "\"key\""))
+      parameters = Map("value" -> "\"1;2;;\"", "key" -> "\"key\""),
+      context = ExpectedContext(
+        fragment = "set `key`=1;2",
+        start = 0,
+        stop = 12))
   }
 
   test("UNSUPPORTED_FEATURE: cannot set Properties and DbProperties at the same time") {
-    val sql = "CREATE NAMESPACE IF NOT EXISTS a.b.c WITH PROPERTIES ('a'='a', 'b'='b', 'c'='c') " +
+    val sqlText = "CREATE NAMESPACE IF NOT EXISTS a.b.c WITH PROPERTIES ('a'='a', 'b'='b', 'c'='c') " +
       "WITH DBPROPERTIES('a'='a', 'b'='b', 'c'='c')"
-    validateParsingError(
-      sqlText = sql,
+    checkError(
+      exception = parseException(sqlText),
       errorClass = "UNSUPPORTED_FEATURE",
-      errorSubClass = Some("SET_PROPERTIES_AND_DBPROPERTIES"),
-      sqlState = "0A000")
+      errorSubClass = "SET_PROPERTIES_AND_DBPROPERTIES",
+      sqlState = "0A000",
+      context = ExpectedContext(
+        fragment = sqlText,
+        start = 0,
+        stop = 124))
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
index c9cc5911484..eba9c92e3ca 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
@@ -22,7 +22,6 @@ import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.ShowPartitions
 import org.apache.spark.sql.errors.QueryErrorsSuiteBase
-import org.apache.spark.sql.execution.SparkSqlParser
 
 class ShowPartitionsParserSuite extends AnalysisTest with QueryErrorsSuiteBase {
   test("SHOW PARTITIONS") {
@@ -50,10 +49,14 @@ class ShowPartitionsParserSuite extends AnalysisTest with QueryErrorsSuiteBase {
   test("empty values in non-optional partition specs") {
     checkError(
       exception = intercept[ParseException] {
-        new SparkSqlParser().parsePlan("SHOW PARTITIONS dbx.tab1 PARTITION (a='1', b)")
+        parsePlan("SHOW PARTITIONS dbx.tab1 PARTITION (a='1', b)")
       },
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
-      parameters = Map("inputString" -> "Partition key `b` must set value (can't be empty)."))
+      parameters = Map("inputString" -> "Partition key `b` must set value (can't be empty)."),
+      context = ExpectedContext(
+        fragment = "PARTITION (a='1', b)",
+        start = 25,
+        stop = 44))
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala
index ef45cdebbe8..4a38ed9e2b9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableParserSuite.scala
@@ -53,6 +53,10 @@ class TruncateTableParserSuite extends AnalysisTest with QueryErrorsSuiteBase {
       },
       errorClass = "INVALID_SQL_SYNTAX",
       sqlState = "42000",
-      parameters = Map("inputString" -> "Partition key `b` must set value (can't be empty)."))
+      parameters = Map("inputString" -> "Partition key `b` must set value (can't be empty)."),
+      context = ExpectedContext(
+        fragment = "PARTITION (a='1', b)",
+        start = 24,
+        stop = 43))
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to