This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.3 by this push:
     new b9055a4  [SPARK-38650][SQL] Better ParseException message for char types without length
b9055a4 is described below

commit b9055a48a3150bcba2bc1708a57bfb48761ea8a1
Author: Xinyi Yu <xinyi...@databricks.com>
AuthorDate: Thu Mar 31 13:24:59 2022 +0800

    [SPARK-38650][SQL] Better ParseException message for char types without length
    
    ### What changes were proposed in this pull request?
    
    This PR improves the error messages for the char / varchar / character data types when no length is specified. It also adds related test cases.
    
    #### Details
    We support the char and varchar types, but when users write the type without a length, the error message is confusing and unhelpful:
    
    ```
    > SELECT cast('a' as CHAR)
    
    DataType char is not supported.(line 1, pos 19)
    
    == SQL ==
    SELECT cast('a' AS CHAR)
    -------------------^^^
    ```
    After this change, the message is:
    ```
    DataType char requires a length parameter, for example char(10). Please specify the length.
    
    == SQL ==
    SELECT cast('a' AS CHAR)
    -------------------^^^
    ```
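    As the message suggests, specifying a length (for example SELECT cast('a' AS CHAR(10))) parses fine. Below is a rough sketch, not part of this patch, of how the new PARSE_CHAR_MISSING_LENGTH entry is expected to render, assuming the error framework fills the %s placeholders in the template with the message parameters (Array(dataType, dataType)) via standard string formatting:
    ```
    // Hypothetical sanity check of the message template; not code from this patch.
    val template =
      "DataType %s requires a length parameter, for example %s(10). Please specify the length."
    // The parser supplies the data type name twice, e.g. Array("char", "char").
    println(template.format("char", "char"))
    // DataType char requires a length parameter, for example char(10). Please specify the length.
    ```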
    
    ### Why are the changes needed?
    To improve error messages for better usability.
    
    ### Does this PR introduce _any_ user-facing change?
    If error messages are considered user-facing changes, then yes: it improves the messages as shown above.
    
    ### How was this patch tested?
    It's tested by newly added unit tests.
    
    Closes #35966 from anchovYu/better-msg-for-char.
    
    Authored-by: Xinyi Yu <xinyi...@databricks.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
    (cherry picked from commit d678ed488d176c89df7bff39c4f8b4675232b667)
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 core/src/main/resources/error/error-classes.json            |  4 ++++
 .../org/apache/spark/sql/catalyst/parser/AstBuilder.scala   |  2 ++
 .../org/apache/spark/sql/errors/QueryParsingErrors.scala    |  4 ++++
 .../apache/spark/sql/catalyst/parser/ErrorParserSuite.scala | 13 +++++++++++++
 4 files changed, 23 insertions(+)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index e159e7c..d9e2e74 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -138,6 +138,10 @@
     "message" : [ "PARTITION clause cannot contain a non-partition column name: %s" ],
     "sqlState" : "42000"
   },
+  "PARSE_CHAR_MISSING_LENGTH" : {
+    "message" : [ "DataType %s requires a length parameter, for example %s(10). Please specify the length." ],
+    "sqlState" : "42000"
+  },
   "PARSE_EMPTY_STATEMENT" : {
     "message" : [ "Syntax error, unexpected empty statement" ],
     "sqlState" : "42000"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 9266388..3a22c5e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -2671,6 +2671,8 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef] with SQLConfHelper wit
         DecimalType(precision.getText.toInt, scale.getText.toInt)
       case ("void", Nil) => NullType
       case ("interval", Nil) => CalendarIntervalType
+      case (dt @ ("character" | "char" | "varchar"), Nil) =>
+        throw QueryParsingErrors.charTypeMissingLengthError(dt, ctx)
       case (dt, params) =>
         val dtStr = if (params.nonEmpty) s"$dt(${params.mkString(",")})" else dt
         throw QueryParsingErrors.dataTypeUnsupportedError(dtStr, ctx)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
index c092958..e41c4cd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
@@ -220,6 +220,10 @@ object QueryParsingErrors {
     new ParseException(s"DataType $dataType is not supported.", ctx)
   }
 
+  def charTypeMissingLengthError(dataType: String, ctx: PrimitiveDataTypeContext): Throwable = {
+    new ParseException("PARSE_CHAR_MISSING_LENGTH", Array(dataType, dataType), ctx)
+  }
+
   def partitionTransformNotExpectedError(
       name: String, describe: String, ctx: ApplyTransformContext): Throwable = {
     new ParseException(s"Expected a column reference for transform $name: $describe", ctx)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala
index 20e17a8..c42f725 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ErrorParserSuite.scala
@@ -255,4 +255,17 @@ class ErrorParserSuite extends AnalysisTest {
         |SELECT b
       """.stripMargin, 2, 9, 10, msg + " test-table")
   }
+
+  test("datatype not supported") {
+    // general bad types
+    intercept("SELECT cast(1 as badtype)", 1, 17, 17, "DataType badtype is not supported.")
+
+    // special handling on char and varchar
+    intercept("SELECT cast('a' as CHAR)", "PARSE_CHAR_MISSING_LENGTH", 1, 19, 19,
+      "DataType char requires a length parameter")
+    intercept("SELECT cast('a' as Varchar)", "PARSE_CHAR_MISSING_LENGTH", 1, 19, 19,
+      "DataType varchar requires a length parameter")
+    intercept("SELECT cast('a' as Character)", "PARSE_CHAR_MISSING_LENGTH", 1, 19, 19,
+      "DataType character requires a length parameter")
+  }
 }
