This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 04d72659a88 [SPARK-41979][SQL] Add missing dots for error messages in error classes
04d72659a88 is described below

commit 04d72659a88c3e94ff97f27e5481bc130b824b0a
Author: itholic <[email protected]>
AuthorDate: Mon Jan 23 17:17:06 2023 +0900

    [SPARK-41979][SQL] Add missing dots for error messages in error classes
    
    ### What changes were proposed in this pull request?
    
    This PR proposes to add missing dots for error messages in error classes.
    
    This PR also fixes related tests, and includes a minor error message fix.
    
    ### Why are the changes needed?
    
    To keep consistency across all error messages. Error messages should end with a dot.
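
    As an illustration only (a hypothetical sketch, not the actual check this PR updates in SparkThrowableSuite.scala), the trailing-dot convention can be verified against error-classes.json roughly like this, assuming Jackson databind on the classpath (Spark's build already provides it); a real check might also permit other terminal punctuation:

    ```scala
    import com.fasterxml.jackson.databind.{JsonNode, ObjectMapper}
    import java.io.File
    import scala.collection.mutable.ArrayBuffer

    object CheckErrorMessageDots {
      def main(args: Array[String]): Unit = {
        // Path is an assumption; adjust to where the repo is checked out.
        val root = new ObjectMapper()
          .readTree(new File("core/src/main/resources/error/error-classes.json"))

        // Last line of a node's "message" array, if it has one.
        def lastLine(node: JsonNode): Option[String] = {
          val msg = node.get("message")
          if (msg != null && msg.isArray && msg.size() > 0) {
            Some(msg.get(msg.size() - 1).asText())
          } else None
        }

        val violations = ArrayBuffer.empty[String]
        val classes = root.fields()
        while (classes.hasNext) {
          val entry = classes.next()
          lastLine(entry.getValue)
            .filterNot(_.endsWith("."))
            .foreach(_ => violations += entry.getKey)
          // Error sub-classes carry their own "message" arrays.
          val sub = entry.getValue.get("subClass")
          if (sub != null) {
            val subs = sub.fields()
            while (subs.hasNext) {
              val e = subs.next()
              lastLine(e.getValue)
                .filterNot(_.endsWith("."))
                .foreach(_ => violations += s"${entry.getKey}.${e.getKey}")
            }
          }
        }
        assert(violations.isEmpty,
          s"Error messages missing a trailing dot: ${violations.mkString(", ")}")
      }
    }
    ```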
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    `./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"`
    
    Closes #39505 from itholic/missing_dots.
    
    Authored-by: itholic <[email protected]>
    Signed-off-by: Hyukjin Kwon <[email protected]>
---
 core/src/main/resources/error/error-classes.json   | 614 ++++++++++-----------
 .../org/apache/spark/SparkThrowableSuite.scala     |   4 +-
 .../spark/metrics/sink/GraphiteSinkSuite.scala     |  13 +-
 .../org/apache/spark/sql/types/DataTypeSuite.scala |  15 +-
 .../results/ansi/datetime-parsing-invalid.sql.out  |   6 +-
 .../org/apache/spark/sql/DataFrameSuite.scala      |  24 +-
 .../spark/sql/execution/SQLViewTestSuite.scala     |  13 +-
 .../sql/execution/datasources/FileIndexSuite.scala |  13 +-
 .../datasources/parquet/ParquetSchemaSuite.scala   |  24 +-
 .../sql/execution/joins/HashedRelationSuite.scala  |  11 +-
 .../streaming/sources/ForeachWriterSuite.scala     |  10 +-
 .../spark/sql/hive/thriftserver/CliSuite.scala     |   2 +-
 12 files changed, 399 insertions(+), 350 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 5340ba2abc2..08ce9fe1021 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -31,7 +31,7 @@
   },
   "CANNOT_CONSTRUCT_PROTOBUF_DESCRIPTOR" : {
     "message" : [
-      "Error constructing FileDescriptor for <descFilePath>"
+      "Error constructing FileDescriptor for <descFilePath>."
     ]
   },
   "CANNOT_CONVERT_PROTOBUF_FIELD_TYPE_TO_SQL_TYPE" : {
@@ -46,7 +46,7 @@
   },
   "CANNOT_CONVERT_SQL_TYPE_TO_PROTOBUF_ENUM_TYPE" : {
     "message" : [
-      "Cannot convert SQL <sqlColumn> to Protobuf <protobufColumn> because 
<data> cannot be written since it's not defined in ENUM <enumString>"
+      "Cannot convert SQL <sqlColumn> to Protobuf <protobufColumn> because 
<data> cannot be written since it's not defined in ENUM <enumString>."
     ]
   },
   "CANNOT_CONVERT_SQL_TYPE_TO_PROTOBUF_FIELD_TYPE" : {
@@ -72,7 +72,7 @@
   },
   "CANNOT_PARSE_DECIMAL" : {
     "message" : [
-      "Cannot parse decimal"
+      "Cannot parse decimal."
     ],
     "sqlState" : "22018"
   },
@@ -84,7 +84,7 @@
   },
   "CANNOT_PARSE_PROTOBUF_DESCRIPTOR" : {
     "message" : [
-      "Error parsing file <descFilePath> descriptor byte[] into Descriptor 
object"
+      "Error parsing file <descFilePath> descriptor byte[] into Descriptor 
object."
     ]
   },
   "CANNOT_PARSE_TIMESTAMP" : {
@@ -179,7 +179,7 @@
   },
   "CREATE_TABLE_COLUMN_OPTION_DUPLICATE" : {
     "message" : [
-      "CREATE TABLE column <columnName> specifies option \"<optionName>\" more 
than once, which is invalid"
+      "CREATE TABLE column <columnName> specifies option \"<optionName>\" more 
than once, which is invalid."
     ],
     "sqlState" : "42710"
   },
@@ -272,7 +272,7 @@
       },
       "INPUT_SIZE_NOT_ONE" : {
         "message" : [
-          "Length of <exprName> should be 1"
+          "Length of <exprName> should be 1."
         ]
       },
       "INVALID_ARG_VALUE" : {
@@ -342,7 +342,7 @@
       },
       "PARAMETER_CONSTRAINT_VIOLATION" : {
         "message" : [
-          "The <leftExprName>(<leftExprValue>) must be <constraint> the 
<rightExprName>(<rightExprValue>)"
+          "The <leftExprName>(<leftExprValue>) must be <constraint> the 
<rightExprName>(<rightExprValue>)."
         ]
       },
       "RANGE_FRAME_INVALID_TYPE" : {
@@ -364,7 +364,7 @@
         "message" : [
           "<functionName> uses the wrong parameter type. The parameter type 
must conform to:",
           "1. The start and stop expressions must resolve to the same type.",
-          "2. If start and stop expressions resolve to the <startType> type, 
then the step expression must resolve to the <stepType> type",
+          "2. If start and stop expressions resolve to the <startType> type, 
then the step expression must resolve to the <stepType> type.",
           "3. Otherwise, if start and stop expressions resolve to the 
<otherStartType> type, then the step expression must resolve to the same type."
         ]
       },
@@ -400,7 +400,7 @@
       },
       "UNEXPECTED_CLASS_TYPE" : {
         "message" : [
-          "class <className> not found"
+          "class <className> not found."
         ]
       },
       "UNEXPECTED_INPUT_TYPE" : {
@@ -410,7 +410,7 @@
       },
       "UNEXPECTED_NULL" : {
         "message" : [
-          "The <exprName> must not be null"
+          "The <exprName> must not be null."
         ]
       },
       "UNEXPECTED_RETURN_TYPE" : {
@@ -420,7 +420,7 @@
       },
       "UNEXPECTED_STATIC_METHOD" : {
         "message" : [
-          "cannot find a static method <methodName> that matches the argument 
types in <className>"
+          "cannot find a static method <methodName> that matches the argument 
types in <className>."
         ]
       },
       "UNSUPPORTED_INPUT_TYPE" : {
@@ -430,7 +430,7 @@
       },
       "VALUE_OUT_OF_RANGE" : {
         "message" : [
-          "The <exprName> must be between <valueRange> (current value = 
<currentValue>)"
+          "The <exprName> must be between <valueRange> (current value = 
<currentValue>)."
         ]
       },
       "WRONG_NUM_ARGS" : {
@@ -490,7 +490,7 @@
   },
   "DUPLICATE_KEY" : {
     "message" : [
-      "Found duplicate keys <keyColumn>"
+      "Found duplicate keys <keyColumn>."
     ],
     "sqlState" : "23505"
   },
@@ -507,7 +507,7 @@
   },
   "FAILED_EXECUTE_UDF" : {
     "message" : [
-      "Failed to execute user defined function (<functionName>: (<signature>) 
=> <result>)"
+      "Failed to execute user defined function (<functionName>: (<signature>) 
=> <result>)."
     ],
     "sqlState" : "39000"
   },
@@ -519,7 +519,7 @@
   },
   "FAILED_RENAME_PATH" : {
     "message" : [
-      "Failed to rename <sourcePath> to <targetPath> as destination already 
exists"
+      "Failed to rename <sourcePath> to <targetPath> as destination already 
exists."
     ],
     "sqlState" : "42K04"
   },
@@ -537,7 +537,7 @@
   },
   "GRAPHITE_SINK_INVALID_PROTOCOL" : {
     "message" : [
-      "Invalid Graphite protocol: <protocol>"
+      "Invalid Graphite protocol: <protocol>."
     ]
   },
   "GRAPHITE_SINK_PROPERTY_MISSING" : {
@@ -547,19 +547,19 @@
   },
   "GROUPING_COLUMN_MISMATCH" : {
     "message" : [
-      "Column of grouping (<grouping>) can't be found in grouping columns 
<groupingColumns>"
+      "Column of grouping (<grouping>) can't be found in grouping columns 
<groupingColumns>."
     ],
     "sqlState" : "42803"
   },
   "GROUPING_ID_COLUMN_MISMATCH" : {
     "message" : [
-      "Columns of grouping_id (<groupingIdColumn>) does not match grouping 
columns (<groupByColumns>)"
+      "Columns of grouping_id (<groupingIdColumn>) does not match grouping 
columns (<groupByColumns>)."
     ],
     "sqlState" : "42803"
   },
   "GROUPING_SIZE_LIMIT_EXCEEDED" : {
     "message" : [
-      "Grouping sets size cannot be greater than <maxSize>"
+      "Grouping sets size cannot be greater than <maxSize>."
     ],
     "sqlState" : "54000"
   },
@@ -600,7 +600,7 @@
       "DATETIME_PATTERN_RECOGNITION" : {
         "message" : [
           "Spark >= 3.0:",
-          "Fail to recognize <pattern> pattern in the DateTimeFormatter. 1) 
You can set <config> to \"LEGACY\" to restore the behavior before Spark 3.0. 2) 
You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html";
+          "Fail to recognize <pattern> pattern in the DateTimeFormatter. 1) 
You can set <config> to \"LEGACY\" to restore the behavior before Spark 3.0. 2) 
You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html.";
         ]
       },
       "PARSE_DATETIME_BY_NEW_PARSER" : {
@@ -695,7 +695,7 @@
   },
   "INVALID_BUCKET_FILE" : {
     "message" : [
-      "Invalid bucket file: <path>"
+      "Invalid bucket file: <path>."
     ]
   },
   "INVALID_BYTE_STRING" : {
@@ -856,7 +856,7 @@
       },
       "PATTERN" : {
         "message" : [
-          "<value>"
+          "<value>."
         ]
       },
       "ZERO_INDEX" : {
@@ -869,7 +869,7 @@
   },
   "INVALID_PROPERTY_KEY" : {
     "message" : [
-      "<key> is an invalid property key, please use quotes, e.g. SET 
<key>=<value>"
+      "<key> is an invalid property key, please use quotes, e.g. SET 
<key>=<value>."
     ],
     "sqlState" : "42602"
   },
@@ -910,7 +910,7 @@
   },
   "INVALID_SQL_SYNTAX" : {
     "message" : [
-      "Invalid SQL syntax: <inputString>"
+      "Invalid SQL syntax: <inputString>."
     ],
     "sqlState" : "42000"
   },
@@ -921,7 +921,7 @@
     "subClass" : {
       "SCALAR_SUBQUERY_RETURN_MORE_THAN_ONE_OUTPUT_COLUMN" : {
         "message" : [
-          "Scalar subquery must return only one column, but got <number>"
+          "Scalar subquery must return only one column, but got <number>."
         ]
       }
     },
@@ -971,13 +971,13 @@
   },
   "MISSING_STATIC_PARTITION_COLUMN" : {
     "message" : [
-      "Unknown static partition column: <columnName>"
+      "Unknown static partition column: <columnName>."
     ],
     "sqlState" : "42000"
   },
   "MULTI_UDF_INTERFACE_ERROR" : {
     "message" : [
-      "Not allowed to implement multiple UDF interfaces, UDF class <className>"
+      "Not allowed to implement multiple UDF interfaces, UDF class 
<className>."
     ]
   },
   "NESTED_AGGREGATE_FUNCTION" : {
@@ -1028,12 +1028,12 @@
   },
   "NO_SQL_TYPE_IN_PROTOBUF_SCHEMA" : {
     "message" : [
-      "Cannot find <catalystFieldPath> in Protobuf schema"
+      "Cannot find <catalystFieldPath> in Protobuf schema."
     ]
   },
   "NO_UDF_INTERFACE" : {
     "message" : [
-      "UDF class <className> doesn't implement any UDF interface"
+      "UDF class <className> doesn't implement any UDF interface."
     ]
   },
   "NULLABLE_ARRAY_OR_MAP_ELEMENT" : {
@@ -1086,13 +1086,13 @@
   },
   "PARSE_EMPTY_STATEMENT" : {
     "message" : [
-      "Syntax error, unexpected empty statement"
+      "Syntax error, unexpected empty statement."
     ],
     "sqlState" : "42617"
   },
   "PARSE_SYNTAX_ERROR" : {
     "message" : [
-      "Syntax error at or near <error><hint>"
+      "Syntax error at or near <error><hint>."
     ],
     "sqlState" : "42601"
   },
@@ -1119,38 +1119,38 @@
   },
   "PIVOT_VALUE_DATA_TYPE_MISMATCH" : {
     "message" : [
-      "Invalid pivot value '<value>': value data type <valueType> does not 
match pivot column data type <pivotType>"
+      "Invalid pivot value '<value>': value data type <valueType> does not 
match pivot column data type <pivotType>."
     ],
     "sqlState" : "42K09"
   },
   "PROTOBUF_DEPENDENCY_NOT_FOUND" : {
     "message" : [
-      "Could not find dependency: <dependencyName>"
+      "Could not find dependency: <dependencyName>."
     ]
   },
   "PROTOBUF_DESCRIPTOR_FILE_NOT_FOUND" : {
     "message" : [
-      "Error reading Protobuf descriptor file at path: <filePath>"
+      "Error reading Protobuf descriptor file at path: <filePath>."
     ]
   },
   "PROTOBUF_FIELD_MISSING" : {
     "message" : [
-      "Searching for <field> in Protobuf schema at <protobufSchema> gave 
<matchSize> matches. Candidates: <matches>"
+      "Searching for <field> in Protobuf schema at <protobufSchema> gave 
<matchSize> matches. Candidates: <matches>."
     ]
   },
   "PROTOBUF_FIELD_MISSING_IN_SQL_SCHEMA" : {
     "message" : [
-      "Found <field> in Protobuf schema but there is no match in the SQL 
schema"
+      "Found <field> in Protobuf schema but there is no match in the SQL 
schema."
     ]
   },
   "PROTOBUF_FIELD_TYPE_MISMATCH" : {
     "message" : [
-      "Type mismatch encountered for field: <field>"
+      "Type mismatch encountered for field: <field>."
     ]
   },
   "PROTOBUF_MESSAGE_NOT_FOUND" : {
     "message" : [
-      "Unable to locate Message <messageName> in Descriptor"
+      "Unable to locate Message <messageName> in Descriptor."
     ]
   },
   "PROTOBUF_TYPE_NOT_SUPPORT" : {
@@ -1165,7 +1165,7 @@
   },
   "RENAME_SRC_PATH_NOT_FOUND" : {
     "message" : [
-      "Failed to rename as <sourcePath> was not found"
+      "Failed to rename as <sourcePath> was not found."
     ],
     "sqlState" : "42K03"
   },
@@ -1268,19 +1268,19 @@
   },
   "TEMP_VIEW_NAME_TOO_MANY_NAME_PARTS" : {
     "message" : [
-      "CREATE TEMPORARY VIEW or the corresponding Dataset APIs only accept 
single-part view names, but got: <actualName>"
+      "CREATE TEMPORARY VIEW or the corresponding Dataset APIs only accept 
single-part view names, but got: <actualName>."
     ],
     "sqlState" : "428EK"
   },
   "TOO_MANY_ARRAY_ELEMENTS" : {
     "message" : [
-      "Cannot initialize array with <numElements> elements of size <size>"
+      "Cannot initialize array with <numElements> elements of size <size>."
     ],
     "sqlState" : "54000"
   },
   "UNABLE_TO_ACQUIRE_MEMORY" : {
     "message" : [
-      "Unable to acquire <requestedBytes> bytes of memory, got <receivedBytes>"
+      "Unable to acquire <requestedBytes> bytes of memory, got 
<receivedBytes>."
     ],
     "sqlState" : "53200"
   },
@@ -1309,7 +1309,7 @@
   },
   "UNKNOWN_PROTOBUF_MESSAGE_TYPE" : {
     "message" : [
-      "Attempting to treat <descriptorName> as a Message, but it was 
<containingType>"
+      "Attempting to treat <descriptorName> as a Message, but it was 
<containingType>."
     ]
   },
   "UNPIVOT_REQUIRES_ATTRIBUTES" : {
@@ -1320,25 +1320,25 @@
   },
   "UNPIVOT_REQUIRES_VALUE_COLUMNS" : {
     "message" : [
-      "At least one value column needs to be specified for UNPIVOT, all 
columns specified as ids"
+      "At least one value column needs to be specified for UNPIVOT, all 
columns specified as ids."
     ],
     "sqlState" : "42K0A"
   },
   "UNPIVOT_VALUE_DATA_TYPE_MISMATCH" : {
     "message" : [
-      "Unpivot value columns must share a least common type, some types do 
not: [<types>]"
+      "Unpivot value columns must share a least common type, some types do 
not: [<types>]."
     ],
     "sqlState" : "42K09"
   },
   "UNPIVOT_VALUE_SIZE_MISMATCH" : {
     "message" : [
-      "All unpivot value columns must have the same size as there are value 
column names (<names>)"
+      "All unpivot value columns must have the same size as there are value 
column names (<names>)."
     ],
     "sqlState" : "428C4"
   },
   "UNRECOGNIZED_SQL_TYPE" : {
     "message" : [
-      "Unrecognized SQL type <typeName>"
+      "Unrecognized SQL type <typeName>."
     ],
     "sqlState" : "42704"
   },
@@ -1360,7 +1360,7 @@
       },
       "WITH_SUGGESTION" : {
         "message" : [
-          "Did you mean one of the following? [<proposal>]"
+          "Did you mean one of the following? [<proposal>]."
         ]
       }
     },
@@ -1378,7 +1378,7 @@
       },
       "WITH_SUGGESTION" : {
         "message" : [
-          "Did you mean one of the following? [<proposal>]"
+          "Did you mean one of the following? [<proposal>]."
         ]
       }
     },
@@ -1396,7 +1396,7 @@
       },
       "WITH_SUGGESTION" : {
         "message" : [
-          "Otherwise did you mean one of the following column(s)? [<proposal>]"
+          "Otherwise did you mean one of the following column(s)? 
[<proposal>]."
         ]
       }
     },
@@ -1410,7 +1410,7 @@
   },
   "UNSUPPORTED_DATATYPE" : {
     "message" : [
-      "Unsupported data type <typeName>"
+      "Unsupported data type <typeName>."
     ],
     "sqlState" : "0A000"
   },
@@ -1474,7 +1474,7 @@
       },
       "INSERT_PARTITION_SPEC_IF_NOT_EXISTS" : {
         "message" : [
-          "INSERT INTO <tableName> IF NOT EXISTS in the PARTITION spec."
+          "INSERT INTO <tableName> with IF NOT EXISTS in the PARTITION spec."
         ]
       },
       "JDBC_TRANSACTION" : {
@@ -1587,12 +1587,12 @@
     "subClass" : {
       "MULTI_GENERATOR" : {
         "message" : [
-          "only one generator allowed per <clause> clause but found <num>: 
<generators>"
+          "only one generator allowed per <clause> clause but found <num>: 
<generators>."
         ]
       },
       "NESTED_IN_EXPRESSIONS" : {
         "message" : [
-          "nested in expressions <expression>"
+          "nested in expressions <expression>."
         ]
       },
       "NOT_GENERATOR" : {
@@ -1602,7 +1602,7 @@
       },
       "OUTSIDE_SELECT" : {
         "message" : [
-          "outside the SELECT clause, found: <plan>"
+          "outside the SELECT clause, found: <plan>."
         ]
       }
     },
@@ -1610,7 +1610,7 @@
   },
   "UNSUPPORTED_GROUPING_EXPRESSION" : {
     "message" : [
-      "grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup"
+      "grouping()/grouping_id() can only be used with 
GroupingSets/Cube/Rollup."
     ]
   },
   "UNSUPPORTED_SAVE_MODE" : {
@@ -1637,67 +1637,67 @@
     "subClass" : {
       "ACCESSING_OUTER_QUERY_COLUMN_IS_NOT_ALLOWED" : {
         "message" : [
-          "Accessing outer query column is not allowed in this 
location<treeNode>"
+          "Accessing outer query column is not allowed in this 
location<treeNode>."
         ]
       },
       "AGGREGATE_FUNCTION_MIXED_OUTER_LOCAL_REFERENCES" : {
         "message" : [
-          "Found an aggregate function in a correlated predicate that has both 
outer and local references, which is not supported: <function>"
+          "Found an aggregate function in a correlated predicate that has both 
outer and local references, which is not supported: <function>."
         ]
       },
       "CORRELATED_COLUMN_IS_NOT_ALLOWED_IN_PREDICATE" : {
         "message" : [
-          "Correlated column is not allowed in predicate: <treeNode>"
+          "Correlated column is not allowed in predicate: <treeNode>."
         ]
       },
       "CORRELATED_COLUMN_NOT_FOUND" : {
         "message" : [
-          "A correlated outer name reference within a subquery expression body 
was not found in the enclosing query: <value>"
+          "A correlated outer name reference within a subquery expression body 
was not found in the enclosing query: <value>."
         ]
       },
       "CORRELATED_REFERENCE" : {
         "message" : [
-          "Expressions referencing the outer query are not supported outside 
of WHERE/HAVING clauses: <sqlExprs>"
+          "Expressions referencing the outer query are not supported outside 
of WHERE/HAVING clauses: <sqlExprs>."
         ]
       },
       "LATERAL_JOIN_CONDITION_NON_DETERMINISTIC" : {
         "message" : [
-          "Lateral join condition cannot be non-deterministic: <condition>"
+          "Lateral join condition cannot be non-deterministic: <condition>."
         ]
       },
       "MUST_AGGREGATE_CORRELATED_SCALAR_SUBQUERY" : {
         "message" : [
-          "Correlated scalar subqueries in the GROUP BY clause must also be in 
the aggregate expressions<treeNode>"
+          "Correlated scalar subqueries in the GROUP BY clause must also be in 
the aggregate expressions<treeNode>."
         ]
       },
       "MUST_AGGREGATE_CORRELATED_SCALAR_SUBQUERY_OUTPUT" : {
         "message" : [
-          "The output of a correlated scalar subquery must be aggregated"
+          "The output of a correlated scalar subquery must be aggregated."
         ]
       },
       "NON_CORRELATED_COLUMNS_IN_GROUP_BY" : {
         "message" : [
-          "A GROUP BY clause in a scalar correlated subquery cannot contain 
non-correlated columns: <value>"
+          "A GROUP BY clause in a scalar correlated subquery cannot contain 
non-correlated columns: <value>."
         ]
       },
       "NON_DETERMINISTIC_LATERAL_SUBQUERIES" : {
         "message" : [
-          "Non-deterministic lateral subqueries are not supported when joining 
with outer relations that produce more than one row<treeNode>"
+          "Non-deterministic lateral subqueries are not supported when joining 
with outer relations that produce more than one row<treeNode>."
         ]
       },
       "UNSUPPORTED_CORRELATED_REFERENCE_DATA_TYPE" : {
         "message" : [
-          "Correlated column reference '<expr>' cannot be <dataType> type"
+          "Correlated column reference '<expr>' cannot be <dataType> type."
         ]
       },
       "UNSUPPORTED_CORRELATED_SCALAR_SUBQUERY" : {
         "message" : [
-          "Correlated scalar subqueries can only be used in filters, 
aggregations, projections, and UPDATE/MERGE/DELETE commands<treeNode>"
+          "Correlated scalar subqueries can only be used in filters, 
aggregations, projections, and UPDATE/MERGE/DELETE commands<treeNode>."
         ]
       },
       "UNSUPPORTED_IN_EXISTS_SUBQUERY" : {
         "message" : [
-          "IN/EXISTS predicate subqueries can only be used in filters, joins, 
aggregations, window functions, projections, and UPDATE/MERGE/DELETE 
commands<treeNode>"
+          "IN/EXISTS predicate subqueries can only be used in filters, joins, 
aggregations, window functions, projections, and UPDATE/MERGE/DELETE 
commands<treeNode>."
         ]
       }
     },
@@ -1712,9 +1712,9 @@
   "UNTYPED_SCALA_UDF" : {
     "message" : [
       "You're using untyped Scala UDF, which does not have the input type 
information. Spark may blindly pass null to the Scala closure with 
primitive-type argument, and the closure will see the default value of the Java 
type for the null argument, e.g. `udf((x: Int) => x, IntegerType)`, the result 
is 0 for null input. To get rid of this error, you could:",
-      "1. use typed Scala UDF APIs(without return type parameter), e.g. 
`udf((x: Int) => x)`",
-      "2. use Java UDF APIs, e.g. `udf(new UDF1[String, Integer] { override 
def call(s: String): Integer = s.length() }, IntegerType)`, if input types are 
all non primitive",
-      "3. set \"spark.sql.legacy.allowUntypedScalaUDF\" to \"true\" and use 
this API with caution"
+      "1. use typed Scala UDF APIs(without return type parameter), e.g. 
`udf((x: Int) => x)`.",
+      "2. use Java UDF APIs, e.g. `udf(new UDF1[String, Integer] { override 
def call(s: String): Integer = s.length() }, IntegerType)`, if input types are 
all non primitive.",
+      "3. set \"spark.sql.legacy.allowUntypedScalaUDF\" to \"true\" and use 
this API with caution."
     ]
   },
   "VIEW_ALREADY_EXISTS" : {
@@ -1762,12 +1762,12 @@
   },
   "_LEGACY_ERROR_TEMP_0001" : {
     "message" : [
-      "Invalid InsertIntoContext"
+      "Invalid InsertIntoContext."
     ]
   },
   "_LEGACY_ERROR_TEMP_0002" : {
     "message" : [
-      "INSERT OVERWRITE DIRECTORY is not supported"
+      "INSERT OVERWRITE DIRECTORY is not supported."
     ]
   },
   "_LEGACY_ERROR_TEMP_0003" : {
@@ -2083,7 +2083,7 @@
   },
   "_LEGACY_ERROR_TEMP_1013" : {
     "message" : [
-      "<nameParts> is a <viewStr>. '<cmd>' expects a table.<hintStr>"
+      "<nameParts> is a <viewStr>. '<cmd>' expects a table.<hintStr>."
     ]
   },
   "_LEGACY_ERROR_TEMP_1014" : {
@@ -2093,7 +2093,7 @@
   },
   "_LEGACY_ERROR_TEMP_1015" : {
     "message" : [
-      "<identifier> is a table. '<cmd>' expects a view.<hintStr>"
+      "<identifier> is a table. '<cmd>' expects a view.<hintStr>."
     ]
   },
   "_LEGACY_ERROR_TEMP_1016" : {
@@ -2103,7 +2103,7 @@
   },
   "_LEGACY_ERROR_TEMP_1017" : {
     "message" : [
-      "<name> is a built-in/temporary function. '<cmd>' expects a persistent 
function.<hintStr>"
+      "<name> is a built-in/temporary function. '<cmd>' expects a persistent 
function.<hintStr>."
     ]
   },
   "_LEGACY_ERROR_TEMP_1018" : {
@@ -2475,8 +2475,8 @@
   "_LEGACY_ERROR_TEMP_1114" : {
     "message" : [
       "The streaming sources in a query do not have a common supported 
execution mode.",
-      "Sources support micro-batch: <microBatchSources>",
-      "Sources support continuous: <continuousSources>"
+      "Sources support micro-batch: <microBatchSources>.",
+      "Sources support continuous: <continuousSources>."
     ]
   },
   "_LEGACY_ERROR_TEMP_1117" : {
@@ -2705,15 +2705,15 @@
   "_LEGACY_ERROR_TEMP_1163" : {
     "message" : [
       "Specified partitioning does not match that of the existing table 
<tableName>.",
-      "Specified partition columns: [<specifiedPartCols>]",
-      "Existing partition columns: [<existingPartCols>]"
+      "Specified partition columns: [<specifiedPartCols>].",
+      "Existing partition columns: [<existingPartCols>]."
     ]
   },
   "_LEGACY_ERROR_TEMP_1164" : {
     "message" : [
       "Specified bucketing does not match that of the existing table 
<tableName>.",
-      "Specified bucketing: <specifiedBucketString>",
-      "Existing bucketing: <existingBucketString>"
+      "Specified bucketing: <specifiedBucketString>.",
+      "Existing bucketing: <existingBucketString>."
     ]
   },
   "_LEGACY_ERROR_TEMP_1165" : {
@@ -2739,8 +2739,8 @@
   "_LEGACY_ERROR_TEMP_1169" : {
     "message" : [
       "Requested partitioning does not match the table <tableName>:",
-      "Requested partitions: <normalizedPartSpec>",
-      "Table partitions: <partColNames>"
+      "Requested partitions: <normalizedPartSpec>.",
+      "Table partitions: <partColNames>."
     ]
   },
   "_LEGACY_ERROR_TEMP_1170" : {
@@ -2776,7 +2776,7 @@
   "_LEGACY_ERROR_TEMP_1176" : {
     "message" : [
       "The SQL query of view <viewName> has an incompatible schema change and 
column <colName> cannot be resolved. Expected <expectedNum> columns named 
<colName> but got <actualCols>.",
-      "Please try to re-create the view by running: <viewDDL>"
+      "Please try to re-create the view by running: <viewDDL>."
     ]
   },
   "_LEGACY_ERROR_TEMP_1177" : {
@@ -2876,7 +2876,7 @@
   },
   "_LEGACY_ERROR_TEMP_1198" : {
     "message" : [
-      "Function '<unbound>' cannot process input: (<arguments>): <unsupported>"
+      "Function '<unbound>' cannot process input: (<arguments>): 
<unsupported>."
     ]
   },
   "_LEGACY_ERROR_TEMP_1199" : {
@@ -2897,21 +2897,21 @@
   "_LEGACY_ERROR_TEMP_1202" : {
     "message" : [
       "Cannot write to '<tableName>', too many data columns:",
-      "Table columns: <tableColumns>",
-      "Data columns: <dataColumns>"
+      "Table columns: <tableColumns>.",
+      "Data columns: <dataColumns>."
     ]
   },
   "_LEGACY_ERROR_TEMP_1203" : {
     "message" : [
       "Cannot write to '<tableName>', not enough data columns:",
-      "Table columns: <tableColumns>",
-      "Data columns: <dataColumns>"
+      "Table columns: <tableColumns>.",
+      "Data columns: <dataColumns>."
     ]
   },
   "_LEGACY_ERROR_TEMP_1204" : {
     "message" : [
       "Cannot write incompatible data to table '<tableName>':",
-      "- <errors>"
+      "- <errors>."
     ]
   },
   "_LEGACY_ERROR_TEMP_1205" : {
@@ -2954,7 +2954,7 @@
       "Found conflicting attributes <conflictingAttrs> in the condition 
joining outer plan:",
       "<outerPlan>",
       "and subplan:",
-      "<subplan>"
+      "<subplan>."
     ]
   },
   "_LEGACY_ERROR_TEMP_1213" : {
@@ -2979,7 +2979,7 @@
   },
   "_LEGACY_ERROR_TEMP_1219" : {
     "message" : [
-      "Hive metastore does not support altering database location"
+      "Hive metastore does not support altering database location."
     ]
   },
   "_LEGACY_ERROR_TEMP_1220" : {
@@ -3174,7 +3174,7 @@
   },
   "_LEGACY_ERROR_TEMP_1270" : {
     "message" : [
-      "SHOW CREATE TABLE is not supported on a temporary view: <table>"
+      "SHOW CREATE TABLE is not supported on a temporary view: <table>."
     ]
   },
   "_LEGACY_ERROR_TEMP_1271" : {
@@ -3204,7 +3204,7 @@
   "_LEGACY_ERROR_TEMP_1275" : {
     "message" : [
       "Failed to execute SHOW CREATE TABLE against table/view <table>, which 
is created by Hive and uses the following unsupported feature(s)",
-      "<features>"
+      "<features>."
     ]
   },
   "_LEGACY_ERROR_TEMP_1276" : {
@@ -3447,7 +3447,7 @@
   "_LEGACY_ERROR_TEMP_1326" : {
     "message" : [
       "Cannot modify the value of a Spark config: <key>.",
-      "See also 
'https://spark.apache.org/docs/latest/sql-migration-guide.html#ddl-statements'"
+      "See also 
'https://spark.apache.org/docs/latest/sql-migration-guide.html#ddl-statements'."
     ]
   },
   "_LEGACY_ERROR_TEMP_1327" : {
@@ -3473,7 +3473,7 @@
   "_LEGACY_ERROR_TEMP_1331" : {
     "message" : [
       "Missing field <fieldName> in table <table> with schema:",
-      "<schema>"
+      "<schema>."
     ]
   },
   "_LEGACY_ERROR_TEMP_1332" : {
@@ -3508,12 +3508,12 @@
   },
   "_LEGACY_ERROR_TEMP_1338" : {
     "message" : [
-      "Sinks cannot request distribution and ordering in continuous execution 
mode"
+      "Sinks cannot request distribution and ordering in continuous execution 
mode."
     ]
   },
   "_LEGACY_ERROR_TEMP_1339" : {
     "message" : [
-      "Failed to execute INSERT INTO command because the VALUES list contains 
a DEFAULT column reference as part of another expression; this is not allowed"
+      "Failed to execute INSERT INTO command because the VALUES list contains 
a DEFAULT column reference as part of another expression; this is not allowed."
     ]
   },
   "_LEGACY_ERROR_TEMP_1340" : {
@@ -3563,27 +3563,27 @@
   },
   "_LEGACY_ERROR_TEMP_2003" : {
     "message" : [
-      "Unsuccessful try to zip maps with <size> unique keys due to exceeding 
the array size limit <maxRoundedArrayLength>"
+      "Unsuccessful try to zip maps with <size> unique keys due to exceeding 
the array size limit <maxRoundedArrayLength>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2004" : {
     "message" : [
-      "no default for type <dataType>"
+      "no default for type <dataType>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2005" : {
     "message" : [
-      "Type <dataType> does not support ordered operations"
+      "Type <dataType> does not support ordered operations."
     ]
   },
   "_LEGACY_ERROR_TEMP_2006" : {
     "message" : [
-      "The specified group index cannot be less than zero"
+      "The specified group index cannot be less than zero."
     ]
   },
   "_LEGACY_ERROR_TEMP_2007" : {
     "message" : [
-      "Regex group count is <groupCount>, but the specified group index is 
<groupIndex>"
+      "Regex group count is <groupCount>, but the specified group index is 
<groupIndex>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2008" : {
@@ -3598,7 +3598,7 @@
   },
   "_LEGACY_ERROR_TEMP_2011" : {
     "message" : [
-      "Unexpected data type <dataType>"
+      "Unexpected data type <dataType>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2013" : {
@@ -3608,12 +3608,12 @@
   },
   "_LEGACY_ERROR_TEMP_2014" : {
     "message" : [
-      "<funcName> is not matched at addNewFunction"
+      "<funcName> is not matched at addNewFunction."
     ]
   },
   "_LEGACY_ERROR_TEMP_2015" : {
     "message" : [
-      "Cannot generate <codeType> code for incomparable type: <dataType>"
+      "Cannot generate <codeType> code for incomparable type: <dataType>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2016" : {
@@ -3623,7 +3623,7 @@
   },
   "_LEGACY_ERROR_TEMP_2017" : {
     "message" : [
-      "not resolved"
+      "not resolved."
     ]
   },
   "_LEGACY_ERROR_TEMP_2018" : {
@@ -3633,17 +3633,17 @@
   },
   "_LEGACY_ERROR_TEMP_2020" : {
     "message" : [
-      "Couldn't find a valid constructor on <cls>"
+      "Couldn't find a valid constructor on <cls>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2021" : {
     "message" : [
-      "Couldn't find a primary constructor on <cls>"
+      "Couldn't find a primary constructor on <cls>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2022" : {
     "message" : [
-      "Unsupported natural join type <joinType>"
+      "Unsupported natural join type <joinType>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2023" : {
@@ -3658,7 +3658,7 @@
   },
   "_LEGACY_ERROR_TEMP_2025" : {
     "message" : [
-      "<className> must override either <m1> or <m2>"
+      "<className> must override either <m1> or <m2>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2026" : {
@@ -3668,22 +3668,22 @@
   },
   "_LEGACY_ERROR_TEMP_2027" : {
     "message" : [
-      "Unexpected operator <op> in correlated subquery<pos>"
+      "Unexpected operator <op> in correlated subquery<pos>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2028" : {
     "message" : [
-      "This line should be unreachable<err>"
+      "This line should be unreachable<err>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2029" : {
     "message" : [
-      "Not supported rounding mode: <roundMode>"
+      "Not supported rounding mode: <roundMode>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2030" : {
     "message" : [
-      "Can not handle nested schema yet...  plan <plan>"
+      "Can not handle nested schema yet...  plan <plan>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2031" : {
@@ -3698,47 +3698,47 @@
   },
   "_LEGACY_ERROR_TEMP_2033" : {
     "message" : [
-      "Unable to create database <name> as failed to create its directory 
<locationUri>"
+      "Unable to create database <name> as failed to create its directory 
<locationUri>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2034" : {
     "message" : [
-      "Unable to drop database <name> as failed to delete its directory 
<locationUri>"
+      "Unable to drop database <name> as failed to delete its directory 
<locationUri>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2035" : {
     "message" : [
-      "Unable to create table <table> as failed to create its directory 
<defaultTableLocation>"
+      "Unable to create table <table> as failed to create its directory 
<defaultTableLocation>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2036" : {
     "message" : [
-      "Unable to delete partition path <partitionPath>"
+      "Unable to delete partition path <partitionPath>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2037" : {
     "message" : [
-      "Unable to drop table <table> as failed to delete its directory <dir>"
+      "Unable to drop table <table> as failed to delete its directory <dir>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2038" : {
     "message" : [
-      "Unable to rename table <oldName> to <newName> as failed to rename its 
directory <oldDir>"
+      "Unable to rename table <oldName> to <newName> as failed to rename its 
directory <oldDir>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2039" : {
     "message" : [
-      "Unable to create partition path <partitionPath>"
+      "Unable to create partition path <partitionPath>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2040" : {
     "message" : [
-      "Unable to rename partition path <oldPartPath>"
+      "Unable to rename partition path <oldPartPath>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2041" : {
     "message" : [
-      "<methodName> is not implemented"
+      "<methodName> is not implemented."
     ]
   },
   "_LEGACY_ERROR_TEMP_2042" : {
@@ -3748,12 +3748,12 @@
   },
   "_LEGACY_ERROR_TEMP_2043" : {
     "message" : [
-      "- <sqlValue> caused overflow"
+      "- <sqlValue> caused overflow."
     ]
   },
   "_LEGACY_ERROR_TEMP_2044" : {
     "message" : [
-      "<sqlValue1> <symbol> <sqlValue2> caused overflow"
+      "<sqlValue1> <symbol> <sqlValue2> caused overflow."
     ]
   },
   "_LEGACY_ERROR_TEMP_2045" : {
@@ -3763,12 +3763,12 @@
   },
   "_LEGACY_ERROR_TEMP_2046" : {
     "message" : [
-      "[BUG] Not a DataSourceRDDPartition: <split>"
+      "[BUG] Not a DataSourceRDDPartition: <split>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2047" : {
     "message" : [
-      "'path' is not specified"
+      "'path' is not specified."
     ]
   },
   "_LEGACY_ERROR_TEMP_2048" : {
@@ -3778,22 +3778,22 @@
   },
   "_LEGACY_ERROR_TEMP_2049" : {
     "message" : [
-      "Data source <className> does not support streamed <operator>"
+      "Data source <className> does not support streamed <operator>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2050" : {
     "message" : [
-      "Expected exactly one path to be specified, but got: <paths>"
+      "Expected exactly one path to be specified, but got: <paths>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2052" : {
     "message" : [
-      "<className> was removed in Spark 2.0. Please check if your library is 
compatible with Spark 2.0"
+      "<className> was removed in Spark 2.0. Please check if your library is 
compatible with Spark 2.0."
     ]
   },
   "_LEGACY_ERROR_TEMP_2053" : {
     "message" : [
-      "buildReader is not supported for <format>"
+      "buildReader is not supported for <format>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2055" : {
@@ -3804,34 +3804,34 @@
   },
   "_LEGACY_ERROR_TEMP_2056" : {
     "message" : [
-      "Unable to clear output directory <staticPrefixPath> prior to writing to 
it"
+      "Unable to clear output directory <staticPrefixPath> prior to writing to 
it."
     ]
   },
   "_LEGACY_ERROR_TEMP_2057" : {
     "message" : [
-      "Unable to clear partition directory <path> prior to writing to it"
+      "Unable to clear partition directory <path> prior to writing to it."
     ]
   },
   "_LEGACY_ERROR_TEMP_2058" : {
     "message" : [
-      "Failed to cast value `<value>` to `<dataType>` for partition column 
`<columnName>`"
+      "Failed to cast value `<value>` to `<dataType>` for partition column 
`<columnName>`."
     ]
   },
   "_LEGACY_ERROR_TEMP_2059" : {
     "message" : [
-      "End of stream"
+      "End of stream."
     ]
   },
   "_LEGACY_ERROR_TEMP_2060" : {
     "message" : [
       "The fallback v1 relation reports inconsistent schema:",
-      "Schema of v2 scan: <v2Schema>",
-      "Schema of v1 relation: <v1Schema>"
+      "Schema of v2 scan: <v2Schema>.",
+      "Schema of v1 relation: <v1Schema>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2061" : {
     "message" : [
-      "No records should be returned from EmptyDataReader"
+      "No records should be returned from EmptyDataReader."
     ]
   },
   "_LEGACY_ERROR_TEMP_2062" : {
@@ -3842,7 +3842,7 @@
   },
   "_LEGACY_ERROR_TEMP_2063" : {
     "message" : [
-      "Parquet column cannot be converted in file <filePath>. Column: 
<column>, Expected: <logicalType>, Found: <physicalType>"
+      "Parquet column cannot be converted in file <filePath>. Column: 
<column>, Expected: <logicalType>, Found: <physicalType>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2064" : {
@@ -3857,22 +3857,22 @@
   },
   "_LEGACY_ERROR_TEMP_2066" : {
     "message" : [
-      "Invalid namespace name: <namespace>"
+      "Invalid namespace name: <namespace>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2067" : {
     "message" : [
-      "Unsupported partition transform: <transform>"
+      "Unsupported partition transform: <transform>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2068" : {
     "message" : [
-      "Missing database location"
+      "Missing database location."
     ]
   },
   "_LEGACY_ERROR_TEMP_2069" : {
     "message" : [
-      "Cannot remove reserved property: <property>"
+      "Cannot remove reserved property: <property>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2070" : {
@@ -3882,27 +3882,27 @@
   },
   "_LEGACY_ERROR_TEMP_2071" : {
     "message" : [
-      "Commit denied for partition <partId> (task <taskId>, attempt 
<attemptId>, stage <stageId>.<stageAttempt>)"
+      "Commit denied for partition <partId> (task <taskId>, attempt 
<attemptId>, stage <stageId>.<stageAttempt>)."
     ]
   },
   "_LEGACY_ERROR_TEMP_2072" : {
     "message" : [
-      "Table implementation does not support writes: <ident>"
+      "Table implementation does not support writes: <ident>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2073" : {
     "message" : [
-      "Cannot create JDBC table with partition"
+      "Cannot create JDBC table with partition."
     ]
   },
   "_LEGACY_ERROR_TEMP_2074" : {
     "message" : [
-      "user-specified schema"
+      "user-specified schema."
     ]
   },
   "_LEGACY_ERROR_TEMP_2075" : {
     "message" : [
-      "Write is not supported for binary file data source"
+      "Write is not supported for binary file data source."
     ]
   },
   "_LEGACY_ERROR_TEMP_2076" : {
@@ -3912,7 +3912,7 @@
   },
   "_LEGACY_ERROR_TEMP_2077" : {
     "message" : [
-      "Unsupported field name: <fieldName>"
+      "Unsupported field name: <fieldName>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2078" : {
@@ -3937,27 +3937,27 @@
   },
   "_LEGACY_ERROR_TEMP_2082" : {
     "message" : [
-      "Can't get JDBC type for <catalogString>"
+      "Can't get JDBC type for <catalogString>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2083" : {
     "message" : [
-      "Unsupported type <content>"
+      "Unsupported type <content>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2084" : {
     "message" : [
-      "Unsupported array element type <catalogString> based on binary"
+      "Unsupported array element type <catalogString> based on binary."
     ]
   },
   "_LEGACY_ERROR_TEMP_2085" : {
     "message" : [
-      "Nested arrays unsupported"
+      "Nested arrays unsupported."
     ]
   },
   "_LEGACY_ERROR_TEMP_2086" : {
     "message" : [
-      "Can't translate non-null value for field <pos>"
+      "Can't translate non-null value for field <pos>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2087" : {
@@ -3972,7 +3972,7 @@
   },
   "_LEGACY_ERROR_TEMP_2089" : {
     "message" : [
-      "DataType: <catalogString>"
+      "DataType: <catalogString>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2090" : {
@@ -3982,27 +3982,27 @@
   },
   "_LEGACY_ERROR_TEMP_2091" : {
     "message" : [
-      "Could not read footer for file: <file>"
+      "Could not read footer for file: <file>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2092" : {
     "message" : [
-      "Could not read footer for file: <file>"
+      "Could not read footer for file: <file>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2093" : {
     "message" : [
-      "Found duplicate field(s) \"<requiredFieldName>\": <matchedOrcFields> in 
case-insensitive mode"
+      "Found duplicate field(s) \"<requiredFieldName>\": <matchedOrcFields> in 
case-insensitive mode."
     ]
   },
   "_LEGACY_ERROR_TEMP_2094" : {
     "message" : [
-      "Found duplicate field(s) \"<requiredId>\": <matchedFields> in id 
mapping mode"
+      "Found duplicate field(s) \"<requiredId>\": <matchedFields> in id 
mapping mode."
     ]
   },
   "_LEGACY_ERROR_TEMP_2095" : {
     "message" : [
-      "Failed to merge incompatible schemas <left> and <right>"
+      "Failed to merge incompatible schemas <left> and <right>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2096" : {
@@ -4012,32 +4012,32 @@
   },
   "_LEGACY_ERROR_TEMP_2097" : {
     "message" : [
-      "Could not execute broadcast in <timeout> secs. You can increase the 
timeout for broadcasts via <broadcastTimeout> or disable broadcast join by 
setting <autoBroadcastJoinThreshold> to -1"
+      "Could not execute broadcast in <timeout> secs. You can increase the 
timeout for broadcasts via <broadcastTimeout> or disable broadcast join by 
setting <autoBroadcastJoinThreshold> to -1."
     ]
   },
   "_LEGACY_ERROR_TEMP_2098" : {
     "message" : [
-      "Could not compare cost with <cost>"
+      "Could not compare cost with <cost>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2099" : {
     "message" : [
-      "Unsupported data type: <dt>"
+      "Unsupported data type: <dt>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2100" : {
     "message" : [
-      "not support type: <dataType>"
+      "not support type: <dataType>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2101" : {
     "message" : [
-      "Not support non-primitive type now"
+      "Not support non-primitive type now."
     ]
   },
   "_LEGACY_ERROR_TEMP_2102" : {
     "message" : [
-      "Unsupported type: <catalogString>"
+      "Unsupported type: <catalogString>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2103" : {
@@ -4047,42 +4047,42 @@
   },
   "_LEGACY_ERROR_TEMP_2104" : {
     "message" : [
-      "End of the iterator"
+      "End of the iterator."
     ]
   },
   "_LEGACY_ERROR_TEMP_2105" : {
     "message" : [
-      "Could not allocate memory to grow BytesToBytesMap"
+      "Could not allocate memory to grow BytesToBytesMap."
     ]
   },
   "_LEGACY_ERROR_TEMP_2106" : {
     "message" : [
-      "Can't acquire <size> bytes memory to build hash relation, got <got> 
bytes"
+      "Can't acquire <size> bytes memory to build hash relation, got <got> 
bytes."
     ]
   },
   "_LEGACY_ERROR_TEMP_2107" : {
     "message" : [
-      "There is not enough memory to build hash map"
+      "There is not enough memory to build hash map."
     ]
   },
   "_LEGACY_ERROR_TEMP_2108" : {
     "message" : [
-      "Does not support row that is larger than 256M"
+      "Does not support row that is larger than 256M."
     ]
   },
   "_LEGACY_ERROR_TEMP_2109" : {
     "message" : [
-      "Cannot build HashedRelation with more than 1/3 billions unique keys"
+      "Cannot build HashedRelation with more than 1/3 billions unique keys."
     ]
   },
   "_LEGACY_ERROR_TEMP_2110" : {
     "message" : [
-      "Can not build a HashedRelation that is larger than 8G"
+      "Can not build a HashedRelation that is larger than 8G."
     ]
   },
   "_LEGACY_ERROR_TEMP_2111" : {
     "message" : [
-      "failed to push a row into <rowQueue>"
+      "failed to push a row into <rowQueue>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2112" : {
@@ -4092,22 +4092,22 @@
   },
   "_LEGACY_ERROR_TEMP_2113" : {
     "message" : [
-      "Unable to parse <stats> as a percentile"
+      "Unable to parse <stats> as a percentile."
     ]
   },
   "_LEGACY_ERROR_TEMP_2114" : {
     "message" : [
-      "<stats> is not a recognised statistic"
+      "<stats> is not a recognised statistic."
     ]
   },
   "_LEGACY_ERROR_TEMP_2115" : {
     "message" : [
-      "Unknown column: <unknownColumn>"
+      "Unknown column: <unknownColumn>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2116" : {
     "message" : [
-      "Unexpected: <o>"
+      "Unexpected: <o>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2120" : {
@@ -4122,7 +4122,7 @@
   },
   "_LEGACY_ERROR_TEMP_2122" : {
     "message" : [
-      "Failed parsing <simpleString>: <raw>"
+      "Failed parsing <simpleString>: <raw>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2123" : {
@@ -4132,12 +4132,12 @@
   },
   "_LEGACY_ERROR_TEMP_2124" : {
     "message" : [
-      "Failed to merge decimal types with incompatible scale <leftScale> and 
<rightScale>"
+      "Failed to merge decimal types with incompatible scale <leftScale> and 
<rightScale>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2125" : {
     "message" : [
-      "Failed to merge incompatible data types <leftCatalogString> and 
<rightCatalogString>"
+      "Failed to merge incompatible data types <leftCatalogString> and 
<rightCatalogString>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2126" : {
@@ -4157,17 +4157,17 @@
   },
   "_LEGACY_ERROR_TEMP_2129" : {
     "message" : [
-      "Conflict found: Field <field> <actual> differs from <field> <expected> 
derived from <candidate>"
+      "Conflict found: Field <field> <actual> differs from <field> <expected> 
derived from <candidate>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2130" : {
     "message" : [
-      "Fail to recognize '<pattern>' pattern in the DateTimeFormatter. You can 
form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html";
+      "Fail to recognize '<pattern>' pattern in the DateTimeFormatter. You can 
form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html.";
     ]
   },
   "_LEGACY_ERROR_TEMP_2131" : {
     "message" : [
-      "Exception when registering StreamingQueryListener"
+      "Exception when registering StreamingQueryListener."
     ]
   },
   "_LEGACY_ERROR_TEMP_2132" : {
@@ -4187,28 +4187,28 @@
   },
   "_LEGACY_ERROR_TEMP_2135" : {
     "message" : [
-      "Failed to parse an empty string for data type <dataType>"
+      "Failed to parse an empty string for data type <dataType>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2138" : {
     "message" : [
-      "Cannot have circular references in bean class, but got the circular 
reference of class <clazz>"
+      "Cannot have circular references in bean class, but got the circular 
reference of class <clazz>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2139" : {
     "message" : [
-      "cannot have circular references in class, but got the circular 
reference of class <t>"
+      "cannot have circular references in class, but got the circular 
reference of class <t>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2140" : {
     "message" : [
       "`<fieldName>` is not a valid identifier of Java and cannot be used as 
field name",
-      "<walkedTypePath>"
+      "<walkedTypePath>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2142" : {
     "message" : [
-      "Attributes for type <schema> is not supported"
+      "Attributes for type <schema> is not supported."
     ]
   },
   "_LEGACY_ERROR_TEMP_2144" : {
@@ -4218,17 +4218,17 @@
   },
   "_LEGACY_ERROR_TEMP_2145" : {
     "message" : [
-      "<paramName> cannot be more than one character"
+      "<paramName> cannot be more than one character."
     ]
   },
   "_LEGACY_ERROR_TEMP_2146" : {
     "message" : [
-      "<paramName> should be an integer. Found <value>"
+      "<paramName> should be an integer. Found <value>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2147" : {
     "message" : [
-      "<paramName> flag can be true or false"
+      "<paramName> flag can be true or false."
     ]
   },
   "_LEGACY_ERROR_TEMP_2148" : {
@@ -4244,23 +4244,23 @@
   "_LEGACY_ERROR_TEMP_2151" : {
     "message" : [
       "Error while decoding: <e>",
-      "<expressions>"
+      "<expressions>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2152" : {
     "message" : [
       "Error while encoding: <e>",
-      "<expressions>"
+      "<expressions>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2153" : {
     "message" : [
-      "class <clsName> has unexpected serializer: <objSerializer>"
+      "class <clsName> has unexpected serializer: <objSerializer>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2154" : {
     "message" : [
-      "Failed to get outer pointer for <innerCls>"
+      "Failed to get outer pointer for <innerCls>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2155" : {
@@ -4270,7 +4270,7 @@
   },
   "_LEGACY_ERROR_TEMP_2156" : {
     "message" : [
-      "The size function doesn't support the operand type <dataType>"
+      "The size function doesn't support the operand type <dataType>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2157" : {
@@ -4305,12 +4305,12 @@
   },
   "_LEGACY_ERROR_TEMP_2163" : {
     "message" : [
-      "Initial type <dataType> must be a <target>"
+      "Initial type <dataType> must be a <target>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2164" : {
     "message" : [
-      "Initial type <dataType> must be an <arrayType>, a <structType> or a 
<mapType>"
+      "Initial type <dataType> must be an <arrayType>, a <structType> or a 
<mapType>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2165" : {
@@ -4320,7 +4320,7 @@
   },
   "_LEGACY_ERROR_TEMP_2166" : {
     "message" : [
-      "Malformed JSON"
+      "Malformed JSON."
     ]
   },
   "_LEGACY_ERROR_TEMP_2167" : {
@@ -4335,7 +4335,7 @@
   },
   "_LEGACY_ERROR_TEMP_2169" : {
     "message" : [
-      "This method should not be called in the analyzer"
+      "This method should not be called in the analyzer."
     ]
   },
   "_LEGACY_ERROR_TEMP_2170" : {
@@ -4343,18 +4343,18 @@
       "Cannot safely merge SERDEPROPERTIES:",
       "<props1>",
       "<props2>",
-      "The conflict keys: <conflictKeys>"
+      "The conflict keys: <conflictKeys>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2171" : {
     "message" : [
-      "Not supported pair: <r1>, <r2> at <function>()"
+      "Not supported pair: <r1>, <r2> at <function>()."
     ]
   },
   "_LEGACY_ERROR_TEMP_2172" : {
     "message" : [
       "Once strategy's idempotence is broken for batch <batchName>",
-      "<plan>"
+      "<plan>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2173" : {
@@ -4384,17 +4384,17 @@
   },
   "_LEGACY_ERROR_TEMP_2178" : {
     "message" : [
-      "Remote operations not supported"
+      "Remote operations not supported."
     ]
   },
   "_LEGACY_ERROR_TEMP_2179" : {
     "message" : [
-      "HiveServer2 Kerberos principal or keytab is not correctly configured"
+      "HiveServer2 Kerberos principal or keytab is not correctly configured."
     ]
   },
   "_LEGACY_ERROR_TEMP_2180" : {
     "message" : [
-      "Parent SparkUI to attach this tab to not found!"
+      "Parent SparkUI to attach this tab to not found."
     ]
   },
   "_LEGACY_ERROR_TEMP_2181" : {
@@ -4405,8 +4405,8 @@
   "_LEGACY_ERROR_TEMP_2182" : {
     "message" : [
       "Requested partitioning does not match the <tableIdentifier> table:",
-      "Requested partitions: <partitionKeys>",
-      "Table partitions: <partitionColumnNames>"
+      "Requested partitions: <partitionKeys>.",
+      "Table partitions: <partitionColumnNames>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2183" : {
@@ -4416,7 +4416,7 @@
   },
   "_LEGACY_ERROR_TEMP_2184" : {
     "message" : [
-      "Cannot remove partition directory '<partitionPath>'"
+      "Cannot remove partition directory '<partitionPath>'."
     ]
   },
   "_LEGACY_ERROR_TEMP_2185" : {
@@ -4431,12 +4431,12 @@
   },
   "_LEGACY_ERROR_TEMP_2187" : {
     "message" : [
-      "<message>, db: <dbName>, table: <tableName>"
+      "<message>, db: <dbName>, table: <tableName>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2188" : {
     "message" : [
-      "Cannot recognize hive type string: <fieldType>, column: <fieldName>"
+      "Cannot recognize hive type string: <fieldType>, column: <fieldName>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2189" : {
@@ -4446,22 +4446,22 @@
   },
   "_LEGACY_ERROR_TEMP_2190" : {
     "message" : [
-      "DROP TABLE ... PURGE"
+      "DROP TABLE ... PURGE."
     ]
   },
   "_LEGACY_ERROR_TEMP_2191" : {
     "message" : [
-      "ALTER TABLE ... DROP PARTITION ... PURGE"
+      "ALTER TABLE ... DROP PARTITION ... PURGE."
     ]
   },
   "_LEGACY_ERROR_TEMP_2192" : {
     "message" : [
-      "Partition filter cannot have both `\"` and `'` characters"
+      "Partition filter cannot have both `\"` and `'` characters."
     ]
   },
   "_LEGACY_ERROR_TEMP_2193" : {
     "message" : [
-      "Caught Hive MetaException attempting to get partition metadata by 
filter from Hive. You can set the Spark configuration setting 
<hiveMetastorePartitionPruningFallbackOnException> to true to work around this 
problem, however this will result in degraded performance. Please report a bug: 
https://issues.apache.org/jira/browse/SPARK";
+      "Caught Hive MetaException attempting to get partition metadata by 
filter from Hive. You can set the Spark configuration setting 
<hiveMetastorePartitionPruningFallbackOnException> to true to work around this 
problem, however this will result in degraded performance. Please report a bug: 
https://issues.apache.org/jira/browse/SPARK.";
     ]
   },
   "_LEGACY_ERROR_TEMP_2194" : {
@@ -4476,22 +4476,22 @@
   },
   "_LEGACY_ERROR_TEMP_2196" : {
     "message" : [
-      "Unable to fetch tables of db <dbName>"
+      "Unable to fetch tables of db <dbName>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2197" : {
     "message" : [
-      "LOCATION clause illegal for view partition"
+      "LOCATION clause illegal for view partition."
     ]
   },
   "_LEGACY_ERROR_TEMP_2198" : {
     "message" : [
-      "Failed to rename as <dstPath> already exists"
+      "Failed to rename as <dstPath> already exists."
     ]
   },
   "_LEGACY_ERROR_TEMP_2199" : {
     "message" : [
-      "Failed to rename temp file <srcPath> to <dstPath> as rename returned 
false"
+      "Failed to rename temp file <srcPath> to <dstPath> as rename returned 
false."
     ]
   },
   "_LEGACY_ERROR_TEMP_2200" : {
@@ -4514,37 +4514,37 @@
   },
   "_LEGACY_ERROR_TEMP_2201" : {
     "message" : [
-      "Partition column <col> not found in schema <schema>"
+      "Partition column <col> not found in schema <schema>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2203" : {
     "message" : [
-      "Cannot set timeout duration without enabling processing time timeout in 
[map|flatMap]GroupsWithState"
+      "Cannot set timeout duration without enabling processing time timeout in 
[map|flatMap]GroupsWithState."
     ]
   },
   "_LEGACY_ERROR_TEMP_2204" : {
     "message" : [
-      "Cannot get event time watermark timestamp without setting watermark before [map|flatMap]GroupsWithState"
+      "Cannot get event time watermark timestamp without setting watermark before [map|flatMap]GroupsWithState."
     ]
   },
   "_LEGACY_ERROR_TEMP_2205" : {
     "message" : [
-      "Cannot set timeout timestamp without enabling event time timeout in [map|flatMapGroupsWithState"
+      "Cannot set timeout timestamp without enabling event time timeout in [map|flatMap]GroupsWithState."
     ]
   },
   "_LEGACY_ERROR_TEMP_2206" : {
     "message" : [
-      "Unable to find batch <batchMetadataFile>"
+      "Unable to find batch <batchMetadataFile>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2207" : {
     "message" : [
-      "Multiple streaming queries are concurrently using <path>"
+      "Multiple streaming queries are concurrently using <path>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2208" : {
     "message" : [
-      "<commitProtocol> does not support adding files with an absolute path"
+      "<commitProtocol> does not support adding files with an absolute path."
     ]
   },
   "_LEGACY_ERROR_TEMP_2209" : {
@@ -4559,47 +4559,47 @@
   },
   "_LEGACY_ERROR_TEMP_2210" : {
     "message" : [
-      "StreamingRelationExec cannot be executed"
+      "StreamingRelationExec cannot be executed."
     ]
   },
   "_LEGACY_ERROR_TEMP_2211" : {
     "message" : [
-      "Invalid output mode: <outputMode>"
+      "Invalid output mode: <outputMode>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2212" : {
     "message" : [
-      "Invalid catalog name: <name>"
+      "Invalid catalog name: <name>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2214" : {
     "message" : [
-      "Plugin class for catalog '<name>' does not implement CatalogPlugin: <pluginClassName>"
+      "Plugin class for catalog '<name>' does not implement CatalogPlugin: <pluginClassName>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2215" : {
     "message" : [
-      "Cannot find catalog plugin class for catalog '<name>': <pluginClassName>"
+      "Cannot find catalog plugin class for catalog '<name>': <pluginClassName>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2216" : {
     "message" : [
-      "Failed to find public no-arg constructor for catalog '<name>': <pluginClassName>)"
+      "Failed to find public no-arg constructor for catalog '<name>': <pluginClassName>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2217" : {
     "message" : [
-      "Failed to call public no-arg constructor for catalog '<name>': <pluginClassName>)"
+      "Failed to call public no-arg constructor for catalog '<name>': <pluginClassName>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2218" : {
     "message" : [
-      "Cannot instantiate abstract catalog plugin class for catalog '<name>': <pluginClassName>"
+      "Cannot instantiate abstract catalog plugin class for catalog '<name>': <pluginClassName>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2219" : {
     "message" : [
-      "Failed during instantiating constructor for catalog '<name>': <pluginClassName>"
+      "Failed during instantiating constructor for catalog '<name>': <pluginClassName>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2220" : {
@@ -4629,12 +4629,12 @@
   },
   "_LEGACY_ERROR_TEMP_2226" : {
     "message" : [
-      "null literals can't be casted to <name>"
+      "null literals can't be casted to <name>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2227" : {
     "message" : [
-      "<name> is not an UserDefinedType. Please make sure registering an UserDefinedType for <userClass>"
+      "<name> is not an UserDefinedType. Please make sure registering an UserDefinedType for <userClass>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2228" : {
@@ -4659,12 +4659,12 @@
   },
   "_LEGACY_ERROR_TEMP_2232" : {
     "message" : [
-      "Value at index <index> is null"
+      "Value at index <index> is null."
     ]
   },
   "_LEGACY_ERROR_TEMP_2233" : {
     "message" : [
-      "Only Data Sources providing FileFormat are supported: <providingClass>"
+      "Only Data Sources providing FileFormat are supported: <providingClass>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2234" : {
@@ -4679,7 +4679,7 @@
   },
   "_LEGACY_ERROR_TEMP_2236" : {
     "message" : [
-      "Unrecognized compression scheme type ID: <typeId>"
+      "Unrecognized compression scheme type ID: <typeId>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2237" : {
@@ -4699,7 +4699,7 @@
   },
   "_LEGACY_ERROR_TEMP_2240" : {
     "message" : [
-      "Unable to create Parquet converter for data type <t> whose Parquet type is <parquetType>"
+      "Unable to create Parquet converter for data type <t> whose Parquet type is <parquetType>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2241" : {
@@ -4724,28 +4724,28 @@
   },
   "_LEGACY_ERROR_TEMP_2245" : {
     "message" : [
-      "Table does not support overwrite by expression: <table>"
+      "Table does not support overwrite by expression: <table>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2246" : {
     "message" : [
-      "Table does not support dynamic partition overwrite: <table>"
+      "Table does not support dynamic partition overwrite: <table>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2247" : {
     "message" : [
       "Failed merging schema:",
-      "<schema>"
+      "<schema>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2248" : {
     "message" : [
-      "Cannot broadcast the table over <maxBroadcastTableRows> rows: <numRows> rows"
+      "Cannot broadcast the table over <maxBroadcastTableRows> rows: <numRows> rows."
     ]
   },
   "_LEGACY_ERROR_TEMP_2249" : {
     "message" : [
-      "Cannot broadcast the table that is larger than <maxBroadcastTableBytes>GB: <dataSize> GB"
+      "Cannot broadcast the table that is larger than <maxBroadcastTableBytes>GB: <dataSize> GB."
     ]
   },
   "_LEGACY_ERROR_TEMP_2250" : {
@@ -4760,7 +4760,7 @@
   },
   "_LEGACY_ERROR_TEMP_2252" : {
     "message" : [
-      "Cannot merge <className> with <otherClass>"
+      "Cannot merge <className> with <otherClass>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2253" : {
@@ -4770,22 +4770,22 @@
   },
   "_LEGACY_ERROR_TEMP_2254" : {
     "message" : [
-      "Data read failed"
+      "Data read failed."
     ]
   },
   "_LEGACY_ERROR_TEMP_2255" : {
     "message" : [
-      "Epoch marker generation failed"
+      "Epoch marker generation failed."
     ]
   },
   "_LEGACY_ERROR_TEMP_2256" : {
     "message" : [
-      "Foreach writer has been aborted due to a task failure"
+      "Foreach writer has been aborted due to a task failure."
     ]
   },
   "_LEGACY_ERROR_TEMP_2258" : {
     "message" : [
-      "Error reading delta file <fileToRead> of <clazz>: key size cannot be <keySize>"
+      "Error reading delta file <fileToRead> of <clazz>: key size cannot be <keySize>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2259" : {
@@ -4805,7 +4805,7 @@
   },
   "_LEGACY_ERROR_TEMP_2262" : {
     "message" : [
-      "latestOffset(Offset, ReadLimit) should be called instead of this method"
+      "latestOffset(Offset, ReadLimit) should be called instead of this method."
     ]
   },
   "_LEGACY_ERROR_TEMP_2263" : {
@@ -4829,27 +4829,27 @@
   },
   "_LEGACY_ERROR_TEMP_2264" : {
     "message" : [
-      "Subprocess exited with status <exitCode>. Error: <stderrBuffer>"
+      "Subprocess exited with status <exitCode>. Error: <stderrBuffer>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2265" : {
     "message" : [
-      "<nodeName> without serde does not support <dt> as output data type"
+      "<nodeName> without serde does not support <dt> as output data type."
     ]
   },
   "_LEGACY_ERROR_TEMP_2266" : {
     "message" : [
-      "Invalid `startIndex` provided for generating iterator over the array. Total elements: <numRows>, requested `startIndex`: <startIndex>"
+      "Invalid `startIndex` provided for generating iterator over the array. Total elements: <numRows>, requested `startIndex`: <startIndex>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2267" : {
     "message" : [
-      "The backing <className> has been modified since the creation of this Iterator"
+      "The backing <className> has been modified since the creation of this Iterator."
     ]
   },
   "_LEGACY_ERROR_TEMP_2268" : {
     "message" : [
-      "<nodeName> does not implement doExecuteBroadcast"
+      "<nodeName> does not implement doExecuteBroadcast."
     ]
   },
   "_LEGACY_ERROR_TEMP_2269" : {
@@ -4859,12 +4859,12 @@
   },
   "_LEGACY_ERROR_TEMP_2270" : {
     "message" : [
-      "comment on table is not supported"
+      "comment on table is not supported."
     ]
   },
   "_LEGACY_ERROR_TEMP_2271" : {
     "message" : [
-      "UpdateColumnNullability is not supported"
+      "UpdateColumnNullability is not supported."
     ]
   },
   "_LEGACY_ERROR_TEMP_2272" : {
@@ -4889,7 +4889,7 @@
   },
   "_LEGACY_ERROR_TEMP_2276" : {
     "message" : [
-      "Hive table <tableName> with ANSI intervals is not supported"
+      "Hive table <tableName> with ANSI intervals is not supported."
     ]
   },
   "_LEGACY_ERROR_TEMP_2277" : {
@@ -4899,7 +4899,7 @@
   },
   "_LEGACY_ERROR_TEMP_2278" : {
     "message" : [
-      "The input <valueType> '<input>' does not match the given number format: '<format>'"
+      "The input <valueType> '<input>' does not match the given number format: '<format>'."
     ]
   },
   "_LEGACY_ERROR_TEMP_2279" : {
@@ -4909,17 +4909,17 @@
   },
   "_LEGACY_ERROR_TEMP_2280" : {
     "message" : [
-      "Create namespace comment is not supported"
+      "Create namespace comment is not supported."
     ]
   },
   "_LEGACY_ERROR_TEMP_2281" : {
     "message" : [
-      "Remove namespace comment is not supported"
+      "Remove namespace comment is not supported."
     ]
   },
   "_LEGACY_ERROR_TEMP_2282" : {
     "message" : [
-      "Drop namespace restrict is not supported"
+      "Drop namespace restrict is not supported."
     ]
   },
   "_LEGACY_ERROR_TEMP_2300" : {
@@ -4934,22 +4934,22 @@
   },
   "_LEGACY_ERROR_TEMP_2302" : {
     "message" : [
-      "'<name>' does not support more than one sources"
+      "'<name>' does not support more than one sources."
     ]
   },
   "_LEGACY_ERROR_TEMP_2303" : {
     "message" : [
-      "incompatible types found in column <name> for inline table"
+      "incompatible types found in column <name> for inline table."
     ]
   },
   "_LEGACY_ERROR_TEMP_2304" : {
     "message" : [
-      "cannot evaluate expression <sqlExpr> in inline table definition"
+      "cannot evaluate expression <sqlExpr> in inline table definition."
     ]
   },
   "_LEGACY_ERROR_TEMP_2305" : {
     "message" : [
-      "expected <numCols> columns but found <rowSize> columns in row <ri>"
+      "expected <numCols> columns but found <rowSize> columns in row <ri>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2306" : {
@@ -4964,27 +4964,27 @@
   },
   "_LEGACY_ERROR_TEMP_2308" : {
     "message" : [
-      "could not resolve `<name>` to a table-valued function"
+      "could not resolve `<name>` to a table-valued function."
     ]
   },
   "_LEGACY_ERROR_TEMP_2309" : {
     "message" : [
-      "cannot resolve <sqlExpr> in MERGE command given columns [<cols>]"
+      "cannot resolve <sqlExpr> in MERGE command given columns [<cols>]."
     ]
   },
   "_LEGACY_ERROR_TEMP_2311" : {
     "message" : [
-      "'writeTo' can not be called on streaming Dataset/DataFrame"
+      "'writeTo' can not be called on streaming Dataset/DataFrame."
     ]
   },
   "_LEGACY_ERROR_TEMP_2312" : {
     "message" : [
-      "'write' can not be called on streaming Dataset/DataFrame"
+      "'write' can not be called on streaming Dataset/DataFrame."
     ]
   },
   "_LEGACY_ERROR_TEMP_2313" : {
     "message" : [
-      "Hint not found: <name>"
+      "Hint not found: <name>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2314" : {
@@ -4994,17 +4994,17 @@
   },
   "_LEGACY_ERROR_TEMP_2315" : {
     "message" : [
-      "cannot resolve '<sqlExpr>' due to data type mismatch: <msg><hint>"
+      "cannot resolve '<sqlExpr>' due to data type mismatch: <msg><hint>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2316" : {
     "message" : [
-      "observed metrics should be named: <operator>"
+      "observed metrics should be named: <operator>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2317" : {
     "message" : [
-      "window expressions are not allowed in observed metrics, but found: <sqlExpr>"
+      "window expressions are not allowed in observed metrics, but found: <sqlExpr>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2318" : {
@@ -5014,17 +5014,17 @@
   },
   "_LEGACY_ERROR_TEMP_2319" : {
     "message" : [
-      "nested aggregates are not allowed in observed metrics, but found: <sqlExpr>"
+      "nested aggregates are not allowed in observed metrics, but found: <sqlExpr>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2320" : {
     "message" : [
-      "distinct aggregates are not allowed in observed metrics, but found: <sqlExpr>"
+      "distinct aggregates are not allowed in observed metrics, but found: <sqlExpr>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2321" : {
     "message" : [
-      "aggregates with filter predicate are not allowed in observed metrics, but found: <sqlExpr>"
+      "aggregates with filter predicate are not allowed in observed metrics, but found: <sqlExpr>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2322" : {
@@ -5034,42 +5034,42 @@
   },
   "_LEGACY_ERROR_TEMP_2323" : {
     "message" : [
-      "Cannot <op> column, because <fieldNames> already exists in <struct>"
+      "Cannot <op> column, because <fieldNames> already exists in <struct>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2324" : {
     "message" : [
-      "Cannot update <table> field <fieldName> type: update a struct by updating its fields"
+      "Cannot update <table> field <fieldName> type: update a struct by updating its fields."
     ]
   },
   "_LEGACY_ERROR_TEMP_2325" : {
     "message" : [
-      "Cannot update <table> field <fieldName> type: update a map by updating <fieldName>.key or <fieldName>.value"
+      "Cannot update <table> field <fieldName> type: update a map by updating <fieldName>.key or <fieldName>.value."
     ]
   },
   "_LEGACY_ERROR_TEMP_2326" : {
     "message" : [
-      "Cannot update <table> field <fieldName> type: update the element by updating <fieldName>.element"
+      "Cannot update <table> field <fieldName> type: update the element by updating <fieldName>.element."
     ]
   },
   "_LEGACY_ERROR_TEMP_2327" : {
     "message" : [
-      "Cannot update <table> field <fieldName> type: update a UserDefinedType[<udtSql>] by updating its fields"
+      "Cannot update <table> field <fieldName> type: update a UserDefinedType[<udtSql>] by updating its fields."
     ]
   },
   "_LEGACY_ERROR_TEMP_2328" : {
     "message" : [
-      "Cannot update <table> field <fieldName> to interval type"
+      "Cannot update <table> field <fieldName> to interval type."
     ]
   },
   "_LEGACY_ERROR_TEMP_2329" : {
     "message" : [
-      "Cannot update <table> field <fieldName>: <oldType> cannot be cast to <newType>"
+      "Cannot update <table> field <fieldName>: <oldType> cannot be cast to <newType>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2330" : {
     "message" : [
-      "Cannot change nullable column to non-nullable: <fieldName>"
+      "Cannot change nullable column to non-nullable: <fieldName>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2331" : {
@@ -5119,7 +5119,7 @@
   },
   "_LEGACY_ERROR_TEMP_2407" : {
     "message" : [
-      "grouping_id() can only be used with GroupingSets/Cube/Rollup"
+      "grouping_id() can only be used with GroupingSets/Cube/Rollup."
     ]
   },
   "_LEGACY_ERROR_TEMP_2408" : {
@@ -5129,12 +5129,12 @@
   },
   "_LEGACY_ERROR_TEMP_2409" : {
     "message" : [
-      "Distinct window functions are not supported: <w>"
+      "Distinct window functions are not supported: <w>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2410" : {
     "message" : [
-      "<wf> function can only be evaluated in an ordered row-based window frame with a single offset: <w>"
+      "<wf> function can only be evaluated in an ordered row-based window frame with a single offset: <w>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2411" : {
@@ -5204,7 +5204,7 @@
   },
   "_LEGACY_ERROR_TEMP_2427" : {
     "message" : [
-      "sorting is not supported for columns of type <type>"
+      "sorting is not supported for columns of type <type>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2428" : {
@@ -5214,12 +5214,12 @@
   },
   "_LEGACY_ERROR_TEMP_2430" : {
     "message" : [
-      "<operator> can only be performed on tables with compatible column types. The <ci> column of the <ti> table is <dt1> type which is not compatible with <dt2> at the same column of the first table.<hint>"
+      "<operator> can only be performed on tables with compatible column types. The <ci> column of the <ti> table is <dt1> type which is not compatible with <dt2> at the same column of the first table.<hint>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2431" : {
     "message" : [
-      "Invalid partitioning: <cols> is missing or is in a map or array"
+      "Invalid partitioning: <cols> is missing or is in a map or array."
     ]
   },
   "_LEGACY_ERROR_TEMP_2432" : {
@@ -5230,35 +5230,35 @@
   "_LEGACY_ERROR_TEMP_2433" : {
     "message" : [
       "Only a single table generating function is allowed in a SELECT clause, found:",
-      "<sqlExprs>"
+      "<sqlExprs>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2434" : {
     "message" : [
       "Failure when resolving conflicting references in Join:",
       "<plan>",
-      "Conflicting attributes: <conflictingAttributes>"
+      "Conflicting attributes: <conflictingAttributes>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2435" : {
     "message" : [
       "Failure when resolving conflicting references in Intersect:",
       "<plan>",
-      "Conflicting attributes: <conflictingAttributes>"
+      "Conflicting attributes: <conflictingAttributes>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2436" : {
     "message" : [
       "Failure when resolving conflicting references in Except:",
       "<plan>",
-      "Conflicting attributes: <conflictingAttributes>"
+      "Conflicting attributes: <conflictingAttributes>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2437" : {
     "message" : [
       "Failure when resolving conflicting references in AsOfJoin:",
       "<plan>",
-      "Conflicting attributes: <conflictingAttributes>"
+      "Conflicting attributes: <conflictingAttributes>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2438" : {
@@ -5270,28 +5270,28 @@
     "message" : [
      "nondeterministic expressions are only allowed in Project, Filter, Aggregate or Window, found:",
       "<sqlExprs>",
-      "in operator <operator>"
+      "in operator <operator>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2441" : {
     "message" : [
       "The query operator `<operator>` contains one or more unsupported expression types Aggregate, Window or Generate.",
-      "Invalid expressions: [<invalidExprSqls>]"
+      "Invalid expressions: [<invalidExprSqls>]."
     ]
   },
   "_LEGACY_ERROR_TEMP_2443" : {
     "message" : [
-      "Multiple definitions of observed metrics named '<name>': <plan>"
+      "Multiple definitions of observed metrics named '<name>': <plan>."
     ]
   },
   "_LEGACY_ERROR_TEMP_2444" : {
     "message" : [
-      "Function '<funcName>' does not implement ScalarFunction or AggregateFunction"
+      "Function '<funcName>' does not implement ScalarFunction or AggregateFunction."
     ]
   },
   "_LEGACY_ERROR_TEMP_2445" : {
     "message" : [
-      "grouping() can only be used with GroupingSets/Cube/Rollup"
+      "grouping() can only be used with GroupingSets/Cube/Rollup."
     ]
   },
   "_LEGACY_ERROR_TEMP_2446" : {
diff --git a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
index 198383a2888..c20c287c564 100644
--- a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
@@ -194,7 +194,7 @@ class SparkThrowableSuite extends SparkFunSuite {
         Map("objectName" -> "`foo`", "proposal" -> "`bar`, `baz`")
      ) ==
      "[UNRESOLVED_COLUMN.WITH_SUGGESTION] A column or function parameter with " +
-        "name `foo` cannot be resolved. Did you mean one of the following? [`bar`, `baz`]"
+        "name `foo` cannot be resolved. Did you mean one of the following? [`bar`, `baz`]."
     )
 
     assert(
@@ -206,7 +206,7 @@ class SparkThrowableSuite extends SparkFunSuite {
         ""
      ) ==
      "[UNRESOLVED_COLUMN.WITH_SUGGESTION] A column or function parameter with " +
-        "name `foo` cannot be resolved. Did you mean one of the following? [`bar`, `baz`]"
+        "name `foo` cannot be resolved. Did you mean one of the following? [`bar`, `baz`]."
     )
   }
 
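A note on the templates above: each error-classes.json entry is a message template whose <angleBracketed> tokens are filled from a parameter map when the error is raised, which is what the SparkThrowableSuite assertions above exercise end to end. A minimal sketch of the substitution idea, assuming a hypothetical formatErrorMessage helper (Spark's actual rendering lives elsewhere, e.g. in SparkThrowableHelper in core):

    // Hypothetical stand-in for Spark's template rendering; illustrative only.
    def formatErrorMessage(template: String, parameters: Map[String, String]): String =
      parameters.foldLeft(template) { case (message, (name, value)) =>
        // Replace each <name> placeholder with its parameter value.
        message.replace(s"<$name>", value)
      }

    // Template from _LEGACY_ERROR_TEMP_2206 above; yields "Unable to find batch commits/42."
    val rendered = formatErrorMessage(
      "Unable to find batch <batchMetadataFile>.",
      Map("batchMetadataFile" -> "commits/42"))

Keeping the trailing dot in the template means every rendered message ends consistently, without touching individual call sites.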
diff --git a/core/src/test/scala/org/apache/spark/metrics/sink/GraphiteSinkSuite.scala b/core/src/test/scala/org/apache/spark/metrics/sink/GraphiteSinkSuite.scala
index 3a23379f1de..eabede303e0 100644
--- a/core/src/test/scala/org/apache/spark/metrics/sink/GraphiteSinkSuite.scala
+++ b/core/src/test/scala/org/apache/spark/metrics/sink/GraphiteSinkSuite.scala
@@ -113,11 +113,12 @@ class GraphiteSinkSuite extends SparkFunSuite {
     props.put("protocol", "http")
     val registry = new MetricRegistry
 
-    val e = intercept[SparkException] {
-      new GraphiteSink(props, registry)
-    }
-    assert(e.getErrorClass === "GRAPHITE_SINK_INVALID_PROTOCOL")
-    assert(e.getMessage ===
-      "[GRAPHITE_SINK_INVALID_PROTOCOL] Invalid Graphite protocol: http")
+    checkError(
+      exception = intercept[SparkException] {
+        new GraphiteSink(props, registry)
+      },
+      errorClass = "GRAPHITE_SINK_INVALID_PROTOCOL",
+      parameters = Map("protocol" -> "http")
+    )
   }
 }
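A note on the test change above, which is the pattern applied across the suites in this commit: checkError asserts on the error class and its message parameters rather than on the fully rendered string, so a punctuation-only template change (such as the dots added here) cannot break the test. The general shape, sketched with placeholder names (codeUnderTest, SOME_ERROR_CLASS, and the parameter map are hypothetical):

    checkError(
      exception = intercept[SparkException] {
        codeUnderTest()                      // hypothetical call that raises the error
      },
      errorClass = "SOME_ERROR_CLASS",       // placeholder error class name
      parameters = Map("param" -> "value"))  // expected message parameters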
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
index 3b15620cd69..8208c1f7fb6 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
@@ -153,11 +153,16 @@ class DataTypeSuite extends SparkFunSuite {
     val right = StructType(
       StructField("b", LongType) :: Nil)
 
-    val message = intercept[SparkException] {
-      left.merge(right)
-    }.getMessage
-    assert(message.equals("Failed to merge fields 'b' and 'b'. " +
-      "Failed to merge incompatible data types float and bigint"))
+    checkError(
+      exception = intercept[SparkException] {
+        left.merge(right)
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_2123",
+      parameters = Map(
+        "leftName" -> "b",
+        "rightName" -> "b",
+        "message" -> "Failed to merge incompatible data types float and bigint.")
+    )
   }
 
   test("existsRecursively") {
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
index 091db833c89..ea67b974a5f 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
@@ -163,7 +163,7 @@ org.apache.spark.SparkDateTimeException
   "sqlState" : "22007",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Conflict found: Field DayOfMonth 30 differs from DayOfMonth 31 derived from 1970-12-31"
+    "message" : "Conflict found: Field DayOfMonth 30 differs from DayOfMonth 31 derived from 1970-12-31."
   }
 }
 
@@ -179,7 +179,7 @@ org.apache.spark.SparkDateTimeException
   "sqlState" : "22007",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Conflict found: Field MonthOfYear 11 differs from MonthOfYear 12 derived from 1970-12-31"
+    "message" : "Conflict found: Field MonthOfYear 11 differs from MonthOfYear 12 derived from 1970-12-31."
   }
 }
 
@@ -211,7 +211,7 @@ org.apache.spark.SparkDateTimeException
   "sqlState" : "22007",
   "messageParameters" : {
     "ansiConfig" : "\"spark.sql.ansi.enabled\"",
-    "message" : "Conflict found: Field DayOfMonth 30 differs from DayOfMonth 31 derived from 1970-12-31"
+    "message" : "Conflict found: Field DayOfMonth 30 differs from DayOfMonth 31 derived from 1970-12-31."
   }
 }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index a7bb0a2d1bd..72e459ca198 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -29,7 +29,7 @@ import scala.util.Random
 
 import org.scalatest.matchers.should.Matchers._
 
-import org.apache.spark.SparkException
+import org.apache.spark.{SparkException, SparkIllegalArgumentException}
 import org.apache.spark.api.python.PythonEvalType
 import org.apache.spark.scheduler.{SparkListener, SparkListenerJobEnd}
 import org.apache.spark.sql.catalyst.{InternalRow, TableIdentifier}
@@ -1142,15 +1142,21 @@ class DataFrameSuite extends QueryTest
     val onlyPercentiles = person2.summary("0.1%", "99.9%")
     assert(onlyPercentiles.count() === 2)
 
-    val fooE = intercept[IllegalArgumentException] {
-      person2.summary("foo")
-    }
-    assert(fooE.getMessage === "foo is not a recognised statistic")
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        person2.summary("foo")
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_2114",
+      parameters = Map("stats" -> "foo")
+    )
 
-    val parseE = intercept[IllegalArgumentException] {
-      person2.summary("foo%")
-    }
-    assert(parseE.getMessage === "Unable to parse foo% as a percentile")
+    checkError(
+      exception = intercept[SparkIllegalArgumentException] {
+        person2.summary("foo%")
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_2113",
+      parameters = Map("stats" -> "foo%")
+    )
   }
 
   test("apply on query results (SPARK-5462)") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
index 9051d56fd68..15d14714e7d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
@@ -584,8 +584,17 @@ class PersistedViewTestSuite extends SQLViewTestSuite with SharedSparkSession {
         val e = intercept[AnalysisException] {
           sql(s"SELECT * FROM test_view")
         }
-        assert(e.getMessage.contains("re-create the view by running: CREATE OR REPLACE"))
-        val ddl = e.getMessage.split(": ").last
+        checkError(
+          exception = e,
+          errorClass = "_LEGACY_ERROR_TEMP_1176",
+          parameters = Map(
+            "viewName" -> "`spark_catalog`.`default`.`test_view`",
+            "viewDDL" ->
+              "CREATE OR REPLACE VIEW spark_catalog.default.test_view  AS SELECT * FROM t",
+            "actualCols" -> "[]", "colName" -> "col_j",
+            "expectedNum" -> "1")
+        )
+        val ddl = e.getMessageParameters.get("viewDDL")
         sql(ddl)
         checkAnswer(sql("select * FROM test_view"), Row(1))
       }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileIndexSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileIndexSuite.scala
index 07018508b91..9ac61f0cee5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileIndexSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileIndexSuite.scala
@@ -29,7 +29,7 @@ import org.apache.hadoop.fs.viewfs.ViewFileSystem
 import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito.{mock, when}
 
-import org.apache.spark.SparkException
+import org.apache.spark.{SparkException, SparkRuntimeException}
 import org.apache.spark.metrics.source.HiveCatalogMetrics
 import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.catalyst.util._
@@ -133,10 +133,13 @@ class FileIndexSuite extends SharedSparkSession {
       val schema = StructType(Seq(StructField("a", IntegerType, false)))
       withSQLConf(SQLConf.VALIDATE_PARTITION_COLUMNS.key -> "true") {
        val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, Some(schema))
-        val msg = intercept[RuntimeException] {
-          fileIndex.partitionSpec()
-        }.getMessage
-        assert(msg == "Failed to cast value `foo` to `IntegerType` for partition column `a`")
+        checkError(
+          exception = intercept[SparkRuntimeException] {
+            fileIndex.partitionSpec()
+          },
+          errorClass = "_LEGACY_ERROR_TEMP_2058",
+          parameters = Map("value" -> "foo", "dataType" -> "IntegerType", "columnName" -> "a")
+        )
       }
 
       withSQLConf(SQLConf.VALIDATE_PARTITION_COLUMNS.key -> "false") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetSchemaSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetSchemaSuite.scala
index 96f9ff58e85..4c0fbfda681 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetSchemaSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetSchemaSuite.scala
@@ -1001,9 +1001,29 @@ class ParquetSchemaSuite extends ParquetSchemaTest {
       val col = spark.read.parquet(file).schema.fields.filter(_.name == "a")
       assert(col.length == 1)
       if (col(0).dataType == StringType) {
-        assert(errMsg.contains("Column: [a], Expected: int, Found: BINARY"))
+        checkError(
+          exception = e.getCause.asInstanceOf[SparkException],
+          errorClass = "_LEGACY_ERROR_TEMP_2063",
+          parameters = Map(
+            "filePath" ->
+              s".*${dir.getCanonicalPath}.*",
+            "column" -> "\\[a\\]",
+            "logicalType" -> "int",
+            "physicalType" -> "BINARY"),
+          matchPVals = true
+        )
       } else {
-        assert(errMsg.endsWith("Column: [a], Expected: string, Found: INT32"))
+        checkError(
+          exception = e.getCause.asInstanceOf[SparkException],
+          errorClass = "_LEGACY_ERROR_TEMP_2063",
+          parameters = Map(
+            "filePath" ->
+              s".*${dir.getCanonicalPath}.*",
+            "column" -> "\\[a\\]",
+            "logicalType" -> "string",
+            "physicalType" -> "INT32"),
+          matchPVals = true
+        )
       }
     }
   }
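A note on matchPVals in the hunk above: setting it to true makes checkError treat each expected parameter value as a regular expression rather than a literal, which is why the run-specific file path is matched with ".*" and the brackets in "[a]" are escaped. The same call shape, sketched with a hypothetical trigger (readMismatchedParquet is not a real helper):

    checkError(
      exception = intercept[SparkException] { readMismatchedParquet() },
      errorClass = "_LEGACY_ERROR_TEMP_2063",
      parameters = Map(
        "filePath" -> ".*part-00000.*",  // regex: the temp path differs per run
        "column" -> "\\[a\\]",           // literal "[a]" with brackets escaped
        "logicalType" -> "int",
        "physicalType" -> "BINARY"),
      matchPVals = true)                 // parameter values are matched as regexes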
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala
index 73cd2fb6e79..686fb2d838b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala
@@ -535,10 +535,13 @@ class HashedRelationSuite extends SharedSparkSession {
       buffer.append(keyIterator.next().getLong(0))
     }
     // attempt an illegal next() call
-    val caught = intercept[SparkException] {
-      keyIterator.next()
-    }
-    assert(caught.getLocalizedMessage === "End of the iterator")
+    checkError(
+      exception = intercept[SparkException] {
+        keyIterator.next()
+      },
+      errorClass = "_LEGACY_ERROR_TEMP_2104",
+      parameters = Map.empty
+    )
     assert(buffer.sortWith(_ < _) === randomArray)
     buffer.clear()
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/ForeachWriterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/ForeachWriterSuite.scala
index bd9fa6a386d..b12450167d7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/ForeachWriterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/ForeachWriterSuite.scala
@@ -261,7 +261,7 @@ class ForeachWriterSuite extends StreamTest with SharedSparkSession with BeforeA
     }
   }
 
-  testQuietly("foreach with error not caused by ForeachWriter") {
+  test("foreach with error not caused by ForeachWriter") {
     withTempDir { checkpointDir =>
       val input = MemoryStream[Int]
       val query = input.toDS().repartition(1).map(_ / 0).writeStream
@@ -283,9 +283,11 @@ class ForeachWriterSuite extends StreamTest with SharedSparkSession with BeforeA
       assert(allEvents(0)(0) === ForeachWriterSuite.Open(partition = 0, version = 0))
       // `close` should be called with the error
       val errorEvent = allEvents(0)(1).asInstanceOf[ForeachWriterSuite.Close]
-      assert(errorEvent.error.get.isInstanceOf[SparkException])
-      assert(errorEvent.error.get.getMessage ===
-        "Foreach writer has been aborted due to a task failure")
+      checkError(
+        exception = errorEvent.error.get.asInstanceOf[SparkException],
+        errorClass = "_LEGACY_ERROR_TEMP_2256",
+        parameters = Map.empty
+      )
     }
   }
 }
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
index 3f1fe8932d5..3ab6dcdd995 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
@@ -641,7 +641,7 @@ class CliSuite extends SparkFunSuite {
 
   test("SPARK-37694: delete [jar|file|archive] shall use spark sql processor") {
     runCliWithin(2.minute, errorResponses = Seq("ParseException"))(
-      "delete jar dummy.jar;" -> "Syntax error at or near 'jar': missing 'FROM'(line 1, pos 7)")
+      "delete jar dummy.jar;" -> "Syntax error at or near 'jar': missing 'FROM'.(line 1, pos 7)")
   }
 
   test("SPARK-37906: Spark SQL CLI should not pass final comment") {


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

