cloud-fan commented on code in PR #36639:
URL: https://github.com/apache/spark/pull/36639#discussion_r879992776


##########
core/src/main/resources/error/error-classes.json:
##########
@@ -1,319 +1,519 @@
 {
   "AMBIGUOUS_FIELD_NAME" : {
-    "message" : [ "Field name <fieldName> is ambiguous and has <n> matching 
fields in the struct." ],
+    "message" : [
+      "Field name <fieldName> is ambiguous and has <n> matching fields in the 
struct."
+    ],
     "sqlState" : "42000"
   },
   "ARITHMETIC_OVERFLOW" : {
-    "message" : [ "<message>.<alternative> If necessary set <config> to 
\"false\" (except for ANSI interval type) to bypass this error." ],
+    "message" : [
+      "<message>.<alternative> If necessary set <config> to \"false\" (except 
for ANSI interval type) to bypass this error."
+    ],
     "sqlState" : "22003"
   },
   "CANNOT_CAST_DATATYPE" : {
-    "message" : [ "Cannot cast <sourceType> to <targetType>." ],
+    "message" : [
+      "Cannot cast <sourceType> to <targetType>."
+    ],
     "sqlState" : "22005"
   },
   "CANNOT_CHANGE_DECIMAL_PRECISION" : {
-    "message" : [ "<value> cannot be represented as Decimal(<precision>, 
<scale>). If necessary set <config> to \"false\" to bypass this error." ],
+    "message" : [
+      "<value> cannot be represented as Decimal(<precision>, <scale>). If 
necessary set <config> to \"false\" to bypass this error."
+    ],
     "sqlState" : "22005"
   },
   "CANNOT_PARSE_DECIMAL" : {
-    "message" : [ "Cannot parse decimal" ],
+    "message" : [
+      "Cannot parse decimal"
+    ],
     "sqlState" : "42000"
   },
   "CANNOT_UP_CAST_DATATYPE" : {
-    "message" : [ "Cannot up cast <value> from <sourceType> to 
<targetType>.\n<details>" ]
+    "message" : [
+      "Cannot up cast <value> from <sourceType> to <targetType>.",
+      "<details>"
+    ]
   },
   "CAST_INVALID_INPUT" : {
-    "message" : [ "The value <value> of the type <sourceType> cannot be cast 
to <targetType> because it is malformed. To return NULL instead, use 
`try_cast`. If necessary set <config> to \"false\" to bypass this error." ],
+    "message" : [
+      "The value <value> of the type <sourceType> cannot be cast to 
<targetType> because it is malformed. To return NULL instead, use `try_cast`. 
If necessary set <config> to \"false\" to bypass this error."
+    ],
     "sqlState" : "42000"
   },
   "CAST_OVERFLOW" : {
-    "message" : [ "The value <value> of the type <sourceType> cannot be cast 
to <targetType> due to an overflow. To return NULL instead, use `try_cast`. If 
necessary set <config> to \"false\" to bypass this error." ],
+    "message" : [
+      "The value <value> of the type <sourceType> cannot be cast to 
<targetType> due to an overflow. To return NULL instead, use `try_cast`. If 
necessary set <config> to \"false\" to bypass this error."
+    ],
     "sqlState" : "22005"
   },
   "CONCURRENT_QUERY" : {
-    "message" : [ "Another instance of this query was just started by a 
concurrent session." ]
+    "message" : [
+      "Another instance of this query was just started by a concurrent 
session."
+    ]
   },
   "DATETIME_OVERFLOW" : {
-    "message" : [ "Datetime operation overflow: <operation>." ],
+    "message" : [
+      "Datetime operation overflow: <operation>."
+    ],
     "sqlState" : "22008"
   },
   "DIVIDE_BY_ZERO" : {
-    "message" : [ "Division by zero. To return NULL instead, use `try_divide`. 
If necessary set <config> to \"false\" (except for ANSI interval type) to 
bypass this error." ],
+    "message" : [
+      "Division by zero. To return NULL instead, use `try_divide`. If 
necessary set <config> to \"false\" (except for ANSI interval type) to bypass 
this error."
+    ],
     "sqlState" : "22012"
   },
   "DUPLICATE_KEY" : {
-    "message" : [ "Found duplicate keys <keyColumn>" ],
+    "message" : [
+      "Found duplicate keys <keyColumn>"
+    ],
     "sqlState" : "23000"
   },
   "FAILED_EXECUTE_UDF" : {
-    "message" : [ "Failed to execute user defined function (<functionName>: 
(<signature>) => <result>)" ]
+    "message" : [
+      "Failed to execute user defined function (<functionName>: (<signature>) 
=> <result>)"
+    ]
   },
   "FAILED_RENAME_PATH" : {
-    "message" : [ "Failed to rename <sourcePath> to <targetPath> as 
destination already exists" ],
+    "message" : [
+      "Failed to rename <sourcePath> to <targetPath> as destination already 
exists"
+    ],
     "sqlState" : "22023"
   },
   "FAILED_SET_ORIGINAL_PERMISSION_BACK" : {
-    "message" : [ "Failed to set original permission <permission> back to the 
created path: <path>. Exception: <message>" ]
+    "message" : [
+      "Failed to set original permission <permission> back to the created 
path: <path>. Exception: <message>"
+    ]
   },
   "FORBIDDEN_OPERATION" : {
-    "message" : [ "The operation <statement> is not allowed on <objectType>: 
<objectName>" ]
+    "message" : [
+      "The operation <statement> is not allowed on <objectType>: <objectName>"
+    ]
   },
   "GRAPHITE_SINK_INVALID_PROTOCOL" : {
-    "message" : [ "Invalid Graphite protocol: <protocol>" ]
+    "message" : [
+      "Invalid Graphite protocol: <protocol>"
+    ]
   },
   "GRAPHITE_SINK_PROPERTY_MISSING" : {
-    "message" : [ "Graphite sink requires '<property>' property." ]
+    "message" : [
+      "Graphite sink requires '<property>' property."
+    ]
   },
   "GROUPING_COLUMN_MISMATCH" : {
-    "message" : [ "Column of grouping (<grouping>) can't be found in grouping 
columns <groupingColumns>" ],
+    "message" : [
+      "Column of grouping (<grouping>) can't be found in grouping columns 
<groupingColumns>"
+    ],
     "sqlState" : "42000"
   },
   "GROUPING_ID_COLUMN_MISMATCH" : {
-    "message" : [ "Columns of grouping_id (<groupingIdColumn>) does not match 
grouping columns (<groupByColumns>)" ],
+    "message" : [
+      "Columns of grouping_id (<groupingIdColumn>) does not match grouping 
columns (<groupByColumns>)"
+    ],
     "sqlState" : "42000"
   },
   "GROUPING_SIZE_LIMIT_EXCEEDED" : {
-    "message" : [ "Grouping sets size cannot be greater than <maxSize>" ]
+    "message" : [
+      "Grouping sets size cannot be greater than <maxSize>"
+    ]
   },
   "INCOMPARABLE_PIVOT_COLUMN" : {
-    "message" : [ "Invalid pivot column <columnName>. Pivot columns must be 
comparable." ],
+    "message" : [
+      "Invalid pivot column <columnName>. Pivot columns must be comparable."
+    ],
     "sqlState" : "42000"
   },
   "INCOMPATIBLE_DATASOURCE_REGISTER" : {
-    "message" : [ "Detected an incompatible DataSourceRegister. Please remove 
the incompatible library from classpath or upgrade it. Error: <message>" ]
+    "message" : [
+      "Detected an incompatible DataSourceRegister. Please remove the 
incompatible library from classpath or upgrade it. Error: <message>"
+    ]
   },
   "INCONSISTENT_BEHAVIOR_CROSS_VERSION" : {
-    "message" : [ "You may get a different result due to the upgrading to" ],
+    "message" : [
+      "You may get a different result due to the upgrading to"
+    ],
     "subClass" : {
       "DATETIME_PATTERN_RECOGNITION" : {
-        "message" : [ " Spark >= 3.0: \nFail to recognize <pattern> pattern in 
the DateTimeFormatter. 1) You can set <config> to \"LEGACY\" to restore the 
behavior before Spark 3.0. 2) You can form a valid datetime pattern with the 
guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html" ]
+        "message" : [
+          " Spark >= 3.0: ",

Review Comment:
   super nit: can we move the leading space to the parent error class? This 
appears in every sub error class.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to