MaxGekk commented on code in PR #47802:
URL: https://github.com/apache/spark/pull/47802#discussion_r1724911679


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala:
##########
@@ -1952,18 +1952,18 @@ private[sql] object QueryExecutionErrors extends 
QueryErrorsBase with ExecutionE
   def cannotCreateParquetConverterForDecimalTypeError(
       t: DecimalType, parquetType: String): SparkRuntimeException = {
     new SparkRuntimeException(
-      errorClass = "_LEGACY_ERROR_TEMP_2239",
+      errorClass = "PARQUET_CONVERSION_FAILURE.DECIMAL",
       messageParameters = Map(
-        "t" -> t.json,
+        "t" -> t.sql,

Review Comment:
   ```suggestion
           "t" -> toSQLType(t),
   ```



##########
common/utils/src/main/resources/error/error-conditions.json:
##########
@@ -3580,6 +3580,23 @@
     ],
     "sqlState" : "42805"
   },
+  "PARQUET_CONVERSION_FAILURE" : {
+    "message" : [
+      "Unable to create a Parquet converter for the data type <dataType> whose 
Parquet type is <parquetType>."
+    ],
+    "subClass": {
+      "WITHOUT_DECIMAL_METADATA" : {
+        "message" : [
+          "Unable to create a Parquet converter for <typeName> whose Parquet 
type is <parquetType> without decimal metadata. Please read this column/field 
as Spark BINARY type."

Review Comment:
   There is no need to repeat the parent error class's message here; otherwise 
users will see duplicated text.



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala:
##########
@@ -1943,7 +1943,7 @@ private[sql] object QueryExecutionErrors extends 
QueryErrorsBase with ExecutionE
   def cannotCreateParquetConverterForTypeError(
       t: DecimalType, parquetType: String): SparkRuntimeException = {
     new SparkRuntimeException(
-      errorClass = "_LEGACY_ERROR_TEMP_2238",
+      errorClass = "PARQUET_CONVERSION_FAILURE_WITHOUT_DECIMAL_METADATA",
       messageParameters = Map(
         "typeName" -> t.typeName,

Review Comment:
   ```suggestion
           "typeName" -> toSQLType(t),
   ```



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala:
##########
@@ -1952,18 +1952,18 @@ private[sql] object QueryExecutionErrors extends 
QueryErrorsBase with ExecutionE
   def cannotCreateParquetConverterForDecimalTypeError(
       t: DecimalType, parquetType: String): SparkRuntimeException = {
     new SparkRuntimeException(
-      errorClass = "_LEGACY_ERROR_TEMP_2239",
+      errorClass = "PARQUET_CONVERSION_FAILURE.DECIMAL",
       messageParameters = Map(
-        "t" -> t.json,
+        "t" -> t.sql,
         "parquetType" -> parquetType))
   }
 
   def cannotCreateParquetConverterForDataTypeError(
       t: DataType, parquetType: String): SparkRuntimeException = {
     new SparkRuntimeException(
-      errorClass = "_LEGACY_ERROR_TEMP_2240",
+      errorClass = "PARQUET_CONVERSION_FAILURE",
       messageParameters = Map(
-        "t" -> t.json,
+        "dataType" -> t.sql,

Review Comment:
   ```suggestion
           "t" -> toSQLType(t),
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to