karenfeng commented on a change in pull request #34178:
URL: https://github.com/apache/spark/pull/34178#discussion_r728475198



##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -136,10 +194,26 @@
     "message" : [ "Unsupported data type %s" ],

Review comment:
       Can you make this class name `UNSUPPORTED_DATA_TYPE`?

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -723,45 +725,50 @@ object QueryExecutionErrors {
   }
 
   def dataTypeUnsupportedYetError(dataType: DataType): Throwable = {
-    new UnsupportedOperationException(s"$dataType is not supported yet.")
+    new SparkUnsupportedOperationException(errorClass = "UNSUPPORTED_DATATYPE",
+      messageParameters = Array(dataType.toString))
   }
 
   def unsupportedOperationForDataTypeError(dataType: DataType): Throwable = {
-    new UnsupportedOperationException(s"DataType: ${dataType.catalogString}")
+    new SparkUnsupportedOperationException(errorClass = 
"UNSUPPORTED_OPERATION_FOR_DATA_TYPE",

Review comment:
       Should this also be `UNSUPPORTED_DATATYPE`?

##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -11,10 +11,32 @@
     "message" : [ "%s cannot be represented as Decimal(%s, %s)." ],
     "sqlState" : "22005"
   },
+  "CANNOT_EVALUATE_EXPRESSION" : {
+    "message" : [ "Cannot evaluate expression: %s" ]
+  },
+  "CANNOT_GENERATE_CODE_FOR_EXPRESSION" : {

Review comment:
       These look like they came from 
https://github.com/apache/spark/pull/34177/files; can you remove them here?
   
   

##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -136,10 +194,26 @@
     "message" : [ "Unsupported data type %s" ],
     "sqlState" : "0A000"
   },
+  "UNSUPPORTED_DDL" : {

Review comment:
       This is pretty jargon-y; maybe `UNSUPPORTED_COMMAND`?

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -723,45 +725,50 @@ object QueryExecutionErrors {
   }
 
   def dataTypeUnsupportedYetError(dataType: DataType): Throwable = {
-    new UnsupportedOperationException(s"$dataType is not supported yet.")
+    new SparkUnsupportedOperationException(errorClass = "UNSUPPORTED_DATATYPE",
+      messageParameters = Array(dataType.toString))
   }
 
   def unsupportedOperationForDataTypeError(dataType: DataType): Throwable = {
-    new UnsupportedOperationException(s"DataType: ${dataType.catalogString}")
+    new SparkUnsupportedOperationException(errorClass = 
"UNSUPPORTED_OPERATION_FOR_DATA_TYPE",
+      messageParameters = Array(dataType.catalogString))
   }
 
   def inputFilterNotFullyConvertibleError(owner: String): Throwable = {
-    new SparkException(s"The input filter of $owner should be fully 
convertible.")
+    new SparkException(errorClass = "INPUT_FILTER_NOT_FULLY_CONVERTIBLE",
+      messageParameters = Array(owner), null)
   }
 
   def cannotReadFooterForFileError(file: Path, e: IOException): Throwable = {
-    new SparkException(s"Could not read footer for file: $file", e)
+    new SparkException(errorClass = "CANNOT_READ_FOOTER_FOR_FILE",
+      messageParameters = Array(file.toString), e)
   }
 
   def cannotReadFooterForFileError(file: FileStatus, e: RuntimeException): 
Throwable = {
-    new IOException(s"Could not read footer for file: $file", e)
+    new SparkIOException(errorClass = "CANNOT_READ_FOOTER_FOR_FILE",
+      messageParameters = Array(file.toString), e)
   }
 
   def foundDuplicateFieldInCaseInsensitiveModeError(
       requiredFieldName: String, matchedOrcFields: String): Throwable = {
-    new RuntimeException(
-      s"""
-         |Found duplicate field(s) "$requiredFieldName": $matchedOrcFields
-         |in case-insensitive mode
-       """.stripMargin.replaceAll("\n", " "))
+    new SparkRuntimeException(errorClass = 
"FOUND_DUPLICATE_FIELD_IN_CASE_INSENSITIVE_MODE",
+      messageParameters = Array(requiredFieldName, matchedOrcFields))
   }
 
   def failedToMergeIncompatibleSchemasError(
       left: StructType, right: StructType, e: Throwable): Throwable = {
-    new SparkException(s"Failed to merge incompatible schemas $left and 
$right", e)
+    new SparkException(errorClass = "FAILED_TO_MERGE_INCOMPATIBLE_SCHEMAS",
+      messageParameters = Array(left.toString, right.toString), e)
   }
 
   def ddlUnsupportedTemporarilyError(ddl: String): Throwable = {
-    new UnsupportedOperationException(s"$ddl is not supported temporarily.")
+    new SparkUnsupportedOperationException(errorClass = "UNSUPPORTED_DDL",
+      messageParameters = Array(ddl))
   }
 
   def operatingOnCanonicalizationPlanError(): Throwable = {

Review comment:
       It looks like this isn't used in Spark anymore; can we remove it?

##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -11,10 +11,32 @@
     "message" : [ "%s cannot be represented as Decimal(%s, %s)." ],
     "sqlState" : "22005"
   },
+  "CANNOT_EVALUATE_EXPRESSION" : {

Review comment:
       These look like they came from 
https://github.com/apache/spark/pull/34177/files; can you remove them here?

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -723,45 +725,50 @@ object QueryExecutionErrors {
   }
 
   def dataTypeUnsupportedYetError(dataType: DataType): Throwable = {
-    new UnsupportedOperationException(s"$dataType is not supported yet.")
+    new SparkUnsupportedOperationException(errorClass = "UNSUPPORTED_DATATYPE",
+      messageParameters = Array(dataType.toString))
   }
 
   def unsupportedOperationForDataTypeError(dataType: DataType): Throwable = {
-    new UnsupportedOperationException(s"DataType: ${dataType.catalogString}")
+    new SparkUnsupportedOperationException(errorClass = 
"UNSUPPORTED_OPERATION_FOR_DATA_TYPE",
+      messageParameters = Array(dataType.catalogString))
   }
 
   def inputFilterNotFullyConvertibleError(owner: String): Throwable = {
-    new SparkException(s"The input filter of $owner should be fully 
convertible.")
+    new SparkException(errorClass = "INPUT_FILTER_NOT_FULLY_CONVERTIBLE",

Review comment:
       @gengliangwang - is this an internal error?




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to