karenfeng commented on a change in pull request #34177:
URL: https://github.com/apache/spark/pull/34177#discussion_r728439645



##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -258,25 +258,28 @@ object QueryExecutionErrors {
   }
 
   def cannotGenerateCodeForUnsupportedTypeError(dataType: DataType): Throwable = {
-    new IllegalArgumentException(s"cannot generate code for unsupported type: $dataType")
+    new SparkIllegalArgumentException(errorClass = "CANNOT_GENERATE_CODE_FOR_UNSUPPORTED_TYPE",
+      messageParameters = Array(dataType.typeName))
   }
 
   def cannotInterpolateClassIntoCodeBlockError(arg: Any): Throwable = {
-    new IllegalArgumentException(
-      s"Can not interpolate ${arg.getClass.getName} into code block.")
+    new SparkIllegalArgumentException(errorClass = "CANNOT_INTERPOLATE_CLASS_INTO_CODE_BLOCK",
+      messageParameters = Array(arg.getClass.getName))
   }
 
   def customCollectionClsNotResolvedError(): Throwable = {
-    new UnsupportedOperationException("not resolved")
+    new SparkUnsupportedOperationException(errorClass = "CUSTOM_COLLECTION_CLASS_NOT_RESOLVED",
+      messageParameters = Array.empty)
   }
 
   def classUnsupportedByMapObjectsError(cls: Class[_]): RuntimeException = {
-    new RuntimeException(s"class `${cls.getName}` is not supported by 
`MapObjects` as " +

Review comment:
       Is this an internal error @cloud-fan? I can't quite understand when a 
user would encounter this.

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -258,25 +258,28 @@ object QueryExecutionErrors {
   }
 
   def cannotGenerateCodeForUnsupportedTypeError(dataType: DataType): Throwable = {

Review comment:
       Looks like this is actually not used elsewhere. Can you remove it?

##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -105,25 +152,43 @@
     "message" : [ "PARTITION clause cannot contain a non-partition column 
name: %s" ],
     "sqlState" : "42000"
   },
+  "NULL_AS_MAP_KEY_NOT_ALLOWED" : {
+    "message" : [ "Cannot use null as map key!" ],
+    "sqlState" : "42000"
+  },
   "PIVOT_VALUE_DATA_TYPE_MISMATCH" : {
     "message" : [ "Invalid pivot value '%s': value data type %s does not match 
pivot column data type %s" ],
     "sqlState" : "42000"
   },
+  "PRIMARY_CONSTRUCTOR_NOT_FOUND" : {
+    "message" : [ "Couldn't find a primary constructor on %s" ]
+  },
+  "REACHED_UNREACHABLE_LINE" : {
+    "message" : [ "This line should be unreachable %s" ],
+    "sqlState" : "42000"
+  },
   "RENAME_SRC_PATH_NOT_FOUND" : {
     "message" : [ "Failed to rename as %s was not found" ],
     "sqlState" : "22023"
   },
-  "ROW_FROM_CSV_PARSER_NOT_EXPECTED" : {
-    "message" : [ "Expected one row from CSV parser." ],
+  "ROW_FIELD_CANNOT_BE_NULL" : {
+    "message" : [ "The %sth field '%s' of input row cannot be null." ],
     "sqlState" : "42000"
   },
+  "ROW_FROM_CSV_PARSER_NOT_EXPECTED" : {

Review comment:
       While we're here - how about `UNEXPECTED_ROW_FROM_CSV_PARSER`, so it matches the other error classes a bit better?
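
       (For context: the error class name is what callers would eventually match on, so
       consistent naming helps. A rough, hypothetical usage sketch, assuming these new
       exceptions implement the `SparkThrowable` interface and that it exposes
       `getErrorClass`/`getSqlState`; the method and variable names below are illustrative only:

           import org.apache.spark.SparkThrowable

           // Hypothetical caller-side handling: react to the stable error class
           // instead of parsing the message text.
           def describeFailure(work: => Unit): Unit = {
             try {
               work
             } catch {
               case e: SparkThrowable =>
                 println(s"error class: ${e.getErrorClass}, SQLSTATE: ${e.getSqlState}")
             }
           }
       )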

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -258,25 +258,28 @@ object QueryExecutionErrors {
   }
 
   def cannotGenerateCodeForUnsupportedTypeError(dataType: DataType): Throwable = {
-    new IllegalArgumentException(s"cannot generate code for unsupported type: $dataType")
+    new SparkIllegalArgumentException(errorClass = "CANNOT_GENERATE_CODE_FOR_UNSUPPORTED_TYPE",
+      messageParameters = Array(dataType.typeName))
   }
 
   def cannotInterpolateClassIntoCodeBlockError(arg: Any): Throwable = {
-    new IllegalArgumentException(
-      s"Can not interpolate ${arg.getClass.getName} into code block.")
+    new SparkIllegalArgumentException(errorClass = "CANNOT_INTERPOLATE_CLASS_INTO_CODE_BLOCK",
+      messageParameters = Array(arg.getClass.getName))
   }
 
   def customCollectionClsNotResolvedError(): Throwable = {
-    new UnsupportedOperationException("not resolved")

Review comment:
       This should be an internal error due to some bug in analysis; 
@cloud-fan, can you confirm? If so, we should use the `INTERNAL_ERROR` class.
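
       For illustration, a minimal sketch of what that could look like, assuming an
       `INTERNAL_ERROR` entry is added to error-classes.json; the JSON wording and the
       message string below are hypothetical, not part of this PR:

           // Hypothetical sketch only; assumes error-classes.json gains something like
           //   "INTERNAL_ERROR" : { "message" : [ "Internal error: %s" ] }
           def customCollectionClsNotResolvedError(): Throwable = {
             new SparkUnsupportedOperationException(
               errorClass = "INTERNAL_ERROR",
               messageParameters = Array(
                 "Custom collection class was not resolved during analysis."))
           }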

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -286,60 +289,73 @@ object QueryExecutionErrors {
   }
 
   def constructorNotFoundError(cls: String): Throwable = {
-    new RuntimeException(s"Couldn't find a valid constructor on $cls")
+    new SparkRuntimeException(errorClass = "CONSTRUCTOR_NOT_FOUND",
+      messageParameters = Array(cls))
   }
 
   def primaryConstructorNotFoundError(cls: Class[_]): Throwable = {
-    new RuntimeException(s"Couldn't find a primary constructor on $cls")
+    new SparkRuntimeException(errorClass = "PRIMARY_CONSTRUCTOR_NOT_FOUND",
+      messageParameters = Array(cls.getName))
   }
 
   def unsupportedNaturalJoinTypeError(joinType: JoinType): Throwable = {
-    new RuntimeException("Unsupported natural join type " + joinType)
+    new SparkRuntimeException(errorClass = "UNSUPPORTED_NATURAL_JOIN_TYPE",
+      messageParameters = Array(joinType.toString))
   }
 
   def notExpectedUnresolvedEncoderError(attr: AttributeReference): Throwable = {
-    new RuntimeException(s"Unresolved encoder expected, but $attr was found.")
+    new SparkRuntimeException(errorClass = "EXPECTED_UNRESOLVED_ENCODER",
+      messageParameters = Array(attr.toString))
   }
 
   def unsupportedEncoderError(): Throwable = {
-    new RuntimeException("Only expression encoders are supported for now.")
+    new SparkRuntimeException(errorClass = "UNSUPPORTED_ENCODER",
+      messageParameters = Array.empty)
   }
 
   def notOverrideExpectedMethodsError(className: String, m1: String, m2: String): Throwable = {
-    new RuntimeException(s"$className must override either $m1 or $m2")

Review comment:
       This is an internal error; can you use INTERNAL_ERROR instead?

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -258,25 +258,28 @@ object QueryExecutionErrors {
   }
 
   def cannotGenerateCodeForUnsupportedTypeError(dataType: DataType): Throwable = {
-    new IllegalArgumentException(s"cannot generate code for unsupported type: $dataType")
+    new SparkIllegalArgumentException(errorClass = "CANNOT_GENERATE_CODE_FOR_UNSUPPORTED_TYPE",
+      messageParameters = Array(dataType.typeName))
   }
 
   def cannotInterpolateClassIntoCodeBlockError(arg: Any): Throwable = {

Review comment:
       This feels like it might be an internal error... @rednaxelafx, can you 
confirm? If so, we should use the `INTERNAL_ERROR` class.

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -286,60 +289,73 @@ object QueryExecutionErrors {
   }
 
   def constructorNotFoundError(cls: String): Throwable = {
-    new RuntimeException(s"Couldn't find a valid constructor on $cls")

Review comment:
       This feels like an internal error. @cloud-fan, can you confirm?

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -286,60 +289,73 @@ object QueryExecutionErrors {
   }
 
   def constructorNotFoundError(cls: String): Throwable = {
-    new RuntimeException(s"Couldn't find a valid constructor on $cls")
+    new SparkRuntimeException(errorClass = "CONSTRUCTOR_NOT_FOUND",
+      messageParameters = Array(cls))
   }
 
   def primaryConstructorNotFoundError(cls: Class[_]): Throwable = {
-    new RuntimeException(s"Couldn't find a primary constructor on $cls")

Review comment:
       This feels like an internal error. @cloud-fan, can you confirm?

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -286,60 +289,73 @@ object QueryExecutionErrors {
   }
 
   def constructorNotFoundError(cls: String): Throwable = {
-    new RuntimeException(s"Couldn't find a valid constructor on $cls")
+    new SparkRuntimeException(errorClass = "CONSTRUCTOR_NOT_FOUND",
+      messageParameters = Array(cls))
   }
 
   def primaryConstructorNotFoundError(cls: Class[_]): Throwable = {
-    new RuntimeException(s"Couldn't find a primary constructor on $cls")
+    new SparkRuntimeException(errorClass = "PRIMARY_CONSTRUCTOR_NOT_FOUND",
+      messageParameters = Array(cls.getName))
   }
 
   def unsupportedNaturalJoinTypeError(joinType: JoinType): Throwable = {
-    new RuntimeException("Unsupported natural join type " + joinType)
+    new SparkRuntimeException(errorClass = "UNSUPPORTED_NATURAL_JOIN_TYPE",
+      messageParameters = Array(joinType.toString))
   }
 
   def notExpectedUnresolvedEncoderError(attr: AttributeReference): Throwable = {
-    new RuntimeException(s"Unresolved encoder expected, but $attr was found.")
+    new SparkRuntimeException(errorClass = "EXPECTED_UNRESOLVED_ENCODER",
+      messageParameters = Array(attr.toString))
   }
 
   def unsupportedEncoderError(): Throwable = {
-    new RuntimeException("Only expression encoders are supported for now.")
+    new SparkRuntimeException(errorClass = "UNSUPPORTED_ENCODER",
+      messageParameters = Array.empty)
   }
 
   def notOverrideExpectedMethodsError(className: String, m1: String, m2: String): Throwable = {
-    new RuntimeException(s"$className must override either $m1 or $m2")
+    new SparkRuntimeException(errorClass = "EXPECTED_METHODS_NOT_OVERRIDDEN",
+      messageParameters = Array(className, m1, m2))
   }
 
   def failToConvertValueToJsonError(value: AnyRef, cls: Class[_], dataType: DataType): Throwable = {
-    new RuntimeException(s"Failed to convert value $value (class of $cls) " +
-      s"with the type of $dataType to JSON.")
+    new SparkRuntimeException(errorClass = "FAILED_TO_CONVERT_VALUE_TO_JSON",
+      messageParameters = Array(value.toString, cls.getName, dataType.typeName))
   }
 
   def unexpectedOperatorInCorrelatedSubquery(op: LogicalPlan, pos: String = ""): Throwable = {
-    new RuntimeException(s"Unexpected operator $op in correlated subquery" + pos)
+    new SparkRuntimeException(errorClass = "UNEXPECTED_OPERATOR_IN_CORRELATED_SUBQUERY",
+      messageParameters = Array(op.toString, pos))
   }
 
   def unreachableError(err: String = ""): Throwable = {
-    new RuntimeException("This line should be unreachable" + err)

Review comment:
       This should be an internal error.

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -286,60 +289,73 @@ object QueryExecutionErrors {
   }
 
   def constructorNotFoundError(cls: String): Throwable = {
-    new RuntimeException(s"Couldn't find a valid constructor on $cls")
+    new SparkRuntimeException(errorClass = "CONSTRUCTOR_NOT_FOUND",
+      messageParameters = Array(cls))
   }
 
   def primaryConstructorNotFoundError(cls: Class[_]): Throwable = {
-    new RuntimeException(s"Couldn't find a primary constructor on $cls")
+    new SparkRuntimeException(errorClass = "PRIMARY_CONSTRUCTOR_NOT_FOUND",
+      messageParameters = Array(cls.getName))
   }
 
   def unsupportedNaturalJoinTypeError(joinType: JoinType): Throwable = {
-    new RuntimeException("Unsupported natural join type " + joinType)
+    new SparkRuntimeException(errorClass = "UNSUPPORTED_NATURAL_JOIN_TYPE",
+      messageParameters = Array(joinType.toString))
   }
 
   def notExpectedUnresolvedEncoderError(attr: AttributeReference): Throwable = {
-    new RuntimeException(s"Unresolved encoder expected, but $attr was found.")
+    new SparkRuntimeException(errorClass = "EXPECTED_UNRESOLVED_ENCODER",
+      messageParameters = Array(attr.toString))
   }
 
   def unsupportedEncoderError(): Throwable = {
-    new RuntimeException("Only expression encoders are supported for now.")
+    new SparkRuntimeException(errorClass = "UNSUPPORTED_ENCODER",
+      messageParameters = Array.empty)
   }
 
   def notOverrideExpectedMethodsError(className: String, m1: String, m2: String): Throwable = {
-    new RuntimeException(s"$className must override either $m1 or $m2")
+    new SparkRuntimeException(errorClass = "EXPECTED_METHODS_NOT_OVERRIDDEN",
+      messageParameters = Array(className, m1, m2))
   }
 
   def failToConvertValueToJsonError(value: AnyRef, cls: Class[_], dataType: DataType): Throwable = {
-    new RuntimeException(s"Failed to convert value $value (class of $cls) " +
-      s"with the type of $dataType to JSON.")
+    new SparkRuntimeException(errorClass = "FAILED_TO_CONVERT_VALUE_TO_JSON",
+      messageParameters = Array(value.toString, cls.getName, dataType.typeName))
   }
 
   def unexpectedOperatorInCorrelatedSubquery(op: LogicalPlan, pos: String = ""): Throwable = {
-    new RuntimeException(s"Unexpected operator $op in correlated subquery" + pos)
+    new SparkRuntimeException(errorClass = "UNEXPECTED_OPERATOR_IN_CORRELATED_SUBQUERY",
+      messageParameters = Array(op.toString, pos))
   }
 
   def unreachableError(err: String = ""): Throwable = {
-    new RuntimeException("This line should be unreachable" + err)
+    new SparkRuntimeException(errorClass = "REACHED_UNREACHABLE_LINE",
+      messageParameters = Array(err))
   }
 
   def unsupportedRoundingMode(roundMode: BigDecimal.RoundingMode.Value): Throwable = {
-    new RuntimeException(s"Not supported rounding mode: $roundMode")
+    new SparkRuntimeException(errorClass = "UNSUPPORTED_ROUNDING_MODE",
+      messageParameters = Array(roundMode.toString))
   }
 
   def resolveCannotHandleNestedSchema(plan: LogicalPlan): Throwable = {
-    new RuntimeException(s"Can not handle nested schema yet...  plan $plan")

Review comment:
       @cloud-fan, when will a user encounter this?

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -286,60 +289,73 @@ object QueryExecutionErrors {
   }
 
   def constructorNotFoundError(cls: String): Throwable = {
-    new RuntimeException(s"Couldn't find a valid constructor on $cls")
+    new SparkRuntimeException(errorClass = "CONSTRUCTOR_NOT_FOUND",
+      messageParameters = Array(cls))
   }
 
   def primaryConstructorNotFoundError(cls: Class[_]): Throwable = {
-    new RuntimeException(s"Couldn't find a primary constructor on $cls")
+    new SparkRuntimeException(errorClass = "PRIMARY_CONSTRUCTOR_NOT_FOUND",
+      messageParameters = Array(cls.getName))
   }
 
   def unsupportedNaturalJoinTypeError(joinType: JoinType): Throwable = {
-    new RuntimeException("Unsupported natural join type " + joinType)
+    new SparkRuntimeException(errorClass = "UNSUPPORTED_NATURAL_JOIN_TYPE",
+      messageParameters = Array(joinType.toString))
   }
 
   def notExpectedUnresolvedEncoderError(attr: AttributeReference): Throwable = {
-    new RuntimeException(s"Unresolved encoder expected, but $attr was found.")

Review comment:
       This is an internal error; can you use `INTERNAL_ERROR` instead?

##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -105,25 +152,43 @@
     "message" : [ "PARTITION clause cannot contain a non-partition column 
name: %s" ],
     "sqlState" : "42000"
   },
+  "NULL_AS_MAP_KEY_NOT_ALLOWED" : {
+    "message" : [ "Cannot use null as map key!" ],
+    "sqlState" : "42000"
+  },
   "PIVOT_VALUE_DATA_TYPE_MISMATCH" : {
     "message" : [ "Invalid pivot value '%s': value data type %s does not match 
pivot column data type %s" ],
     "sqlState" : "42000"
   },
+  "PRIMARY_CONSTRUCTOR_NOT_FOUND" : {
+    "message" : [ "Couldn't find a primary constructor on %s" ]
+  },
+  "REACHED_UNREACHABLE_LINE" : {
+    "message" : [ "This line should be unreachable %s" ],
+    "sqlState" : "42000"
+  },
   "RENAME_SRC_PATH_NOT_FOUND" : {
     "message" : [ "Failed to rename as %s was not found" ],
     "sqlState" : "22023"
   },
-  "ROW_FROM_CSV_PARSER_NOT_EXPECTED" : {
-    "message" : [ "Expected one row from CSV parser." ],
+  "ROW_FIELD_CANNOT_BE_NULL" : {

Review comment:
       This is a bit verbose as well; maybe `NULL_FIELD`?

##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -105,25 +152,43 @@
     "message" : [ "PARTITION clause cannot contain a non-partition column 
name: %s" ],
     "sqlState" : "42000"
   },
+  "NULL_AS_MAP_KEY_NOT_ALLOWED" : {

Review comment:
       This is a bit verbose; maybe `INVALID_MAP_KEY`?
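
       If renamed, the throwing side would only need to reference the new name; a
       minimal, hypothetical sketch (the method name below is illustrative, and the
       message entry is assumed to stay parameterless as it is here):

           // Hypothetical call site after a rename to INVALID_MAP_KEY; mirrors the
           // SparkRuntimeException(errorClass, messageParameters) shape used in this PR.
           def nullAsMapKeyNotAllowedError(): Throwable = {
             new SparkRuntimeException(
               errorClass = "INVALID_MAP_KEY",
               messageParameters = Array.empty)
           }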




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.



