This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 3c74aed2cbd [SPARK-39229][SQL] Separate query contexts from error-classes.json
3c74aed2cbd is described below
commit 3c74aed2cbde2968fab93b2799a56d075420e7d3
Author: Gengliang Wang <[email protected]>
AuthorDate: Thu May 19 11:00:16 2022 +0300
[SPARK-39229][SQL] Separate query contexts from error-classes.json
### What changes were proposed in this pull request?
Separate query contexts for runtime errors from error-classes.json.
### Why are the changes needed?
The message in JSON should only contain the parameters that are explicitly passed when an error is thrown. It is more elegant to separate query contexts from error-classes.json.
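For illustration, here is a minimal sketch of the new contract, simplified from the patch below (the format string is abbreviated here; the real helper looks it up in error-classes.json). The query context is concatenated after the formatted message instead of being substituted for a <context>/<details> placeholder in the JSON template:

    // Sketch only: abbreviated form of SparkThrowableHelper.getMessage.
    def getMessage(
        errorClass: String,
        messageParameters: Array[String],
        queryContext: String = ""): String = {
      // The real helper loads this format from error-classes.json.
      val messageFormat =
        "Division by zero. If necessary set <config> to \"false\" to bypass this error."
      "[" + errorClass + "] " + String.format(
        messageFormat.replaceAll("<[a-zA-Z0-9_-]+>", "%s"),
        messageParameters: _*) + queryContext
    }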
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Existing UT
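For example (hypothetical calls, mirroring the updated SparkThrowableSuite assertion in the diff below):

    getMessage("DIVIDE_BY_ZERO", Array("foo"))
    // => "[DIVIDE_BY_ZERO] Division by zero. To return NULL instead, use
    //    `try_divide`. If necessary set foo to \"false\" (except for ANSI
    //    interval type) to bypass this error."
    getMessage("DIVIDE_BY_ZERO", Array("foo"), "\n== SQL ==\nselect 1/0")
    // => the same message followed by the query context string.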
Closes #36604 from gengliangwang/refactorErrorClass.
Authored-by: Gengliang Wang <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
.../apache/spark/memory/SparkOutOfMemoryError.java | 2 +-
core/src/main/resources/error/error-classes.json | 10 +++----
.../main/scala/org/apache/spark/ErrorInfo.scala | 9 ++++--
.../scala/org/apache/spark/SparkException.scala | 34 +++++++++++++++-------
.../org/apache/spark/SparkThrowableSuite.scala | 2 +-
.../spark/sql/errors/QueryExecutionErrors.scala | 29 +++++++++---------
6 files changed, 53 insertions(+), 33 deletions(-)
diff --git a/core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java b/core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java
index 22dfe4d4dbe..c5f19a0c201 100644
--- a/core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java
+++ b/core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java
@@ -39,7 +39,7 @@ public final class SparkOutOfMemoryError extends OutOfMemoryError implements Spa
}
public SparkOutOfMemoryError(String errorClass, String[] messageParameters) {
- super(SparkThrowableHelper.getMessage(errorClass, messageParameters));
+ super(SparkThrowableHelper.getMessage(errorClass, messageParameters, ""));
this.errorClass = errorClass;
this.messageParameters = messageParameters;
}
diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index f4eadd4a368..21fde82adbb 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -4,7 +4,7 @@
"sqlState" : "42000"
},
"ARITHMETIC_OVERFLOW" : {
- "message" : [ "<message>.<alternative> If necessary set <config> to
\"false\" (except for ANSI interval type) to bypass this error.<context>" ],
+ "message" : [ "<message>.<alternative> If necessary set <config> to
\"false\" (except for ANSI interval type) to bypass this error." ],
"sqlState" : "22003"
},
"CANNOT_CAST_DATATYPE" : {
@@ -12,7 +12,7 @@
"sqlState" : "22005"
},
"CANNOT_CHANGE_DECIMAL_PRECISION" : {
- "message" : [ "<value> cannot be represented as Decimal(<precision>,
<scale>). If necessary set <config> to \"false\" to bypass this
error.<details>" ],
+ "message" : [ "<value> cannot be represented as Decimal(<precision>,
<scale>). If necessary set <config> to \"false\" to bypass this error." ],
"sqlState" : "22005"
},
"CANNOT_PARSE_DECIMAL" : {
@@ -23,7 +23,7 @@
"message" : [ "Cannot up cast <value> from <sourceType> to
<targetType>.\n<details>" ]
},
"CAST_INVALID_INPUT" : {
- "message" : [ "The value <value> of the type <sourceType> cannot be cast
to <targetType> because it is malformed. To return NULL instead, use
`try_cast`. If necessary set <config> to \"false\" to bypass this
error.<details>" ],
+ "message" : [ "The value <value> of the type <sourceType> cannot be cast
to <targetType> because it is malformed. To return NULL instead, use
`try_cast`. If necessary set <config> to \"false\" to bypass this error." ],
"sqlState" : "42000"
},
"CAST_OVERFLOW" : {
@@ -38,7 +38,7 @@
"sqlState" : "22008"
},
"DIVIDE_BY_ZERO" : {
- "message" : [ "Division by zero. To return NULL instead, use `try_divide`.
If necessary set <config> to \"false\" (except for ANSI interval type) to
bypass this error.<details>" ],
+ "message" : [ "Division by zero. To return NULL instead, use `try_divide`.
If necessary set <config> to \"false\" (except for ANSI interval type) to
bypass this error." ],
"sqlState" : "22012"
},
"DUPLICATE_KEY" : {
@@ -138,7 +138,7 @@
"sqlState" : "42000"
},
"MAP_KEY_DOES_NOT_EXIST" : {
- "message" : [ "Key <keyValue> does not exist. To return NULL instead, use
`try_element_at`. If necessary set <config> to \"false\" to bypass this
error.<details>" ]
+ "message" : [ "Key <keyValue> does not exist. To return NULL instead, use
`try_element_at`. If necessary set <config> to \"false\" to bypass this error."
]
},
"MISSING_COLUMN" : {
"message" : [ "Column '<columnName>' does not exist. Did you mean one of
the following? [<proposal>]" ],
diff --git a/core/src/main/scala/org/apache/spark/ErrorInfo.scala b/core/src/main/scala/org/apache/spark/ErrorInfo.scala
index 0447572bb1c..e11e6485851 100644
--- a/core/src/main/scala/org/apache/spark/ErrorInfo.scala
+++ b/core/src/main/scala/org/apache/spark/ErrorInfo.scala
@@ -71,7 +71,10 @@ private[spark] object SparkThrowableHelper {
mapper.readValue(errorClassesUrl, new TypeReference[SortedMap[String, ErrorInfo]]() {})
}
- def getMessage(errorClass: String, messageParameters: Array[String]): String = {
+ def getMessage(
+ errorClass: String,
+ messageParameters: Array[String],
+ queryContext: String = ""): String = {
val errorInfo = errorClassToInfoMap.getOrElse(errorClass,
throw new IllegalArgumentException(s"Cannot find error class '$errorClass'"))
if (errorInfo.subClass.isDefined) {
@@ -82,11 +85,11 @@ private[spark] object SparkThrowableHelper {
val subMessageParameters = messageParameters.tail
"[" + errorClass + "." + subErrorClass + "] " +
String.format((errorInfo.messageFormat + errorSubInfo.messageFormat).replaceAll("<[a-zA-Z0-9_-]+>", "%s"),
- subMessageParameters: _*)
+ subMessageParameters: _*) + queryContext
} else {
"[" + errorClass + "] " + String.format(
errorInfo.messageFormat.replaceAll("<[a-zA-Z0-9_-]+>", "%s"),
- messageParameters: _*)
+ messageParameters: _*) + queryContext
}
}
diff --git a/core/src/main/scala/org/apache/spark/SparkException.scala b/core/src/main/scala/org/apache/spark/SparkException.scala
index 4feea6151b9..c28624cc7a0 100644
--- a/core/src/main/scala/org/apache/spark/SparkException.scala
+++ b/core/src/main/scala/org/apache/spark/SparkException.scala
@@ -84,8 +84,12 @@ private[spark] class SparkUpgradeException(
/**
* Arithmetic exception thrown from Spark with an error class.
*/
-private[spark] class SparkArithmeticException(errorClass: String, messageParameters: Array[String])
- extends ArithmeticException(SparkThrowableHelper.getMessage(errorClass, messageParameters))
+private[spark] class SparkArithmeticException(
+ errorClass: String,
+ messageParameters: Array[String],
+ queryContext: String = "")
+ extends ArithmeticException(
+ SparkThrowableHelper.getMessage(errorClass, messageParameters, queryContext))
with SparkThrowable {
override def getErrorClass: String = errorClass
@@ -132,9 +136,13 @@ private[spark] class SparkConcurrentModificationException(
/**
* Datetime exception thrown from Spark with an error class.
*/
-private[spark] class SparkDateTimeException(errorClass: String, messageParameters: Array[String])
+private[spark] class SparkDateTimeException(
+ errorClass: String,
+ messageParameters: Array[String],
+ queryContext: String = "")
extends DateTimeException(
- SparkThrowableHelper.getMessage(errorClass, messageParameters)) with SparkThrowable {
+ SparkThrowableHelper.getMessage(errorClass, messageParameters, queryContext))
+ with SparkThrowable {
override def getErrorClass: String = errorClass
}
@@ -168,9 +176,11 @@ private[spark] class SparkFileNotFoundException(
*/
private[spark] class SparkNumberFormatException(
errorClass: String,
- messageParameters: Array[String])
+ messageParameters: Array[String],
+ queryContext: String)
extends NumberFormatException(
- SparkThrowableHelper.getMessage(errorClass, messageParameters)) with SparkThrowable {
+ SparkThrowableHelper.getMessage(errorClass, messageParameters, queryContext))
+ with SparkThrowable {
override def getErrorClass: String = errorClass
}
@@ -226,9 +236,11 @@ private[spark] class SparkIOException(
private[spark] class SparkRuntimeException(
errorClass: String,
messageParameters: Array[String],
- cause: Throwable = null)
+ cause: Throwable = null,
+ queryContext: String = "")
extends RuntimeException(
- SparkThrowableHelper.getMessage(errorClass, messageParameters), cause) with SparkThrowable {
+ SparkThrowableHelper.getMessage(errorClass, messageParameters, queryContext), cause)
+ with SparkThrowable {
override def getErrorClass: String = errorClass
}
@@ -274,9 +286,11 @@ private[spark] class SparkSQLException(
*/
private[spark] class SparkNoSuchElementException(
errorClass: String,
- messageParameters: Array[String])
+ messageParameters: Array[String],
+ queryContext: String)
extends NoSuchElementException(
- SparkThrowableHelper.getMessage(errorClass, messageParameters)) with SparkThrowable {
+ SparkThrowableHelper.getMessage(errorClass, messageParameters, queryContext))
+ with SparkThrowable {
override def getErrorClass: String = errorClass
}
diff --git a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
index 6321d8a78ed..d6348e1aff1 100644
--- a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
@@ -127,7 +127,7 @@ class SparkThrowableSuite extends SparkFunSuite {
assert(getMessage("DIVIDE_BY_ZERO", Array("foo", "bar", "baz")) ==
"[DIVIDE_BY_ZERO] Division by zero. " +
"To return NULL instead, use `try_divide`. If necessary set foo to
\"false\" " +
- "(except for ANSI interval type) to bypass this error.bar")
+ "(except for ANSI interval type) to bypass this error.")
}
test("Error message is formatted") {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 6c750ab49cf..a155b0694b5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -104,8 +104,8 @@ object QueryExecutionErrors extends QueryErrorsBase {
value.toDebugString,
decimalPrecision.toString,
decimalScale.toString,
- toSQLConf(SQLConf.ANSI_ENABLED.key),
- context))
+ toSQLConf(SQLConf.ANSI_ENABLED.key)),
+ queryContext = context)
}
def invalidInputInCastToDatetimeError(
@@ -119,8 +119,8 @@ object QueryExecutionErrors extends QueryErrorsBase {
toSQLValue(value, from),
toSQLType(from),
toSQLType(to),
- toSQLConf(SQLConf.ANSI_ENABLED.key),
- errorContext))
+ toSQLConf(SQLConf.ANSI_ENABLED.key)),
+ queryContext = errorContext)
}
def invalidInputSyntaxForBooleanError(
@@ -132,8 +132,8 @@ object QueryExecutionErrors extends QueryErrorsBase {
toSQLValue(s, StringType),
toSQLType(StringType),
toSQLType(BooleanType),
- toSQLConf(SQLConf.ANSI_ENABLED.key),
- errorContext))
+ toSQLConf(SQLConf.ANSI_ENABLED.key)),
+ queryContext = errorContext)
}
def invalidInputInCastToNumberError(
@@ -146,8 +146,8 @@ object QueryExecutionErrors extends QueryErrorsBase {
toSQLValue(s, StringType),
toSQLType(StringType),
toSQLType(to),
- toSQLConf(SQLConf.ANSI_ENABLED.key),
- errorContext))
+ toSQLConf(SQLConf.ANSI_ENABLED.key)),
+ queryContext = errorContext)
}
def cannotCastFromNullTypeError(to: DataType): Throwable = {
@@ -180,7 +180,8 @@ object QueryExecutionErrors extends QueryErrorsBase {
def divideByZeroError(context: String): ArithmeticException = {
new SparkArithmeticException(
errorClass = "DIVIDE_BY_ZERO",
- messageParameters = Array(toSQLConf(SQLConf.ANSI_ENABLED.key), context))
+ messageParameters = Array(toSQLConf(SQLConf.ANSI_ENABLED.key)),
+ queryContext = context)
}
def invalidArrayIndexError(index: Int, numElements: Int): ArrayIndexOutOfBoundsException = {
@@ -218,8 +219,8 @@ object QueryExecutionErrors extends QueryErrorsBase {
errorClass = "MAP_KEY_DOES_NOT_EXIST",
messageParameters = Array(
toSQLValue(key, dataType),
- toSQLConf(SQLConf.ANSI_ENABLED.key),
- context))
+ toSQLConf(SQLConf.ANSI_ENABLED.key)),
+ queryContext = context)
}
def invalidFractionOfSecondError(): DateTimeException = {
@@ -477,8 +478,10 @@ object QueryExecutionErrors extends QueryErrorsBase {
hint: String = "",
errorContext: String = ""): ArithmeticException = {
val alternative = if (hint.nonEmpty) s" To return NULL instead, use '$hint'." else ""
- new SparkArithmeticException("ARITHMETIC_OVERFLOW",
- Array(message, alternative, SQLConf.ANSI_ENABLED.key, errorContext))
+ new SparkArithmeticException(
+ errorClass = "ARITHMETIC_OVERFLOW",
+ messageParameters = Array(message, alternative, SQLConf.ANSI_ENABLED.key),
+ queryContext = errorContext)
}
def unaryMinusCauseOverflowError(originValue: Int): ArithmeticException = {
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]