MaxGekk commented on code in PR #38273:
URL: https://github.com/apache/spark/pull/38273#discussion_r1001374815


##########
core/src/main/resources/error/error-classes.json:
##########
@@ -276,6 +276,11 @@
     ],
     "sqlState" : "22008"
   },
+  "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION" : {
+    "message" : [
+      "Decimal precision <precision> exceeds max precision <maxPrecision>"

Review Comment:
   Minor, but for consistency:
   ```suggestion
         "Decimal precision <precision> exceeds max precision <maxPrecision>."
   ```



##########
core/src/main/scala/org/apache/spark/SparkException.scala:
##########
@@ -348,3 +348,19 @@ private[spark] class SparkSQLFeatureNotSupportedException(
 
   override def getErrorClass: String = errorClass
 }
+
+/**
+ * Exception thrown from Spark Streaming framework.
+ */
+private[spark] class SparkStreamingException(

Review Comment:
   Could you re-use the existing exception `SparkRuntimeException`, please? We
don't need to introduce additional exception classes, since we already have error
classes that users can use to distinguish errors.



##########
core/src/main/resources/error/error-classes.json:
##########
@@ -605,6 +620,11 @@
     ],
     "sqlState" : "22005"
   },
+  "OUT_OF_DECIMAL_TYPE_RANGE" : {
+    "message" : [
+      "Out of decimal type range: <value>"

Review Comment:
   ```suggestion
         "Out of decimal type range: <value>."
   ```



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala:
##########
@@ -2384,8 +2387,28 @@ private[sql] object QueryExecutionErrors extends 
QueryErrorsBase {
     new SparkException("Foreach writer has been aborted due to a task failure")
   }
 
-  def integerOverflowError(message: String): Throwable = {
-    new ArithmeticException(s"Integer overflow. $message")
+  def incorrectRumpUpRate(rowsPerSecond: Long,
+                          maxSeconds: Long,
+                          rampUpTimeSeconds: Long): Throwable = {

Review Comment:
   Could you fix the indentation here? See
https://github.com/databricks/scala-style-guide#spacing-and-indentation



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala:
##########
@@ -1260,27 +1260,30 @@ private[sql] object QueryExecutionErrors extends 
QueryErrorsBase {
 
   def unscaledValueTooLargeForPrecisionError(): SparkArithmeticException = {
     new SparkArithmeticException(
-      errorClass = "_LEGACY_ERROR_TEMP_2117",
-      messageParameters = Map("ansiConfig" -> 
toSQLConf(SQLConf.ANSI_ENABLED.key)),
+      errorClass = "UNSCALED_VALUE_TOO_LARGE_FOR_PRECISION",
+      messageParameters = Map(
+        "ansiConfig" -> SQLConf.ANSI_ENABLED.key),

Review Comment:
   Wrap the config by `toSQLConf()`, see example in the file.



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala:
##########
@@ -2384,8 +2387,28 @@ private[sql] object QueryExecutionErrors extends 
QueryErrorsBase {
     new SparkException("Foreach writer has been aborted due to a task failure")
   }
 
-  def integerOverflowError(message: String): Throwable = {
-    new ArithmeticException(s"Integer overflow. $message")
+  def incorrectRumpUpRate(rowsPerSecond: Long,
+                          maxSeconds: Long,
+                          rampUpTimeSeconds: Long): Throwable = {
+    new SparkStreamingException(
+      errorClass = "INCORRECT_RUMP_UP_RATE",
+      messageParameters = Map(
+        "rowsPerSecond" -> rowsPerSecond.toString,
+        "maxSeconds" -> maxSeconds.toString,
+        "rampUpTimeSeconds" -> rampUpTimeSeconds.toString
+      ))
+  }
+
+  def incorrectEndOffset(rowsPerSecond: Long,
+                         maxSeconds: Long,
+                         endSeconds: Long): Throwable = {

Review Comment:
   Please, fix indentation.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to