srielau commented on code in PR #37520:
URL: https://github.com/apache/spark/pull/37520#discussion_r948171033


##########
core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala:
##########
@@ -222,4 +222,66 @@ class SparkThrowableSuite extends SparkFunSuite {
         assert(false)
     }
   }
+
+  test("get message in the specified format") {
+    import ErrorMessageFormat._
+    class TestQueryContext extends QueryContext {
+      override val objectName = "v1"
+      override val objectType = "VIEW"
+      override val startIndex = 2
+      override val stopIndex = -1
+      override val fragment = "1 / 0"
+    }
+    val e = new SparkArithmeticException(
+      errorClass = "DIVIDE_BY_ZERO",
+      errorSubClass = None,
+      messageParameters = Array("CONFIG"),
+      context = Array(new TestQueryContext),
+      summary = "Query summary")
+
+    assert(SparkThrowableHelper.getMessage(e, PRETTY) ===
+      "[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 " +
+      "and return NULL instead. If necessary set CONFIG to \"false\" to bypass this error." +
+      "\nQuery summary")
+    // scalastyle:off line.size.limit
+    assert(SparkThrowableHelper.getMessage(e, MINIMAL) ===
+      """{
+        |  "errorClass" : "DIVIDE_BY_ZERO",
+        |  "sqlState" : "22012",
+        |  "messageParameters" : {
+        |    "config" : "CONFIG"
+        |  },
+        |  "queryContext" : [ {
+        |    "objectType" : "VIEW",
+        |    "objectName" : "v1",
+        |    "startIndex" : 3,
+        |    "fragment" : "1 / 0"
+        |  } ]
+        |}""".stripMargin)
+    assert(SparkThrowableHelper.getMessage(e, STANDARD) ===
+      """{
+        |  "errorClass" : "DIVIDE_BY_ZERO",
+        |  "message" : "Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set <config> to \"false\" to bypass this error.",
+        |  "sqlState" : "22012",
+        |  "messageParameters" : {
+        |    "config" : "CONFIG"
+        |  },
+        |  "queryContext" : [ {
+        |    "objectType" : "VIEW",
+        |    "objectName" : "v1",
+        |    "startIndex" : 3,
+        |    "fragment" : "1 / 0"
+        |  } ]
+        |}""".stripMargin)
+      // scalastyle:on line.size.limit
+    // Legacy mode when an exception does not have any error class
+    class LegacyException extends Throwable with SparkThrowable {
+      override def getErrorClass: String = null
+      override def getMessage: String = "Test message"
+    }
+    val e2 = new LegacyException
+    assert(SparkThrowableHelper.getMessage(e2, MINIMAL) ===
+      """{"errorClass":"legacy","messageParameters":{"message":"Test message"},""" +

Review Comment:
   Nit: Should we capitalize `legacy` to `LEGACY` here, for consistency with the other error-class names?



##########
sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala:
##########
@@ -3876,6 +3876,15 @@ object SQLConf {
       .booleanConf
       .createWithDefault(false)
 
+  val ERROR_MESSAGE_FORMAT = buildConf("spark.sql.error.messageFormat")
+    .doc("When PRETTY, the error message consists of textual representation of error class, " +
+      " message and query context. The MINIMAL and STANDARD formats are JSON formats where " +
+      "STANDARD is pretty JSON with additional JSON field `message`.")

Review Comment:
   ```suggestion
       .doc("When PRETTY, the error message consists of textual representation of error class, " +
         " message and query context. The MINIMAL and STANDARD formats are pretty JSON formats where " +
         "STANDARD includes an additional JSON field `message`.")
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to