sunchao commented on code in PR #52068:
URL: https://github.com/apache/spark/pull/52068#discussion_r2437201054
##########
core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala:
##########
@@ -1058,6 +1062,26 @@ private[spark] class SparkSubmit extends Logging {
/** Throw a SparkException with the given error message. */
private def error(msg: String): Unit = throw new SparkException(msg)
+ /**
+ * Store the diagnostics using the SparkDiagnosticsSetter.
+ */
+ private def storeDiagnostics(args: SparkSubmitArguments, sparkConf:
SparkConf,
+ t: Throwable): Unit = {
+ // Swallow exceptions when storing diagnostics; this shouldn't fail the
application.
+ try {
+ if (args.master.startsWith("k8s") && !isShell(args.primaryResource)
+ && !isSqlShell(args.mainClass) && !isThriftServer(args.mainClass)) {
+ val diagnostics = SparkStringUtils.abbreviate(
+ org.apache.hadoop.util.StringUtils.stringifyException(t),
+ KUBERNETES_DIAGNOSTICS_MESSAGE_LIMIT_BYTES)
+ SparkSubmitUtils.
Review Comment:
Can we check whether the setter is set at the beginning and bail out early if
that is the case?
##########
core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala:
##########
@@ -1255,3 +1297,10 @@ private[spark] trait SparkSubmitOperation {
def supports(master: String): Boolean
}
+
+private[spark] trait SparkDiagnosticsSetter {
Review Comment:
Can we add some documentation for this class and its methods?
##########
core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala:
##########
@@ -1058,6 +1062,26 @@ private[spark] class SparkSubmit extends Logging {
/** Throw a SparkException with the given error message. */
private def error(msg: String): Unit = throw new SparkException(msg)
+ /**
+ * Store the diagnostics using the SparkDiagnosticsSetter.
+ */
+ private def storeDiagnostics(args: SparkSubmitArguments, sparkConf:
SparkConf,
Review Comment:
nit: style
##########
core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala:
##########
@@ -1233,6 +1257,24 @@ private[spark] object SparkSubmitUtils {
case _ => throw new SparkException(s"Spark config without '=': $pair")
}
}
+
+ private[deploy] def getSparkDiagnosticsSetters(
Review Comment:
ditto: coding style
```scala
private[deploy] def getSparkDiagnosticsSetters(
master: String,
sparkConf: SparkConf): Option[SparkDiagnosticsSetter] = {
```
##########
core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala:
##########
@@ -1058,6 +1062,26 @@ private[spark] class SparkSubmit extends Logging {
/** Throw a SparkException with the given error message. */
private def error(msg: String): Unit = throw new SparkException(msg)
+ /**
+ * Store the diagnostics using the SparkDiagnosticsSetter.
+ */
+ private def storeDiagnostics(args: SparkSubmitArguments, sparkConf:
SparkConf,
+ t: Throwable): Unit = {
+ // Swallow exceptions when storing diagnostics; this shouldn't fail the
application.
+ try {
+ if (args.master.startsWith("k8s") && !isShell(args.primaryResource)
+ && !isSqlShell(args.mainClass) && !isThriftServer(args.mainClass)) {
+ val diagnostics = SparkStringUtils.abbreviate(
Review Comment:
Can we move this logic into `setDiagnostics` implementation?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]