This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new f97515ed820 [SPARK-39973][CORE] Suppress error logs when the number of
timers is set to 0
f97515ed820 is described below
commit f97515ed8207c71d92448338b3390f5bfb0571f1
Author: Hyukjin Kwon <[email protected]>
AuthorDate: Mon Aug 8 18:58:49 2022 +0900
[SPARK-39973][CORE] Suppress error logs when the number of timers is set to 0
### What changes were proposed in this pull request?
This PR proposes to:
- Suppress error logs when the number of timers is set to 0.
- Add documentation for describing its behaviour clearly:
- 0: disable the feature
- -1: no limit
- Otherwise, explicit limit.
- Throw an exception if the configuration is set to a number lower than
-1.
This is an internal configuration, numbers lower than -1 don't make
much sense, and this configuration is sort of less known. So I think it's pretty
safe to do this.
### Why are the changes needed?
To avoid noisy error logs, and document the feature properly.
### Does this PR introduce _any_ user-facing change?
Yes. When `spark.scheduler.listenerbus.metrics.maxListenerClassesTimed` is
set to `0`, it does not show a warning such as:
```
LiveListenerBusMetrics: Not measuring processing time for listener class
org.apache.spark.sql.util.ExecutionListenerBus because a maximum of 0 listener
classes are already timed.
```
### How was this patch tested?
Unittest is added.
Closes #37432 from HyukjinKwon/SPARK-39973.
Authored-by: Hyukjin Kwon <[email protected]>
Signed-off-by: Hyukjin Kwon <[email protected]>
---
.../scala/org/apache/spark/internal/config/package.scala | 4 ++++
.../org/apache/spark/scheduler/LiveListenerBus.scala | 8 ++++++--
.../org/apache/spark/scheduler/SparkListenerSuite.scala | 16 ++++++++++++++++
3 files changed, 26 insertions(+), 2 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala
b/core/src/main/scala/org/apache/spark/internal/config/package.scala
index 72a03a4d1fb..589a1ffa713 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/package.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala
@@ -931,8 +931,12 @@ package object config {
private[spark] val LISTENER_BUS_METRICS_MAX_LISTENER_CLASSES_TIMED =
ConfigBuilder("spark.scheduler.listenerbus.metrics.maxListenerClassesTimed")
.internal()
+ .doc("The number of listeners that have timers to track the elapsed time of " +
+ "processing events. If 0 is set, disables this feature. If -1 is set, " +
+ "it sets no limit to the number.")
.version("2.3.0")
.intConf
+ .checkValue(_ >= -1, "The number of listeners should be larger than -1.")
.createWithDefault(128)
private[spark] val LISTENER_BUS_LOG_SLOW_EVENT_ENABLED =
diff --git
a/core/src/main/scala/org/apache/spark/scheduler/LiveListenerBus.scala
b/core/src/main/scala/org/apache/spark/scheduler/LiveListenerBus.scala
index 4be4e7a8875..104038fc209 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/LiveListenerBus.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/LiveListenerBus.scala
@@ -292,10 +292,14 @@ private[spark] class LiveListenerBusMetrics(conf:
SparkConf)
val maxTimed = conf.get(LISTENER_BUS_METRICS_MAX_LISTENER_CLASSES_TIMED)
perListenerClassTimers.get(className).orElse {
if (perListenerClassTimers.size == maxTimed) {
- logError(s"Not measuring processing time for listener class
$className because a " +
- s"maximum of $maxTimed listener classes are already timed.")
+ if (maxTimed != 0) {
+ // Explicitly disabled.
+ logError(s"Not measuring processing time for listener class
$className because a " +
+ s"maximum of $maxTimed listener classes are already timed.")
+ }
None
} else {
+ // maxTimed is either -1 (no limit), or an explicit number.
perListenerClassTimers(className) =
metricRegistry.timer(MetricRegistry.name("listenerProcessingTime",
className))
perListenerClassTimers.get(className)
diff --git
a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
index d72744c5cc3..dca915e0a97 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
@@ -599,6 +599,22 @@ class SparkListenerSuite extends SparkFunSuite with
LocalSparkContext with Match
assert(bus.getQueueCapacity(EVENT_LOG_QUEUE) == Some(2))
}
+ test("SPARK-39973: Suppress error logs when the number of timers is set to
0") {
+ sc = new SparkContext(
+ "local",
+ "SparkListenerSuite",
+ new SparkConf().set(
+ LISTENER_BUS_METRICS_MAX_LISTENER_CLASSES_TIMED.key, 0.toString))
+ val testAppender = new LogAppender("Error logger for timers")
+ withLogAppender(testAppender) {
+ sc.addSparkListener(new SparkListener { })
+ sc.addSparkListener(new SparkListener { })
+ }
+ assert(!testAppender.loggingEvents
+ .exists(_.getMessage.getFormattedMessage.contains(
+ "Not measuring processing time for listener")))
+ }
+
/**
* Assert that the given list of numbers has an average that is greater than
zero.
*/
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]