srowen commented on code in PR #42999:
URL: https://github.com/apache/spark/pull/42999#discussion_r1337714285
##########
core/src/main/scala/org/apache/spark/util/Utils.scala:
##########
@@ -2323,31 +2323,75 @@ private[spark] object Utils
* configure a new log4j level
*/
def setLogLevel(l: Level): Unit = {
- val (ctx, loggerConfig) = getLogContext
+ val (ctx, loggerConfig) = getLogContext()
loggerConfig.setLevel(l)
ctx.updateLoggers()
// Setting threshold to null as rootLevel will define log level for
spark-shell
Logging.sparkShellThresholdLevel = null
}
+ /**
+ * configure a new log4j level for specific package or class
+ *
+ * @param name package or class name such as "org.apache.spark" or
+ * "org.apache.spark.SparkContext"
+ * @param level new level of [[org.apache.log4j.Level]]
+ */
+ def setLogLevel(name: String, level: Level): Unit = {
Review Comment:
I don't think we need this override, do we? The original method didn't
support this.
I'd also imagine that the old method could just call this one with some value
representing the root logger.
##########
python/pyspark/context.py:
##########
@@ -534,6 +534,27 @@ def setLogLevel(self, logLevel: str) -> None:
"""
self._jsc.setLogLevel(logLevel)
+ def setLogLevel(self, logName: str, logLevel: str) -> None:
Review Comment:
Yeah, I mean, this is all you've exposed in R and Python, so not sure you
should/need to expose more in Java/Scala
##########
core/src/main/scala/org/apache/spark/util/Utils.scala:
##########
@@ -2323,31 +2323,75 @@ private[spark] object Utils
* configure a new log4j level
*/
def setLogLevel(l: Level): Unit = {
- val (ctx, loggerConfig) = getLogContext
+ val (ctx, loggerConfig) = getLogContext()
loggerConfig.setLevel(l)
ctx.updateLoggers()
// Setting threshold to null as rootLevel will define log level for
spark-shell
Logging.sparkShellThresholdLevel = null
}
+ /**
+ * configure a new log4j level for specific package or class
+ *
+ * @param name package or class name such as "org.apache.spark" or
+ * "org.apache.spark.SparkContext"
+ * @param level new level of [[org.apache.log4j.Level]]
+ */
+ def setLogLevel(name: String, level: Level): Unit = {
+ val (ctx, loggerConfig) = getLogContext(name)
+ if (loggerConfig != null) {
+ loggerConfig.setLevel(level)
+ logInfo(s"Logger ${loggerConfig.getName} level changed into $level")
+ } else {
+ val newLoggerConfig = new LoggerConfig(name, level, true)
+ ctx.getConfiguration.addLogger(name, newLoggerConfig)
+ logInfo(s"Added new logger $name = $level")
+ }
+ ctx.updateLoggers()
+ }
+
+ /**
+ * get logger level for specific package or class
+ *
+ * @param name package or class name such as "org.apache.spark" or
+ * "org.apache.spark.SparkContext"
+ */
+ def getLoggerLevel(name: String): Option[Level] = {
+ val (ctx, loggerConfig) = getLogContext(name)
+ Option(loggerConfig).map(c => c.getLevel)
+ }
+
+ /**
+ * remove logger for specific package or class
+ *
+ * @param name package or class name such as "org.apache.spark" or
+ * "org.apache.spark.SparkContext"
+ */
+ def removeLogger(name: String): Unit = {
Review Comment:
Do you really need get/remove? not sure it's necessary
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]