Github user squito commented on a diff in the pull request:

    https://github.com/apache/spark/pull/9816#discussion_r45647600
  
    --- Diff: core/src/main/scala/org/apache/spark/Logging.scala ---
    @@ -119,30 +119,31 @@ trait Logging {
         val usingLog4j12 = 
"org.slf4j.impl.Log4jLoggerFactory".equals(binderClass)
         if (usingLog4j12) {
           val log4j12Initialized = 
LogManager.getRootLogger.getAllAppenders.hasMoreElements
    +      // scalastyle:off println
           if (!log4j12Initialized) {
    -        // scalastyle:off println
    -        if (Utils.isInInterpreter) {
    -          val replDefaultLogProps = 
"org/apache/spark/log4j-defaults-repl.properties"
    -          
Option(Utils.getSparkClassLoader.getResource(replDefaultLogProps)) match {
    -            case Some(url) =>
    -              PropertyConfigurator.configure(url)
    -              System.err.println(s"Using Spark's repl log4j profile: 
$replDefaultLogProps")
    -              System.err.println("To adjust logging level use 
sc.setLogLevel(\"INFO\")")
    -            case None =>
    -              System.err.println(s"Spark was unable to load 
$replDefaultLogProps")
    -          }
    -        } else {
    -          val defaultLogProps = 
"org/apache/spark/log4j-defaults.properties"
    -          Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) 
match {
    -            case Some(url) =>
    -              PropertyConfigurator.configure(url)
    -              System.err.println(s"Using Spark's default log4j profile: 
$defaultLogProps")
    -            case None =>
    -              System.err.println(s"Spark was unable to load 
$defaultLogProps")
    -          }
    +        val defaultLogProps = "org/apache/spark/log4j-defaults.properties"
    +        Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) 
match {
    +          case Some(url) =>
    +            PropertyConfigurator.configure(url)
    +            System.err.println(s"Using Spark's default log4j profile: 
$defaultLogProps")
    +          case None =>
    +            System.err.println(s"Spark was unable to load 
$defaultLogProps")
             }
    -        // scalastyle:on println
           }
    +
    +      if (Utils.isInInterpreter) {
    +        // Use the repl's main class to define the default log level when 
running the shell,
    +        // overriding the root logger's config if they're different.
    +        val rootLogger = LogManager.getRootLogger()
    +        val replLogger = LogManager.getLogger("org.apache.spark.repl.Main")
    +        val replLevel = if (replLogger.getLevel() != null) 
replLogger.getLevel() else Level.WARN
    --- End diff --
    
    `Option(replLogger.getLevel()).getOrElse(Level.WARN)`


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and you would like it to be, or if the feature is enabled but not
working, please contact infrastructure at [email protected] or file
a JIRA ticket with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to