This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new ae4625c9c375 [SPARK-50433][DOCS][TESTS] Fix configuring log4j2 guide docs for Spark on YARN and UT
ae4625c9c375 is described below

commit ae4625c9c375ebda82210ec17903b0941f060114
Author: Cheng Pan <[email protected]>
AuthorDate: Mon Dec 2 11:37:06 2024 -0800

    [SPARK-50433][DOCS][TESTS] Fix configuring log4j2 guide docs for Spark on YARN and UT
    
    ### What changes were proposed in this pull request?
    
    Fix the guidance on configuring log4j2 for Spark on YARN in the docs, and update tests and examples that still referenced the legacy log4j 1.x configuration (`log4j.properties`, `-Dlog4j.configuration`).
    
    ### Why are the changes needed?
    
    SPARK-37814 (3.3.0) migrated the logging system from log4j1 to log4j2, so the docs should be updated as well.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, docs are updated.
    
    ### How was this patch tested?
    
    Review.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #48981 from pan3793/SPARK-50433.
    
    Lead-authored-by: Cheng Pan <[email protected]>
    Co-authored-by: Cheng Pan <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../scala/org/apache/spark/sql/test/IntegrationTestUtils.scala    | 2 +-
 .../org/apache/spark/examples/streaming/KinesisWordCountASL.scala | 2 +-
 docs/running-on-yarn.md                                           | 8 ++++----
 .../org/apache/spark/examples/streaming/StreamingExamples.scala   | 2 +-
 .../scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala     | 2 +-
 5 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/test/IntegrationTestUtils.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/test/IntegrationTestUtils.scala
index 61d08912aec2..3ae9b9fc73b4 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/test/IntegrationTestUtils.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/test/IntegrationTestUtils.scala
@@ -74,7 +74,7 @@ object IntegrationTestUtils {
 
         // Redirect server log into console
         "--conf",
-        s"spark.driver.extraJavaOptions=-Dlog4j.configuration=$log4j2")
+        s"spark.driver.extraJavaOptions=-Dlog4j.configurationFile=$log4j2")
     } else Seq.empty
   }
 
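The hunk above switches the driver option from the log4j 1.x system property to the one that log4j 2.x actually reads. A minimal before/after sketch of the two spellings (the file paths are placeholders, not part of this change):

// Sketch only: the same driver option spelled for each logging backend.
val log4j1Style = "spark.driver.extraJavaOptions=-Dlog4j.configuration=file:/tmp/log4j.properties"      // legacy, log4j 1.x
val log4j2Style = "spark.driver.extraJavaOptions=-Dlog4j.configurationFile=file:/tmp/log4j2.properties" // current, log4j 2.x
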
diff --git a/connector/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala b/connector/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
index 4835e9de086c..cc24c378f4cb 100644
--- a/connector/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
+++ b/connector/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
@@ -275,7 +275,7 @@ private[streaming] object StreamingExamples extends Logging {
      // We first log something to initialize Spark's default logging, then we override the
       // logging level.
       logInfo("Setting log level to [WARN] for streaming example." +
-        " To override add a custom log4j.properties to the classpath.")
+        " To override add a custom log4j2.properties to the classpath.")
       Configurator.setRootLevel(Level.WARN)
     }
   }
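The message text now points users at log4j2.properties, matching the log4j 2.x programmatic API (Configurator.setRootLevel) that these examples already use. For reference, a self-contained sketch of that pattern with the required imports, assuming log4j-core is on the classpath (not taken verbatim from the repository):

import org.apache.logging.log4j.Level
import org.apache.logging.log4j.core.config.Configurator

object LogLevelSketch {
  def main(args: Array[String]): Unit = {
    // Override the root logger level programmatically; a log4j2.properties on the
    // classpath, if present, would have been applied before this call.
    Configurator.setRootLevel(Level.WARN)
  }
}
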
diff --git a/docs/running-on-yarn.md b/docs/running-on-yarn.md
index d149f9196b34..aefa979946a6 100644
--- a/docs/running-on-yarn.md
+++ b/docs/running-on-yarn.md
@@ -124,15 +124,15 @@ all environment variables used for launching each container. This process is use
classpath problems in particular. (Note that enabling this requires admin privileges on cluster
settings and a restart of all node managers. Thus, this is not applicable to hosted clusters).

-To use a custom log4j configuration for the application master or executors, here are the options:
+To use a custom log4j2 configuration for the application master or executors, here are the options:

-- upload a custom `log4j.properties` using `spark-submit`, by adding it to the `--files` list of files
+- upload a custom `log4j2.properties` using `spark-submit`, by adding it to the `--files` list of files
  to be uploaded with the application.
-- add `-Dlog4j.configuration=<location of configuration file>` to `spark.driver.extraJavaOptions`
+- add `-Dlog4j.configurationFile=<location of configuration file>` to `spark.driver.extraJavaOptions`
  (for the driver) or `spark.executor.extraJavaOptions` (for executors). Note that if using a file,
  the `file:` protocol should be explicitly provided, and the file needs to exist locally on all
  the nodes.
-- update the `$SPARK_CONF_DIR/log4j.properties` file and it will be automatically uploaded along
+- update the `$SPARK_CONF_DIR/log4j2.properties` file and it will be automatically uploaded along
  with the other configurations. Note that other 2 options has higher priority than this option if
  multiple options are specified.
 
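The updated doc text above lists three ways to supply a custom log4j2 configuration on YARN. As an illustration of the first two routes, expressed as spark-submit argument lists in the same style as the connect test helper earlier in this diff (the routes are alternatives, and every path below is a placeholder):

// Route 1: upload a custom log4j2.properties with the application via --files.
val uploadRoute = Seq("--files", "/path/to/log4j2.properties")

// Route 2: point the driver and executors at a file that already exists on every node;
// note the explicit file: protocol called out in the docs above.
val javaOptionsRoute = Seq(
  "--conf", "spark.driver.extraJavaOptions=-Dlog4j.configurationFile=file:/path/to/log4j2.properties",
  "--conf", "spark.executor.extraJavaOptions=-Dlog4j.configurationFile=file:/path/to/log4j2.properties")

The third route (editing `$SPARK_CONF_DIR/log4j2.properties`) needs no extra arguments; per the docs above, the other two routes take priority over it if combined.
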
diff --git a/examples/src/main/scala/org/apache/spark/examples/streaming/StreamingExamples.scala b/examples/src/main/scala/org/apache/spark/examples/streaming/StreamingExamples.scala
index 20c5eb170015..9289b005e3ba 100644
--- a/examples/src/main/scala/org/apache/spark/examples/streaming/StreamingExamples.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/streaming/StreamingExamples.scala
@@ -31,7 +31,7 @@ object StreamingExamples extends Logging {
      // We first log something to initialize Spark's default logging, then we override the
       // logging level.
       logInfo("Setting log level to [WARN] for streaming example." +
-        " To override add a custom log4j.properties to the classpath.")
+        " To override add a custom log4j2.properties to the classpath.")
       Configurator.setRootLevel(Level.WARN)
     }
   }
diff --git a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 92d9f2d62d1c..71843b7f90b1 100644
--- a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -293,7 +293,7 @@ class YarnClusterSuite extends BaseYarnClusterSuite {
   }
 
   test("running Spark in yarn-cluster mode displays driver log links") {
-    val log4jConf = new File(tempDir, "log4j.properties")
+    val log4jConf = new File(tempDir, "log4j2.properties")
     val logOutFile = new File(tempDir, "logs")
     Files.asCharSink(log4jConf, StandardCharsets.UTF_8).write(
       s"""rootLogger.level = debug

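The test now writes its logging configuration as a log4j2 properties-format file; the hunk is truncated after the first line of that string. Purely as a generic sketch of what a minimal file in that format can look like, written the same way the test writes its file (appender name, output path, and pattern are placeholders, not the suite's actual content):

// Sketch only: a minimal log4j2 properties-format configuration. All values are illustrative.
val sketchConf =
  """rootLogger.level = debug
    |rootLogger.appenderRef.file.ref = File
    |appender.file.type = File
    |appender.file.name = File
    |appender.file.fileName = /tmp/spark-test.log
    |appender.file.layout.type = PatternLayout
    |appender.file.layout.pattern = %d{HH:mm:ss.SSS} %p %c: %m%n
    |""".stripMargin
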

