This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.5 by this push:
     new 1d6f7adbe622 [SPARK-50433][DOCS][TESTS][3.5] Fix configuring log4j2 
guide docs for Spark on YARN and UT
1d6f7adbe622 is described below

commit 1d6f7adbe622f8433b9e22e87fd191316ad86053
Author: Cheng Pan <[email protected]>
AuthorDate: Tue Dec 3 08:31:07 2024 -0800

    [SPARK-50433][DOCS][TESTS][3.5] Fix configuring log4j2 guide docs for Spark 
on YARN and UT
    
    Backport https://github.com/apache/spark/pull/48981 to 3.5
    
    ### What changes were proposed in this pull request?
    
    As title.
    
    ### Why are the changes needed?
    
    SPARK-37814 (3.3.0) migrated the logging system from log4j1 to log4j2, so we 
should update the docs as well.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, docs are updated.
    
    ### How was this patch tested?
    
    Review.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #49044 from pan3793/SPARK-50433-3.5.
    
    Authored-by: Cheng Pan <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../scala/org/apache/spark/sql/test/IntegrationTestUtils.scala    | 2 +-
 .../org/apache/spark/examples/streaming/KinesisWordCountASL.scala | 2 +-
 docs/running-on-yarn.md                                           | 8 ++++----
 .../org/apache/spark/examples/streaming/StreamingExamples.scala   | 2 +-
 .../scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala     | 2 +-
 5 files changed, 8 insertions(+), 8 deletions(-)

diff --git 
a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/test/IntegrationTestUtils.scala
 
b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/test/IntegrationTestUtils.scala
index 61d08912aec2..3ae9b9fc73b4 100644
--- 
a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/test/IntegrationTestUtils.scala
+++ 
b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/test/IntegrationTestUtils.scala
@@ -74,7 +74,7 @@ object IntegrationTestUtils {
 
         // Redirect server log into console
         "--conf",
-        s"spark.driver.extraJavaOptions=-Dlog4j.configuration=$log4j2")
+        s"spark.driver.extraJavaOptions=-Dlog4j.configurationFile=$log4j2")
     } else Seq.empty
   }
 
diff --git 
a/connector/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
 
b/connector/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
index 7d12af3256f1..d388b480e065 100644
--- 
a/connector/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
+++ 
b/connector/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
@@ -275,7 +275,7 @@ private[streaming] object StreamingExamples extends Logging 
{
       // We first log something to initialize Spark's default logging, then we 
override the
       // logging level.
       logInfo("Setting log level to [WARN] for streaming example." +
-        " To override add a custom log4j.properties to the classpath.")
+        " To override add a custom log4j2.properties to the classpath.")
       Configurator.setRootLevel(Level.WARN)
     }
   }
diff --git a/docs/running-on-yarn.md b/docs/running-on-yarn.md
index ce7121b806cb..d0e725f6a98f 100644
--- a/docs/running-on-yarn.md
+++ b/docs/running-on-yarn.md
@@ -121,15 +121,15 @@ all environment variables used for launching each 
container. This process is use
 classpath problems in particular. (Note that enabling this requires admin 
privileges on cluster
 settings and a restart of all node managers. Thus, this is not applicable to 
hosted clusters).
 
-To use a custom log4j configuration for the application master or executors, 
here are the options:
+To use a custom log4j2 configuration for the application master or executors, 
here are the options:
 
-- upload a custom `log4j.properties` using `spark-submit`, by adding it to the 
`--files` list of files
+- upload a custom `log4j2.properties` using `spark-submit`, by adding it to 
the `--files` list of files
   to be uploaded with the application.
-- add `-Dlog4j.configuration=<location of configuration file>` to 
`spark.driver.extraJavaOptions`
+- add `-Dlog4j.configurationFile=<location of configuration file>` to 
`spark.driver.extraJavaOptions`
   (for the driver) or `spark.executor.extraJavaOptions` (for executors). Note 
that if using a file,
   the `file:` protocol should be explicitly provided, and the file needs to 
exist locally on all
   the nodes.
-- update the `$SPARK_CONF_DIR/log4j.properties` file and it will be 
automatically uploaded along
+- update the `$SPARK_CONF_DIR/log4j2.properties` file and it will be 
automatically uploaded along
   with the other configurations. Note that other 2 options has higher priority 
than this option if
   multiple options are specified.
 
diff --git 
a/examples/src/main/scala/org/apache/spark/examples/streaming/StreamingExamples.scala
 
b/examples/src/main/scala/org/apache/spark/examples/streaming/StreamingExamples.scala
index 20c5eb170015..9289b005e3ba 100644
--- 
a/examples/src/main/scala/org/apache/spark/examples/streaming/StreamingExamples.scala
+++ 
b/examples/src/main/scala/org/apache/spark/examples/streaming/StreamingExamples.scala
@@ -31,7 +31,7 @@ object StreamingExamples extends Logging {
       // We first log something to initialize Spark's default logging, then we 
override the
       // logging level.
       logInfo("Setting log level to [WARN] for streaming example." +
-        " To override add a custom log4j.properties to the classpath.")
+        " To override add a custom log4j2.properties to the classpath.")
       Configurator.setRootLevel(Level.WARN)
     }
   }
diff --git 
a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
 
b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 2637b2eab80e..5cd69314d28f 100644
--- 
a/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ 
b/resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -293,7 +293,7 @@ class YarnClusterSuite extends BaseYarnClusterSuite {
   }
 
   test("running Spark in yarn-cluster mode displays driver log links") {
-    val log4jConf = new File(tempDir, "log4j.properties")
+    val log4jConf = new File(tempDir, "log4j2.properties")
     val logOutFile = new File(tempDir, "logs")
     Files.write(
       s"""rootLogger.level = debug


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to