This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new fe73039 [SPARK-37791][EXAMPLES] Use log4j2 in examples
fe73039 is described below
commit fe73039f991ce2c44bc5bb2dc845c735e6959c14
Author: William Hyun <[email protected]>
AuthorDate: Thu Dec 30 18:32:01 2021 -0800
[SPARK-37791][EXAMPLES] Use log4j2 in examples
### What changes were proposed in this pull request?
This PR aims to use log4j2 in examples.
### Why are the changes needed?
Since Spark is migrating to log4j2, we should use it in our examples as well.
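For reference, the core of the migration is replacing the log4j 1.x root-logger call with the log4j 2.x `Configurator` API. A minimal before/after sketch, mirroring the change applied to each example below:
```scala
// log4j 1.x (before):
//   import org.apache.log4j.{Level, Logger}
//   Logger.getRootLogger.setLevel(Level.WARN)

// log4j 2.x (after): set the root logger level programmatically.
import org.apache.logging.log4j.Level
import org.apache.logging.log4j.core.config.Configurator

Configurator.setRootLevel(Level.WARN)
```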
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Manual review.
Closes #35074 from williamhyun/log4j2.
Authored-by: William Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../org/apache/spark/examples/mllib/BinaryClassification.scala | 5 +++--
.../main/scala/org/apache/spark/examples/mllib/DenseKMeans.scala | 5 +++--
.../main/scala/org/apache/spark/examples/mllib/LDAExample.scala | 5 +++--
.../main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala | 5 +++--
.../spark/examples/mllib/PowerIterationClusteringExample.scala | 5 +++--
.../scala/org/apache/spark/examples/mllib/SparseNaiveBayes.scala | 5 +++--
.../org/apache/spark/examples/streaming/StreamingExamples.scala | 8 +++++---
.../org/apache/spark/examples/streaming/KinesisWordCountASL.scala | 8 +++++---
8 files changed, 28 insertions(+), 18 deletions(-)
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassification.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassification.scala
index 6fc3501..6748ffb 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassification.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/BinaryClassification.scala
@@ -18,7 +18,8 @@
// scalastyle:off println
package org.apache.spark.examples.mllib
-import org.apache.log4j.{Level, Logger}
+import org.apache.logging.log4j.Level
+import org.apache.logging.log4j.core.config.Configurator
import scopt.OptionParser
import org.apache.spark.{SparkConf, SparkContext}
@@ -105,7 +106,7 @@ object BinaryClassification {
val conf = new SparkConf().setAppName(s"BinaryClassification with $params")
val sc = new SparkContext(conf)
- Logger.getRootLogger.setLevel(Level.WARN)
+ Configurator.setRootLevel(Level.WARN)
val examples = MLUtils.loadLibSVMFile(sc, params.input).cache()
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/DenseKMeans.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/DenseKMeans.scala
index 0259df2..0aa30a6 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/DenseKMeans.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/DenseKMeans.scala
@@ -18,7 +18,8 @@
// scalastyle:off println
package org.apache.spark.examples.mllib
-import org.apache.log4j.{Level, Logger}
+import org.apache.logging.log4j.Level
+import org.apache.logging.log4j.core.config.Configurator
import scopt.OptionParser
import org.apache.spark.{SparkConf, SparkContext}
@@ -79,7 +80,7 @@ object DenseKMeans {
val conf = new SparkConf().setAppName(s"DenseKMeans with $params")
val sc = new SparkContext(conf)
- Logger.getRootLogger.setLevel(Level.WARN)
+ Configurator.setRootLevel(Level.WARN)
val examples = sc.textFile(params.input).map { line =>
Vectors.dense(line.split(' ').map(_.toDouble))
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/LDAExample.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/LDAExample.scala
index 605ca68..a3006a1 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/LDAExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/LDAExample.scala
@@ -20,7 +20,8 @@ package org.apache.spark.examples.mllib
import java.util.Locale
-import org.apache.log4j.{Level, Logger}
+import org.apache.logging.log4j.Level
+import org.apache.logging.log4j.core.config.Configurator
import scopt.OptionParser
import org.apache.spark.{SparkConf, SparkContext}
@@ -111,7 +112,7 @@ object LDAExample {
val conf = new SparkConf().setAppName(s"LDAExample with $params")
val sc = new SparkContext(conf)
- Logger.getRootLogger.setLevel(Level.WARN)
+ Configurator.setRootLevel(Level.WARN)
// Load documents, and prepare them for LDA.
val preprocessStart = System.nanoTime()
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala
index 92c85c9..23523d7 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/MovieLensALS.scala
@@ -20,7 +20,8 @@ package org.apache.spark.examples.mllib
import scala.collection.mutable
-import org.apache.log4j.{Level, Logger}
+import org.apache.logging.log4j.Level
+import org.apache.logging.log4j.core.config.Configurator
import scopt.OptionParser
import org.apache.spark.{SparkConf, SparkContext}
@@ -103,7 +104,7 @@ object MovieLensALS {
}
val sc = new SparkContext(conf)
- Logger.getRootLogger.setLevel(Level.WARN)
+ Configurator.setRootLevel(Level.WARN)
val implicitPrefs = params.implicitPrefs
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/PowerIterationClusteringExample.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/PowerIterationClusteringExample.scala
index eaf1dac..2a77702 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/PowerIterationClusteringExample.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/PowerIterationClusteringExample.scala
@@ -18,7 +18,8 @@
// scalastyle:off println
package org.apache.spark.examples.mllib
-import org.apache.log4j.{Level, Logger}
+import org.apache.logging.log4j.Level
+import org.apache.logging.log4j.core.config.Configurator
import scopt.OptionParser
import org.apache.spark.{SparkConf, SparkContext}
@@ -90,7 +91,7 @@ object PowerIterationClusteringExample {
.setAppName(s"PowerIterationClustering with $params")
val sc = new SparkContext(conf)
- Logger.getRootLogger.setLevel(Level.WARN)
+ Configurator.setRootLevel(Level.WARN)
// $example on$
val circlesRdd = generateCirclesRdd(sc, params.k, params.numPoints)
diff --git a/examples/src/main/scala/org/apache/spark/examples/mllib/SparseNaiveBayes.scala b/examples/src/main/scala/org/apache/spark/examples/mllib/SparseNaiveBayes.scala
index b501f4d..cadfcf2 100644
--- a/examples/src/main/scala/org/apache/spark/examples/mllib/SparseNaiveBayes.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/mllib/SparseNaiveBayes.scala
@@ -18,7 +18,8 @@
// scalastyle:off println
package org.apache.spark.examples.mllib
-import org.apache.log4j.{Level, Logger}
+import org.apache.logging.log4j.Level
+import org.apache.logging.log4j.core.config.Configurator
import scopt.OptionParser
import org.apache.spark.{SparkConf, SparkContext}
@@ -70,7 +71,7 @@ object SparseNaiveBayes {
val conf = new SparkConf().setAppName(s"SparseNaiveBayes with $params")
val sc = new SparkContext(conf)
- Logger.getRootLogger.setLevel(Level.WARN)
+ Configurator.setRootLevel(Level.WARN)
val minPartitions =
  if (params.minPartitions > 0) params.minPartitions else sc.defaultMinPartitions
diff --git a/examples/src/main/scala/org/apache/spark/examples/streaming/StreamingExamples.scala b/examples/src/main/scala/org/apache/spark/examples/streaming/StreamingExamples.scala
index 073f972..fe336b5 100644
--- a/examples/src/main/scala/org/apache/spark/examples/streaming/StreamingExamples.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/streaming/StreamingExamples.scala
@@ -17,7 +17,9 @@
package org.apache.spark.examples.streaming
-import org.apache.log4j.{Level, Logger}
+import org.apache.logging.log4j.{Level, LogManager}
+import org.apache.logging.log4j.core.Logger
+import org.apache.logging.log4j.core.config.Configurator
import org.apache.spark.internal.Logging
@@ -26,13 +28,13 @@ object StreamingExamples extends Logging {
/** Set reasonable logging levels for streaming if the user has not configured log4j. */
def setStreamingLogLevels(): Unit = {
- val log4jInitialized = Logger.getRootLogger.getAllAppenders.hasMoreElements
+ val log4jInitialized = !LogManager.getRootLogger.asInstanceOf[Logger].getAppenders.isEmpty
if (!log4jInitialized) {
// We first log something to initialize Spark's default logging, then we override the
// logging level.
logInfo("Setting log level to [WARN] for streaming example." +
" To override add a custom log4j.properties to the classpath.")
- Logger.getRootLogger.setLevel(Level.WARN)
+ Configurator.setRootLevel(Level.WARN)
}
}
}
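The streaming examples also change how they detect whether the user has already configured log4j: the log4j 1.x `getAllAppenders.hasMoreElements` probe becomes an inspection of the log4j 2.x core `Logger`'s appender map. A minimal standalone sketch of that pattern (the object and method names here are illustrative, not from the patch):
```scala
import org.apache.logging.log4j.{Level, LogManager}
import org.apache.logging.log4j.core.Logger
import org.apache.logging.log4j.core.config.Configurator

object LogLevelSetup {
  def setWarnIfUnconfigured(): Unit = {
    // The log4j2 API root logger is backed by a core Logger; the patch treats an
    // empty appender map as "the user has not supplied a log4j2 configuration".
    val log4jInitialized = !LogManager.getRootLogger.asInstanceOf[Logger].getAppenders.isEmpty
    if (!log4jInitialized) {
      Configurator.setRootLevel(Level.WARN)
    }
  }
}
```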
diff --git a/external/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala b/external/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
index d6a9160..ea47665 100644
--- a/external/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
+++ b/external/kinesis-asl/src/main/scala/org/apache/spark/examples/streaming/KinesisWordCountASL.scala
@@ -25,7 +25,9 @@ import scala.util.Random
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain
import com.amazonaws.services.kinesis.AmazonKinesisClient
import com.amazonaws.services.kinesis.model.PutRecordRequest
-import org.apache.log4j.{Level, Logger}
+import org.apache.logging.log4j.{Level, LogManager}
+import org.apache.logging.log4j.core.Logger
+import org.apache.logging.log4j.core.config.Configurator
import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
@@ -270,13 +272,13 @@ object KinesisWordProducerASL {
private[streaming] object StreamingExamples extends Logging {
// Set reasonable logging levels for streaming if the user has not configured log4j.
def setStreamingLogLevels(): Unit = {
- val log4jInitialized = Logger.getRootLogger.getAllAppenders.hasMoreElements
+ val log4jInitialized = !LogManager.getRootLogger.asInstanceOf[Logger].getAppenders.isEmpty
if (!log4jInitialized) {
// We first log something to initialize Spark's default logging, then we override the
// logging level.
logInfo("Setting log level to [WARN] for streaming example." +
" To override add a custom log4j.properties to the classpath.")
- Logger.getRootLogger.setLevel(Level.WARN)
+ Configurator.setRootLevel(Level.WARN)
}
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]