This is an automated email from the ASF dual-hosted git repository.

eyal pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/datafu.git

The following commit(s) were added to refs/heads/master by this push:
     new e0e9e6c  Log4j2 Upgrade
     new a304691  Merge pull request #31 from arpitbhardwaj/log4j-upgrade
e0e9e6c is described below

commit e0e9e6c20444ac17dc15ff513abf1805db14de57
Author: arpitbhardwaj <arpitbhardwa...@gmail.com>
AuthorDate: Wed Oct 12 10:51:38 2022 +0530

    Log4j2 Upgrade
---
 datafu-spark/build.gradle                               | 12 ++++++++++--
 .../main/scala/datafu/spark/PythonPathsManager.scala    |  6 ++----
 .../main/scala/datafu/spark/ScalaPythonBridge.scala     |  8 ++++----
 .../spark/utils/overwrites/SparkPythonRunner.scala      |  5 ++---
 .../resources/{log4j.properties => log4j2.properties}   | 19 +++++++++++++++----
 .../scala/datafu/spark/TestScalaPythonBridge.scala      |  7 ++-----
 .../src/test/scala/datafu/spark/TestSparkUDAFs.scala    |  4 ++--
 gradle/dependency-versions.gradle                       |  1 +
 8 files changed, 38 insertions(+), 24 deletions(-)

diff --git a/datafu-spark/build.gradle b/datafu-spark/build.gradle
index 5740f02..6b86920 100644
--- a/datafu-spark/build.gradle
+++ b/datafu-spark/build.gradle
@@ -55,10 +55,13 @@ cleanEclipse {
 }
 
 dependencies {
+    compile "org.apache.logging.log4j:log4j-api:$log4j2Version"
     compile "org.scala-lang:scala-library:$scalaVersion"
-    compile "org.apache.spark:spark-core_" + scalaCompatVersion + ":" + sparkVersion
+    compile ("org.apache.spark:spark-core_" + scalaCompatVersion + ":" + sparkVersion)
     compile "org.apache.spark:spark-hive_" + scalaCompatVersion + ":" + sparkVersion
+    testCompile "org.apache.logging.log4j:log4j-1.2-api:$log4j2Version"
+    testCompile "org.apache.logging.log4j:log4j-slf4j-impl:$log4j2Version"
     testCompile "com.holdenkarau:spark-testing-base_" + scalaCompatVersion + ":" + sparkVersion + "_" + sparkTestingBaseVersion
     testCompile "org.scalatest:scalatest_" + scalaCompatVersion + ":" + scalaTestVersion
 }
@@ -140,7 +143,6 @@ tasks.compileTestScala.dependsOn('zipPySpark')
 
 tasks.eclipse.dependsOn('downloadPy4js')
 tasks.compileTestScala.dependsOn('downloadPy4js')
-
 test {
     systemProperty 'datafu.jar.dir', file('build/libs')
     systemProperty 'datafu.data.dir', file('data')
@@ -149,3 +151,9 @@ test {
 
     maxHeapSize = "2G"
 }
+
+configurations.all {
+    exclude group: 'log4j', module:'log4j'
+    exclude group: 'log4j', module:'apache-log4j-extras'
+    exclude group: 'org.slf4j', module:'slf4j-log4j12'
+}
\ No newline at end of file
diff --git a/datafu-spark/src/main/scala/datafu/spark/PythonPathsManager.scala b/datafu-spark/src/main/scala/datafu/spark/PythonPathsManager.scala
index 26377b8..b96bde7 100644
--- a/datafu-spark/src/main/scala/datafu/spark/PythonPathsManager.scala
+++ b/datafu-spark/src/main/scala/datafu/spark/PythonPathsManager.scala
@@ -23,10 +23,8 @@ import java.net.JarURLConnection
 import java.nio.file.Paths
 import java.util
 import java.util.{MissingResourceException, ServiceLoader}
-
 import scala.collection.JavaConverters._
-
-import org.apache.log4j.Logger
+import org.apache.logging.log4j.{LogManager, Logger}
 
 
 /**
@@ -70,7 +68,7 @@ object PythonPathsManager {
   case class ResolvedResource(resource: PythonResource,
                               resolvedLocation: String)
 
-  private val logger: Logger = Logger.getLogger(getClass)
+  private val logger: Logger = LogManager.getLogger(getClass)
 
   val resources: Seq[ResolvedResource] =
     ServiceLoader
diff --git a/datafu-spark/src/main/scala/datafu/spark/ScalaPythonBridge.scala b/datafu-spark/src/main/scala/datafu/spark/ScalaPythonBridge.scala
index 1acacaa..a641b0d 100644
--- a/datafu-spark/src/main/scala/datafu/spark/ScalaPythonBridge.scala
+++ b/datafu-spark/src/main/scala/datafu/spark/ScalaPythonBridge.scala
@@ -18,12 +18,12 @@
  */
 package datafu.spark
 
+import org.apache.logging.log4j.LogManager
+
 import java.io._
 import java.net.URL
 import java.nio.file.Files
 import java.util.UUID
-
-import org.slf4j.LoggerFactory
 import org.apache.spark.SparkConf
 import org.apache.spark.api.java.JavaSparkContext
 import org.apache.spark.datafu.deploy.SparkPythonRunner
@@ -40,7 +40,7 @@ import org.apache.spark.sql.SparkSession
  */
 case class ScalaPythonBridgeRunner(extraPath: String = "") {
 
-  val logger = LoggerFactory.getLogger(this.getClass)
+  val logger = LogManager.getLogger(this.getClass)
   // for the bridge we take the full resolved location,
   // since this runs on the driver where the files are local:
   logger.info("constructing PYTHONPATH")
@@ -134,7 +134,7 @@ object ScalaPythonBridge { // need empty ctor for py4j gateway
  */
 object ResourceCloning {
 
-  private val logger = LoggerFactory.getLogger(this.getClass)
+  private val logger = LogManager.getLogger(this.getClass)
 
   val uuid = UUID.randomUUID().toString.substring(6)
   val outputTempDir = new File(System.getProperty("java.io.tmpdir"),
diff --git a/datafu-spark/src/main/scala/spark/utils/overwrites/SparkPythonRunner.scala b/datafu-spark/src/main/scala/spark/utils/overwrites/SparkPythonRunner.scala
index 36e8615..14b4f02 100644
--- a/datafu-spark/src/main/scala/spark/utils/overwrites/SparkPythonRunner.scala
+++ b/datafu-spark/src/main/scala/spark/utils/overwrites/SparkPythonRunner.scala
@@ -17,11 +17,10 @@ package org.apache.spark.datafu.deploy
 
 import java.io._
-
 import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
 
 import datafu.spark.ScalaPythonBridge
-import org.apache.log4j.Logger
+import org.apache.logging.log4j.{LogManager, Logger}
 import org.apache.spark.api.python.PythonUtils
 import org.apache.spark.deploy.PythonRunner
 import org.apache.spark.util.Utils
@@ -40,7 +39,7 @@ import org.apache.spark.util.Utils
 case class SparkPythonRunner(pyPaths: String,
                              otherArgs: Array[String] = Array()) {
 
-  val logger: Logger = Logger.getLogger(getClass)
+  val logger: Logger = LogManager.getLogger(getClass)
   val (reader, writer, process) = initPythonEnv()
 
   def runPyFile(pythonFile: String): String = {
diff --git a/datafu-spark/src/test/resources/log4j.properties b/datafu-spark/src/test/resources/log4j2.properties
similarity index 61%
rename from datafu-spark/src/test/resources/log4j.properties
rename to datafu-spark/src/test/resources/log4j2.properties
index bc52d61..519088c 100644
--- a/datafu-spark/src/test/resources/log4j.properties
+++ b/datafu-spark/src/test/resources/log4j2.properties
@@ -15,8 +15,19 @@
 # specific language governing permissions and limitations
 # under the License.
 
-log4j.rootCategory=WARN, stdout
+# Extra logging related to initialization of Log4j
+# Set to debug or trace if log4j initialization is failing
+status = warn
+# Name of the configuration
+name = ConsoleLogConfigDemo
 
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=%d %5p %c{3} - %m%n
+# Console appender configuration
+appender.console.type = Console
+appender.console.name = consoleLogger
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = %d %5p %c{3} - %m%n
+
+# Root logger level
+rootLogger.level = warn
+# Root logger referring to console appender
+rootLogger.appenderRef.stdout.ref = consoleLogger
diff --git a/datafu-spark/src/test/scala/datafu/spark/TestScalaPythonBridge.scala b/datafu-spark/src/test/scala/datafu/spark/TestScalaPythonBridge.scala
index b67386a..b529145 100644
--- a/datafu-spark/src/test/scala/datafu/spark/TestScalaPythonBridge.scala
+++ b/datafu-spark/src/test/scala/datafu/spark/TestScalaPythonBridge.scala
@@ -19,22 +19,19 @@ package datafu.spark
 
 import java.io.File
-
 import scala.util.Try
-
 import com.holdenkarau.spark.testing.Utils
+import org.apache.logging.log4j.LogManager
 import org.junit._
 import org.junit.runner.RunWith
 import org.scalatest.FunSuite
 import org.scalatest.junit.JUnitRunner
-import org.slf4j.LoggerFactory
-
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.SparkSession
 
 object TestScalaPythonBridge {
 
-  val logger = LoggerFactory.getLogger(this.getClass)
+  val logger = LogManager.getLogger(this.getClass)
 
   def getNewRunner(): ScalaPythonBridgeRunner = {
     val runner = ScalaPythonBridgeRunner()
diff --git a/datafu-spark/src/test/scala/datafu/spark/TestSparkUDAFs.scala b/datafu-spark/src/test/scala/datafu/spark/TestSparkUDAFs.scala
index aadd059..37949c3 100644
--- a/datafu-spark/src/test/scala/datafu/spark/TestSparkUDAFs.scala
+++ b/datafu-spark/src/test/scala/datafu/spark/TestSparkUDAFs.scala
@@ -19,11 +19,11 @@ package datafu.spark
 
 import com.holdenkarau.spark.testing.DataFrameSuiteBase
+import org.apache.logging.log4j.LogManager
 import org.junit.Assert
 import org.junit.runner.RunWith
 import org.scalatest.FunSuite
 import org.scalatest.junit.JUnitRunner
-import org.slf4j.LoggerFactory
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.DataFrame
 import org.apache.spark.sql.Row
@@ -45,7 +45,7 @@ class UdafTests extends FunSuite with DataFrameSuiteBase {
   override def conf: SparkConf =
     super.conf.set(CATALOG_IMPLEMENTATION.key, "hive")
 
-  val logger = LoggerFactory.getLogger(this.getClass)
+  val logger = LogManager.getLogger(this.getClass)
 
   val inputSchema = List(
     StructField("col_grp", StringType, true),
diff --git a/gradle/dependency-versions.gradle b/gradle/dependency-versions.gradle
index 22bb7d6..091b37d 100644
--- a/gradle/dependency-versions.gradle
+++ b/gradle/dependency-versions.gradle
@@ -27,6 +27,7 @@ ext {
   guavaVersion="20.0"
   jodaTimeVersion="1.6"
   log4jVersion="1.2.17"
+  log4j2Version="2.19.0"
   mavenVersion="2.1.3"
   jlineVersion="0.9.94"
   pigVersion="0.14.0"
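
For reference, the logger construction pattern that every migrated Scala
source above now uses is the Log4j2 API: LogManager.getLogger replaces both
org.apache.log4j.Logger.getLogger and org.slf4j.LoggerFactory.getLogger. A
minimal sketch, not part of the commit (LoggingExample is a hypothetical
name; a log4j-core runtime and the log4j2.properties above are assumed to be
on the classpath):

    import org.apache.logging.log4j.{LogManager, Logger}

    object LoggingExample { // hypothetical, for illustration only
      // Same pattern as PythonPathsManager / SparkPythonRunner above.
      private val logger: Logger = LogManager.getLogger(getClass)

      def main(args: Array[String]): Unit = {
        // rootLogger.level = warn, so only WARN and above reach the
        // console appender defined in log4j2.properties.
        logger.warn("printed via the Console appender")
        logger.info("suppressed: below the root logger threshold")
      }
    }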
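
The two testCompile additions are the standard Log4j2 bridge artifacts:
log4j-1.2-api re-implements the org.apache.log4j 1.x API on top of Log4j2
(Spark's transitive dependencies still log through it), and log4j-slf4j-impl
routes SLF4J callers to Log4j2. The configurations.all block then excludes
the real Log4j 1.x jars and the old slf4j-log4j12 binding, so the bridges are
the only logging implementations left on the test classpath. A hedged sketch
of what the 1.x bridge enables (BridgeExample is a hypothetical name):

    // With log4j-1.2-api on the classpath, org.apache.log4j.Logger is the
    // bridge's class, so this event lands in the Log4j2 console appender.
    import org.apache.log4j.{Logger => LegacyLogger}

    object BridgeExample { // hypothetical, for illustration only
      def main(args: Array[String]): Unit = {
        LegacyLogger.getLogger("legacy.caller").warn("routed into Log4j2")
      }
    }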