This is an automated email from the ASF dual-hosted git repository.

leesf pushed a commit to branch redo-log
in repository https://gitbox.apache.org/repos/asf/incubator-hudi.git


The following commit(s) were added to refs/heads/redo-log by this push:
     new f88d6ca  [HUDI-454] Redo hudi-cli log statements using SLF4J (#1178)
f88d6ca is described below

commit f88d6cab7e586f7273bb3a39311b643408638f86
Author: ForwardXu <forwardxu...@gmail.com>
AuthorDate: Tue Jan 7 09:21:40 2020 +0800

    [HUDI-454] Redo hudi-cli log statements using SLF4J (#1178)
---
 hudi-cli/pom.xml                                                  | 5 +++--
 .../main/java/org/apache/hudi/cli/commands/CompactionCommand.java | 6 +++---
 .../org/apache/hudi/cli/commands/HDFSParquetImportCommand.java    | 4 ----
 .../src/main/java/org/apache/hudi/cli/commands/SparkMain.java     | 7 ++++---
 .../main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java  | 8 ++++----
 hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java   | 2 --
 hudi-cli/src/main/scala/org/apache/hudi/cli/DedupeSparkJob.scala  | 6 +++---
 7 files changed, 17 insertions(+), 21 deletions(-)

diff --git a/hudi-cli/pom.xml b/hudi-cli/pom.xml
index 62b55de..74b31bc 100644
--- a/hudi-cli/pom.xml
+++ b/hudi-cli/pom.xml
@@ -146,8 +146,9 @@
 
     <!-- Logging -->
     <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>${slf4j.version}</version>
     </dependency>
 
     <dependency>
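
Note: slf4j-api is only the logging facade; a concrete binding must also be on the runtime classpath, or SLF4J falls back to a no-op logger (with a one-time console warning). A minimal sketch of one possible binding dependency, assuming ${slf4j.version} is defined in the parent pom as the hunk above already does, and that the module keeps a log4j 1.2 configuration (slf4j-log4j12 is the standard SLF4J-to-log4j-1.2 bridge; whether this commit relies on it is an assumption):

    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-log4j12</artifactId>
      <version>${slf4j.version}</version>
      <scope>runtime</scope>
    </dependency>
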
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CompactionCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CompactionCommand.java
index 716b810..a75249e 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CompactionCommand.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/CompactionCommand.java
@@ -42,10 +42,10 @@ import org.apache.hudi.exception.HoodieIOException;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Logger;
 import org.apache.spark.launcher.SparkLauncher;
 import org.apache.spark.util.Utils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.shell.core.CommandMarker;
 import org.springframework.shell.core.annotation.CliCommand;
 import org.springframework.shell.core.annotation.CliOption;
@@ -68,7 +68,7 @@ import java.util.stream.Collectors;
 @Component
 public class CompactionCommand implements CommandMarker {
 
-  private static final Logger LOG = LogManager.getLogger(CompactionCommand.class);
+  private static final Logger LOG = LoggerFactory.getLogger(CompactionCommand.class);
 
   private static final String TMP_DIR = "/tmp/";
 
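This import swap plus a LoggerFactory lookup is the pattern applied across the CLI classes below. A minimal self-contained sketch of the resulting idiom (the class name is illustrative, not from the commit):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class CommandSketch {
      // One static logger per class; passing the Class object is equivalent
      // to passing CommandSketch.class.getName() as a string.
      private static final Logger LOG = LoggerFactory.getLogger(CommandSketch.class);

      public static void main(String[] args) {
        LOG.info("compaction command initialized");
      }
    }
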
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HDFSParquetImportCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HDFSParquetImportCommand.java
index e892ca9..06a59ff 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HDFSParquetImportCommand.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/HDFSParquetImportCommand.java
@@ -24,8 +24,6 @@ import org.apache.hudi.cli.utils.InputStreamConsumer;
 import org.apache.hudi.cli.utils.SparkUtil;
 import org.apache.hudi.utilities.HDFSParquetImporter.FormatValidator;
 
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Logger;
 import org.apache.spark.launcher.SparkLauncher;
 import org.apache.spark.util.Utils;
 import org.springframework.shell.core.CommandMarker;
@@ -41,8 +39,6 @@ import scala.collection.JavaConverters;
 @Component
 public class HDFSParquetImportCommand implements CommandMarker {
 
-  private static final Logger LOG = LogManager.getLogger(HDFSParquetImportCommand.class);
-
   @CliCommand(value = "hdfsparquetimport", help = "Imports Parquet dataset to a hoodie dataset")
   public String convert(
       @CliOption(key = "upsert", mandatory = false, unspecifiedDefaultValue = "false",
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java
index 5d08921..15fe31b 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java
@@ -32,16 +32,17 @@ import org.apache.hudi.utilities.HoodieCompactionAdminTool;
 import org.apache.hudi.utilities.HoodieCompactionAdminTool.Operation;
 import org.apache.hudi.utilities.HoodieCompactor;
 
-import org.apache.log4j.Logger;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SQLContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class deals with initializing spark context based on command entered to hudi-cli.
  */
 public class SparkMain {
 
-  private static final Logger LOG = Logger.getLogger(SparkMain.class);
+  private static final Logger LOG = LoggerFactory.getLogger(SparkMain.class);
 
   /**
    * Commands.
@@ -52,7 +53,7 @@ public class SparkMain {
 
   public static void main(String[] args) throws Exception {
     String command = args[0];
-    LOG.info("Invoking SparkMain:" + command);
+    LOG.info("Invoking SparkMain:{}", command);
 
     SparkCommand cmd = SparkCommand.valueOf(command);
 
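Beyond the import swap, the log statement itself changes from string concatenation to SLF4J's parameterized form, which skips formatting the message when the level is disabled. A minimal sketch of the difference (class name and command value are illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class SparkMainSketch {
      private static final Logger LOG = LoggerFactory.getLogger(SparkMainSketch.class);

      public static void main(String[] args) {
        String command = "COMPACT_RUN";
        // Concatenation builds the string eagerly, even if INFO is disabled:
        //   LOG.info("Invoking SparkMain:" + command);
        // The {} placeholder defers formatting until the level check passes:
        LOG.info("Invoking SparkMain:{}", command);
      }
    }
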
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java
index 73aa45c..33a9a15 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/InputStreamConsumer.java
@@ -22,14 +22,15 @@ import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
-import java.util.logging.Logger;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This class is responsible to read a Process output.
  */
 public class InputStreamConsumer extends Thread {
 
-  private static final Logger LOG = Logger.getLogger(InputStreamConsumer.class.getName());
+  private static final Logger LOG = LoggerFactory.getLogger(InputStreamConsumer.class.getName());
   private InputStream is;
 
   public InputStreamConsumer(InputStream is) {
@@ -46,8 +47,7 @@ public class InputStreamConsumer extends Thread {
         LOG.info(line);
       }
     } catch (IOException ioe) {
-      LOG.severe(ioe.toString());
-      ioe.printStackTrace();
+      LOG.error("InputStreamConsumer failed:", ioe);
     }
   }
 
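Here the java.util.logging calls give way to SLF4J's error(String, Throwable) overload, which records the message and the full stack trace in one call and makes the separate printStackTrace() redundant. A minimal sketch of the idiom (class and message are illustrative):

    import java.io.IOException;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ConsumerSketch {
      private static final Logger LOG = LoggerFactory.getLogger(ConsumerSketch.class);

      public static void main(String[] args) {
        try {
          throw new IOException("stream closed");
        } catch (IOException ioe) {
          // An exception passed as the final argument is logged together with
          // its stack trace by the binding; no printStackTrace() needed.
          LOG.error("InputStreamConsumer failed:", ioe);
        }
      }
    }
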
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java
index 5b5a3f5..4ea6f50 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java
@@ -24,7 +24,6 @@ import org.apache.hudi.cli.commands.SparkMain;
 import org.apache.hudi.common.util.FSUtils;
 import org.apache.hudi.common.util.StringUtils;
 
-import org.apache.log4j.Logger;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.launcher.SparkLauncher;
@@ -38,7 +37,6 @@ import java.util.Map;
  */
 public class SparkUtil {
 
-  private static final Logger LOG = Logger.getLogger(SparkUtil.class);
   public static final String DEFUALT_SPARK_MASTER = "yarn-client";
 
   /**
diff --git a/hudi-cli/src/main/scala/org/apache/hudi/cli/DedupeSparkJob.scala b/hudi-cli/src/main/scala/org/apache/hudi/cli/DedupeSparkJob.scala
index f40501a..e192795 100644
--- a/hudi-cli/src/main/scala/org/apache/hudi/cli/DedupeSparkJob.scala
+++ b/hudi-cli/src/main/scala/org/apache/hudi/cli/DedupeSparkJob.scala
@@ -25,8 +25,8 @@ import org.apache.hudi.common.table.HoodieTableMetaClient
 import org.apache.hudi.common.table.view.HoodieTableFileSystemView
 import org.apache.hudi.common.util.FSUtils
 import org.apache.hudi.exception.HoodieException
-import org.apache.log4j.Logger
 import org.apache.spark.sql.{DataFrame, SQLContext}
+import org.slf4j.{Logger, LoggerFactory}
 
 import scala.collection.JavaConversions._
 import scala.collection.mutable._
@@ -43,7 +43,7 @@ class DedupeSparkJob(basePath: String,
 
 
   val sparkHelper = new SparkHelper(sqlContext, fs)
-  val LOG = Logger.getLogger(this.getClass)
+  val LOG: Logger = LoggerFactory.getLogger(this.getClass)
 
 
   /**
@@ -150,7 +150,7 @@ class DedupeSparkJob(basePath: String,
       val commitTime = FSUtils.getCommitTime(fileNameToPathMap(fileName).getName)
       val badFilePath = new Path(s"$repairOutputPath/${fileNameToPathMap(fileName).getName}.bad")
       val newFilePath = new Path(s"$repairOutputPath/${fileNameToPathMap(fileName).getName}")
-      LOG.info(" Skipping and writing new file for : " + fileName)
+      LOG.info(" Skipping and writing new file for : {}", fileName)
       SparkHelpers.skipKeysAndWriteNewFile(commitTime, fs, badFilePath, newFilePath, dupeFixPlan(fileName))
       fs.delete(badFilePath, false)
     }
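
The Scala job uses the same facade; LoggerFactory.getLogger(this.getClass) names the logger after the runtime class, so a subclass would log under its own name rather than the declaring class's. A small Java sketch of that behavior (class names are illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class Base {
      // getClass() resolves at runtime: a Derived instance logs as "Derived".
      protected final Logger log = LoggerFactory.getLogger(getClass());
    }

    class Derived extends Base {
      void run() {
        log.info("logger name: {}", log.getName());
      }
    }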
