This is an automated email from the ASF dual-hosted git repository.

danny0405 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 77e2f42c6e4 [HUDI-7071] Throw exceptions when clustering/index jobs 
fail (#10050)
77e2f42c6e4 is described below

commit 77e2f42c6e49478cadf2b6bc5c732ee8673a8d69
Author: ksmou <[email protected]>
AuthorDate: Sat Dec 2 11:42:32 2023 +0800

    [HUDI-7071] Throw exceptions when clustering/index jobs fail (#10050)
---
 .../java/org/apache/hudi/utilities/HoodieClusteringJob.java | 13 ++++++-------
 .../java/org/apache/hudi/utilities/HoodieCompactor.java     | 12 +++---------
 .../main/java/org/apache/hudi/utilities/HoodieIndexer.java  |  9 ++++-----
 3 files changed, 13 insertions(+), 21 deletions(-)

diff --git 
a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieClusteringJob.java
 
b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieClusteringJob.java
index 5cb6b9bbb15..ef7de13b34f 100644
--- 
a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieClusteringJob.java
+++ 
b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieClusteringJob.java
@@ -29,6 +29,7 @@ import org.apache.hudi.common.table.timeline.HoodieTimeline;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.common.util.StringUtils;
 import org.apache.hudi.config.HoodieCleanConfig;
+import org.apache.hudi.exception.HoodieException;
 import org.apache.hudi.table.HoodieSparkTable;
 
 import com.beust.jcommander.JCommander;
@@ -146,19 +147,17 @@ public class HoodieClusteringJob {
 
     if (cfg.help || args.length == 0) {
       cmd.usage();
-      System.exit(1);
+      throw new HoodieException("Clustering failed for basePath: " + 
cfg.basePath);
     }
 
     final JavaSparkContext jsc = UtilHelpers.buildSparkContext("clustering-" + 
cfg.tableName, cfg.sparkMaster, cfg.sparkMemory);
-    HoodieClusteringJob clusteringJob = new HoodieClusteringJob(jsc, cfg);
-    int result = clusteringJob.cluster(cfg.retry);
+    int result = new HoodieClusteringJob(jsc, cfg).cluster(cfg.retry);
     String resultMsg = String.format("Clustering with basePath: %s, tableName: 
%s, runningMode: %s",
         cfg.basePath, cfg.tableName, cfg.runningMode);
-    if (result == -1) {
-      LOG.error(resultMsg + " failed");
-    } else {
-      LOG.info(resultMsg + " success");
+    if (result != 0) {
+      throw new HoodieException(resultMsg + " failed");
     }
+    LOG.info(resultMsg + " success");
     jsc.stop();
   }
 
diff --git 
a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java 
b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java
index c8bdf0da3a0..74229be7ca6 100644
--- 
a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java
+++ 
b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieCompactor.java
@@ -172,18 +172,12 @@ public class HoodieCompactor {
       throw new HoodieException("Fail to run compaction for " + cfg.tableName 
+ ", return code: " + 1);
     }
     final JavaSparkContext jsc = UtilHelpers.buildSparkContext("compactor-" + 
cfg.tableName, cfg.sparkMaster, cfg.sparkMemory);
-    int ret = 0;
-    try {
-      ret = new HoodieCompactor(jsc, cfg).compact(cfg.retry);
-    } catch (Throwable throwable) {
-      throw new HoodieException("Fail to run compaction for " + cfg.tableName 
+ ", return code: " + ret, throwable);
-    } finally {
-      jsc.stop();
-    }
-
+    int ret = new HoodieCompactor(jsc, cfg).compact(cfg.retry);
     if (ret != 0) {
       throw new HoodieException("Fail to run compaction for " + cfg.tableName 
+ ", return code: " + ret);
     }
+    LOG.info("Success to run compaction for " + cfg.tableName);
+    jsc.stop();
   }
 
   public int compact(int retry) {
diff --git 
a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieIndexer.java 
b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieIndexer.java
index 58c4eb46992..5c626a53ae7 100644
--- a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieIndexer.java
+++ b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieIndexer.java
@@ -149,7 +149,7 @@ public class HoodieIndexer {
 
     if (cfg.help || args.length == 0) {
       cmd.usage();
-      System.exit(1);
+      throw new HoodieException("Indexing failed for basePath : " + 
cfg.basePath);
     }
 
     final JavaSparkContext jsc = UtilHelpers.buildSparkContext("indexing-" + 
cfg.tableName, cfg.sparkMaster, cfg.sparkMemory);
@@ -157,11 +157,10 @@ public class HoodieIndexer {
     int result = indexer.start(cfg.retry);
     String resultMsg = String.format("Indexing with basePath: %s, tableName: 
%s, runningMode: %s",
         cfg.basePath, cfg.tableName, cfg.runningMode);
-    if (result == -1) {
-      LOG.error(resultMsg + " failed");
-    } else {
-      LOG.info(resultMsg + " success");
+    if (result != 0) {
+      throw new HoodieException(resultMsg + " failed");
     }
+    LOG.info(resultMsg + " success");
     jsc.stop();
   }
 

Reply via email to the mailing list.