This is an automated email from the ASF dual-hosted git repository.

vinoth pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 0f892ef  [HUDI-692] Add delete savepoint for cli (#1397)
0f892ef is described below

commit 0f892ef62c76436b17030e6edb4642f476d7de1e
Author: hongdd <jn_...@163.com>
AuthorDate: Thu Mar 12 07:49:02 2020 +0800

    [HUDI-692] Add delete savepoint for cli (#1397)
    
    * Add delete savepoint for cli
    * Add check
    * Move JavaSparkContext to try
---
 .../hudi/cli/commands/SavepointsCommand.java       | 39 +++++++++++++++++-----
 1 file changed, 30 insertions(+), 9 deletions(-)

diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SavepointsCommand.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SavepointsCommand.java
index 65a813d..9ef15ac 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SavepointsCommand.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SavepointsCommand.java
@@ -76,17 +76,17 @@ public class SavepointsCommand implements CommandMarker {
       return "Commit " + commitTime + " not found in Commits " + timeline;
     }
 
-    JavaSparkContext jsc = SparkUtil.initJavaSparkConf("Create Savepoint");
-    HoodieWriteClient client = createHoodieClient(jsc, metaClient.getBasePath());
     String result;
-    if (client.savepoint(commitTime, user, comments)) {
-      // Refresh the current
-      refreshMetaClient();
-      result = String.format("The commit \"%s\" has been savepointed.", commitTime);
-    } else {
-      result = String.format("Failed: Could not savepoint commit \"%s\".", commitTime);
+    try (JavaSparkContext jsc = SparkUtil.initJavaSparkConf("Create Savepoint")) {
+      HoodieWriteClient client = createHoodieClient(jsc, metaClient.getBasePath());
+      if (client.savepoint(commitTime, user, comments)) {
+        // Refresh the current
+        refreshMetaClient();
+        result = String.format("The commit \"%s\" has been savepointed.", commitTime);
+      } else {
+        result = String.format("Failed: Could not savepoint commit \"%s\".", commitTime);
+      }
     }
-    jsc.close();
     return result;
   }
 
@@ -127,6 +127,27 @@ public class SavepointsCommand implements CommandMarker {
     return "Metadata for table " + 
HoodieCLI.getTableMetaClient().getTableConfig().getTableName() + " refreshed.";
   }
 
+  @CliCommand(value = "savepoint delete", help = "Delete the savepoint")
+  public String deleteSavepoint(@CliOption(key = {"commit"}, help = "Delete a savepoint") final String commitTime) throws Exception {
+    HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient();
+    HoodieTimeline completedInstants = metaClient.getActiveTimeline().getSavePointTimeline().filterCompletedInstants();
+    if (completedInstants.empty()) {
+      throw new HoodieException("There are no completed savepoint to run delete");
+    }
+    HoodieInstant savePoint = new HoodieInstant(false, HoodieTimeline.SAVEPOINT_ACTION, commitTime);
+
+    if (!completedInstants.containsInstant(savePoint)) {
+      return "Commit " + commitTime + " not found in Commits " + 
completedInstants;
+    }
+
+    try (JavaSparkContext jsc = SparkUtil.initJavaSparkConf("Delete Savepoint")) {
+      HoodieWriteClient client = createHoodieClient(jsc, metaClient.getBasePath());
+      client.deleteSavepoint(commitTime);
+      refreshMetaClient();
+    }
+    return "Savepoint " + commitTime + " deleted";
+  }
+
   private static HoodieWriteClient createHoodieClient(JavaSparkContext jsc, String basePath) throws Exception {
     HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(basePath)
         .withIndexConfig(HoodieIndexConfig.newBuilder().withIndexType(HoodieIndex.IndexType.BLOOM).build()).build();

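For context, a minimal sketch of how the new command might be invoked from the hudi-cli shell, assuming a table has already been connected via the usual "connect --path" flow. The prompt, table name, and commit time below are illustrative only; the command name, the "commit" option, and the result strings come from the diff above:

    hudi:sample_table->savepoint delete --commit 20200312074902
    Savepoint 20200312074902 deleted

If the given instant is not a completed savepoint, the command instead returns the "Commit ... not found in Commits ..." message from the check added above, or raises a HoodieException when no completed savepoints exist on the timeline.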