This is an automated email from the ASF dual-hosted git repository.
yihua pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new f30b3aef3e [MINOR] Fix a potential NPE and some finer points of hudi
cli (#5656)
f30b3aef3e is described below
commit f30b3aef3e5e12e9119d6b5294c94a0282719e00
Author: luoyajun <[email protected]>
AuthorDate: Wed May 25 02:13:18 2022 +0800
[MINOR] Fix a potential NPE and some finer points of hudi cli (#5656)
---
.../org/apache/hudi/cli/commands/MetadataCommand.java | 2 +-
.../java/org/apache/hudi/cli/commands/SparkMain.java | 2 +-
.../hudi/cli/commands/UpgradeOrDowngradeCommand.java | 4 ++--
.../java/org/apache/hudi/cli/utils/SparkUtil.java | 19 +++++++++++--------
.../org/apache/hudi/cli/testutils/SparkUtilTest.java | 11 +++++++++++
5 files changed, 26 insertions(+), 12 deletions(-)
diff --git
a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/MetadataCommand.java
b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/MetadataCommand.java
index 8ab6c0ca4f..637f1393f5 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/MetadataCommand.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/MetadataCommand.java
@@ -364,7 +364,7 @@ public class MetadataCommand implements CommandMarker {
private void initJavaSparkContext(Option<String> userDefinedMaster) {
if (jsc == null) {
-      jsc = SparkUtil.initJavaSparkConf(SparkUtil.getDefaultConf("HoodieCLI", userDefinedMaster));
+      jsc = SparkUtil.initJavaSparkContext(SparkUtil.getDefaultConf("HoodieCLI", userDefinedMaster));
}
}
}
\ No newline at end of file
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java
b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java
index 323c7bb5c3..9fe83f1995 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java
@@ -95,7 +95,7 @@ public class SparkMain {
LOG.info("Invoking SparkMain: " + commandString);
final SparkCommand cmd = SparkCommand.valueOf(commandString);
-    JavaSparkContext jsc = SparkUtil.initJavaSparkConf("hoodie-cli-" + commandString,
+    JavaSparkContext jsc = SparkUtil.initJavaSparkContext("hoodie-cli-" + commandString,
         Option.of(args[1]), Option.of(args[2]));
int returnCode = 0;
diff --git
a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/UpgradeOrDowngradeCommand.java
b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/UpgradeOrDowngradeCommand.java
index a5e513c614..2ddb88792c 100644
---
a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/UpgradeOrDowngradeCommand.java
+++
b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/UpgradeOrDowngradeCommand.java
@@ -56,7 +56,7 @@ public class UpgradeOrDowngradeCommand implements CommandMarker {
if (exitCode != 0) {
       return String.format("Failed: Could not Upgrade/Downgrade Hoodie table to \"%s\".", toVersion);
}
- return String.format("Hoodie table upgraded/downgraded to ", toVersion);
+ return String.format("Hoodie table upgraded/downgraded to %s", toVersion);
}
@CliCommand(value = "downgrade table", help = "Downgrades a table")
@@ -78,6 +78,6 @@ public class UpgradeOrDowngradeCommand implements CommandMarker {
if (exitCode != 0) {
       return String.format("Failed: Could not Upgrade/Downgrade Hoodie table to \"%s\".", toVersion);
}
- return String.format("Hoodie table upgraded/downgraded to ", toVersion);
+ return String.format("Hoodie table upgraded/downgraded to %s", toVersion);
}
}
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java
b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java
index ae99b0b824..bcccb66b37 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/utils/SparkUtil.java
@@ -32,8 +32,8 @@ import org.apache.spark.launcher.SparkLauncher;
import java.io.File;
import java.net.URISyntaxException;
+import java.util.Arrays;
import java.util.Map;
-import java.util.Objects;
import java.util.Properties;
/**
@@ -56,9 +56,12 @@ public class SparkUtil {
if (!StringUtils.isNullOrEmpty(propertiesFile)) {
sparkLauncher.setPropertiesFile(propertiesFile);
}
+
File libDirectory = new File(new File(currentJar).getParent(), "lib");
- for (String library : Objects.requireNonNull(libDirectory.list())) {
- sparkLauncher.addJar(new File(libDirectory, library).getAbsolutePath());
+      // This lib directory may be not required, such as providing libraries through a bundle jar
+ if (libDirectory.exists()) {
+      Arrays.stream(libDirectory.list()).forEach(library ->
+          sparkLauncher.addJar(new File(libDirectory, library).getAbsolutePath()));
}
return sparkLauncher;
}
@@ -99,20 +102,20 @@ public class SparkUtil {
return sparkConf;
}
- public static JavaSparkContext initJavaSparkConf(String name) {
- return initJavaSparkConf(name, Option.empty(), Option.empty());
+ public static JavaSparkContext initJavaSparkContext(String name) {
+ return initJavaSparkContext(name, Option.empty(), Option.empty());
}
-  public static JavaSparkContext initJavaSparkConf(String name, Option<String> master, Option<String> executorMemory) {
+  public static JavaSparkContext initJavaSparkContext(String name, Option<String> master, Option<String> executorMemory) {
SparkConf sparkConf = getDefaultConf(name, master);
if (executorMemory.isPresent()) {
       sparkConf.set(HoodieCliSparkConfig.CLI_EXECUTOR_MEMORY, executorMemory.get());
}
- return initJavaSparkConf(sparkConf);
+ return initJavaSparkContext(sparkConf);
}
- public static JavaSparkContext initJavaSparkConf(SparkConf sparkConf) {
+ public static JavaSparkContext initJavaSparkContext(SparkConf sparkConf) {
SparkRDDWriteClient.registerClasses(sparkConf);
JavaSparkContext jsc = new JavaSparkContext(sparkConf);
jsc.hadoopConfiguration().setBoolean(HoodieCliSparkConfig.CLI_PARQUET_ENABLE_SUMMARY_METADATA,
false);
diff --git
a/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/SparkUtilTest.java
b/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/SparkUtilTest.java
index 4966438292..a470ee1c2a 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/SparkUtilTest.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/testutils/SparkUtilTest.java
@@ -22,11 +22,22 @@ import org.apache.hudi.common.util.Option;
import org.apache.hudi.cli.utils.SparkUtil;
import org.apache.spark.SparkConf;
+import org.apache.spark.launcher.SparkLauncher;
import org.junit.jupiter.api.Test;
+import java.net.URISyntaxException;
+
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
public class SparkUtilTest {
+
+ @Test
+ public void testInitSparkLauncher() throws URISyntaxException {
+ SparkLauncher sparkLauncher = SparkUtil.initLauncher(null);
+ assertNotNull(sparkLauncher);
+ }
+
@Test
public void testGetDefaultSparkConf() {
SparkConf sparkConf = SparkUtil.getDefaultConf("test-spark-app",
Option.of(""));