This is an automated email from the ASF dual-hosted git repository.

leesf pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 46842f4  [MINOR] Remove the declaration of thrown RuntimeException 
(#1305)
46842f4 is described below

commit 46842f4e92202bca9a3a4f972f481c93ccc29a3c
Author: lamber-ken <lamber...@163.com>
AuthorDate: Wed Feb 5 23:23:20 2020 +0800

    [MINOR] Remove the declaration of thrown RuntimeException (#1305)
---
 .../apache/hudi/cli/commands/SparkEnvCommand.java  |  3 +-
 .../apache/hudi/avro/MercifulJsonConverter.java    | 39 ++++++++--------------
 .../hudi/common/table/HoodieTableMetaClient.java   |  5 ++-
 .../org/apache/hudi/common/util/RocksDBDAO.java    |  2 +-
 .../hudi/exception/TableNotFoundException.java     |  3 +-
 .../org/apache/hudi/hive/HoodieHiveClient.java     |  4 +--
 .../apache/hudi/utilities/HDFSParquetImporter.java |  4 +--
 .../hudi/utilities/HoodieWithTimelineServer.java   |  2 +-
 8 files changed, 23 insertions(+), 39 deletions(-)

diff --git 
a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkEnvCommand.java 
b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkEnvCommand.java
index e5a8d4e..d209a08 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkEnvCommand.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkEnvCommand.java
@@ -37,8 +37,7 @@ public class SparkEnvCommand implements CommandMarker {
   public static Map<String, String> env = new HashMap<String, String>();
 
   @CliCommand(value = "set", help = "Set spark launcher env to cli")
-  public void setEnv(@CliOption(key = {"conf"}, help = "Env config to be set") 
final String confMap)
-      throws IllegalArgumentException {
+  public void setEnv(@CliOption(key = {"conf"}, help = "Env config to be set") 
final String confMap) {
     String[] map = confMap.split("=");
     if (map.length != 2) {
       throw new IllegalArgumentException("Illegal set parameter, please use 
like [set --conf SPARK_HOME=/usr/etc/spark]");
diff --git 
a/hudi-common/src/main/java/org/apache/hudi/avro/MercifulJsonConverter.java 
b/hudi-common/src/main/java/org/apache/hudi/avro/MercifulJsonConverter.java
index 20b00f1..3f5df01 100644
--- a/hudi-common/src/main/java/org/apache/hudi/avro/MercifulJsonConverter.java
+++ b/hudi-common/src/main/java/org/apache/hudi/avro/MercifulJsonConverter.java
@@ -143,15 +143,13 @@ public class MercifulJsonConverter {
       return res.getRight();
     }
 
-    protected abstract Pair<Boolean, Object> convert(Object value, String 
name, Schema schema)
-        throws HoodieJsonToAvroConversionException;
+    protected abstract Pair<Boolean, Object> convert(Object value, String 
name, Schema schema);
   }
 
   private static JsonToAvroFieldProcessor generateBooleanTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema) {
         if (value instanceof Boolean) {
           return Pair.of(true, value);
         }
@@ -163,8 +161,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateIntTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema) {
         if (value instanceof Number) {
           return Pair.of(true, ((Number) value).intValue());
         } else if (value instanceof String) {
@@ -178,8 +175,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateDoubleTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema) {
         if (value instanceof Number) {
           return Pair.of(true, ((Number) value).doubleValue());
         } else if (value instanceof String) {
@@ -193,8 +189,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateFloatTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema) {
         if (value instanceof Number) {
           return Pair.of(true, ((Number) value).floatValue());
         } else if (value instanceof String) {
@@ -208,8 +203,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateLongTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema) {
         if (value instanceof Number) {
           return Pair.of(true, ((Number) value).longValue());
         } else if (value instanceof String) {
@@ -223,8 +217,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateStringTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema) {
         return Pair.of(true, value.toString());
       }
     };
@@ -233,8 +226,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateBytesTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema) {
         return Pair.of(true, value.toString().getBytes());
       }
     };
@@ -243,8 +235,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateFixedTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema) {
         byte[] src = value.toString().getBytes();
         byte[] dst = new byte[schema.getFixedSize()];
         System.arraycopy(src, 0, dst, 0, Math.min(schema.getFixedSize(), 
src.length));
@@ -256,8 +247,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateEnumTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema) {
         if (schema.getEnumSymbols().contains(value.toString())) {
           return Pair.of(true, new GenericData.EnumSymbol(schema, 
value.toString()));
         }
@@ -270,8 +260,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateRecordTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema) {
         GenericRecord result = new GenericData.Record(schema);
         return Pair.of(true, convertJsonToAvro((Map<String, Object>) value, 
schema));
       }
@@ -281,8 +270,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateArrayTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema) {
         Schema elementSchema = schema.getElementType();
         List<Object> listRes = new ArrayList<>();
         for (Object v : (List) value) {
@@ -296,8 +284,7 @@ public class MercifulJsonConverter {
   private static JsonToAvroFieldProcessor generateMapTypeHandler() {
     return new JsonToAvroFieldProcessor() {
       @Override
-      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema)
-          throws HoodieJsonToAvroConversionException {
+      public Pair<Boolean, Object> convert(Object value, String name, Schema 
schema) {
         Schema valueSchema = schema.getValueType();
         Map<String, Object> mapRes = new HashMap<>();
         for (Map.Entry<String, Object> v : ((Map<String, Object>) 
value).entrySet()) {
diff --git 
a/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java
 
b/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java
index c1f326a..f66348e 100644
--- 
a/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java
+++ 
b/hudi-common/src/main/java/org/apache/hudi/common/table/HoodieTableMetaClient.java
@@ -84,7 +84,7 @@ public class HoodieTableMetaClient implements Serializable {
   private HoodieArchivedTimeline archivedTimeline;
   private ConsistencyGuardConfig consistencyGuardConfig = 
ConsistencyGuardConfig.newBuilder().build();
 
-  public HoodieTableMetaClient(Configuration conf, String basePath) throws 
TableNotFoundException {
+  public HoodieTableMetaClient(Configuration conf, String basePath) {
     // Do not load any timeline by default
     this(conf, basePath, false);
   }
@@ -104,8 +104,7 @@ public class HoodieTableMetaClient implements Serializable {
   }
 
   public HoodieTableMetaClient(Configuration conf, String basePath, boolean 
loadActiveTimelineOnLoad,
-      ConsistencyGuardConfig consistencyGuardConfig, 
Option<TimelineLayoutVersion> layoutVersion, String payloadClassName)
-      throws TableNotFoundException {
+      ConsistencyGuardConfig consistencyGuardConfig, 
Option<TimelineLayoutVersion> layoutVersion, String payloadClassName) {
     LOG.info("Loading HoodieTableMetaClient from " + basePath);
     this.basePath = basePath;
     this.consistencyGuardConfig = consistencyGuardConfig;
diff --git 
a/hudi-common/src/main/java/org/apache/hudi/common/util/RocksDBDAO.java 
b/hudi-common/src/main/java/org/apache/hudi/common/util/RocksDBDAO.java
index 0115ec2..ec46af4 100644
--- a/hudi-common/src/main/java/org/apache/hudi/common/util/RocksDBDAO.java
+++ b/hudi-common/src/main/java/org/apache/hudi/common/util/RocksDBDAO.java
@@ -83,7 +83,7 @@ public class RocksDBDAO {
   /**
    * Initialized Rocks DB instance.
    */
-  private void init() throws HoodieException {
+  private void init() {
     try {
       LOG.info("DELETING RocksDB persisted at " + rocksDBBasePath);
       FileIOUtils.deleteDirectory(new File(rocksDBBasePath));
diff --git 
a/hudi-common/src/main/java/org/apache/hudi/exception/TableNotFoundException.java
 
b/hudi-common/src/main/java/org/apache/hudi/exception/TableNotFoundException.java
index 07d256b..7666e90 100644
--- 
a/hudi-common/src/main/java/org/apache/hudi/exception/TableNotFoundException.java
+++ 
b/hudi-common/src/main/java/org/apache/hudi/exception/TableNotFoundException.java
@@ -38,8 +38,7 @@ public class TableNotFoundException extends HoodieException {
     return "Hoodie table not found in path " + basePath;
   }
 
-  public static void checkTableValidity(FileSystem fs, Path basePathDir, Path 
metaPathDir)
-      throws TableNotFoundException {
+  public static void checkTableValidity(FileSystem fs, Path basePathDir, Path 
metaPathDir) {
     // Check if the base path is found
     try {
       if (!fs.exists(basePathDir) || !fs.isDirectory(basePathDir)) {
diff --git a/hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java 
b/hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java
index 2081738..16902b1 100644
--- a/hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java
+++ b/hudi-hive/src/main/java/org/apache/hudi/hive/HoodieHiveClient.java
@@ -498,12 +498,12 @@ public class HoodieHiveClient {
    *
    * @param sql SQL statement to execute
    */
-  public CommandProcessorResponse updateHiveSQLUsingHiveDriver(String sql) 
throws HoodieHiveSyncException {
+  public CommandProcessorResponse updateHiveSQLUsingHiveDriver(String sql) {
     List<CommandProcessorResponse> responses = 
updateHiveSQLs(Collections.singletonList(sql));
     return responses.get(responses.size() - 1);
   }
 
-  private List<CommandProcessorResponse> updateHiveSQLs(List<String> sqls) 
throws HoodieHiveSyncException {
+  private List<CommandProcessorResponse> updateHiveSQLs(List<String> sqls) {
     SessionState ss = null;
     org.apache.hadoop.hive.ql.Driver hiveDriver = null;
     List<CommandProcessorResponse> responses = new ArrayList<>();
diff --git 
a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HDFSParquetImporter.java
 
b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HDFSParquetImporter.java
index aaddee7..218df22 100644
--- 
a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HDFSParquetImporter.java
+++ 
b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HDFSParquetImporter.java
@@ -217,7 +217,7 @@ public class HDFSParquetImporter implements Serializable {
     List<String> validCommands = Arrays.asList("insert", "upsert", 
"bulkinsert");
 
     @Override
-    public void validate(String name, String value) throws ParameterException {
+    public void validate(String name, String value) {
       if (value == null || !validCommands.contains(value.toLowerCase())) {
         throw new ParameterException(
             String.format("Invalid command: value:%s: supported commands:%s", 
value, validCommands));
@@ -230,7 +230,7 @@ public class HDFSParquetImporter implements Serializable {
     List<String> validFormats = Collections.singletonList("parquet");
 
     @Override
-    public void validate(String name, String value) throws ParameterException {
+    public void validate(String name, String value) {
       if (value == null || !validFormats.contains(value)) {
         throw new ParameterException(
             String.format("Invalid format type: value:%s: supported 
formats:%s", value, validFormats));
diff --git 
a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieWithTimelineServer.java
 
b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieWithTimelineServer.java
index feb2c21..e7e5cb8 100644
--- 
a/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieWithTimelineServer.java
+++ 
b/hudi-utilities/src/main/java/org/apache/hudi/utilities/HoodieWithTimelineServer.java
@@ -90,7 +90,7 @@ public class HoodieWithTimelineServer implements Serializable 
{
     Preconditions.checkArgument(gotMessages.equals(messages), "Got expected 
reply from Server");
   }
 
-  public String sendRequest(String driverHost, int port) throws 
RuntimeException {
+  public String sendRequest(String driverHost, int port) {
     String url = String.format("http://%s:%d/", driverHost, port);
     try (CloseableHttpClient client = HttpClientBuilder.create().build()) {
 

Reply via email to