This is an automated email from the ASF dual-hosted git repository.

leesf pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 32bada2  [HUDI-889] Writer supports useJdbc configuration when hive synchronization is enabled (#1627)
32bada2 is described below

commit 32bada29dc95f1d5910713ae6b4f4a4ef39677c9
Author: cxzl25 <[email protected]>
AuthorDate: Thu May 14 00:20:13 2020 +0800

    [HUDI-889] Writer supports useJdbc configuration when hive synchronization is enabled (#1627)
---
 hudi-spark/src/main/java/org/apache/hudi/DataSourceUtils.java        | 2 ++
 hudi-spark/src/main/scala/org/apache/hudi/DataSourceOptions.scala    | 2 ++
 hudi-spark/src/main/scala/org/apache/hudi/HoodieSparkSqlWriter.scala | 4 +++-
 3 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/hudi-spark/src/main/java/org/apache/hudi/DataSourceUtils.java b/hudi-spark/src/main/java/org/apache/hudi/DataSourceUtils.java
index 34f2ef2..9983c19 100644
--- a/hudi-spark/src/main/java/org/apache/hudi/DataSourceUtils.java
+++ b/hudi-spark/src/main/java/org/apache/hudi/DataSourceUtils.java
@@ -284,6 +284,8 @@ public class DataSourceUtils {
     hiveSyncConfig.partitionValueExtractorClass =
         props.getString(DataSourceWriteOptions.HIVE_PARTITION_EXTRACTOR_CLASS_OPT_KEY(),
             SlashEncodedDayPartitionValueExtractor.class.getName());
+    hiveSyncConfig.useJdbc = Boolean.valueOf(props.getString(DataSourceWriteOptions.HIVE_USE_JDBC_OPT_KEY(),
+            DataSourceWriteOptions.DEFAULT_HIVE_USE_JDBC_OPT_VAL()));
     return hiveSyncConfig;
   }
 }
diff --git a/hudi-spark/src/main/scala/org/apache/hudi/DataSourceOptions.scala b/hudi-spark/src/main/scala/org/apache/hudi/DataSourceOptions.scala
index 9d7d6cc..3d1172f 100644
--- a/hudi-spark/src/main/scala/org/apache/hudi/DataSourceOptions.scala
+++ b/hudi-spark/src/main/scala/org/apache/hudi/DataSourceOptions.scala
@@ -264,6 +264,7 @@ object DataSourceWriteOptions {
   val HIVE_PARTITION_EXTRACTOR_CLASS_OPT_KEY = "hoodie.datasource.hive_sync.partition_extractor_class"
   val HIVE_ASSUME_DATE_PARTITION_OPT_KEY = "hoodie.datasource.hive_sync.assume_date_partitioning"
   val HIVE_USE_PRE_APACHE_INPUT_FORMAT_OPT_KEY = "hoodie.datasource.hive_sync.use_pre_apache_input_format"
+  val HIVE_USE_JDBC_OPT_KEY = "hoodie.datasource.hive_sync.use_jdbc"
 
   // DEFAULT FOR HIVE SPECIFIC CONFIGS
   val DEFAULT_HIVE_SYNC_ENABLED_OPT_VAL = "false"
@@ -276,4 +277,5 @@ object DataSourceWriteOptions {
   val DEFAULT_HIVE_PARTITION_EXTRACTOR_CLASS_OPT_VAL = classOf[SlashEncodedDayPartitionValueExtractor].getCanonicalName
   val DEFAULT_HIVE_ASSUME_DATE_PARTITION_OPT_VAL = "false"
   val DEFAULT_USE_PRE_APACHE_INPUT_FORMAT_OPT_VAL = "false"
+  val DEFAULT_HIVE_USE_JDBC_OPT_VAL = "true"
 }
diff --git a/hudi-spark/src/main/scala/org/apache/hudi/HoodieSparkSqlWriter.scala b/hudi-spark/src/main/scala/org/apache/hudi/HoodieSparkSqlWriter.scala
index 5456782..efcf5e1 100644
--- a/hudi-spark/src/main/scala/org/apache/hudi/HoodieSparkSqlWriter.scala
+++ b/hudi-spark/src/main/scala/org/apache/hudi/HoodieSparkSqlWriter.scala
@@ -217,7 +217,8 @@ private[hudi] object HoodieSparkSqlWriter {
       HIVE_URL_OPT_KEY -> DEFAULT_HIVE_URL_OPT_VAL,
       HIVE_PARTITION_FIELDS_OPT_KEY -> DEFAULT_HIVE_PARTITION_FIELDS_OPT_VAL,
       HIVE_PARTITION_EXTRACTOR_CLASS_OPT_KEY -> DEFAULT_HIVE_PARTITION_EXTRACTOR_CLASS_OPT_VAL,
-      HIVE_STYLE_PARTITIONING_OPT_KEY -> DEFAULT_HIVE_STYLE_PARTITIONING_OPT_VAL
+      HIVE_STYLE_PARTITIONING_OPT_KEY -> DEFAULT_HIVE_STYLE_PARTITIONING_OPT_VAL,
+      HIVE_USE_JDBC_OPT_KEY -> DEFAULT_HIVE_USE_JDBC_OPT_VAL
     ) ++ translateStorageTypeToTableType(parameters)
   }
 
@@ -248,6 +249,7 @@ private[hudi] object HoodieSparkSqlWriter {
     hiveSyncConfig.partitionFields =
       ListBuffer(parameters(HIVE_PARTITION_FIELDS_OPT_KEY).split(",").map(_.trim).filter(!_.isEmpty).toList: _*)
     hiveSyncConfig.partitionValueExtractorClass = parameters(HIVE_PARTITION_EXTRACTOR_CLASS_OPT_KEY)
+    hiveSyncConfig.useJdbc = parameters(HIVE_USE_JDBC_OPT_KEY).toBoolean
     hiveSyncConfig
   }
 

Reply via email to