This is an automated email from the ASF dual-hosted git repository.

danny0405 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new be0a852dab0 [HUDI-6774] Prefix HiveConf properties to Hoodie catalog properties map with 'hadoop.' (#10686)
be0a852dab0 is described below

commit be0a852dab0e2865ac3119a9eb190ab9d2d76ec8
Author: wombatu-kun <[email protected]>
AuthorDate: Wed Feb 21 11:22:56 2024 +0700

    [HUDI-6774] Prefix HiveConf properties to Hoodie catalog properties map with 'hadoop.' (#10686)
    
    Co-authored-by: Vova Kolmakov <[email protected]>
---
 .../org/apache/hudi/table/catalog/HoodieHiveCatalog.java    |  1 +
 .../apache/hudi/table/catalog/TestHoodieHiveCatalog.java    | 13 +++++++++++++
 2 files changed, 14 insertions(+)

diff --git a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/catalog/HoodieHiveCatalog.java b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/catalog/HoodieHiveCatalog.java
index 0d3a478f59d..3e409d11f5d 100644
--- a/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/catalog/HoodieHiveCatalog.java
+++ b/hudi-flink-datasource/hudi-flink/src/main/java/org/apache/hudi/table/catalog/HoodieHiveCatalog.java
@@ -549,6 +549,7 @@ public class HoodieHiveCatalog extends AbstractCatalog {
     hiveTable.setCreateTime((int) (System.currentTimeMillis() / 1000));
 
     Map<String, String> properties = new HashMap<>(table.getOptions());
+    hiveConf.getAllProperties().forEach((k, v) -> properties.put("hadoop." + k, String.valueOf(v)));
 
     if (external) {
       hiveTable.setTableType(TableType.EXTERNAL_TABLE.toString());
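
For context, the line added above copies every HiveConf entry into the Hoodie table's properties map under a "hadoop." prefix. A minimal standalone sketch of that prefixing behavior (the class name and the sample property are illustrative, not part of the commit):

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class HadoopPrefixSketch {
  public static void main(String[] args) {
    // Stand-in for hiveConf.getAllProperties(); the key/value are illustrative.
    Properties hiveProps = new Properties();
    hiveProps.setProperty("hive.metastore.schema.verification", "false");

    // Same prefixing as the added line above: every HiveConf property
    // lands in the table properties map under a "hadoop." prefix.
    Map<String, String> properties = new HashMap<>();
    hiveProps.forEach((k, v) -> properties.put("hadoop." + k, String.valueOf(v)));

    // Prints {hadoop.hive.metastore.schema.verification=false}
    System.out.println(properties);
  }
}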
diff --git a/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/table/catalog/TestHoodieHiveCatalog.java b/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/table/catalog/TestHoodieHiveCatalog.java
index 3ee85a46fc4..45fc3d6f386 100644
--- a/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/table/catalog/TestHoodieHiveCatalog.java
+++ b/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/table/catalog/TestHoodieHiveCatalog.java
@@ -370,6 +370,19 @@ public class TestHoodieHiveCatalog {
     assertThrows(NoSuchObjectException.class, () -> getHivePartition(partitionSpec));
   }
 
+  @Test
+  public void testMappingHiveConfPropsToHiveTableParams() throws TableAlreadyExistException, DatabaseNotExistException, TableNotExistException {
+    HoodieHiveCatalog catalog = HoodieCatalogTestUtils.createHiveCatalog("myCatalog", true);
+    catalog.open();
+    Map<String, String> originOptions = new HashMap<>();
+    originOptions.put(FactoryUtil.CONNECTOR.key(), "hudi");
+    CatalogTable table = new CatalogTableImpl(schema, originOptions, "hudi table");
+    catalog.createTable(tablePath, table, false);
+
+    Table hiveTable = hoodieCatalog.getHiveTable(tablePath);
+    assertEquals("false", hiveTable.getParameters().get("hadoop.hive.metastore.schema.verification"));
+  }
+
   private Partition getHivePartition(CatalogPartitionSpec partitionSpec) throws Exception {
     return hoodieCatalog.getClient().getPartition(
         tablePath.getDatabaseName(),
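
A note on the new test: it assumes hive.metastore.schema.verification resolves to "false" in the test HiveConf, so after createTable the prefixed key hadoop.hive.metastore.schema.verification is expected among the Hive table parameters. The "hadoop." prefix presumably matches the convention the Hudi Flink connector uses to recover Hadoop/Hive settings from catalog table options.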
