This is an automated email from the ASF dual-hosted git repository.
danny0405 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new 2be7c3502d [HUDI-4548] Unpack the column max/min to string instead of
Utf8 for Mor table (#6311)
2be7c3502d is described below
commit 2be7c3502dc58955a314a790ab31f1bb83db1cf4
Author: Danny Chan <[email protected]>
AuthorDate: Mon Aug 8 10:03:16 2022 +0800
[HUDI-4548] Unpack the column max/min to string instead of Utf8 for Mor
table (#6311)
---
.../main/java/org/apache/hudi/metadata/HoodieTableMetadataUtil.java | 2 ++
.../src/test/java/org/apache/hudi/table/ITTestHoodieDataSource.java | 6 ++++--
2 files changed, 6 insertions(+), 2 deletions(-)
diff --git
a/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieTableMetadataUtil.java
b/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieTableMetadataUtil.java
index 2c5b8db0ed..c7a0df5d6a 100644
---
a/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieTableMetadataUtil.java
+++
b/hudi-common/src/main/java/org/apache/hudi/metadata/HoodieTableMetadataUtil.java
@@ -1323,6 +1323,8 @@ public class HoodieTableMetadataUtil {
return (Long) val;
case STRING:
+ // unpack the avro Utf8 if possible
+ return val.toString();
case FLOAT:
case DOUBLE:
case BOOLEAN:
diff --git
a/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/table/ITTestHoodieDataSource.java
b/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/table/ITTestHoodieDataSource.java
index c40831639b..e40c40c996 100644
---
a/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/table/ITTestHoodieDataSource.java
+++
b/hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/table/ITTestHoodieDataSource.java
@@ -1297,14 +1297,16 @@ public class ITTestHoodieDataSource extends
AbstractTestBase {
assertRowsEquals(partitionResult, "[+I[1, 2022-02-02]]");
}
- @Test
- void testWriteAndReadWithDataSkipping() {
+ @ParameterizedTest
+ @EnumSource(value = HoodieTableType.class)
+ void testWriteAndReadWithDataSkipping(HoodieTableType tableType) {
TableEnvironment tableEnv = batchTableEnv;
String hoodieTableDDL = sql("t1")
.option(FlinkOptions.PATH, tempFile.getAbsolutePath())
.option(FlinkOptions.METADATA_ENABLED, true)
.option("hoodie.metadata.index.column.stats.enable", true)
.option(FlinkOptions.READ_DATA_SKIPPING_ENABLED, true)
+ .option(FlinkOptions.TABLE_TYPE, tableType)
.end();
tableEnv.executeSql(hoodieTableDDL);