This is an automated email from the ASF dual-hosted git repository.
vinoth pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-hudi.git
The following commit(s) were added to refs/heads/master by this push:
new 9f51b99 [MINOR] Updated HoodieMergeOnReadTestUtils for future testing
requirements (#1456)
9f51b99 is described below
commit 9f51b99174da2ce72adc632b9ef84cfd3760c153
Author: Prashant Wason <[email protected]>
AuthorDate: Mon Mar 30 07:36:12 2020 -0700
[MINOR] Updated HoodieMergeOnReadTestUtils for future testing requirements
(#1456)
1. getRecordsUsingInputFormat() can take a custom Configuration which can
be used to specify HUDI table properties (e.g. <table>.consume.mode or
<table>.consume.start.timestamp)
2. Fixed the return value to be an empty List, rather than raising an
Exception, when no records are found
---
.../org/apache/hudi/common/HoodieMergeOnReadTestUtils.java | 14 ++++++++++----
1 file changed, 10 insertions(+), 4 deletions(-)
diff --git
a/hudi-client/src/test/java/org/apache/hudi/common/HoodieMergeOnReadTestUtils.java
b/hudi-client/src/test/java/org/apache/hudi/common/HoodieMergeOnReadTestUtils.java
index 04030bd..02b93d1 100644
---
a/hudi-client/src/test/java/org/apache/hudi/common/HoodieMergeOnReadTestUtils.java
+++
b/hudi-client/src/test/java/org/apache/hudi/common/HoodieMergeOnReadTestUtils.java
@@ -44,9 +44,13 @@ import java.util.stream.Collectors;
* Utility methods to aid in testing MergeOnRead (workaround for
HoodieReadClient for MOR).
*/
public class HoodieMergeOnReadTestUtils {
-
public static List<GenericRecord> getRecordsUsingInputFormat(List<String>
inputPaths, String basePath) {
- JobConf jobConf = new JobConf();
+ return getRecordsUsingInputFormat(inputPaths, basePath, new
Configuration());
+ }
+
+ public static List<GenericRecord> getRecordsUsingInputFormat(List<String>
inputPaths, String basePath,
+ Configuration
conf) {
+ JobConf jobConf = new JobConf(conf);
Schema schema = HoodieAvroUtils.addMetadataFields(
new
Schema.Parser().parse(HoodieTestDataGenerator.TRIP_EXAMPLE_SCHEMA));
HoodieParquetRealtimeInputFormat inputFormat = new
HoodieParquetRealtimeInputFormat();
@@ -64,8 +68,10 @@ public class HoodieMergeOnReadTestUtils {
// writable returns an array with [field1, field2,
_hoodie_commit_time,
// _hoodie_commit_seqno]
Writable[] values = writable.get();
+ final int[] fieldIndex = {0};
+ assert schema.getFields().size() <= values.length;
schema.getFields().forEach(field -> {
- newRecord.set(field, values[2]);
+ newRecord.set(field, values[fieldIndex[0]++]);
});
records.add(newRecord.build());
}
@@ -76,7 +82,7 @@ public class HoodieMergeOnReadTestUtils {
}).reduce((a, b) -> {
a.addAll(b);
return a;
- }).get();
+ }).orElse(new ArrayList<GenericRecord>());
}
private static void setPropsForInputFormat(HoodieParquetRealtimeInputFormat
inputFormat, JobConf jobConf,