This is an automated email from the ASF dual-hosted git repository.
yihua pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new fe488bc1b64 [HUDI-7381] Fix flaky test introduced in PR 10619 (#10674)
fe488bc1b64 is described below
commit fe488bc1b649f1a9f90fcc178923ee12be3ce90f
Author: Rajesh Mahindra <[email protected]>
AuthorDate: Thu Feb 15 16:40:56 2024 -0800
[HUDI-7381] Fix flaky test introduced in PR 10619 (#10674)
Co-authored-by: rmahindra123 <[email protected]>
---
.../table/action/compact/TestHoodieCompactor.java | 21 +++++++++------------
1 file changed, 9 insertions(+), 12 deletions(-)
diff --git
a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/table/action/compact/TestHoodieCompactor.java
b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/table/action/compact/TestHoodieCompactor.java
index 313f14ce989..4ad19bfbfc4 100644
---
a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/table/action/compact/TestHoodieCompactor.java
+++
b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/table/action/compact/TestHoodieCompactor.java
@@ -195,19 +195,18 @@ public class TestHoodieCompactor extends HoodieSparkClientTestHarness {
String newCommitTime = "100";
writeClient.startCommitWithTime(newCommitTime);
- List<HoodieRecord> records = dataGen.generateInserts(newCommitTime, 100);
+ List<HoodieRecord> records = dataGen.generateInserts(newCommitTime, 1000);
JavaRDD<HoodieRecord> recordsRDD = jsc.parallelize(records, 1);
writeClient.insert(recordsRDD, newCommitTime).collect();
- // Update all the 100 records
- newCommitTime = "101";
- updateRecords(config, newCommitTime, records);
-
- assertLogFilesNumEqualsTo(config, 1);
-
- String compactionInstantTime = "102";
- HoodieData<WriteStatus> result = compact(writeClient, compactionInstantTime);
-
+ // Update all the 1000 records across 5 commits to generate sufficient log files.
+ int i = 1;
+ for (; i < 5; i++) {
+ newCommitTime = String.format("10%s", i);
+ updateRecords(config, newCommitTime, records);
+ assertLogFilesNumEqualsTo(config, i);
+ }
+ HoodieData<WriteStatus> result = compact(writeClient, String.format("10%s", i));
verifyCompaction(result);
// Verify compaction.requested, compaction.completed metrics counts.
@@ -243,7 +242,6 @@ public class TestHoodieCompactor extends HoodieSparkClientTestHarness {
assertLogFilesNumEqualsTo(config, 1);
HoodieData<WriteStatus> result = compact(writeClient, "10" + (i + 1));
-
verifyCompaction(result);
// Verify compaction.requested, compaction.completed metrics counts.
@@ -304,7 +302,6 @@ public class TestHoodieCompactor extends HoodieSparkClientTestHarness {
for (String partitionPath : dataGen.getPartitionPaths()) {
assertTrue(writeStatuses.stream().anyMatch(writeStatus -> writeStatus.getStat().getPartitionPath().contentEquals(partitionPath)));
}
-
writeStatuses.forEach(writeStatus -> {
final HoodieWriteStat.RuntimeStats stats = writeStatus.getStat().getRuntimeStats();
assertNotNull(stats);