This is an automated email from the ASF dual-hosted git repository.
codope pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new 932cbc867a5 [HUDI-6445] Triage ci flakiness and some test fixes (#9534)
932cbc867a5 is described below
commit 932cbc867a586391b4ee4a3e2fbb54f09cdaaf1c
Author: Sivabalan Narayanan <[email protected]>
AuthorDate: Fri Aug 25 09:54:06 2023 -0400
[HUDI-6445] Triage ci flakiness and some test fixes (#9534)
Fixed metrics in tests. (disabled metrics).
Fixed Java tests to use local FS instead of hdfs.
Removed some of the parameterized tests for Java.
---------
Co-authored-by: Sagar Sumit <[email protected]>
---
.../hudi/client/TestJavaHoodieBackedMetadata.java | 16 +-
.../TestHoodieJavaClientOnCopyOnWriteStorage.java | 185 +++++++++------------
.../testutils/HoodieJavaClientTestHarness.java | 140 ++++++++--------
.../hudi/testutils/TestHoodieMetadataBase.java | 2 +-
.../functional/TestHoodieBackedMetadata.java | 18 +-
.../client/functional/TestHoodieMetadataBase.java | 2 +-
.../realtime/TestHoodieRealtimeRecordReader.java | 7 +-
.../apache/hudi/functional/TestBootstrapRead.java | 2 +-
.../functional/TestNewHoodieParquetFileFormat.java | 4 +-
9 files changed, 174 insertions(+), 202 deletions(-)
diff --git
a/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/client/TestJavaHoodieBackedMetadata.java
b/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/client/TestJavaHoodieBackedMetadata.java
index 7226563feaa..b22fa76788d 100644
---
a/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/client/TestJavaHoodieBackedMetadata.java
+++
b/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/client/TestJavaHoodieBackedMetadata.java
@@ -185,14 +185,10 @@ public class TestJavaHoodieBackedMetadata extends
TestHoodieMetadataBase {
public static List<Arguments> tableOperationsTestArgs() {
return asList(
- Arguments.of(COPY_ON_WRITE, true, true),
- Arguments.of(COPY_ON_WRITE, true, false),
- Arguments.of(COPY_ON_WRITE, false, true),
- Arguments.of(COPY_ON_WRITE, false, false),
- Arguments.of(MERGE_ON_READ, true, true),
- Arguments.of(MERGE_ON_READ, true, false),
- Arguments.of(MERGE_ON_READ, false, true),
- Arguments.of(MERGE_ON_READ, false, false)
+ Arguments.of(COPY_ON_WRITE, true),
+ Arguments.of(COPY_ON_WRITE, false),
+ Arguments.of(MERGE_ON_READ, true),
+ Arguments.of(MERGE_ON_READ, false)
);
}
@@ -284,14 +280,14 @@ public class TestJavaHoodieBackedMetadata extends
TestHoodieMetadataBase {
*/
@ParameterizedTest
@MethodSource("tableOperationsTestArgs")
- public void testTableOperations(HoodieTableType tableType, boolean
enableFullScan, boolean enableMetrics) throws Exception {
+ public void testTableOperations(HoodieTableType tableType, boolean
enableFullScan) throws Exception {
List<Long> commitTimeList = new ArrayList<>();
commitTimeList.add(Long.parseLong(HoodieActiveTimeline.createNewInstantTime()));
for (int i = 0; i < 8; i++) {
long nextCommitTime =
getNextCommitTime(commitTimeList.get(commitTimeList.size() - 1));
commitTimeList.add(nextCommitTime);
}
- init(tableType, true, enableFullScan, enableMetrics, false);
+ init(tableType, true, enableFullScan, false, false);
doWriteInsertAndUpsert(testTable, commitTimeList.get(0).toString(),
commitTimeList.get(1).toString(), false);
// trigger an upsert
diff --git
a/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/client/functional/TestHoodieJavaClientOnCopyOnWriteStorage.java
b/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/client/functional/TestHoodieJavaClientOnCopyOnWriteStorage.java
index a3a0b726619..211dc0129e6 100644
---
a/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/client/functional/TestHoodieJavaClientOnCopyOnWriteStorage.java
+++
b/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/client/functional/TestHoodieJavaClientOnCopyOnWriteStorage.java
@@ -150,16 +150,10 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
private static final String CLUSTERING_FAILURE = "CLUSTERING FAILURE";
- private static Stream<Arguments> populateMetaFieldsParams() {
- return Arrays.stream(new Boolean[][] {{true}, {false}}).map(Arguments::of);
- }
-
private static Stream<Arguments>
rollbackAfterConsistencyCheckFailureParams() {
return Stream.of(
- Arguments.of(true, true),
- Arguments.of(true, false),
- Arguments.of(false, true),
- Arguments.of(false, false)
+ Arguments.of(true),
+ Arguments.of(false)
);
}
@@ -173,56 +167,50 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
/**
* Test Auto Commit behavior for HoodieWriteClient insert API.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testAutoCommitOnInsert(boolean populateMetaFields) throws
Exception {
- testAutoCommit(HoodieJavaWriteClient::insert, false, populateMetaFields);
+ @Test
+ public void testAutoCommitOnInsert() throws Exception {
+ testAutoCommit(HoodieJavaWriteClient::insert, false, true);
}
/**
* Test Auto Commit behavior for HoodieWriteClient insertPrepped API.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testAutoCommitOnInsertPrepped(boolean populateMetaFields) throws
Exception {
- testAutoCommit(HoodieJavaWriteClient::insertPreppedRecords, true,
populateMetaFields);
+ @Test
+ public void testAutoCommitOnInsertPrepped() throws Exception {
+ testAutoCommit(HoodieJavaWriteClient::insertPreppedRecords, true, true);
}
/**
* Test Auto Commit behavior for HoodieWriteClient upsert API.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testAutoCommitOnUpsert(boolean populateMetaFields) throws
Exception {
- testAutoCommit(HoodieJavaWriteClient::upsert, false, populateMetaFields);
+ @Test
+ public void testAutoCommitOnUpsert() throws Exception {
+ testAutoCommit(HoodieJavaWriteClient::upsert, false, true);
}
/**
* Test Auto Commit behavior for HoodieWriteClient upsert Prepped API.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testAutoCommitOnUpsertPrepped(boolean populateMetaFields) throws
Exception {
- testAutoCommit(HoodieJavaWriteClient::upsertPreppedRecords, true,
populateMetaFields);
+ @Test
+ public void testAutoCommitOnUpsertPrepped() throws Exception {
+ testAutoCommit(HoodieJavaWriteClient::upsertPreppedRecords, true, true);
}
/**
* Test Auto Commit behavior for HoodieWriteClient bulk-insert API.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testAutoCommitOnBulkInsert(boolean populateMetaFields) throws
Exception {
- testAutoCommit(HoodieJavaWriteClient::bulkInsert, false,
populateMetaFields);
+ @Test
+ public void testAutoCommitOnBulkInsert() throws Exception {
+ testAutoCommit(HoodieJavaWriteClient::bulkInsert, false, true);
}
/**
* Test Auto Commit behavior for HoodieWriteClient bulk-insert prepped API.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testAutoCommitOnBulkInsertPrepped(boolean populateMetaFields)
throws Exception {
+ @Test
+ public void testAutoCommitOnBulkInsertPrepped() throws Exception {
testAutoCommit((writeClient, recordRDD, instantTime) ->
writeClient.bulkInsertPreppedRecords(recordRDD, instantTime,
- Option.empty()), true, populateMetaFields);
+ Option.empty()), true, true);
}
/**
@@ -264,37 +252,33 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
/**
* Test De-duplication behavior for HoodieWriteClient insert API.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testDeduplicationOnInsert(boolean populateMetaFields) throws
Exception {
- testDeduplication(HoodieJavaWriteClient::insert, populateMetaFields);
+ @Test
+ public void testDeduplicationOnInsert() throws Exception {
+ testDeduplication(HoodieJavaWriteClient::insert, true);
}
/**
* Test De-duplication behavior for HoodieWriteClient insert API.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testDeduplicationKeepOperationFieldOnInsert(boolean
populateMetaFields) throws Exception {
- testDeduplicationKeepOperation(HoodieJavaWriteClient::insert,
populateMetaFields);
+ @Test
+ public void testDeduplicationKeepOperationFieldOnInsert() throws Exception {
+ testDeduplicationKeepOperation(HoodieJavaWriteClient::insert, true);
}
/**
* Test De-duplication behavior for HoodieWriteClient bulk-insert API.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testDeduplicationOnBulkInsert(boolean populateMetaFields) throws
Exception {
- testDeduplication(HoodieJavaWriteClient::bulkInsert, populateMetaFields);
+ @Test
+ public void testDeduplicationOnBulkInsert() throws Exception {
+ testDeduplication(HoodieJavaWriteClient::bulkInsert, true);
}
/**
* Test De-duplication behavior for HoodieWriteClient upsert API.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testDeduplicationOnUpsert(boolean populateMetaFields) throws
Exception {
- testDeduplication(HoodieJavaWriteClient::upsert, populateMetaFields);
+ @Test
+ public void testDeduplicationOnUpsert() throws Exception {
+ testDeduplication(HoodieJavaWriteClient::upsert, true);
}
/**
@@ -436,22 +420,20 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
/**
* Test Upsert API.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testUpserts(boolean populateMetaFields) throws Exception {
+ @Test
+ public void testUpserts() throws Exception {
HoodieWriteConfig.Builder cfgBuilder =
getConfigBuilder().withRollbackUsingMarkers(true);
- addConfigsForPopulateMetaFields(cfgBuilder, populateMetaFields);
+ addConfigsForPopulateMetaFields(cfgBuilder, true);
testUpsertsInternal(cfgBuilder.build(), HoodieJavaWriteClient::upsert,
false);
}
/**
* Test UpsertPrepped API.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testUpsertsPrepped(boolean populateMetaFields) throws Exception {
+ @Test
+ public void testUpsertsPrepped() throws Exception {
HoodieWriteConfig.Builder cfgBuilder =
getConfigBuilder().withRollbackUsingMarkers(true);
- addConfigsForPopulateMetaFields(cfgBuilder, populateMetaFields);
+ addConfigsForPopulateMetaFields(cfgBuilder, true);
testUpsertsInternal(cfgBuilder.build(),
HoodieJavaWriteClient::upsertPreppedRecords, true);
}
@@ -602,22 +584,19 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
/**
* Test Insert API for HoodieConcatHandle.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testInsertsWithHoodieConcatHandle(boolean populateMetaFields)
throws Exception {
+ @Test
+ public void testInsertsWithHoodieConcatHandle() throws Exception {
HoodieWriteConfig.Builder cfgBuilder = getConfigBuilder();
- addConfigsForPopulateMetaFields(cfgBuilder, populateMetaFields);
+ addConfigsForPopulateMetaFields(cfgBuilder, true);
testHoodieConcatHandle(cfgBuilder.build(), false);
}
/**
* Test InsertPrepped API for HoodieConcatHandle.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testInsertsPreppedWithHoodieConcatHandle(boolean
populateMetaFields) throws Exception {
+ public void testInsertsPreppedWithHoodieConcatHandle() throws Exception {
HoodieWriteConfig.Builder cfgBuilder = getConfigBuilder();
- addConfigsForPopulateMetaFields(cfgBuilder, populateMetaFields);
+ addConfigsForPopulateMetaFields(cfgBuilder, true);
testHoodieConcatHandle(cfgBuilder.build(), true);
}
@@ -711,11 +690,9 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
/**
* Tests deletion of records.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testDeletes(boolean populateMetaFields) throws Exception {
+ public void testDeletes() throws Exception {
HoodieWriteConfig.Builder cfgBuilder =
getConfigBuilder(HoodieFailedWritesCleaningPolicy.LAZY);
- addConfigsForPopulateMetaFields(cfgBuilder, populateMetaFields);
+ addConfigsForPopulateMetaFields(cfgBuilder, true);
HoodieJavaWriteClient client = getHoodieWriteClient(cfgBuilder.build());
/**
* Write 1 (inserts and deletes) Write actual 200 insert records and
ignore 100 delete records
@@ -736,7 +713,7 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
writeBatch(client, newCommitTime, initCommitTime, Option.empty(),
initCommitTime,
// unused as genFn uses hard-coded number of inserts/updates/deletes
-1, recordGenFunction, HoodieJavaWriteClient::upsert, true, 200, 200,
1, false,
- populateMetaFields);
+ true);
/**
* Write 2 (deletes+writes).
@@ -753,7 +730,7 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
};
writeBatch(client, newCommitTime, prevCommitTime, Option.empty(),
initCommitTime, 75, recordGenFunction,
HoodieJavaWriteClient::upsert, true, 25, 175, 2, false,
- populateMetaFields);
+ true);
}
/**
@@ -762,11 +739,10 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
*
* @throws Exception
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testDeletesForInsertsInSameBatch(boolean populateMetaFields)
throws Exception {
+ @Test
+ public void testDeletesForInsertsInSameBatch() throws Exception {
HoodieWriteConfig.Builder cfgBuilder =
getConfigBuilder(HoodieFailedWritesCleaningPolicy.LAZY);
- addConfigsForPopulateMetaFields(cfgBuilder, populateMetaFields);
+ addConfigsForPopulateMetaFields(cfgBuilder, true);
HoodieJavaWriteClient client = getHoodieWriteClient(cfgBuilder.build());
/**
* Write 200 inserts and issue deletes to a subset(50) of inserts.
@@ -787,7 +763,7 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
writeBatch(client, newCommitTime, initCommitTime, Option.empty(),
initCommitTime,
-1, recordGenFunction, HoodieJavaWriteClient::upsert, true, 150, 150,
1, false,
- populateMetaFields);
+ true);
}
@Test
@@ -958,11 +934,11 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
}
private HoodieWriteMetadata<List<WriteStatus>>
performClustering(HoodieClusteringConfig clusteringConfig,
- boolean
populateMetaFields,
- boolean
completeClustering,
- String
validatorClasses,
- String
sqlQueryForEqualityValidation, String sqlQueryForSingleResultValidation,
-
Pair<List<HoodieRecord>, List<String>> allRecords) throws IOException {
+ boolean
populateMetaFields,
+ boolean
completeClustering,
+ String
validatorClasses,
+ String
sqlQueryForEqualityValidation, String sqlQueryForSingleResultValidation,
+
Pair<List<HoodieRecord>, List<String>> allRecords) throws IOException {
HoodiePreCommitValidatorConfig validatorConfig =
HoodiePreCommitValidatorConfig.newBuilder()
.withPreCommitValidator(StringUtils.nullToEmpty(validatorClasses))
.withPrecommitValidatorEqualitySqlQueries(sqlQueryForEqualityValidation)
@@ -1101,14 +1077,13 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
/**
* Test delete with delete api.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testDeletesWithoutInserts(boolean populateMetaFields) {
+ @Test
+ public void testDeletesWithoutInserts() {
final String testPartitionPath = "2016/09/26";
final int insertSplitLimit = 100;
// setup the small file handling params
HoodieWriteConfig config = getSmallInsertWriteConfig(insertSplitLimit,
- TRIP_EXAMPLE_SCHEMA, dataGen.getEstimatedFileSizeInBytes(150),
populateMetaFields, populateMetaFields
+ TRIP_EXAMPLE_SCHEMA, dataGen.getEstimatedFileSizeInBytes(150), true,
true
? new Properties() : getPropertiesForKeyGen());
dataGen = new HoodieTestDataGenerator(new String[] {testPartitionPath});
HoodieJavaWriteClient client = getHoodieWriteClient(config);
@@ -1125,12 +1100,11 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
/**
* Test to ensure commit metadata points to valid files.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testCommitWritesRelativePaths(boolean populateMetaFields) throws
Exception {
+ @Test
+ public void testCommitWritesRelativePaths() throws Exception {
HoodieWriteConfig.Builder cfgBuilder =
getConfigBuilder().withAutoCommit(false);
- addConfigsForPopulateMetaFields(cfgBuilder, populateMetaFields);
+ addConfigsForPopulateMetaFields(cfgBuilder, true);
try (HoodieJavaWriteClient client =
getHoodieWriteClient(cfgBuilder.build());) {
HoodieTableMetaClient metaClient =
HoodieTableMetaClient.builder().setConf(hadoopConf).setBasePath(basePath).build();
HoodieJavaTable table = HoodieJavaTable.create(cfgBuilder.build(),
context, metaClient);
@@ -1171,11 +1145,10 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
/**
* Test to ensure commit metadata points to valid files.10.
*/
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testMetadataStatsOnCommit(boolean populateMetaFields) throws
Exception {
+ @Test
+ public void testMetadataStatsOnCommit() throws Exception {
HoodieWriteConfig.Builder cfgBuilder =
getConfigBuilder().withAutoCommit(false);
- addConfigsForPopulateMetaFields(cfgBuilder, populateMetaFields);
+ addConfigsForPopulateMetaFields(cfgBuilder, true);
HoodieWriteConfig cfg = cfgBuilder.build();
HoodieJavaWriteClient client = getHoodieWriteClient(cfg);
@@ -1304,18 +1277,16 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
@ParameterizedTest
@MethodSource("rollbackAfterConsistencyCheckFailureParams")
- public void testRollbackAfterConsistencyCheckFailureUsingFileList(boolean
enableOptimisticConsistencyGuard, boolean populateMetCols) throws Exception {
- testRollbackAfterConsistencyCheckFailureUsingFileList(false,
enableOptimisticConsistencyGuard, populateMetCols);
+ public void testRollbackAfterConsistencyCheckFailureUsingFileList(boolean
enableOptimisticConsistencyGuard) throws Exception {
+ testRollbackAfterConsistencyCheckFailureUsingFileList(false,
enableOptimisticConsistencyGuard, true);
}
@ParameterizedTest
@MethodSource("rollbackAfterConsistencyCheckFailureParams")
- public void testRollbackAfterConsistencyCheckFailureUsingMarkers(boolean
enableOptimisticConsistencyGuard, boolean populateMetCols) throws Exception {
- testRollbackAfterConsistencyCheckFailureUsingFileList(true,
enableOptimisticConsistencyGuard, populateMetCols);
+ public void testRollbackAfterConsistencyCheckFailureUsingMarkers(boolean
enableOptimisticConsistencyGuard) throws Exception {
+ testRollbackAfterConsistencyCheckFailureUsingFileList(true,
enableOptimisticConsistencyGuard, true);
}
- //@ParameterizedTest
- //@MethodSource("rollbackFailedCommitsParams")
@Test
public void testRollbackFailedCommits() throws Exception {
// HoodieFailedWritesCleaningPolicy cleaningPolicy, boolean
populateMetaFields
@@ -1395,12 +1366,11 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
}
}
- @ParameterizedTest
- @MethodSource("populateMetaFieldsParams")
- public void testRollbackFailedCommitsToggleCleaningPolicy(boolean
populateMetaFields) throws Exception {
+ @Test
+ public void testRollbackFailedCommitsToggleCleaningPolicy() throws Exception
{
HoodieTestUtils.init(hadoopConf, basePath);
HoodieFailedWritesCleaningPolicy cleaningPolicy = EAGER;
- HoodieJavaWriteClient client = new HoodieJavaWriteClient(context,
getParallelWritingWriteConfig(cleaningPolicy, populateMetaFields));
+ HoodieJavaWriteClient client = new HoodieJavaWriteClient(context,
getParallelWritingWriteConfig(cleaningPolicy, true));
// Perform 1 successful writes to table
writeBatch(client, "100", "100", Option.of(Arrays.asList("100")), "100",
100, dataGen::generateInserts, HoodieJavaWriteClient::bulkInsert,
false, 100, 300,
@@ -1414,12 +1384,12 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
// Toggle cleaning policy to LAZY
cleaningPolicy = HoodieFailedWritesCleaningPolicy.LAZY;
// Perform 2 failed writes to table
- client = new HoodieJavaWriteClient(context,
getParallelWritingWriteConfig(cleaningPolicy, populateMetaFields));
+ client = new HoodieJavaWriteClient(context,
getParallelWritingWriteConfig(cleaningPolicy, true));
writeBatch(client, "300", "200", Option.of(Arrays.asList("300")), "300",
100, dataGen::generateInserts, HoodieJavaWriteClient::bulkInsert,
false, 100, 300,
0, false);
client.close();
- client = new HoodieJavaWriteClient(context,
getParallelWritingWriteConfig(cleaningPolicy, populateMetaFields));
+ client = new HoodieJavaWriteClient(context,
getParallelWritingWriteConfig(cleaningPolicy, true));
writeBatch(client, "400", "300", Option.of(Arrays.asList("400")), "400",
100, dataGen::generateInserts, HoodieJavaWriteClient::bulkInsert,
false, 100, 300,
0, false);
@@ -1435,25 +1405,26 @@ public class TestHoodieJavaClientOnCopyOnWriteStorage
extends HoodieJavaClientTe
assertTrue(timeline.getTimelineOfActions(
CollectionUtils.createSet(ROLLBACK_ACTION)).countInstants() == 3);
// Perform 2 failed commits
- client = new HoodieJavaWriteClient(context,
getParallelWritingWriteConfig(cleaningPolicy, populateMetaFields));
+ client = new HoodieJavaWriteClient(context,
getParallelWritingWriteConfig(cleaningPolicy, true));
writeBatch(client, "500", "400", Option.of(Arrays.asList("300")), "300",
100, dataGen::generateInserts, HoodieJavaWriteClient::bulkInsert,
false, 100, 300,
0, false);
client.close();
- client = new HoodieJavaWriteClient(context,
getParallelWritingWriteConfig(cleaningPolicy, populateMetaFields));
+ client = new HoodieJavaWriteClient(context,
getParallelWritingWriteConfig(cleaningPolicy, true));
writeBatch(client, "600", "500", Option.of(Arrays.asList("400")), "400",
100, dataGen::generateInserts, HoodieJavaWriteClient::bulkInsert,
false, 100, 300,
0, false);
client.close();
// Toggle cleaning policy to EAGER
cleaningPolicy = EAGER;
- client = new HoodieJavaWriteClient(context,
getParallelWritingWriteConfig(cleaningPolicy, populateMetaFields));
+ client = new HoodieJavaWriteClient(context,
getParallelWritingWriteConfig(cleaningPolicy, true));
client.startCommit();
timeline = metaClient.getActiveTimeline().reload();
// since OCC is enabled, hudi auto flips the cleaningPolicy to Lazy.
assertTrue(timeline.getTimelineOfActions(
CollectionUtils.createSet(ROLLBACK_ACTION)).countInstants() == 3);
assertTrue(timeline.getCommitsTimeline().filterCompletedInstants().countInstants()
== 1);
+ client.close();
}
@Test
diff --git
a/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/testutils/HoodieJavaClientTestHarness.java
b/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/testutils/HoodieJavaClientTestHarness.java
index aaf072e7b98..68b7ed18a7f 100644
---
a/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/testutils/HoodieJavaClientTestHarness.java
+++
b/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/testutils/HoodieJavaClientTestHarness.java
@@ -86,6 +86,7 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
+import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.slf4j.Logger;
@@ -131,9 +132,14 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
protected HoodieTableFileSystemView tableView;
protected HoodieJavaWriteClient writeClient;
+ @AfterAll
+ public static void tearDownAll() throws IOException {
+ FileSystem.closeAll();
+ }
+
@BeforeEach
protected void initResources() throws IOException {
- basePath = tempDir.resolve("java_client_tests" +
System.currentTimeMillis()).toUri().getPath();
+ basePath = tempDir.resolve("java_client_tests" +
System.currentTimeMillis()).toAbsolutePath().toUri().getPath();
hadoopConf = new Configuration();
taskContextSupplier = new TestJavaTaskContextSupplier();
context = new HoodieJavaEngineContext(hadoopConf, taskContextSupplier);
@@ -142,6 +148,14 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
initMetaClient();
}
+ @AfterEach
+ protected void cleanupResources() throws IOException {
+ cleanupClients();
+ cleanupTestDataGenerator();
+ cleanupFileSystem();
+ cleanupExecutorService();
+ }
+
public class TestJavaTaskContextSupplier extends TaskContextSupplier {
int partitionId = 0;
int stageId = 0;
@@ -172,14 +186,6 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
}
}
- @AfterEach
- protected void cleanupResources() throws IOException {
- cleanupClients();
- cleanupTestDataGenerator();
- cleanupFileSystem();
- cleanupExecutorService();
- }
-
protected void initFileSystem(String basePath, Configuration hadoopConf) {
if (basePath == null) {
throw new IllegalStateException("The base path has not been
initialized.");
@@ -423,9 +429,9 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
}
public List<WriteStatus> insertFirstBatch(HoodieWriteConfig writeConfig,
HoodieJavaWriteClient client, String newCommitTime,
- String initCommitTime, int
numRecordsInThisCommit,
- Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn, boolean
isPreppedAPI,
- boolean assertForCommit, int
expRecordsInThisCommit) throws Exception {
+ String initCommitTime, int
numRecordsInThisCommit,
+ Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn, boolean
isPreppedAPI,
+ boolean assertForCommit, int
expRecordsInThisCommit) throws Exception {
return insertFirstBatch(writeConfig, client, newCommitTime,
initCommitTime, numRecordsInThisCommit, writeFn, isPreppedAPI, assertForCommit,
expRecordsInThisCommit, true);
}
@@ -445,9 +451,9 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
* @throws Exception in case of error
*/
public List<WriteStatus> insertFirstBatch(HoodieWriteConfig writeConfig,
HoodieJavaWriteClient client, String newCommitTime,
- String initCommitTime, int
numRecordsInThisCommit,
- Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn, boolean
isPreppedAPI,
- boolean assertForCommit, int
expRecordsInThisCommit, boolean filterForCommitTimeWithAssert) throws Exception
{
+ String initCommitTime, int
numRecordsInThisCommit,
+ Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn, boolean
isPreppedAPI,
+ boolean assertForCommit, int
expRecordsInThisCommit, boolean filterForCommitTimeWithAssert) throws Exception
{
final Function2<List<HoodieRecord>, String, Integer> recordGenFunction =
generateWrapRecordsFn(isPreppedAPI, writeConfig,
dataGen::generateInserts);
@@ -473,9 +479,9 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
* @throws Exception in case of error
*/
public List<WriteStatus> insertBatch(HoodieWriteConfig writeConfig,
HoodieJavaWriteClient client, String newCommitTime,
- String initCommitTime, int
numRecordsInThisCommit,
- Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn, boolean
isPreppedAPI,
- boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords, int expTotalCommits,
Option<String> partition) throws Exception {
+ String initCommitTime, int
numRecordsInThisCommit,
+ Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn, boolean
isPreppedAPI,
+ boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords, int expTotalCommits,
Option<String> partition) throws Exception {
if (partition.isPresent()) {
final Function3<List<HoodieRecord>, String, Integer, String>
recordGenFunction =
@@ -494,10 +500,10 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
}
public List<WriteStatus> updateBatch(HoodieWriteConfig writeConfig,
HoodieJavaWriteClient client, String newCommitTime,
- String prevCommitTime,
Option<List<String>> commitTimesBetweenPrevAndNew, String initCommitTime,
- int numRecordsInThisCommit,
- Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn, boolean
isPreppedAPI,
- boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords, int expTotalCommits) throws
Exception {
+ String prevCommitTime,
Option<List<String>> commitTimesBetweenPrevAndNew, String initCommitTime,
+ int numRecordsInThisCommit,
+ Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn, boolean
isPreppedAPI,
+ boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords, int expTotalCommits) throws
Exception {
return updateBatch(writeConfig, client, newCommitTime, prevCommitTime,
commitTimesBetweenPrevAndNew, initCommitTime, numRecordsInThisCommit, writeFn,
isPreppedAPI, assertForCommit, expRecordsInThisCommit,
expTotalRecords, expTotalCommits, true);
}
@@ -522,11 +528,11 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
* @throws Exception in case of error
*/
public List<WriteStatus> updateBatch(HoodieWriteConfig writeConfig,
HoodieJavaWriteClient client, String newCommitTime,
- String prevCommitTime,
Option<List<String>> commitTimesBetweenPrevAndNew, String initCommitTime,
- int numRecordsInThisCommit,
- Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn, boolean
isPreppedAPI,
- boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords, int expTotalCommits,
- boolean
filterForCommitTimeWithAssert) throws Exception {
+ String prevCommitTime,
Option<List<String>> commitTimesBetweenPrevAndNew, String initCommitTime,
+ int numRecordsInThisCommit,
+ Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn, boolean
isPreppedAPI,
+ boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords, int expTotalCommits,
+ boolean filterForCommitTimeWithAssert)
throws Exception {
final Function2<List<HoodieRecord>, String, Integer> recordGenFunction =
generateWrapRecordsFn(isPreppedAPI, writeConfig,
dataGen::generateUniqueUpdates);
@@ -536,8 +542,8 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
}
public List<WriteStatus> deleteBatch(HoodieWriteConfig writeConfig,
HoodieJavaWriteClient client, String newCommitTime, String prevCommitTime,
- String initCommitTime, int
numRecordsInThisCommit, boolean isPreppedAPI, boolean assertForCommit,
- int expRecordsInThisCommit, int
expTotalRecords) throws Exception {
+ String initCommitTime, int
numRecordsInThisCommit, boolean isPreppedAPI, boolean assertForCommit,
+ int expRecordsInThisCommit, int
expTotalRecords) throws Exception {
return deleteBatch(writeConfig, client, newCommitTime, prevCommitTime,
initCommitTime, numRecordsInThisCommit, isPreppedAPI,
assertForCommit, expRecordsInThisCommit, expTotalRecords, true);
}
@@ -559,8 +565,8 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
* @throws Exception in case of error
*/
public List<WriteStatus> deleteBatch(HoodieWriteConfig writeConfig,
HoodieJavaWriteClient client, String newCommitTime,
- String prevCommitTime, String
initCommitTime, int numRecordsInThisCommit, boolean isPreppedAPI,
- boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords, boolean
filterForCommitTimeWithAssert) throws Exception {
+ String prevCommitTime, String
initCommitTime, int numRecordsInThisCommit, boolean isPreppedAPI,
+ boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords, boolean
filterForCommitTimeWithAssert) throws Exception {
if (isPreppedAPI) {
final Function2<List<HoodieRecord>, String, Integer> recordGenFunction =
@@ -592,20 +598,20 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
}
public List<WriteStatus> writeBatch(HoodieJavaWriteClient client, String
newCommitTime, String prevCommitTime,
- Option<List<String>>
commitTimesBetweenPrevAndNew, String initCommitTime, int numRecordsInThisCommit,
- Function2<List<HoodieRecord>, String,
Integer> recordGenFunction,
- Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn,
- boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords, int expTotalCommits, boolean
doCommit) throws Exception {
+ Option<List<String>>
commitTimesBetweenPrevAndNew, String initCommitTime, int numRecordsInThisCommit,
+ Function2<List<HoodieRecord>, String,
Integer> recordGenFunction,
+ Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn,
+ boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords, int expTotalCommits, boolean
doCommit) throws Exception {
return writeBatch(client, newCommitTime, prevCommitTime,
commitTimesBetweenPrevAndNew, initCommitTime, numRecordsInThisCommit,
recordGenFunction,
writeFn, assertForCommit, expRecordsInThisCommit, expTotalRecords,
expTotalCommits, doCommit, true);
}
public List<WriteStatus> writeBatch(HoodieJavaWriteClient client, String
newCommitTime, String prevCommitTime,
- Option<List<String>>
commitTimesBetweenPrevAndNew, String initCommitTime, int numRecordsInThisCommit,
- Function3<List<HoodieRecord>, String,
Integer, String> recordGenFunction,
- Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn,
- boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords, int expTotalCommits,
- boolean doCommit, String partition)
throws Exception {
+ Option<List<String>>
commitTimesBetweenPrevAndNew, String initCommitTime, int numRecordsInThisCommit,
+ Function3<List<HoodieRecord>, String,
Integer, String> recordGenFunction,
+ Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn,
+ boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords, int expTotalCommits,
+ boolean doCommit, String partition)
throws Exception {
return writeBatch(client, newCommitTime, prevCommitTime,
commitTimesBetweenPrevAndNew, initCommitTime, numRecordsInThisCommit,
recordGenFunction,
writeFn, assertForCommit, expRecordsInThisCommit, expTotalRecords,
expTotalCommits, doCommit, true, partition);
}
@@ -629,11 +635,11 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
* @throws Exception in case of error
*/
public List<WriteStatus> writeBatch(HoodieJavaWriteClient client, String
newCommitTime, String prevCommitTime,
- Option<List<String>>
commitTimesBetweenPrevAndNew, String initCommitTime, int numRecordsInThisCommit,
- Function2<List<HoodieRecord>, String,
Integer> recordGenFunction,
- Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn,
- boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords, int expTotalCommits, boolean
doCommit,
- boolean
filterForCommitTimeWithAssert) throws Exception {
+ Option<List<String>>
commitTimesBetweenPrevAndNew, String initCommitTime, int numRecordsInThisCommit,
+ Function2<List<HoodieRecord>, String,
Integer> recordGenFunction,
+ Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn,
+ boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords, int expTotalCommits, boolean
doCommit,
+ boolean filterForCommitTimeWithAssert)
throws Exception {
List<HoodieRecord> records = recordGenFunction.apply(newCommitTime,
numRecordsInThisCommit);
return writeBatchHelper(client, newCommitTime, prevCommitTime,
commitTimesBetweenPrevAndNew, initCommitTime,
@@ -642,12 +648,12 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
}
public List<WriteStatus> writeBatch(HoodieJavaWriteClient client, String
newCommitTime, String prevCommitTime,
- Option<List<String>>
commitTimesBetweenPrevAndNew, String initCommitTime, int numRecordsInThisCommit,
- Function3<List<HoodieRecord>, String,
Integer, String> recordGenFunction,
- Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn,
- boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords, int expTotalCommits, boolean
doCommit,
- boolean filterForCommitTimeWithAssert,
- String partition) throws Exception {
+ Option<List<String>>
commitTimesBetweenPrevAndNew, String initCommitTime, int numRecordsInThisCommit,
+ Function3<List<HoodieRecord>, String,
Integer, String> recordGenFunction,
+ Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn,
+ boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords, int expTotalCommits, boolean
doCommit,
+ boolean filterForCommitTimeWithAssert,
+ String partition) throws Exception {
List<HoodieRecord> records = recordGenFunction.apply(newCommitTime,
numRecordsInThisCommit, partition);
return writeBatchHelper(client, newCommitTime, prevCommitTime,
commitTimesBetweenPrevAndNew, initCommitTime,
@@ -656,11 +662,11 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
}
private List<WriteStatus> writeBatchHelper(HoodieJavaWriteClient client,
String newCommitTime, String prevCommitTime,
- Option<List<String>>
commitTimesBetweenPrevAndNew, String initCommitTime,
- int numRecordsInThisCommit,
List<HoodieRecord> records,
- Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn,
- boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords,
- int expTotalCommits, boolean
doCommit, boolean filterForCommitTimeWithAssert) throws IOException {
+ Option<List<String>>
commitTimesBetweenPrevAndNew, String initCommitTime,
+ int numRecordsInThisCommit,
List<HoodieRecord> records,
+ Function3<List<WriteStatus>,
HoodieJavaWriteClient, List<HoodieRecord>, String> writeFn,
+ boolean assertForCommit, int
expRecordsInThisCommit, int expTotalRecords,
+ int expTotalCommits, boolean
doCommit, boolean filterForCommitTimeWithAssert) throws IOException {
// Write 1 (only inserts)
client.startCommitWithTime(newCommitTime);
@@ -716,8 +722,8 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
* Generate wrapper for record generation function for testing Prepped APIs.
*
* @param isPreppedAPI Flag to indicate if this is for testing
prepped-version of APIs
- * @param writeConfig Hoodie Write Config
- * @param wrapped Actual Records Generation function
+ * @param writeConfig Hoodie Write Config
+ * @param wrapped Actual Records Generation function
* @return Wrapped Function
*/
public Function2<List<HoodieRecord>, String, Integer>
generateWrapRecordsFn(boolean isPreppedAPI,
@@ -734,8 +740,8 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
* Generate wrapper for record generation function for testing Prepped APIs.
*
* @param isPreppedAPI Flag to indicate if this is for testing
prepped-version of APIs
- * @param writeConfig Hoodie Write Config
- * @param wrapped Actual Records Generation function (for partition)
+ * @param writeConfig Hoodie Write Config
+ * @param wrapped Actual Records Generation function (for partition)
* @return Wrapped Function
*/
public Function3<List<HoodieRecord>, String, Integer, String>
generateWrapRecordsForPartitionFn(boolean isPreppedAPI,
@@ -752,7 +758,7 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
* to be already de-duped and have location set. This wrapper takes care of
record-location setting. Uniqueness is
* guaranteed by record-generation function itself.
*
- * @param writeConfig Hoodie Write Config
+ * @param writeConfig Hoodie Write Config
* @param recordsGenFunction Records Generation function
* @return Wrapped function
*/
@@ -776,7 +782,7 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
* to be already de-duped and have location set. This wrapper takes care of
record-location setting. Uniqueness is
* guaranteed by record-generation function itself.
*
- * @param writeConfig Hoodie Write Config
+ * @param writeConfig Hoodie Write Config
* @param recordsGenFunction Records Generation function (for partition)
* @return Wrapped function
*/
@@ -799,8 +805,8 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
* Generate wrapper for delete key generation function for testing Prepped
APIs.
*
* @param isPreppedAPI Flag to indicate if this is for testing
prepped-version of APIs
- * @param writeConfig Hoodie Write Config
- * @param wrapped Actual Records Generation function
+ * @param writeConfig Hoodie Write Config
+ * @param wrapped Actual Records Generation function
* @return Wrapped Function
*/
public Function<Integer, List<HoodieKey>> generateWrapDeleteKeysFn(boolean
isPreppedAPI,
@@ -845,7 +851,7 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
}
private List<WriteStatus> getWriteStatusAndVerifyDeleteOperation(String
newCommitTime, String prevCommitTime, String initCommitTime, boolean
assertForCommit, int expRecordsInThisCommit,
- int
expTotalRecords, boolean filerForCommitTimeWithAssert, List<WriteStatus>
result) {
+ int
expTotalRecords, boolean filerForCommitTimeWithAssert, List<WriteStatus>
result) {
assertNoWriteErrors(result);
// verify that there is a commit
@@ -882,7 +888,7 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
}
public long numRowsInCommit(String basePath, HoodieTimeline commitTimeline,
- String instantTime, boolean
filterByCommitTime) {
+ String instantTime, boolean filterByCommitTime) {
HoodieInstant commitInstant = new HoodieInstant(false,
HoodieTimeline.COMMIT_ACTION, instantTime);
if (!commitTimeline.containsInstant(commitInstant)) {
throw new HoodieException("No commit exists at " + instantTime);
@@ -891,7 +897,7 @@ public abstract class HoodieJavaClientTestHarness extends
HoodieWriterClientTest
HashMap<String, String> paths =
getLatestFileIDsToFullPath(basePath, commitTimeline,
Arrays.asList(commitInstant));
return paths.values().stream().flatMap(path ->
-
BaseFileUtils.getInstance(path).readAvroRecords(context.getHadoopConf().get(),
new Path(path)).stream())
+
BaseFileUtils.getInstance(path).readAvroRecords(context.getHadoopConf().get(),
new Path(path)).stream())
.filter(record -> {
if (filterByCommitTime) {
Object commitTime =
record.get(HoodieRecord.COMMIT_TIME_METADATA_FIELD);
diff --git
a/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/testutils/TestHoodieMetadataBase.java
b/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/testutils/TestHoodieMetadataBase.java
index f556bc18541..e7f13991add 100644
---
a/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/testutils/TestHoodieMetadataBase.java
+++
b/hudi-client/hudi-java-client/src/test/java/org/apache/hudi/testutils/TestHoodieMetadataBase.java
@@ -303,7 +303,7 @@ public class TestHoodieMetadataBase extends
HoodieJavaClientTestHarness {
.ignoreSpuriousDeletes(validateMetadataPayloadConsistency)
.build())
.withMetricsConfig(HoodieMetricsConfig.newBuilder().on(enableMetrics)
- .withExecutorMetrics(true).build())
+ .withExecutorMetrics(enableMetrics).build())
.withMetricsGraphiteConfig(HoodieMetricsGraphiteConfig.newBuilder()
.usePrefix("unit-test").build())
.withRollbackUsingMarkers(useRollbackUsingMarkers)
diff --git
a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieBackedMetadata.java
b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieBackedMetadata.java
index 464d47b2a27..26dc41f73a3 100644
---
a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieBackedMetadata.java
+++
b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieBackedMetadata.java
@@ -203,14 +203,10 @@ public class TestHoodieBackedMetadata extends
TestHoodieMetadataBase {
public static List<Arguments> tableOperationsTestArgs() {
return asList(
- Arguments.of(COPY_ON_WRITE, true, true),
- Arguments.of(COPY_ON_WRITE, true, false),
- Arguments.of(COPY_ON_WRITE, false, true),
- Arguments.of(COPY_ON_WRITE, false, false),
- Arguments.of(MERGE_ON_READ, true, true),
- Arguments.of(MERGE_ON_READ, true, false),
- Arguments.of(MERGE_ON_READ, false, true),
- Arguments.of(MERGE_ON_READ, false, false)
+ Arguments.of(COPY_ON_WRITE, true),
+ Arguments.of(COPY_ON_WRITE, false),
+ Arguments.of(MERGE_ON_READ, true),
+ Arguments.of(MERGE_ON_READ, false)
);
}
@@ -479,14 +475,14 @@ public class TestHoodieBackedMetadata extends
TestHoodieMetadataBase {
*/
@ParameterizedTest
@MethodSource("tableOperationsTestArgs")
- public void testTableOperations(HoodieTableType tableType, boolean
enableFullScan, boolean enableMetrics) throws Exception {
+ public void testTableOperations(HoodieTableType tableType, boolean
enableFullScan) throws Exception {
List<Long> commitTimeList = new ArrayList<>();
commitTimeList.add(Long.parseLong(HoodieActiveTimeline.createNewInstantTime()));
for (int i = 0; i < 8; i++) {
long nextCommitTime =
getNextCommitTime(commitTimeList.get(commitTimeList.size() - 1));
commitTimeList.add(nextCommitTime);
}
- init(tableType, true, enableFullScan, enableMetrics, false);
+ init(tableType, true, enableFullScan, false, false);
doWriteInsertAndUpsert(testTable, commitTimeList.get(0).toString(),
commitTimeList.get(1).toString(), false);
// trigger an upsert
@@ -2726,7 +2722,7 @@ public class TestHoodieBackedMetadata extends
TestHoodieMetadataBase {
public void testbootstrapWithEmptyCommit() throws Exception {
init(HoodieTableType.COPY_ON_WRITE);
- HoodieWriteConfig writeConfig = getWriteConfigBuilder(true, true,
true).build();
+ HoodieWriteConfig writeConfig = getWriteConfigBuilder(true, true,
false).build();
initWriteConfigAndMetatableWriter(writeConfig, true);
testTable.doWriteOperation(HoodieActiveTimeline.createNewInstantTime(),
INSERT, Collections.EMPTY_LIST, 0);
syncTableMetadata(writeConfig);
diff --git
a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieMetadataBase.java
b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieMetadataBase.java
index 62148acbf5b..e0a00c24e92 100644
---
a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieMetadataBase.java
+++
b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/client/functional/TestHoodieMetadataBase.java
@@ -344,7 +344,7 @@ public class TestHoodieMetadataBase extends
HoodieSparkClientTestHarness {
.ignoreSpuriousDeletes(validateMetadataPayloadConsistency)
.build())
.withMetricsConfig(HoodieMetricsConfig.newBuilder().on(enableMetrics)
- .withExecutorMetrics(true).build())
+ .withExecutorMetrics(enableMetrics).build())
.withMetricsGraphiteConfig(HoodieMetricsGraphiteConfig.newBuilder()
.usePrefix("unit-test").build())
.withRollbackUsingMarkers(useRollbackUsingMarkers)
diff --git
a/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/realtime/TestHoodieRealtimeRecordReader.java
b/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/realtime/TestHoodieRealtimeRecordReader.java
index 6c530833d55..9fca206ac26 100644
---
a/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/realtime/TestHoodieRealtimeRecordReader.java
+++
b/hudi-hadoop-mr/src/test/java/org/apache/hudi/hadoop/realtime/TestHoodieRealtimeRecordReader.java
@@ -44,12 +44,12 @@ import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.hadoop.RealtimeFileStatus;
import org.apache.hudi.hadoop.config.HoodieRealtimeConfig;
-import org.apache.hudi.hadoop.utils.HoodieRealtimeRecordReaderUtils;
import org.apache.hudi.hadoop.testutils.InputFormatTestUtil;
+import org.apache.hudi.hadoop.utils.HoodieRealtimeRecordReaderUtils;
-import org.apache.avro.generic.GenericRecord;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
+import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -71,8 +71,8 @@ import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
-
import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
@@ -101,6 +101,7 @@ import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.params.provider.Arguments.arguments;
+@Disabled("HUDI-6755")
public class TestHoodieRealtimeRecordReader {
private static final String PARTITION_COLUMN = "datestr";
diff --git
a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestBootstrapRead.java
b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestBootstrapRead.java
index f57be60461a..d926a3be5a4 100644
---
a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestBootstrapRead.java
+++
b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestBootstrapRead.java
@@ -63,7 +63,7 @@ public class TestBootstrapRead extends TestBootstrapReadBase {
@ParameterizedTest
@MethodSource("testArgs")
- public void runTests(String bootstrapType, Boolean dashPartitions,
HoodieTableType tableType, Integer nPartitions) {
+ public void testBootstrapFunctional(String bootstrapType, Boolean
dashPartitions, HoodieTableType tableType, Integer nPartitions) {
this.bootstrapType = bootstrapType;
this.dashPartitions = dashPartitions;
this.tableType = tableType;
diff --git
a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestNewHoodieParquetFileFormat.java
b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestNewHoodieParquetFileFormat.java
index ef6814f21c5..ec719414dc8 100644
---
a/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestNewHoodieParquetFileFormat.java
+++
b/hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/functional/TestNewHoodieParquetFileFormat.java
@@ -24,6 +24,7 @@ import org.apache.hudi.common.model.HoodieTableType;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
+import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
@@ -38,6 +39,7 @@ import static
org.apache.hudi.common.model.HoodieTableType.MERGE_ON_READ;
import static org.junit.jupiter.api.Assertions.assertEquals;
@Tag("functional")
+@Disabled("HUDI-6756")
public class TestNewHoodieParquetFileFormat extends TestBootstrapReadBase {
private static Stream<Arguments> testArgs() {
@@ -54,7 +56,7 @@ public class TestNewHoodieParquetFileFormat extends
TestBootstrapReadBase {
@ParameterizedTest
@MethodSource("testArgs")
- public void runTests(HoodieTableType tableType, Integer nPartitions) {
+ public void testNewParquetFileFormat(HoodieTableType tableType, Integer
nPartitions) {
this.bootstrapType = nPartitions == 0 ? "metadata" : "mixed";
this.dashPartitions = true;
this.tableType = tableType;