rahil-c commented on code in PR #13642:
URL: https://github.com/apache/hudi/pull/13642#discussion_r2250663063


##########
hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/table/upgrade/TestUpgradeDowngrade.java:
##########
@@ -394,6 +395,115 @@ public void 
testUpgradeFourtoFiveWithHiveStyleDefaultPartitionWithSkipValidation
     testUpgradeFourToFiveInternal(true, true, true);
   }
 
+  @Test
+  public void testUpgradeFourToFiveWithMetadataTableFailure() throws Exception 
{
+    // Follow same setup of testUpgradeFourToFiveInternal
+    String tableName = metaClient.getTableConfig().getTableName();
+    cleanUp();
+    initSparkContexts();
+    initPath();
+    initTestDataGenerator();
+
+    Map<String, String> params = new HashMap<>();
+    addNewTableParamsToProps(params, tableName);
+    Properties properties = new Properties();
+    params.forEach((k, v) -> properties.setProperty(k, v));
+
+    initMetaClient(getTableType(), properties);
+    HoodieWriteConfig cfg = getConfigBuilder()
+        .withRollbackUsingMarkers(false)
+        .withWriteTableVersion(6)
+        
.withMetadataConfig(HoodieMetadataConfig.newBuilder().enable(true).build())
+        .withProps(params)
+        .build();
+    SparkRDDWriteClient client = getHoodieWriteClient(cfg);
+    // Write inserts
+    doInsert(client);
+
+    downgradeTableConfigsFromFiveToFour(cfg);
+    // end of setup of testUpgradeFourToFiveInternal
+
+    // Now test specific case of Metadata table failure during upgrade process
+    HoodieTableVersion originalVersion = 
metaClient.getTableConfig().getTableVersion();
+    
+    // Get metadata table path and corrupt its properties files, to trigger 
exception
+    String metadataTablePath = HoodieTableMetadata.getMetadataTableBasePath(
+        metaClient.getBasePath().toString());
+    StoragePath metadataHoodiePath = new StoragePath(metadataTablePath, 
HoodieTableMetaClient.METAFOLDER_NAME);
+    StoragePath propsPath = new StoragePath(metadataHoodiePath, 
HoodieTableConfig.HOODIE_PROPERTIES_FILE);
+    StoragePath backupPropsPath = new StoragePath(metadataHoodiePath, 
HoodieTableConfig.HOODIE_PROPERTIES_FILE_BACKUP);
+    
+    // Corrupt both properties files with invalid content
+    String corruptedContent = 
"CORRUPTED_INVALID_CONTENT\n\nTHIS_IS_NOT_VALID_PROPERTIES_FORMAT";
+    try (OutputStream propsOut = metaClient.getStorage().create(propsPath, 
true);
+         OutputStream backupOut = 
metaClient.getStorage().create(backupPropsPath, true)) {

Review Comment:
   Discussed in https://github.com/apache/hudi/pull/13642#discussion_r2250660876



##########
hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/table/upgrade/TestUpgradeDowngrade.java:
##########
@@ -394,6 +395,115 @@ public void 
testUpgradeFourtoFiveWithHiveStyleDefaultPartitionWithSkipValidation
     testUpgradeFourToFiveInternal(true, true, true);
   }
 
+  @Test
+  public void testUpgradeFourToFiveWithMetadataTableFailure() throws Exception 
{
+    // Follow same setup of testUpgradeFourToFiveInternal
+    String tableName = metaClient.getTableConfig().getTableName();
+    cleanUp();
+    initSparkContexts();
+    initPath();
+    initTestDataGenerator();
+
+    Map<String, String> params = new HashMap<>();
+    addNewTableParamsToProps(params, tableName);
+    Properties properties = new Properties();
+    params.forEach((k, v) -> properties.setProperty(k, v));
+
+    initMetaClient(getTableType(), properties);
+    HoodieWriteConfig cfg = getConfigBuilder()
+        .withRollbackUsingMarkers(false)
+        .withWriteTableVersion(6)
+        
.withMetadataConfig(HoodieMetadataConfig.newBuilder().enable(true).build())
+        .withProps(params)
+        .build();
+    SparkRDDWriteClient client = getHoodieWriteClient(cfg);
+    // Write inserts
+    doInsert(client);
+
+    downgradeTableConfigsFromFiveToFour(cfg);
+    // end of setup of testUpgradeFourToFiveInternal
+
+    // Now test specific case of Metadata table failure during upgrade process
+    HoodieTableVersion originalVersion = 
metaClient.getTableConfig().getTableVersion();
+    
+    // Get metadata table path and corrupt its properties files, to trigger 
exception
+    String metadataTablePath = HoodieTableMetadata.getMetadataTableBasePath(
+        metaClient.getBasePath().toString());
+    StoragePath metadataHoodiePath = new StoragePath(metadataTablePath, 
HoodieTableMetaClient.METAFOLDER_NAME);
+    StoragePath propsPath = new StoragePath(metadataHoodiePath, 
HoodieTableConfig.HOODIE_PROPERTIES_FILE);
+    StoragePath backupPropsPath = new StoragePath(metadataHoodiePath, 
HoodieTableConfig.HOODIE_PROPERTIES_FILE_BACKUP);
+    
+    // Corrupt both properties files with invalid content
+    String corruptedContent = 
"CORRUPTED_INVALID_CONTENT\n\nTHIS_IS_NOT_VALID_PROPERTIES_FORMAT";
+    try (OutputStream propsOut = metaClient.getStorage().create(propsPath, 
true);
+         OutputStream backupOut = 
metaClient.getStorage().create(backupPropsPath, true)) {

Review Comment:
   Discussed in https://github.com/apache/hudi/pull/13642#discussion_r2250660876



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to