This is an automated email from the ASF dual-hosted git repository.
vbalaji pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new df2345b9b3e [MINOR] Fix testRenamePartition (#10064)
df2345b9b3e is described below
commit df2345b9b3e21cad812a0b08cd4085a7d99e76f4
Author: Lin Liu <[email protected]>
AuthorDate: Wed Dec 13 15:10:23 2023 -0800
[MINOR] Fix testRenamePartition (#10064)
---
hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java | 2 +-
.../src/test/java/org/apache/hudi/cli/commands/TestRepairsCommand.java | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java
index 93c57940c43..71c6de7cf72 100644
--- a/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java
+++ b/hudi-cli/src/main/java/org/apache/hudi/cli/commands/SparkMain.java
@@ -494,7 +494,7 @@ public class SparkMain {
StructType structType = recordsToRewrite.schema();
int partitionIndex = structType.fieldIndex(partitionFieldProp);
-    recordsToRewrite.withColumn(metaClient.getTableConfig().getPartitionFieldProp(), functions.lit(null).cast(structType.apply(partitionIndex).dataType()))
+    recordsToRewrite.withColumn(metaClient.getTableConfig().getPartitionFieldProp(), functions.lit(newPartition).cast(structType.apply(partitionIndex).dataType()))
.write()
.options(propsMap)
        .option("hoodie.datasource.write.operation", WriteOperationType.BULK_INSERT.value())
diff --git a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestRepairsCommand.java b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestRepairsCommand.java
index f7c0e60f69f..2f87eac233d 100644
--- a/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestRepairsCommand.java
+++ b/hudi-cli/src/test/java/org/apache/hudi/cli/commands/TestRepairsCommand.java
@@ -424,7 +424,7 @@ public class TestRepairsCommand extends CLIFunctionalTestHarness {
    // all records from old partition should have been migrated to new partition
    totalRecs = sqlContext.read().format("hudi").load(tablePath)
-        .filter(HoodieRecord.PARTITION_PATH_METADATA_FIELD + " == '" + "2016/03/18" + "'").count();
+        .filter(HoodieRecord.PARTITION_PATH_METADATA_FIELD + " == \"" + "2016/03/18" + "\"").count();
assertEquals(totalRecs, totalRecsInOldPartition);
}
}