This is an automated email from the ASF dual-hosted git repository.

danny0405 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new 4588c35ec00 [HUDI-7998] Fix insert overwrite hudi table with static partition whe… (#11643)
4588c35ec00 is described below

commit 4588c35ec00181d40a0fc066ac8088ee36be7681
Author: KnightChess <[email protected]>
AuthorDate: Fri Jul 19 18:44:42 2024 +0800

    [HUDI-7998] Fix insert overwrite hudi table with static partition whe… (#11643)
---
 .../src/main/scala/org/apache/spark/sql/hudi/ProvidesHoodieConfig.scala | 2 +-
 .../src/test/scala/org/apache/spark/sql/hudi/dml/TestInsertTable.scala  | 2 ++
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/ProvidesHoodieConfig.scala b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/ProvidesHoodieConfig.scala
index 070f507a252..440980dff2b 100644
--- a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/ProvidesHoodieConfig.scala
+++ b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/ProvidesHoodieConfig.scala
@@ -384,7 +384,7 @@ trait ProvidesHoodieConfig extends Logging {
       val staticPartitionValues = partitionSpec.filter(p => p._2.isDefined).mapValues(_.get)
       val predicates = staticPartitionValues.map { case (k, v) =>
         val partition = AttributeReference(k, partitionNameToType(k))()
-        val value = Literal(v)
+        val value = HoodieSqlCommonUtils.castIfNeeded(Literal.create(v), partitionNameToType(k))
         EqualTo(partition, value)
       }.toSeq
       
Option(fileIndex.getPartitionPaths(predicates).map(_.getPath).mkString(","))
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/dml/TestInsertTable.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/dml/TestInsertTable.scala
index f53db8e5819..d5a7ea3567c 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/dml/TestInsertTable.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/dml/TestInsertTable.scala
@@ -1515,6 +1515,8 @@ class TestInsertTable extends HoodieSparkSqlTestBase {
          """.stripMargin)
    // NOTE: We have to drop type-literal prefix since Spark doesn't parse type literals appropriately
    spark.sql(s"insert into $tableName partition(dt = ${dropTypeLiteralPrefix(partitionValue)}) select 1, 'a1', 10")
+    // try again to trigger hoodieFileIndex
+    spark.sql(s"insert overwrite $tableName partition(dt = 
${dropTypeLiteralPrefix(partitionValue)}) select 1, 'a1', 10")
     spark.sql(s"insert into $tableName select 2, 'a2', 10, $partitionValue")
    checkAnswer(s"select id, name, price, cast(dt as string) from $tableName order by id")(
       Seq(1, "a1", 10, extractRawValue(partitionValue).toString),

Reply via email to