This is an automated email from the ASF dual-hosted git repository.

danny0405 pushed a commit to branch release-0.10.0
in repository https://gitbox.apache.org/repos/asf/hudi.git

commit 63a7df79e5e97159b39ae4354462b52d18899f22
Author: 董可伦 <[email protected]>
AuthorDate: Tue Nov 30 14:36:36 2021 +0800

    [HUDI-2642] Add support ignoring case in update sql operation (#3882)
    
    (cherry picked from commit a398aad1fcbd5ff36eb7bc750fcc418d78b3a97e)
---
 .../hudi/command/UpdateHoodieTableCommand.scala    |  5 ++-
 .../apache/spark/sql/hudi/TestUpdateTable.scala    | 45 +++++++++++++++++++++-
 2 files changed, 47 insertions(+), 3 deletions(-)

diff --git a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/UpdateHoodieTableCommand.scala b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/UpdateHoodieTableCommand.scala
index 60122ce..7397b0d 100644
--- a/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/UpdateHoodieTableCommand.scala
+++ b/hudi-spark-datasource/hudi-spark/src/main/scala/org/apache/spark/sql/hudi/command/UpdateHoodieTableCommand.scala
@@ -51,7 +51,10 @@ case class UpdateHoodieTableCommand(updateTable: UpdateTable) extends RunnableCo
     }.toMap
 
     val updateExpressions = table.output
-      .map(attr => name2UpdateValue.getOrElse(attr.name, attr))
+      .map(attr => {
+        val UpdateValueOption = name2UpdateValue.find(f => sparkSession.sessionState.conf.resolver(f._1, attr.name))
+        if(UpdateValueOption.isEmpty) attr else UpdateValueOption.get._2
+      })
       .filter { // filter the meta columns
         case attr: AttributeReference =>
           !HoodieRecord.HOODIE_META_COLUMNS.asScala.toSet.contains(attr.name)
diff --git a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestUpdateTable.scala b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestUpdateTable.scala
index 2537c9c..82d067c 100644
--- a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestUpdateTable.scala
+++ b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestUpdateTable.scala
@@ -27,6 +27,47 @@ class TestUpdateTable extends TestHoodieSqlBase {
         spark.sql(
           s"""
              |create table $tableName (
+             |  ID int,
+             |  NAME string,
+             |  PRICE double,
+             |  TS long
+             |) using hudi
+             | location '${tmp.getCanonicalPath}/$tableName'
+             | options (
+             |  type = '$tableType',
+             |  primaryKey = 'ID',
+             |  preCombineField = 'TS'
+             | )
+       """.stripMargin)
+        // insert data to table
+        spark.sql(s"insert into $tableName select 1, 'a1', 10, 1000")
+        checkAnswer(s"select id, name, price, ts from $tableName")(
+          Seq(1, "a1", 10.0, 1000)
+        )
+
+        // update data
+        spark.sql(s"update $tableName set price = 20 where id = 1")
+        checkAnswer(s"select id, name, price, ts from $tableName")(
+          Seq(1, "a1", 20.0, 1000)
+        )
+
+        // update data
+        spark.sql(s"update $tableName set price = price * 2 where id = 1")
+        checkAnswer(s"select id, name, price, ts from $tableName")(
+          Seq(1, "a1", 40.0, 1000)
+        )
+      }
+    }
+  }
+
+  test("Test ignoring case for Update Table") {
+    withTempDir { tmp =>
+      Seq("cow", "mor").foreach {tableType =>
+        val tableName = generateTableName
+        // create table
+        spark.sql(
+          s"""
+             |create table $tableName (
              |  id int,
              |  name string,
              |  price double,
@@ -46,13 +87,13 @@ class TestUpdateTable extends TestHoodieSqlBase {
         )
 
         // update data
-        spark.sql(s"update $tableName set price = 20 where id = 1")
+        spark.sql(s"update $tableName set PRICE = 20 where ID = 1")
         checkAnswer(s"select id, name, price, ts from $tableName")(
           Seq(1, "a1", 20.0, 1000)
         )
 
         // update data
-        spark.sql(s"update $tableName set price = price * 2 where id = 1")
+        spark.sql(s"update $tableName set PRICE = PRICE * 2 where ID = 1")
         checkAnswer(s"select id, name, price, ts from $tableName")(
           Seq(1, "a1", 40.0, 1000)
         )

Reply via email to