This is an automated email from the ASF dual-hosted git repository.
vbalaji pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/master by this push:
new ad8a15aa45b [HUDI-7423] Support case-insensitive table type name when
creating a table in Spark SQL (#10703)
ad8a15aa45b is described below
commit ad8a15aa45be39216e8558a3ff1a3406cfc60bfa
Author: xuzifu666 <[email protected]>
AuthorDate: Tue Feb 20 12:41:08 2024 +0800
[HUDI-7423] Support case-insensitive table type name when creating a table in
Spark SQL (#10703)
* [HUDI-7423] Support case-insensitive table type name when creating a table
in Spark SQL
* add comments
---
.../apache/spark/sql/hudi/HoodieOptionConfig.scala | 3 +-
.../apache/spark/sql/hudi/TestInsertTable.scala | 52 ++++++++++++++++++++++
2 files changed, 54 insertions(+), 1 deletion(-)
diff --git
a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/HoodieOptionConfig.scala
b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/HoodieOptionConfig.scala
index cc2d8903a16..46a00480808 100644
---
a/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/HoodieOptionConfig.scala
+++
b/hudi-spark-datasource/hudi-spark-common/src/main/scala/org/apache/spark/sql/hudi/HoodieOptionConfig.scala
@@ -155,7 +155,8 @@ object HoodieOptionConfig {
def mapSqlOptionsToTableConfigs(options: Map[String, String]): Map[String,
String] = {
options.map { case (k, v) =>
if (sqlOptionKeyToTableConfigKey.contains(k)) {
- sqlOptionKeyToTableConfigKey(k) ->
sqlOptionValueToHoodieConfigValue.getOrElse(v, v)
+ // support case-insensitive table type name
+ sqlOptionKeyToTableConfigKey(k) ->
sqlOptionValueToHoodieConfigValue.getOrElse(v.toLowerCase, v)
} else {
k -> v
}
diff --git
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestInsertTable.scala
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestInsertTable.scala
index 21369ea34e0..7ee3626e34b 100644
---
a/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestInsertTable.scala
+++
b/hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestInsertTable.scala
@@ -40,6 +40,58 @@ import java.util.concurrent.CountDownLatch
class TestInsertTable extends HoodieSparkSqlTestBase {
+ test("Test table type name incase-sensitive test") {
+ withRecordType()(withTempDir { tmp =>
+ val targetTable = generateTableName
+ val tablePath = s"${tmp.getCanonicalPath}/$targetTable"
+
+ spark.sql(
+ s"""
+ |create table ${targetTable} (
+ | `id` string,
+ | `name` string,
+ | `dt` bigint,
+ | `day` STRING,
+ | `hour` INT
+ |) using hudi
+ |tblproperties (
+ | 'primaryKey' = 'id',
+ | 'type' = 'MOR',
+ | 'preCombineField'='dt',
+ | 'hoodie.index.type' = 'BUCKET',
+ | 'hoodie.bucket.index.hash.field' = 'id',
+ | 'hoodie.bucket.index.num.buckets'=512
+ | )
+ partitioned by (`day`,`hour`)
+ location '${tablePath}'
+ """.stripMargin)
+
+ spark.sql(
+ s"""
+ |insert into ${targetTable}
+ |select '1' as id, 'aa' as name, 123 as dt, '2024-02-19' as `day`,
10 as `hour`
+ |""".stripMargin)
+
+ spark.sql(
+ s"""
+ |merge into ${targetTable} as target
+ |using (
+ |select '2' as id, 'bb' as name, 456 as dt, '2024-02-19' as `day`,
10 as `hour`
+ |) as source
+ |on target.id = source.id
+ |when matched then update set *
+ |when not matched then insert *
+ |""".stripMargin
+ )
+
+ // check result after insert and merge data into target table
+ checkAnswer(s"select id, name, dt, day, hour from $targetTable limit
10")(
+ Seq("1", "aa", 123, "2024-02-19", 10),
+ Seq("2", "bb", 456, "2024-02-19", 10)
+ )
+ })
+ }
+
test("Test Insert Into with values") {
withRecordType()(withTempDir { tmp =>
val tableName = generateTableName