leesf commented on code in PR #11568:
URL: https://github.com/apache/hudi/pull/11568#discussion_r1667793800


##########
hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/dml/TestInsertTable.scala:
##########
@@ -2826,4 +2826,163 @@ class TestInsertTable extends HoodieSparkSqlTestBase {
     spark.sessionState.conf.unsetConf("hoodie.datasource.insert.dup.policy")
     spark.sessionState.conf.unsetConf("hoodie.datasource.write.operation")
   }
+
+  // Verifies INSERT INTO with an explicit, reordered column list ("special cols"),
+  // covering a non-partitioned table, a partitioned table, and both static and
+  // dynamic partition clauses. Only runs on Spark 3.2+.
+  test("Test insert into with special cols") {
+    withTempDir { tmp =>
+      if (HoodieSparkUtils.gteqSpark3_2) {
+        val targetTableA = generateTableName
+        val tablePathA = s"${tmp.getCanonicalPath}/$targetTableA"
+        // Spark 3.4 turns on DEFAULT-column handling; disable it here so the
+        // user-specified-column-list code path under test is exercised.
+        if (HoodieSparkUtils.isSpark3_4) {
+          spark.sql("set spark.sql.defaultColumn.enabled = false")
+        }
+
+        // Non-partitioned MOR table, used as an insert target and later as a
+        // source for insert-select into the partitioned table.
+        spark.sql(
+          s"""
+             |create table if not exists $targetTableA (
+             | id bigint,
+             | name string,
+             | price double
+             |) using hudi
+             |tblproperties (
+             | primaryKey = 'id',
+             | type = 'mor',
+             | preCombineField = 'name'
+             |) location '$tablePathA'
+             |""".stripMargin)
+
+        // Column list order (id, price, name) differs from schema order (id, name, price).
+        spark.sql(s"insert into $targetTableA (id, price, name) values (1, 
12.1, 'aaa')")
+
+        // Values must map to the columns named in the insert list, not positionally.
+        checkAnswer(s"select id, price, name from $targetTableA")(
+          Seq(1, 12.1, "aaa")
+        )
+
+        // Partitioned MOR table; partition columns are (day, hour).
+        val targetTableB = generateTableName
+        val tablePathB = s"${tmp.getCanonicalPath}/$targetTableB"
+
+        spark.sql(
+          s"""
+             |create table if not exists $targetTableB (
+             | id bigint,
+             | name string,
+             | price double,
+             | day string,
+             | hour string
+             |) using hudi
+             |tblproperties (
+             | primaryKey = 'id',
+             | type = 'mor',
+             | preCombineField = 'name'
+             |) partitioned by (day, hour)
+             |location '$tablePathB'
+             |""".stripMargin)
+
+        // Explicit column list with partition columns interleaved among data columns.
+        spark.sql(s"insert into $targetTableB (id, day, price, name, hour) " +
+          s"values (2, '01', 12.2, 'bbb', '02')")
+
+        // Same column list, fed from a SELECT over table A instead of VALUES.
+        spark.sql(s"insert into $targetTableB (id, day, price, name, hour) " +
+          s"select id, '01' as dt, price, name, '03' as hour from 
$targetTableA")
+
+        // Static 'day' partition with dynamic 'hour', plus an explicit column list.
+        spark.sql(s"insert into $targetTableB partition(day='02', hour) (id, 
hour, price, name) " +
+          s"values (3, '01', 12.3, 'ccc')")
+
+        // Fully static partition spec plus an explicit data-column list.
+        spark.sql(s"insert into $targetTableB partition(day='02', hour='02') 
(id, price, name) " +
+          s"values (4, 12.4, 'ddd')")
+
+        // All four inserted rows should be present, with values mapped by column name.
+        checkAnswer(s"select id, price, name, day, hour from $targetTableB")(
+          Seq(2, 12.2, "bbb", "01", "02"),
+          Seq(1, 12.1, "aaa", "01", "03"),
+          Seq(3, 12.3, "ccc", "02", "01"),
+          Seq(4, 12.4, "ddd", "02", "02")
+        )
+
+        // With Spark 3.4 default-column support re-enabled, an explicit column
+        // list is rejected; expect the documented error message.
+        if (HoodieSparkUtils.isSpark3_4) {
+          spark.sql("set spark.sql.defaultColumn.enabled = true")
+          checkExceptionContain(s"insert into $targetTableB (id, day, price, 
name, hour) " +
+            s"select id, '01' as dt, price, name, '03' as hour from 
$targetTableA")(
+            "hudi not support specified cols when enable default columns")
+        }
+      }
+    }
+  }
+
+  test("Test insert overwrite with special cols") {

Review Comment:
   ditto



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to