zhangyue19921010 commented on code in PR #8714:
URL: https://github.com/apache/hudi/pull/8714#discussion_r1198708847


##########
hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/avro/TestAvroSerDe.scala:
##########
@@ -20,11 +20,12 @@ package org.apache.spark.sql.avro
 import org.apache.avro.generic.GenericData
 import org.apache.hudi.SparkAdapterSupport
 import org.apache.hudi.avro.model.{HoodieMetadataColumnStats, IntWrapper}
+import org.apache.spark.internal.Logging
 import org.apache.spark.sql.avro.SchemaConverters.SchemaType
 import org.junit.jupiter.api.Assertions.assertEquals
 import org.junit.jupiter.api.Test
 
-class TestAvroSerDe extends SparkAdapterSupport {
+class TestAvroSerDe extends SparkAdapterSupport with Logging {

Review Comment:
   Removed



##########
.github/workflows/bot.yml:
##########
@@ -63,6 +63,10 @@ jobs:
             sparkProfile: "spark3.1"
             sparkModules: "hudi-spark-datasource/hudi-spark3.1.x"
 
+          - scalaProfile: "scala-2.12"

Review Comment:
   Sure. The base image for this new bundle validation is not ready yet. We will
add this validation as a next step.



##########
hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/TestMergeIntoTable.scala:
##########
@@ -114,58 +114,65 @@ class TestMergeIntoTable extends HoodieSparkSqlTestBase 
with ScalaAssertionSuppo
     })
   }
 
-  test("Test MergeInto with more than once update actions") {
-    withRecordType()(withTempDir {tmp =>
-      val targetTable = generateTableName
-      spark.sql(
-        s"""
-           |create table ${targetTable} (
-           |  id int,
-           |  name string,
-           |  data int,
-           |  country string,
-           |  ts bigint
-           |) using hudi
-           |tblproperties (
-           |  type = 'cow',
-           |  primaryKey = 'id',
-           |  preCombineField = 'ts'
-           | )
-           |partitioned by (country)
-           |location '${tmp.getCanonicalPath}/$targetTable'
-           |""".stripMargin)
-      spark.sql(
-        s"""
-           |merge into ${targetTable} as target
-           |using (
-           |select 1 as id, 'lb' as name, 6 as data, 'shu' as country, 
1646643193 as ts
-           |) source
-           |on source.id = target.id
-           |when matched then
-           |update set *
-           |when not matched then
-           |insert *
-           |""".stripMargin)
-      spark.sql(
-        s"""
-           |merge into ${targetTable} as target
-           |using (
-           |select 1 as id, 'lb' as name, 5 as data, 'shu' as country, 
1646643196 as ts
-           |) source
-           |on source.id = target.id
-           |when matched and source.data > target.data then
-           |update set target.data = source.data, target.ts = source.ts
-           |when matched and source.data = 5 then
-           |update set target.data = source.data, target.ts = source.ts
-           |when not matched then
-           |insert *
-           |""".stripMargin)
-
-      checkAnswer(s"select id, name, data, country, ts from $targetTable")(
-        Seq(1, "lb", 5, "shu", 1646643196L)
-      )
+  /**
+   * Spark 3.0.x didn't support 'UPDATE and DELETE can appear at most once 
in MATCHED clauses in a MERGE statement'
+   * details: 
org.apache.spark.sql.catalyst.parser.AstBuilder#visitMergeIntoTable

Review Comment:
   Changed.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to