gengliangwang commented on code in PR #40996:
URL: https://github.com/apache/spark/pull/40996#discussion_r1182929279


##########
sql/core/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveDefaultColumnsSuite.scala:
##########
@@ -91,6 +96,120 @@ class ResolveDefaultColumnsSuite extends QueryTest with SharedSparkSession {
         checkAnswer(spark.table("demos.test_ts"),
           sql("select null, timestamp'2023-01-01'"))
       }
+      withTable("demos.test_ts") {
+        // If the provided default value is a literal of a wider type than the target column, but
+        // the literal value fits within the narrower type, just coerce it for convenience.
+        sql("create table demos.test_ts (id int default 42L) using parquet")
+        sql("insert into demos.test_ts(id) values (default)")
+        checkAnswer(spark.table("demos.test_ts"),
+          sql("select 42"))
+        // If the provided default value is a literal of a completely different type than the target
+        // column such that no coercion is possible, throw an error.
+        assert(intercept[AnalysisException](
+          sql("create table demos.test_ts_other (id int default 'abc') using parquet"))
+        .getMessage.contains("statement provided a value of incompatible type"))
+      }
+    }
+  }
+
+  test("SPARK-43313: Add missing default values for MERGE INSERT actions") {
+    // Create a new relation type that defines the 'customSchemaForInserts' method.
+    // This implementation drops the last table column as it represents an internal pseudocolumn.
+    case class TableWithCustomInsertSchema(output: Seq[Attribute], numMetadataColumns: Int)

Review Comment:
   Let's move the case class out of the test method.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to