cloud-fan commented on code in PR #40908: URL: https://github.com/apache/spark/pull/40908#discussion_r1219760457
########## sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala: ########## @@ -123,6 +127,77 @@ trait SQLInsertTestSuite extends QueryTest with SQLTestUtils { } } + test("insert with column list - by name") { + withTable("t1") { + val cols = Seq("c1", "c2", "c3") + val df = Seq((3, 2, 1)).toDF(cols.reverse: _*) + createTable("t1", cols, Seq("int", "int", "int")) + processInsert("t1", df, overwrite = false, byName = true) + verifyTable("t1", df.selectExpr(cols: _*)) + } + } + + test("insert with column list - by name + partitioned table") { + val cols = Seq("c1", "c2", "c3", "c4") + val df = Seq((4, 3, 2, 1)).toDF(cols.reverse: _*) + withTable("t1") { + createTable("t1", cols, Seq("int", "int", "int", "int"), cols.takeRight(2)) + processInsert("t1", df, overwrite = false, byName = true) + verifyTable("t1", df.selectExpr(cols: _*)) + } + + withTable("t1") { + createTable("t1", cols, Seq("int", "int", "int", "int"), cols.takeRight(2)) + processInsert("t1", df.selectExpr("c2", "c1", "c4"), + partitionExprs = Seq("c3=3", "c4"), overwrite = false, byName = true) + verifyTable("t1", df.selectExpr(cols: _*)) + } + + withTable("t1") { + createTable("t1", cols, Seq("int", "int", "int", "int"), cols.takeRight(2)) + processInsert("t1", df.selectExpr("c2", "c1"), + partitionExprs = Seq("c3=3", "c4=4"), overwrite = false, byName = true) + verifyTable("t1", df.selectExpr(cols: _*)) + } + } + + test("insert with column list - by name unsupported case") { + withTable("t1") { + withView("tmp_view") { + val cols = Seq("c1", "c2", "c3") + Seq((3, 2, 1)).toDF(cols.reverse: _*).createTempView("tmp_view") + createTable("t1", cols, Seq("int", "int", "int")) + checkError( + exception = intercept[ParseException]( + sql("INSERT OVERWRITE TABLE t1 BY NAME SELECT * FROM tmp_view") + ), + errorClass = "PARSE_SYNTAX_ERROR", + parameters = Map( + "error" -> "'BY'", + "hint" -> "") + ) + } + } + + withTable("t1") { + withView("tmp_view") { + val cols = Seq("c1", "c2", "c3") + 
Seq((3, 2, 1)).toDF(cols.reverse: _*).createTempView("tmp_view") + createTable("t1", cols, Seq("int", "int", "int")) + checkError( + exception = intercept[ParseException]( + sql("INSERT INTO TABLE t1 BY NAME (c1,c2) SELECT * FROM tmp_view") Review Comment: ditto -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org