AngersZhuuuu commented on a change in pull request #30421:
URL: https://github.com/apache/spark/pull/30421#discussion_r583489421
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
##########
@@ -505,10 +505,14 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with
SQLConfHelper with Logg
protected def visitStringConstant(
Review comment:
> does it work for `ADD PARTITION(a=date'xxx')`, etc.?
Supported, and added a test case in the UT.
##########
File path: sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
##########
@@ -4022,6 +4023,46 @@ class SQLQuerySuite extends QueryTest with
SharedSparkSession with AdaptiveSpark
}
}
+ test("SPARK-33474: Support TypeConstructed partition spec value") {
Review comment:
> let's move the test to `SQLInsertTestSuite`
Done
##########
File path:
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
##########
@@ -2488,4 +2488,27 @@ class DDLParserSuite extends AnalysisTest {
testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false)
}
+
+ test("SPARK-33474: Support TypeConstructed partition spec value") {
+ def insertPartitionPlan(part: String): InsertIntoStatement = {
+ InsertIntoStatement(
+ UnresolvedRelation(Seq("t")),
+ Map("part" -> Some(part)),
+ Seq.empty[String],
+ UnresolvedInlineTable(Seq("col1"), Seq(Seq(Literal("a")))),
+ overwrite = false, ifPartitionNotExists = false)
+ }
+
+     val dateTypeSql = "INSERT INTO t PARTITION(part = date'2019-01-02') VALUES('a')"
+ val interval = new CalendarInterval(7, 1, 1000).toString
+     val intervalTypeSql = s"INSERT INTO t PARTITION(part = interval'$interval') VALUES('a')"
+ val timestamp = "2019-01-02 11:11:11"
+     val timestampTypeSql = s"INSERT INTO t PARTITION(part = timestamp'$timestamp') VALUES('a')"
+     val binaryTypeSql = s"INSERT INTO t PARTITION(part = X'537061726B2053514C') VALUES('a')"
Review comment:
> Can we generate the binary of `537061726B2053514C` instead of hardcoding it?
Done
##########
File path:
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
##########
@@ -2488,4 +2488,27 @@ class DDLParserSuite extends AnalysisTest {
testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false)
}
+
+ test("SPARK-33474: Support TypeConstructed partition spec value") {
Review comment:
> nit: `Support typed literals as partition spec values`
Both done
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
##########
@@ -505,10 +505,14 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with
SQLConfHelper with Logg
protected def visitStringConstant(
ctx: ConstantContext,
legacyNullAsString: Boolean): String = withOrigin(ctx) {
- ctx match {
- case _: NullLiteralContext if !legacyNullAsString => null
- case s: StringLiteralContext => createString(s)
- case o => o.getText
+ expression(ctx) match {
+ case Literal(null, _) if !legacyNullAsString => null
+ case l @ Literal(null, _) => l.toString
+ case l: Literal =>
Review comment:
> can we add a TODO here? For v2 commands, we will cast the string back
to its actual value, which is a waste and can be improved in the future.
Done
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]