MaxGekk commented on a change in pull request #31421:
URL: https://github.com/apache/spark/pull/31421#discussion_r568030062



##########
File path: sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
##########
@@ -81,6 +82,18 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
         Row("p1=__HIVE_DEFAULT_PARTITION__"))
     }
   }
+
+  test("SPARK-33591: null as string partition literal value 'null' after 
setting legacy conf") {

Review comment:
       Could you move this test to `org.apache.spark.sql.execution.command.ShowPartitionsSuiteBase`? It should pass for DS v2 too.
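
For illustration only, a minimal sketch (placeholder names; not the PR's code) of why a test placed in the common base trait also covers DS v2: each catalog-specific suite mixes the shared trait in, so it inherits and runs every test registered there.

```scala
// Placeholder suite names; only the shared-trait layout mirrors the discussion above.
import org.scalatest.funsuite.{AnyFunSuite, AnyFunSuiteLike}

// Stands in for org.apache.spark.sql.execution.command.ShowPartitionsSuiteBase:
// tests registered here are inherited by every suite that mixes the trait in.
trait SharedShowPartitionsTests extends AnyFunSuiteLike {
  test("shared SHOW PARTITIONS behaviour") {
    // the real suites would run SQL against their own (v1 or v2) catalog
    assert("p1=null".startsWith("p1="))
  }
}

class V1ShowPartitionsSuite extends AnyFunSuite with SharedShowPartitionsTests  // DS v1 flavour
class V2ShowPartitionsSuite extends AnyFunSuite with SharedShowPartitionsTests  // DS v2 flavour
```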

##########
File path: sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
##########
@@ -81,6 +82,18 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
         Row("p1=__HIVE_DEFAULT_PARTITION__"))
     }
   }
+
+  test("SPARK-33591: null as string partition literal value 'null' after 
setting legacy conf") {
+    val t = "part_table"
+    withSQLConf(SQLConf.LEGACY_PARSE_NULL_PARTITION_SPEC_AS_STRING_LITERAL.key 
-> "true") {
+      withTable(t) {
+        sql(s"CREATE TABLE $t (col1 INT, p1 STRING) $defaultUsing PARTITIONED 
BY (p1)")
+        sql(s"INSERT INTO TABLE $t PARTITION (p1 = null) SELECT 0")
+        checkAnswer(sql(s"SHOW PARTITIONS $t"), Row("p1=null"))
+        checkAnswer(sql(s"SHOW PARTITIONS $t PARTITION (p1 = null)"), 
Row("p1=null"))
+      }
+    }
+  }

Review comment:
       ```suggestion
     test("SPARK-33591: null as string partition literal value 'null' after 
setting legacy conf") {
       
withSQLConf(SQLConf.LEGACY_PARSE_NULL_PARTITION_SPEC_AS_STRING_LITERAL.key -> 
"true") {
         withNamespaceAndTable("ns", "tbl") { t =>
           sql(s"CREATE TABLE $t (col1 INT, p1 STRING) $defaultUsing 
PARTITIONED BY (p1)")
           sql(s"INSERT INTO TABLE $t PARTITION (p1 = null) SELECT 0")
           runShowPartitionsSql(s"SHOW PARTITIONS $t", Row("p1=null") :: Nil)
           runShowPartitionsSql(s"SHOW PARTITIONS $t PARTITION (p1 = null)", 
Row("p1=null") :: Nil)
         }
       }
     }
   ```

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
##########
@@ -474,7 +474,9 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
       ctx: PartitionSpecContext): Map[String, Option[String]] = withOrigin(ctx) {
     val parts = ctx.partitionVal.asScala.map { pVal =>
       val name = pVal.identifier.getText
-      val value = Option(pVal.constant).map(visitStringConstant)
+      val processNullLiteral =
+        !conf.getConf(SQLConf.LEGACY_PARSE_NULL_PARTITION_SPEC_AS_STRING_LITERAL)

Review comment:
       Could you move the config read out of `map()`? There is no need to re-read it for every partition value.
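
For illustration, a minimal sketch of the requested hoisting, reusing the PR's `processNullLiteral` name. How the flag feeds into `visitStringConstant` is not visible in this hunk, so that call shape is an assumption, not the PR's actual code:

```scala
// Sketch only: read the legacy conf once per partition spec, outside the
// closure passed to map(), instead of re-reading it for every partitionVal.
val processNullLiteral =
  !conf.getConf(SQLConf.LEGACY_PARSE_NULL_PARTITION_SPEC_AS_STRING_LITERAL)
val parts = ctx.partitionVal.asScala.map { pVal =>
  val name = pVal.identifier.getText
  // assumed shape: the hoisted flag is passed into the string-constant conversion
  val value = Option(pVal.constant).map(visitStringConstant(_, processNullLiteral))
  name -> value
}
// ... remainder of the method unchanged
```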




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
