yaooqinn commented on a change in pull request #31320:
URL: https://github.com/apache/spark/pull/31320#discussion_r563734770
##########
File path: sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/InsertIntoHadoopFsRelationCommand.scala
##########
@@ -227,10 +227,8 @@ case class InsertIntoHadoopFsRelationCommand(
val staticPartitionPrefix = if (staticPartitions.nonEmpty) {
"/" + partitionColumns.flatMap { p =>
staticPartitions.get(p.name) match {
- case Some(value) =>
- Some(escapePathName(p.name) + "=" + escapePathName(value))
- case None =>
- None
+ case Some(value) => Some(getPartitionPathString(p.name, value))
Review comment:
thanks, and the `value` binding can be reduced away too
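For illustration, a minimal self-contained sketch of that reduction; the `Column` case class, the sample data, and the local `getPartitionPathString` are stand-ins for the real definitions in Spark's `ExternalCatalogUtils` and `InsertIntoHadoopFsRelationCommand`:
```scala
object PartitionPathSketch {
  // Stand-in for the real partition column type; only the name matters here.
  case class Column(name: String)

  // Stand-in for ExternalCatalogUtils.getPartitionPathString, which also
  // escapes the name and value for safe use in a path.
  def getPartitionPathString(col: String, value: String): String =
    s"$col=$value"

  def main(args: Array[String]): Unit = {
    val partitionColumns = Seq(Column("p1"), Column("p2"))
    val staticPartitions = Map("p1" -> "a")

    // The Option pattern match collapses into .map, so `value` is never
    // named: `case Some(value) => Some(...)` / `case None => None` becomes
    // a single .map call, and flatMap drops the None entries.
    val staticPartitionPrefix = "/" + partitionColumns.flatMap { p =>
      staticPartitions.get(p.name).map(getPartitionPathString(p.name, _))
    }.mkString("/")

    println(staticPartitionPrefix) // prints: /p1=a
  }
}
```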
##########
File path: sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
##########
@@ -200,6 +200,14 @@ trait SQLInsertTestSuite extends QueryTest with SQLTestUtils {
e1.getMessage.contains(v2Msg))
}
}
+
+ test("SPARK-34223: static partition with null raise NPE") {
Review comment:
```
SPARK-34223: static partition with null raise NPE *** FAILED *** (19 milliseconds)
[info] org.apache.spark.sql.AnalysisException: Cannot translate expression to source filter: null
[info] at org.apache.spark.sql.execution.datasources.v2.V2Writes$$anonfun$apply$1.$anonfun$applyOrElse$1(V2Writes.scala:50)
[info] at scala.collection.immutable.List.flatMap(List.scala:366)
[info] at org.apache.spark.sql.execution.datasources.v2.V2Writes$$anonfun$apply$1.applyOrElse(V2Writes.scala:47)
[info] at org.apache.spark.sql.execution.datasources.v2.V2Writes$$anonfun$apply$1.applyOrElse(V2Writes.scala:39)
[info] at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$1(TreeNode.scala:317)
[info] at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:73)
[info] at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:317)
[info] at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29)
[info] at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:171)
[info] at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:169)
[info] at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29)
[info] at org.apache.spark.sql.execution.datasources.v2.V2Writes$.apply(V2Writes.scala:39)
[info] at org.apache.spark.sql.execution.datasources.v2.V2Writes$.apply(V2Writes.scala:35)
[info] at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$2(RuleExecutor.scala:216)
[info] at scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126)
[info] at scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122)
[info] at scala.collection.immutable.List.foldLeft(List.scala:91)
[info] at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:213)
[info] at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1$adapted(RuleExecutor.scala:205)
[info] at scala.collection.immutable.List.foreach(List.scala:431)
[info] at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:205)
[info] at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$executeAndTrack$1(RuleExecutor.scala:183)
[info] at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:88)
[info] at org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:183)
[info] at org.apache.spark.sql.execution.QueryExecution.$anonfun$optimizedPlan$1(QueryExecution.scala:87)
[info] at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
[info] at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:143)
[info] at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772)
[info] at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:143)
[info] at org.apache.spark.sql.execution.QueryExecution.optimizedPlan$lzycompute(QueryExecution.scala:84)
[info] at org.apache.spark.sql.execution.QueryExecution.optimizedPlan(QueryExecution.scala:84)
[info] at org.apache.spark.sql.execution.QueryExecution.assertOptimized(QueryExecution.scala:95)
[info] at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:113)
[info] at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:110)
[info] at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:101)
[info] at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
[info] at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
[info] at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772)
[info] at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
[info] at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3697)
[info] at org.apache.spark.sql.Dataset.<init>(Dataset.scala:228)
[info] at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:99)
[info] at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772)
[info] at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
[info] at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:615)
[info] at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772)
[info] at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:610)
[info] at org.apache.spark.sql.test.SQLTestUtilsBase.$anonfun$sql$1(SQLTestUtils.scala:231)
[info] at org.apache.spark.sql.SQLInsertTestSuite.$anonfun$$init$$42(SQLInsertTestSuite.scala:207)
[info] at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
[info] at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1437)
[info] at org.apache.spark.sql.test.SQLTestUtilsBase.withTable(SQLTestUtils.scala:305)
[info] at org.apache.spark.sql.test.SQLTestUtilsBase.withTable$(SQLTestUtils.scala:303)
[info] at org.apache.spark.sql.DSV2SQLInsertTestSuite.withTable(SQLInsertTestSuite.scala:220)
[info] at org.apache.spark.sql.SQLInsertTestSuite.$anonfun$$init$$41(SQLInsertTestSuite.scala:205)
[info] at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
[info] at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
[info] at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
[info] at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
[info] at org.scalatest.Transformer.apply(Transformer.scala:22)
[info] at org.scalatest.Transformer.apply(Transformer.scala:20)
[info] at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:190)
[info] at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:176)
[info] at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:188)
[info] at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:200)
[info] at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
[info] at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:200)
[info] at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:182)
[info] at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:61)
[info] at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
[info] at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
[info] at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:61)
[info] at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:233)
[info] at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
[info] at scala.collection.immutable.List.foreach(List.scala:431)
[info] at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
[info] at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
[info] at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
```
I noticed this error too; putting it here for easy reference.
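For reference, a hypothetical standalone reproduction of the statement shape that hits this path; the table name, schema, and session setup here are my own, and the PR's actual test may differ:
```scala
import org.apache.spark.sql.SparkSession

// Sketch: a null value in a static partition spec. On the DSv2 write path
// this surfaced as the AnalysisException above while V2Writes translated
// the partition predicate to a source filter; on the file-source path it
// was the NPE named in the test title.
object Spark34223Repro {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("SPARK-34223 repro")
      .getOrCreate()
    try {
      spark.sql("CREATE TABLE t (i STRING, c STRING) USING parquet PARTITIONED BY (c)")
      spark.sql("INSERT OVERWRITE t PARTITION (c = null) VALUES ('1')")
      spark.table("t").show() // expected after the fix: row ('1', null)
    } finally {
      spark.sql("DROP TABLE IF EXISTS t")
      spark.stop()
    }
  }
}
```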
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]