cxzl25 commented on a change in pull request #29316:
URL: https://github.com/apache/spark/pull/29316#discussion_r486313196



##########
File path: sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
##########
@@ -402,6 +403,22 @@ case class PreprocessTableInsertion(conf: SQLConf) extends Rule[LogicalPlan] {
          s"including ${staticPartCols.size} partition column(s) having constant value(s).")
     }
 
+    val partitionsTrackedByCatalog = conf.manageFilesourcePartitions &&
+      catalogTable.isDefined &&
+      catalogTable.get.partitionColumnNames.nonEmpty &&
+      catalogTable.get.tracksPartitionsInCatalog
+    // check static partition
+    if (partitionsTrackedByCatalog &&
+      normalizedPartSpec.nonEmpty &&
+      staticPartCols.size == partColNames.size) {

Review comment:
   Hive rejects an empty static partition value, while Spark silently writes the row into the `__HIVE_DEFAULT_PARTITION__` partition:
   ```
   hive> insert overwrite table t1 partition (d='',h) select 1,'';
   ERROR ql.Driver (SessionState.java:printError(956)) - FAILED: IllegalArgumentException Can not create a Path from an empty string
   java.lang.IllegalArgumentException: Can not create a Path from an empty string
           at org.apache.hadoop.fs.Path.checkPathArg(Path.java:127)
           at org.apache.hadoop.fs.Path.<init>(Path.java:135)
           at org.apache.hadoop.fs.Path.<init>(Path.java:94)
           at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genFileSinkPlan(SemanticAnalyzer.java:6070)
   ```

   ```
   spark-sql> insert overwrite table t1 partition (d='',h) select 1,'';
   [main] INFO InsertIntoHiveTable: Partition `x`.`t1` {d=__HIVE_DEFAULT_PARTITION__, h=__HIVE_DEFAULT_PARTITION__} stats: [numFiles=1, numRows=0, totalSize=212]
   ```
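
   For illustration only, a minimal self-contained Scala sketch of the kind of up-front check that would reject an empty static partition value (matching Hive's behavior above). `StaticPartitionCheck` and `validate` are hypothetical names invented here, not code from this PR:
   ```scala
   // Hypothetical sketch, not the PR's implementation: fail fast when a static
   // partition column is assigned an empty value, instead of silently writing
   // the row into the __HIVE_DEFAULT_PARTITION__ partition.
   object StaticPartitionCheck {
     def validate(staticPartSpec: Map[String, String]): Unit = {
       staticPartSpec.foreach { case (col, value) =>
         if (value == null || value.isEmpty) {
           throw new IllegalArgumentException(
             s"Static partition column '$col' has an empty value; " +
               "cannot create a partition path from an empty string.")
         }
       }
     }

     def main(args: Array[String]): Unit = {
       // d is the static partition column from the example above (d=''); h is dynamic.
       validate(Map("d" -> ""))  // throws IllegalArgumentException
     }
   }
   ```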
   



