AngersZhuuuu commented on a change in pull request #20999:
URL: https://github.com/apache/spark/pull/20999#discussion_r778581745



##########
File path: 
sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
##########
@@ -524,35 +524,114 @@ case class AlterTableRenamePartitionCommand(
  */
 case class AlterTableDropPartitionCommand(
     tableName: TableIdentifier,
-    specs: Seq[TablePartitionSpec],
+    partitionsFilters: Seq[Seq[Expression]],
     ifExists: Boolean,
     purge: Boolean,
     retainData: Boolean)
   extends RunnableCommand {
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val catalog = sparkSession.sessionState.catalog
+    val timeZone = Option(sparkSession.sessionState.conf.sessionLocalTimeZone)
     val table = catalog.getTableMetadata(tableName)
+    val partitionColumns = table.partitionColumnNames
+    val partitionAttributes = table.partitionSchema.toAttributes.map(a => 
a.name -> a).toMap
     DDLUtils.verifyAlterTableType(catalog, table, isView = false)
     DDLUtils.verifyPartitionProviderIsHive(sparkSession, table, "ALTER TABLE 
DROP PARTITION")
 
-    val normalizedSpecs = specs.map { spec =>
-      PartitioningUtils.normalizePartitionSpec(
-        spec,
-        table.partitionColumnNames,
-        table.identifier.quotedString,
-        sparkSession.sessionState.conf.resolver)
+    val resolvedSpecs = partitionsFilters.flatMap { filtersSpec =>
+      if (hasComplexFilters(filtersSpec)) {
+        generatePartitionSpec(filtersSpec,
+          partitionColumns,
+          partitionAttributes,
+          table.identifier,
+          catalog,
+          sparkSession.sessionState.conf.resolver,
+          timeZone,
+          ifExists)
+      } else {
+        val partitionSpec = filtersSpec.map {
+          case EqualTo(key: Attribute, Literal(value, StringType)) =>
+            key.name -> value.toString
+        }.toMap
+        PartitioningUtils.normalizePartitionSpec(
+          partitionSpec,
+          partitionColumns,
+          table.identifier.quotedString,
+          sparkSession.sessionState.conf.resolver) :: Nil
+      }
     }
 

Review comment:
       We should validate `resolvedSpecs` here and throw a clear error message 
if the total resolved spec list is empty.

##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
##########
@@ -391,6 +391,35 @@ case class OuterReference(e: NamedExpression)
   override def newInstance(): NamedExpression = OuterReference(e.newInstance())
 }
 
+/**
+ * A place holder used to hold the name of the partition attributes specified 
when running commands
+ * involving partitions, eg. ALTER TABLE ... DROP PARTITIONS.
+ */
+case class PartitioningAttribute(
+    name: String,
+    override val exprId: ExprId = NamedExpression.newExprId)
+  extends Attribute with Unevaluable {
+  // We need a dataType to be used during analysis for resolving the 
expressions (see
+  // checkInputDataTypes). The String type is used because all the literals in 
PARTITION operations
+  // are parsed as strings and eventually casted later.
+  override def dataType: DataType = StringType
+  override def nullable: Boolean = false
+
+  override def qualifier: Seq[String] = throw new UnsupportedOperationException
+  override def withNullability(newNullability: Boolean): Attribute =
+    throw new UnsupportedOperationException
+  override def newInstance(): Attribute = throw new 
UnsupportedOperationException
+  override def withQualifier(newQualifier: Seq[String]): Attribute =
+    throw new UnsupportedOperationException
+  override def withName(newName: String): Attribute = throw new 
UnsupportedOperationException
+  override def withMetadata(newMetadata: Metadata): Attribute =
+    throw new UnsupportedOperationException
+
+  override lazy val canonicalized: Expression = this.copy(exprId = ExprId(0))
+
+  override def withExprId(newExprId: ExprId): Attribute = throw new 
UnsupportedOperationException

Review comment:
       override def sql: String = name
   
   Adding this override can make the error message clearer.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to