Github user cloud-fan commented on a diff in the pull request:
https://github.com/apache/spark/pull/20999#discussion_r225076055
--- Diff:
sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala ---
@@ -523,35 +523,114 @@ case class AlterTableRenamePartitionCommand(
*/
case class AlterTableDropPartitionCommand(
tableName: TableIdentifier,
- specs: Seq[TablePartitionSpec],
+ partitionsFilters: Seq[Seq[Expression]],
ifExists: Boolean,
purge: Boolean,
retainData: Boolean)
extends RunnableCommand {
override def run(sparkSession: SparkSession): Seq[Row] = {
val catalog = sparkSession.sessionState.catalog
+ val timeZone =
Option(sparkSession.sessionState.conf.sessionLocalTimeZone)
val table = catalog.getTableMetadata(tableName)
+ val partitionColumns = table.partitionColumnNames
+ val partitionAttributes = table.partitionSchema.toAttributes.map(a =>
a.name -> a).toMap
DDLUtils.verifyAlterTableType(catalog, table, isView = false)
DDLUtils.verifyPartitionProviderIsHive(sparkSession, table, "ALTER
TABLE DROP PARTITION")
- val normalizedSpecs = specs.map { spec =>
- PartitioningUtils.normalizePartitionSpec(
- spec,
- table.partitionColumnNames,
- table.identifier.quotedString,
- sparkSession.sessionState.conf.resolver)
+ val resolvedSpecs = partitionsFilters.flatMap { filtersSpec =>
+ if (hasComplexFilters(filtersSpec)) {
+ generatePartitionSpec(filtersSpec,
+ partitionColumns,
+ partitionAttributes,
+ table.identifier,
+ catalog,
+ sparkSession.sessionState.conf.resolver,
+ timeZone,
+ ifExists)
+ } else {
+ val partitionSpec = filtersSpec.map {
+ case EqualTo(key: Attribute, Literal(value, StringType)) =>
+ key.name -> value.toString
+ }.toMap
+ PartitioningUtils.normalizePartitionSpec(
+ partitionSpec,
+ partitionColumns,
+ table.identifier.quotedString,
+ sparkSession.sessionState.conf.resolver) :: Nil
+ }
}
catalog.dropPartitions(
--- End diff --
Does Hive have an API to drop partitions with a predicate? I think the
current approach is very inefficient with non-equal partition predicates.
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]