Github user gatorsmile commented on a diff in the pull request:
https://github.com/apache/spark/pull/19841#discussion_r154575693
--- Diff: sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala ---
@@ -104,14 +105,61 @@ case class InsertIntoHiveTable(
     val partitionColumns = fileSinkConf.getTableInfo.getProperties.getProperty("partition_columns")
     val partitionColumnNames = Option(partitionColumns).map(_.split("/")).getOrElse(Array.empty)
-    // By this time, the partition map must match the table's partition columns
-    if (partitionColumnNames.toSet != partition.keySet) {
-      throw new SparkException(
-        s"""Requested partitioning does not match the ${table.identifier.table} table:
-           |Requested partitions: ${partition.keys.mkString(",")}
-           |Table partitions: ${table.partitionColumnNames.mkString(",")}""".stripMargin)
+    try {
+      // By this time, the partition map must match the table's partition columns
+      if (partitionColumnNames.toSet != partition.keySet) {
+        throw new SparkException(
+          s"""Requested partitioning does not match the ${table.identifier.table} table:
+             |Requested partitions: ${partition.keys.mkString(",")}
+             |Table partitions: ${table.partitionColumnNames.mkString(",")}""".stripMargin)
+      }
+
+      validatePartitionSpec(hadoopConf, numDynamicPartitions, numStaticPartitions,
+        partitionSpec, partitionColumnNames)
+
+      validateBucketSpec(hadoopConf)
+
+      val partitionAttributes = partitionColumnNames.takeRight(numDynamicPartitions).map { name =>
+        query.resolve(name :: Nil, sparkSession.sessionState.analyzer.resolver).getOrElse {
+          throw new AnalysisException(
+            s"Unable to resolve $name given [${query.output.map(_.name).mkString(", ")}]")
+        }.asInstanceOf[Attribute]
+      }
+
--- End diff ---
```Scala
try {
```
should start from this line, right?

Also, no need to create `validatePartitionSpec` and `validateBucketSpec` in this PR. We want to minimize the code changes.
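To make the suggestion concrete, here is a rough sketch of the shape I have in mind. This is only an illustration: the validation bodies are placeholder comments since those checks belong to this PR, and the `finally` clause is just my guess at how the `try` is eventually closed.

```Scala
// Keep the pre-existing check as-is, outside of any try block,
// so these lines stay identical to master and the diff stays small.
if (partitionColumnNames.toSet != partition.keySet) {
  throw new SparkException(
    s"""Requested partitioning does not match the ${table.identifier.table} table:
       |Requested partitions: ${partition.keys.mkString(",")}
       |Table partitions: ${table.partitionColumnNames.mkString(",")}""".stripMargin)
}

// Inline the new validation logic here instead of extracting
// validatePartitionSpec / validateBucketSpec helpers.
// (bodies omitted: whatever partition-spec and bucket-spec checks
// this PR introduces)

val partitionAttributes = partitionColumnNames.takeRight(numDynamicPartitions).map { name =>
  query.resolve(name :: Nil, sparkSession.sessionState.analyzer.resolver).getOrElse {
    throw new AnalysisException(
      s"Unable to resolve $name given [${query.output.map(_.name).mkString(", ")}]")
  }.asInstanceOf[Attribute]
}

try {
  // Start the try here, wrapping only the code that actually needs
  // cleanup on failure, i.e. the write/commit logic that follows.
} finally {
  // cleanup added by this PR (assumed; could equally be a catch)
}
```

That way the untouched lines remain byte-for-byte identical to master and the reviewable diff is limited to the genuinely new code.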