codope commented on code in PR #9724:
URL: https://github.com/apache/hudi/pull/9724#discussion_r1326995566
##########
hudi-spark-datasource/hudi-spark3.2plus-common/src/main/scala/org/apache/spark/sql/hudi/command/AlterTableCommand.scala:
##########
@@ -102,14 +102,17 @@ case class AlterTableCommand(table: CatalogTable,
changes: Seq[TableChange], cha
SchemaChangeUtils.applyTableChanges2Schema(oldSchema, addChange)
}
- def applyDeleteAction2Schema(sparkSession: SparkSession, oldSchema:
InternalSchema, deleteChanges: Seq[DeleteColumn]): InternalSchema = {
+ private def applyDeleteAction2Schema(sparkSession: SparkSession, oldSchema:
InternalSchema, deleteChanges: Seq[DeleteColumn]): InternalSchema = {
val deleteChange = TableChanges.ColumnDeleteChange.get(oldSchema)
deleteChanges.foreach { c =>
val originalColName = c.fieldNames().mkString(".")
checkSchemaChange(Seq(originalColName), table)
deleteChange.deleteColumn(originalColName)
}
- SchemaChangeUtils.applyTableChanges2Schema(oldSchema,
deleteChange).setSchemaId(oldSchema.getMaxColumnId)
Review Comment:
This is the line where `schemaId` was being incorrectly set.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]