ctring commented on code in PR #49559:
URL: https://github.com/apache/spark/pull/49559#discussion_r1924272176
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala:
##########
@@ -1622,60 +1622,84 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog with QueryErrorsB
       case RenameColumn(table: ResolvedTable, col: ResolvedFieldName, newName) =>
         checkColumnNotExists("rename", col.path :+ newName, table.schema)
-      case a @ AlterColumn(table: ResolvedTable, col: ResolvedFieldName, _, _, _, _, _) =>
-        val fieldName = col.name.quoted
-        if (a.dataType.isDefined) {
-          val field = CharVarcharUtils.getRawType(col.field.metadata)
-            .map(dt => col.field.copy(dataType = dt))
-            .getOrElse(col.field)
-          val newDataType = a.dataType.get
-          newDataType match {
-            case _: StructType => alter.failAnalysis(
-              "CANNOT_UPDATE_FIELD.STRUCT_TYPE",
-              Map("table" -> toSQLId(table.name), "fieldName" -> toSQLId(fieldName)))
-            case _: MapType => alter.failAnalysis(
-              "CANNOT_UPDATE_FIELD.MAP_TYPE",
-              Map("table" -> toSQLId(table.name), "fieldName" -> toSQLId(fieldName)))
-            case _: ArrayType => alter.failAnalysis(
-              "CANNOT_UPDATE_FIELD.ARRAY_TYPE",
-              Map("table" -> toSQLId(table.name), "fieldName" -> toSQLId(fieldName)))
-            case u: UserDefinedType[_] => alter.failAnalysis(
-              "CANNOT_UPDATE_FIELD.USER_DEFINED_TYPE",
-              Map(
-                "table" -> toSQLId(table.name),
-                "fieldName" -> toSQLId(fieldName),
-                "udtSql" -> toSQLType(u)))
-            case _: CalendarIntervalType | _: AnsiIntervalType => alter.failAnalysis(
-              "CANNOT_UPDATE_FIELD.INTERVAL_TYPE",
-              Map("table" -> toSQLId(table.name), "fieldName" -> toSQLId(fieldName)))
-            case _ => // update is okay
-          }
-
-          // We don't need to handle nested types here which shall fail before.
-          def canAlterColumnType(from: DataType, to: DataType): Boolean = (from, to) match {
-            case (CharType(l1), CharType(l2)) => l1 == l2
-            case (CharType(l1), VarcharType(l2)) => l1 <= l2
-            case (VarcharType(l1), VarcharType(l2)) => l1 <= l2
-            case _ => Cast.canUpCast(from, to)
-          }
-          if (!canAlterColumnType(field.dataType, newDataType)) {
+      case AlterColumns(table: ResolvedTable, columns, specs) =>
+        val groupedColumns = columns.groupBy(_.name)
+        groupedColumns.collect {
+          case (name, occurrences) if occurrences.length > 1 =>
             alter.failAnalysis(
-              errorClass = "NOT_SUPPORTED_CHANGE_COLUMN",
+              errorClass = "NOT_SUPPORTED_CHANGE_SAME_COLUMN",
               messageParameters = Map(
                 "table" -> toSQLId(table.name),
-                "originName" -> toSQLId(fieldName),
-                "originType" -> toSQLType(field.dataType),
-                "newName" -> toSQLId(fieldName),
-                "newType" -> toSQLType(newDataType)))
-          }
+                "fieldName" -> toSQLId(name)))
         }
-        if (a.nullable.isDefined) {
-          if (!a.nullable.get && col.field.nullable) {
+        groupedColumns.keys.foreach { name =>
+          val child = groupedColumns.keys.find(child => child != name && child.startsWith(name))
Review Comment:
`name` and `child` are `Seq`s of the name parts, so `startsWith` here is matching on a name-part prefix.
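
As an illustration, a minimal sketch (with hypothetical column names, not taken from this PR) of how `Seq.startsWith` behaves on name parts:

```scala
// Hypothetical multi-part column names, modeled as Seq[String] of name parts.
val name  = Seq("point", "x")
val child = Seq("point", "x", "z")
val other = Seq("pointer")

// Seq.startsWith compares whole elements, so it tests for a name-part prefix,
// not a string prefix.
child.startsWith(name) // true:  `point`.`x`.`z` is nested under `point`.`x`
other.startsWith(name) // false: "pointer" only shares a string prefix with "point"
```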
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]