cloud-fan commented on a change in pull request #33113:
URL: https://github.com/apache/spark/pull/33113#discussion_r663721254



##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
##########
@@ -1088,8 +1058,51 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog {
     }
 
     alter match {
-      case AlterTableRenameColumn(table: ResolvedTable, ResolvedFieldName(name), newName) =>
-        checkColumnNotExists(name.init :+ newName, table.schema)
+      case AlterTableRenameColumn(table: ResolvedTable, col: ResolvedFieldName, newName) =>
+        checkColumnNotExists(col.path :+ newName, table.schema)
+      case a @ AlterTableAlterColumn(table: ResolvedTable, col: ResolvedFieldName, _, _, _, _) =>
+        val fieldName = col.name.quoted
+        if (a.dataType.isDefined) {
+          val field = CharVarcharUtils.getRawType(col.field.metadata)
+            .map(dt => col.field.copy(dataType = dt))
+            .getOrElse(col.field)
+          val newDataType = a.dataType.get
+          newDataType match {

Review comment:
       Actually, this is OK. In the analyzer, we will set the `newDataType` to None if it's the same as the existing data type in the table.
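
       For context, a minimal sketch (not the actual Spark code) of the normalization this comment describes: the analyzer drops a requested data type that is identical to the column's existing type, so downstream checks only ever see a real type change. `AlterColumnSpec` and `normalizeDataType` are hypothetical names used purely for illustration.

```scala
import org.apache.spark.sql.types.{DataType, IntegerType, StructField}

// Hypothetical, simplified stand-in for the data-type part of an ALTER COLUMN command.
case class AlterColumnSpec(newDataType: Option[DataType])

// Sketch of the normalization described above: if the requested type equals the column's
// existing type, clear it (None) so later checks treat the change as a no-op.
def normalizeDataType(spec: AlterColumnSpec, existing: StructField): AlterColumnSpec =
  spec.copy(newDataType = spec.newDataType.filterNot(_ == existing.dataType))

// Example: altering an INT column to INT yields None, i.e. nothing left to validate.
val normalized = normalizeDataType(AlterColumnSpec(Some(IntegerType)), StructField("i", IntegerType))
assert(normalized.newDataType.isEmpty)
```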

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
##########
@@ -3527,13 +3527,35 @@ class Analyzer(override val catalogManager: CatalogManager)
   * Rule to mostly resolve, normalize and rewrite column names based on case sensitivity
    * for alter table commands.
    */
-  object ResolveFieldNames extends Rule[LogicalPlan] {
+  object ResolveAlterTableCommands extends Rule[LogicalPlan] {
     def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
       case a: AlterTableCommand if a.table.resolved =>
-        a.transformExpressions {
+        val table = a.table.asInstanceOf[ResolvedTable]
+        val transformed = a.transformExpressions {
           case u: UnresolvedFieldName =>
-            val table = a.table.asInstanceOf[ResolvedTable]
-            resolveFieldNames(table.schema, u.name).map(ResolvedFieldName(_)).getOrElse(u)
+            resolveFieldNames(table.schema, u.name).getOrElse(u)
+          case u: UnresolvedFieldPosition => u.position match {
+            case after: After =>
+              resolveFieldNames(table.schema, u.fieldName.init :+ after.column())

Review comment:
       https://github.com/apache/spark/pull/33213
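
       For readers following the diff, a minimal sketch of the kind of case-(in)sensitive field lookup that `resolveFieldNames` performs when normalizing column names and `After` positions. `resolveFieldPath` is a hypothetical helper written for illustration under simplified assumptions; the real rule also handles error reporting and more complex schemas.

```scala
import org.apache.spark.sql.types.{StructField, StructType}

// Sketch only: resolve a (possibly nested) field-name path against a schema and return
// the path as it is spelled in the schema, using a pluggable name comparison that
// mimics how the analyzer honours spark.sql.caseSensitive.
def resolveFieldPath(
    schema: StructType,
    path: Seq[String],
    resolver: (String, String) => Boolean = _.equalsIgnoreCase(_)): Option[Seq[String]] = {
  path match {
    case Seq() => Some(Nil)
    case head +: rest =>
      schema.fields.find(f => resolver(f.name, head)).flatMap { field =>
        field.dataType match {
          case nested: StructType => resolveFieldPath(nested, rest, resolver).map(field.name +: _)
          case _ if rest.isEmpty => Some(Seq(field.name))
          case _ => None // leaf column reached but more path components remain
        }
      }
  }
}
```

       For example, given a schema `struct<point: struct<X: int>>`, `resolveFieldPath(schema, Seq("POINT", "x"))` returns `Some(Seq("point", "X"))` under the default case-insensitive resolver, i.e. the spelling stored in the schema, which is the normalized form that positions such as `After` need to refer to.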




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
