[ https://issues.apache.org/jira/browse/SPARK-36955?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Liu Shuo updated SPARK-36955:
-----------------------------
    Description: 
1. Create a Hive table:
{code:java}
CREATE TABLE tbl
STORED AS PARQUET
AS SELECT
 '1st' AS key,
 '2nd' AS value,
 map('name', 'Bob') as name;
{code}
2. Alter the complex (map) column's comment:
{code:java}
ALTER TABLE tbl CHANGE COLUMN name name MAP<STRING, STRING> COMMENT 'temp comment';
{code}
It fails with the following error (note that the two reported types differ only in the map's value nullability, false vs. true; see the sketch after step 3):
{code:java}
org.apache.spark.sql.AnalysisException: ALTER TABLE CHANGE COLUMN is not supported for changing column 'name' with type 'MapType(StringType,StringType,false)' to 'name' with type 'MapType(StringType,StringType,true)'
  at org.apache.spark.sql.execution.command.AlterTableChangeColumnCommand.run(ddl.scala:349)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:75)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:73)
  at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:84)
  at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:97)
  at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
  at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
  at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
  at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
  at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:97)
  at org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:93)
  at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:481)
  at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:82)
  at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:481)
  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
  at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
  at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
  at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:457)
  at org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:93)
  at org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:80)
  at org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:78)
  at org.apache.spark.sql.Dataset.<init>(Dataset.scala:220)
  at org.apache.spark.sql.Dataset$.$anonfun$ofRows$1(Dataset.scala:92)
  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
  at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:89)
  at org.apache.spark.sql.hive.test.TestHiveSparkSession.$anonfun$sql$1(TestHive.scala:240)
  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:775)
  at org.apache.spark.sql.hive.test.TestHiveSparkSession.sql(TestHive.scala:238)
  at org.apache.spark.sql.test.SQLTestUtilsBase.$anonfun$sql$1(SQLTestUtils.scala:231)
  at org.apache.spark.sql.hive.StatisticsSuite.$anonfun$new$161(StatisticsSuite.scala:1546)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
  at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1462)
  at org.apache.spark.sql.test.SQLTestUtilsBase.withTable(SQLTestUtils.scala:305)
  at org.apache.spark.sql.test.SQLTestUtilsBase.withTable$(SQLTestUtils.scala:303)
  at org.apache.spark.sql.StatisticsCollectionTestBase.withTable(StatisticsCollectionTestBase.scala:42)
  at org.apache.spark.sql.hive.StatisticsSuite.$anonfun$new$160(StatisticsSuite.scala:1535)
  at org.apache.spark.sql.hive.StatisticsSuite.$anonfun$new$160$adapted(StatisticsSuite.scala:1534)
  at org.apache.spark.sql.catalyst.plans.SQLHelper.withTempPath(SQLHelper.scala:69)
  at org.apache.spark.sql.catalyst.plans.SQLHelper.withTempPath$(SQLHelper.scala:66)
  at org.apache.spark.sql.QueryTest.withTempPath(QueryTest.scala:34)
  at org.apache.spark.sql.hive.StatisticsSuite.$anonfun$new$159(StatisticsSuite.scala:1534)
  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
  at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
  at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
  at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
  at org.scalatest.Transformer.apply(Transformer.scala:22)
  at org.scalatest.Transformer.apply(Transformer.scala:20)
  at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)
  at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:190)
  at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)
  at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)
  at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
  at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)
  at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)
  at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:62)
  at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
  at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
  at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:62)
  at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)
  at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
  at scala.collection.immutable.List.foreach(List.scala:431)
  at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
  at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
  at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
  at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:269)
  at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:268)
  at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1563)
  at org.scalatest.Suite.run(Suite.scala:1112)
  at org.scalatest.Suite.run$(Suite.scala:1094)
  at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1563)
  at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:273)
  at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
  at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:273)
  at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:272)
  at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:62)
  at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
  at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
  at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
  at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:62)
  at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
  at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1322)
  at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1316)
  at scala.collection.immutable.List.foreach(List.scala:431)
  at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1316)
  at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:993)
  at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:971)
  at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1482)
  at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:971)
  at org.scalatest.tools.Runner$.run(Runner.scala:798)
  at org.scalatest.tools.Runner.run(Runner.scala)
  at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:133)
  at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:27)
{code}
3. If we instead change a simple column's comment, e.g. column `key`, it succeeds:
{code:java}
ALTER TABLE tbl CHANGE COLUMN key key STRING COMMENT 'temp comment';
{code}
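From the error message, the two types differ only in the map's value nullability: the stored schema has MapType(StringType,StringType,false) (the map('name', 'Bob') literal in the CTAS has non-null values), while the MAP<STRING, STRING> written in the ALTER statement is parsed with valueContainsNull = true. The sketch below is illustration only, not Spark's actual code; the ignoreNullability helper is hypothetical. It shows that strict equality on the two MapType values fails, whereas a comparison that ignores nullability flags would treat them as the same type and so could let a comment-only change through.
{code:scala}
// Minimal standalone sketch (assumes only spark-catalyst on the classpath).
// It is NOT the check used by AlterTableChangeColumnCommand; it only illustrates
// why a strict DataType comparison rejects this comment-only change.
import org.apache.spark.sql.types._

object NullabilityMismatchSketch {
  // Hypothetical helper: recursively relax nullability flags so that two types
  // differing only in nullability compare as equal.
  def ignoreNullability(dt: DataType): DataType = dt match {
    case ArrayType(et, _)   => ArrayType(ignoreNullability(et), containsNull = true)
    case MapType(kt, vt, _) => MapType(ignoreNullability(kt), ignoreNullability(vt), valueContainsNull = true)
    case StructType(fields) =>
      StructType(fields.map(f => f.copy(dataType = ignoreNullability(f.dataType), nullable = true)))
    case other              => other
  }

  def main(args: Array[String]): Unit = {
    // Type recorded for column `name` by the CTAS (per the error message).
    val stored = MapType(StringType, StringType, valueContainsNull = false)
    // Type obtained from parsing MAP<STRING, STRING> in the ALTER statement.
    val parsed = MapType(StringType, StringType, valueContainsNull = true)

    println(stored == parsed)                                       // false -> command rejected
    println(ignoreNullability(stored) == ignoreNullability(parsed)) // true  -> comment-only change could be allowed
  }
}
{code}
If the type comparison ignored nullability for a metadata-only change like this, altering the comment of a complex column would succeed, matching the behavior already seen for simple columns in step 3.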
 

> When we change a complex column's comment on a Hive table, it should succeed but fails
> ----------------------------------------------------------------------------------------
>
>                 Key: SPARK-36955
>                 URL: https://issues.apache.org/jira/browse/SPARK-36955
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 3.3.0
>            Reporter: Liu Shuo
>            Priority: Major
>


