MaxGekk commented on a change in pull request #31405:
URL: https://github.com/apache/spark/pull/31405#discussion_r568665526



##########
File path: sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddPartitionSuiteBase.scala
##########
@@ -181,4 +181,37 @@ trait AlterTableAddPartitionSuiteBase extends QueryTest with DDLCommandTestUtils
       checkPartitions(t, Map("id" -> "1"), Map("id" -> "2"))
     }
   }
+
+  test("SPARK-34304: adding partitions to views is not allowed") {
+    withNamespaceAndTable("ns", "tbl") { t =>
+      sql(s"CREATE TABLE $t (id INT, part INT) $defaultUsing PARTITIONED BY 
(part)")
+      def checkViewAltering(createViewCmd: String, alterCmd: String): Unit = {
+        sql(createViewCmd)
+        val errMsg = intercept[AnalysisException] {
+          sql(alterCmd)
+        }.getMessage
+        assert(errMsg.contains("'ALTER TABLE ... ADD PARTITION ...' expects a table"))
+        checkPartitions(t) // no partitions
+      }
+
+      withView("v0") {

Review comment:
       > ... it can be tested with 3 view types automatically
   
   1. I think I can rewrite the test to check all 3 view types automatically as well, e.g. via a helper like `withAllViews(view) { ... }` (see the sketch after this list).
   2. The test here already runs on 3 catalogs, also automatically, as you know.
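
   For reference, a minimal sketch of what such a `withAllViews` helper could look like. `withAllViews` is hypothetical here, not existing test infra; the view kinds, the `SELECT 0 AS id` body, and the cleanup strategy are all assumptions, and a `sql(...)` method is assumed in scope as in `SQLTestUtils`:
   ```scala
   // Hypothetical helper: runs `body` once per view kind -- temporary,
   // global temporary, and persistent -- and drops each view afterwards.
   def withAllViews(viewName: String)(body: String => Unit): Unit = {
     Seq(
       (s"CREATE TEMPORARY VIEW $viewName AS SELECT 0 AS id", viewName),
       (s"CREATE GLOBAL TEMPORARY VIEW $viewName AS SELECT 0 AS id",
         s"global_temp.$viewName"),
       (s"CREATE VIEW $viewName AS SELECT 0 AS id", viewName)
     ).foreach { case (createCmd, qualifiedName) =>
       sql(createCmd)
       try body(qualifiedName) finally sql(s"DROP VIEW IF EXISTS $qualifiedName")
     }
   }
   ```
   The test body would then receive the (possibly qualified) view name and run the `ALTER TABLE ... ADD PARTITION` check against it.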
   
   From my understanding, `SQLViewTestSuite` should focus on view functionality in general, while the unified DS tests focus on specific commands. I would prefer not to pollute `SQLViewTestSuite` with command-specific DDL tests.
   
   Another benefit of having the test here is that we can check the entire command's behavior just by running:
   `build/sbt "test:testOnly *AlterTableAddPartitionSuite"`

##########
File path: sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
##########
@@ -570,7 +567,6 @@ case class AlterTableDropPartitionCommand(
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val catalog = sparkSession.sessionState.catalog
     val table = catalog.getTableMetadata(tableName)
-    DDLUtils.verifyAlterTableType(catalog, table, isView = false)

Review comment:
       It is used in:
   1. AlterTableSetPropertiesCommand
   ```
 ALTER TABLE table1 SET TBLPROPERTIES ('key1' = 'val1', 'key2' = 'val2', ...);
    ALTER VIEW view1 SET TBLPROPERTIES ('key1' = 'val1', 'key2' = 'val2', ...);
   ```
   2. AlterTableUnsetPropertiesCommand
   ```
    ALTER TABLE table1 UNSET TBLPROPERTIES [IF EXISTS] ('key1', 'key2', ...);
    ALTER VIEW view1 UNSET TBLPROPERTIES [IF EXISTS] ('key1', 'key2', ...);
   ```
   3. AlterTableChangeColumnCommand, see the PR description about the migration to the new resolution framework
   4. AlterTableSetLocationCommand, but we can remove it from that command since https://github.com/apache/spark/pull/31414 has been merged already. Let me do that.
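
   For context, here is an approximate sketch of the guard that `DDLUtils.verifyAlterTableType` performs; this is reconstructed from memory, not the verbatim body from ddl.scala, and the exact temp-view check method name is an assumption. The idea is to reject ALTER TABLE commands on views and ALTER VIEW commands on tables, skipping temporary views:
   ```scala
   import org.apache.spark.sql.AnalysisException
   import org.apache.spark.sql.catalyst.catalog.{CatalogTable, CatalogTableType, SessionCatalog}

   // Approximate sketch of the guard, not the verbatim Spark source.
   def verifyAlterTableType(
       catalog: SessionCatalog,
       tableMetadata: CatalogTable,
       isView: Boolean): Unit = {
     // The temp-view check method name varies across Spark versions
     // (isTemporaryTable / isTempView); isTemporaryTable is assumed here.
     if (!catalog.isTemporaryTable(tableMetadata.identifier)) {
       tableMetadata.tableType match {
         case CatalogTableType.VIEW if !isView =>
           // e.g. ALTER TABLE ... ADD PARTITION on a view
           throw new AnalysisException(
             "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead.")
         case o if o != CatalogTableType.VIEW && isView =>
           throw new AnalysisException(
             "Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead.")
         case _ => // the command matches the object type: nothing to verify
       }
     }
   }
   ```
   That is why the call can be dropped from `AlterTableDropPartitionCommand` here: the view-vs-table check now happens earlier, in the new resolution framework, rather than inside each command's `run`.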



