cloud-fan commented on a change in pull request #31405:
URL: https://github.com/apache/spark/pull/31405#discussion_r568626471



##########
File path: 
sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
##########
@@ -570,7 +567,6 @@ case class AlterTableDropPartitionCommand(
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val catalog = sparkSession.sessionState.catalog
     val table = catalog.getTableMetadata(tableName)
-    DDLUtils.verifyAlterTableType(catalog, table, isView = false)

Review comment:
     Can we remove `DDLUtils.verifyAlterTableType` then? Or is it still used 
in some places?

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddPartitionSuiteBase.scala
##########
@@ -181,4 +181,37 @@ trait AlterTableAddPartitionSuiteBase extends QueryTest 
with DDLCommandTestUtils
       checkPartitions(t, Map("id" -> "1"), Map("id" -> "2"))
     }
   }
+
+  test("SPARK-34304: adding partitions to views is not allowed") {
+    withNamespaceAndTable("ns", "tbl") { t =>
+      sql(s"CREATE TABLE $t (id INT, part INT) $defaultUsing PARTITIONED BY 
(part)")
+      def checkViewAltering(createViewCmd: String, alterCmd: String): Unit = {
+        sql(createViewCmd)
+        val errMsg = intercept[AnalysisException] {
+          sql(alterCmd)
+        }.getMessage
+        assert(errMsg.contains("'ALTER TABLE ... ADD PARTITION ...' expects a 
table"))
+        checkPartitions(t) // no partitions
+      }
+
+      withView("v0") {

Review comment:
     shall we put the test in `SQLViewTestSuite`? Then it can be tested with 
3 view types automatically.

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddPartitionSuiteBase.scala
##########
@@ -181,4 +181,37 @@ trait AlterTableAddPartitionSuiteBase extends QueryTest 
with DDLCommandTestUtils
       checkPartitions(t, Map("id" -> "1"), Map("id" -> "2"))
     }
   }
+
+  test("SPARK-34304: adding partitions to views is not allowed") {
+    withNamespaceAndTable("ns", "tbl") { t =>
+      sql(s"CREATE TABLE $t (id INT, part INT) $defaultUsing PARTITIONED BY 
(part)")
+      def checkViewAltering(createViewCmd: String, alterCmd: String): Unit = {
+        sql(createViewCmd)
+        val errMsg = intercept[AnalysisException] {
+          sql(alterCmd)
+        }.getMessage
+        assert(errMsg.contains("'ALTER TABLE ... ADD PARTITION ...' expects a 
table"))
+        checkPartitions(t) // no partitions
+      }
+
+      withView("v0") {

Review comment:
       Same question to other tests in this PR.

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddPartitionSuiteBase.scala
##########
@@ -181,4 +181,37 @@ trait AlterTableAddPartitionSuiteBase extends QueryTest 
with DDLCommandTestUtils
       checkPartitions(t, Map("id" -> "1"), Map("id" -> "2"))
     }
   }
+
+  test("SPARK-34304: adding partitions to views is not allowed") {
+    withNamespaceAndTable("ns", "tbl") { t =>
+      sql(s"CREATE TABLE $t (id INT, part INT) $defaultUsing PARTITIONED BY 
(part)")
+      def checkViewAltering(createViewCmd: String, alterCmd: String): Unit = {
+        sql(createViewCmd)
+        val errMsg = intercept[AnalysisException] {
+          sql(alterCmd)
+        }.getMessage
+        assert(errMsg.contains("'ALTER TABLE ... ADD PARTITION ...' expects a 
table"))
+        checkPartitions(t) // no partitions
+      }
+
+      withView("v0") {

Review comment:
       But this view check is done by the analyzer, and it doesn't matter what 
the underlying catalog is. Is it a waste to test this with 3 different catalogs?




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to