dusantism-db commented on code in PR #53541:
URL: https://github.com/apache/spark/pull/53541#discussion_r2635802216


##########
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala:
##########
@@ -124,6 +124,38 @@ class JDBCTableCatalogSuite extends QueryTest with SharedSparkSession {
     }
   }
 
+  test("drop table with dependent view fails with classified exception") {
+    // Test that classifyException in dropTable properly wraps SQLException
+    // when dropping a table fails due to dependent objects
+    try {
+      withConnection { conn =>
+        // Create a table
+        conn.prepareStatement(
+          """CREATE TABLE "test"."base_table" (id INTEGER, name VARCHAR(50))""").executeUpdate()
+        // Create a view that depends on it
+        conn.prepareStatement(
+            """CREATE VIEW "test"."dependent_view" AS SELECT * FROM "test"."base_table"""")
+          .executeUpdate()
+      }
+
+      // Try to drop the base table while view exists - H2 should prevent this
+      val e = intercept[SparkRuntimeException] {
+        sql("DROP TABLE h2.test.base_table")
+      }
+
+      // Verify the exception is properly classified with FAILED_JDBC.DROP_TABLE
+      assert(e.getErrorClass == "FAILED_JDBC.DROP_TABLE")
+      assert(e.getMessage.contains("Failed JDBC"))
+      assert(e.getMessage.contains("Drop the table"))
+      assert(e.getMessage.contains("base_table"))

Review Comment:
   It's better to use `checkError` here.



##########
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalogSuite.scala:
##########
@@ -124,6 +124,38 @@ class JDBCTableCatalogSuite extends QueryTest with SharedSparkSession {
     }
   }
 
+  test("drop table with dependent view fails with classified exception") {
+    // Test that classifyException in dropTable properly wraps SQLException
+    // when dropping a table fails due to dependent objects
+    try {
+      withConnection { conn =>
+        // Create a table
+        conn.prepareStatement(
+          """CREATE TABLE "test"."base_table" (id INTEGER, name VARCHAR(50))""").executeUpdate()
+        // Create a view that depends on it
+        conn.prepareStatement(
+            """CREATE VIEW "test"."dependent_view" AS SELECT * FROM "test"."base_table"""")
+          .executeUpdate()
+      }
+
+      // Try to drop the base table while view exists - H2 should prevent this
+      val e = intercept[SparkRuntimeException] {
+        sql("DROP TABLE h2.test.base_table")
+      }
+
+      // Verify the exception is properly classified with FAILED_JDBC.DROP_TABLE
+      assert(e.getErrorClass == "FAILED_JDBC.DROP_TABLE")
+      assert(e.getMessage.contains("Failed JDBC"))
+      assert(e.getMessage.contains("Drop the table"))
+      assert(e.getMessage.contains("base_table"))

Review Comment:
   The test looks good other than that.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to