This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch branch-4.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-4.0 by this push:
     new 48897923677f [SPARK-51913][SQL] JDBCTableCatalog#loadTable should throw no such table error correctly
48897923677f is described below

commit 48897923677fee9ff21505ce5e8d37efd736c79c
Author: Wenchen Fan <wenc...@databricks.com>
AuthorDate: Sun Apr 27 10:07:47 2025 +0800

    [SPARK-51913][SQL] JDBCTableCatalog#loadTable should throw no such table error correctly
    
    ### What changes were proposed in this pull request?
    
    As of today, when people use JDBC v2 and try to query a nonexistent table, they get a `FAILED_JDBC.LOAD_TABLE` error. This is confusing, as the real problem is that the table does not exist.
    
    This PR improves the error message by adding a table existence check and throwing a no-such-table error when the table does not exist.
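    
    The fix follows the usual probe-then-load pattern for catalogs. In sketch form (simplified from the diff below, which also keeps the existing JDBC option handling):
    
    ```scala
    override def loadTable(ident: Identifier): Table = {
      // Probe the remote database first, so a missing table surfaces as a
      // no-such-table error rather than a generic FAILED_JDBC.LOAD_TABLE.
      if (!tableExists(ident)) {
        throw QueryCompilationErrors.noSuchTableError(ident)
      }
      // ... proceed with the normal JDBC load path ...
    }
    ```
    
    Note that this adds one extra existence probe against the remote database per load, which is the cost of the clearer error.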
    
    ### Why are the changes needed?
    
    Better error messages.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, users will see a clearer error when a JDBC table does not exist.
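    
    For example, against a hypothetical JDBC v2 catalog registered as `h2` (the catalog and namespace names here are only for illustration):
    
    ```scala
    // Before this change: fails with FAILED_JDBC.LOAD_TABLE.
    // After this change: fails with a no-such-table error
    // (the TABLE_OR_VIEW_NOT_FOUND error class).
    spark.sql("SELECT * FROM h2.test.not_existing_table").show()
    ```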
    
    ### How was this patch tested?
    
    Updated existing tests.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    no
    
    Closes #50706 from cloud-fan/jdbc.
    
    Lead-authored-by: Wenchen Fan <wenc...@databricks.com>
    Co-authored-by: Kent Yao <y...@apache.org>
    Signed-off-by: Kent Yao <y...@apache.org>
    (cherry picked from commit 276ca7ffb19b90119a2ee7be31b8fc1994f942eb)
    Signed-off-by: Kent Yao <y...@apache.org>
---
 .../org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala  | 30 ++++++++++++++++++----
 .../datasources/v2/jdbc/JDBCTableCatalog.scala     |  5 +++-
 2 files changed, 29 insertions(+), 6 deletions(-)

diff --git a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala
index f97b6a6eb183..d45fc766b4dd 100644
--- a/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala
+++ b/connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/V2JDBCTest.scala
@@ -141,7 +141,11 @@ private[v2] trait V2JDBCTest extends SharedSparkSession with DockerIntegrationFu
     val e = intercept[AnalysisException] {
       sql(s"ALTER TABLE $catalogName.not_existing_table ADD COLUMNS (C4 
STRING)")
     }
-    checkErrorFailedJDBC(e, "FAILED_JDBC.LOAD_TABLE", "not_existing_table")
+    checkErrorTableNotFound(
+      e,
+      s"`$catalogName`.`not_existing_table`",
+      ExpectedContext(
+        s"$catalogName.not_existing_table", 12, 11 + 
s"$catalogName.not_existing_table".length))
   }
 
   test("SPARK-33034: ALTER TABLE ... drop column") {
@@ -170,7 +174,11 @@ private[v2] trait V2JDBCTest extends SharedSparkSession with DockerIntegrationFu
     val e = intercept[AnalysisException] {
       sql(s"ALTER TABLE $catalogName.not_existing_table DROP COLUMN C1")
     }
-    checkErrorFailedJDBC(e, "FAILED_JDBC.LOAD_TABLE", "not_existing_table")
+    checkErrorTableNotFound(
+      e,
+      s"`$catalogName`.`not_existing_table`",
+      ExpectedContext(
+        s"$catalogName.not_existing_table", 12, 11 + 
s"$catalogName.not_existing_table".length))
   }
 
   test("SPARK-33034: ALTER TABLE ... update column type") {
@@ -193,7 +201,11 @@ private[v2] trait V2JDBCTest extends SharedSparkSession with DockerIntegrationFu
     val e = intercept[AnalysisException] {
       sql(s"ALTER TABLE $catalogName.not_existing_table ALTER COLUMN id TYPE 
DOUBLE")
     }
-    checkErrorFailedJDBC(e, "FAILED_JDBC.LOAD_TABLE", "not_existing_table")
+    checkErrorTableNotFound(
+      e,
+      s"`$catalogName`.`not_existing_table`",
+      ExpectedContext(
+        s"$catalogName.not_existing_table", 12, 11 + 
s"$catalogName.not_existing_table".length))
   }
 
   test("SPARK-33034: ALTER TABLE ... rename column") {
@@ -221,7 +233,11 @@ private[v2] trait V2JDBCTest extends SharedSparkSession with DockerIntegrationFu
     val e = intercept[AnalysisException] {
       sql(s"ALTER TABLE $catalogName.not_existing_table RENAME COLUMN ID TO C")
     }
-    checkErrorFailedJDBC(e, "FAILED_JDBC.LOAD_TABLE", "not_existing_table")
+    checkErrorTableNotFound(
+      e,
+      s"`$catalogName`.`not_existing_table`",
+      ExpectedContext(
+        s"$catalogName.not_existing_table", 12, 11 + 
s"$catalogName.not_existing_table".length))
   }
 
   test("SPARK-33034: ALTER TABLE ... update column nullability") {
@@ -232,7 +248,11 @@ private[v2] trait V2JDBCTest extends SharedSparkSession with DockerIntegrationFu
     val e = intercept[AnalysisException] {
       sql(s"ALTER TABLE $catalogName.not_existing_table ALTER COLUMN ID DROP 
NOT NULL")
     }
-    checkErrorFailedJDBC(e, "FAILED_JDBC.LOAD_TABLE", "not_existing_table")
+    checkErrorTableNotFound(
+      e,
+      s"`$catalogName`.`not_existing_table`",
+      ExpectedContext(
+        s"$catalogName.not_existing_table", 12, 11 + 
s"$catalogName.not_existing_table".length))
   }
 
   test("CREATE TABLE with table comment") {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala
index 715112e35296..b46223db6abb 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala
@@ -130,7 +130,10 @@ class JDBCTableCatalog extends TableCatalog
   }
 
   override def loadTable(ident: Identifier): Table = {
-    checkNamespace(ident.namespace())
+    if (!tableExists(ident)) {
+      throw QueryCompilationErrors.noSuchTableError(ident)
+    }
+
     val optionsWithTableName = new JDBCOptions(
      options.parameters + (JDBCOptions.JDBC_TABLE_NAME -> getTableName(ident)))
     JdbcUtils.classifyException(
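
A note on the `ExpectedContext` offsets in the updated tests: the fragment `$catalogName.not_existing_table` starts at character offset 12 because each statement begins with `ALTER TABLE ` (12 characters), and the stop offset is inclusive, hence `11 + fragment.length`. A standalone sketch of the arithmetic (the catalog name is hypothetical, and this is not tied to the test harness):

```scala
val catalogName = "h2" // hypothetical catalog name, for illustration only
val stmt = s"ALTER TABLE $catalogName.not_existing_table DROP COLUMN C1"
val fragment = s"$catalogName.not_existing_table"
val start = stmt.indexOf(fragment)     // 12, since "ALTER TABLE " is 12 characters
val stop = start + fragment.length - 1 // inclusive end, i.e. 11 + fragment.length
```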

