This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f1e5a136fa79 [SPARK-46393][SQL] Classify exceptions in the JDBC table catalog
f1e5a136fa79 is described below

commit f1e5a136fa79449878bb3bd2cc304c9dde020ce8
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Thu Dec 14 11:01:34 2023 +0300

    [SPARK-46393][SQL] Classify exceptions in the JDBC table catalog
    
    ### What changes were proposed in this pull request?
    In the PR, I propose to handle exceptions from JDBC drivers in the JDBC table catalog, classify them, and convert them to the appropriate Spark exceptions with an error class. This PR covers the following functions where such errors haven't been classified yet (a sketch of the wrapper pattern follows the list):
    - list tables
    - namespace exists
    - list namespaces
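
    For context, the wrapper applied in these code paths follows roughly this shape (a simplified sketch of `JdbcUtils.classifyException`; the exact signature and body in Spark may differ):
    ```
    // Run `f`; on failure, let the dialect translate the raw driver
    // exception into a Spark exception carrying an error class.
    def classifyException[T](message: String, dialect: JdbcDialect)(f: => T): T = {
      try {
        f
      } catch {
        case e: Throwable => throw dialect.classifyException(message, e)
      }
    }
    ```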
    
    ### Why are the changes needed?
    To unify Spark exceptions and to migrate onto the new error framework.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes, if user code expects Spark SQL to pass through raw Java exceptions from JDBC drivers: such exceptions are now converted to Spark exceptions with an error class.
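
    For illustration, a hedged sketch of the behavior change (the `h2` catalog name and schema are assumptions for the example, and the exact error class depends on the dialect's classification):
    ```
    // Hypothetical: code that previously caught the driver's raw
    // java.sql.SQLException now sees a Spark exception with an error class.
    try {
      spark.sql("SHOW TABLES IN h2.missing_schema").collect()
    } catch {
      case e: org.apache.spark.sql.AnalysisException =>
        println(e.getErrorClass)
    }
    ```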
    
    ### How was this patch tested?
    By existing test suites:
    ```
    $ build/sbt "test:testOnly *JDBCV2Suite"
    $ build/sbt "test:testOnly *JDBCTableCatalogSuite"
    ```
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #44335 from MaxGekk/classifyException-JDBCTableCatalog.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../execution/datasources/v2/jdbc/JDBCTableCatalog.scala   | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala
index 0084abb392ef..6c773d4fb1b0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala
@@ -65,8 +65,10 @@ class JDBCTableCatalog extends TableCatalog
     checkNamespace(namespace)
     JdbcUtils.withConnection(options) { conn =>
       val schemaPattern = if (namespace.length == 1) namespace.head else null
-      val rs = conn.getMetaData
-        .getTables(null, schemaPattern, "%", Array("TABLE"))
+      val rs = JdbcUtils.classifyException(
+        s"Failed get tables from: ${namespace.mkString(".")}", dialect) {
+        conn.getMetaData.getTables(null, schemaPattern, "%", Array("TABLE"))
+      }
       new Iterator[Identifier] {
         def hasNext = rs.next()
         def next() = Identifier.of(namespace, rs.getString("TABLE_NAME"))
@@ -179,14 +181,18 @@ class JDBCTableCatalog extends TableCatalog
   override def namespaceExists(namespace: Array[String]): Boolean = namespace match {
     case Array(db) =>
       JdbcUtils.withConnection(options) { conn =>
-        JdbcUtils.schemaExists(conn, options, db)
+        JdbcUtils.classifyException(s"Failed namespace exists: 
${namespace.mkString}", dialect) {
+          JdbcUtils.schemaExists(conn, options, db)
+        }
       }
     case _ => false
   }
 
   override def listNamespaces(): Array[Array[String]] = {
     JdbcUtils.withConnection(options) { conn =>
-      JdbcUtils.listSchemas(conn, options)
+      JdbcUtils.classifyException(s"Failed list namespaces", dialect) {
+        JdbcUtils.listSchemas(conn, options)
+      }
     }
   }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
