This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 3af2ea973c45 [SPARK-48959][SQL] Make `NoSuchNamespaceException` extend 
`NoSuchDatabaseException` to restore the exception handling
3af2ea973c45 is described below

commit 3af2ea973c458b9bd9818d6af733683fb15fbc19
Author: Ruifeng Zheng <[email protected]>
AuthorDate: Mon Jul 22 22:14:30 2024 +0800

    [SPARK-48959][SQL] Make `NoSuchNamespaceException` extend 
`NoSuchDatabaseException` to restore the exception handling
    
    ### What changes were proposed in this pull request?
    Make `NoSuchNamespaceException` extend `NoSuchDatabaseException`
    
    ### Why are the changes needed?
    1, https://github.com/apache/spark/pull/47276 made many SQL commands throw 
`NoSuchNamespaceException` instead of `NoSuchDatabaseException`. It is more 
than an end-user facing change: it is a breaking change which breaks the 
exception handling in third-party libraries in the eco-system.
    
    2, `NoSuchNamespaceException` and `NoSuchDatabaseException` actually share 
the same error class `SCHEMA_NOT_FOUND`
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    CI
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #47433 from zhengruifeng/make_nons_nodb.
    
    Authored-by: Ruifeng Zheng <[email protected]>
    Signed-off-by: Wenchen Fan <[email protected]>
---
 .../apache/spark/sql/catalyst/analysis/noSuchItemsExceptions.scala    | 4 ++--
 .../scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala  | 4 +---
 2 files changed, 3 insertions(+), 5 deletions(-)

diff --git 
a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/noSuchItemsExceptions.scala
 
b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/noSuchItemsExceptions.scala
index ac22d26ccfd1..8977d0be24d7 100644
--- 
a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/noSuchItemsExceptions.scala
+++ 
b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/noSuchItemsExceptions.scala
@@ -27,7 +27,7 @@ import org.apache.spark.util.ArrayImplicits._
  * Thrown by a catalog when an item cannot be found. The analyzer will rethrow 
the exception
  * as an [[org.apache.spark.sql.AnalysisException]] with the correct position 
information.
  */
-class NoSuchDatabaseException private(
+class NoSuchDatabaseException private[analysis](
     message: String,
     cause: Option[Throwable],
     errorClass: Option[String],
@@ -60,7 +60,7 @@ class NoSuchNamespaceException private(
     cause: Option[Throwable],
     errorClass: Option[String],
     messageParameters: Map[String, String])
-  extends AnalysisException(
+  extends NoSuchDatabaseException(
     message,
     cause = cause,
     errorClass = errorClass,
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala
index 645ed9e6bb0c..283c550c4556 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Util.scala
@@ -25,7 +25,7 @@ import scala.jdk.CollectionConverters._
 import org.apache.spark.SparkIllegalArgumentException
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.CurrentUserContext
-import org.apache.spark.sql.catalyst.analysis.{AsOfTimestamp, AsOfVersion, 
NamedRelation, NoSuchDatabaseException, NoSuchFunctionException, 
NoSuchNamespaceException, NoSuchTableException, TimeTravelSpec}
+import org.apache.spark.sql.catalyst.analysis.{AsOfTimestamp, AsOfVersion, 
NamedRelation, NoSuchDatabaseException, NoSuchFunctionException, 
NoSuchTableException, TimeTravelSpec}
 import org.apache.spark.sql.catalyst.catalog.ClusterBySpec
 import org.apache.spark.sql.catalyst.expressions.Literal
 import org.apache.spark.sql.catalyst.plans.logical.{SerdeInfo, TableSpec}
@@ -409,7 +409,6 @@ private[sql] object CatalogV2Util {
     } catch {
       case _: NoSuchTableException => None
       case _: NoSuchDatabaseException => None
-      case _: NoSuchNamespaceException => None
     }
 
   def getTable(
@@ -434,7 +433,6 @@ private[sql] object CatalogV2Util {
     } catch {
       case _: NoSuchFunctionException => None
       case _: NoSuchDatabaseException => None
-      case _: NoSuchNamespaceException => None
     }
   }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to