This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 5c0762b  [SPARK-36448][SQL] Exceptions in NoSuchItemException.scala 
have to be case classes
5c0762b is described below

commit 5c0762b5d2004464c36df353a4d483f8f57eebeb
Author: Yesheng Ma <[email protected]>
AuthorDate: Fri Aug 20 20:16:30 2021 +0800

    [SPARK-36448][SQL] Exceptions in NoSuchItemException.scala have to be case 
classes
    
    ### What changes were proposed in this pull request?
    Change all exceptions in NoSuchItemException.scala to case classes.
    
    ### Why are the changes needed?
    Exceptions in NoSuchItemException.scala are not case classes. This is 
causing issues because Analyzer's executeAndCheck method always calls the 
`copy` method on the exception. However, since these exceptions are not case 
classes, the `copy` call was always delegated to `AnalysisException::copy`, 
which is not the specialized version.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Existing UTs.
    
    Closes #33673 from yeshengm/SPARK-36448.
    
    Authored-by: Yesheng Ma <[email protected]>
    Signed-off-by: Wenchen Fan <[email protected]>
---
 .../catalyst/analysis/NoSuchItemException.scala    | 34 +++++++++++++++-------
 .../execution/command/ShowTablesSuiteBase.scala    |  8 -----
 .../sql/execution/command/v1/ShowTablesSuite.scala | 10 +++++++
 .../sql/execution/command/v2/ShowTablesSuite.scala |  8 +++++
 .../datasources/v2/V2SessionCatalogSuite.scala     |  8 ++---
 5 files changed, 45 insertions(+), 23 deletions(-)

diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
index ba5a9c6..9c924e9 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
@@ -29,18 +29,24 @@ import org.apache.spark.sql.types.StructType
  * Thrown by a catalog when an item cannot be found. The analyzer will rethrow 
the exception
  * as an [[org.apache.spark.sql.AnalysisException]] with the correct position 
information.
  */
-class NoSuchDatabaseException(
-    val db: String) extends NoSuchNamespaceException(s"Database '$db' not 
found")
+case class NoSuchDatabaseException(db: String)
+  extends AnalysisException(s"Database '$db' not found")
 
-class NoSuchNamespaceException(message: String, cause: Option[Throwable] = 
None)
+case class NoSuchNamespaceException(
+    override val message: String,
+    override val cause: Option[Throwable] = None)
   extends AnalysisException(message, cause = cause) {
+
   def this(namespace: Array[String]) = {
     this(s"Namespace '${namespace.quoted}' not found")
   }
 }
 
-class NoSuchTableException(message: String, cause: Option[Throwable] = None)
+case class NoSuchTableException(
+    override val message: String,
+    override val cause: Option[Throwable] = None)
   extends AnalysisException(message, cause = cause) {
+
   def this(db: String, table: String) = {
     this(s"Table or view '$table' not found in database '$db'")
   }
@@ -50,7 +56,10 @@ class NoSuchTableException(message: String, cause: 
Option[Throwable] = None)
   }
 }
 
-class NoSuchPartitionException(message: String) extends 
AnalysisException(message) {
+case class NoSuchPartitionException(
+    override val message: String)
+  extends AnalysisException(message) {
+
   def this(db: String, table: String, spec: TablePartitionSpec) = {
     this(s"Partition not found in table '$table' database '$db':\n" + 
spec.mkString("\n"))
   }
@@ -62,12 +71,13 @@ class NoSuchPartitionException(message: String) extends 
AnalysisException(messag
   }
 }
 
-class NoSuchPermanentFunctionException(db: String, func: String)
+case class NoSuchPermanentFunctionException(db: String, func: String)
   extends AnalysisException(s"Function '$func' not found in database '$db'")
 
-class NoSuchFunctionException(
-    msg: String,
-    cause: Option[Throwable]) extends AnalysisException(msg, cause = cause) {
+case class NoSuchFunctionException(
+    override val message: String,
+    override val cause: Option[Throwable])
+  extends AnalysisException(message, cause = cause) {
 
   def this(db: String, func: String, cause: Option[Throwable] = None) = {
     this(s"Undefined function: '$func'. " +
@@ -80,7 +90,9 @@ class NoSuchFunctionException(
   }
 }
 
-class NoSuchPartitionsException(message: String) extends 
AnalysisException(message) {
+case class NoSuchPartitionsException(override val message: String)
+  extends AnalysisException(message) {
+
   def this(db: String, table: String, specs: Seq[TablePartitionSpec]) = {
     this(s"The following partitions not found in table '$table' database 
'$db':\n"
       + specs.mkString("\n===\n"))
@@ -93,5 +105,5 @@ class NoSuchPartitionsException(message: String) extends 
AnalysisException(messa
   }
 }
 
-class NoSuchTempFunctionException(func: String)
+case class NoSuchTempFunctionException(func: String)
   extends AnalysisException(s"Temporary function '$func' not found")
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesSuiteBase.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesSuiteBase.scala
index 0638501..a01adb8 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesSuiteBase.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesSuiteBase.scala
@@ -18,7 +18,6 @@
 package org.apache.spark.sql.execution.command
 
 import org.apache.spark.sql.{QueryTest, Row}
-import org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
 import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
 import org.apache.spark.sql.internal.SQLConf
 
@@ -48,13 +47,6 @@ trait ShowTablesSuiteBase extends QueryTest with 
DDLCommandTestUtils {
     }
   }
 
-  test("show table in a not existing namespace") {
-    val msg = intercept[NoSuchNamespaceException] {
-      runShowTablesSql(s"SHOW TABLES IN $catalog.unknown", Seq())
-    }.getMessage
-    assert(msg.matches("(Database|Namespace) 'unknown' not found"))
-  }
-
   test("show tables with a pattern") {
     withNamespace(s"$catalog.ns1", s"$catalog.ns2") {
       sql(s"CREATE NAMESPACE $catalog.ns1")
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala
index 4efb7c8..f47493b 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowTablesSuite.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.execution.command.v1
 
 import org.apache.spark.sql.{AnalysisException, Row, SaveMode}
+import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
 import org.apache.spark.sql.execution.command
 import org.apache.spark.sql.internal.SQLConf
 
@@ -127,6 +128,15 @@ trait ShowTablesSuiteBase extends 
command.ShowTablesSuiteBase {
       }
     }
   }
+
+
+  test("show table in a not existing namespace") {
+    val msg = intercept[NoSuchDatabaseException] {
+      runShowTablesSql(s"SHOW TABLES IN $catalog.unknown", Seq())
+    }.getMessage
+    assert(msg.matches("(Database|Namespace) 'unknown' not found"))
+  }
+
 }
 
 /**
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowTablesSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowTablesSuite.scala
index 702c174..6dc8a05 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowTablesSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowTablesSuite.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.execution.command.v2
 
 import org.apache.spark.sql.{AnalysisException, Row}
+import org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException
 import org.apache.spark.sql.execution.command
 
 /**
@@ -89,4 +90,11 @@ class ShowTablesSuite extends command.ShowTablesSuiteBase 
with CommandSuiteBase
       assert(errMsg.contains("SHOW TABLE EXTENDED is not supported for v2 
tables"))
     }
   }
+
+  test("show table in a not existing namespace") {
+    val msg = intercept[NoSuchNamespaceException] {
+      runShowTablesSql(s"SHOW TABLES IN $catalog.unknown", Seq())
+    }.getMessage
+    assert(msg.matches("(Database|Namespace) 'unknown' not found"))
+  }
 }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala
index 1a4f084..2e6cfc5 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala
@@ -27,7 +27,7 @@ import org.apache.hadoop.fs.Path
 import org.scalatest.BeforeAndAfter
 
 import org.apache.spark.sql.AnalysisException
-import 
org.apache.spark.sql.catalyst.analysis.{NamespaceAlreadyExistsException, 
NoSuchNamespaceException, NoSuchTableException, TableAlreadyExistsException}
+import 
org.apache.spark.sql.catalyst.analysis.{NamespaceAlreadyExistsException, 
NoSuchDatabaseException, NoSuchNamespaceException, NoSuchTableException, 
TableAlreadyExistsException}
 import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
 import org.apache.spark.sql.connector.catalog.{CatalogV2Util, Identifier, 
NamespaceChange, TableCatalog, TableChange, V1Table}
 import org.apache.spark.sql.test.SharedSparkSession
@@ -825,7 +825,7 @@ class V2SessionCatalogNamespaceSuite extends 
V2SessionCatalogBaseSuite {
   test("loadNamespaceMetadata: fail missing namespace") {
     val catalog = newCatalog()
 
-    val exc = intercept[NoSuchNamespaceException] {
+    val exc = intercept[NoSuchDatabaseException] {
       catalog.loadNamespaceMetadata(testNs)
     }
 
@@ -951,7 +951,7 @@ class V2SessionCatalogNamespaceSuite extends 
V2SessionCatalogBaseSuite {
 
     assert(catalog.namespaceExists(testNs) === false)
 
-    val exc = intercept[NoSuchNamespaceException] {
+    val exc = intercept[NoSuchDatabaseException] {
       catalog.createTable(testIdent, schema, Array.empty, emptyProps)
     }
 
@@ -1068,7 +1068,7 @@ class V2SessionCatalogNamespaceSuite extends 
V2SessionCatalogBaseSuite {
 
     assert(catalog.namespaceExists(testNs) === false)
 
-    val exc = intercept[NoSuchNamespaceException] {
+    val exc = intercept[NoSuchDatabaseException] {
       catalog.alterNamespace(testNs, NamespaceChange.setProperty("property", 
"value"))
     }
 

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to