amaliujia commented on code in PR #41461:
URL: https://github.com/apache/spark/pull/41461#discussion_r1220757472


##########
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##########
@@ -122,16 +122,26 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
    */
   @throws[AnalysisException]("database does not exist")
   override def listTables(dbName: String): Dataset[Table] = {
-    // `dbName` could be either a single database name (behavior in Spark 3.3 
and prior) or
-    // a qualified namespace with catalog name. We assume it's a single 
database name
-    // and check if we can find it in the sessionCatalog. If so we list tables 
under
-    // that database. Otherwise we will resolve the catalog/namespace and list 
tables there.
-    val namespace = if (sessionCatalog.databaseExists(dbName)) {
-      Seq(CatalogManager.SESSION_CATALOG_NAME, dbName)
-    } else {
-      parseIdent(dbName)
-    }
+    val namespace = wrapNamespace(dbName)
     val plan = ShowTables(UnresolvedNamespace(namespace), None)
+    makeTablesDataset(plan)
+  }
+
+  /**
+   * Returns a list of tables/views in the specified database (namespace)
+   * which name match the specify pattern (the name can be qualified with 
catalog).
+   * This includes all temporary views.
+   *
+   * @since 3.5.0
+   */
+  @throws[AnalysisException]("database does not exist")
+  override def listTables(dbName: String, pattern: String): Dataset[Table] = {

Review Comment:
   I think for the impl we can have one implementation of listTables.
   
   ```
   def listTablesInternal(dbName: String, pattern: Option[String])
   
   override def listTables(dbName: String) = listTablesInternal(dbName, None)
   override def listTables(dbName: String, pattern: String) = 
listTablesInternal(dbName, Some(pattern))
   
   
   ``` 



##########
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##########
@@ -122,16 +122,26 @@ class CatalogImpl(sparkSession: SparkSession) extends 
Catalog {
    */
   @throws[AnalysisException]("database does not exist")
   override def listTables(dbName: String): Dataset[Table] = {
-    // `dbName` could be either a single database name (behavior in Spark 3.3 
and prior) or
-    // a qualified namespace with catalog name. We assume it's a single 
database name
-    // and check if we can find it in the sessionCatalog. If so we list tables 
under
-    // that database. Otherwise we will resolve the catalog/namespace and list 
tables there.
-    val namespace = if (sessionCatalog.databaseExists(dbName)) {
-      Seq(CatalogManager.SESSION_CATALOG_NAME, dbName)
-    } else {
-      parseIdent(dbName)
-    }
+    val namespace = wrapNamespace(dbName)
     val plan = ShowTables(UnresolvedNamespace(namespace), None)
+    makeTablesDataset(plan)
+  }
+
+  /**
+   * Returns a list of tables/views in the specified database (namespace)
+   * which name match the specify pattern (the name can be qualified with 
catalog).
+   * This includes all temporary views.
+   *
+   * @since 3.5.0
+   */
+  @throws[AnalysisException]("database does not exist")
+  override def listTables(dbName: String, pattern: String): Dataset[Table] = {

Review Comment:
   I think for the impl we can have one implementation of listTables.
   
   ```
   def listTablesInternal(dbName: String, pattern: Option[String])
   
   override def listTables(dbName: String)  = listTablesInternal(dbName, None)
   override def listTables(dbName: String, pattern: String) = 
listTablesInternal(dbName, Some(pattern))
   
   
   ``` 



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to