beliefer commented on code in PR #44061:
URL: https://github.com/apache/spark/pull/44061#discussion_r1409060815


##########
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##########
@@ -146,38 +147,44 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
     makeTablesDataset(plan)
   }
 
-  private def makeTablesDataset(plan: ShowTables): Dataset[Table] = {
-    val qe = sparkSession.sessionState.executePlan(plan)
-    val catalog = qe.analyzed.collectFirst {
-      case ShowTables(r: ResolvedNamespace, _, _) => r.catalog
-      case _: ShowTablesCommand =>
-        sparkSession.sessionState.catalogManager.v2SessionCatalog
-    }.get
-    val tables = qe.toRdd.collect().map { row =>
-      val tableName = row.getString(1)
-      val namespaceName = row.getString(0)
-      val isTemp = row.getBoolean(2)
+  private[sql] def resolveTable(row: InternalRow, catalogName: String): Option[Table] = {
+    val tableName = row.getString(1)
+    val namespaceName = row.getString(0)
+    val isTemp = row.getBoolean(2)
+    try {
       if (isTemp) {
        // Temp views do not belong to any catalog. We shouldn't prepend the catalog name here.
         val ns = if (namespaceName.isEmpty) Nil else Seq(namespaceName)
-        makeTable(ns :+ tableName)
+        Some(makeTable(ns :+ tableName))
       } else {
         val ns = parseIdent(namespaceName)
         try {
-          makeTable(catalog.name() +: ns :+ tableName)
+          Some(makeTable(catalogName +: ns :+ tableName))
         } catch {
          case e: AnalysisException if e.getErrorClass == "UNSUPPORTED_FEATURE.HIVE_TABLE_TYPE" =>
-            new Table(
+            Some(new Table(
               name = tableName,
-              catalog = catalog.name(),
+              catalog = catalogName,
               namespace = ns.toArray,
               description = null,
               tableType = null,
               isTemporary = false
-            )
+            ))
         }
       }
+    } catch {
+      case e: AnalysisException if e.getErrorClass == "TABLE_OR_VIEW_NOT_FOUND" => None
     }
+  }
+
+  private def makeTablesDataset(plan: ShowTables): Dataset[Table] = {
+    val qe = sparkSession.sessionState.executePlan(plan)
+    val catalog = qe.analyzed.collectFirst {
+      case ShowTables(r: ResolvedNamespace, _, _) => r.catalog
+      case _: ShowTablesCommand =>
+        sparkSession.sessionState.catalogManager.v2SessionCatalog
+    }.get
+    val tables = qe.toRdd.collect().flatMap { row => resolveTable(row, catalog.name()) }
     CatalogImpl.makeDataset(tables.toImmutableArraySeq, sparkSession)
   }

Review Comment:
   Could you move `makeTablesDataset` above `resolveTable`?
   That way the diff is smaller and easier to review.
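
   For reference, here is a minimal usage sketch (not part of this PR; the table/view names and local-mode setup are illustrative) of the path the new `resolveTable` serves: `Catalog.listTables` now skips an entry whose table disappears between the `SHOW TABLES` scan and per-table resolution, instead of failing the whole call.

   ```scala
   import org.apache.spark.sql.SparkSession

   object ListTablesSketch {
     def main(args: Array[String]): Unit = {
       val spark = SparkSession.builder()
         .master("local[*]")
         .appName("list-tables-sketch")
         .getOrCreate()

       // A persistent table in the session catalog and a temp view.
       spark.sql("CREATE TABLE IF NOT EXISTS t1(id INT) USING parquet")
       spark.sql("CREATE OR REPLACE TEMPORARY VIEW tmp_v AS SELECT 1 AS id")

       // With the change above, a table dropped concurrently between the
       // SHOW TABLES scan and per-table resolution is simply omitted
       // (resolveTable returns None and flatMap drops it) rather than
       // failing listTables with TABLE_OR_VIEW_NOT_FOUND.
       spark.catalog.listTables().show(truncate = false)

       spark.stop()
     }
   }
   ```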



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
