amaliujia commented on code in PR #36586:
URL: https://github.com/apache/spark/pull/36586#discussion_r875476033
##########
sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala:
##########
@@ -97,8 +97,15 @@ class CatalogImpl(sparkSession: SparkSession) extends
Catalog {
*/
@throws[AnalysisException]("database does not exist")
override def listTables(dbName: String): Dataset[Table] = {
- val tables = sessionCatalog.listTables(dbName).map(makeTable)
- CatalogImpl.makeDataset(tables, sparkSession)
+ if (sessionCatalog.databaseExists(dbName)) {
+ val tables = sessionCatalog.listTables(dbName).map(makeTable)
+ CatalogImpl.makeDataset(tables, sparkSession)
+ } else {
+ val multiParts = sparkSession.sessionState.sqlParser.parseMultipartIdentifier(dbName)
+ val plan = ShowTables(UnresolvedNamespace(multiParts), None)
+ val ret = sparkSession.sessionState.executePlan(plan).toRdd.collect()
+ CatalogImpl.makeDataset(Seq(), sparkSession)
Review Comment:
Are you saying we don't construct a logical plan for `listTables`?
If so, I am now confused about when we should construct a logical plan (and
thereby match the SQL command) and when we should not.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]