Github user kunal642 commented on a diff in the pull request:

    https://github.com/apache/carbondata/pull/1980#discussion_r168687437
  
    --- Diff: 
integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowTablesCommand.scala
 ---
    @@ -44,39 +44,15 @@ private[sql] case class CarbonShowTablesCommand ( 
databaseName: Option[String],
         val db = databaseName.getOrElse(catalog.getCurrentDatabase)
         var tables =
           tableIdentifierPattern.map(catalog.listTables(db, 
_)).getOrElse(catalog.listTables(db))
    -    tables = filterDataMaps(tables, sparkSession)
    +    val externalCatalog = sparkSession.sharedState.externalCatalog
    +    // tables will be filtered for all the dataMaps to show only main 
tables
    +    tables = tables
    +      .filter(table => externalCatalog.getTable(db, 
table.table).storage.properties
    +        .getOrElse("isCarbonTableVisibility", true).toString.toBoolean)
         tables.map { tableIdent =>
           val isTemp = catalog.isTemporaryTable(tableIdent)
           Row(tableIdent.database.getOrElse("default"), tableIdent.table, 
isTemp)
    --- End diff --
    
    Instead of looping twice, we can use `collect` like this:
    tables.collect {
          case tableIdent if externalCatalog.getTable(db, 
tableIdent.table).storage.properties.getOrElse("isCarbonTableVisibility", true) 
.toString.toBoolean =>
            val isTemp = catalog.isTemporaryTable(tableIdent)
            Row(tableIdent.database.getOrElse("default"), tableIdent.table, 
isTemp)
          case _ => 
            Row()
        }


---

Reply via email to