Github user yhuai commented on a diff in the pull request:

    https://github.com/apache/spark/pull/3752#discussion_r22507568
  
    --- Diff: sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala ---
    @@ -55,8 +56,61 @@ private[hive] class HiveMetastoreCatalog(hive: HiveContext) extends Catalog with
       /** Connection to hive metastore.  Usages should lock on `this`. */
       protected[hive] val client = Hive.get(hive.hiveconf)
     
     +  // TODO: Use this everywhere instead of tuples or (databaseName, tableName).
     +  /** A fully qualified identifier for a table (i.e., database.tableName) */
    +  case class TableIdent(database: String, name: String) {
    +    def toLowerCase = TableIdent(database.toLowerCase, name.toLowerCase)
    +  }
    +
    +  /** A cache of Spark SQL data source tables that have been accessed. */
    +  protected[hive] val cachedDataSourceTables = CacheBuilder.newBuilder()
    +    .maximumSize(1000)
    +    .build(
    +      new CacheLoader[TableIdent, LogicalPlan]() {
    +        override def load(in: TableIdent): LogicalPlan = {
    +          logDebug(s"Creating new cached data source for $in")
    +          val table = client.getTable(in.database, in.name)
    +
     +          // It does not appear that the ql client for the metastore has a way to enumerate all the
     +          // SerDe properties directly...
     +          val options = table.getTTable.getSd.getSerdeInfo.getParameters.toMap
    +
    +          val resolvedRelation =
    +            ResolvedDataSource(
    +              hive,
    +              table.getProperty("spark.sql.sources.provider"),
    +              options)
    +
    +          LogicalRelation(resolvedRelation.relation)
    +        }
    +      })
    +
    +  def refreshTable(databaseName: String, tableName: String): Unit = {
     +    cachedDataSourceTables.refresh(TableIdent(databaseName, tableName).toLowerCase)
    +  }
    +
    +  def invalidateTable(databaseName: String, tableName: String): Unit = {
     +    cachedDataSourceTables.invalidate(TableIdent(databaseName, tableName).toLowerCase)
    +  }
    +
       val caseSensitive: Boolean = false
     
     +  def createDataSourceTable(tableName: String, provider: String, options: Map[String, String]) = {
     +    val (dbName, tblName) = processDatabaseAndTableName("default", tableName)
    +    val tbl = new Table(dbName, tblName)
    +
    +    tbl.setProperty("spark.sql.sources.provider", provider)
    +    options.foreach { case (key, value) => tbl.setSerdeParam(key, value) }
    --- End diff --
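
    An aside, not part of the PR: the hunk above uses Guava's loading-cache
    pattern. Below is a minimal, self-contained Scala sketch of the same
    build / load-on-miss / refresh / invalidate cycle; the String keys and the
    fake loader are stand-ins for TableIdent and the metastore lookup, not
    code from this change.

        import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}

        object CacheSketch extends App {
          val cache: LoadingCache[String, String] = CacheBuilder.newBuilder()
            .maximumSize(1000)
            .build(new CacheLoader[String, String]() {
              // Runs only on a cache miss or an explicit refresh.
              override def load(key: String): String = {
                println(s"loading $key")
                s"plan-for-$key"
              }
            })

          cache.get("default.t1")        // miss: the loader runs
          cache.get("default.t1")        // hit: the cached value is returned
          cache.refresh("default.t1")    // eagerly reloads the entry in place
          cache.invalidate("default.t1") // drops it; the next get reloads
        }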
    
    We are using SerDe properties to store all of the parameters that will be
    passed to a relation provider (for creating a relation), right? Probably we
    can add a comment here to make that explicit.
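
    For concreteness, one possible wording of such a comment (illustrative
    only, not text from the PR) above that foreach:

        // The metastore has no dedicated slot for data source options, so we
        // store them all as SerDe parameters; the cachedDataSourceTables loader
        // reads them back verbatim and passes them to ResolvedDataSource as the
        // options for the relation provider.
        options.foreach { case (key, value) => tbl.setSerdeParam(key, value) }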

