panbingkun commented on code in PR #47276:
URL: https://github.com/apache/spark/pull/47276#discussion_r1671782427


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala:
##########
@@ -71,7 +71,9 @@ class SessionCatalog(
     functionExpressionBuilder: FunctionExpressionBuilder,
     cacheSize: Int = SQLConf.get.tableRelationCacheSize,
     cacheTTL: Long = SQLConf.get.metadataCacheTTL,
-    defaultDatabase: String = SQLConf.get.defaultDatabase) extends 
SQLConfHelper with Logging {
+    defaultDatabase: String = SQLConf.get.defaultDatabase,
+    var catalogName: Option[String] = 
Some(CatalogManager.SESSION_CATALOG_NAME))

Review Comment:
   - Here is a simple example, e.g.:
   ```
   package org.apache.spark.sql.connector
   
   import java.util
   
   import scala.collection.mutable
   import scala.jdk.CollectionConverters._
   
   import org.apache.spark.sql.catalyst.SQLConfHelper
   import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, 
SessionCatalog}
   import org.apache.spark.sql.connector.catalog.{CatalogPlugin, 
NamespaceChange, SupportsNamespaces}
   import org.apache.spark.sql.util.CaseInsensitiveStringMap
   
   class BaiduSessionCatalog extends SupportsNamespaces with SQLConfHelper with 
CatalogPlugin {
   
     var sparkSessionCatalog: SessionCatalog = _
   
     override def initialize(name: String, options: CaseInsensitiveStringMap): 
Unit = {}
   
     override def name(): String = "baidu"
   
     override def listNamespaces(): Array[Array[String]] = {
       Array.empty
     }
   
     override def listNamespaces(namespace: Array[String]): 
Array[Array[String]] = {
       Array.empty
     }
   
     override def loadNamespaceMetadata(namespace: Array[String]): 
util.Map[String, String] = {
       new util.HashMap[String, String]()
     }
   
     override def createNamespace(namespace: Array[String], metadata: 
util.Map[String, String]): Unit = {
       println("baidu catalog createNamespace ...")
     }
   
     override def alterNamespace(namespace: Array[String], changes: 
NamespaceChange*): Unit = {
       sparkSessionCatalog.catalogName = Some(name())
       namespace match {
         case Array(db) =>
           val metadata = sparkSessionCatalog.getDatabaseMetadata(db).toMetadata
         case _ =>
       }
     }
   
     override def dropNamespace(namespace: Array[String], cascade: Boolean): 
Boolean = {
       true
     }
   
     private implicit class CatalogDatabaseHelper(catalogDatabase: 
CatalogDatabase) {
       def toMetadata: util.Map[String, String] = {
         val metadata = mutable.HashMap[String, String]()
   
         catalogDatabase.properties.foreach {
           case (key, value) => metadata.put(key, value)
         }
         metadata.put(SupportsNamespaces.PROP_LOCATION, 
catalogDatabase.locationUri.toString)
         metadata.put(SupportsNamespaces.PROP_COMMENT, 
catalogDatabase.description)
   
         metadata.asJava
       }
     }
   }
   ```
   
   - Test code as following:
   ```
   package org.apache.spark.sql.connector
   
   import org.scalatest.BeforeAndAfter
   
   import org.apache.spark.sql.QueryTest
   import org.apache.spark.sql.connector.catalog.SupportsNamespaces
   import org.apache.spark.sql.test.SharedSparkSession
   
   class BaiduSessionCatalogSuite
     extends QueryTest
     with SharedSparkSession
     with BeforeAndAfter {
   
     protected val catalogClassName: String = 
classOf[BaiduSessionCatalog].getName
     private lazy val baiduCatalog = 
spark.sessionState.catalogManager.catalog("baidu")
   
     before {
       spark.conf.set("spark.sql.catalog.baidu", catalogClassName)
       baiduCatalog.asInstanceOf[BaiduSessionCatalog].sparkSessionCatalog = 
spark.sessionState.catalog
     }
   
     test("alter namespace") {
       
baiduCatalog.asInstanceOf[SupportsNamespaces].alterNamespace(Array("ns"), null)
       println("alter namespace successfully.")
     }
   }
   ```
   
   - If we hardcode `SESSION_CATALOG_NAME`, the error message will always be:
   ```
   [SCHEMA_NOT_FOUND] The schema `spark_catalog`.`ns` cannot be found. Verify 
the spelling and correctness of the schema and catalog.
   If you did not qualify the name with a catalog, verify the current_schema() 
output, or qualify the name with the correct catalog.
   To tolerate the error on drop use DROP SCHEMA IF EXISTS. SQLSTATE: 42704
   ``` 
     <img width="1079" alt="image" 
src="https://github.com/apache/spark/assets/15246973/f4670f57-2cb7-4e18-bd02-69c138bee0ba">
   
   - Otherwise, it will display:
     <img width="1070" alt="image" 
src="https://github.com/apache/spark/assets/15246973/dd1b7755-effa-4f5f-ad86-d7a88aff74e1">
   
   
   - Do we need to consider this scenario? @cloud-fan 



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to