dtenedor commented on code in PR #53941:
URL: https://github.com/apache/spark/pull/53941#discussion_r2728660391


##########
sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala:
##########
@@ -410,15 +411,48 @@ class SparkSqlAstBuilder extends AstBuilder {
 
   /**
    * Create a [[SetCatalogCommand]] logical command.
+   *
+   * SET CATALOG is case-sensitive and supports multiple forms:
+   *
+   *   - Session temp variable: SET CATALOG var_name (where var_name is a declared variable)
+   *   - Simple identifier: SET CATALOG my_catalog
+   *     (tries to resolve as session temp variable first, then uses as literal catalog name)
+   *   - String literal: SET CATALOG 'my_catalog'
+   *   - identifier() function: SET CATALOG IDENTIFIER('my_catalog') or IDENTIFIER(var_name)
+   *   - Other foldable expressions: SET CATALOG CAST('my_catalog' AS STRING),
+   *     CONCAT('my', '_catalog')
    */
  override def visitSetCatalog(ctx: SetCatalogContext): LogicalPlan = withOrigin(ctx) {
-    withCatalogIdentClause(ctx.catalogIdentifierReference, identifiers => {
-      if (identifiers.size > 1) {
-        // can occur when user put multipart string in IDENTIFIER(...) clause
-        throw QueryParsingErrors.invalidNameForSetCatalog(identifiers, ctx)
+    val expr = expression(ctx.expression())
+
+    def buildSetCatalogCommand(nameParts: Seq[String]): SetCatalogCommand = {
+      if (nameParts.size > 1) {
+        throw QueryParsingErrors.invalidNameForSetCatalog(nameParts, ctx)
       }
-      SetCatalogCommand(identifiers.head)
-    })
+      SetCatalogCommand(nameParts.head)
+    }
+
+    expr match {
+      // UnresolvedAttribute - try to resolve as variable, fallback to literal
+      // Resolution order: (1) session temp variable, (2) literal catalog name
+      case UnresolvedAttribute(nameParts) =>
+        PlanWithUnresolvedIdentifierAndFallback(
+          expr,
+          fallbackIdentifier = nameParts,
+          Nil,
+          (identifiers, _) => buildSetCatalogCommand(identifiers)
+        )
+
+      // All other expressions (IDENTIFIER(), Literal, foldable) - standard resolution
+      // Note that we cannot check foldability and evaluate the expression here
+      // as expression plans like UnresolvedFunction need to be resolved in the analysis phase

Review Comment:
   super nit, let's put punctuation at the end of every sentence in the comments :)



##########
sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala:
##########
@@ -3040,6 +3049,134 @@ class DataSourceV2SQLSuiteV1Filter
     )
   }
 
+  test("SPARK-55155: SET CATALOG statement with foldable expressions") {
+    val catalogManager = spark.sessionState.catalogManager
+    assert(catalogManager.currentCatalog.name() == SESSION_CATALOG_NAME)
+
+    sql("SET CATALOG CAST(\"testcat\" AS STRING)")
+    assert(catalogManager.currentCatalog.name() == "testcat")
+
+    sql("SET CATALOG CONCAT('test', 'cat2')")
+    assert(catalogManager.currentCatalog.name() == "testcat2")
+  }
+
+  test("SPARK-55155: SET CATALOG statement is case-sensitive") {
+    val catalogManager = spark.sessionState.catalogManager
+    assert(catalogManager.currentCatalog.name() == SESSION_CATALOG_NAME)
+
+    checkError(
+      exception = intercept[CatalogNotFoundException] {
+        sql("SET CATALOG teStCaT")
+      },
+      condition = "CATALOG_NOT_FOUND",
+      parameters = Map("catalogName" -> "`teStCaT`", "config" -> "\"spark.sql.catalog.teStCaT\"")
+    )
+
+    checkError(
+      exception = intercept[CatalogNotFoundException] {
+        sql("SET CATALOG 'teStCaT'")
+      },
+      condition = "CATALOG_NOT_FOUND",
+      parameters = Map("catalogName" -> "`teStCaT`", "config" -> "\"spark.sql.catalog.teStCaT\"")
+    )
+
+    checkError(
+      exception = intercept[CatalogNotFoundException] {
+        sql("SET CATALOG IDENTIFIER('teStCaT')")
+      },
+      condition = "CATALOG_NOT_FOUND",
+      parameters = Map("catalogName" -> "`teStCaT`", "config" -> "\"spark.sql.catalog.teStCaT\"")
+    )
+    checkError(
+      exception = intercept[CatalogNotFoundException] {
+        sql("SET CATALOG CONCAT('teSt', 'CaT')")
+      },
+      condition = "CATALOG_NOT_FOUND",
+      parameters = Map("catalogName" -> "`teStCaT`", "config" -> "\"spark.sql.catalog.teStCaT\"")
+    )
+  }
+
+  test("SPARK-55155: SET CATALOG with session temp variable") {
+    registerCatalog("testcat3", classOf[InMemoryCatalog])
+    registerCatalog("testcat4", classOf[InMemoryCatalog])
+    val catalogManager = spark.sessionState.catalogManager
+    assert(catalogManager.currentCatalog.name() == SESSION_CATALOG_NAME)
+
+    // Declare and set the session temp variable
+    sql("DECLARE cat_name STRING DEFAULT 'testcat'")
+    sql("DECLARE cat_name2 STRING")
+    sql("SET VAR cat_name2 = 'testcat2'")
+
+    // Using the session temp variable without IDENTIFIER()
+    sql("SET CATALOG cat_name")
+    assert(catalogManager.currentCatalog.name() == "testcat")
+    sql("SET CATALOG cat_name2")
+    assert(catalogManager.currentCatalog.name() == "testcat2")

Review Comment:
   you can make a helper def to check the currentCatalog.name() against a string to make these more concise.
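   
   For instance, a rough sketch of such a helper (the name `checkCurrentCatalog` is just illustrative, not something the PR has to use):
   
   ```
   // Hypothetical helper for this suite; asserts the session's current catalog name.
   private def checkCurrentCatalog(expected: String): Unit = {
     assert(spark.sessionState.catalogManager.currentCatalog.name() == expected)
   }
   ```
   
   Then each pair above collapses to e.g. `sql("SET CATALOG cat_name"); checkCurrentCatalog("testcat")`.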



##########
sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala:
##########
@@ -410,15 +411,48 @@ class SparkSqlAstBuilder extends AstBuilder {
 
   /**
    * Create a [[SetCatalogCommand]] logical command.
+   *
+   * SET CATALOG is case-sensitive and supports multiple forms:
+   *
+   *   - Session temp variable: SET CATALOG var_name (where var_name is a declared variable)
+   *   - Simple identifier: SET CATALOG my_catalog
+   *     (tries to resolve as session temp variable first, then uses as literal catalog name)
+   *   - String literal: SET CATALOG 'my_catalog'
+   *   - identifier() function: SET CATALOG IDENTIFIER('my_catalog') or IDENTIFIER(var_name)
+   *   - Other foldable expressions: SET CATALOG CAST('my_catalog' AS STRING),
+   *     CONCAT('my', '_catalog')
    */
  override def visitSetCatalog(ctx: SetCatalogContext): LogicalPlan = withOrigin(ctx) {
-    withCatalogIdentClause(ctx.catalogIdentifierReference, identifiers => {
-      if (identifiers.size > 1) {
-        // can occur when user put multipart string in IDENTIFIER(...) clause
-        throw QueryParsingErrors.invalidNameForSetCatalog(identifiers, ctx)
+    val expr = expression(ctx.expression())
+
+    def buildSetCatalogCommand(nameParts: Seq[String]): SetCatalogCommand = {
+      if (nameParts.size > 1) {
+        throw QueryParsingErrors.invalidNameForSetCatalog(nameParts, ctx)
       }
-      SetCatalogCommand(identifiers.head)
-    })
+      SetCatalogCommand(nameParts.head)
+    }
+
+    expr match {
+      // UnresolvedAttribute - try to resolve as variable, fallback to literal
+      // Resolution order: (1) session temp variable, (2) literal catalog name
+      case UnresolvedAttribute(nameParts) =>
+        PlanWithUnresolvedIdentifierAndFallback(

Review Comment:
   We talked offline; instead, we can do this:
   
   ```
   /**
    * The command for `SET CATALOG XXX`
    */
   case class SetCatalogCommand(catalogNameExpr: Expression) extends LeafRunnableCommand {
     override def output: Seq[Attribute] = Seq.empty
   
     override def run(sparkSession: SparkSession): Seq[Row] = {
       val catalogName: String = catalogNameExpr.eval().toString
       sparkSession.sessionState.catalogManager.setCurrentCatalog(catalogName)
       Seq.empty
     }
   }
   ```
   
   And earlier, we can have a simple analysis rule that runs after the main rule batch, matches the case where `catalogNameExpr` is an `UnresolvedAttribute`, and replaces it with a string `Literal`.
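   
   A rough sketch of what that follow-up rule could look like (the rule name `ResolveSetCatalogName` and the fallback-to-literal behavior shown here are illustrative assumptions, not what the PR must do; it assumes the `SetCatalogCommand` sketched above is in scope):
   
   ```
   import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
   import org.apache.spark.sql.catalyst.expressions.Literal
   import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
   import org.apache.spark.sql.catalyst.rules.Rule
   
   // Hypothetical post-analysis rule: if the catalog name expression is still an
   // UnresolvedAttribute, turn its name parts into a string Literal.
   // A real rule would first try to resolve the name as a session variable.
   object ResolveSetCatalogName extends Rule[LogicalPlan] {
     override def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperators {
       case cmd @ SetCatalogCommand(UnresolvedAttribute(nameParts)) =>
         cmd.copy(catalogNameExpr = Literal(nameParts.mkString(".")))
     }
   }
   ```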



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
