Peng-Lei commented on a change in pull request #34096:
URL: https://github.com/apache/spark/pull/34096#discussion_r718094327



##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
##########
@@ -278,6 +278,11 @@ case class InsertIntoStatement(
  */
 case class UseStatement(isNamespaceSet: Boolean, nameParts: Seq[String]) 
extends LeafParsedStatement
 
+/**
+ * A SetCatalog statement, as parsed from SQL.
+ */
+case class SetCatalogStatement(nameParts: Option[String]) extends 
LeafParsedStatement

Review comment:
       Thank you for your advice. Done.

##########
File path: 
sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
##########
@@ -107,6 +107,7 @@ statement
     : query                                                            
#statementDefault
     | ctes? dmlStatementNoWith                                         
#dmlStatement
     | USE NAMESPACE? multipartIdentifier                               #use
+    | SET CATALOG catalogIdentifier                                    
#setCatalog

Review comment:
       ok

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
##########
@@ -404,4 +404,13 @@ class DDLParserSuite extends AnalysisTest with 
SharedSparkSession {
     assert(fileFormat6.locationUri.isEmpty)
     assert(provider6 == Some("ORC"))
   }
+
+  test("SET CATALOG") {
+    comparePlans(
+      parser.parsePlan("SET CATALOG abc"),
+      SetCatalogCommand("abc"))
+    comparePlans(
+      parser.parsePlan("SET CATALOG 'a b c'"),

Review comment:
       ok

##########
File path: 
sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
##########
@@ -239,6 +239,21 @@ class SparkSqlAstBuilder extends AstBuilder {
     ShowCurrentNamespaceCommand()
   }
 
+  /**
+   * Create a [[SetCatalogCommand]] logical command.
+   */
+  override def visitSetCatalog(ctx: SetCatalogContext): LogicalPlan = 
withOrigin(ctx) {
+    val name =
+      if (ctx.catalogIdentifier().identifier() != null) {
+        ctx.catalogIdentifier().identifier().getText
+      } else if (ctx.catalogIdentifier.STRING() != null) {
+        string(ctx.catalogIdentifier().STRING())
+      } else {
+        ctx.catalogIdentifier().getText

Review comment:
       OK. How about throwing a `ParseException` via 
`QueryParsingErrors.invalidCatalogNameError(ctx)`?

##########
File path: 
sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
##########
@@ -404,4 +404,13 @@ class DDLParserSuite extends AnalysisTest with 
SharedSparkSession {
     assert(fileFormat6.locationUri.isEmpty)
     assert(provider6 == Some("ORC"))
   }
+
+  test("SET CATALOG") {
+    comparePlans(
+      parser.parsePlan("SET CATALOG abc"),
+      SetCatalogCommand("abc"))
+    comparePlans(
+      parser.parsePlan("SET CATALOG 'a b c'"),

Review comment:
       done




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to