This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new d8099a201d24 [SPARK-48479][SQL][FOLLOWUP] Consolidate createOrReplaceTableColType and colDefinitionType in parser
d8099a201d24 is described below

commit d8099a201d24a4314b6dbea0c0c4e95ea632dc72
Author: allisonwang-db <[email protected]>
AuthorDate: Fri Jun 21 10:25:18 2024 +0800

    [SPARK-48479][SQL][FOLLOWUP] Consolidate createOrReplaceTableColType and colDefinitionType in parser
    
    ### What changes were proposed in this pull request?
    
    This PR is a follow-up to https://github.com/apache/spark/pull/46816. It addresses
    this review comment: https://github.com/apache/spark/pull/46816#discussion_r1644022040
    by consolidating `createOrReplaceTableColType` and `colDefinitionType`, since the two
    rules are exactly the same.
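    
    As a rough illustration (the table name and columns below are hypothetical, not taken
    from this PR), both CREATE TABLE and REPLACE TABLE column lists now go through the
    shared `colDefinitionList` rule, so parsing either statement exercises the same
    column-definition path:
    
    ```scala
    // Sketch only: parse a CREATE TABLE and a REPLACE TABLE statement with the
    // Catalyst parser; after this change both column lists are handled by
    // visitColDefinitionList / visitColDefinition in AstBuilder.
    import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
    
    val createPlan = CatalystSqlParser.parsePlan(
      "CREATE TABLE t (id INT NOT NULL, name STRING COMMENT 'display name') USING parquet")
    val replacePlan = CatalystSqlParser.parsePlan(
      "REPLACE TABLE t (id INT NOT NULL, name STRING COMMENT 'display name') USING parquet")
    
    println(createPlan)   // CreateTable logical plan carrying the two column definitions
    println(replacePlan)  // ReplaceTable logical plan carrying the same column definitions
    ```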
    
    ### Why are the changes needed?
    
    To make the code cleaner
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    Existing tests
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #47047 from allisonwang-db/spark-48479-sql-udf-parser-follow-up.
    
    Authored-by: allisonwang-db <[email protected]>
    Signed-off-by: Wenchen Fan <[email protected]>
---
 .../spark/sql/catalyst/parser/SqlBaseParser.g4       | 12 ++----------
 .../spark/sql/catalyst/parser/AstBuilder.scala       | 20 +++++++++-----------
 .../apache/spark/sql/execution/SparkSqlParser.scala  | 12 +++++++++---
 3 files changed, 20 insertions(+), 24 deletions(-)

diff --git a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
index 7501283a4ac3..ff863565910d 100644
--- a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
+++ b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
@@ -111,7 +111,7 @@ statement
         (RESTRICT | CASCADE)?                                          #dropNamespace
     | SHOW namespaces ((FROM | IN) multipartIdentifier)?
         (LIKE? pattern=stringLit)?                                        #showNamespaces
-    | createTableHeader (LEFT_PAREN createOrReplaceTableColTypeList RIGHT_PAREN)? tableProvider?
+    | createTableHeader (LEFT_PAREN colDefinitionList RIGHT_PAREN)? tableProvider?
         createTableClauses
         (AS? query)?                                                   #createTable
     | CREATE TABLE (IF errorCapturingNot EXISTS)? target=tableIdentifier
@@ -121,7 +121,7 @@ statement
         createFileFormat |
         locationSpec |
         (TBLPROPERTIES tableProps=propertyList))*                      #createTableLike
-    | replaceTableHeader (LEFT_PAREN createOrReplaceTableColTypeList RIGHT_PAREN)? tableProvider?
+    | replaceTableHeader (LEFT_PAREN colDefinitionList RIGHT_PAREN)? tableProvider?
         createTableClauses
         (AS? query)?                                                   #replaceTable
     | ANALYZE TABLE identifierReference partitionSpec? COMPUTE STATISTICS
@@ -1213,14 +1213,6 @@ colType
     : colName=errorCapturingIdentifier dataType (errorCapturingNot NULL)? commentSpec?
     ;
 
-createOrReplaceTableColTypeList
-    : createOrReplaceTableColType (COMMA createOrReplaceTableColType)*
-    ;
-
-createOrReplaceTableColType
-    : colName=errorCapturingIdentifier dataType colDefinitionOption*
-    ;
-
 colDefinitionList
     : colDefinition (COMMA colDefinition)*
     ;
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 0d8bfe5f988d..bca2c8725394 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -3269,24 +3269,24 @@ class AstBuilder extends DataTypeAstBuilder with SQLConfHelper with Logging {
   /**
    * Create top level table schema.
    */
-  protected def createSchema(ctx: CreateOrReplaceTableColTypeListContext): StructType = {
-    val columns = Option(ctx).toArray.flatMap(visitCreateOrReplaceTableColTypeList)
+  protected def createSchema(ctx: ColDefinitionListContext): StructType = {
+    val columns = Option(ctx).toArray.flatMap(visitColDefinitionList)
     StructType(columns.map(_.toV1Column))
   }
 
   /**
    * Get CREATE TABLE column definitions.
    */
-  override def visitCreateOrReplaceTableColTypeList(
-      ctx: CreateOrReplaceTableColTypeListContext): Seq[ColumnDefinition] = withOrigin(ctx) {
-    ctx.createOrReplaceTableColType().asScala.map(visitCreateOrReplaceTableColType).toSeq
+  override def visitColDefinitionList(
+      ctx: ColDefinitionListContext): Seq[ColumnDefinition] = withOrigin(ctx) {
+    ctx.colDefinition().asScala.map(visitColDefinition).toSeq
   }
 
   /**
    * Get a CREATE TABLE column definition.
    */
-  override def visitCreateOrReplaceTableColType(
-      ctx: CreateOrReplaceTableColTypeContext): ColumnDefinition = withOrigin(ctx) {
+  override def visitColDefinition(
+      ctx: ColDefinitionContext): ColumnDefinition = withOrigin(ctx) {
     import ctx._
 
     val name: String = colName.getText
@@ -4113,8 +4113,7 @@ class AstBuilder extends DataTypeAstBuilder with SQLConfHelper with Logging {
     val (identifierContext, temp, ifNotExists, external) =
       visitCreateTableHeader(ctx.createTableHeader)
 
-    val columns = Option(ctx.createOrReplaceTableColTypeList())
-      .map(visitCreateOrReplaceTableColTypeList).getOrElse(Nil)
+    val columns = Option(ctx.colDefinitionList()).map(visitColDefinitionList).getOrElse(Nil)
     val provider = Option(ctx.tableProvider).map(_.multipartIdentifier.getText)
     val (partTransforms, partCols, bucketSpec, properties, options, location,
       comment, serdeInfo, clusterBySpec) = visitCreateTableClauses(ctx.createTableClauses())
@@ -4195,8 +4194,7 @@ class AstBuilder extends DataTypeAstBuilder with SQLConfHelper with Logging {
     val orCreate = ctx.replaceTableHeader().CREATE() != null
     val (partTransforms, partCols, bucketSpec, properties, options, location, comment, serdeInfo,
       clusterBySpec) = visitCreateTableClauses(ctx.createTableClauses())
-    val columns = Option(ctx.createOrReplaceTableColTypeList())
-      .map(visitCreateOrReplaceTableColTypeList).getOrElse(Nil)
+    val columns = Option(ctx.colDefinitionList()).map(visitColDefinitionList).getOrElse(Nil)
     val provider = Option(ctx.tableProvider).map(_.multipartIdentifier.getText)
 
     if (provider.isDefined && serdeInfo.isDefined) {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index a5b9aebefc43..59169dc51415 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -343,7 +343,7 @@ class SparkSqlAstBuilder extends AstBuilder {
         visitCreateTableClauses(ctx.createTableClauses())
       val provider = Option(ctx.tableProvider).map(_.multipartIdentifier.getText).getOrElse(
         throw QueryParsingErrors.createTempTableNotSpecifyProviderError(ctx))
-      val schema = Option(ctx.createOrReplaceTableColTypeList()).map(createSchema)
+      val schema = Option(ctx.colDefinitionList()).map(createSchema)
 
       logWarning(s"CREATE TEMPORARY TABLE ... USING ... is deprecated, please 
use " +
           "CREATE TEMPORARY VIEW ... USING ... instead")
@@ -661,8 +661,14 @@ class SparkSqlAstBuilder extends AstBuilder {
    *   CREATE [OR REPLACE] [TEMPORARY] FUNCTION [IF NOT EXISTS] [db_name.]function_name
    *   ([param_name param_type [COMMENT param_comment], ...])
    *   RETURNS {ret_type | TABLE (ret_name ret_type [COMMENT ret_comment], ...])}
-   *   [routine_characteristics]
-   *   RETURN {expression | TABLE ( query )};
+   *   [routine_characteristic]
+   *   RETURN {expression | query };
+   *
+   *   routine_characteristic
+   *   { LANGUAGE {SQL | IDENTIFIER} |
+   *     [NOT] DETERMINISTIC |
+   *     COMMENT function_comment |
+   *     [CONTAINS SQL | READS SQL DATA] }
    * }}}
    */
   override def visitCreateUserDefinedFunction(ctx: CreateUserDefinedFunctionContext): LogicalPlan =

