This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.5 by this push:
     new b623c28f521 [SPARK-44680][SQL] Improve the error for parameters in `DEFAULT`
b623c28f521 is described below

commit b623c28f521e350b0f4bf15bfb911ca6bf0b1a80
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Tue Aug 8 13:26:19 2023 +0500

    [SPARK-44680][SQL] Improve the error for parameters in `DEFAULT`
    
    ### What changes were proposed in this pull request?
    In the PR, I propose to check whether the `DEFAULT` clause contains a
    parameter marker and, if so, to raise an appropriate error saying that
    the feature is not supported. Currently, table creation with a `DEFAULT`
    clause containing parameters finishes successfully even though
    parameters are not supported in this case:
    ```sql
    scala> spark.sql("CREATE TABLE t12(c1 int default :parm)", args = Map("parm" -> 5)).show()
    ++
    ||
    ++
    ++
    scala> spark.sql("describe t12");
    org.apache.spark.sql.AnalysisException: [INVALID_DEFAULT_VALUE.UNRESOLVED_EXPRESSION] Failed to execute EXISTS_DEFAULT command because the destination table column `c1` has a DEFAULT value :parm, which fails to resolve as a valid expression.
    ```
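
    For contrast, here is a minimal sketch of where named parameter markers
    do work (the same `spark.sql(..., args = ...)` API, assuming a Spark 3.5
    shell):
    ```scala
    // Parameter markers are supported in regular query text, so the
    // substitution succeeds here: :parm is replaced by the literal 5.
    spark.sql("SELECT :parm + 1 AS c1", args = Map("parm" -> 5)).show()
    // +---+
    // | c1|
    // +---+
    // |  6|
    // +---+
    ```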
    
    ### Why are the changes needed?
    This improves the user experience with Spark SQL by reporting the root
    cause of the issue.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes. After the change, the table creation fails with the error:
    ```sql
    scala> spark.sql("CREATE TABLE t12(c1 int default :parm)", args = Map("parm" -> 5)).show()
    org.apache.spark.sql.catalyst.parser.ParseException:
    [UNSUPPORTED_FEATURE.PARAMETER_MARKER_IN_UNEXPECTED_STATEMENT] The feature is not supported: Parameter markers are not allowed in DEFAULT.(line 1, pos 32)

    == SQL ==
    CREATE TABLE t12(c1 int default :parm)
    --------------------------------^^^
    ```
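
    A possible workaround (my own sketch, not part of this change) is to
    inline the value into the SQL text instead of using a parameter marker:
    ```scala
    // Hypothetical workaround: interpolate the value directly into the
    // statement. Only safe for trusted, non-user-supplied values.
    val parm = 5
    spark.sql(s"CREATE TABLE t12(c1 INT DEFAULT $parm) USING parquet")
    ```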
    
    ### How was this patch tested?
    By running the new test:
    ```
    $ build/sbt "test:testOnly *ParametersSuite"
    ```
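
    Positional parameter markers (`?`) carry the same `PARAMETER` tree
    pattern, so the new guard should reject them as well; a sketch of that
    case (my assumption, not covered by the new test), using the Spark 3.5
    positional-args overload of `spark.sql`:
    ```scala
    // Expected to fail at parse time with the same
    // UNSUPPORTED_FEATURE.PARAMETER_MARKER_IN_UNEXPECTED_STATEMENT error:
    spark.sql("CREATE TABLE t13(c1 INT DEFAULT ?) USING parquet", args = Array(5))
    ```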
    
    Closes #42365 from MaxGekk/fix-param-in-DEFAULT.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
    (cherry picked from commit f7879b4c2500046cd7d889ba94adedd3000f8c41)
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../org/apache/spark/sql/catalyst/parser/AstBuilder.scala | 12 ++++++++----
 .../test/scala/org/apache/spark/sql/ParametersSuite.scala | 15 +++++++++++++++
 2 files changed, 23 insertions(+), 4 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 7a28efa3e42..83938632e53 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -40,6 +40,7 @@ import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.trees.CurrentOrigin
+import org.apache.spark.sql.catalyst.trees.TreePattern.PARAMETER
 import org.apache.spark.sql.catalyst.types.DataTypeUtils
 import org.apache.spark.sql.catalyst.util.{CharVarcharUtils, DateTimeUtils, GeneratedColumn, IntervalUtils, ResolveDefaultColumns}
 import org.apache.spark.sql.catalyst.util.DateTimeUtils.{convertSpecialDate, convertSpecialTimestamp, convertSpecialTimestampNTZ, getZoneId, stringToDate, stringToTimestamp, stringToTimestampWithoutTimeZone}
@@ -3130,9 +3131,12 @@ class AstBuilder extends DataTypeAstBuilder with SQLConfHelper with Logging {
     ctx.asScala.headOption.map(visitLocationSpec)
   }
 
-  private def verifyAndGetExpression(exprCtx: ExpressionContext): String = {
+  private def verifyAndGetExpression(exprCtx: ExpressionContext, place: String): String = {
     // Make sure it can be converted to Catalyst expressions.
-    expression(exprCtx)
+    val expr = expression(exprCtx)
+    if (expr.containsPattern(PARAMETER)) {
+      throw QueryParsingErrors.parameterMarkerNotAllowed(place, expr.origin)
+    }
     // Extract the raw expression text so that we can save the user provided text. We don't
     // use `Expression.sql` to avoid storing incorrect text caused by bugs in any expression's
     // `sql` method. Note: `exprCtx.getText` returns a string without spaces, so we need to
@@ -3147,7 +3151,7 @@ class AstBuilder extends DataTypeAstBuilder with SQLConfHelper with Logging {
    */
   override def visitDefaultExpression(ctx: DefaultExpressionContext): String =
     withOrigin(ctx) {
-      verifyAndGetExpression(ctx.expression())
+      verifyAndGetExpression(ctx.expression(), "DEFAULT")
     }
 
   /**
@@ -3155,7 +3159,7 @@ class AstBuilder extends DataTypeAstBuilder with SQLConfHelper with Logging {
    */
   override def visitGenerationExpression(ctx: GenerationExpressionContext): String =
     withOrigin(ctx) {
-      verifyAndGetExpression(ctx.expression())
+      verifyAndGetExpression(ctx.expression(), "GENERATED")
     }
 
   /**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala
index 1ab9dce1c94..a72c9a600ad 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala
@@ -487,4 +487,19 @@ class ParametersSuite extends QueryTest with SharedSparkSession {
         start = 7,
         stop = 13))
   }
+
+  test("SPARK-44680: parameters in DEFAULT") {
+    checkError(
+      exception = intercept[AnalysisException] {
+        spark.sql(
+          "CREATE TABLE t11(c1 int default :parm) USING parquet",
+          args = Map("parm" -> 5))
+      },
+      errorClass = "UNSUPPORTED_FEATURE.PARAMETER_MARKER_IN_UNEXPECTED_STATEMENT",
+      parameters = Map("statement" -> "DEFAULT"),
+      context = ExpectedContext(
+        fragment = "default :parm",
+        start = 24,
+        stop = 36))
+  }
 }

