This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 156b9b5  [SPARK-35736][SPARK-35774][SQL][FOLLOWUP] Prohibit to specify the same units for FROM and TO with unit-to-unit interval syntax
156b9b5 is described below

commit 156b9b5d14d9f759ae2beef46f163a541878f53c
Author: Kousuke Saruta <saru...@oss.nttdata.com>
AuthorDate: Thu Jun 24 23:13:31 2021 +0300

    [SPARK-35736][SPARK-35774][SQL][FOLLOWUP] Prohibit to specify the same units for FROM and TO with unit-to-unit interval syntax
    
    ### What changes were proposed in this pull request?
    
    This PR changes the behavior of the unit-to-unit interval syntax to prohibit specifying the same unit for both FROM and TO.
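
    For illustration, a minimal sketch of the resulting behavior, exercised
    through StructType.fromDDL (the same entry point the added test uses; the
    field name "x" mirrors the test and is arbitrary):

        import org.apache.spark.sql.types.StructType.fromDDL

        fromDDL("x INTERVAL YEAR TO MONTH")   // OK: TO unit strictly follows FROM unit
        fromDDL("x INTERVAL YEAR TO YEAR")    // throws ParseException: same unit on both sides
        fromDDL("x INTERVAL MINUTE TO HOUR")  // throws ParseException: TO unit precedes FROM unit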
    
    ### Why are the changes needed?
    
    For ANSI compliance.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    New test.
    
    Closes #33057 from sarutak/prohibit-unit-pattern.
    
    Authored-by: Kousuke Saruta <saru...@oss.nttdata.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../spark/sql/catalyst/parser/AstBuilder.scala     | 30 ++++++++++++++--------
 .../spark/sql/errors/QueryParsingErrors.scala      |  2 +-
 .../apache/spark/sql/types/StructTypeSuite.scala   | 24 +++++++++++++++++
 3 files changed, 45 insertions(+), 11 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 6a373ab..f82b9be 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -2521,23 +2521,33 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
   }
 
   override def visitYearMonthIntervalDataType(ctx: YearMonthIntervalDataTypeContext): DataType = {
-    val start = YearMonthIntervalType.stringToField(ctx.from.getText.toLowerCase(Locale.ROOT))
-    val end = if (ctx.to != null) {
-      YearMonthIntervalType.stringToField(ctx.to.getText.toLowerCase(Locale.ROOT))
+    val startStr = ctx.from.getText.toLowerCase(Locale.ROOT)
+    val start = YearMonthIntervalType.stringToField(startStr)
+    if (ctx.to != null) {
+      val endStr = ctx.to.getText.toLowerCase(Locale.ROOT)
+      val end = YearMonthIntervalType.stringToField(endStr)
+      if (end <= start) {
+        throw QueryParsingErrors.fromToIntervalUnsupportedError(startStr, endStr, ctx)
+      }
+      YearMonthIntervalType(start, end)
     } else {
-      start
+      YearMonthIntervalType(start)
     }
-    YearMonthIntervalType(start, end)
   }
 
   override def visitDayTimeIntervalDataType(ctx: DayTimeIntervalDataTypeContext): DataType = {
-    val start = DayTimeIntervalType.stringToField(ctx.from.getText.toLowerCase(Locale.ROOT))
-    val end = if (ctx.to != null ) {
-      DayTimeIntervalType.stringToField(ctx.to.getText.toLowerCase(Locale.ROOT))
+    val startStr = ctx.from.getText.toLowerCase(Locale.ROOT)
+    val start = DayTimeIntervalType.stringToField(startStr)
+    if (ctx.to != null ) {
+      val endStr = ctx.to.getText.toLowerCase(Locale.ROOT)
+      val end = DayTimeIntervalType.stringToField(endStr)
+      if (end <= start) {
+        throw QueryParsingErrors.fromToIntervalUnsupportedError(startStr, endStr, ctx)
+      }
+      DayTimeIntervalType(start, end)
     } else {
-      start
+      DayTimeIntervalType(start)
     }
-    DayTimeIntervalType(start, end)
   }
 
   /**
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
index 9002728..cab1c17 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
@@ -206,7 +206,7 @@ object QueryParsingErrors {
   }
 
   def fromToIntervalUnsupportedError(
-      from: String, to: String, ctx: UnitToUnitIntervalContext): Throwable = {
+      from: String, to: String, ctx: ParserRuleContext): Throwable = {
     new ParseException(s"Intervals FROM $from TO $to are not supported.", ctx)
   }
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala
index 820f326..18821b8 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala
@@ -18,8 +18,11 @@
 package org.apache.spark.sql.types
 
 import org.apache.spark.SparkFunSuite
+import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.SQLHelper
 import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.sql.types.{DayTimeIntervalType => DT}
+import org.apache.spark.sql.types.{YearMonthIntervalType => YM}
 import org.apache.spark.sql.types.StructType.fromDDL
 
 class StructTypeSuite extends SparkFunSuite with SQLHelper {
@@ -249,4 +252,25 @@ class StructTypeSuite extends SparkFunSuite with SQLHelper {
     val dayTimeInterval = "`dti` INTERVAL DAY TO SECOND"
     assert(fromDDL(dayTimeInterval).toDDL === dayTimeInterval)
   }
+
+  test("SPARK-35774: Prohibit the case start/end are the same with unit-to-unit interval syntax") {
+    def checkIntervalDDL(start: Byte, end: Byte, fieldToString: Byte => String): Unit = {
+      val startUnit = fieldToString(start)
+      val endUnit = fieldToString(end)
+      if (start < end) {
+        fromDDL(s"x INTERVAL $startUnit TO $endUnit")
+      } else {
+        intercept[ParseException] {
+          fromDDL(s"x INTERVAL $startUnit TO $endUnit")
+        }
+      }
+    }
+
+    for (start <- YM.yearMonthFields; end <- YM.yearMonthFields) {
+      checkIntervalDDL(start, end, YM.fieldToString)
+    }
+    for (start <- DT.dayTimeFields; end <- DT.dayTimeFields) {
+      checkIntervalDDL(start, end, DT.fieldToString)
+    }
+  }
 }
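
A note on the new check: the "end <= start" comparison in AstBuilder above relies on the interval field constants being ordered Byte values, which is also why the test can iterate over yearMonthFields/dayTimeFields and compare them with "<". A minimal sketch of that ordering, assuming the constant values as defined in the Spark sources at the time of this commit:

    import org.apache.spark.sql.types.{DayTimeIntervalType => DT}
    import org.apache.spark.sql.types.{YearMonthIntervalType => YM}

    // Year-month fields are ordered: YEAR (0) < MONTH (1).
    assert(YM.YEAR < YM.MONTH)
    // Day-time fields are ordered: DAY (0) < HOUR (1) < MINUTE (2) < SECOND (3).
    assert(DT.DAY < DT.HOUR && DT.HOUR < DT.MINUTE && DT.MINUTE < DT.SECOND)
    // Hence "end <= start" holds exactly when the TO unit does not strictly
    // follow the FROM unit, e.g. YEAR TO YEAR or HOUR TO DAY.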
