This is an automated email from the ASF dual-hosted git repository. yamamuro pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new bacffb8 [SPARK-23264][SQL] Make INTERVAL keyword optional in INTERVAL clauses when ANSI mode enabled bacffb8 is described below commit bacffb8810434b36fdb6fdd622a0f8c8d99ee5ab Author: Takeshi Yamamuro <yamam...@apache.org> AuthorDate: Thu Mar 14 10:45:29 2019 +0900 [SPARK-23264][SQL] Make INTERVAL keyword optional in INTERVAL clauses when ANSI mode enabled ## What changes were proposed in this pull request? This PR updated the parsing rules in `SqlBase.g4` to support the SQL query below when ANSI mode is enabled: ``` SELECT CAST('2017-08-04' AS DATE) + 1 days; ``` The current master cannot parse it, though other DBMS-like systems support the syntax (e.g., Hive and MySQL). Also, the syntax is frequently used in the official TPC-DS queries. This PR added new tokens as follows: ``` YEAR | YEARS | MONTH | MONTHS | WEEK | WEEKS | DAY | DAYS | HOUR | HOURS | MINUTE | MINUTES | SECOND | SECONDS | MILLISECOND | MILLISECONDS | MICROSECOND | MICROSECONDS ``` Then, it registered the keywords below as ANSI reserved (this follows SQL:2011): ``` DAY | HOUR | MINUTE | MONTH | SECOND | YEAR ``` ## How was this patch tested? Added tests in `SQLQuerySuite`, `ExpressionParserSuite`, and `TableIdentifierParserSuite`. Closes #20433 from maropu/SPARK-23264. 
Authored-by: Takeshi Yamamuro <yamam...@apache.org> Signed-off-by: Takeshi Yamamuro <yamam...@apache.org> --- docs/sql-reserved-and-non-reserved-keywords.md | 24 +- .../apache/spark/sql/catalyst/parser/SqlBase.g4 | 74 +++- .../catalyst/parser/ExpressionParserSuite.scala | 91 +++-- .../parser/TableIdentifierParserSuite.scala | 34 +- .../resources/sql-tests/inputs/ansi/interval.sql | 188 +++++++++ .../sql-tests/results/ansi/interval.sql.out | 439 +++++++++++++++++++++ .../resources/sql-tests/results/literals.sql.out | 4 +- .../scala/org/apache/spark/sql/SQLQuerySuite.scala | 21 +- 8 files changed, 827 insertions(+), 48 deletions(-) diff --git a/docs/sql-reserved-and-non-reserved-keywords.md b/docs/sql-reserved-and-non-reserved-keywords.md index 53eb998..b1561fb 100644 --- a/docs/sql-reserved-and-non-reserved-keywords.md +++ b/docs/sql-reserved-and-non-reserved-keywords.md @@ -137,7 +137,8 @@ The list of reserved and non-reserved keywords can change according to the confi <tr><td>DATABASE</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> <tr><td>DATABASES</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> <tr><td>DATE</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> - <tr><td>DAY</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> + <tr><td>DAY</td><td>reserved</td><td>non-reserved</td><td>reserved</td></tr> + <tr><td>DAYS</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> <tr><td>DBPROPERTIES</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> <tr><td>DEALLOCATE</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> <tr><td>DEC</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> @@ -230,7 +231,8 @@ The list of reserved and non-reserved keywords can change according to the confi <tr><td>HANDLER</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> 
<tr><td>HAVING</td><td>reserved</td><td>non-reserved</td><td>reserved</td></tr> <tr><td>HOLD</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> - <tr><td>HOUR</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> + <tr><td>HOUR</td><td>reserved</td><td>non-reserved</td><td>reserved</td></tr> + <tr><td>HOURS</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> <tr><td>IDENTITY</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> <tr><td>IF</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> <tr><td>IGNORE</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> @@ -313,13 +315,19 @@ The list of reserved and non-reserved keywords can change according to the confi <tr><td>MEMBER</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> <tr><td>MERGE</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> <tr><td>METHOD</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> + <tr><td>MICROSECOND</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> + <tr><td>MICROSECONDS</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> + <tr><td>MILLISECOND</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> + <tr><td>MILLISECONDS</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> <tr><td>MIN</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> <tr><td>MINUS</td><td>reserved</td><td>reserved</td><td>non-reserved</td></tr> - <tr><td>MINUTE</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> + <tr><td>MINUTE</td><td>reserved</td><td>non-reserved</td><td>reserved</td></tr> + <tr><td>MINUTES</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> <tr><td>MOD</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> <tr><td>MODIFIES</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> 
<tr><td>MODULE</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> - <tr><td>MONTH</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> + <tr><td>MONTH</td><td>reserved</td><td>non-reserved</td><td>reserved</td></tr> + <tr><td>MONTHS</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> <tr><td>MSCK</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> <tr><td>MULTISET</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> <tr><td>NAMES</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> @@ -448,7 +456,8 @@ The list of reserved and non-reserved keywords can change according to the confi <tr><td>SCOPE</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> <tr><td>SCROLL</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> <tr><td>SEARCH</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> - <tr><td>SECOND</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> + <tr><td>SECOND</td><td>reserved</td><td>non-reserved</td><td>reserved</td></tr> + <tr><td>SECONDS</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> <tr><td>SECTION</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> <tr><td>SEEK</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> <tr><td>SELECT</td><td>reserved</td><td>non-reserved</td><td>reserved</td></tr> @@ -568,8 +577,11 @@ The list of reserved and non-reserved keywords can change according to the confi <tr><td>WITH</td><td>reserved</td><td>non-reserved</td><td>reserved</td></tr> <tr><td>WITHIN</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> <tr><td>WITHOUT</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> + <tr><td>WEEK</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> + <tr><td>WEEKS</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> 
<tr><td>WORK</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> <tr><td>WRITE</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> - <tr><td>YEAR</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr> + <tr><td>YEAR</td><td>reserved</td><td>non-reserved</td><td>reserved</td></tr> + <tr><td>YEARS</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> <tr><td>ZONE</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr> </table> diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 index d11c28c..be36aaa 100644 --- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 +++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 @@ -655,11 +655,12 @@ booleanValue ; interval - : INTERVAL intervalField* + : {ansi}? INTERVAL? intervalField+ + | {!ansi}? INTERVAL intervalField* ; intervalField - : value=intervalValue unit=identifier (TO to=identifier)? + : value=intervalValue unit=intervalUnit (TO to=intervalUnit)? 
; intervalValue @@ -667,6 +668,27 @@ intervalValue | STRING ; +intervalUnit + : DAY + | DAYS + | HOUR + | HOURS + | MICROSECOND + | MICROSECONDS + | MILLISECOND + | MILLISECONDS + | MINUTE + | MINUTES + | MONTH + | MONTHS + | SECOND + | SECONDS + | WEEK + | WEEKS + | YEAR + | YEARS + ; + colPosition : FIRST | AFTER identifier ; @@ -795,6 +817,7 @@ ansiNonReserved | DATA | DATABASE | DATABASES + | DAYS | DBPROPERTIES | DEFINED | DELETE @@ -825,6 +848,7 @@ ansiNonReserved | FUNCTIONS | GLOBAL | GROUPING + | HOURS | IF | IGNORE | IMPORT @@ -851,6 +875,12 @@ ansiNonReserved | LOGICAL | MACRO | MAP + | MICROSECOND + | MICROSECONDS + | MILLISECOND + | MILLISECONDS + | MINUTES + | MONTHS | MSCK | NO | NULLS @@ -891,6 +921,7 @@ ansiNonReserved | ROW | ROWS | SCHEMA + | SECONDS | SEPARATED | SERDE | SERDEPROPERTIES @@ -924,7 +955,10 @@ ansiNonReserved | USE | VALUES | VIEW + | WEEK + | WEEKS | WINDOW + | YEARS ; defaultReserved @@ -996,6 +1030,8 @@ nonReserved | DATA | DATABASE | DATABASES + | DAY + | DAYS | DBPROPERTIES | DEFINED | DELETE @@ -1037,6 +1073,8 @@ nonReserved | GROUP | GROUPING | HAVING + | HOUR + | HOURS | IF | IGNORE | IMPORT @@ -1067,6 +1105,14 @@ nonReserved | LOGICAL | MACRO | MAP + | MICROSECOND + | MICROSECONDS + | MILLISECOND + | MILLISECONDS + | MINUTE + | MINUTES + | MONTH + | MONTHS | MSCK | NO | NOT @@ -1115,6 +1161,8 @@ nonReserved | ROLLUP | ROW | ROWS + | SECOND + | SECONDS | SELECT | SEPARATED | SERDE @@ -1157,10 +1205,14 @@ nonReserved | USER | VALUES | VIEW + | WEEK + | WEEKS | WHEN | WHERE | WINDOW | WITH + | YEAR + | YEARS ; SELECT: 'SELECT'; @@ -1199,6 +1251,24 @@ ASC: 'ASC'; DESC: 'DESC'; FOR: 'FOR'; INTERVAL: 'INTERVAL'; +YEAR: 'YEAR'; +YEARS: 'YEARS'; +MONTH: 'MONTH'; +MONTHS: 'MONTHS'; +WEEK: 'WEEK'; +WEEKS: 'WEEKS'; +DAY: 'DAY'; +DAYS: 'DAYS'; +HOUR: 'HOUR'; +HOURS: 'HOURS'; +MINUTE: 'MINUTE'; +MINUTES: 'MINUTES'; +SECOND: 'SECOND'; +SECONDS: 'SECONDS'; +MILLISECOND: 'MILLISECOND'; +MILLISECONDS: 'MILLISECONDS'; +MICROSECOND: 
'MICROSECOND'; +MICROSECONDS: 'MICROSECONDS'; CASE: 'CASE'; WHEN: 'WHEN'; THEN: 'THEN'; diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala index 7ecad59..d1d0d38 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala @@ -39,7 +39,6 @@ import org.apache.spark.unsafe.types.CalendarInterval * CheckAnalysis classes. */ class ExpressionParserSuite extends PlanTest { - import CatalystSqlParser._ import org.apache.spark.sql.catalyst.dsl.expressions._ import org.apache.spark.sql.catalyst.dsl.plans._ @@ -585,49 +584,60 @@ class ExpressionParserSuite extends PlanTest { } } + val intervalUnits = Seq( + "year", + "month", + "week", + "day", + "hour", + "minute", + "second", + "millisecond", + "microsecond") + + def intervalLiteral(u: String, s: String): Literal = { + Literal(CalendarInterval.fromSingleUnitString(u, s)) + } + test("intervals") { - def intervalLiteral(u: String, s: String): Literal = { - Literal(CalendarInterval.fromSingleUnitString(u, s)) + def checkIntervals(intervalValue: String, expected: Literal): Unit = { + assertEqual(s"interval $intervalValue", expected) + + // SPARK-23264 Support interval values without INTERVAL clauses if ANSI SQL enabled + withSQLConf(SQLConf.ANSI_SQL_PARSER.key -> "true") { + assertEqual(intervalValue, expected) + } } // Empty interval statement intercept("interval", "at least one time unit should be given for interval literal") // Single Intervals. 
- val units = Seq( - "year", - "month", - "week", - "day", - "hour", - "minute", - "second", - "millisecond", - "microsecond") val forms = Seq("", "s") val values = Seq("0", "10", "-7", "21") - units.foreach { unit => + intervalUnits.foreach { unit => forms.foreach { form => values.foreach { value => val expected = intervalLiteral(unit, value) - assertEqual(s"interval $value $unit$form", expected) - assertEqual(s"interval '$value' $unit$form", expected) + checkIntervals(s"$value $unit$form", expected) + checkIntervals(s"'$value' $unit$form", expected) } } } // Hive nanosecond notation. - assertEqual("interval 13.123456789 seconds", intervalLiteral("second", "13.123456789")) - assertEqual("interval -13.123456789 second", intervalLiteral("second", "-13.123456789")) + checkIntervals("13.123456789 seconds", intervalLiteral("second", "13.123456789")) + checkIntervals("-13.123456789 second", intervalLiteral("second", "-13.123456789")) // Non Existing unit - intercept("interval 10 nanoseconds", "No interval can be constructed") + intercept("interval 10 nanoseconds", + "no viable alternative at input 'interval 10 nanoseconds'") // Year-Month intervals. val yearMonthValues = Seq("123-10", "496-0", "-2-3", "-123-0") yearMonthValues.foreach { value => val result = Literal(CalendarInterval.fromYearMonthString(value)) - assertEqual(s"interval '$value' year to month", result) + checkIntervals(s"'$value' year to month", result) } // Day-Time intervals. @@ -640,22 +650,51 @@ class ExpressionParserSuite extends PlanTest { "1 0:0:1") datTimeValues.foreach { value => val result = Literal(CalendarInterval.fromDayTimeString(value)) - assertEqual(s"interval '$value' day to second", result) + checkIntervals(s"'$value' day to second", result) } // Unknown FROM TO intervals - intercept("interval 10 month to second", "Intervals FROM month TO second are not supported.") + intercept("interval 10 month to second", + "Intervals FROM month TO second are not supported.") // Composed intervals. 
- assertEqual( - "interval 3 months 22 seconds 1 millisecond", + checkIntervals( + "3 months 22 seconds 1 millisecond", Literal(new CalendarInterval(3, 22001000L))) - assertEqual( - "interval 3 years '-1-10' year to month 3 weeks '1 0:0:2' day to second", + checkIntervals( + "3 years '-1-10' year to month 3 weeks '1 0:0:2' day to second", Literal(new CalendarInterval(14, 22 * CalendarInterval.MICROS_PER_DAY + 2 * CalendarInterval.MICROS_PER_SECOND))) } + test("SPARK-23264 Interval Compatibility tests") { + def checkIntervals(intervalValue: String, expected: Literal): Unit = { + withSQLConf(SQLConf.ANSI_SQL_PARSER.key -> "true") { + assertEqual(intervalValue, expected) + } + + // Compatibility tests: If ANSI SQL disabled, `intervalValue` should be parsed as an alias + withSQLConf(SQLConf.ANSI_SQL_PARSER.key -> "false") { + val aliases = defaultParser.parseExpression(intervalValue).collect { + case a @ Alias(_: Literal, name) + if intervalUnits.exists { unit => name.startsWith(unit) } => a + } + assert(aliases.size === 1) + } + } + val forms = Seq("", "s") + val values = Seq("5", "1", "-11", "8") + intervalUnits.foreach { unit => + forms.foreach { form => + values.foreach { value => + val expected = intervalLiteral(unit, value) + checkIntervals(s"$value $unit$form", expected) + checkIntervals(s"'$value' $unit$form", expected) + } + } + } + } + test("composed expressions") { assertEqual("1 + r.r As q", (Literal(1) + UnresolvedAttribute("r.r")).as("q")) assertEqual("1 - f('o', o(bar))", Literal(1) - 'f.function("o", 'o.function('bar))) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala index 2725deb..4dfd817 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala +++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala @@ -74,6 +74,7 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { "date", "datetime", "day", + "days", "dbproperties", "decimal", "deferred", @@ -113,6 +114,7 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { "grouping", "hold_ddltime", "hour", + "hours", "idxproperties", "ignore", "import", @@ -148,9 +150,15 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { "mapjoin", "materialized", "metadata", + "microsecond", + "microseconds", + "millisecond", + "milliseconds", "minus", "minute", + "minutes", "month", + "months", "msck", "no_drop", "none", @@ -205,6 +213,7 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { "rows", "schemas", "second", + "seconds", "serde", "serdeproperties", "server", @@ -253,11 +262,14 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { "utctimestamp", "values", "view", + "week", + "weeks", "while", "with", "work", "write", - "year") + "year", + "years") val hiveStrictNonReservedKeyword = Seq( "anti", @@ -408,6 +420,7 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { "databases", "date", "day", + "days", "dbproperties", "deallocate", "dec", @@ -500,6 +513,7 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { "having", "hold", "hour", + "hours", "identity", "if", "ignore", @@ -582,13 +596,19 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { "member", "merge", "method", + "microsecond", + "microseconds", + "millisecond", + "milliseconds", "min", "minus", "minute", + "minutes", "mod", "modifies", "module", "month", + "months", "msck", "multiset", "names", @@ -716,6 +736,7 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { "scroll", "search", "second", + "seconds", "section", "seek", "select", @@ -826,6 +847,8 @@ class 
TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { "varying", "versioning", "view", + "week", + "weeks", "when", "whenever", "where", @@ -838,6 +861,7 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { "work", "write", "year", + "years", "zone") val reservedKeywordsInAnsiMode = Set( @@ -860,6 +884,7 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { "current_time", "current_timestamp", "current_user", + "day", "distinct", "else", "end", @@ -873,6 +898,7 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { "grant", "group", "having", + "hour", "in", "inner", "intersect", @@ -881,6 +907,8 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { "is", "leading", "left", + "minute", + "month", "natural", "not", "null", @@ -897,6 +925,7 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { "semi", "session_user", "minus", + "second", "some", "table", "then", @@ -908,7 +937,8 @@ class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper { "using", "when", "where", - "with") + "with", + "year") val nonReservedKeywordsInAnsiMode = allCandidateKeywords -- reservedKeywordsInAnsiMode diff --git a/sql/core/src/test/resources/sql-tests/inputs/ansi/interval.sql b/sql/core/src/test/resources/sql-tests/inputs/ansi/interval.sql new file mode 100644 index 0000000..f2f4b02 --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/inputs/ansi/interval.sql @@ -0,0 +1,188 @@ +-- Turns on ANSI mode +SET spark.sql.parser.ansi.enabled=true; + +select + '1' second, + 2 seconds, + '1' minute, + 2 minutes, + '1' hour, + 2 hours, + '1' day, + 2 days, + '1' month, + 2 months, + '1' year, + 2 years; + +select + interval '10-11' year to month, + interval '10' year, + interval '11' month; + +select + '10-11' year to month, + '10' year, + '11' month; + +select + interval '10 9:8:7.987654321' day to second, + interval '10' day, + interval '11' hour, + interval '12' 
minute, + interval '13' second, + interval '13.123456789' second; + +select + '10 9:8:7.987654321' day to second, + '10' day, + '11' hour, + '12' minute, + '13' second, + '13.123456789' second; + +select map(1, interval 1 day, 2, interval 3 week); + +select map(1, 1 day, 2, 3 week); + +-- Interval year-month arithmetic + +create temporary view interval_arithmetic as + select CAST(dateval AS date), CAST(tsval AS timestamp) from values + ('2012-01-01', '2012-01-01') + as interval_arithmetic(dateval, tsval); + +select + dateval, + dateval - interval '2-2' year to month, + dateval - interval '-2-2' year to month, + dateval + interval '2-2' year to month, + dateval + interval '-2-2' year to month, + - interval '2-2' year to month + dateval, + interval '2-2' year to month + dateval +from interval_arithmetic; + +select + dateval, + dateval - '2-2' year to month, + dateval - '-2-2' year to month, + dateval + '2-2' year to month, + dateval + '-2-2' year to month, + - '2-2' year to month + dateval, + '2-2' year to month + dateval +from interval_arithmetic; + +select + tsval, + tsval - interval '2-2' year to month, + tsval - interval '-2-2' year to month, + tsval + interval '2-2' year to month, + tsval + interval '-2-2' year to month, + - interval '2-2' year to month + tsval, + interval '2-2' year to month + tsval +from interval_arithmetic; + +select + tsval, + tsval - '2-2' year to month, + tsval - '-2-2' year to month, + tsval + '2-2' year to month, + tsval + '-2-2' year to month, + - '2-2' year to month + tsval, + '2-2' year to month + tsval +from interval_arithmetic; + +select + interval '2-2' year to month + interval '3-3' year to month, + interval '2-2' year to month - interval '3-3' year to month +from interval_arithmetic; + +select + '2-2' year to month + '3-3' year to month, + '2-2' year to month - '3-3' year to month +from interval_arithmetic; + +-- Interval day-time arithmetic + +select + dateval, + dateval - interval '99 11:22:33.123456789' day to second, + 
dateval - interval '-99 11:22:33.123456789' day to second, + dateval + interval '99 11:22:33.123456789' day to second, + dateval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + dateval, + interval '99 11:22:33.123456789' day to second + dateval +from interval_arithmetic; + +select + dateval, + dateval - '99 11:22:33.123456789' day to second, + dateval - '-99 11:22:33.123456789' day to second, + dateval + '99 11:22:33.123456789' day to second, + dateval + '-99 11:22:33.123456789' day to second, + - '99 11:22:33.123456789' day to second + dateval, + '99 11:22:33.123456789' day to second + dateval +from interval_arithmetic; + +select + tsval, + tsval - interval '99 11:22:33.123456789' day to second, + tsval - interval '-99 11:22:33.123456789' day to second, + tsval + interval '99 11:22:33.123456789' day to second, + tsval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + tsval, + interval '99 11:22:33.123456789' day to second + tsval +from interval_arithmetic; + +select + tsval, + tsval - '99 11:22:33.123456789' day to second, + tsval - '-99 11:22:33.123456789' day to second, + tsval + '99 11:22:33.123456789' day to second, + tsval + '-99 11:22:33.123456789' day to second, + - '99 11:22:33.123456789' day to second + tsval, + '99 11:22:33.123456789' day to second + tsval +from interval_arithmetic; + +select + interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second, + interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second +from interval_arithmetic; + +select + '99 11:22:33.123456789' day to second + '10 9:8:7.123456789' day to second, + '99 11:22:33.123456789' day to second - '10 9:8:7.123456789' day to second +from interval_arithmetic; + +-- More tests for interval syntax alternatives + +select 30 day; + +select 30 day day; + +select 30 day day day; + +select date '2012-01-01' - 30 day; + 
+select date '2012-01-01' - 30 day day; + +select date '2012-01-01' - 30 day day day; + +select date '2012-01-01' + '-30' day; + +select date '2012-01-01' + interval '-30' day; + +-- Unsupported syntax for intervals + +select date '2012-01-01' + interval (-30) day; + +select date '2012-01-01' + (-30) day; + +create temporary view t as select * from values (1), (2) as t(a); + +select date '2012-01-01' + interval (a + 1) day from t; + +select date '2012-01-01' + (a + 1) day from t; + +-- Turns off ANSI mode +SET spark.sql.parser.ansi.enabled=false; diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out new file mode 100644 index 0000000..1f8b5b6 --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -0,0 +1,439 @@ +-- Automatically generated by SQLQueryTestSuite +-- Number of queries: 35 + + +-- !query 0 +SET spark.sql.parser.ansi.enabled=true +-- !query 0 schema +struct<key:string,value:string> +-- !query 0 output +spark.sql.parser.ansi.enabled true + + +-- !query 1 +select + '1' second, + 2 seconds, + '1' minute, + 2 minutes, + '1' hour, + 2 hours, + '1' day, + 2 days, + '1' month, + 2 months, + '1' year, + 2 years +-- !query 1 schema +struct<interval 1 seconds:calendarinterval,interval 2 seconds:calendarinterval,interval 1 minutes:calendarinterval,interval 2 minutes:calendarinterval,interval 1 hours:calendarinterval,interval 2 hours:calendarinterval,interval 1 days:calendarinterval,interval 2 days:calendarinterval,interval 1 months:calendarinterval,interval 2 months:calendarinterval,interval 1 years:calendarinterval,interval 2 years:calendarinterval> +-- !query 1 output +interval 1 seconds interval 2 seconds interval 1 minutes interval 2 minutes interval 1 hours interval 2 hours interval 1 days interval 2 days interval 1 months interval 2 months interval 1 years interval 2 years + + +-- !query 2 +select + interval '10-11' year to 
month, + interval '10' year, + interval '11' month +-- !query 2 schema +struct<interval 10 years 11 months:calendarinterval,interval 10 years:calendarinterval,interval 11 months:calendarinterval> +-- !query 2 output +interval 10 years 11 months interval 10 years interval 11 months + + +-- !query 3 +select + '10-11' year to month, + '10' year, + '11' month +-- !query 3 schema +struct<interval 10 years 11 months:calendarinterval,interval 10 years:calendarinterval,interval 11 months:calendarinterval> +-- !query 3 output +interval 10 years 11 months interval 10 years interval 11 months + + +-- !query 4 +select + interval '10 9:8:7.987654321' day to second, + interval '10' day, + interval '11' hour, + interval '12' minute, + interval '13' second, + interval '13.123456789' second +-- !query 4 schema +struct<interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 987 milliseconds 654 microseconds:calendarinterval,interval 1 weeks 3 days:calendarinterval,interval 11 hours:calendarinterval,interval 12 minutes:calendarinterval,interval 13 seconds:calendarinterval,interval 13 seconds 123 milliseconds 456 microseconds:calendarinterval> +-- !query 4 output +interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 987 milliseconds 654 microseconds interval 1 weeks 3 days interval 11 hours interval 12 minutes interval 13 seconds interval 13 seconds 123 milliseconds 456 microseconds + + +-- !query 5 +select + '10 9:8:7.987654321' day to second, + '10' day, + '11' hour, + '12' minute, + '13' second, + '13.123456789' second +-- !query 5 schema +struct<interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 987 milliseconds 654 microseconds:calendarinterval,interval 1 weeks 3 days:calendarinterval,interval 11 hours:calendarinterval,interval 12 minutes:calendarinterval,interval 13 seconds:calendarinterval,interval 13 seconds 123 milliseconds 456 microseconds:calendarinterval> +-- !query 5 output +interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 987 milliseconds 654 microseconds interval 1 weeks 
3 days interval 11 hours interval 12 minutes interval 13 seconds interval 13 seconds 123 milliseconds 456 microseconds + + +-- !query 6 +select map(1, interval 1 day, 2, interval 3 week) +-- !query 6 schema +struct<map(1, interval 1 days, 2, interval 3 weeks):map<int,calendarinterval>> +-- !query 6 output +{1:interval 1 days,2:interval 3 weeks} + + +-- !query 7 +select map(1, 1 day, 2, 3 week) +-- !query 7 schema +struct<map(1, interval 1 days, 2, interval 3 weeks):map<int,calendarinterval>> +-- !query 7 output +{1:interval 1 days,2:interval 3 weeks} + + +-- !query 8 +create temporary view interval_arithmetic as + select CAST(dateval AS date), CAST(tsval AS timestamp) from values + ('2012-01-01', '2012-01-01') + as interval_arithmetic(dateval, tsval) +-- !query 8 schema +struct<> +-- !query 8 output + + + +-- !query 9 +select + dateval, + dateval - interval '2-2' year to month, + dateval - interval '-2-2' year to month, + dateval + interval '2-2' year to month, + dateval + interval '-2-2' year to month, + - interval '2-2' year to month + dateval, + interval '2-2' year to month + dateval +from interval_arithmetic +-- !query 9 schema +struct<dateval:date,CAST(CAST(dateval AS TIMESTAMP) - interval 2 years 2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) - interval -2 years -2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval 2 years 2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval -2 years -2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + (- interval 2 years 2 months) AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval 2 years 2 months AS DATE):date> +-- !query 9 output +2012-01-01 2009-11-01 2014-03-01 2014-03-01 2009-11-01 2009-11-01 2014-03-01 + + +-- !query 10 +select + dateval, + dateval - '2-2' year to month, + dateval - '-2-2' year to month, + dateval + '2-2' year to month, + dateval + '-2-2' year to month, + - '2-2' year to month + dateval, + '2-2' year to month + dateval +from interval_arithmetic +-- !query 10 
schema +struct<dateval:date,CAST(CAST(dateval AS TIMESTAMP) - interval 2 years 2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) - interval -2 years -2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval 2 years 2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval -2 years -2 months AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + (- interval 2 years 2 months) AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval 2 years 2 months AS DATE):date> +-- !query 10 output +2012-01-01 2009-11-01 2014-03-01 2014-03-01 2009-11-01 2009-11-01 2014-03-01 + + +-- !query 11 +select + tsval, + tsval - interval '2-2' year to month, + tsval - interval '-2-2' year to month, + tsval + interval '2-2' year to month, + tsval + interval '-2-2' year to month, + - interval '2-2' year to month + tsval, + interval '2-2' year to month + tsval +from interval_arithmetic +-- !query 11 schema +struct<tsval:timestamp,CAST(tsval - interval 2 years 2 months AS TIMESTAMP):timestamp,CAST(tsval - interval -2 years -2 months AS TIMESTAMP):timestamp,CAST(tsval + interval 2 years 2 months AS TIMESTAMP):timestamp,CAST(tsval + interval -2 years -2 months AS TIMESTAMP):timestamp,CAST(tsval + (- interval 2 years 2 months) AS TIMESTAMP):timestamp,CAST(tsval + interval 2 years 2 months AS TIMESTAMP):timestamp> +-- !query 11 output +2012-01-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 2014-03-01 00:00:00 2009-11-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 + + +-- !query 12 +select + tsval, + tsval - '2-2' year to month, + tsval - '-2-2' year to month, + tsval + '2-2' year to month, + tsval + '-2-2' year to month, + - '2-2' year to month + tsval, + '2-2' year to month + tsval +from interval_arithmetic +-- !query 12 schema +struct<tsval:timestamp,CAST(tsval - interval 2 years 2 months AS TIMESTAMP):timestamp,CAST(tsval - interval -2 years -2 months AS TIMESTAMP):timestamp,CAST(tsval + interval 2 years 2 months AS TIMESTAMP):timestamp,CAST(tsval + interval -2 years -2 
months AS TIMESTAMP):timestamp,CAST(tsval + (- interval 2 years 2 months) AS TIMESTAMP):timestamp,CAST(tsval + interval 2 years 2 months AS TIMESTAMP):timestamp> +-- !query 12 output +2012-01-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 2014-03-01 00:00:00 2009-11-01 00:00:00 2009-11-01 00:00:00 2014-03-01 00:00:00 + + +-- !query 13 +select + interval '2-2' year to month + interval '3-3' year to month, + interval '2-2' year to month - interval '3-3' year to month +from interval_arithmetic +-- !query 13 schema +struct<(interval 2 years 2 months + interval 3 years 3 months):calendarinterval,(interval 2 years 2 months - interval 3 years 3 months):calendarinterval> +-- !query 13 output +interval 5 years 5 months interval -1 years -1 months + + +-- !query 14 +select + '2-2' year to month + '3-3' year to month, + '2-2' year to month - '3-3' year to month +from interval_arithmetic +-- !query 14 schema +struct<(interval 2 years 2 months + interval 3 years 3 months):calendarinterval,(interval 2 years 2 months - interval 3 years 3 months):calendarinterval> +-- !query 14 output +interval 5 years 5 months interval -1 years -1 months + + +-- !query 15 +select + dateval, + dateval - interval '99 11:22:33.123456789' day to second, + dateval - interval '-99 11:22:33.123456789' day to second, + dateval + interval '99 11:22:33.123456789' day to second, + dateval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + dateval, + interval '99 11:22:33.123456789' day to second + dateval +from interval_arithmetic +-- !query 15 schema +struct<dateval:date,CAST(CAST(dateval AS TIMESTAMP) - interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) - interval -14 weeks -1 days -11 hours -22 minutes -33 seconds -123 milliseconds -456 microseconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 
milliseconds 456 microseconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval -14 week [...] +-- !query 15 output +2012-01-01 2011-09-23 2012-04-09 2012-04-09 2011-09-23 2011-09-23 2012-04-09 + + +-- !query 16 +select + dateval, + dateval - '99 11:22:33.123456789' day to second, + dateval - '-99 11:22:33.123456789' day to second, + dateval + '99 11:22:33.123456789' day to second, + dateval + '-99 11:22:33.123456789' day to second, + - '99 11:22:33.123456789' day to second + dateval, + '99 11:22:33.123456789' day to second + dateval +from interval_arithmetic +-- !query 16 schema +struct<dateval:date,CAST(CAST(dateval AS TIMESTAMP) - interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) - interval -14 weeks -1 days -11 hours -22 minutes -33 seconds -123 milliseconds -456 microseconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds AS DATE):date,CAST(CAST(dateval AS TIMESTAMP) + interval -14 week [...] 
+-- !query 16 output +2012-01-01 2011-09-23 2012-04-09 2012-04-09 2011-09-23 2011-09-23 2012-04-09 + + +-- !query 17 +select + tsval, + tsval - interval '99 11:22:33.123456789' day to second, + tsval - interval '-99 11:22:33.123456789' day to second, + tsval + interval '99 11:22:33.123456789' day to second, + tsval + interval '-99 11:22:33.123456789' day to second, + -interval '99 11:22:33.123456789' day to second + tsval, + interval '99 11:22:33.123456789' day to second + tsval +from interval_arithmetic +-- !query 17 schema +struct<tsval:timestamp,CAST(tsval - interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds AS TIMESTAMP):timestamp,CAST(tsval - interval -14 weeks -1 days -11 hours -22 minutes -33 seconds -123 milliseconds -456 microseconds AS TIMESTAMP):timestamp,CAST(tsval + interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds AS TIMESTAMP):timestamp,CAST(tsval + interval -14 weeks -1 days -11 hours -22 minutes -33 seconds -123 mi [...] 
+-- !query 17 output +2012-01-01 00:00:00 2011-09-23 13:37:26.876544 2012-04-09 12:22:33.123456 2012-04-09 12:22:33.123456 2011-09-23 13:37:26.876544 2011-09-23 13:37:26.876544 2012-04-09 12:22:33.123456 + + +-- !query 18 +select + tsval, + tsval - '99 11:22:33.123456789' day to second, + tsval - '-99 11:22:33.123456789' day to second, + tsval + '99 11:22:33.123456789' day to second, + tsval + '-99 11:22:33.123456789' day to second, + - '99 11:22:33.123456789' day to second + tsval, + '99 11:22:33.123456789' day to second + tsval +from interval_arithmetic +-- !query 18 schema +struct<tsval:timestamp,CAST(tsval - interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds AS TIMESTAMP):timestamp,CAST(tsval - interval -14 weeks -1 days -11 hours -22 minutes -33 seconds -123 milliseconds -456 microseconds AS TIMESTAMP):timestamp,CAST(tsval + interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds AS TIMESTAMP):timestamp,CAST(tsval + interval -14 weeks -1 days -11 hours -22 minutes -33 seconds -123 mi [...] 
+-- !query 18 output +2012-01-01 00:00:00 2011-09-23 13:37:26.876544 2012-04-09 12:22:33.123456 2012-04-09 12:22:33.123456 2011-09-23 13:37:26.876544 2011-09-23 13:37:26.876544 2012-04-09 12:22:33.123456 + + +-- !query 19 +select + interval '99 11:22:33.123456789' day to second + interval '10 9:8:7.123456789' day to second, + interval '99 11:22:33.123456789' day to second - interval '10 9:8:7.123456789' day to second +from interval_arithmetic +-- !query 19 schema +struct<(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds + interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 milliseconds 456 microseconds):calendarinterval,(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds - interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 milliseconds 456 microseconds):calendarinterval> +-- !query 19 output +interval 15 weeks 4 days 20 hours 30 minutes 40 seconds 246 milliseconds 912 microseconds interval 12 weeks 5 days 2 hours 14 minutes 26 seconds + + +-- !query 20 +select + '99 11:22:33.123456789' day to second + '10 9:8:7.123456789' day to second, + '99 11:22:33.123456789' day to second - '10 9:8:7.123456789' day to second +from interval_arithmetic +-- !query 20 schema +struct<(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds + interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 milliseconds 456 microseconds):calendarinterval,(interval 14 weeks 1 days 11 hours 22 minutes 33 seconds 123 milliseconds 456 microseconds - interval 1 weeks 3 days 9 hours 8 minutes 7 seconds 123 milliseconds 456 microseconds):calendarinterval> +-- !query 20 output +interval 15 weeks 4 days 20 hours 30 minutes 40 seconds 246 milliseconds 912 microseconds interval 12 weeks 5 days 2 hours 14 minutes 26 seconds + + +-- !query 21 +select 30 day +-- !query 21 schema +struct<interval 4 weeks 2 days:calendarinterval> +-- !query 21 output +interval 4 weeks 2 days + + +-- 
!query 22 +select 30 day day +-- !query 22 schema +struct<> +-- !query 22 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'day'(line 1, pos 14) + +== SQL == +select 30 day day +--------------^^^ + + +-- !query 23 +select 30 day day day +-- !query 23 schema +struct<> +-- !query 23 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'day'(line 1, pos 14) + +== SQL == +select 30 day day day +--------------^^^ + + +-- !query 24 +select date '2012-01-01' - 30 day +-- !query 24 schema +struct<CAST(CAST(DATE '2012-01-01' AS TIMESTAMP) - interval 4 weeks 2 days AS DATE):date> +-- !query 24 output +2011-12-02 + + +-- !query 25 +select date '2012-01-01' - 30 day day +-- !query 25 schema +struct<> +-- !query 25 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'day'(line 1, pos 34) + +== SQL == +select date '2012-01-01' - 30 day day +----------------------------------^^^ + + +-- !query 26 +select date '2012-01-01' - 30 day day day +-- !query 26 schema +struct<> +-- !query 26 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'day'(line 1, pos 34) + +== SQL == +select date '2012-01-01' - 30 day day day +----------------------------------^^^ + + +-- !query 27 +select date '2012-01-01' + '-30' day +-- !query 27 schema +struct<CAST(CAST(DATE '2012-01-01' AS TIMESTAMP) + interval -4 weeks -2 days AS DATE):date> +-- !query 27 output +2011-12-02 + + +-- !query 28 +select date '2012-01-01' + interval '-30' day +-- !query 28 schema +struct<CAST(CAST(DATE '2012-01-01' AS TIMESTAMP) + interval -4 weeks -2 days AS DATE):date> +-- !query 28 output +2011-12-02 + + +-- !query 29 +select date '2012-01-01' + interval (-30) day +-- !query 29 schema +struct<> +-- !query 29 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'day'(line 1, pos 42) + +== SQL == +select date 
'2012-01-01' + interval (-30) day +------------------------------------------^^^ + + +-- !query 30 +select date '2012-01-01' + (-30) day +-- !query 30 schema +struct<> +-- !query 30 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'day'(line 1, pos 33) + +== SQL == +select date '2012-01-01' + (-30) day +---------------------------------^^^ + + +-- !query 31 +create temporary view t as select * from values (1), (2) as t(a) +-- !query 31 schema +struct<> +-- !query 31 output + + + +-- !query 32 +select date '2012-01-01' + interval (a + 1) day from t +-- !query 32 schema +struct<> +-- !query 32 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'day'(line 1, pos 44) + +== SQL == +select date '2012-01-01' + interval (a + 1) day from t +--------------------------------------------^^^ + + +-- !query 33 +select date '2012-01-01' + (a + 1) day from t +-- !query 33 schema +struct<> +-- !query 33 output +org.apache.spark.sql.catalyst.parser.ParseException + +no viable alternative at input 'day'(line 1, pos 35) + +== SQL == +select date '2012-01-01' + (a + 1) day from t +-----------------------------------^^^ + + +-- !query 34 +SET spark.sql.parser.ansi.enabled=false +-- !query 34 schema +struct<key:string,value:string> +-- !query 34 output +spark.sql.parser.ansi.enabled false diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/literals.sql.out index 8d8decb..c0ce3d7 100644 --- a/sql/core/src/test/resources/sql-tests/results/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out @@ -343,11 +343,11 @@ struct<> -- !query 36 output org.apache.spark.sql.catalyst.parser.ParseException -No interval can be constructed(line 1, pos 16) +no viable alternative at input 'interval 10 nanoseconds'(line 1, pos 19) == SQL == select interval 10 nanoseconds -----------------^^^ +-------------------^^^ -- 
!query 37 diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala index 3d374a1..7b85626 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala @@ -19,7 +19,7 @@ package org.apache.spark.sql import java.io.File import java.net.{MalformedURLException, URL} -import java.sql.Timestamp +import java.sql.{Date, Timestamp} import java.util.concurrent.atomic.AtomicBoolean import org.apache.spark.{AccumulatorSuite, SparkException} @@ -413,32 +413,32 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext { checkAnswer(sql( "SELECT time FROM timestamps WHERE time='1969-12-31 16:00:00.0'"), - Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00"))) + Row(Timestamp.valueOf("1969-12-31 16:00:00"))) checkAnswer(sql( "SELECT time FROM timestamps WHERE time=CAST('1969-12-31 16:00:00.001' AS TIMESTAMP)"), - Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.001"))) + Row(Timestamp.valueOf("1969-12-31 16:00:00.001"))) checkAnswer(sql( "SELECT time FROM timestamps WHERE time='1969-12-31 16:00:00.001'"), - Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.001"))) + Row(Timestamp.valueOf("1969-12-31 16:00:00.001"))) checkAnswer(sql( "SELECT time FROM timestamps WHERE '1969-12-31 16:00:00.001'=time"), - Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.001"))) + Row(Timestamp.valueOf("1969-12-31 16:00:00.001"))) checkAnswer(sql( """SELECT time FROM timestamps WHERE time<'1969-12-31 16:00:00.003' AND time>'1969-12-31 16:00:00.001'"""), - Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.002"))) + Row(Timestamp.valueOf("1969-12-31 16:00:00.002"))) checkAnswer(sql( """ |SELECT time FROM timestamps |WHERE time IN ('1969-12-31 16:00:00.001','1969-12-31 16:00:00.002') """.stripMargin), - Seq(Row(java.sql.Timestamp.valueOf("1969-12-31 16:00:00.001")), - Row(java.sql.Timestamp.valueOf("1969-12-31 
16:00:00.002")))) + Seq(Row(Timestamp.valueOf("1969-12-31 16:00:00.001")), + Row(Timestamp.valueOf("1969-12-31 16:00:00.002")))) checkAnswer(sql( "SELECT time FROM timestamps WHERE time='123'"), @@ -548,7 +548,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext { test("date row") { checkAnswer(sql( """select cast("2015-01-28" as date) from testData limit 1"""), - Row(java.sql.Date.valueOf("2015-01-28")) + Row(Date.valueOf("2015-01-28")) ) } @@ -1484,11 +1484,12 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext { sql("select interval") } assert(e1.message.contains("at least one time unit should be given for interval literal")) + // Currently we don't yet support nanosecond val e2 = intercept[AnalysisException] { sql("select interval 23 nanosecond") } - assert(e2.message.contains("No interval can be constructed")) + assert(e2.message.contains("no viable alternative at input 'interval 23 nanosecond'")) } test("SPARK-8945: add and subtract expressions for interval type") { --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org