This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b9fc5c03ed6 [SPARK-44778][SQL] Add the alias `TIMEDIFF` for `TIMESTAMPDIFF`
b9fc5c03ed6 is described below

commit b9fc5c03ed69e91d9c4cbe7ff5a1522c7b849568
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Sat Aug 12 11:08:39 2023 +0500

    [SPARK-44778][SQL] Add the alias `TIMEDIFF` for `TIMESTAMPDIFF`
    
    ### What changes were proposed in this pull request?
    In the PR, I propose to extend the `primaryExpression` rule in `SqlBaseParser.g4` with one more function, `TIMEDIFF`, which accepts three arguments in the same way as the existing expression `TIMESTAMPDIFF`.
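    
    For example (one of the new test queries added below), `TIMEDIFF` parses to the same `timestampdiff` expression:
    ```
    SELECT timediff(HOUR, timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 12:00:03');
    -- 10
    ```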
    
    ### Why are the changes needed?
    To achieve feature parity with other systems and make the migration to Spark SQL from such systems easier:
    1. Snowflake: https://docs.snowflake.com/en/sql-reference/functions/timediff
    2. MySQL/MariaDB: https://dev.mysql.com/doc/refman/8.0/en/date-and-time-functions.html#function_timediff
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    By running the existing test suites:
    ```
    $ PYSPARK_PYTHON=python3 build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite"
    ```
    
    Closes #42435 from MaxGekk/timediff.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 docs/sql-ref-ansi-compliance.md                    |  1 +
 .../spark/sql/catalyst/parser/SqlBaseLexer.g4      |  1 +
 .../spark/sql/catalyst/parser/SqlBaseParser.g4     |  4 +-
 .../analyzer-results/ansi/timestamp.sql.out        | 68 ++++++++++++++++++
 .../analyzer-results/datetime-legacy.sql.out       | 68 ++++++++++++++++++
 .../sql-tests/analyzer-results/timestamp.sql.out   | 68 ++++++++++++++++++
 .../timestampNTZ/timestamp-ansi.sql.out            | 70 +++++++++++++++++++
 .../timestampNTZ/timestamp.sql.out                 | 70 +++++++++++++++++++
 .../test/resources/sql-tests/inputs/timestamp.sql  |  8 +++
 .../sql-tests/results/ansi/keywords.sql.out        |  1 +
 .../sql-tests/results/ansi/timestamp.sql.out       | 80 ++++++++++++++++++++++
 .../sql-tests/results/datetime-legacy.sql.out      | 80 ++++++++++++++++++++++
 .../resources/sql-tests/results/keywords.sql.out   |  1 +
 .../resources/sql-tests/results/timestamp.sql.out  | 80 ++++++++++++++++++++++
 .../results/timestampNTZ/timestamp-ansi.sql.out    | 80 ++++++++++++++++++++++
 .../results/timestampNTZ/timestamp.sql.out         | 80 ++++++++++++++++++++++
 .../ThriftServerWithSparkContextSuite.scala        |  2 +-
 17 files changed, 760 insertions(+), 2 deletions(-)

diff --git a/docs/sql-ref-ansi-compliance.md b/docs/sql-ref-ansi-compliance.md
index f3a0e8f9afb..09c38a00995 100644
--- a/docs/sql-ref-ansi-compliance.md
+++ b/docs/sql-ref-ansi-compliance.md
@@ -636,6 +636,7 @@ Below is a list of all the keywords in Spark SQL.
 |TERMINATED|non-reserved|non-reserved|non-reserved|
 |THEN|reserved|non-reserved|reserved|
 |TIME|reserved|non-reserved|reserved|
+|TIMEDIFF|non-reserved|non-reserved|non-reserved|
 |TIMESTAMP|non-reserved|non-reserved|non-reserved|
 |TIMESTAMP_LTZ|non-reserved|non-reserved|non-reserved|
 |TIMESTAMP_NTZ|non-reserved|non-reserved|non-reserved|
diff --git a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4 b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
index bf6370575a1..d9128de0f5d 100644
--- a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
+++ b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
@@ -373,6 +373,7 @@ TEMPORARY: 'TEMPORARY' | 'TEMP';
 TERMINATED: 'TERMINATED';
 THEN: 'THEN';
 TIME: 'TIME';
+TIMEDIFF: 'TIMEDIFF';
 TIMESTAMP: 'TIMESTAMP';
 TIMESTAMP_LTZ: 'TIMESTAMP_LTZ';
 TIMESTAMP_NTZ: 'TIMESTAMP_NTZ';
diff --git a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
index a45ebee3106..7a69b10dadb 100644
--- a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
+++ b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
@@ -953,7 +953,7 @@ datetimeUnit
 primaryExpression
     : name=(CURRENT_DATE | CURRENT_TIMESTAMP | CURRENT_USER | USER)                              #currentLike
     | name=(TIMESTAMPADD | DATEADD | DATE_ADD) LEFT_PAREN (unit=datetimeUnit | invalidUnit=stringLit) COMMA unitsAmount=valueExpression COMMA timestamp=valueExpression RIGHT_PAREN             #timestampadd
-    | name=(TIMESTAMPDIFF | DATEDIFF | DATE_DIFF) LEFT_PAREN (unit=datetimeUnit | invalidUnit=stringLit) COMMA startTimestamp=valueExpression COMMA endTimestamp=valueExpression RIGHT_PAREN    #timestampdiff
+    | name=(TIMESTAMPDIFF | DATEDIFF | DATE_DIFF | TIMEDIFF) LEFT_PAREN (unit=datetimeUnit | invalidUnit=stringLit) COMMA startTimestamp=valueExpression COMMA endTimestamp=valueExpression RIGHT_PAREN    #timestampdiff
     | CASE whenClause+ (ELSE elseExpression=expression)? END                                      #searchedCase
     | CASE value=expression whenClause+ (ELSE elseExpression=expression)? END                     #simpleCase
     | name=(CAST | TRY_CAST) LEFT_PAREN expression AS dataType RIGHT_PAREN                        #cast
@@ -1511,6 +1511,7 @@ ansiNonReserved
     | TBLPROPERTIES
     | TEMPORARY
     | TERMINATED
+    | TIMEDIFF
     | TIMESTAMP
     | TIMESTAMP_LTZ
     | TIMESTAMP_NTZ
@@ -1853,6 +1854,7 @@ nonReserved
     | TERMINATED
     | THEN
     | TIME
+    | TIMEDIFF
     | TIMESTAMP
     | TIMESTAMP_LTZ
     | TIMESTAMP_NTZ
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out 
b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out
index d06813dc675..49e160a6cd4 100644
--- 
a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out
@@ -875,3 +875,71 @@ org.apache.spark.sql.catalyst.parser.ParseException
     "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
   } ]
 }
+
+
+-- !query
+select timediff(QUARTER, timestamp'2023-08-10 01:02:03', timestamp'2022-01-14 
01:02:03')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timediff(HOUR, timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 
12:00:03')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timediff(DAY, date'2022-02-15', date'2023-02-15')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timediff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timediff('MINUTE', timestamp'2023-02-14 01:02:03', timestamp'2023-02-14 
02:00:03')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 89,
+    "fragment" : "timediff('MINUTE', timestamp'2023-02-14 01:02:03', 
timestamp'2023-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timediff('YEAR', date'2020-02-15', date'2023-02-15')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "timediff('YEAR', date'2020-02-15', date'2023-02-15')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out
 
b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out
index 8ae2893a0a8..1243a03fcf0 100644
--- 
a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out
@@ -1978,3 +1978,71 @@ org.apache.spark.sql.catalyst.parser.ParseException
     "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
   } ]
 }
+
+
+-- !query
+select timediff(QUARTER, timestamp'2023-08-10 01:02:03', timestamp'2022-01-14 
01:02:03')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timediff(HOUR, timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 
12:00:03')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timediff(DAY, date'2022-02-15', date'2023-02-15')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timediff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timediff('MINUTE', timestamp'2023-02-14 01:02:03', timestamp'2023-02-14 
02:00:03')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 89,
+    "fragment" : "timediff('MINUTE', timestamp'2023-02-14 01:02:03', 
timestamp'2023-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timediff('YEAR', date'2020-02-15', date'2023-02-15')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "timediff('YEAR', date'2020-02-15', date'2023-02-15')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out 
b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out
index 12fa38ac3c3..0661cd045e4 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out
@@ -947,3 +947,71 @@ org.apache.spark.sql.catalyst.parser.ParseException
     "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
   } ]
 }
+
+
+-- !query
+select timediff(QUARTER, timestamp'2023-08-10 01:02:03', timestamp'2022-01-14 
01:02:03')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timediff(HOUR, timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 
12:00:03')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timediff(DAY, date'2022-02-15', date'2023-02-15')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timediff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timediff('MINUTE', timestamp'2023-02-14 01:02:03', timestamp'2023-02-14 
02:00:03')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 89,
+    "fragment" : "timediff('MINUTE', timestamp'2023-02-14 01:02:03', 
timestamp'2023-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timediff('YEAR', date'2020-02-15', date'2023-02-15')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "timediff('YEAR', date'2020-02-15', date'2023-02-15')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp-ansi.sql.out
 
b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp-ansi.sql.out
index bbdbd903d82..827bc8ddeea 100644
--- 
a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp-ansi.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp-ansi.sql.out
@@ -894,3 +894,73 @@ org.apache.spark.sql.catalyst.parser.ParseException
     "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
   } ]
 }
+
+
+-- !query
+select timediff(QUARTER, timestamp'2023-08-10 01:02:03', timestamp'2022-01-14 
01:02:03')
+-- !query analysis
+Project [timestampdiff(QUARTER, cast(2023-08-10 01:02:03 as timestamp), 
cast(2022-01-14 01:02:03 as timestamp), Some(America/Los_Angeles)) AS 
timestampdiff(QUARTER, TIMESTAMP_NTZ '2023-08-10 01:02:03', TIMESTAMP_NTZ 
'2022-01-14 01:02:03')#xL]
++- OneRowRelation
+
+
+-- !query
+select timediff(HOUR, timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 
12:00:03')
+-- !query analysis
+Project [timestampdiff(HOUR, cast(2022-02-14 01:02:03 as timestamp), 
cast(2022-02-14 12:00:03 as timestamp), Some(America/Los_Angeles)) AS 
timestampdiff(HOUR, TIMESTAMP_NTZ '2022-02-14 01:02:03', TIMESTAMP_NTZ 
'2022-02-14 12:00:03')#xL]
++- OneRowRelation
+
+
+-- !query
+select timediff(DAY, date'2022-02-15', date'2023-02-15')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timediff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timediff('MINUTE', timestamp'2023-02-14 01:02:03', timestamp'2023-02-14 
02:00:03')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 89,
+    "fragment" : "timediff('MINUTE', timestamp'2023-02-14 01:02:03', 
timestamp'2023-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timediff('YEAR', date'2020-02-15', date'2023-02-15')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "timediff('YEAR', date'2020-02-15', date'2023-02-15')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out
 
b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out
index 720ab45acd7..a6b61b3957c 100644
--- 
a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out
@@ -954,3 +954,73 @@ org.apache.spark.sql.catalyst.parser.ParseException
     "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
   } ]
 }
+
+
+-- !query
+select timediff(QUARTER, timestamp'2023-08-10 01:02:03', timestamp'2022-01-14 
01:02:03')
+-- !query analysis
+Project [timestampdiff(QUARTER, cast(2023-08-10 01:02:03 as timestamp), 
cast(2022-01-14 01:02:03 as timestamp), Some(America/Los_Angeles)) AS 
timestampdiff(QUARTER, TIMESTAMP_NTZ '2023-08-10 01:02:03', TIMESTAMP_NTZ 
'2022-01-14 01:02:03')#xL]
++- OneRowRelation
+
+
+-- !query
+select timediff(HOUR, timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 
12:00:03')
+-- !query analysis
+Project [timestampdiff(HOUR, cast(2022-02-14 01:02:03 as timestamp), 
cast(2022-02-14 12:00:03 as timestamp), Some(America/Los_Angeles)) AS 
timestampdiff(HOUR, TIMESTAMP_NTZ '2022-02-14 01:02:03', TIMESTAMP_NTZ 
'2022-02-14 12:00:03')#xL]
++- OneRowRelation
+
+
+-- !query
+select timediff(DAY, date'2022-02-15', date'2023-02-15')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timediff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59')
+-- !query analysis
+[Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timediff('MINUTE', timestamp'2023-02-14 01:02:03', timestamp'2023-02-14 
02:00:03')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 89,
+    "fragment" : "timediff('MINUTE', timestamp'2023-02-14 01:02:03', 
timestamp'2023-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timediff('YEAR', date'2020-02-15', date'2023-02-15')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "timediff('YEAR', date'2020-02-15', date'2023-02-15')"
+  } ]
+}
diff --git a/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql b/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql
index 163b734164e..72ea88a0320 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql
@@ -161,3 +161,11 @@ select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59');
 
 select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 02:00:03');
 select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15');
+
+select timediff(QUARTER, timestamp'2023-08-10 01:02:03', timestamp'2022-01-14 01:02:03');
+select timediff(HOUR, timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 12:00:03');
+select timediff(DAY, date'2022-02-15', date'2023-02-15');
+select timediff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59');
+
+select timediff('MINUTE', timestamp'2023-02-14 01:02:03', timestamp'2023-02-14 02:00:03');
+select timediff('YEAR', date'2020-02-15', date'2023-02-15');
diff --git 
a/sql/core/src/test/resources/sql-tests/results/ansi/keywords.sql.out 
b/sql/core/src/test/resources/sql-tests/results/ansi/keywords.sql.out
index 2abb5cee119..f88dcbd4658 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/keywords.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/keywords.sql.out
@@ -285,6 +285,7 @@ TBLPROPERTIES       false
 TERMINATED     false
 THEN   true
 TIME   true
+TIMEDIFF       false
 TIMESTAMP      false
 TIMESTAMPADD   false
 TIMESTAMPDIFF  false
diff --git 
a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out 
b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
index 51314079a8d..d7a58e321b0 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
@@ -1137,3 +1137,83 @@ org.apache.spark.sql.catalyst.parser.ParseException
     "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
   } ]
 }
+
+
+-- !query
+select timediff(QUARTER, timestamp'2023-08-10 01:02:03', timestamp'2022-01-14 
01:02:03')
+-- !query schema
+struct<timestampdiff(QUARTER, TIMESTAMP '2023-08-10 01:02:03', TIMESTAMP 
'2022-01-14 01:02:03'):bigint>
+-- !query output
+-6
+
+
+-- !query
+select timediff(HOUR, timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 
12:00:03')
+-- !query schema
+struct<timestampdiff(HOUR, TIMESTAMP '2022-02-14 01:02:03', TIMESTAMP 
'2022-02-14 12:00:03'):bigint>
+-- !query output
+10
+
+
+-- !query
+select timediff(DAY, date'2022-02-15', date'2023-02-15')
+-- !query schema
+struct<timestampdiff(DAY, DATE '2022-02-15', DATE '2023-02-15'):bigint>
+-- !query output
+365
+
+
+-- !query
+select timediff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59')
+-- !query schema
+struct<timestampdiff(SECOND, DATE '2022-02-15', TIMESTAMP '2022-02-14 
23:59:59'):bigint>
+-- !query output
+-1
+
+
+-- !query
+select timediff('MINUTE', timestamp'2023-02-14 01:02:03', timestamp'2023-02-14 
02:00:03')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 89,
+    "fragment" : "timediff('MINUTE', timestamp'2023-02-14 01:02:03', 
timestamp'2023-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timediff('YEAR', date'2020-02-15', date'2023-02-15')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "timediff('YEAR', date'2020-02-15', date'2023-02-15')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out 
b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
index 1642e10ea41..8b8a09ae3f1 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
@@ -2276,3 +2276,83 @@ org.apache.spark.sql.catalyst.parser.ParseException
     "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
   } ]
 }
+
+
+-- !query
+select timediff(QUARTER, timestamp'2023-08-10 01:02:03', timestamp'2022-01-14 
01:02:03')
+-- !query schema
+struct<timestampdiff(QUARTER, TIMESTAMP '2023-08-10 01:02:03', TIMESTAMP 
'2022-01-14 01:02:03'):bigint>
+-- !query output
+-6
+
+
+-- !query
+select timediff(HOUR, timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 
12:00:03')
+-- !query schema
+struct<timestampdiff(HOUR, TIMESTAMP '2022-02-14 01:02:03', TIMESTAMP 
'2022-02-14 12:00:03'):bigint>
+-- !query output
+10
+
+
+-- !query
+select timediff(DAY, date'2022-02-15', date'2023-02-15')
+-- !query schema
+struct<timestampdiff(DAY, DATE '2022-02-15', DATE '2023-02-15'):bigint>
+-- !query output
+365
+
+
+-- !query
+select timediff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59')
+-- !query schema
+struct<timestampdiff(SECOND, DATE '2022-02-15', TIMESTAMP '2022-02-14 
23:59:59'):bigint>
+-- !query output
+-1
+
+
+-- !query
+select timediff('MINUTE', timestamp'2023-02-14 01:02:03', timestamp'2023-02-14 
02:00:03')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 89,
+    "fragment" : "timediff('MINUTE', timestamp'2023-02-14 01:02:03', 
timestamp'2023-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timediff('YEAR', date'2020-02-15', date'2023-02-15')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "timediff('YEAR', date'2020-02-15', date'2023-02-15')"
+  } ]
+}
diff --git a/sql/core/src/test/resources/sql-tests/results/keywords.sql.out 
b/sql/core/src/test/resources/sql-tests/results/keywords.sql.out
index 716e2a32e7f..b618299ea61 100644
--- a/sql/core/src/test/resources/sql-tests/results/keywords.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/keywords.sql.out
@@ -285,6 +285,7 @@ TBLPROPERTIES       false
 TERMINATED     false
 THEN   false
 TIME   false
+TIMEDIFF       false
 TIMESTAMP      false
 TIMESTAMPADD   false
 TIMESTAMPDIFF  false
diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out 
b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
index c28fda32c9c..3128c5dd359 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
@@ -1133,3 +1133,83 @@ org.apache.spark.sql.catalyst.parser.ParseException
     "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
   } ]
 }
+
+
+-- !query
+select timediff(QUARTER, timestamp'2023-08-10 01:02:03', timestamp'2022-01-14 
01:02:03')
+-- !query schema
+struct<timestampdiff(QUARTER, TIMESTAMP '2023-08-10 01:02:03', TIMESTAMP 
'2022-01-14 01:02:03'):bigint>
+-- !query output
+-6
+
+
+-- !query
+select timediff(HOUR, timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 
12:00:03')
+-- !query schema
+struct<timestampdiff(HOUR, TIMESTAMP '2022-02-14 01:02:03', TIMESTAMP 
'2022-02-14 12:00:03'):bigint>
+-- !query output
+10
+
+
+-- !query
+select timediff(DAY, date'2022-02-15', date'2023-02-15')
+-- !query schema
+struct<timestampdiff(DAY, DATE '2022-02-15', DATE '2023-02-15'):bigint>
+-- !query output
+365
+
+
+-- !query
+select timediff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59')
+-- !query schema
+struct<timestampdiff(SECOND, DATE '2022-02-15', TIMESTAMP '2022-02-14 
23:59:59'):bigint>
+-- !query output
+-1
+
+
+-- !query
+select timediff('MINUTE', timestamp'2023-02-14 01:02:03', timestamp'2023-02-14 
02:00:03')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 89,
+    "fragment" : "timediff('MINUTE', timestamp'2023-02-14 01:02:03', 
timestamp'2023-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timediff('YEAR', date'2020-02-15', date'2023-02-15')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "timediff('YEAR', date'2020-02-15', date'2023-02-15')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index 06b6818e00e..25aaadfc8e7 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -1128,3 +1128,83 @@ org.apache.spark.sql.catalyst.parser.ParseException
     "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
   } ]
 }
+
+
+-- !query
+select timediff(QUARTER, timestamp'2023-08-10 01:02:03', timestamp'2022-01-14 
01:02:03')
+-- !query schema
+struct<timestampdiff(QUARTER, TIMESTAMP_NTZ '2023-08-10 01:02:03', 
TIMESTAMP_NTZ '2022-01-14 01:02:03'):bigint>
+-- !query output
+-6
+
+
+-- !query
+select timediff(HOUR, timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 
12:00:03')
+-- !query schema
+struct<timestampdiff(HOUR, TIMESTAMP_NTZ '2022-02-14 01:02:03', TIMESTAMP_NTZ 
'2022-02-14 12:00:03'):bigint>
+-- !query output
+10
+
+
+-- !query
+select timediff(DAY, date'2022-02-15', date'2023-02-15')
+-- !query schema
+struct<timestampdiff(DAY, DATE '2022-02-15', DATE '2023-02-15'):bigint>
+-- !query output
+365
+
+
+-- !query
+select timediff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59')
+-- !query schema
+struct<timestampdiff(SECOND, DATE '2022-02-15', TIMESTAMP_NTZ '2022-02-14 
23:59:59'):bigint>
+-- !query output
+-1
+
+
+-- !query
+select timediff('MINUTE', timestamp'2023-02-14 01:02:03', timestamp'2023-02-14 
02:00:03')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 89,
+    "fragment" : "timediff('MINUTE', timestamp'2023-02-14 01:02:03', 
timestamp'2023-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timediff('YEAR', date'2020-02-15', date'2023-02-15')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "timediff('YEAR', date'2020-02-15', date'2023-02-15')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out 
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
index ff13693e65b..24edf1a3577 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
@@ -1109,3 +1109,83 @@ org.apache.spark.sql.catalyst.parser.ParseException
     "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
   } ]
 }
+
+
+-- !query
+select timediff(QUARTER, timestamp'2023-08-10 01:02:03', timestamp'2022-01-14 
01:02:03')
+-- !query schema
+struct<timestampdiff(QUARTER, TIMESTAMP_NTZ '2023-08-10 01:02:03', 
TIMESTAMP_NTZ '2022-01-14 01:02:03'):bigint>
+-- !query output
+-6
+
+
+-- !query
+select timediff(HOUR, timestamp'2022-02-14 01:02:03', timestamp'2022-02-14 
12:00:03')
+-- !query schema
+struct<timestampdiff(HOUR, TIMESTAMP_NTZ '2022-02-14 01:02:03', TIMESTAMP_NTZ 
'2022-02-14 12:00:03'):bigint>
+-- !query output
+10
+
+
+-- !query
+select timediff(DAY, date'2022-02-15', date'2023-02-15')
+-- !query schema
+struct<timestampdiff(DAY, DATE '2022-02-15', DATE '2023-02-15'):bigint>
+-- !query output
+365
+
+
+-- !query
+select timediff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59')
+-- !query schema
+struct<timestampdiff(SECOND, DATE '2022-02-15', TIMESTAMP_NTZ '2022-02-14 
23:59:59'):bigint>
+-- !query output
+-1
+
+
+-- !query
+select timediff('MINUTE', timestamp'2023-02-14 01:02:03', timestamp'2023-02-14 
02:00:03')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 89,
+    "fragment" : "timediff('MINUTE', timestamp'2023-02-14 01:02:03', 
timestamp'2023-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timediff('YEAR', date'2020-02-15', date'2023-02-15')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timediff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "timediff('YEAR', date'2020-02-15', date'2023-02-15')"
+  } ]
+}
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
index fd4c68e8ac2..8355a03125a 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
@@ -213,7 +213,7 @@ trait ThriftServerWithSparkContextSuite extends SharedThriftServer {
       val sessionHandle = client.openSession(user, "")
       val infoValue = client.getInfo(sessionHandle, GetInfoType.CLI_ODBC_KEYWORDS)
       // scalastyle:off line.size.limit
-      assert(infoValue.getStringValue == 
"ADD,AFTER,ALL,ALTER,ALWAYS,ANALYZE,AND,ANTI,ANY,ANY_VALUE,ARCHIVE,ARRAY,AS,ASC,AT,AUTHORIZATION,BETWEEN,BIGINT,BINARY,BOOLEAN,BOTH,BUCKET,BUCKETS,BY,BYTE,CACHE,CASCADE,CASE,CAST,CATALOG,CATALOGS,CHANGE,CHAR,CHARACTER,CHECK,CLEAR,CLUSTER,CLUSTERED,CODEGEN,COLLATE,COLLECTION,COLUMN,COLUMNS,COMMENT,COMMIT,COMPACT,COMPACTIONS,COMPUTE,CONCATENATE,CONSTRAINT,COST,CREATE,CROSS,CUBE,CURRENT,CURRENT_DATE,CURRENT_TIME,CURRENT_TIMESTAMP,CURRENT_USER,DATA,DA
 [...]
+      assert(infoValue.getStringValue == 
"ADD,AFTER,ALL,ALTER,ALWAYS,ANALYZE,AND,ANTI,ANY,ANY_VALUE,ARCHIVE,ARRAY,AS,ASC,AT,AUTHORIZATION,BETWEEN,BIGINT,BINARY,BOOLEAN,BOTH,BUCKET,BUCKETS,BY,BYTE,CACHE,CASCADE,CASE,CAST,CATALOG,CATALOGS,CHANGE,CHAR,CHARACTER,CHECK,CLEAR,CLUSTER,CLUSTERED,CODEGEN,COLLATE,COLLECTION,COLUMN,COLUMNS,COMMENT,COMMIT,COMPACT,COMPACTIONS,COMPUTE,CONCATENATE,CONSTRAINT,COST,CREATE,CROSS,CUBE,CURRENT,CURRENT_DATE,CURRENT_TIME,CURRENT_TIMESTAMP,CURRENT_USER,DATA,DA
 [...]
       // scalastyle:on line.size.limit
     }
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

