This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new e06275c6f14 [SPARK-43485][SQL] Fix the error message for the `unit` 
argument of the datetime add/diff functions
e06275c6f14 is described below

commit e06275c6f14e88ba583ffb3aac1159718a8cae83
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Mon May 15 13:55:18 2023 +0300

    [SPARK-43485][SQL] Fix the error message for the `unit` argument of the 
datetime add/diff functions
    
    ### What changes were proposed in this pull request?
    In the PR, I propose to extend the grammar rule of the 
`DATEADD`/`TIMESTAMPADD` and `DATEDIFF`/`TIMESTAMPDIFF` functions, and catch the 
wrong type of the first argument `unit` when a user passes a string instead of an 
identifier like `YEAR`, ..., `MICROSECOND`. In that case, Spark raises an error 
of the new error class `INVALID_PARAMETER_VALUE.DATETIME_UNIT`.
    
    ### Why are the changes needed?
    To make the error message clearer for the case when a literal string instead 
of an identifier is passed to the datetime `ADD`/`DIFF` functions:
    ```sql
    spark-sql (default)> select dateadd('MONTH', 1, date'2023-05-11');
    [WRONG_NUM_ARGS.WITHOUT_SUGGESTION] The `dateadd` requires 2 parameters but 
the actual number is 3. Please, refer to 
'https://spark.apache.org/docs/latest/sql-ref-functions.html' for a fix.; line 
1 pos 7
    ```
    
    ### Does this PR introduce _any_ user-facing change?
    Yes, it changes the error class.
    
    After the changes:
    ```sql
    spark-sql (default)> select dateadd('MONTH', 1, date'2023-05-11');
    
    [INVALID_PARAMETER_VALUE.DATETIME_UNIT] The value of parameter(s) `unit` in 
`dateadd` is invalid: expects one of the units without quotes YEAR, QUARTER, 
MONTH, WEEK, DAY, DAYOFYEAR, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, 
but got the string literal 'MONTH'.(line 1, pos 7)
    
    == SQL ==
    select dateadd('MONTH', 1, date'2023-05-11')
    -------^^^
    ```
    
    ### How was this patch tested?
    By running the existing test suites:
    ```
    $ PYSPARK_PYTHON=python3 build/sbt "sql/testOnly 
org.apache.spark.sql.SQLQueryTestSuite"
    ```
    
    Closes #41143 from MaxGekk/dateadd-unit-error.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   |   5 +
 .../spark/sql/catalyst/parser/SqlBaseParser.g4     |   4 +-
 .../spark/sql/catalyst/parser/AstBuilder.scala     |  18 +-
 .../spark/sql/errors/QueryParsingErrors.scala      |  14 ++
 .../sql-tests/analyzer-results/ansi/date.sql.out   |  88 ++++++++++
 .../analyzer-results/ansi/timestamp.sql.out        |  88 ++++++++++
 .../sql-tests/analyzer-results/date.sql.out        |  88 ++++++++++
 .../analyzer-results/datetime-legacy.sql.out       | 176 +++++++++++++++++++
 .../sql-tests/analyzer-results/timestamp.sql.out   |  88 ++++++++++
 .../timestampNTZ/timestamp-ansi.sql.out            |  88 ++++++++++
 .../timestampNTZ/timestamp.sql.out                 |  88 ++++++++++
 .../src/test/resources/sql-tests/inputs/date.sql   |   6 +
 .../test/resources/sql-tests/inputs/timestamp.sql  |   6 +
 .../resources/sql-tests/results/ansi/date.sql.out  |  96 +++++++++++
 .../sql-tests/results/ansi/timestamp.sql.out       |  96 +++++++++++
 .../test/resources/sql-tests/results/date.sql.out  |  96 +++++++++++
 .../sql-tests/results/datetime-legacy.sql.out      | 192 +++++++++++++++++++++
 .../resources/sql-tests/results/timestamp.sql.out  |  96 +++++++++++
 .../results/timestampNTZ/timestamp-ansi.sql.out    |  96 +++++++++++
 .../results/timestampNTZ/timestamp.sql.out         |  96 +++++++++++
 20 files changed, 1521 insertions(+), 4 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json 
b/core/src/main/resources/error/error-classes.json
index dde165e5fa9..fa838a6da76 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -1051,6 +1051,11 @@
           "expects a binary value with 16, 24 or 32 bytes, but got 
<actualLength> bytes."
         ]
       },
+      "DATETIME_UNIT" : {
+        "message" : [
+          "expects one of the units without quotes YEAR, QUARTER, MONTH, WEEK, 
DAY, DAYOFYEAR, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, but got the 
string literal <invalidValue>."
+        ]
+      },
       "PATTERN" : {
         "message" : [
           "<value>."
diff --git 
a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
 
b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
index 2bc79430343..591b0839ac7 100644
--- 
a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
+++ 
b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
@@ -892,8 +892,8 @@ datetimeUnit
 
 primaryExpression
     : name=(CURRENT_DATE | CURRENT_TIMESTAMP | CURRENT_USER | USER)            
                       #currentLike
-    | name=(TIMESTAMPADD | DATEADD | DATE_ADD) LEFT_PAREN unit=datetimeUnit 
COMMA unitsAmount=valueExpression COMMA timestamp=valueExpression RIGHT_PAREN   
          #timestampadd
-    | name=(TIMESTAMPDIFF | DATEDIFF | DATE_DIFF) LEFT_PAREN unit=datetimeUnit 
COMMA startTimestamp=valueExpression COMMA endTimestamp=valueExpression 
RIGHT_PAREN    #timestampdiff
+    | name=(TIMESTAMPADD | DATEADD | DATE_ADD) LEFT_PAREN (unit=datetimeUnit | 
invalidUnit=stringLit) COMMA unitsAmount=valueExpression COMMA 
timestamp=valueExpression RIGHT_PAREN             #timestampadd
+    | name=(TIMESTAMPDIFF | DATEDIFF | DATE_DIFF) LEFT_PAREN 
(unit=datetimeUnit | invalidUnit=stringLit) COMMA 
startTimestamp=valueExpression COMMA endTimestamp=valueExpression RIGHT_PAREN   
 #timestampdiff
     | CASE whenClause+ (ELSE elseExpression=expression)? END                   
                #searchedCase
     | CASE value=expression whenClause+ (ELSE elseExpression=expression)? END  
                #simpleCase
     | name=(CAST | TRY_CAST) LEFT_PAREN expression AS dataType RIGHT_PAREN     
                #cast
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index cb06fc31f0e..4761836cbad 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -4943,14 +4943,28 @@ class AstBuilder extends 
SqlBaseParserBaseVisitor[AnyRef] with SQLConfHelper wit
    * Create a TimestampAdd expression.
    */
   override def visitTimestampadd(ctx: TimestampaddContext): Expression = 
withOrigin(ctx) {
-    TimestampAdd(ctx.unit.getText, expression(ctx.unitsAmount), 
expression(ctx.timestamp))
+    if (ctx.invalidUnit != null) {
+      throw QueryParsingErrors.invalidDatetimeUnitError(
+        ctx,
+        ctx.name.getText,
+        ctx.invalidUnit.getText)
+    } else {
+      TimestampAdd(ctx.unit.getText, expression(ctx.unitsAmount), 
expression(ctx.timestamp))
+    }
   }
 
   /**
    * Create a TimestampDiff expression.
    */
   override def visitTimestampdiff(ctx: TimestampdiffContext): Expression = 
withOrigin(ctx) {
-    TimestampDiff(ctx.unit.getText, expression(ctx.startTimestamp), 
expression(ctx.endTimestamp))
+    if (ctx.invalidUnit != null) {
+      throw QueryParsingErrors.invalidDatetimeUnitError(
+        ctx,
+        ctx.name.getText,
+        ctx.invalidUnit.getText)
+    } else {
+      TimestampDiff(ctx.unit.getText, expression(ctx.startTimestamp), 
expression(ctx.endTimestamp))
+    }
   }
 
   /**
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
index 228da52f14a..6240bbc07cf 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
@@ -669,4 +669,18 @@ private[sql] object QueryParsingErrors extends 
QueryErrorsBase {
       ctx
     )
   }
+
+  def invalidDatetimeUnitError(
+      ctx: ParserRuleContext,
+      functionName: String,
+      invalidValue: String): Throwable = {
+    new ParseException(
+      errorClass = "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+      messageParameters = Map(
+        "functionName" -> toSQLId(functionName),
+        "parameter" -> toSQLId("unit"),
+        "invalidValue" -> invalidValue),
+      ctx
+    )
+  }
 }
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out 
b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out
index 9f287223a0d..28fe86d930f 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out
@@ -807,6 +807,50 @@ select date_add(YEAR, 1, date'2022-02-25')
 [Analyzer test output redacted due to nondeterminism]
 
 
+-- !query
+select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`dateadd`",
+    "invalidValue" : "'MICROSECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 71,
+    "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 
01:02:03.123')"
+  } ]
+}
+
+
+-- !query
+select date_add('QUARTER', 5, date'2022-02-25')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`date_add`",
+    "invalidValue" : "'QUARTER'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 47,
+    "fragment" : "date_add('QUARTER', 5, date'2022-02-25')"
+  } ]
+}
+
+
 -- !query
 select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', 
timestamp'2022-02-25 01:02:03.124001')
 -- !query analysis
@@ -865,3 +909,47 @@ select datediff(QUARTER, date'2022-02-25', 
date'2023-05-25')
 select date_diff(YEAR, date'2022-02-25', date'2023-02-25')
 -- !query analysis
 [Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', 
timestamp'2022-02-25 01:02:03.455')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`date_diff`",
+    "invalidValue" : "'MILLISECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 103,
+    "fragment" : "date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', 
timestamp'2022-02-25 01:02:03.455')"
+  } ]
+}
+
+
+-- !query
+select datediff('YEAR', date'2022-02-25', date'2023-02-25')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`datediff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out 
b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out
index 65baeb48e11..c4858ac10ae 100644
--- 
a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/timestamp.sql.out
@@ -765,6 +765,50 @@ select timestampadd(SECOND, -1, date'2022-02-15')
 [Analyzer test output redacted due to nondeterminism]
 
 
+-- !query
+select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'MONTH'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')"
+  } ]
+}
+
+
+-- !query
+select timestampadd('SECOND', -1, date'2022-02-15')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'SECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 51,
+    "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')"
+  } ]
+}
+
+
 -- !query
 select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', 
timestamp'2022-01-14 01:02:03')
 -- !query analysis
@@ -787,3 +831,47 @@ select timestampdiff(YEAR, date'2022-02-15', 
date'2023-02-15')
 select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59')
 -- !query analysis
 [Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 94,
+    "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out 
b/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out
index 829d650fac8..6f01cc3be07 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out
@@ -882,6 +882,50 @@ select date_add(YEAR, 1, date'2022-02-25')
 [Analyzer test output redacted due to nondeterminism]
 
 
+-- !query
+select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`dateadd`",
+    "invalidValue" : "'MICROSECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 71,
+    "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 
01:02:03.123')"
+  } ]
+}
+
+
+-- !query
+select date_add('QUARTER', 5, date'2022-02-25')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`date_add`",
+    "invalidValue" : "'QUARTER'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 47,
+    "fragment" : "date_add('QUARTER', 5, date'2022-02-25')"
+  } ]
+}
+
+
 -- !query
 select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', 
timestamp'2022-02-25 01:02:03.124001')
 -- !query analysis
@@ -940,3 +984,47 @@ select datediff(QUARTER, date'2022-02-25', 
date'2023-05-25')
 select date_diff(YEAR, date'2022-02-25', date'2023-02-25')
 -- !query analysis
 [Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', 
timestamp'2022-02-25 01:02:03.455')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`date_diff`",
+    "invalidValue" : "'MILLISECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 103,
+    "fragment" : "date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', 
timestamp'2022-02-25 01:02:03.455')"
+  } ]
+}
+
+
+-- !query
+select datediff('YEAR', date'2022-02-25', date'2023-02-25')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`datediff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out
 
b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out
index 078bf031420..ef1261197b6 100644
--- 
a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out
@@ -882,6 +882,50 @@ select date_add(YEAR, 1, date'2022-02-25')
 [Analyzer test output redacted due to nondeterminism]
 
 
+-- !query
+select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`dateadd`",
+    "invalidValue" : "'MICROSECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 71,
+    "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 
01:02:03.123')"
+  } ]
+}
+
+
+-- !query
+select date_add('QUARTER', 5, date'2022-02-25')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`date_add`",
+    "invalidValue" : "'QUARTER'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 47,
+    "fragment" : "date_add('QUARTER', 5, date'2022-02-25')"
+  } ]
+}
+
+
 -- !query
 select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', 
timestamp'2022-02-25 01:02:03.124001')
 -- !query analysis
@@ -942,6 +986,50 @@ select date_diff(YEAR, date'2022-02-25', date'2023-02-25')
 [Analyzer test output redacted due to nondeterminism]
 
 
+-- !query
+select date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', 
timestamp'2022-02-25 01:02:03.455')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`date_diff`",
+    "invalidValue" : "'MILLISECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 103,
+    "fragment" : "date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', 
timestamp'2022-02-25 01:02:03.455')"
+  } ]
+}
+
+
+-- !query
+select datediff('YEAR', date'2022-02-25', date'2023-02-25')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`datediff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')"
+  } ]
+}
+
+
 -- !query
 select timestamp '2019-01-01\t'
 -- !query analysis
@@ -1780,6 +1868,50 @@ select timestampadd(SECOND, -1, date'2022-02-15')
 [Analyzer test output redacted due to nondeterminism]
 
 
+-- !query
+select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'MONTH'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')"
+  } ]
+}
+
+
+-- !query
+select timestampadd('SECOND', -1, date'2022-02-15')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'SECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 51,
+    "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')"
+  } ]
+}
+
+
 -- !query
 select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', 
timestamp'2022-01-14 01:02:03')
 -- !query analysis
@@ -1802,3 +1934,47 @@ select timestampdiff(YEAR, date'2022-02-15', 
date'2023-02-15')
 select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59')
 -- !query analysis
 [Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 94,
+    "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out 
b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out
index 25e7cfcbcbb..856c317d137 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out
@@ -837,6 +837,50 @@ select timestampadd(SECOND, -1, date'2022-02-15')
 [Analyzer test output redacted due to nondeterminism]
 
 
+-- !query
+select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'MONTH'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')"
+  } ]
+}
+
+
+-- !query
+select timestampadd('SECOND', -1, date'2022-02-15')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'SECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 51,
+    "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')"
+  } ]
+}
+
+
 -- !query
 select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', 
timestamp'2022-01-14 01:02:03')
 -- !query analysis
@@ -859,3 +903,47 @@ select timestampdiff(YEAR, date'2022-02-15', 
date'2023-02-15')
 select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59')
 -- !query analysis
 [Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 94,
+    "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp-ansi.sql.out
 
b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp-ansi.sql.out
index 630e49bfaa5..5c42401acc3 100644
--- 
a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp-ansi.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp-ansi.sql.out
@@ -782,6 +782,50 @@ select timestampadd(SECOND, -1, date'2022-02-15')
 [Analyzer test output redacted due to nondeterminism]
 
 
+-- !query
+select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'MONTH'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')"
+  } ]
+}
+
+
+-- !query
+select timestampadd('SECOND', -1, date'2022-02-15')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'SECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 51,
+    "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')"
+  } ]
+}
+
+
 -- !query
 select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', 
timestamp'2022-01-14 01:02:03')
 -- !query analysis
@@ -806,3 +850,47 @@ select timestampdiff(YEAR, date'2022-02-15', 
date'2023-02-15')
 select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59')
 -- !query analysis
 [Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 94,
+    "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out
 
b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out
index d6e29e72682..18d05088b56 100644
--- 
a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out
@@ -842,6 +842,50 @@ select timestampadd(SECOND, -1, date'2022-02-15')
 [Analyzer test output redacted due to nondeterminism]
 
 
+-- !query
+select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'MONTH'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')"
+  } ]
+}
+
+
+-- !query
+select timestampadd('SECOND', -1, date'2022-02-15')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'SECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 51,
+    "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')"
+  } ]
+}
+
+
 -- !query
 select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', 
timestamp'2022-01-14 01:02:03')
 -- !query analysis
@@ -866,3 +910,47 @@ select timestampdiff(YEAR, date'2022-02-15', 
date'2023-02-15')
 select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59')
 -- !query analysis
 [Analyzer test output redacted due to nondeterminism]
+
+
+-- !query
+select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 94,
+    "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
+  } ]
+}
diff --git a/sql/core/src/test/resources/sql-tests/inputs/date.sql 
b/sql/core/src/test/resources/sql-tests/inputs/date.sql
index c4c7ff73470..ed16ace0382 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/date.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/date.sql
@@ -154,6 +154,9 @@ select date_add(MONTH, -1, timestamp'2022-02-25 01:02:03');
 select dateadd(QUARTER, 5, date'2022-02-25');
 select date_add(YEAR, 1, date'2022-02-25');
 
+select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123');
+select date_add('QUARTER', 5, date'2022-02-25');
+
 -- Get the difference between timestamps or dates in the specified units
 select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', 
timestamp'2022-02-25 01:02:03.124001');
 select date_diff(MILLISECOND, timestamp'2022-02-25 01:02:03.456', 
timestamp'2022-02-25 01:02:03.455');
@@ -165,3 +168,6 @@ select datediff(WEEK, timestamp'2022-02-25 01:02:03', 
timestamp'2022-01-28 01:02
 select date_diff(MONTH, timestamp'2022-02-25 01:02:03', timestamp'2022-01-25 
01:02:03');
 select datediff(QUARTER, date'2022-02-25', date'2023-05-25');
 select date_diff(YEAR, date'2022-02-25', date'2023-02-25');
+
+select date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', 
timestamp'2022-02-25 01:02:03.455');
+select datediff('YEAR', date'2022-02-25', date'2023-02-25');
diff --git a/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql 
b/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql
index b0d958a24b1..163b734164e 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql
@@ -150,8 +150,14 @@ select timestampadd(MINUTE, 58, timestamp'2022-02-14 
01:02:03');
 select timestampadd(YEAR, 1, date'2022-02-15');
 select timestampadd(SECOND, -1, date'2022-02-15');
 
+select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03');
+select timestampadd('SECOND', -1, date'2022-02-15');
+
 -- Get the difference between timestamps in the specified units
 select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', 
timestamp'2022-01-14 01:02:03');
 select timestampdiff(MINUTE, timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03');
 select timestampdiff(YEAR, date'2022-02-15', date'2023-02-15');
 select timestampdiff(SECOND, date'2022-02-15', timestamp'2022-02-14 23:59:59');
+
+select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03');
+select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15');
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out 
b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index a309170d6f6..dee745995ea 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -1041,6 +1041,54 @@ struct<timestampadd(YEAR, 1, DATE 
'2022-02-25'):timestamp>
 2023-02-25 00:00:00
 
 
+-- !query
+select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`dateadd`",
+    "invalidValue" : "'MICROSECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 71,
+    "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 
01:02:03.123')"
+  } ]
+}
+
+
+-- !query
+select date_add('QUARTER', 5, date'2022-02-25')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`date_add`",
+    "invalidValue" : "'QUARTER'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 47,
+    "fragment" : "date_add('QUARTER', 5, date'2022-02-25')"
+  } ]
+}
+
+
 -- !query
 select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', 
timestamp'2022-02-25 01:02:03.124001')
 -- !query schema
@@ -1119,3 +1167,51 @@ select date_diff(YEAR, date'2022-02-25', 
date'2023-02-25')
 struct<timestampdiff(YEAR, DATE '2022-02-25', DATE '2023-02-25'):bigint>
 -- !query output
 1
+
+
+-- !query
+select date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', 
timestamp'2022-02-25 01:02:03.455')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`date_diff`",
+    "invalidValue" : "'MILLISECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 103,
+    "fragment" : "date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', 
timestamp'2022-02-25 01:02:03.455')"
+  } ]
+}
+
+
+-- !query
+select datediff('YEAR', date'2022-02-25', date'2023-02-25')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`datediff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out 
b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
index 3afae8ab91a..7433fcd52b9 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
@@ -1011,6 +1011,54 @@ struct<timestampadd(SECOND, -1, DATE 
'2022-02-15'):timestamp>
 2022-02-14 23:59:59
 
 
+-- !query
+select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'MONTH'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')"
+  } ]
+}
+
+
+-- !query
+select timestampadd('SECOND', -1, date'2022-02-15')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'SECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 51,
+    "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')"
+  } ]
+}
+
+
 -- !query
 select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', 
timestamp'2022-01-14 01:02:03')
 -- !query schema
@@ -1041,3 +1089,51 @@ select timestampdiff(SECOND, date'2022-02-15', 
timestamp'2022-02-14 23:59:59')
 struct<timestampdiff(SECOND, DATE '2022-02-15', TIMESTAMP '2022-02-14 
23:59:59'):bigint>
 -- !query output
 -1
+
+
+-- !query
+select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 94,
+    "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
+  } ]
+}
diff --git a/sql/core/src/test/resources/sql-tests/results/date.sql.out 
b/sql/core/src/test/resources/sql-tests/results/date.sql.out
index c9cd8227083..322d916f282 100644
--- a/sql/core/src/test/resources/sql-tests/results/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/date.sql.out
@@ -1047,6 +1047,54 @@ struct<timestampadd(YEAR, 1, DATE 
'2022-02-25'):timestamp>
 2023-02-25 00:00:00
 
 
+-- !query
+select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`dateadd`",
+    "invalidValue" : "'MICROSECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 71,
+    "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 
01:02:03.123')"
+  } ]
+}
+
+
+-- !query
+select date_add('QUARTER', 5, date'2022-02-25')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`date_add`",
+    "invalidValue" : "'QUARTER'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 47,
+    "fragment" : "date_add('QUARTER', 5, date'2022-02-25')"
+  } ]
+}
+
+
 -- !query
 select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', 
timestamp'2022-02-25 01:02:03.124001')
 -- !query schema
@@ -1125,3 +1173,51 @@ select date_diff(YEAR, date'2022-02-25', 
date'2023-02-25')
 struct<timestampdiff(YEAR, DATE '2022-02-25', DATE '2023-02-25'):bigint>
 -- !query output
 1
+
+
+-- !query
+select date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', 
timestamp'2022-02-25 01:02:03.455')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`date_diff`",
+    "invalidValue" : "'MILLISECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 103,
+    "fragment" : "date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', 
timestamp'2022-02-25 01:02:03.455')"
+  } ]
+}
+
+
+-- !query
+select datediff('YEAR', date'2022-02-25', date'2023-02-25')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`datediff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out 
b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
index be809b83687..a824fd96c11 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
@@ -1020,6 +1020,54 @@ struct<timestampadd(YEAR, 1, DATE 
'2022-02-25'):timestamp>
 2023-02-25 00:00:00
 
 
+-- !query
+select dateadd('MICROSECOND', 1001, timestamp'2022-02-25 01:02:03.123')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`dateadd`",
+    "invalidValue" : "'MICROSECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 71,
+    "fragment" : "dateadd('MICROSECOND', 1001, timestamp'2022-02-25 
01:02:03.123')"
+  } ]
+}
+
+
+-- !query
+select date_add('QUARTER', 5, date'2022-02-25')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`date_add`",
+    "invalidValue" : "'QUARTER'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 47,
+    "fragment" : "date_add('QUARTER', 5, date'2022-02-25')"
+  } ]
+}
+
+
 -- !query
 select datediff(MICROSECOND, timestamp'2022-02-25 01:02:03.123', 
timestamp'2022-02-25 01:02:03.124001')
 -- !query schema
@@ -1100,6 +1148,54 @@ struct<timestampdiff(YEAR, DATE '2022-02-25', DATE 
'2023-02-25'):bigint>
 1
 
 
+-- !query
+select date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', 
timestamp'2022-02-25 01:02:03.455')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`date_diff`",
+    "invalidValue" : "'MILLISECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 103,
+    "fragment" : "date_diff('MILLISECOND', timestamp'2022-02-25 01:02:03.456', 
timestamp'2022-02-25 01:02:03.455')"
+  } ]
+}
+
+
+-- !query
+select datediff('YEAR', date'2022-02-25', date'2023-02-25')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`datediff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 59,
+    "fragment" : "datediff('YEAR', date'2022-02-25', date'2023-02-25')"
+  } ]
+}
+
+
 -- !query
 select timestamp '2019-01-01\t'
 -- !query schema
@@ -2054,6 +2150,54 @@ struct<timestampadd(SECOND, -1, DATE 
'2022-02-15'):timestamp>
 2022-02-14 23:59:59
 
 
+-- !query
+select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'MONTH'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')"
+  } ]
+}
+
+
+-- !query
+select timestampadd('SECOND', -1, date'2022-02-15')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'SECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 51,
+    "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')"
+  } ]
+}
+
+
 -- !query
 select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', 
timestamp'2022-01-14 01:02:03')
 -- !query schema
@@ -2084,3 +2228,51 @@ select timestampdiff(SECOND, date'2022-02-15', 
timestamp'2022-02-14 23:59:59')
 struct<timestampdiff(SECOND, DATE '2022-02-15', TIMESTAMP '2022-02-14 
23:59:59'):bigint>
 -- !query output
 -1
+
+
+-- !query
+select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 94,
+    "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
+  } ]
+}
diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out 
b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
index 9047b64e01d..8a49774f74f 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
@@ -1007,6 +1007,54 @@ struct<timestampadd(SECOND, -1, DATE 
'2022-02-15'):timestamp>
 2022-02-14 23:59:59
 
 
+-- !query
+select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'MONTH'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')"
+  } ]
+}
+
+
+-- !query
+select timestampadd('SECOND', -1, date'2022-02-15')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'SECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 51,
+    "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')"
+  } ]
+}
+
+
 -- !query
 select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', 
timestamp'2022-01-14 01:02:03')
 -- !query schema
@@ -1037,3 +1085,51 @@ select timestampdiff(SECOND, date'2022-02-15', 
timestamp'2022-02-14 23:59:59')
 struct<timestampdiff(SECOND, DATE '2022-02-15', TIMESTAMP '2022-02-14 
23:59:59'):bigint>
 -- !query output
 -1
+
+
+-- !query
+select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 94,
+    "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
 
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index 53d86dfd518..88ba5314b8a 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -1002,6 +1002,54 @@ struct<timestampadd(SECOND, -1, DATE 
'2022-02-15'):timestamp>
 2022-02-14 23:59:59
 
 
+-- !query
+select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'MONTH'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')"
+  } ]
+}
+
+
+-- !query
+select timestampadd('SECOND', -1, date'2022-02-15')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'SECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 51,
+    "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')"
+  } ]
+}
+
+
 -- !query
 select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', 
timestamp'2022-01-14 01:02:03')
 -- !query schema
@@ -1032,3 +1080,51 @@ select timestampdiff(SECOND, date'2022-02-15', 
timestamp'2022-02-14 23:59:59')
 struct<timestampdiff(SECOND, DATE '2022-02-15', TIMESTAMP_NTZ '2022-02-14 
23:59:59'):bigint>
 -- !query output
 -1
+
+
+-- !query
+select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 94,
+    "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
+  } ]
+}
diff --git 
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out 
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
index 0c5f1aeb78b..545b49445e4 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
@@ -983,6 +983,54 @@ struct<timestampadd(SECOND, -1, DATE 
'2022-02-15'):timestamp>
 2022-02-14 23:59:59
 
 
+-- !query
+select timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'MONTH'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampadd('MONTH', -1, timestamp'2022-02-14 01:02:03')"
+  } ]
+}
+
+
+-- !query
+select timestampadd('SECOND', -1, date'2022-02-15')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampadd`",
+    "invalidValue" : "'SECOND'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 51,
+    "fragment" : "timestampadd('SECOND', -1, date'2022-02-15')"
+  } ]
+}
+
+
 -- !query
 select timestampdiff(MONTH, timestamp'2022-02-14 01:02:03', 
timestamp'2022-01-14 01:02:03')
 -- !query schema
@@ -1013,3 +1061,51 @@ select timestampdiff(SECOND, date'2022-02-15', 
timestamp'2022-02-14 23:59:59')
 struct<timestampdiff(SECOND, DATE '2022-02-15', TIMESTAMP_NTZ '2022-02-14 
23:59:59'):bigint>
 -- !query output
 -1
+
+
+-- !query
+select timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'MINUTE'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 94,
+    "fragment" : "timestampdiff('MINUTE', timestamp'2022-02-14 01:02:03', 
timestamp'2022-02-14 02:00:03')"
+  } ]
+}
+
+
+-- !query
+select timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_PARAMETER_VALUE.DATETIME_UNIT",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "functionName" : "`timestampdiff`",
+    "invalidValue" : "'YEAR'",
+    "parameter" : "`unit`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 64,
+    "fragment" : "timestampdiff('YEAR', date'2022-02-15', date'2023-02-15')"
+  } ]
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to