cloud-fan commented on a change in pull request #33640:
URL: https://github.com/apache/spark/pull/33640#discussion_r682854999



##########
File path: sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
##########
@@ -0,0 +1,525 @@
+-- Automatically generated by SQLQueryTestSuite
+-- Number of queries: 60
+
+
+-- !query
+select date '2019-01-01\t'
+-- !query schema
+struct<DATE '2019-01-01':date>
+-- !query output
+2019-01-01
+
+
+-- !query
+select date '2020-01-01中文'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the DATE value: 2020-01-01中文(line 1, pos 7)
+
+== SQL ==
+select date '2020-01-01中文'
+-------^^^
+
+
+-- !query
+select date'999999-03-18', date'-0001-1-28', date'0015'
+-- !query schema
+struct<DATE '+999999-03-18':date,DATE '-0001-01-28':date,DATE 
'0015-01-01':date>
+-- !query output
++999999-03-18  -0001-01-28     0015-01-01
+
+
+-- !query
+select date'015'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the DATE value: 015(line 1, pos 7)
+
+== SQL ==
+select date'015'
+-------^^^
+
+
+-- !query
+select date'2021-4294967297-11'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)
+
+== SQL ==
+select date'2021-4294967297-11'
+-------^^^
+
+
+-- !query
+select current_date = current_date
+-- !query schema
+struct<(current_date() = current_date()):boolean>
+-- !query output
+true
+
+
+-- !query
+select current_date() = current_date()
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+no viable alternative at input 'current_date'(line 1, pos 7)
+
+== SQL ==
+select current_date() = current_date()
+-------^^^
+
+
+-- !query
+select DATE_FROM_UNIX_DATE(0), DATE_FROM_UNIX_DATE(1000), 
DATE_FROM_UNIX_DATE(null)
+-- !query schema
+struct<date_from_unix_date(0):date,date_from_unix_date(1000):date,date_from_unix_date(NULL):date>
+-- !query output
+1970-01-01     1972-09-27      NULL
+
+
+-- !query
+select UNIX_DATE(DATE('1970-01-01')), UNIX_DATE(DATE('2020-12-04')), 
UNIX_DATE(null)
+-- !query schema
+struct<unix_date(1970-01-01):int,unix_date(2020-12-04):int,unix_date(NULL):int>
+-- !query output
+0      18600   NULL
+
+
+-- !query
+select to_date(null), to_date('2016-12-31'), to_date('2016-12-31', 
'yyyy-MM-dd')
+-- !query schema
+struct<to_date(NULL):date,to_date(2016-12-31):date,to_date(2016-12-31, 
yyyy-MM-dd):date>
+-- !query output
+NULL   2016-12-31      2016-12-31
+
+
+-- !query
+select to_date("16", "dd")
+-- !query schema
+struct<to_date(16, dd):date>
+-- !query output
+1970-01-16
+
+
+-- !query
+select to_date("02-29", "MM-dd")
+-- !query schema
+struct<>
+-- !query output
+java.time.DateTimeException
+Invalid date 'February 29' as '1970' is not a leap year
+
+
+-- !query
+select dayofweek('2007-02-03'), dayofweek('2009-07-30'), 
dayofweek('2017-05-27'),
+  dayofweek(null), dayofweek('1582-10-15 13:10:15'), 
dayofweek(timestamp'1582-10-15 13:10:15')
+-- !query schema
+struct<dayofweek(2007-02-03):int,dayofweek(2009-07-30):int,dayofweek(2017-05-27):int,dayofweek(NULL):int,dayofweek(1582-10-15
 13:10:15):int,dayofweek(TIMESTAMP '1582-10-15 13:10:15'):int>
+-- !query output
+7      5       7       NULL    6       6
+
+
+-- !query
+select weekday('2007-02-03'), weekday('2009-07-30'), weekday('2017-05-27'),
+  weekday(null), weekday('1582-10-15 13:10:15'), weekday(timestamp'1582-10-15 
13:10:15')
+-- !query schema
+struct<weekday(2007-02-03):int,weekday(2009-07-30):int,weekday(2017-05-27):int,weekday(NULL):int,weekday(1582-10-15
 13:10:15):int,weekday(TIMESTAMP '1582-10-15 13:10:15'):int>
+-- !query output
+5      3       5       NULL    4       4
+
+
+-- !query
+select year('1500-01-01'), year('1582-10-15 13:10:15'), 
year(timestamp'1582-10-15 13:10:15')
+-- !query schema
+struct<year(1500-01-01):int,year(1582-10-15 13:10:15):int,year(TIMESTAMP 
'1582-10-15 13:10:15'):int>
+-- !query output
+1500   1582    1582
+
+
+-- !query
+select month('1500-01-01'), month('1582-10-15 13:10:15'), 
month(timestamp'1582-10-15 13:10:15')
+-- !query schema
+struct<month(1500-01-01):int,month(1582-10-15 13:10:15):int,month(TIMESTAMP 
'1582-10-15 13:10:15'):int>
+-- !query output
+1      10      10
+
+
+-- !query
+select dayOfYear('1500-01-01'), dayOfYear('1582-10-15 13:10:15'), 
dayOfYear(timestamp'1582-10-15 13:10:15')
+-- !query schema
+struct<dayofyear(1500-01-01):int,dayofyear(1582-10-15 
13:10:15):int,dayofyear(TIMESTAMP '1582-10-15 13:10:15'):int>
+-- !query output
+1      288     288
+
+
+-- !query
+select next_day("2015-07-23", "Mon")
+-- !query schema
+struct<next_day(2015-07-23, Mon):date>
+-- !query output
+2015-07-27
+
+
+-- !query
+select next_day("2015-07-23", "xx")
+-- !query schema
+struct<>
+-- !query output
+java.lang.IllegalArgumentException
+Illegal input for day of week: xx
+
+
+-- !query
+select next_day("2015-07-23 12:12:12", "Mon")
+-- !query schema
+struct<next_day(2015-07-23 12:12:12, Mon):date>
+-- !query output
+2015-07-27
+
+
+-- !query
+select next_day(timestamp"2015-07-23 12:12:12", "Mon")

Review comment:
       shall we allow this? @gengliangwang 

##########
File path: sql/core/src/test/resources/sql-tests/results/date.sql.out
##########
@@ -0,0 +1,515 @@
+-- Automatically generated by SQLQueryTestSuite
+-- Number of queries: 60
+
+
+-- !query
+select date '2019-01-01\t'
+-- !query schema
+struct<DATE '2019-01-01':date>
+-- !query output
+2019-01-01
+
+
+-- !query
+select date '2020-01-01中文'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the DATE value: 2020-01-01中文(line 1, pos 7)
+
+== SQL ==
+select date '2020-01-01中文'
+-------^^^
+
+
+-- !query
+select date'999999-03-18', date'-0001-1-28', date'0015'
+-- !query schema
+struct<DATE '+999999-03-18':date,DATE '-0001-01-28':date,DATE 
'0015-01-01':date>
+-- !query output
++999999-03-18  -0001-01-28     0015-01-01
+
+
+-- !query
+select date'015'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the DATE value: 015(line 1, pos 7)
+
+== SQL ==
+select date'015'
+-------^^^
+
+
+-- !query
+select date'2021-4294967297-11'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)
+
+== SQL ==
+select date'2021-4294967297-11'
+-------^^^
+
+
+-- !query
+select current_date = current_date
+-- !query schema
+struct<(current_date() = current_date()):boolean>
+-- !query output
+true
+
+
+-- !query
+select current_date() = current_date()
+-- !query schema
+struct<(current_date() = current_date()):boolean>
+-- !query output
+true
+
+
+-- !query
+select DATE_FROM_UNIX_DATE(0), DATE_FROM_UNIX_DATE(1000), 
DATE_FROM_UNIX_DATE(null)
+-- !query schema
+struct<date_from_unix_date(0):date,date_from_unix_date(1000):date,date_from_unix_date(NULL):date>
+-- !query output
+1970-01-01     1972-09-27      NULL
+
+
+-- !query
+select UNIX_DATE(DATE('1970-01-01')), UNIX_DATE(DATE('2020-12-04')), 
UNIX_DATE(null)
+-- !query schema
+struct<unix_date(1970-01-01):int,unix_date(2020-12-04):int,unix_date(NULL):int>
+-- !query output
+0      18600   NULL
+
+
+-- !query
+select to_date(null), to_date('2016-12-31'), to_date('2016-12-31', 
'yyyy-MM-dd')
+-- !query schema
+struct<to_date(NULL):date,to_date(2016-12-31):date,to_date(2016-12-31, 
yyyy-MM-dd):date>
+-- !query output
+NULL   2016-12-31      2016-12-31
+
+
+-- !query
+select to_date("16", "dd")
+-- !query schema
+struct<to_date(16, dd):date>
+-- !query output
+1970-01-16
+
+
+-- !query
+select to_date("02-29", "MM-dd")
+-- !query schema
+struct<to_date(02-29, MM-dd):date>
+-- !query output
+NULL
+
+
+-- !query
+select dayofweek('2007-02-03'), dayofweek('2009-07-30'), 
dayofweek('2017-05-27'),
+  dayofweek(null), dayofweek('1582-10-15 13:10:15'), 
dayofweek(timestamp'1582-10-15 13:10:15')
+-- !query schema
+struct<dayofweek(2007-02-03):int,dayofweek(2009-07-30):int,dayofweek(2017-05-27):int,dayofweek(NULL):int,dayofweek(1582-10-15
 13:10:15):int,dayofweek(TIMESTAMP '1582-10-15 13:10:15'):int>
+-- !query output
+7      5       7       NULL    6       6
+
+
+-- !query
+select weekday('2007-02-03'), weekday('2009-07-30'), weekday('2017-05-27'),
+  weekday(null), weekday('1582-10-15 13:10:15'), weekday(timestamp'1582-10-15 
13:10:15')
+-- !query schema
+struct<weekday(2007-02-03):int,weekday(2009-07-30):int,weekday(2017-05-27):int,weekday(NULL):int,weekday(1582-10-15
 13:10:15):int,weekday(TIMESTAMP '1582-10-15 13:10:15'):int>
+-- !query output
+5      3       5       NULL    4       4
+
+
+-- !query
+select year('1500-01-01'), year('1582-10-15 13:10:15'), 
year(timestamp'1582-10-15 13:10:15')
+-- !query schema
+struct<year(1500-01-01):int,year(1582-10-15 13:10:15):int,year(TIMESTAMP 
'1582-10-15 13:10:15'):int>
+-- !query output
+1500   1582    1582
+
+
+-- !query
+select month('1500-01-01'), month('1582-10-15 13:10:15'), 
month(timestamp'1582-10-15 13:10:15')
+-- !query schema
+struct<month(1500-01-01):int,month(1582-10-15 13:10:15):int,month(TIMESTAMP 
'1582-10-15 13:10:15'):int>
+-- !query output
+1      10      10
+
+
+-- !query
+select dayOfYear('1500-01-01'), dayOfYear('1582-10-15 13:10:15'), 
dayOfYear(timestamp'1582-10-15 13:10:15')
+-- !query schema
+struct<dayofyear(1500-01-01):int,dayofyear(1582-10-15 
13:10:15):int,dayofyear(TIMESTAMP '1582-10-15 13:10:15'):int>
+-- !query output
+1      288     288
+
+
+-- !query
+select next_day("2015-07-23", "Mon")
+-- !query schema
+struct<next_day(2015-07-23, Mon):date>
+-- !query output
+2015-07-27
+
+
+-- !query
+select next_day("2015-07-23", "xx")
+-- !query schema
+struct<next_day(2015-07-23, xx):date>
+-- !query output
+NULL
+
+
+-- !query
+select next_day("2015-07-23 12:12:12", "Mon")
+-- !query schema
+struct<next_day(2015-07-23 12:12:12, Mon):date>
+-- !query output
+2015-07-27
+
+
+-- !query
+select next_day(timestamp"2015-07-23 12:12:12", "Mon")

Review comment:
       This works in non-ANSI mode but not in ANSI mode.

##########
File path: 
sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
##########
@@ -0,0 +1,579 @@
+-- Automatically generated by SQLQueryTestSuite
+-- Number of queries: 68
+
+
+-- !query
+select timestamp '2019-01-01\t'
+-- !query schema
+struct<TIMESTAMP_NTZ '2019-01-01 00:00:00':timestamp_ntz>
+-- !query output
+2019-01-01 00:00:00
+
+
+-- !query
+select timestamp '2019-01-01中文'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)
+
+== SQL ==
+select timestamp '2019-01-01中文'
+-------^^^
+
+
+-- !query
+select timestamp'-1969-12-31 16:00:00', timestamp'0015-03-18 16:00:00', 
timestamp'-000001', timestamp'99999-03-18T12:03:17'
+-- !query schema
+struct<TIMESTAMP_NTZ '-1969-12-31 16:00:00':timestamp_ntz,TIMESTAMP_NTZ 
'0015-03-18 16:00:00':timestamp_ntz,TIMESTAMP_NTZ '-0001-01-01 
00:00:00':timestamp_ntz,TIMESTAMP_NTZ '+99999-03-18 12:03:17':timestamp_ntz>
+-- !query output
+-1969-12-31 16:00:00   0015-03-18 16:00:00     -0001-01-01 00:00:00    
+99999-03-18 12:03:17
+
+
+-- !query
+select timestamp'4294967297'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)
+
+== SQL ==
+select timestamp'4294967297'
+-------^^^
+
+
+-- !query
+select timestamp'2021-01-01T12:30:4294967297.123456'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, 
pos 7)
+
+== SQL ==
+select timestamp'2021-01-01T12:30:4294967297.123456'
+-------^^^
+
+
+-- !query
+select current_timestamp = current_timestamp
+-- !query schema
+struct<(current_timestamp() = current_timestamp()):boolean>
+-- !query output
+true
+
+
+-- !query
+select current_timestamp() = current_timestamp()
+-- !query schema
+struct<(current_timestamp() = current_timestamp()):boolean>
+-- !query output
+true
+
+
+-- !query
+select localtimestamp() = localtimestamp()
+-- !query schema
+struct<(localtimestamp() = localtimestamp()):boolean>
+-- !query output
+true
+
+
+-- !query
+SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678)
+-- !query schema
+struct<make_timestamp(2021, 7, 11, 6, 30, 45.678):timestamp_ntz>
+-- !query output
+2021-07-11 06:30:45.678
+
+
+-- !query
+SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678, 'CET')
+-- !query schema
+struct<make_timestamp(2021, 7, 11, 6, 30, 45.678, CET):timestamp_ntz>
+-- !query output
+2021-07-11 06:30:45.678
+
+
+-- !query
+SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007)
+-- !query schema
+struct<make_timestamp(2021, 7, 11, 6, 30, 60.007):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+create temporary view ttf1 as select * from values
+  (1, 2),
+  (2, 3)
+  as ttf1(current_date, current_timestamp)
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+select current_date, current_timestamp from ttf1
+-- !query schema
+struct<current_date:int,current_timestamp:int>
+-- !query output
+1      2
+2      3
+
+
+-- !query
+create temporary view ttf2 as select * from values
+  (1, 2),
+  (2, 3)
+  as ttf2(a, b)
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+select current_date = current_date(), current_timestamp = current_timestamp(), 
a, b from ttf2
+-- !query schema
+struct<(current_date() = current_date()):boolean,(current_timestamp() = 
current_timestamp()):boolean,a:int,b:int>
+-- !query output
+true   true    1       2
+true   true    2       3
+
+
+-- !query
+select a, b from ttf2 order by a, current_date
+-- !query schema
+struct<a:int,b:int>
+-- !query output
+1      2
+2      3
+
+
+-- !query
+select UNIX_SECONDS(timestamp'2020-12-01 14:30:08Z'), 
UNIX_SECONDS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_SECONDS(null)
+-- !query schema
+struct<unix_seconds(TIMESTAMP '2020-12-01 
06:30:08'):bigint,unix_seconds(TIMESTAMP '2020-12-01 
06:30:08.999999'):bigint,unix_seconds(NULL):bigint>
+-- !query output
+1606833008     1606833008      NULL
+
+
+-- !query
+select UNIX_MILLIS(timestamp'2020-12-01 14:30:08Z'), 
UNIX_MILLIS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_MILLIS(null)
+-- !query schema
+struct<unix_millis(TIMESTAMP '2020-12-01 
06:30:08'):bigint,unix_millis(TIMESTAMP '2020-12-01 
06:30:08.999999'):bigint,unix_millis(NULL):bigint>
+-- !query output
+1606833008000  1606833008999   NULL
+
+
+-- !query
+select UNIX_MICROS(timestamp'2020-12-01 14:30:08Z'), 
UNIX_MICROS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_MICROS(null)
+-- !query schema
+struct<unix_micros(TIMESTAMP '2020-12-01 
06:30:08'):bigint,unix_micros(TIMESTAMP '2020-12-01 
06:30:08.999999'):bigint,unix_micros(NULL):bigint>
+-- !query output
+1606833008000000       1606833008999999        NULL
+
+
+-- !query
+select to_timestamp(null), to_timestamp('2016-12-31 00:12:00'), 
to_timestamp('2016-12-31', 'yyyy-MM-dd')
+-- !query schema
+struct<to_timestamp(NULL):timestamp_ntz,to_timestamp(2016-12-31 
00:12:00):timestamp_ntz,to_timestamp(2016-12-31, yyyy-MM-dd):timestamp_ntz>
+-- !query output
+NULL   2016-12-31 00:12:00     2016-12-31 00:00:00
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12., yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.0', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.0, yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.1', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.1, yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.1
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.12
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.123UTC, yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.123
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.1234
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.12345CST, yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.12345
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.123456PST, yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.123456
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.1234567PST, yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(123456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.123456
+
+
+-- !query
+select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(223456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.[SSSSSS]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd 
HH:mm:ss.[SSSSSS]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.1234
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.123, yyyy-MM-dd 
HH:mm:ss[.SSSSSS]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.123
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12, yyyy-MM-dd 
HH:mm:ss[.SSSSSS]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd 
HH:mm[:ss.SSSSSS]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.12
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11, yyyy-MM-dd 
HH:mm[:ss.SSSSSS]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:00
+
+
+-- !query
+select to_timestamp("2019-10-06S10:11:12.12345", 
"yyyy-MM-dd'S'HH:mm:ss.SSSSSS")
+-- !query schema
+struct<to_timestamp(2019-10-06S10:11:12.12345, 
yyyy-MM-dd'S'HH:mm:ss.SSSSSS):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.12345
+
+
+-- !query
+select to_timestamp("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
+-- !query schema
+struct<to_timestamp(12.12342019-10-06S10:11, 
ss.SSSSyyyy-MM-dd'S'HH:mm):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.1234
+
+
+-- !query
+select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
+-- !query schema
+struct<to_timestamp(12.1232019-10-06S10:11, 
ss.SSSSyyyy-MM-dd'S'HH:mm):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm")
+-- !query schema
+struct<to_timestamp(12.1232019-10-06S10:11, 
ss.SSSSyy-MM-dd'S'HH:mm):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm")
+-- !query schema
+struct<to_timestamp(12.1234019-10-06S10:11, 
ss.SSSSy-MM-dd'S'HH:mm):timestamp_ntz>
+-- !query output
+0019-10-06 10:11:12.1234
+
+
+-- !query
+select to_timestamp("2019-10-06S", "yyyy-MM-dd'S'")
+-- !query schema
+struct<to_timestamp(2019-10-06S, yyyy-MM-dd'S'):timestamp_ntz>
+-- !query output
+2019-10-06 00:00:00
+
+
+-- !query
+select to_timestamp("S2019-10-06", "'S'yyyy-MM-dd")
+-- !query schema
+struct<to_timestamp(S2019-10-06, 'S'yyyy-MM-dd):timestamp_ntz>
+-- !query output
+2019-10-06 00:00:00
+
+
+-- !query
+select to_timestamp("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS")
+-- !query schema
+struct<to_timestamp(2019-10-06T10:11:12'12, 
yyyy-MM-dd'T'HH:mm:ss''SSSS):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.12
+
+
+-- !query
+select to_timestamp("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''")
+-- !query schema
+struct<to_timestamp(2019-10-06T10:11:12', 
yyyy-MM-dd'T'HH:mm:ss''):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss")
+-- !query schema
+struct<to_timestamp('2019-10-06T10:11:12, 
''yyyy-MM-dd'T'HH:mm:ss):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss")
+-- !query schema
+struct<to_timestamp(P2019-10-06T10:11:12, 
'P'yyyy-MM-dd'T'HH:mm:ss):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp("16", "dd")
+-- !query schema
+struct<to_timestamp(16, dd):timestamp_ntz>
+-- !query output
+1970-01-16 00:00:00
+
+
+-- !query
+select to_timestamp("02-29", "MM-dd")
+-- !query schema
+struct<to_timestamp(02-29, MM-dd):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp("2019 40", "yyyy mm")
+-- !query schema
+struct<to_timestamp(2019 40, yyyy mm):timestamp_ntz>
+-- !query output
+2019-01-01 00:40:00
+
+
+-- !query
+select to_timestamp("2019 10:10:10", "yyyy hh:mm:ss")
+-- !query schema
+struct<to_timestamp(2019 10:10:10, yyyy hh:mm:ss):timestamp_ntz>
+-- !query output
+2019-01-01 10:10:10
+
+
+-- !query
+select timestamp'2011-11-11 11:11:11' + interval '2' day
+-- !query schema
+struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' + INTERVAL '2' DAY:timestamp_ntz>
+-- !query output
+2011-11-13 11:11:11
+
+
+-- !query
+select timestamp'2011-11-11 11:11:11' - interval '2' day
+-- !query schema
+struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' - INTERVAL '2' DAY:timestamp_ntz>
+-- !query output
+2011-11-09 11:11:11
+
+
+-- !query
+select timestamp'2011-11-11 11:11:11' + interval '2' second
+-- !query schema
+struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' + INTERVAL '02' 
SECOND:timestamp_ntz>
+-- !query output
+2011-11-11 11:11:13
+
+
+-- !query
+select timestamp'2011-11-11 11:11:11' - interval '2' second
+-- !query schema
+struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' - INTERVAL '02' 
SECOND:timestamp_ntz>
+-- !query output
+2011-11-11 11:11:09
+
+
+-- !query
+select '2011-11-11 11:11:11' - interval '2' second
+-- !query schema
+struct<2011-11-11 11:11:11 - INTERVAL '02' SECOND:string>
+-- !query output
+2011-11-11 11:11:09
+
+
+-- !query
+select '1' - interval '2' second
+-- !query schema
+struct<1 - INTERVAL '02' SECOND:string>
+-- !query output
+NULL
+
+
+-- !query
+select 1 - interval '2' second
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: 
argument 1 requires (timestamp or timestamp without time zone) type, however, 
'1' is of int type.; line 1 pos 7
+
+
+-- !query
+select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
+-- !query schema
+struct<(DATE '2020-01-01' - TIMESTAMP_NTZ '2019-10-06 
10:11:12.345678'):interval day to second>
+-- !query output
+86 13:48:47.654322000
+
+
+-- !query
+select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01'
+-- !query schema
+struct<(TIMESTAMP_NTZ '2019-10-06 10:11:12.345678' - DATE 
'2020-01-01'):interval day to second>
+-- !query output
+-86 13:48:47.654322000
+
+
+-- !query
+select timestamp'2019-10-06 10:11:12.345678' - null
+-- !query schema
+struct<(TIMESTAMP_NTZ '2019-10-06 10:11:12.345678' - NULL):interval day to 
second>
+-- !query output
+NULL
+
+
+-- !query
+select null - timestamp'2019-10-06 10:11:12.345678'
+-- !query schema
+struct<(NULL - TIMESTAMP_NTZ '2019-10-06 10:11:12.345678'):interval day to 
second>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG')
+-- !query schema
+struct<>
+-- !query output
+java.lang.RuntimeException
+Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. You can 
form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE')
+-- !query schema
+struct<>
+-- !query output
+java.lang.RuntimeException
+Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. You 
can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
+-- !query schema
+struct<>
+-- !query output
+java.lang.RuntimeException
+Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. You can 
form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkUpgradeException
+You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select from_json('{"t":"26/October/2015"}', 't Timestamp', 
map('timestampFormat', 'dd/MMMMM/yyyy'))
+-- !query schema
+struct<from_json({"t":"26/October/2015"}):struct<t:timestamp_ntz>>
+-- !query output
+{"t":null}
+
+
+-- !query
+select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 
'dd/MMMMM/yyyy'))
+-- !query schema
+struct<>
+-- !query output
+java.lang.Exception

Review comment:
       interesting. `from_json` works but `from_csv` does not. cc 
@gengliangwang 

##########
File path: sql/core/src/test/resources/sql-tests/inputs/datetime-legacy.sql
##########
@@ -1,2 +1,3 @@
 --SET spark.sql.legacy.timeParserPolicy=LEGACY
---IMPORT datetime.sql
+--IMPORT date.sql
+--IMPORT timestamp.sql

Review comment:
       We should remove this test file in the future. The config 
`spark.sql.legacy.timeParserPolicy` only affects parsing, and 
`datetime-parsing-legacy.sql` should provide sufficient test coverage for 
parsing in legacy mode. 

##########
File path: sql/core/src/test/resources/sql-tests/inputs/date.sql
##########
@@ -0,0 +1,106 @@
+-- date literals, functions and operations
+
+select date '2019-01-01\t';
+select date '2020-01-01中文';
+
+-- date with year outside [0000-9999]
+select date'999999-03-18', date'-0001-1-28', date'0015';
+-- invalid: year field must have at least 4 digits
+select date'015';
+-- invalid: month field can have at most 2 digits
+select date'2021-4294967297-11';
+
+select current_date = current_date;
+-- under ANSI mode, `current_date` can't be a function name.
+select current_date() = current_date();

Review comment:
       Previously it was only one query, `select current_date = current_date();`, 
which doesn't work in ANSI mode, so we lost the test coverage.

##########
File path: sql/core/src/test/resources/sql-tests/inputs/datetime.sql
##########
@@ -1,295 +0,0 @@
--- date time functions
-
--- [SPARK-31710] TIMESTAMP_SECONDS, TIMESTAMP_MILLISECONDS and 
TIMESTAMP_MICROSECONDS to timestamp transfer
-select 
TIMESTAMP_SECONDS(1230219000),TIMESTAMP_SECONDS(-1230219000),TIMESTAMP_SECONDS(null);
-select TIMESTAMP_SECONDS(1.23), TIMESTAMP_SECONDS(1.23d), 
TIMESTAMP_SECONDS(FLOAT(1.23));
-select 
TIMESTAMP_MILLIS(1230219000123),TIMESTAMP_MILLIS(-1230219000123),TIMESTAMP_MILLIS(null);
-select 
TIMESTAMP_MICROS(1230219000123123),TIMESTAMP_MICROS(-1230219000123123),TIMESTAMP_MICROS(null);
--- overflow exception
-select TIMESTAMP_SECONDS(1230219000123123);
-select TIMESTAMP_SECONDS(-1230219000123123);
-select TIMESTAMP_MILLIS(92233720368547758);
-select TIMESTAMP_MILLIS(-92233720368547758);
--- truncate exception
-select TIMESTAMP_SECONDS(0.1234567);
--- truncation is OK for float/double
-select TIMESTAMP_SECONDS(0.1234567d), TIMESTAMP_SECONDS(FLOAT(0.1234567));
--- UNIX_SECONDS, UNIX_MILLISECONDS and UNIX_MICROSECONDS
-select UNIX_SECONDS(TIMESTAMP('2020-12-01 14:30:08Z')), 
UNIX_SECONDS(TIMESTAMP('2020-12-01 14:30:08.999999Z')), UNIX_SECONDS(null);
-select UNIX_MILLIS(TIMESTAMP('2020-12-01 14:30:08Z')), 
UNIX_MILLIS(TIMESTAMP('2020-12-01 14:30:08.999999Z')), UNIX_MILLIS(null);
-select UNIX_MICROS(TIMESTAMP('2020-12-01 14:30:08Z')), 
UNIX_MICROS(TIMESTAMP('2020-12-01 14:30:08.999999Z')), UNIX_MICROS(null);
--- DATE_FROM_UNIX_DATE
-select DATE_FROM_UNIX_DATE(0), DATE_FROM_UNIX_DATE(1000), 
DATE_FROM_UNIX_DATE(null);
--- UNIX_DATE
-select UNIX_DATE(DATE('1970-01-01')), UNIX_DATE(DATE('2020-12-04')), 
UNIX_DATE(null);
--- [SPARK-16836] current_date and current_timestamp literals
-select current_date = current_date(), current_timestamp = current_timestamp();
-select localtimestamp() = localtimestamp();
-
-select to_date(null), to_date('2016-12-31'), to_date('2016-12-31', 
'yyyy-MM-dd');
-
-select to_timestamp(null), to_timestamp('2016-12-31 00:12:00'), 
to_timestamp('2016-12-31', 'yyyy-MM-dd');
-
-select to_timestamp_ntz(null), to_timestamp_ntz('2016-12-31 00:12:00'), 
to_timestamp_ntz('2016-12-31', 'yyyy-MM-dd');
-select to_timestamp_ntz(to_date(null)), 
to_timestamp_ntz(to_date('2016-12-31')), to_timestamp_ntz(to_date('2016-12-31', 
'yyyy-MM-dd'));
-select to_timestamp_ntz(to_timestamp(null)), 
to_timestamp_ntz(to_timestamp('2016-12-31 00:12:00')), 
to_timestamp_ntz(to_timestamp('2016-12-31', 'yyyy-MM-dd'));
-
-select to_timestamp_ltz(null), to_timestamp_ltz('2016-12-31 00:12:00'), 
to_timestamp_ltz('2016-12-31', 'yyyy-MM-dd');
-select to_timestamp_ltz(to_date(null)), 
to_timestamp_ltz(to_date('2016-12-31')), to_timestamp_ltz(to_date('2016-12-31', 
'yyyy-MM-dd'));
-select to_timestamp_ltz(to_timestamp(null)), 
to_timestamp_ltz(to_timestamp('2016-12-31 00:12:00')), 
to_timestamp_ltz(to_timestamp('2016-12-31', 'yyyy-MM-dd'));
-
-select dayofweek('2007-02-03'), dayofweek('2009-07-30'), 
dayofweek('2017-05-27'), dayofweek(null), dayofweek('1582-10-15 13:10:15');
-
--- [SPARK-22333]: timeFunctionCall has conflicts with columnReference
-create temporary view ttf1 as select * from values
-  (1, 2),
-  (2, 3)
-  as ttf1(current_date, current_timestamp);
-  
-select current_date, current_timestamp from ttf1;
-
-create temporary view ttf2 as select * from values
-  (1, 2),
-  (2, 3)
-  as ttf2(a, b);
-  
-select current_date = current_date(), current_timestamp = current_timestamp(), 
a, b from ttf2;
-
-select a, b from ttf2 order by a, current_date;
-
-select weekday('2007-02-03'), weekday('2009-07-30'), weekday('2017-05-27'), 
weekday(null), weekday('1582-10-15 13:10:15');
-
-select year('1500-01-01'), month('1500-01-01'), dayOfYear('1500-01-01');
-
-
-select date '2019-01-01\t';
-select timestamp '2019-01-01\t';
-select date '2020-01-01中文';
-select timestamp '2019-01-01中文';
-
--- time add/sub
-select timestamp'2011-11-11 11:11:11' + interval '2' day;
-select timestamp'2011-11-11 11:11:11' - interval '2' day;
-select date'2011-11-11 11:11:11' + interval '2' second;
-select date'2011-11-11 11:11:11' - interval '2' second;
-select '2011-11-11' - interval '2' day;
-select '2011-11-11 11:11:11' - interval '2' second;
-select '1' - interval '2' second;
-select 1 - interval '2' second;
-
--- subtract timestamps
-select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678';
-select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01';
-select timestamp'2019-10-06 10:11:12.345678' - null;
-select null - timestamp'2019-10-06 10:11:12.345678';
-
--- subtract timestamps without time zone
-select date'2020-01-01' - to_timestamp_ntz('2019-10-06 10:11:12.345678');
-select to_timestamp_ntz('2019-10-06 10:11:12.345678') - date'2020-01-01';
-select to_timestamp_ntz('2019-10-06 10:11:12.345678') - null;
-select null - to_timestamp_ntz('2019-10-06 10:11:12.345678');
-select to_timestamp_ntz('2019-10-07 10:11:12.345678') - 
to_timestamp_ntz('2019-10-06 10:11:12.345677');
-select to_timestamp_ntz('2019-10-06 10:11:12.345677') - 
to_timestamp_ntz('2019-10-07 10:11:12.345678');
-select to_timestamp_ntz('2019-10-07 10:11:12.345678') - 
to_timestamp('2019-10-06 10:11:12.345678');
-select to_timestamp('2019-10-06 10:11:12.345678') - 
to_timestamp_ntz('2019-10-07 10:11:12.345678');
-
--- date add/sub
-select date_add('2011-11-11', 1Y);
-select date_add('2011-11-11', 1S);
-select date_add('2011-11-11', 1);
-select date_add('2011-11-11', 1L);
-select date_add('2011-11-11', 1.0);
-select date_add('2011-11-11', 1E1);
-select date_add('2011-11-11', '1');
-select date_add('2011-11-11', '1.2');
-select date_add(date'2011-11-11', 1);
-select date_add(timestamp'2011-11-11', 1);
-select date_sub(date'2011-11-11', 1);
-select date_sub(date'2011-11-11', '1');
-select date_sub(date'2011-11-11', '1.2');
-select date_sub(timestamp'2011-11-11', 1);
-select date_sub(null, 1);
-select date_sub(date'2011-11-11', null);
-select date'2011-11-11' + 1E1;
-select date'2011-11-11' + '1';
-select null + date '2001-09-28';
-select date '2001-09-28' + 7Y;
-select 7S + date '2001-09-28';
-select date '2001-10-01' - 7;
-select date '2001-10-01' - '7';
-select date '2001-09-28' + null;
-select date '2001-09-28' - null;
-
--- date add/sub with non-literal string column
-create temp view v as select '1' str;
-select date_add('2011-11-11', str) from v;
-select date_sub('2011-11-11', str) from v;
-
--- subtract dates
-select null - date '2019-10-06';
-select date '2001-10-01' - date '2001-09-28';
-
--- variable-length second fraction tests
-select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('2019-10-06 10:11:12.0', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('2019-10-06 10:11:12.1', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
--- second fraction exceeded max variable length
-select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
--- special cases
-select to_timestamp('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd 
HH:mm:ss.[SSSSSS]');
-select to_timestamp('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]');
-select to_timestamp('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]');
-select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]');
-select to_timestamp('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]');
-select to_timestamp("2019-10-06S10:11:12.12345", 
"yyyy-MM-dd'S'HH:mm:ss.SSSSSS");
-select to_timestamp("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm");
-select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm");
-select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm");
-select to_timestamp("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm");
-
-select to_timestamp("2019-10-06S", "yyyy-MM-dd'S'");
-select to_timestamp("S2019-10-06", "'S'yyyy-MM-dd");
-
-select to_timestamp("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS"); 
-- middle
-select to_timestamp("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''"); -- tail
-select to_timestamp("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss"); -- head
-select to_timestamp("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss"); -- 
head but as single quote
-
--- variable-length second fraction tests

Review comment:
       To @MaxGekk : I removed these duplicated test cases. We will test 
`to_timestamp` with both default and ntz mode, so we won't lose test coverage.

##########
File path: sql/core/src/test/resources/sql-tests/inputs/datetime.sql
##########
@@ -1,295 +0,0 @@
--- date time functions
-
--- [SPARK-31710] TIMESTAMP_SECONDS, TIMESTAMP_MILLISECONDS and 
TIMESTAMP_MICROSECONDS to timestamp transfer
-select 
TIMESTAMP_SECONDS(1230219000),TIMESTAMP_SECONDS(-1230219000),TIMESTAMP_SECONDS(null);
-select TIMESTAMP_SECONDS(1.23), TIMESTAMP_SECONDS(1.23d), 
TIMESTAMP_SECONDS(FLOAT(1.23));
-select 
TIMESTAMP_MILLIS(1230219000123),TIMESTAMP_MILLIS(-1230219000123),TIMESTAMP_MILLIS(null);
-select 
TIMESTAMP_MICROS(1230219000123123),TIMESTAMP_MICROS(-1230219000123123),TIMESTAMP_MICROS(null);
--- overflow exception
-select TIMESTAMP_SECONDS(1230219000123123);
-select TIMESTAMP_SECONDS(-1230219000123123);
-select TIMESTAMP_MILLIS(92233720368547758);
-select TIMESTAMP_MILLIS(-92233720368547758);
--- truncate exception
-select TIMESTAMP_SECONDS(0.1234567);
--- truncation is OK for float/double
-select TIMESTAMP_SECONDS(0.1234567d), TIMESTAMP_SECONDS(FLOAT(0.1234567));
--- UNIX_SECONDS, UNIX_MILLISECONDS and UNIX_MICROSECONDS
-select UNIX_SECONDS(TIMESTAMP('2020-12-01 14:30:08Z')), 
UNIX_SECONDS(TIMESTAMP('2020-12-01 14:30:08.999999Z')), UNIX_SECONDS(null);
-select UNIX_MILLIS(TIMESTAMP('2020-12-01 14:30:08Z')), 
UNIX_MILLIS(TIMESTAMP('2020-12-01 14:30:08.999999Z')), UNIX_MILLIS(null);
-select UNIX_MICROS(TIMESTAMP('2020-12-01 14:30:08Z')), 
UNIX_MICROS(TIMESTAMP('2020-12-01 14:30:08.999999Z')), UNIX_MICROS(null);
--- DATE_FROM_UNIX_DATE
-select DATE_FROM_UNIX_DATE(0), DATE_FROM_UNIX_DATE(1000), 
DATE_FROM_UNIX_DATE(null);
--- UNIX_DATE
-select UNIX_DATE(DATE('1970-01-01')), UNIX_DATE(DATE('2020-12-04')), 
UNIX_DATE(null);
--- [SPARK-16836] current_date and current_timestamp literals
-select current_date = current_date(), current_timestamp = current_timestamp();
-select localtimestamp() = localtimestamp();
-
-select to_date(null), to_date('2016-12-31'), to_date('2016-12-31', 
'yyyy-MM-dd');
-
-select to_timestamp(null), to_timestamp('2016-12-31 00:12:00'), 
to_timestamp('2016-12-31', 'yyyy-MM-dd');
-
-select to_timestamp_ntz(null), to_timestamp_ntz('2016-12-31 00:12:00'), 
to_timestamp_ntz('2016-12-31', 'yyyy-MM-dd');
-select to_timestamp_ntz(to_date(null)), 
to_timestamp_ntz(to_date('2016-12-31')), to_timestamp_ntz(to_date('2016-12-31', 
'yyyy-MM-dd'));
-select to_timestamp_ntz(to_timestamp(null)), 
to_timestamp_ntz(to_timestamp('2016-12-31 00:12:00')), 
to_timestamp_ntz(to_timestamp('2016-12-31', 'yyyy-MM-dd'));
-
-select to_timestamp_ltz(null), to_timestamp_ltz('2016-12-31 00:12:00'), 
to_timestamp_ltz('2016-12-31', 'yyyy-MM-dd');
-select to_timestamp_ltz(to_date(null)), 
to_timestamp_ltz(to_date('2016-12-31')), to_timestamp_ltz(to_date('2016-12-31', 
'yyyy-MM-dd'));
-select to_timestamp_ltz(to_timestamp(null)), 
to_timestamp_ltz(to_timestamp('2016-12-31 00:12:00')), 
to_timestamp_ltz(to_timestamp('2016-12-31', 'yyyy-MM-dd'));
-
-select dayofweek('2007-02-03'), dayofweek('2009-07-30'), 
dayofweek('2017-05-27'), dayofweek(null), dayofweek('1582-10-15 13:10:15');
-
--- [SPARK-22333]: timeFunctionCall has conflicts with columnReference
-create temporary view ttf1 as select * from values
-  (1, 2),
-  (2, 3)
-  as ttf1(current_date, current_timestamp);
-  
-select current_date, current_timestamp from ttf1;
-
-create temporary view ttf2 as select * from values
-  (1, 2),
-  (2, 3)
-  as ttf2(a, b);
-  
-select current_date = current_date(), current_timestamp = current_timestamp(), 
a, b from ttf2;
-
-select a, b from ttf2 order by a, current_date;
-
-select weekday('2007-02-03'), weekday('2009-07-30'), weekday('2017-05-27'), 
weekday(null), weekday('1582-10-15 13:10:15');
-
-select year('1500-01-01'), month('1500-01-01'), dayOfYear('1500-01-01');
-
-
-select date '2019-01-01\t';
-select timestamp '2019-01-01\t';
-select date '2020-01-01中文';
-select timestamp '2019-01-01中文';
-
--- time add/sub
-select timestamp'2011-11-11 11:11:11' + interval '2' day;
-select timestamp'2011-11-11 11:11:11' - interval '2' day;
-select date'2011-11-11 11:11:11' + interval '2' second;
-select date'2011-11-11 11:11:11' - interval '2' second;
-select '2011-11-11' - interval '2' day;
-select '2011-11-11 11:11:11' - interval '2' second;
-select '1' - interval '2' second;
-select 1 - interval '2' second;
-
--- subtract timestamps
-select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678';
-select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01';
-select timestamp'2019-10-06 10:11:12.345678' - null;
-select null - timestamp'2019-10-06 10:11:12.345678';
-
--- subtract timestamps without time zone
-select date'2020-01-01' - to_timestamp_ntz('2019-10-06 10:11:12.345678');
-select to_timestamp_ntz('2019-10-06 10:11:12.345678') - date'2020-01-01';
-select to_timestamp_ntz('2019-10-06 10:11:12.345678') - null;
-select null - to_timestamp_ntz('2019-10-06 10:11:12.345678');
-select to_timestamp_ntz('2019-10-07 10:11:12.345678') - 
to_timestamp_ntz('2019-10-06 10:11:12.345677');
-select to_timestamp_ntz('2019-10-06 10:11:12.345677') - 
to_timestamp_ntz('2019-10-07 10:11:12.345678');
-select to_timestamp_ntz('2019-10-07 10:11:12.345678') - 
to_timestamp('2019-10-06 10:11:12.345678');
-select to_timestamp('2019-10-06 10:11:12.345678') - 
to_timestamp_ntz('2019-10-07 10:11:12.345678');
-
--- date add/sub
-select date_add('2011-11-11', 1Y);
-select date_add('2011-11-11', 1S);
-select date_add('2011-11-11', 1);
-select date_add('2011-11-11', 1L);
-select date_add('2011-11-11', 1.0);
-select date_add('2011-11-11', 1E1);
-select date_add('2011-11-11', '1');
-select date_add('2011-11-11', '1.2');
-select date_add(date'2011-11-11', 1);
-select date_add(timestamp'2011-11-11', 1);
-select date_sub(date'2011-11-11', 1);
-select date_sub(date'2011-11-11', '1');
-select date_sub(date'2011-11-11', '1.2');
-select date_sub(timestamp'2011-11-11', 1);
-select date_sub(null, 1);
-select date_sub(date'2011-11-11', null);
-select date'2011-11-11' + 1E1;
-select date'2011-11-11' + '1';
-select null + date '2001-09-28';
-select date '2001-09-28' + 7Y;
-select 7S + date '2001-09-28';
-select date '2001-10-01' - 7;
-select date '2001-10-01' - '7';
-select date '2001-09-28' + null;
-select date '2001-09-28' - null;
-
--- date add/sub with non-literal string column
-create temp view v as select '1' str;
-select date_add('2011-11-11', str) from v;
-select date_sub('2011-11-11', str) from v;
-
--- subtract dates
-select null - date '2019-10-06';
-select date '2001-10-01' - date '2001-09-28';
-
--- variable-length second fraction tests
-select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('2019-10-06 10:11:12.0', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('2019-10-06 10:11:12.1', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
--- second fraction exceeded max variable length
-select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
--- special cases
-select to_timestamp('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd 
HH:mm:ss.[SSSSSS]');
-select to_timestamp('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]');
-select to_timestamp('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]');
-select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]');
-select to_timestamp('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]');
-select to_timestamp("2019-10-06S10:11:12.12345", 
"yyyy-MM-dd'S'HH:mm:ss.SSSSSS");
-select to_timestamp("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm");
-select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm");
-select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm");
-select to_timestamp("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm");
-
-select to_timestamp("2019-10-06S", "yyyy-MM-dd'S'");
-select to_timestamp("S2019-10-06", "'S'yyyy-MM-dd");
-
-select to_timestamp("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS"); 
-- middle
-select to_timestamp("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''"); -- tail
-select to_timestamp("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss"); -- head
-select to_timestamp("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss"); -- 
head but as single quote
-
--- variable-length second fraction tests
-select to_timestamp_ntz('2019-10-06 10:11:12.', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp_ntz('2019-10-06 10:11:12.0', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp_ntz('2019-10-06 10:11:12.1', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp_ntz('2019-10-06 10:11:12.12', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp_ntz('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp_ntz('2019-10-06 10:11:12.1234', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp_ntz('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp_ntz('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
--- second fraction exceeded max variable length
-select to_timestamp_ntz('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]');
--- special cases
-select to_timestamp_ntz('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS 
yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp_ntz('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS 
yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]');
-select to_timestamp_ntz('2019-10-06 10:11:12.1234', 'yyyy-MM-dd 
HH:mm:ss.[SSSSSS]');
-select to_timestamp_ntz('2019-10-06 10:11:12.123', 'yyyy-MM-dd 
HH:mm:ss[.SSSSSS]');
-select to_timestamp_ntz('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]');
-select to_timestamp_ntz('2019-10-06 10:11:12.12', 'yyyy-MM-dd 
HH:mm[:ss.SSSSSS]');
-select to_timestamp_ntz('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]');
-select to_timestamp_ntz("2019-10-06S10:11:12.12345", 
"yyyy-MM-dd'S'HH:mm:ss.SSSSSS");
-select to_timestamp_ntz("12.12342019-10-06S10:11", 
"ss.SSSSyyyy-MM-dd'S'HH:mm");
-select to_timestamp_ntz("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm");
-select to_timestamp_ntz("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm");
-select to_timestamp_ntz("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm");
-
-select to_timestamp_ntz("2019-10-06S", "yyyy-MM-dd'S'");
-select to_timestamp_ntz("S2019-10-06", "'S'yyyy-MM-dd");
-
-select to_timestamp_ntz("2019-10-06T10:11:12'12", 
"yyyy-MM-dd'T'HH:mm:ss''SSSS"); -- middle
-select to_timestamp_ntz("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''"); -- 
tail
-select to_timestamp_ntz("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss"); -- 
head
-select to_timestamp_ntz("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss"); 
-- head but as single quote
-
--- missing fields
-select to_timestamp("16", "dd");
-select to_timestamp("02-29", "MM-dd");
-select to_timestamp_ntz("16", "dd");
-select to_timestamp_ntz("02-29", "MM-dd");
-select to_date("16", "dd");
-select to_date("02-29", "MM-dd");
-select to_timestamp("2019 40", "yyyy mm");
-select to_timestamp("2019 10:10:10", "yyyy hh:mm:ss");
-select to_timestamp_ntz("2019 40", "yyyy mm");
-select to_timestamp_ntz("2019 10:10:10", "yyyy hh:mm:ss");
-
--- Unsupported narrow text style
-select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG');
-select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE');
-select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE');
-select to_timestamp_ntz('2019-10-06 A', 'yyyy-MM-dd GGGGG');
-select to_timestamp_ntz('22 05 2020 Friday', 'dd MM yyyy EEEEEE');
-select to_timestamp_ntz('22 05 2020 Friday', 'dd MM yyyy EEEEE');
-select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE');
-select from_json('{"t":"26/October/2015"}', 't Timestamp', 
map('timestampFormat', 'dd/MMMMM/yyyy'));
-select from_json('{"d":"26/October/2015"}', 'd Date', map('dateFormat', 
'dd/MMMMM/yyyy'));
-select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 
'dd/MMMMM/yyyy'));
-select from_csv('26/October/2015', 'd Date', map('dateFormat', 
'dd/MMMMM/yyyy'));
-
--- Datetime types parse error
-select to_date("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS");
-select to_date("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS");
-select to_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS");
-select to_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS");
-select to_timestamp_ntz("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS");
-select to_timestamp_ntz("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS");
-select unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS");
-select unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS");
-select to_unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS");
-select to_unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS");
-select cast("Unparseable" as timestamp);
-select cast("Unparseable" as date);
-
--- next_day
-select next_day("2015-07-23", "Mon");
-select next_day("2015-07-23", "xx");
-select next_day("xx", "Mon");
-select next_day(null, "Mon");
-select next_day(null, "xx");
-
--- TimestampNTZ + Intervals

Review comment:
       ditto

##########
File path: sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
##########
@@ -0,0 +1,536 @@
+-- Automatically generated by SQLQueryTestSuite
+-- Number of queries: 61
+
+
+-- !query
+select date '2019-01-01\t'
+-- !query schema
+struct<DATE '2019-01-01':date>
+-- !query output
+2019-01-01
+
+
+-- !query
+select date '2020-01-01中文'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the DATE value: 2020-01-01中文(line 1, pos 7)
+
+== SQL ==
+select date '2020-01-01中文'
+-------^^^
+
+
+-- !query
+select date'999999-03-18', date'-0001-1-28', date'0015'
+-- !query schema
+struct<DATE '+999999-03-18':date,DATE '-0001-01-28':date,DATE 
'0015-01-01':date>
+-- !query output
++999999-03-18  -0001-01-28     0015-01-01
+
+
+-- !query
+select date'015'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the DATE value: 015(line 1, pos 7)
+
+== SQL ==
+select date'015'
+-------^^^
+
+
+-- !query
+select date'2021-4294967297-11'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the DATE value: 2021-4294967297-11(line 1, pos 7)
+
+== SQL ==
+select date'2021-4294967297-11'
+-------^^^
+
+
+-- !query
+select current_date = current_date
+-- !query schema
+struct<(current_date() = current_date()):boolean>
+-- !query output
+true
+
+
+-- !query
+select current_date() = current_date()
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+no viable alternative at input 'current_date'(line 1, pos 7)
+
+== SQL ==
+select current_date() = current_date()
+-------^^^
+
+
+-- !query
+select DATE_FROM_UNIX_DATE(0), DATE_FROM_UNIX_DATE(1000), 
DATE_FROM_UNIX_DATE(null)
+-- !query schema
+struct<date_from_unix_date(0):date,date_from_unix_date(1000):date,date_from_unix_date(NULL):date>
+-- !query output
+1970-01-01     1972-09-27      NULL
+
+
+-- !query
+select UNIX_DATE(DATE('1970-01-01')), UNIX_DATE(DATE('2020-12-04')), 
UNIX_DATE(null)
+-- !query schema
+struct<unix_date(1970-01-01):int,unix_date(2020-12-04):int,unix_date(NULL):int>
+-- !query output
+0      18600   NULL
+
+
+-- !query
+select to_date(null), to_date('2016-12-31'), to_date('2016-12-31', 
'yyyy-MM-dd')
+-- !query schema
+struct<to_date(NULL):date,to_date(2016-12-31):date,to_date(2016-12-31, 
yyyy-MM-dd):date>
+-- !query output
+NULL   2016-12-31      2016-12-31
+
+
+-- !query
+select to_date("16", "dd")
+-- !query schema
+struct<to_date(16, dd):date>
+-- !query output
+1970-01-16
+
+
+-- !query
+select to_date("02-29", "MM-dd")
+-- !query schema
+struct<>
+-- !query output
+java.time.DateTimeException
+Invalid date 'February 29' as '1970' is not a leap year
+
+
+-- !query
+select dayofweek('2007-02-03'), dayofweek('2009-07-30'), 
dayofweek('2017-05-27'), dayofweek(null),
+  dayofweek('1582-10-15 13:10:15'), dayofweek(timestamp_ltz'1582-10-15 
13:10:15'), dayofweek(timestamp_ntz'1582-10-15 13:10:15')
+-- !query schema
+struct<dayofweek(2007-02-03):int,dayofweek(2009-07-30):int,dayofweek(2017-05-27):int,dayofweek(NULL):int,dayofweek(1582-10-15
 13:10:15):int,dayofweek(TIMESTAMP '1582-10-15 
13:10:15'):int,dayofweek(TIMESTAMP_NTZ '1582-10-15 13:10:15'):int>
+-- !query output
+7      5       7       NULL    6       6       6
+
+
+-- !query
+select weekday('2007-02-03'), weekday('2009-07-30'), weekday('2017-05-27'), 
weekday(null),
+  weekday('1582-10-15 13:10:15'), weekday(timestamp_ltz'1582-10-15 13:10:15'), 
weekday(timestamp_ntz'1582-10-15 13:10:15')
+-- !query schema
+struct<weekday(2007-02-03):int,weekday(2009-07-30):int,weekday(2017-05-27):int,weekday(NULL):int,weekday(1582-10-15
 13:10:15):int,weekday(TIMESTAMP '1582-10-15 
13:10:15'):int,weekday(TIMESTAMP_NTZ '1582-10-15 13:10:15'):int>
+-- !query output
+5      3       5       NULL    4       4       4
+
+
+-- !query
+select year('1500-01-01'), year('1582-10-15 13:10:15'), 
year(timestamp_ltz'1582-10-15 13:10:15'), year(timestamp_ntz'1582-10-15 
13:10:15')
+-- !query schema
+struct<year(1500-01-01):int,year(1582-10-15 13:10:15):int,year(TIMESTAMP 
'1582-10-15 13:10:15'):int,year(TIMESTAMP_NTZ '1582-10-15 13:10:15'):int>
+-- !query output
+1500   1582    1582    1582
+
+
+-- !query
+select month('1500-01-01'), month('1582-10-15 13:10:15'), 
month(timestamp_ltz'1582-10-15 13:10:15'), month(timestamp_ntz'1582-10-15 
13:10:15')
+-- !query schema
+struct<month(1500-01-01):int,month(1582-10-15 13:10:15):int,month(TIMESTAMP 
'1582-10-15 13:10:15'):int,month(TIMESTAMP_NTZ '1582-10-15 13:10:15'):int>
+-- !query output
+1      10      10      10
+
+
+-- !query
+select dayOfYear('1500-01-01'), dayOfYear('1582-10-15 13:10:15'), 
dayOfYear(timestamp_ltz'1582-10-15 13:10:15'), 
dayOfYear(timestamp_ntz'1582-10-15 13:10:15')
+-- !query schema
+struct<dayofyear(1500-01-01):int,dayofyear(1582-10-15 
13:10:15):int,dayofyear(TIMESTAMP '1582-10-15 
13:10:15'):int,dayofyear(TIMESTAMP_NTZ '1582-10-15 13:10:15'):int>
+-- !query output
+1      288     288     288
+
+
+-- !query
+select next_day("2015-07-23", "Mon")
+-- !query schema
+struct<next_day(2015-07-23, Mon):date>
+-- !query output
+2015-07-27
+
+
+-- !query
+select next_day("2015-07-23", "xx")
+-- !query schema
+struct<>
+-- !query output
+java.lang.IllegalArgumentException
+Illegal input for day of week: xx
+
+
+-- !query
+select next_day("2015-07-23 12:12:12", "Mon")
+-- !query schema
+struct<next_day(2015-07-23 12:12:12, Mon):date>
+-- !query output
+2015-07-27
+
+
+-- !query
+select next_day(timestamp_ltz"2015-07-23 12:12:12", "Mon")
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'next_day(TIMESTAMP '2015-07-23 12:12:12', 'Mon')' due to data 
type mismatch: argument 1 requires date type, however, 'TIMESTAMP '2015-07-23 
12:12:12'' is of timestamp type.; line 1 pos 7
+
+
+-- !query
+select next_day(timestamp_ntz"2015-07-23 12:12:12", "Mon")
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'next_day(TIMESTAMP_NTZ '2015-07-23 12:12:12', 'Mon')' due to 
data type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ 
'2015-07-23 12:12:12'' is of timestamp_ntz type.; line 1 pos 7
+
+
+-- !query
+select next_day("xx", "Mon")
+-- !query schema
+struct<>
+-- !query output
+java.time.DateTimeException
+Cannot cast xx to DateType.
+
+
+-- !query
+select next_day(null, "Mon")
+-- !query schema
+struct<next_day(NULL, Mon):date>
+-- !query output
+NULL
+
+
+-- !query
+select next_day(null, "xx")
+-- !query schema
+struct<next_day(NULL, xx):date>
+-- !query output
+NULL
+
+
+-- !query
+select date_add('2011-11-11', 1Y)
+-- !query schema
+struct<date_add(2011-11-11, 1):date>
+-- !query output
+2011-11-12
+
+
+-- !query
+select date_add('2011-11-11', 1S)
+-- !query schema
+struct<date_add(2011-11-11, 1):date>
+-- !query output
+2011-11-12
+
+
+-- !query
+select date_add('2011-11-11', 1)
+-- !query schema
+struct<date_add(2011-11-11, 1):date>
+-- !query output
+2011-11-12
+
+
+-- !query
+select date_add('2011-11-11', 1L)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type 
mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' 
is of bigint type.; line 1 pos 7
+
+
+-- !query
+select date_add('2011-11-11', 1.0)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type 
mismatch: argument 2 requires (int or smallint or tinyint) type, however, 
'1.0BD' is of decimal(2,1) type.; line 1 pos 7
+
+
+-- !query
+select date_add('2011-11-11', 1E1)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type 
mismatch: argument 2 requires (int or smallint or tinyint) type, however, 
'10.0D' is of double type.; line 1 pos 7
+
+
+-- !query
+select date_add('2011-11-11', '1')
+-- !query schema
+struct<date_add(2011-11-11, 1):date>
+-- !query output
+2011-11-12
+
+
+-- !query
+select date_add('2011-11-11', '1.2')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+The second argument of 'date_add' function needs to be an integer.
+
+
+-- !query
+select date_add(null, 1)
+-- !query schema
+struct<date_add(NULL, 1):date>
+-- !query output
+NULL
+
+
+-- !query
+select date_add(date'2011-11-11', null)
+-- !query schema
+struct<date_add(DATE '2011-11-11', NULL):date>
+-- !query output
+NULL
+
+
+-- !query
+select date_add(date'2011-11-11', 1)
+-- !query schema
+struct<date_add(DATE '2011-11-11', 1):date>
+-- !query output
+2011-11-12
+
+
+-- !query
+select date_add(timestamp_ltz'2011-11-11 12:12:12', 1), 
date_add(timestamp_ntz'2011-11-11 12:12:12', 1)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_add(TIMESTAMP_NTZ '2011-11-11 12:12:12', 1)' due to data 
type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ 
'2011-11-11 12:12:12'' is of timestamp_ntz type.; line 1 pos 56
+
+
+-- !query
+select date_sub(date'2011-11-11', 1)
+-- !query schema
+struct<date_sub(DATE '2011-11-11', 1):date>
+-- !query output
+2011-11-10
+
+
+-- !query
+select date_sub(date'2011-11-11', '1')
+-- !query schema
+struct<date_sub(DATE '2011-11-11', 1):date>
+-- !query output
+2011-11-10
+
+
+-- !query
+select date_sub(date'2011-11-11', '1.2')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+The second argument of 'date_sub' function needs to be an integer.
+
+
+-- !query
+select date_sub(timestamp_ltz'2011-11-11 12:12:12', 1), 
date_sub(timestamp_ntz'2011-11-11 12:12:12', 1)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_sub(TIMESTAMP_NTZ '2011-11-11 12:12:12', 1)' due to data 
type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ 
'2011-11-11 12:12:12'' is of timestamp_ntz type.; line 1 pos 56

Review comment:
       We should fix this later.

##########
File path: sql/core/src/test/resources/sql-tests/inputs/timestamp.sql
##########
@@ -0,0 +1,104 @@
+-- timestamp literals, functions and operations
+
+select timestamp '2019-01-01\t';
+select timestamp '2019-01-01中文';
+
+-- timestamp with year outside [0000-9999]
+select timestamp'-1969-12-31 16:00:00', timestamp'0015-03-18 16:00:00', 
timestamp'-000001', timestamp'99999-03-18T12:03:17';
+-- invalid: year too large
+select timestamp'4294967297';
+-- invalid: minute field can have at most 2 digits
+select timestamp'2021-01-01T12:30:4294967297.123456';
+
+select current_timestamp = current_timestamp;
+-- under ANSI mode, `current_timestamp` can't be a function name.
+select current_timestamp() = current_timestamp();
+
+select localtimestamp() = localtimestamp();
+
+-- timestamp numeric fields constructor
+SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678);
+SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678, 'CET');
+SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007);
+
+-- [SPARK-22333]: timeFunctionCall has conflicts with columnReference
+create temporary view ttf1 as select * from values
+  (1, 2),
+  (2, 3)
+  as ttf1(`current_date`, `current_timestamp`);

Review comment:
       I quoted the column name here so that this test can run with ANSI mode.

##########
File path: 
sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
##########
@@ -0,0 +1,579 @@
+-- Automatically generated by SQLQueryTestSuite
+-- Number of queries: 68
+
+
+-- !query
+select timestamp '2019-01-01\t'
+-- !query schema
+struct<TIMESTAMP_NTZ '2019-01-01 00:00:00':timestamp_ntz>
+-- !query output
+2019-01-01 00:00:00
+
+
+-- !query
+select timestamp '2019-01-01中文'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)
+
+== SQL ==
+select timestamp '2019-01-01中文'
+-------^^^
+
+
+-- !query
+select timestamp'-1969-12-31 16:00:00', timestamp'0015-03-18 16:00:00', 
timestamp'-000001', timestamp'99999-03-18T12:03:17'
+-- !query schema
+struct<TIMESTAMP_NTZ '-1969-12-31 16:00:00':timestamp_ntz,TIMESTAMP_NTZ 
'0015-03-18 16:00:00':timestamp_ntz,TIMESTAMP_NTZ '-0001-01-01 
00:00:00':timestamp_ntz,TIMESTAMP_NTZ '+99999-03-18 12:03:17':timestamp_ntz>
+-- !query output
+-1969-12-31 16:00:00   0015-03-18 16:00:00     -0001-01-01 00:00:00    
+99999-03-18 12:03:17
+
+
+-- !query
+select timestamp'4294967297'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the TIMESTAMP value: 4294967297(line 1, pos 7)
+
+== SQL ==
+select timestamp'4294967297'
+-------^^^
+
+
+-- !query
+select timestamp'2021-01-01T12:30:4294967297.123456'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the TIMESTAMP value: 2021-01-01T12:30:4294967297.123456(line 1, 
pos 7)
+
+== SQL ==
+select timestamp'2021-01-01T12:30:4294967297.123456'
+-------^^^
+
+
+-- !query
+select current_timestamp = current_timestamp
+-- !query schema
+struct<(current_timestamp() = current_timestamp()):boolean>
+-- !query output
+true
+
+
+-- !query
+select current_timestamp() = current_timestamp()
+-- !query schema
+struct<(current_timestamp() = current_timestamp()):boolean>
+-- !query output
+true
+
+
+-- !query
+select localtimestamp() = localtimestamp()
+-- !query schema
+struct<(localtimestamp() = localtimestamp()):boolean>
+-- !query output
+true
+
+
+-- !query
+SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678)
+-- !query schema
+struct<make_timestamp(2021, 7, 11, 6, 30, 45.678):timestamp_ntz>
+-- !query output
+2021-07-11 06:30:45.678
+
+
+-- !query
+SELECT make_timestamp(2021, 07, 11, 6, 30, 45.678, 'CET')
+-- !query schema
+struct<make_timestamp(2021, 7, 11, 6, 30, 45.678, CET):timestamp_ntz>
+-- !query output
+2021-07-11 06:30:45.678
+
+
+-- !query
+SELECT make_timestamp(2021, 07, 11, 6, 30, 60.007)
+-- !query schema
+struct<make_timestamp(2021, 7, 11, 6, 30, 60.007):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+create temporary view ttf1 as select * from values
+  (1, 2),
+  (2, 3)
+  as ttf1(current_date, current_timestamp)
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+select current_date, current_timestamp from ttf1
+-- !query schema
+struct<current_date:int,current_timestamp:int>
+-- !query output
+1      2
+2      3
+
+
+-- !query
+create temporary view ttf2 as select * from values
+  (1, 2),
+  (2, 3)
+  as ttf2(a, b)
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+select current_date = current_date(), current_timestamp = current_timestamp(), 
a, b from ttf2
+-- !query schema
+struct<(current_date() = current_date()):boolean,(current_timestamp() = 
current_timestamp()):boolean,a:int,b:int>
+-- !query output
+true   true    1       2
+true   true    2       3
+
+
+-- !query
+select a, b from ttf2 order by a, current_date
+-- !query schema
+struct<a:int,b:int>
+-- !query output
+1      2
+2      3
+
+
+-- !query
+select UNIX_SECONDS(timestamp'2020-12-01 14:30:08Z'), 
UNIX_SECONDS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_SECONDS(null)
+-- !query schema
+struct<unix_seconds(TIMESTAMP '2020-12-01 
06:30:08'):bigint,unix_seconds(TIMESTAMP '2020-12-01 
06:30:08.999999'):bigint,unix_seconds(NULL):bigint>
+-- !query output
+1606833008     1606833008      NULL
+
+
+-- !query
+select UNIX_MILLIS(timestamp'2020-12-01 14:30:08Z'), 
UNIX_MILLIS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_MILLIS(null)
+-- !query schema
+struct<unix_millis(TIMESTAMP '2020-12-01 
06:30:08'):bigint,unix_millis(TIMESTAMP '2020-12-01 
06:30:08.999999'):bigint,unix_millis(NULL):bigint>
+-- !query output
+1606833008000  1606833008999   NULL
+
+
+-- !query
+select UNIX_MICROS(timestamp'2020-12-01 14:30:08Z'), 
UNIX_MICROS(timestamp'2020-12-01 14:30:08.999999Z'), UNIX_MICROS(null)
+-- !query schema
+struct<unix_micros(TIMESTAMP '2020-12-01 
06:30:08'):bigint,unix_micros(TIMESTAMP '2020-12-01 
06:30:08.999999'):bigint,unix_micros(NULL):bigint>
+-- !query output
+1606833008000000       1606833008999999        NULL
+
+
+-- !query
+select to_timestamp(null), to_timestamp('2016-12-31 00:12:00'), 
to_timestamp('2016-12-31', 'yyyy-MM-dd')
+-- !query schema
+struct<to_timestamp(NULL):timestamp_ntz,to_timestamp(2016-12-31 
00:12:00):timestamp_ntz,to_timestamp(2016-12-31, yyyy-MM-dd):timestamp_ntz>
+-- !query output
+NULL   2016-12-31 00:12:00     2016-12-31 00:00:00
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12., yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.0', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.0, yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.1', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.1, yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.1
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.12
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.123UTC, yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.123
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.1234
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.12345CST, yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.12345
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.123456PST, yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.123456
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.1234567PST, yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(123456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.123456
+
+
+-- !query
+select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(223456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd 
HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.[SSSSSS]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd 
HH:mm:ss.[SSSSSS]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.1234
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.123, yyyy-MM-dd 
HH:mm:ss[.SSSSSS]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.123
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12, yyyy-MM-dd 
HH:mm:ss[.SSSSSS]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd 
HH:mm[:ss.SSSSSS]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.12
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11, yyyy-MM-dd 
HH:mm[:ss.SSSSSS]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:00
+
+
+-- !query
+select to_timestamp("2019-10-06S10:11:12.12345", 
"yyyy-MM-dd'S'HH:mm:ss.SSSSSS")
+-- !query schema
+struct<to_timestamp(2019-10-06S10:11:12.12345, 
yyyy-MM-dd'S'HH:mm:ss.SSSSSS):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.12345
+
+
+-- !query
+select to_timestamp("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
+-- !query schema
+struct<to_timestamp(12.12342019-10-06S10:11, 
ss.SSSSyyyy-MM-dd'S'HH:mm):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.1234
+
+
+-- !query
+select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
+-- !query schema
+struct<to_timestamp(12.1232019-10-06S10:11, 
ss.SSSSyyyy-MM-dd'S'HH:mm):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm")
+-- !query schema
+struct<to_timestamp(12.1232019-10-06S10:11, 
ss.SSSSyy-MM-dd'S'HH:mm):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm")
+-- !query schema
+struct<to_timestamp(12.1234019-10-06S10:11, 
ss.SSSSy-MM-dd'S'HH:mm):timestamp_ntz>
+-- !query output
+0019-10-06 10:11:12.1234
+
+
+-- !query
+select to_timestamp("2019-10-06S", "yyyy-MM-dd'S'")
+-- !query schema
+struct<to_timestamp(2019-10-06S, yyyy-MM-dd'S'):timestamp_ntz>
+-- !query output
+2019-10-06 00:00:00
+
+
+-- !query
+select to_timestamp("S2019-10-06", "'S'yyyy-MM-dd")
+-- !query schema
+struct<to_timestamp(S2019-10-06, 'S'yyyy-MM-dd):timestamp_ntz>
+-- !query output
+2019-10-06 00:00:00
+
+
+-- !query
+select to_timestamp("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS")
+-- !query schema
+struct<to_timestamp(2019-10-06T10:11:12'12, 
yyyy-MM-dd'T'HH:mm:ss''SSSS):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.12
+
+
+-- !query
+select to_timestamp("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''")
+-- !query schema
+struct<to_timestamp(2019-10-06T10:11:12', 
yyyy-MM-dd'T'HH:mm:ss''):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss")
+-- !query schema
+struct<to_timestamp('2019-10-06T10:11:12, 
''yyyy-MM-dd'T'HH:mm:ss):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss")
+-- !query schema
+struct<to_timestamp(P2019-10-06T10:11:12, 
'P'yyyy-MM-dd'T'HH:mm:ss):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp("16", "dd")
+-- !query schema
+struct<to_timestamp(16, dd):timestamp_ntz>
+-- !query output
+1970-01-16 00:00:00
+
+
+-- !query
+select to_timestamp("02-29", "MM-dd")
+-- !query schema
+struct<to_timestamp(02-29, MM-dd):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp("2019 40", "yyyy mm")
+-- !query schema
+struct<to_timestamp(2019 40, yyyy mm):timestamp_ntz>
+-- !query output
+2019-01-01 00:40:00
+
+
+-- !query
+select to_timestamp("2019 10:10:10", "yyyy hh:mm:ss")
+-- !query schema
+struct<to_timestamp(2019 10:10:10, yyyy hh:mm:ss):timestamp_ntz>
+-- !query output
+2019-01-01 10:10:10
+
+
+-- !query
+select timestamp'2011-11-11 11:11:11' + interval '2' day
+-- !query schema
+struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' + INTERVAL '2' DAY:timestamp_ntz>
+-- !query output
+2011-11-13 11:11:11
+
+
+-- !query
+select timestamp'2011-11-11 11:11:11' - interval '2' day
+-- !query schema
+struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' - INTERVAL '2' DAY:timestamp_ntz>
+-- !query output
+2011-11-09 11:11:11
+
+
+-- !query
+select timestamp'2011-11-11 11:11:11' + interval '2' second
+-- !query schema
+struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' + INTERVAL '02' 
SECOND:timestamp_ntz>
+-- !query output
+2011-11-11 11:11:13
+
+
+-- !query
+select timestamp'2011-11-11 11:11:11' - interval '2' second
+-- !query schema
+struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' - INTERVAL '02' 
SECOND:timestamp_ntz>
+-- !query output
+2011-11-11 11:11:09
+
+
+-- !query
+select '2011-11-11 11:11:11' - interval '2' second
+-- !query schema
+struct<2011-11-11 11:11:11 - INTERVAL '02' SECOND:string>
+-- !query output
+2011-11-11 11:11:09
+
+
+-- !query
+select '1' - interval '2' second
+-- !query schema
+struct<1 - INTERVAL '02' SECOND:string>
+-- !query output
+NULL
+
+
+-- !query
+select 1 - interval '2' second
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: 
argument 1 requires (timestamp or timestamp without time zone) type, however, 
'1' is of int type.; line 1 pos 7
+
+
+-- !query
+select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
+-- !query schema
+struct<(DATE '2020-01-01' - TIMESTAMP_NTZ '2019-10-06 
10:11:12.345678'):interval day to second>
+-- !query output
+86 13:48:47.654322000
+
+
+-- !query
+select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01'
+-- !query schema
+struct<(TIMESTAMP_NTZ '2019-10-06 10:11:12.345678' - DATE 
'2020-01-01'):interval day to second>
+-- !query output
+-86 13:48:47.654322000
+
+
+-- !query
+select timestamp'2019-10-06 10:11:12.345678' - null
+-- !query schema
+struct<(TIMESTAMP_NTZ '2019-10-06 10:11:12.345678' - NULL):interval day to 
second>
+-- !query output
+NULL
+
+
+-- !query
+select null - timestamp'2019-10-06 10:11:12.345678'
+-- !query schema
+struct<(NULL - TIMESTAMP_NTZ '2019-10-06 10:11:12.345678'):interval day to 
second>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG')
+-- !query schema
+struct<>
+-- !query output
+java.lang.RuntimeException
+Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. You can 
form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE')
+-- !query schema
+struct<>
+-- !query output
+java.lang.RuntimeException
+Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. You 
can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
+-- !query schema
+struct<>
+-- !query output
+java.lang.RuntimeException
+Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. You can 
form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkUpgradeException
+You may get a different result due to the upgrading of Spark 3.0: Fail to 
recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set 
spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before 
Spark 3.0. 2) You can form a valid datetime pattern with the guide from 
https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select from_json('{"t":"26/October/2015"}', 't Timestamp', 
map('timestampFormat', 'dd/MMMMM/yyyy'))
+-- !query schema
+struct<from_json({"t":"26/October/2015"}):struct<t:timestamp_ntz>>
+-- !query output
+{"t":null}
+
+
+-- !query
+select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 
'dd/MMMMM/yyyy'))
+-- !query schema
+struct<>
+-- !query output
+java.lang.Exception

Review comment:
       This test is under the `timestampNTZ` folder, which sets 
`spark.sql.timestampType` to `TIMESTAMP_NTZ`.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to