This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch branch-4.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-4.0 by this push:
     new 6c1be821d2fd [SPARK-52499][SQL] Add more data type tests for SQL UDFs
6c1be821d2fd is described below

commit 6c1be821d2fd1f04d21185c9b7041ccea7cd15fe
Author: Allison Wang <allison.w...@databricks.com>
AuthorDate: Tue Jun 24 08:26:28 2025 +0800

    [SPARK-52499][SQL] Add more data type tests for SQL UDFs
    
    ### What changes were proposed in this pull request?
    
    This PR adds more SQL tests covering the usage of various Spark data types in SQL UDFs.
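    
    For example, each data type is exercised with a small SQL UDF plus calls at
    and beyond the type's boundaries; this excerpt is taken verbatim from the
    added sql-udf.sql input file:
    
        -- Byte
        CREATE FUNCTION foo9b(a BYTE) RETURNS BYTE RETURN CAST(a AS SHORT) + 1;
        SELECT foo9b(126);
        SELECT foo9b(127);
        SELECT foo9b(128);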
    
    ### Why are the changes needed?
    
    To improve test coverage for SQL UDFs.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    Test only
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #51193 from allisonwang-db/spark-52499-data-type-tests.
    
    Authored-by: Allison Wang <allison.w...@databricks.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
    (cherry picked from commit 81ca4fd5479c9af263d639e5686d1c21f3b525b7)
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../sql-tests/analyzer-results/sql-udf.sql.out     | 880 +++++++++++++++++++
 .../test/resources/sql-tests/inputs/sql-udf.sql    | 160 ++++
 .../resources/sql-tests/results/sql-udf.sql.out    | 929 +++++++++++++++++++++
 3 files changed, 1969 insertions(+)

diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/sql-udf.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/sql-udf.sql.out
index 5b4848d91fe0..7d8111ce47f3 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/sql-udf.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/sql-udf.sql.out
@@ -779,6 +779,886 @@ org.apache.spark.sql.AnalysisException
 }
 
 
+-- !query
+CREATE FUNCTION foo51() RETURNS INT RETURN (SELECT a FROM VALUES(1), (2) AS T(a))
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo51`"
+  }
+}
+
+
+-- !query
+SELECT foo51()
+-- !query analysis
+Project [spark_catalog.default.foo51() AS spark_catalog.default.foo51()#x]
+:  +- Project [a#x]
+:     +- SubqueryAlias T
+:        +- LocalRelation [a#x]
++- Project
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo52() RETURNS INT RETURN (SELECT 1 FROM VALUES(1) WHERE 1 = 0)
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo52`"
+  }
+}
+
+
+-- !query
+SELECT foo52()
+-- !query analysis
+Project [spark_catalog.default.foo52() AS spark_catalog.default.foo52()#x]
+:  +- Project [1 AS 1#x]
+:     +- Filter (1 = 0)
+:        +- LocalRelation [col1#x]
++- Project
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo6c(` a` INT, a INT, `a b` INT) RETURNS INT RETURN 1
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo6c`"
+  }
+}
+
+
+-- !query
+SELECT foo6c(1, 2, 3)
+-- !query analysis
+Project [spark_catalog.default.foo6c( a#x, a#x, a b#x) AS spark_catalog.default.foo6c(1, 2, 3)#x]
++- Project [cast(1 as int) AS  a#x, cast(2 as int) AS a#x, cast(3 as int) AS a b#x]
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo6d() RETURNS TABLE(` a` INT, a INT, `a b` INT) RETURN SELECT 1, 2, 3
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo6d`"
+  }
+}
+
+
+-- !query
+SELECT * FROM foo6d()
+-- !query analysis
+Project [ a#x, a#x, a b#x]
++- SQLFunctionNode spark_catalog.default.foo6d
+   +- SubqueryAlias foo6d
+      +- Project [cast(1#x as int) AS  a#x, cast(2#x as int) AS a#x, cast(3#x as int) AS a b#x]
+         +- Project [1 AS 1#x, 2 AS 2#x, 3 AS 3#x]
+            +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo7a(a STRING, b STRING, c STRING) RETURNS STRING RETURN
+SELECT 'Foo.a: ' || a ||  ' Foo.a: ' || foo7a.a
+       || ' T.b: ' ||  b || ' Foo.b: ' || foo7a.b
+       || ' T.c: ' || c || ' T.c: ' || t.c FROM VALUES('t.b', 't.c') AS T(b, c)
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo7a`"
+  }
+}
+
+
+-- !query
+SELECT foo7a('Foo.a', 'Foo.b', 'Foo.c')
+-- !query analysis
+Project [spark_catalog.default.foo7a(a#x, b#x, c#x) AS spark_catalog.default.foo7a(Foo.a, Foo.b, Foo.c)#x]
+:  +- Project [concat(concat(concat(concat(concat(concat(concat(concat(concat(concat(concat(Foo.a: , outer(a#x)),  Foo.a: ), outer(a#x)),  T.b: ), b#x),  Foo.b: ), outer(b#x)),  T.c: ), c#x),  T.c: ), c#x) AS concat(concat(concat(concat(concat(concat(concat(concat(concat(concat(concat(Foo.a: , outer(foo7a.a)),  Foo.a: ), outer(foo7a.a)),  T.b: ), b),  Foo.b: ), outer(foo7a.b)),  T.c: ), c),  T.c: ), c)#x]
+:     +- SubqueryAlias T
+:        +- LocalRelation [b#x, c#x]
++- Project [cast(Foo.a as string) AS a#x, cast(Foo.b as string) AS b#x, cast(Foo.c as string) AS c#x]
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo7at(a STRING, b STRING, c STRING) RETURNS TABLE (a STRING, b STRING, c STRING, d STRING, e STRING) RETURN
+SELECT CONCAT('Foo.a: ', a), CONCAT('Foo.b: ', foo7at.b), CONCAT('T.b: ', b),
+       CONCAT('Foo.c: ', foo7at.c), CONCAT('T.c: ', c)
+FROM VALUES ('t.b', 't.c') AS T(b, c)
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo7at`"
+  }
+}
+
+
+-- !query
+SELECT * FROM foo7at('Foo.a', 'Foo.b', 'Foo.c')
+-- !query analysis
+Project [a#x, b#x, c#x, d#x, e#x]
++- SQLFunctionNode spark_catalog.default.foo7at
+   +- SubqueryAlias foo7at
+      +- Project [cast(concat(Foo.a: , outer(foo7at.a))#x as string) AS a#x, cast(concat(Foo.b: , outer(foo7at.b))#x as string) AS b#x, cast(concat(T.b: , b)#x as string) AS c#x, cast(concat(Foo.c: , outer(foo7at.c))#x as string) AS d#x, cast(concat(T.c: , c)#x as string) AS e#x]
+         +- Project [concat(Foo.a: , cast(Foo.a as string)) AS concat(Foo.a: , outer(foo7at.a))#x, concat(Foo.b: , cast(Foo.b as string)) AS concat(Foo.b: , outer(foo7at.b))#x, concat(T.b: , b#x) AS concat(T.b: , b)#x, concat(Foo.c: , cast(Foo.c as string)) AS concat(Foo.c: , outer(foo7at.c))#x, concat(T.c: , c#x) AS concat(T.c: , c)#x]
+            +- SubqueryAlias T
+               +- LocalRelation [b#x, c#x]
+
+
+-- !query
+CREATE FUNCTION foo9a(a BOOLEAN) RETURNS BOOLEAN RETURN NOT a
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo9a`"
+  }
+}
+
+
+-- !query
+SELECT foo9a(true)
+-- !query analysis
+Project [spark_catalog.default.foo9a(a#x) AS spark_catalog.default.foo9a(true)#x]
++- Project [cast(true as boolean) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9a(5)
+-- !query analysis
+Project [spark_catalog.default.foo9a(a#x) AS spark_catalog.default.foo9a(5)#x]
++- Project [cast(5 as boolean) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9a('Nonsense')
+-- !query analysis
+Project [spark_catalog.default.foo9a(a#x) AS spark_catalog.default.foo9a(Nonsense)#x]
++- Project [cast(Nonsense as boolean) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo9b(a BYTE) RETURNS BYTE RETURN CAST(a AS SHORT) + 1
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo9b`"
+  }
+}
+
+
+-- !query
+SELECT foo9b(126)
+-- !query analysis
+Project [spark_catalog.default.foo9b(a#x) AS spark_catalog.default.foo9b(126)#x]
++- Project [cast(126 as tinyint) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9b(127)
+-- !query analysis
+Project [spark_catalog.default.foo9b(a#x) AS spark_catalog.default.foo9b(127)#x]
++- Project [cast(127 as tinyint) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9b(128)
+-- !query analysis
+Project [spark_catalog.default.foo9b(a#x) AS spark_catalog.default.foo9b(128)#x]
++- Project [cast(128 as tinyint) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo9c(a SHORT) RETURNS SHORT RETURN CAST(a AS INTEGER) + 1
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo9c`"
+  }
+}
+
+
+-- !query
+SELECT foo9c(32766)
+-- !query analysis
+Project [spark_catalog.default.foo9c(a#x) AS spark_catalog.default.foo9c(32766)#x]
++- Project [cast(32766 as smallint) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9c(32767)
+-- !query analysis
+Project [spark_catalog.default.foo9c(a#x) AS spark_catalog.default.foo9c(32767)#x]
++- Project [cast(32767 as smallint) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9c(32768)
+-- !query analysis
+Project [spark_catalog.default.foo9c(a#x) AS spark_catalog.default.foo9c(32768)#x]
++- Project [cast(32768 as smallint) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo9d(a INTEGER) RETURNS INTEGER RETURN CAST(a AS BIGINT) + 1
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo9d`"
+  }
+}
+
+
+-- !query
+SELECT foo9d(2147483647 - 1)
+-- !query analysis
+Project [spark_catalog.default.foo9d(a#x) AS spark_catalog.default.foo9d((2147483647 - 1))#x]
++- Project [cast((2147483647 - 1) as int) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9d(2147483647)
+-- !query analysis
+Project [spark_catalog.default.foo9d(a#x) AS spark_catalog.default.foo9d(2147483647)#x]
++- Project [cast(2147483647 as int) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9d(2147483647 + 1)
+-- !query analysis
+Project [spark_catalog.default.foo9d(a#x) AS spark_catalog.default.foo9d((2147483647 + 1))#x]
++- Project [cast((2147483647 + 1) as int) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo9e(a BIGINT) RETURNS BIGINT RETURN CAST(a AS DECIMAL(20, 0)) + 1
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo9e`"
+  }
+}
+
+
+-- !query
+SELECT foo9e(9223372036854775807 - 1)
+-- !query analysis
+Project [spark_catalog.default.foo9e(a#xL) AS spark_catalog.default.foo9e((9223372036854775807 - 1))#xL]
++- Project [cast((9223372036854775807 - cast(1 as bigint)) as bigint) AS a#xL]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9e(9223372036854775807)
+-- !query analysis
+Project [spark_catalog.default.foo9e(a#xL) AS spark_catalog.default.foo9e(9223372036854775807)#xL]
++- Project [cast(9223372036854775807 as bigint) AS a#xL]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9e(9223372036854775807.0 + 1)
+-- !query analysis
+Project [spark_catalog.default.foo9e(a#xL) AS spark_catalog.default.foo9e((9223372036854775807.0 + 1))#xL]
++- Project [cast((9223372036854775807.0 + cast(1 as decimal(1,0))) as bigint) AS a#xL]
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo9f(a DECIMAL( 5, 2 )) RETURNS DECIMAL (5, 2) RETURN CAST(a AS DECIMAL(6, 2)) + 1
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo9f`"
+  }
+}
+
+
+-- !query
+SELECT foo9f(999 - 1)
+-- !query analysis
+Project [spark_catalog.default.foo9f(a#x) AS spark_catalog.default.foo9f((999 - 1))#x]
++- Project [cast((999 - 1) as decimal(5,2)) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9f(999)
+-- !query analysis
+Project [spark_catalog.default.foo9f(a#x) AS spark_catalog.default.foo9f(999)#x]
++- Project [cast(999 as decimal(5,2)) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9f(999 + 1)
+-- !query analysis
+Project [spark_catalog.default.foo9f(a#x) AS spark_catalog.default.foo9f((999 + 1))#x]
++- Project [cast((999 + 1) as decimal(5,2)) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo9g(a FLOAT, b String) RETURNS FLOAT RETURN b || CAST(a AS String)
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo9g`"
+  }
+}
+
+
+-- !query
+SELECT foo9g(123.23, '7')
+-- !query analysis
+Project [spark_catalog.default.foo9g(a#x, b#x) AS spark_catalog.default.foo9g(123.23, 7)#x]
++- Project [cast(123.23 as float) AS a#x, cast(7 as string) AS b#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9g('hello', '7')
+-- !query analysis
+Project [spark_catalog.default.foo9g(a#x, b#x) AS spark_catalog.default.foo9g(hello, 7)#x]
++- Project [cast(hello as float) AS a#x, cast(7 as string) AS b#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9g(123.23, 'q')
+-- !query analysis
+Project [spark_catalog.default.foo9g(a#x, b#x) AS spark_catalog.default.foo9g(123.23, q)#x]
++- Project [cast(123.23 as float) AS a#x, cast(q as string) AS b#x]
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo9h(a DOUBLE, b String) RETURNS DOUBLE RETURN b || CAST(a AS String)
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo9h`"
+  }
+}
+
+
+-- !query
+SELECT foo9h(123.23, '7')
+-- !query analysis
+Project [spark_catalog.default.foo9h(a#x, b#x) AS spark_catalog.default.foo9h(123.23, 7)#x]
++- Project [cast(123.23 as double) AS a#x, cast(7 as string) AS b#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9h('hello', '7')
+-- !query analysis
+Project [spark_catalog.default.foo9h(a#x, b#x) AS spark_catalog.default.foo9h(hello, 7)#x]
++- Project [cast(hello as double) AS a#x, cast(7 as string) AS b#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9h(123.23, 'q')
+-- !query analysis
+Project [spark_catalog.default.foo9h(a#x, b#x) AS spark_catalog.default.foo9h(123.23, q)#x]
++- Project [cast(123.23 as double) AS a#x, cast(q as string) AS b#x]
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo9i(a VARCHAR(10), b VARCHAR(10)) RETURNS VARCHAR(12) RETURN a || b
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "UNSUPPORTED_CHAR_OR_VARCHAR_AS_STRING",
+  "sqlState" : "0A000"
+}
+
+
+-- !query
+CREATE FUNCTION foo9j(a STRING, b STRING) RETURNS STRING RETURN a || b
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo9j`"
+  }
+}
+
+
+-- !query
+SELECT foo9j('1234567890', '12')
+-- !query analysis
+Project [spark_catalog.default.foo9j(a#x, b#x) AS spark_catalog.default.foo9j(1234567890, 12)#x]
++- Project [cast(1234567890 as string) AS a#x, cast(12 as string) AS b#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9j(12345678901, '12')
+-- !query analysis
+Project [spark_catalog.default.foo9j(a#x, b#x) AS spark_catalog.default.foo9j(12345678901, 12)#x]
++- Project [cast(12345678901 as string) AS a#x, cast(12 as string) AS b#x]
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo9l(a DATE, b INTERVAL) RETURNS DATE RETURN a + b
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo9l`"
+  }
+}
+
+
+-- !query
+SELECT foo9l(DATE '2020-02-02', INTERVAL '1' YEAR)
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITHOUT_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "sqlExpr" : "\"INTERVAL '1' YEAR\"",
+    "srcType" : "\"INTERVAL YEAR\"",
+    "targetType" : "\"INTERVAL\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 50,
+    "fragment" : "foo9l(DATE '2020-02-02', INTERVAL '1' YEAR)"
+  } ]
+}
+
+
+-- !query
+SELECT foo9l('2020-02-02', INTERVAL '1' YEAR)
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITHOUT_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "sqlExpr" : "\"INTERVAL '1' YEAR\"",
+    "srcType" : "\"INTERVAL YEAR\"",
+    "targetType" : "\"INTERVAL\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 45,
+    "fragment" : "foo9l('2020-02-02', INTERVAL '1' YEAR)"
+  } ]
+}
+
+
+-- !query
+SELECT foo9l(DATE '-7', INTERVAL '1' YEAR)
+-- !query analysis
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_TYPED_LITERAL",
+  "sqlState" : "42604",
+  "messageParameters" : {
+    "value" : "'-7'",
+    "valueType" : "\"DATE\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 14,
+    "stopIndex" : 22,
+    "fragment" : "DATE '-7'"
+  } ]
+}
+
+
+-- !query
+SELECT foo9l(DATE '2020-02-02', INTERVAL '9999999' YEAR)
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITHOUT_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "sqlExpr" : "\"INTERVAL '9999999' YEAR\"",
+    "srcType" : "\"INTERVAL YEAR\"",
+    "targetType" : "\"INTERVAL\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 56,
+    "fragment" : "foo9l(DATE '2020-02-02', INTERVAL '9999999' YEAR)"
+  } ]
+}
+
+
+-- !query
+CREATE FUNCTION foo9m(a TIMESTAMP, b INTERVAL) RETURNS TIMESTAMP RETURN a + b
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo9m`"
+  }
+}
+
+
+-- !query
+SELECT foo9m(TIMESTAMP'2020-02-02 12:15:16.123', INTERVAL '1' YEAR)
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITHOUT_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "sqlExpr" : "\"INTERVAL '1' YEAR\"",
+    "srcType" : "\"INTERVAL YEAR\"",
+    "targetType" : "\"INTERVAL\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 67,
+    "fragment" : "foo9m(TIMESTAMP'2020-02-02 12:15:16.123', INTERVAL '1' YEAR)"
+  } ]
+}
+
+
+-- !query
+SELECT foo9m('2020-02-02 12:15:16.123', INTERVAL '1' YEAR)
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITHOUT_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "sqlExpr" : "\"INTERVAL '1' YEAR\"",
+    "srcType" : "\"INTERVAL YEAR\"",
+    "targetType" : "\"INTERVAL\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 58,
+    "fragment" : "foo9m('2020-02-02 12:15:16.123', INTERVAL '1' YEAR)"
+  } ]
+}
+
+
+-- !query
+SELECT foo9m(TIMESTAMP'2020-02-02 12:15:16.123', INTERVAL '999999' YEAR)
+-- !query analysis
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITHOUT_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "sqlExpr" : "\"INTERVAL '999999' YEAR\"",
+    "srcType" : "\"INTERVAL YEAR\"",
+    "targetType" : "\"INTERVAL\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 72,
+    "fragment" : "foo9m(TIMESTAMP'2020-02-02 12:15:16.123', INTERVAL '999999' 
YEAR)"
+  } ]
+}
+
+
+-- !query
+CREATE FUNCTION foo9n(a ARRAY<INTEGER>) RETURNS ARRAY<INTEGER> RETURN a
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo9n`"
+  }
+}
+
+
+-- !query
+SELECT foo9n(ARRAY(1, 2, 3))
+-- !query analysis
+Project [spark_catalog.default.foo9n(a#x) AS spark_catalog.default.foo9n(array(1, 2, 3))#x]
++- Project [cast(array(1, 2, 3) as array<int>) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9n(from_json('[1, 2, 3]', 'array<int>'))
+-- !query analysis
+Project [spark_catalog.default.foo9n(a#x) AS spark_catalog.default.foo9n(from_json([1, 2, 3]))#x]
++- Project [cast(from_json(ArrayType(IntegerType,true), [1, 2, 3], Some(America/Los_Angeles), false) as array<int>) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo9o(a MAP<STRING, INTEGER>) RETURNS MAP<STRING, INTEGER> RETURN a
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo9o`"
+  }
+}
+
+
+-- !query
+SELECT foo9o(MAP('hello', 1, 'world', 2))
+-- !query analysis
+Project [spark_catalog.default.foo9o(a#x) AS spark_catalog.default.foo9o(map(hello, 1, world, 2))#x]
++- Project [cast(map(hello, 1, world, 2) as map<string,int>) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9o(from_json('{"hello":1, "world":2}', 'map<string,int>'))
+-- !query analysis
+Project [spark_catalog.default.foo9o(a#x) AS spark_catalog.default.foo9o(entries)#x]
++- Project [cast(from_json(MapType(StringType,IntegerType,true), {"hello":1, "world":2}, Some(America/Los_Angeles), false) as map<string,int>) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo9p(a STRUCT<a1: INTEGER, a2: STRING>) RETURNS STRUCT<a1: INTEGER, a2: STRING> RETURN a
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo9p`"
+  }
+}
+
+
+-- !query
+SELECT foo9p(STRUCT(1, 'hello'))
+-- !query analysis
+Project [spark_catalog.default.foo9p(a#x) AS spark_catalog.default.foo9p(struct(1, hello))#x]
++- Project [cast(struct(col1, 1, col2, hello) as struct<a1:int,a2:string>) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9p(from_json('{1:"hello"}', 'struct<a1:int, a2:string>'))
+-- !query analysis
+Project [spark_catalog.default.foo9p(a#x) AS spark_catalog.default.foo9p(from_json({1:"hello"}))#x]
++- Project [cast(from_json(StructField(a1,IntegerType,true), StructField(a2,StringType,true), {1:"hello"}, Some(America/Los_Angeles), false) as struct<a1:int,a2:string>) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo9q(a ARRAY<STRUCT<a1: INT, a2: STRING>>) RETURNS ARRAY<STRUCT<a1: INT, a2: STRING>> RETURN a
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo9q`"
+  }
+}
+
+
+-- !query
+SELECT foo9q(ARRAY(STRUCT(1, 'hello'), STRUCT(2, 'world')))
+-- !query analysis
+Project [spark_catalog.default.foo9q(a#x) AS spark_catalog.default.foo9q(array(struct(1, hello), struct(2, world)))#x]
++- Project [cast(array(struct(col1, 1, col2, hello), struct(col1, 2, col2, world)) as array<struct<a1:int,a2:string>>) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9q(ARRAY(NAMED_STRUCT('x', 1, 'y', 'hello'), NAMED_STRUCT('x', 2, 'y', 'world')))
+-- !query analysis
+Project [spark_catalog.default.foo9q(a#x) AS spark_catalog.default.foo9q(array(named_struct(x, 1, y, hello), named_struct(x, 2, y, world)))#x]
++- Project [cast(array(named_struct(x, 1, y, hello), named_struct(x, 2, y, world)) as array<struct<a1:int,a2:string>>) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9q(from_json('[{1:"hello"}, {2:"world"}]', 'array<struct<a1:int,a2:string>>'))
+-- !query analysis
+Project [spark_catalog.default.foo9q(a#x) AS spark_catalog.default.foo9q(from_json([{1:"hello"}, {2:"world"}]))#x]
++- Project [cast(from_json(ArrayType(StructType(StructField(a1,IntegerType,true),StructField(a2,StringType,true)),true), [{1:"hello"}, {2:"world"}], Some(America/Los_Angeles), false) as array<struct<a1:int,a2:string>>) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+CREATE FUNCTION foo9r(a ARRAY<MAP<STRING, INT>>) RETURNS ARRAY<MAP<STRING, INT>> RETURN a
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo9r`"
+  }
+}
+
+
+-- !query
+SELECT foo9r(ARRAY(MAP('hello', 1), MAP('world', 2)))
+-- !query analysis
+Project [spark_catalog.default.foo9r(a#x) AS spark_catalog.default.foo9r(array(map(hello, 1), map(world, 2)))#x]
++- Project [cast(array(map(hello, 1), map(world, 2)) as array<map<string,int>>) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+SELECT foo9r(from_json('[{"hello":1}, {"world":2}]', 'array<map<string,int>>'))
+-- !query analysis
+Project [spark_catalog.default.foo9r(a#x) AS spark_catalog.default.foo9r(from_json([{"hello":1}, {"world":2}]))#x]
++- Project [cast(from_json(ArrayType(MapType(StringType,IntegerType,true),true), [{"hello":1}, {"world":2}], Some(America/Los_Angeles), false) as array<map<string,int>>) AS a#x]
+   +- OneRowRelation
+
+
+-- !query
+CREATE OR REPLACE FUNCTION foo1_10(a INT) RETURNS INT RETURN a + 2
+-- !query analysis
+CreateSQLFunctionCommand spark_catalog.default.foo1_10, a INT, INT, a + 2, false, false, false, true
+
+
+-- !query
+CREATE OR REPLACE FUNCTION bar1_10(b INT) RETURNS STRING RETURN foo1_10(TRY_CAST(b AS STRING))
+-- !query analysis
+CreateSQLFunctionCommand spark_catalog.default.bar1_10, b INT, STRING, foo1_10(TRY_CAST(b AS STRING)), false, false, false, true
+
+
+-- !query
+SELECT bar1_10(3)
+-- !query analysis
+Project [spark_catalog.default.bar1_10(b#x) AS spark_catalog.default.bar1_10(3)#x]
++- Project [b#x, cast(try_cast(b#x as string) as int) AS a#x]
+   +- Project [cast(3 as int) AS b#x]
+      +- OneRowRelation
+
+
 -- !query
 CREATE FUNCTION foo2_1a(a INT) RETURNS INT RETURN a
 -- !query analysis
diff --git a/sql/core/src/test/resources/sql-tests/inputs/sql-udf.sql b/sql/core/src/test/resources/sql-tests/inputs/sql-udf.sql
index 2d86c6966178..849207119a50 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/sql-udf.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/sql-udf.sql
@@ -183,6 +183,166 @@ CREATE FUNCTION foo41() RETURNS INT RETURN SELECT 1;
 -- Expect failure
 CREATE FUNCTION foo42() RETURNS TABLE(a INT) RETURN 1;
 
+-- 1.5 Scalar function returns subquery with more than one row or no rows
+
+-- 1.5.a More than one row
+CREATE FUNCTION foo51() RETURNS INT RETURN (SELECT a FROM VALUES(1), (2) AS T(a));
+SELECT foo51();
+
+-- 1.5.b No Rows
+CREATE FUNCTION foo52() RETURNS INT RETURN (SELECT 1 FROM VALUES(1) WHERE 1 = 0);
+-- Expect Success: NULL
+SELECT foo52();
+
+-- 1.6 Difficult identifiers
+-- 1.6.a Space in the schema name
+-- UNSUPPORTED BY CREATE SCHEMA
+-- CREATE SCHEMA `a b`;
+
+-- CREATE FUNCTION `a b`.foo6a() RETURNS INT RETURN 1;
+-- SELECT `a b`.foo6a();
+
+-- DROP FUNCTION `a b`.foo6a;
+-- DROP SCHEMA `a b`;
+
+-- 1.6.b Space in a function name
+-- Default Hive configuration does not allow function name with space
+-- CREATE FUNCTION `foo 6 b`() RETURNS INT RETURN 1;
+-- SELECT `foo 6 b`();
+-- DROP FUNCTION `foo 6 b`;
+
+-- 1.6.c Spaces in parameter names
+CREATE FUNCTION foo6c(` a` INT, a INT, `a b` INT) RETURNS INT RETURN 1;
+SELECT foo6c(1, 2, 3);
+
+-- 1.6.d Spaces in RETURNS column list
+CREATE FUNCTION foo6d() RETURNS TABLE(` a` INT, a INT, `a b` INT) RETURN SELECT 1, 2, 3;
+SELECT * FROM foo6d();
+
+-- 1.7 Parameter resolution
+CREATE FUNCTION foo7a(a STRING, b STRING, c STRING) RETURNS STRING RETURN
+SELECT 'Foo.a: ' || a ||  ' Foo.a: ' || foo7a.a
+       || ' T.b: ' ||  b || ' Foo.b: ' || foo7a.b
+       || ' T.c: ' || c || ' T.c: ' || t.c FROM VALUES('t.b', 't.c') AS T(b, c);
+
+SELECT foo7a('Foo.a', 'Foo.b', 'Foo.c');
+
+CREATE FUNCTION foo7at(a STRING, b STRING, c STRING) RETURNS TABLE (a STRING, b STRING, c STRING, d STRING, e STRING) RETURN
+SELECT CONCAT('Foo.a: ', a), CONCAT('Foo.b: ', foo7at.b), CONCAT('T.b: ', b),
+       CONCAT('Foo.c: ', foo7at.c), CONCAT('T.c: ', c)
+FROM VALUES ('t.b', 't.c') AS T(b, c);
+SELECT * FROM foo7at('Foo.a', 'Foo.b', 'Foo.c');
+
+-- 1.8 Comments
+-- Need to verify comments in non-sql tests
+
+-- 1.9 Test all data types
+-- Boolean
+CREATE FUNCTION foo9a(a BOOLEAN) RETURNS BOOLEAN RETURN NOT a;
+SELECT foo9a(true);
+
+-- Expect error
+SELECT foo9a(5);
+SELECT foo9a('Nonsense');
+
+-- Byte
+CREATE FUNCTION foo9b(a BYTE) RETURNS BYTE RETURN CAST(a AS SHORT) + 1;
+SELECT foo9b(126);
+SELECT foo9b(127);
+SELECT foo9b(128);
+
+-- Short
+CREATE FUNCTION foo9c(a SHORT) RETURNS SHORT RETURN CAST(a AS INTEGER) + 1;
+SELECT foo9c(32766);
+SELECT foo9c(32767);
+SELECT foo9c(32768);
+
+-- Integer
+CREATE FUNCTION foo9d(a INTEGER) RETURNS INTEGER RETURN CAST(a AS BIGINT) + 1;
+SELECT foo9d(2147483647 - 1);
+SELECT foo9d(2147483647);
+SELECT foo9d(2147483647 + 1);
+
+-- Bigint
+CREATE FUNCTION foo9e(a BIGINT) RETURNS BIGINT RETURN CAST(a AS DECIMAL(20, 0)) + 1;
+SELECT foo9e(9223372036854775807 - 1);
+SELECT foo9e(9223372036854775807);
+SELECT foo9e(9223372036854775807.0 + 1);
+
+-- DECIMAL
+CREATE FUNCTION foo9f(a DECIMAL( 5, 2 )) RETURNS DECIMAL (5, 2) RETURN CAST(a AS DECIMAL(6, 2)) + 1;
+SELECT foo9f(999 - 1);
+SELECT foo9f(999);
+SELECT foo9f(999 + 1);
+
+-- FLOAT
+CREATE FUNCTION foo9g(a FLOAT, b String) RETURNS FLOAT RETURN b || CAST(a AS String);
+SELECT foo9g(123.23, '7');
+SELECT foo9g('hello', '7');
+SELECT foo9g(123.23, 'q');
+
+-- DOUBLE
+CREATE FUNCTION foo9h(a DOUBLE, b String) RETURNS DOUBLE RETURN b || CAST(a AS String);
+SELECT foo9h(123.23, '7');
+SELECT foo9h('hello', '7');
+SELECT foo9h(123.23, 'q');
+
+-- VARCHAR
+-- Expect failure: char/varchar type can only be used in the table schema.
+CREATE FUNCTION foo9i(a VARCHAR(10), b VARCHAR(10)) RETURNS VARCHAR(12) RETURN a || b;
+-- SELECT foo9i('1234567890', '');
+-- SELECT foo9i('12345678901', '');
+-- SELECT foo9i('1234567890', '1');
+
+-- STRING
+CREATE FUNCTION foo9j(a STRING, b STRING) RETURNS STRING RETURN a || b;
+SELECT foo9j('1234567890', '12');
+SELECT foo9j(12345678901, '12');
+
+-- DATE
+CREATE FUNCTION foo9l(a DATE, b INTERVAL) RETURNS DATE RETURN a + b;
+SELECT foo9l(DATE '2020-02-02', INTERVAL '1' YEAR);
+SELECT foo9l('2020-02-02', INTERVAL '1' YEAR);
+SELECT foo9l(DATE '-7', INTERVAL '1' YEAR);
+SELECT foo9l(DATE '2020-02-02', INTERVAL '9999999' YEAR);
+
+-- TIMESTAMP
+CREATE FUNCTION foo9m(a TIMESTAMP, b INTERVAL) RETURNS TIMESTAMP RETURN a + b;
+SELECT foo9m(TIMESTAMP'2020-02-02 12:15:16.123', INTERVAL '1' YEAR);
+SELECT foo9m('2020-02-02 12:15:16.123', INTERVAL '1' YEAR);
+SELECT foo9m(TIMESTAMP'2020-02-02 12:15:16.123', INTERVAL '999999' YEAR);
+
+-- ARRAY
+CREATE FUNCTION foo9n(a ARRAY<INTEGER>) RETURNS ARRAY<INTEGER> RETURN a;
+SELECT foo9n(ARRAY(1, 2, 3));
+SELECT foo9n(from_json('[1, 2, 3]', 'array<int>'));
+
+-- MAP
+CREATE FUNCTION foo9o(a MAP<STRING, INTEGER>) RETURNS MAP<STRING, INTEGER> RETURN a;
+SELECT foo9o(MAP('hello', 1, 'world', 2));
+SELECT foo9o(from_json('{"hello":1, "world":2}', 'map<string,int>'));
+
+-- STRUCT
+CREATE FUNCTION foo9p(a STRUCT<a1: INTEGER, a2: STRING>) RETURNS STRUCT<a1: INTEGER, a2: STRING> RETURN a;
+SELECT foo9p(STRUCT(1, 'hello'));
+SELECT foo9p(from_json('{1:"hello"}', 'struct<a1:int, a2:string>'));
+
+-- ARRAY of STRUCT
+CREATE FUNCTION foo9q(a ARRAY<STRUCT<a1: INT, a2: STRING>>) RETURNS ARRAY<STRUCT<a1: INT, a2: STRING>> RETURN a;
+SELECT foo9q(ARRAY(STRUCT(1, 'hello'), STRUCT(2, 'world')));
+SELECT foo9q(ARRAY(NAMED_STRUCT('x', 1, 'y', 'hello'), NAMED_STRUCT('x', 2, 'y', 'world')));
+SELECT foo9q(from_json('[{1:"hello"}, {2:"world"}]', 'array<struct<a1:int,a2:string>>'));
+
+-- ARRAY of MAP
+CREATE FUNCTION foo9r(a ARRAY<MAP<STRING, INT>>) RETURNS ARRAY<MAP<STRING, INT>> RETURN a;
+SELECT foo9r(ARRAY(MAP('hello', 1), MAP('world', 2)));
+SELECT foo9r(from_json('[{"hello":1}, {"world":2}]', 'array<map<string,int>>'));
+
+-- 1.10 Proper name resolution when referencing another function
+CREATE OR REPLACE FUNCTION foo1_10(a INT) RETURNS INT RETURN a + 2;
+CREATE OR REPLACE FUNCTION bar1_10(b INT) RETURNS STRING RETURN foo1_10(TRY_CAST(b AS STRING));
+SELECT bar1_10(3);
+
 -------------------------------
 -- 2. Scalar SQL UDF
 -- 2.1 deterministic simple expressions
diff --git a/sql/core/src/test/resources/sql-tests/results/sql-udf.sql.out b/sql/core/src/test/resources/sql-tests/results/sql-udf.sql.out
index b4bf5dde3852..eab2470d3ffb 100644
--- a/sql/core/src/test/resources/sql-tests/results/sql-udf.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/sql-udf.sql.out
@@ -765,6 +765,935 @@ org.apache.spark.sql.AnalysisException
 }
 
 
+-- !query
+CREATE FUNCTION foo51() RETURNS INT RETURN (SELECT a FROM VALUES(1), (2) AS T(a))
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo51()
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkException
+{
+  "errorClass" : "SCALAR_SUBQUERY_TOO_MANY_ROWS",
+  "sqlState" : "21000",
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "fragment" : ""
+  } ]
+}
+
+
+-- !query
+CREATE FUNCTION foo52() RETURNS INT RETURN (SELECT 1 FROM VALUES(1) WHERE 1 = 0)
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo52()
+-- !query schema
+struct<spark_catalog.default.foo52():int>
+-- !query output
+NULL
+
+
+-- !query
+CREATE FUNCTION foo6c(` a` INT, a INT, `a b` INT) RETURNS INT RETURN 1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo6c(1, 2, 3)
+-- !query schema
+struct<spark_catalog.default.foo6c(1, 2, 3):int>
+-- !query output
+1
+
+
+-- !query
+CREATE FUNCTION foo6d() RETURNS TABLE(` a` INT, a INT, `a b` INT) RETURN SELECT 1, 2, 3
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT * FROM foo6d()
+-- !query schema
+struct< a:int,a:int,a b:int>
+-- !query output
+1      2       3
+
+
+-- !query
+CREATE FUNCTION foo7a(a STRING, b STRING, c STRING) RETURNS STRING RETURN
+SELECT 'Foo.a: ' || a ||  ' Foo.a: ' || foo7a.a
+       || ' T.b: ' ||  b || ' Foo.b: ' || foo7a.b
+       || ' T.c: ' || c || ' T.c: ' || t.c FROM VALUES('t.b', 't.c') AS T(b, c)
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo7a('Foo.a', 'Foo.b', 'Foo.c')
+-- !query schema
+struct<spark_catalog.default.foo7a(Foo.a, Foo.b, Foo.c):string>
+-- !query output
+Foo.a: Foo.a Foo.a: Foo.a T.b: t.b Foo.b: Foo.b T.c: t.c T.c: t.c
+
+
+-- !query
+CREATE FUNCTION foo7at(a STRING, b STRING, c STRING) RETURNS TABLE (a STRING, b STRING, c STRING, d STRING, e STRING) RETURN
+SELECT CONCAT('Foo.a: ', a), CONCAT('Foo.b: ', foo7at.b), CONCAT('T.b: ', b),
+       CONCAT('Foo.c: ', foo7at.c), CONCAT('T.c: ', c)
+FROM VALUES ('t.b', 't.c') AS T(b, c)
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT * FROM foo7at('Foo.a', 'Foo.b', 'Foo.c')
+-- !query schema
+struct<a:string,b:string,c:string,d:string,e:string>
+-- !query output
+Foo.a: Foo.a   Foo.b: Foo.b    T.b: t.b        Foo.c: Foo.c    T.c: t.c
+
+
+-- !query
+CREATE FUNCTION foo9a(a BOOLEAN) RETURNS BOOLEAN RETURN NOT a
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo9a(true)
+-- !query schema
+struct<spark_catalog.default.foo9a(true):boolean>
+-- !query output
+false
+
+
+-- !query
+SELECT foo9a(5)
+-- !query schema
+struct<spark_catalog.default.foo9a(5):boolean>
+-- !query output
+false
+
+
+-- !query
+SELECT foo9a('Nonsense')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkRuntimeException
+{
+  "errorClass" : "CAST_INVALID_INPUT",
+  "sqlState" : "22018",
+  "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "expression" : "'Nonsense'",
+    "sourceType" : "\"STRING\"",
+    "targetType" : "\"BOOLEAN\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "fragment" : ""
+  } ]
+}
+
+
+-- !query
+CREATE FUNCTION foo9b(a BYTE) RETURNS BYTE RETURN CAST(a AS SHORT) + 1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo9b(126)
+-- !query schema
+struct<spark_catalog.default.foo9b(126):tinyint>
+-- !query output
+127
+
+
+-- !query
+SELECT foo9b(127)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkArithmeticException
+{
+  "errorClass" : "CAST_OVERFLOW",
+  "sqlState" : "22003",
+  "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "sourceType" : "\"INT\"",
+    "targetType" : "\"TINYINT\"",
+    "value" : "128"
+  }
+}
+
+
+-- !query
+SELECT foo9b(128)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkArithmeticException
+{
+  "errorClass" : "CAST_OVERFLOW",
+  "sqlState" : "22003",
+  "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "sourceType" : "\"INT\"",
+    "targetType" : "\"TINYINT\"",
+    "value" : "128"
+  }
+}
+
+
+-- !query
+CREATE FUNCTION foo9c(a SHORT) RETURNS SHORT RETURN CAST(a AS INTEGER) + 1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo9c(32766)
+-- !query schema
+struct<spark_catalog.default.foo9c(32766):smallint>
+-- !query output
+32767
+
+
+-- !query
+SELECT foo9c(32767)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkArithmeticException
+{
+  "errorClass" : "CAST_OVERFLOW",
+  "sqlState" : "22003",
+  "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "sourceType" : "\"INT\"",
+    "targetType" : "\"SMALLINT\"",
+    "value" : "32768"
+  }
+}
+
+
+-- !query
+SELECT foo9c(32768)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkArithmeticException
+{
+  "errorClass" : "CAST_OVERFLOW",
+  "sqlState" : "22003",
+  "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "sourceType" : "\"INT\"",
+    "targetType" : "\"SMALLINT\"",
+    "value" : "32768"
+  }
+}
+
+
+-- !query
+CREATE FUNCTION foo9d(a INTEGER) RETURNS INTEGER RETURN CAST(a AS BIGINT) + 1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo9d(2147483647 - 1)
+-- !query schema
+struct<spark_catalog.default.foo9d((2147483647 - 1)):int>
+-- !query output
+2147483647
+
+
+-- !query
+SELECT foo9d(2147483647)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkArithmeticException
+{
+  "errorClass" : "CAST_OVERFLOW",
+  "sqlState" : "22003",
+  "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "sourceType" : "\"BIGINT\"",
+    "targetType" : "\"INT\"",
+    "value" : "2147483648L"
+  }
+}
+
+
+-- !query
+SELECT foo9d(2147483647 + 1)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkArithmeticException
+{
+  "errorClass" : "ARITHMETIC_OVERFLOW",
+  "sqlState" : "22003",
+  "messageParameters" : {
+    "alternative" : " Use 'try_add' to tolerate overflow and return NULL 
instead.",
+    "config" : "\"spark.sql.ansi.enabled\"",
+    "message" : "integer overflow"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 14,
+    "stopIndex" : 27,
+    "fragment" : "2147483647 + 1"
+  } ]
+}
+
+
+-- !query
+CREATE FUNCTION foo9e(a BIGINT) RETURNS BIGINT RETURN CAST(a AS DECIMAL(20, 0)) + 1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo9e(9223372036854775807 - 1)
+-- !query schema
+struct<spark_catalog.default.foo9e((9223372036854775807 - 1)):bigint>
+-- !query output
+9223372036854775807
+
+
+-- !query
+SELECT foo9e(9223372036854775807)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkArithmeticException
+{
+  "errorClass" : "CAST_OVERFLOW",
+  "sqlState" : "22003",
+  "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "sourceType" : "\"DECIMAL(21,0)\"",
+    "targetType" : "\"BIGINT\"",
+    "value" : "9223372036854775808BD"
+  }
+}
+
+
+-- !query
+SELECT foo9e(9223372036854775807.0 + 1)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkArithmeticException
+{
+  "errorClass" : "CAST_OVERFLOW",
+  "sqlState" : "22003",
+  "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "sourceType" : "\"DECIMAL(21,1)\"",
+    "targetType" : "\"BIGINT\"",
+    "value" : "9223372036854775808.0BD"
+  }
+}
+
+
+-- !query
+CREATE FUNCTION foo9f(a DECIMAL( 5, 2 )) RETURNS DECIMAL (5, 2) RETURN CAST(a AS DECIMAL(6, 2)) + 1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo9f(999 - 1)
+-- !query schema
+struct<spark_catalog.default.foo9f((999 - 1)):decimal(5,2)>
+-- !query output
+999.00
+
+
+-- !query
+SELECT foo9f(999)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkArithmeticException
+{
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
+  "sqlState" : "22003",
+  "messageParameters" : {
+    "config" : "\"spark.sql.ansi.enabled\"",
+    "precision" : "5",
+    "scale" : "2",
+    "value" : "1000.00"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "fragment" : ""
+  } ]
+}
+
+
+-- !query
+SELECT foo9f(999 + 1)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkArithmeticException
+{
+  "errorClass" : "NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION",
+  "sqlState" : "22003",
+  "messageParameters" : {
+    "config" : "\"spark.sql.ansi.enabled\"",
+    "precision" : "5",
+    "scale" : "2",
+    "value" : "1000"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "fragment" : ""
+  } ]
+}
+
+
+-- !query
+CREATE FUNCTION foo9g(a FLOAT, b String) RETURNS FLOAT RETURN b || CAST(a AS String)
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo9g(123.23, '7')
+-- !query schema
+struct<spark_catalog.default.foo9g(123.23, 7):float>
+-- !query output
+7123.23
+
+
+-- !query
+SELECT foo9g('hello', '7')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkNumberFormatException
+{
+  "errorClass" : "CAST_INVALID_INPUT",
+  "sqlState" : "22018",
+  "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "expression" : "'hello'",
+    "sourceType" : "\"STRING\"",
+    "targetType" : "\"FLOAT\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "fragment" : ""
+  } ]
+}
+
+
+-- !query
+SELECT foo9g(123.23, 'q')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkNumberFormatException
+{
+  "errorClass" : "CAST_INVALID_INPUT",
+  "sqlState" : "22018",
+  "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "expression" : "'q123.23'",
+    "sourceType" : "\"STRING\"",
+    "targetType" : "\"FLOAT\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "fragment" : ""
+  } ]
+}
+
+
+-- !query
+CREATE FUNCTION foo9h(a DOUBLE, b String) RETURNS DOUBLE RETURN b || CAST(a AS String)
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo9h(123.23, '7')
+-- !query schema
+struct<spark_catalog.default.foo9h(123.23, 7):double>
+-- !query output
+7123.23
+
+
+-- !query
+SELECT foo9h('hello', '7')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkNumberFormatException
+{
+  "errorClass" : "CAST_INVALID_INPUT",
+  "sqlState" : "22018",
+  "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "expression" : "'hello'",
+    "sourceType" : "\"STRING\"",
+    "targetType" : "\"DOUBLE\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "fragment" : ""
+  } ]
+}
+
+
+-- !query
+SELECT foo9h(123.23, 'q')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkNumberFormatException
+{
+  "errorClass" : "CAST_INVALID_INPUT",
+  "sqlState" : "22018",
+  "messageParameters" : {
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "expression" : "'q123.23'",
+    "sourceType" : "\"STRING\"",
+    "targetType" : "\"DOUBLE\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "fragment" : ""
+  } ]
+}
+
+
+-- !query
+CREATE FUNCTION foo9i(a VARCHAR(10), b VARCHAR(10)) RETURNS VARCHAR(12) RETURN a || b
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "UNSUPPORTED_CHAR_OR_VARCHAR_AS_STRING",
+  "sqlState" : "0A000"
+}
+
+
+-- !query
+CREATE FUNCTION foo9j(a STRING, b STRING) RETURNS STRING RETURN a || b
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo9j('1234567890', '12')
+-- !query schema
+struct<spark_catalog.default.foo9j(1234567890, 12):string>
+-- !query output
+123456789012
+
+
+-- !query
+SELECT foo9j(12345678901, '12')
+-- !query schema
+struct<spark_catalog.default.foo9j(12345678901, 12):string>
+-- !query output
+1234567890112
+
+
+-- !query
+CREATE FUNCTION foo9l(a DATE, b INTERVAL) RETURNS DATE RETURN a + b
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo9l(DATE '2020-02-02', INTERVAL '1' YEAR)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITHOUT_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "sqlExpr" : "\"INTERVAL '1' YEAR\"",
+    "srcType" : "\"INTERVAL YEAR\"",
+    "targetType" : "\"INTERVAL\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 50,
+    "fragment" : "foo9l(DATE '2020-02-02', INTERVAL '1' YEAR)"
+  } ]
+}
+
+
+-- !query
+SELECT foo9l('2020-02-02', INTERVAL '1' YEAR)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITHOUT_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "sqlExpr" : "\"INTERVAL '1' YEAR\"",
+    "srcType" : "\"INTERVAL YEAR\"",
+    "targetType" : "\"INTERVAL\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 45,
+    "fragment" : "foo9l('2020-02-02', INTERVAL '1' YEAR)"
+  } ]
+}
+
+
+-- !query
+SELECT foo9l(DATE '-7', INTERVAL '1' YEAR)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+{
+  "errorClass" : "INVALID_TYPED_LITERAL",
+  "sqlState" : "42604",
+  "messageParameters" : {
+    "value" : "'-7'",
+    "valueType" : "\"DATE\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 14,
+    "stopIndex" : 22,
+    "fragment" : "DATE '-7'"
+  } ]
+}
+
+
+-- !query
+SELECT foo9l(DATE '2020-02-02', INTERVAL '9999999' YEAR)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITHOUT_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "sqlExpr" : "\"INTERVAL '9999999' YEAR\"",
+    "srcType" : "\"INTERVAL YEAR\"",
+    "targetType" : "\"INTERVAL\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 56,
+    "fragment" : "foo9l(DATE '2020-02-02', INTERVAL '9999999' YEAR)"
+  } ]
+}
+
+
+-- !query
+CREATE FUNCTION foo9m(a TIMESTAMP, b INTERVAL) RETURNS TIMESTAMP RETURN a + b
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo9m(TIMESTAMP'2020-02-02 12:15:16.123', INTERVAL '1' YEAR)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITHOUT_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "sqlExpr" : "\"INTERVAL '1' YEAR\"",
+    "srcType" : "\"INTERVAL YEAR\"",
+    "targetType" : "\"INTERVAL\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 67,
+    "fragment" : "foo9m(TIMESTAMP'2020-02-02 12:15:16.123', INTERVAL '1' YEAR)"
+  } ]
+}
+
+
+-- !query
+SELECT foo9m('2020-02-02 12:15:16.123', INTERVAL '1' YEAR)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITHOUT_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "sqlExpr" : "\"INTERVAL '1' YEAR\"",
+    "srcType" : "\"INTERVAL YEAR\"",
+    "targetType" : "\"INTERVAL\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 58,
+    "fragment" : "foo9m('2020-02-02 12:15:16.123', INTERVAL '1' YEAR)"
+  } ]
+}
+
+
+-- !query
+SELECT foo9m(TIMESTAMP'2020-02-02 12:15:16.123', INTERVAL '999999' YEAR)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.ExtendedAnalysisException
+{
+  "errorClass" : "DATATYPE_MISMATCH.CAST_WITHOUT_SUGGESTION",
+  "sqlState" : "42K09",
+  "messageParameters" : {
+    "sqlExpr" : "\"INTERVAL '999999' YEAR\"",
+    "srcType" : "\"INTERVAL YEAR\"",
+    "targetType" : "\"INTERVAL\""
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 72,
+    "fragment" : "foo9m(TIMESTAMP'2020-02-02 12:15:16.123', INTERVAL '999999' 
YEAR)"
+  } ]
+}
+
+
+-- !query
+CREATE FUNCTION foo9n(a ARRAY<INTEGER>) RETURNS ARRAY<INTEGER> RETURN a
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo9n(ARRAY(1, 2, 3))
+-- !query schema
+struct<spark_catalog.default.foo9n(array(1, 2, 3)):array<int>>
+-- !query output
+[1,2,3]
+
+
+-- !query
+SELECT foo9n(from_json('[1, 2, 3]', 'array<int>'))
+-- !query schema
+struct<spark_catalog.default.foo9n(from_json([1, 2, 3])):array<int>>
+-- !query output
+[1,2,3]
+
+
+-- !query
+CREATE FUNCTION foo9o(a MAP<STRING, INTEGER>) RETURNS MAP<STRING, INTEGER> RETURN a
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo9o(MAP('hello', 1, 'world', 2))
+-- !query schema
+struct<spark_catalog.default.foo9o(map(hello, 1, world, 2)):map<string,int>>
+-- !query output
+{"hello":1,"world":2}
+
+
+-- !query
+SELECT foo9o(from_json('{"hello":1, "world":2}', 'map<string,int>'))
+-- !query schema
+struct<spark_catalog.default.foo9o(entries):map<string,int>>
+-- !query output
+{"hello":1,"world":2}
+
+
+-- !query
+CREATE FUNCTION foo9p(a STRUCT<a1: INTEGER, a2: STRING>) RETURNS STRUCT<a1: INTEGER, a2: STRING> RETURN a
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo9p(STRUCT(1, 'hello'))
+-- !query schema
+struct<spark_catalog.default.foo9p(struct(1, hello)):struct<a1:int,a2:string>>
+-- !query output
+{"a1":1,"a2":"hello"}
+
+
+-- !query
+SELECT foo9p(from_json('{1:"hello"}', 'struct<a1:int, a2:string>'))
+-- !query schema
+struct<spark_catalog.default.foo9p(from_json({1:"hello"})):struct<a1:int,a2:string>>
+-- !query output
+{"a1":null,"a2":null}
+
+
+-- !query
+CREATE FUNCTION foo9q(a ARRAY<STRUCT<a1: INT, a2: STRING>>) RETURNS ARRAY<STRUCT<a1: INT, a2: STRING>> RETURN a
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo9q(ARRAY(STRUCT(1, 'hello'), STRUCT(2, 'world')))
+-- !query schema
+struct<spark_catalog.default.foo9q(array(struct(1, hello), struct(2, 
world))):array<struct<a1:int,a2:string>>>
+-- !query output
+[{"a1":1,"a2":"hello"},{"a1":2,"a2":"world"}]
+
+
+-- !query
+SELECT foo9q(ARRAY(NAMED_STRUCT('x', 1, 'y', 'hello'), NAMED_STRUCT('x', 2, 'y', 'world')))
+-- !query schema
+struct<spark_catalog.default.foo9q(array(named_struct(x, 1, y, hello), named_struct(x, 2, y, world))):array<struct<a1:int,a2:string>>>
+-- !query output
+[{"a1":1,"a2":"hello"},{"a1":2,"a2":"world"}]
+
+
+-- !query
+SELECT foo9q(from_json('[{1:"hello"}, {2:"world"}]', 'array<struct<a1:int,a2:string>>'))
+-- !query schema
+struct<spark_catalog.default.foo9q(from_json([{1:"hello"}, {2:"world"}])):array<struct<a1:int,a2:string>>>
+-- !query output
+NULL
+
+
+-- !query
+CREATE FUNCTION foo9r(a ARRAY<MAP<STRING, INT>>) RETURNS ARRAY<MAP<STRING, INT>> RETURN a
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT foo9r(ARRAY(MAP('hello', 1), MAP('world', 2)))
+-- !query schema
+struct<spark_catalog.default.foo9r(array(map(hello, 1), map(world, 
2))):array<map<string,int>>>
+-- !query output
+[{"hello":1},{"world":2}]
+
+
+-- !query
+SELECT foo9r(from_json('[{"hello":1}, {"world":2}]', 'array<map<string,int>>'))
+-- !query schema
+struct<spark_catalog.default.foo9r(from_json([{"hello":1}, 
{"world":2}])):array<map<string,int>>>
+-- !query output
+[{"hello":1},{"world":2}]
+
+
+-- !query
+CREATE OR REPLACE FUNCTION foo1_10(a INT) RETURNS INT RETURN a + 2
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+CREATE OR REPLACE FUNCTION bar1_10(b INT) RETURNS STRING RETURN foo1_10(TRY_CAST(b AS STRING))
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT bar1_10(3)
+-- !query schema
+struct<spark_catalog.default.bar1_10(3):string>
+-- !query output
+5
+
+
 -- !query
 CREATE FUNCTION foo2_1a(a INT) RETURNS INT RETURN a
 -- !query schema

