This is an automated email from the ASF dual-hosted git repository.
wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 0afe9f5d13e2 [SPARK-52148][SQL] Fix CREATE OR REPLACE function for SQL
user-defined TVFs
0afe9f5d13e2 is described below
commit 0afe9f5d13e28fa5b25b1ccedc6a96a39fc34967
Author: Allison Wang <[email protected]>
AuthorDate: Mon Jun 30 21:42:36 2025 +0800
[SPARK-52148][SQL] Fix CREATE OR REPLACE function for SQL user-defined TVFs
### What changes were proposed in this pull request?
This PR fixes CREATE OR REPLACE function for SQL user-defined table-valued
functions.
### Why are the changes needed?
To fix a bug where CREATE OR REPLACE FUNCTION for a SQL table-valued function failed with ROUTINE_ALREADY_EXISTS: dropping the function removed it from the scalar FunctionRegistry but not from the TableFunctionRegistry, so the stale registry entry made the replacement appear to conflict.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Existing tests
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #51191 from allisonwang-db/spark-52148-fix-tvf.
Authored-by: Allison Wang <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
---
.../sql/catalyst/catalog/SessionCatalog.scala | 2 ++
.../sql-tests/analyzer-results/sql-udf.sql.out | 17 ++++------
.../test/resources/sql-tests/inputs/sql-udf.sql | 1 +
.../resources/sql-tests/results/sql-udf.sql.out | 37 +++++++---------------
4 files changed, 21 insertions(+), 36 deletions(-)
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 3eb1b35d2419..531b453213ed 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -1481,6 +1481,8 @@ class SessionCatalog(
// For a permanent function, because we loaded it to the
FunctionRegistry
// when it's first used, we also need to drop it from the
FunctionRegistry.
functionRegistry.dropFunction(qualifiedIdent)
+ } else if (tableFunctionRegistry.functionExists(qualifiedIdent)) {
+ tableFunctionRegistry.dropFunction(qualifiedIdent)
}
externalCatalog.dropFunction(db, funcName)
} else if (!ignoreIfNotExists) {
diff --git
a/sql/core/src/test/resources/sql-tests/analyzer-results/sql-udf.sql.out
b/sql/core/src/test/resources/sql-tests/analyzer-results/sql-udf.sql.out
index 64df5f26b69d..6130a134adc8 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/sql-udf.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/sql-udf.sql.out
@@ -4077,6 +4077,12 @@ SET spark.sql.ansi.enabled=true
SetCommand (spark.sql.ansi.enabled,Some(true))
+-- !query
+DROP FUNCTION IF EXISTS foo3_3at
+-- !query analysis
+DropFunctionCommand spark_catalog.default.foo3_3at, true, false
+
+
-- !query
CREATE FUNCTION foo3_3a(x INT) RETURNS DOUBLE RETURN 1 / x
-- !query analysis
@@ -4146,16 +4152,7 @@ CreateSQLFunctionCommand spark_catalog.default.foo3_3a,
x INT, DOUBLE, 1 / x, fa
-- !query
CREATE OR REPLACE FUNCTION foo3_3at(x INT) RETURNS TABLE (a DOUBLE) RETURN
SELECT 1 / x
-- !query analysis
-org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
-{
- "errorClass" : "ROUTINE_ALREADY_EXISTS",
- "sqlState" : "42723",
- "messageParameters" : {
- "existingRoutineType" : "routine",
- "newRoutineType" : "routine",
- "routineName" : "`default`.`foo3_3at`"
- }
-}
+CreateSQLFunctionCommand spark_catalog.default.foo3_3at, x INT, a DOUBLE,
SELECT 1 / x, true, false, false, true
-- !query
diff --git a/sql/core/src/test/resources/sql-tests/inputs/sql-udf.sql
b/sql/core/src/test/resources/sql-tests/inputs/sql-udf.sql
index f9e6f490ed9f..0c387fa4201a 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/sql-udf.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/sql-udf.sql
@@ -750,6 +750,7 @@ SELECT foo3_2e1(
-- 3.3 Create and invoke function with different SQL configurations
SET spark.sql.ansi.enabled=true;
+DROP FUNCTION IF EXISTS foo3_3at;
CREATE FUNCTION foo3_3a(x INT) RETURNS DOUBLE RETURN 1 / x;
CREATE FUNCTION foo3_3at(x INT) RETURNS TABLE (a DOUBLE) RETURN SELECT 1 / x;
CREATE TEMPORARY FUNCTION foo3_3b(x INT) RETURNS DOUBLE RETURN 1 / x;
diff --git a/sql/core/src/test/resources/sql-tests/results/sql-udf.sql.out
b/sql/core/src/test/resources/sql-tests/results/sql-udf.sql.out
index e289dca99ad4..07abc815777d 100644
--- a/sql/core/src/test/resources/sql-tests/results/sql-udf.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/sql-udf.sql.out
@@ -3629,6 +3629,14 @@ struct<key:string,value:string>
spark.sql.ansi.enabled true
+-- !query
+DROP FUNCTION IF EXISTS foo3_3at
+-- !query schema
+struct<>
+-- !query output
+
+
+
-- !query
CREATE FUNCTION foo3_3a(x INT) RETURNS DOUBLE RETURN 1 / x
-- !query schema
@@ -3740,16 +3748,7 @@ CREATE OR REPLACE FUNCTION foo3_3at(x INT) RETURNS TABLE
(a DOUBLE) RETURN SELEC
-- !query schema
struct<>
-- !query output
-org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
-{
- "errorClass" : "ROUTINE_ALREADY_EXISTS",
- "sqlState" : "42723",
- "messageParameters" : {
- "existingRoutineType" : "routine",
- "newRoutineType" : "routine",
- "routineName" : "`default`.`foo3_3at`"
- }
-}
+
-- !query
@@ -3779,23 +3778,9 @@ NULL
-- !query
SELECT * FROM foo3_3at(0)
-- !query schema
-struct<>
+struct<a:double>
-- !query output
-org.apache.spark.SparkArithmeticException
-{
- "errorClass" : "DIVIDE_BY_ZERO",
- "sqlState" : "22012",
- "messageParameters" : {
- "config" : "\"spark.sql.ansi.enabled\""
- },
- "queryContext" : [ {
- "objectType" : "",
- "objectName" : "",
- "startIndex" : 8,
- "stopIndex" : 12,
- "fragment" : "1 / x"
- } ]
-}
+NULL
-- !query
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]