This is an automated email from the ASF dual-hosted git repository.

jiayu pushed a commit to branch fix/2640-simplify-sedonacontext-warnings
in repository https://gitbox.apache.org/repos/asf/sedona.git

commit 8553b05db256e5a1901664a14892137170689a12
Author: Jia Yu <[email protected]>
AuthorDate: Wed Feb 11 01:46:03 2026 -0800

    fix: skip re-registration of functions in SedonaContext.create()
    
    When SedonaContext.create() is called a second time, all ~329 ST functions
    are re-registered. Spark's FunctionRegistry detects that each function
    already exists with a different builder closure and emits a WARN log for
    each one, producing hundreds of noisy warning lines.
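
    A minimal repro (hedged sketch; assumes a live SparkSession named
    `spark`, and the WARN text is paraphrased from Spark's
    SimpleFunctionRegistry):
    
        val sedona = SedonaContext.create(spark) // first call registers all ST functions
        SedonaContext.create(spark)              // second call previously logged, per function:
        // WARN SimpleFunctionRegistry: The function st_point replaced a
        // previously registered function.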
    
    Guard both expression and aggregate function registration with
    functionExists() checks to skip functions that are already registered
    in the session's function registry.
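
    With this guard, a second create() skips anything already present, so
    the call runs quietly (hedged sketch; "st_point" is just an example
    function name, and FunctionIdentifier comes from
    org.apache.spark.sql.catalyst):
    
        import org.apache.spark.sql.catalyst.FunctionIdentifier
        val registry = spark.sessionState.functionRegistry
        registry.functionExists(FunctionIdentifier("st_point")) // true after the first create()
        SedonaContext.create(spark) // already-registered functions are skipped; no WARN flood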
    
    Closes #2640
---
 .../apache/sedona/sql/UDF/AbstractCatalog.scala    | 36 +++++++++++++---------
 1 file changed, 21 insertions(+), 15 deletions(-)

diff --git a/spark/common/src/main/scala/org/apache/sedona/sql/UDF/AbstractCatalog.scala b/spark/common/src/main/scala/org/apache/sedona/sql/UDF/AbstractCatalog.scala
index fc15570d16..2f7784fd7a 100644
--- a/spark/common/src/main/scala/org/apache/sedona/sql/UDF/AbstractCatalog.scala
+++ b/spark/common/src/main/scala/org/apache/sedona/sql/UDF/AbstractCatalog.scala
@@ -83,15 +83,18 @@ abstract class AbstractCatalog {
   }
 
   def registerAll(sparkSession: SparkSession): Unit = {
+    val registry = sparkSession.sessionState.functionRegistry
     expressions.foreach { case (functionIdentifier, expressionInfo, functionBuilder) =>
-      sparkSession.sessionState.functionRegistry.registerFunction(
-        functionIdentifier,
-        expressionInfo,
-        functionBuilder)
-      FunctionRegistry.builtin.registerFunction(
-        functionIdentifier,
-        expressionInfo,
-        functionBuilder)
+      if (!registry.functionExists(functionIdentifier)) {
+        registry.registerFunction(
+          functionIdentifier,
+          expressionInfo,
+          functionBuilder)
+        FunctionRegistry.builtin.registerFunction(
+          functionIdentifier,
+          expressionInfo,
+          functionBuilder)
+      }
     }
     aggregateExpressions.foreach { f =>
       registerAggregateFunction(sparkSession, f.getClass.getSimpleName, f)
@@ -106,13 +109,16 @@ abstract class AbstractCatalog {
       sparkSession: SparkSession,
       functionName: String,
       aggregator: Aggregator[Geometry, _, _]): Unit = {
-    sparkSession.udf.register(functionName, functions.udaf(aggregator))
-    FunctionRegistry.builtin.registerFunction(
-      FunctionIdentifier(functionName),
-      new ExpressionInfo(aggregator.getClass.getCanonicalName, null, functionName),
-      (_: Seq[Expression]) =>
-        throw new UnsupportedOperationException(
-          s"Aggregate function $functionName cannot be used as a regular function"))
+    val functionIdentifier = FunctionIdentifier(functionName)
+    if (!sparkSession.sessionState.functionRegistry.functionExists(functionIdentifier)) {
+      sparkSession.udf.register(functionName, functions.udaf(aggregator))
+      FunctionRegistry.builtin.registerFunction(
+        functionIdentifier,
+        new ExpressionInfo(aggregator.getClass.getCanonicalName, null, functionName),
+        (_: Seq[Expression]) =>
+          throw new UnsupportedOperationException(
+            s"Aggregate function $functionName cannot be used as a regular function"))
+    }
   }
 
   def dropAll(sparkSession: SparkSession): Unit = {
