This is an automated email from the ASF dual-hosted git repository.
jiayu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/sedona.git
The following commit(s) were added to refs/heads/master by this push:
new 1a3ec79c3a [GH-2640] Skip re-registration of functions in SedonaContext.create() (#2642)
1a3ec79c3a is described below
commit 1a3ec79c3a1ece80a60600b0853fd6b14f330a0f
Author: Jia Yu <[email protected]>
AuthorDate: Wed Feb 11 03:26:08 2026 -0700
[GH-2640] Skip re-registration of functions in SedonaContext.create() (#2642)
---
.../apache/sedona/sql/UDF/AbstractCatalog.scala | 33 ++++++++++++----------
1 file changed, 18 insertions(+), 15 deletions(-)
diff --git a/spark/common/src/main/scala/org/apache/sedona/sql/UDF/AbstractCatalog.scala b/spark/common/src/main/scala/org/apache/sedona/sql/UDF/AbstractCatalog.scala
index fc15570d16..72dad00467 100644
--- a/spark/common/src/main/scala/org/apache/sedona/sql/UDF/AbstractCatalog.scala
+++ b/spark/common/src/main/scala/org/apache/sedona/sql/UDF/AbstractCatalog.scala
@@ -83,15 +83,15 @@ abstract class AbstractCatalog {
}
def registerAll(sparkSession: SparkSession): Unit = {
+ val registry = sparkSession.sessionState.functionRegistry
expressions.foreach { case (functionIdentifier, expressionInfo, functionBuilder) =>
- sparkSession.sessionState.functionRegistry.registerFunction(
- functionIdentifier,
- expressionInfo,
- functionBuilder)
- FunctionRegistry.builtin.registerFunction(
- functionIdentifier,
- expressionInfo,
- functionBuilder)
+ if (!registry.functionExists(functionIdentifier)) {
+ registry.registerFunction(functionIdentifier, expressionInfo,
functionBuilder)
+ FunctionRegistry.builtin.registerFunction(
+ functionIdentifier,
+ expressionInfo,
+ functionBuilder)
+ }
}
aggregateExpressions.foreach { f =>
registerAggregateFunction(sparkSession, f.getClass.getSimpleName, f)
@@ -106,13 +106,16 @@ abstract class AbstractCatalog {
sparkSession: SparkSession,
functionName: String,
aggregator: Aggregator[Geometry, _, _]): Unit = {
- sparkSession.udf.register(functionName, functions.udaf(aggregator))
- FunctionRegistry.builtin.registerFunction(
- FunctionIdentifier(functionName),
- new ExpressionInfo(aggregator.getClass.getCanonicalName, null, functionName),
- (_: Seq[Expression]) =>
- throw new UnsupportedOperationException(
s"Aggregate function $functionName cannot be used as a regular function"))
+ val functionIdentifier = FunctionIdentifier(functionName)
+ if (!sparkSession.sessionState.functionRegistry.functionExists(functionIdentifier)) {
+ sparkSession.udf.register(functionName, functions.udaf(aggregator))
+ FunctionRegistry.builtin.registerFunction(
+ functionIdentifier,
+ new ExpressionInfo(aggregator.getClass.getCanonicalName, null, functionName),
+ (_: Seq[Expression]) =>
+ throw new UnsupportedOperationException(
s"Aggregate function $functionName cannot be used as a regular function"))
+ }
}
def dropAll(sparkSession: SparkSession): Unit = {