This is an automated email from the ASF dual-hosted git repository.

jmalkin pushed a commit to branch cleanup
in repository https://gitbox.apache.org/repos/asf/datasketches-spark.git

commit 3923c57000ea03b2fd9cc5c3308b80f72de9badc
Author: Jon <[email protected]>
AuthorDate: Mon Jan 6 15:05:56 2025 -0800

    Move function registration from trait to accompanying object
---
 .../sql/registrar/DatasketchesFunctionRegistry.scala   | 18 ++++++++++--------
 1 file changed, 10 insertions(+), 8 deletions(-)

diff --git a/src/main/scala/org/apache/spark/sql/registrar/DatasketchesFunctionRegistry.scala b/src/main/scala/org/apache/spark/sql/registrar/DatasketchesFunctionRegistry.scala
index 5ab1738..381aa49 100644
--- a/src/main/scala/org/apache/spark/sql/registrar/DatasketchesFunctionRegistry.scala
+++ b/src/main/scala/org/apache/spark/sql/registrar/DatasketchesFunctionRegistry.scala
@@ -35,13 +35,6 @@ trait DatasketchesFunctionRegistry {
   // override this to define the actual functions
   val expressions: Map[String, (ExpressionInfo, FunctionBuilder)]
 
-  // registers all the functions in the expressions Map
-  def registerFunctions(spark: SparkSession): Unit = {
-    expressions.foreach { case (name, (info, builder)) =>
-      spark.sessionState.functionRegistry.registerFunction(FunctionIdentifier(name), info, builder)
-    }
-  }
-
   // simplifies defining the expression (ignoring "since" as a stand-alone library)
   protected def expression[T <: Expression : ClassTag](name: String): (String, (ExpressionInfo, FunctionBuilder)) = {
     val (expressionInfo, builder) = FunctionRegistryBase.build[T](name, None)
@@ -68,7 +61,7 @@ object DatasketchesFunctionRegistry extends DatasketchesFunctionRegistry {
 
     // TODO: it seems like there's got to be a way to simplify this, but
     // perhaps not with the optional isInclusive parameter?
-    // Spark uses ExprssionBuilder, extending that class via a builder class
+    // Spark uses ExpressionBuilder, extending that class via a builder class
     // and overriding build() to handle the lambda.
     // It allows for a cleaner registry here, so we can look at where to put
     // the builder classes in the future.
@@ -82,4 +75,13 @@ object DatasketchesFunctionRegistry extends DatasketchesFunctionRegistry {
       new KllGetPmfCdf(args(0), args(1), isInclusive = isInclusive, isPmf = false)
     }
   )
+
+  // registers all the functions in the expressions Map
+  def registerFunctions(spark: SparkSession): Unit = {
+    val functionRegistry = spark.sessionState.functionRegistry
+    expressions.foreach { case (name, (info, builder)) =>
+      functionRegistry.registerFunction(FunctionIdentifier(name), info, builder)
+    }
+  }
+
 }
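
For reference, a minimal sketch of how the relocated registerFunctions might be called from application code; the local SparkSession setup and the example object name below are illustrative assumptions, not part of this commit:

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.registrar.DatasketchesFunctionRegistry

    // Hypothetical driver object, shown only to illustrate the call site.
    object RegistrationExample {
      def main(args: Array[String]): Unit = {
        // Local session purely for demonstration.
        val spark = SparkSession.builder()
          .master("local[*]")
          .appName("datasketches-registration-example")
          .getOrCreate()

        // Registers every entry of the expressions Map with the session's
        // function registry, making the sketch functions callable from SQL.
        DatasketchesFunctionRegistry.registerFunctions(spark)

        spark.stop()
      }
    }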

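Regarding the ExpressionBuilder TODO in the diff, a rough sketch of what such a builder class could look like. The package and build() signature of Spark's ExpressionBuilder trait are assumed here, and the builder object name, the default for isInclusive, and the argument handling are all hypothetical, not taken from this repository:

    import org.apache.spark.sql.catalyst.expressions.{Expression, ExpressionBuilder, Literal}
    import org.apache.spark.sql.types.BooleanType
    // import for KllGetPmfCdf omitted; its package is not shown in this diff

    // Hypothetical builder handling the optional isInclusive argument, so the
    // registry entry could reference this object instead of a lambda.
    object KllGetCdfExpressionBuilder extends ExpressionBuilder {
      override def build(funcName: String, expressions: Seq[Expression]): Expression = {
        expressions match {
          // Two arguments: sketch and split points; assumed default for isInclusive.
          case Seq(sketch, splitPoints) =>
            new KllGetPmfCdf(sketch, splitPoints, isInclusive = true, isPmf = false)
          // Three arguments: isInclusive supplied as a boolean literal.
          case Seq(sketch, splitPoints, Literal(inclusive: Boolean, BooleanType)) =>
            new KllGetPmfCdf(sketch, splitPoints, isInclusive = inclusive, isPmf = false)
          case _ =>
            throw new IllegalArgumentException(
              s"$funcName expects (sketch, splitPoints[, isInclusive: Boolean])")
        }
      }
    }

Pointing the registry entry at a builder object like this is what would allow the cleaner registry the TODO mentions.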

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
