This is an automated email from the ASF dual-hosted git repository.
yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new f18c4e7722b4 [SPARK-49805][SQL][ML] Remove private[xxx] functions from
`functions.scala`
f18c4e7722b4 is described below
commit f18c4e7722b46e8573e959f5f3b063ed0efa5d23
Author: Ruifeng Zheng <[email protected]>
AuthorDate: Fri Sep 27 15:27:34 2024 +0800
[SPARK-49805][SQL][ML] Remove private[xxx] functions from `functions.scala`
### What changes were proposed in this pull request?
Remove private[xxx] functions from `functions.scala`
### Why are the changes needed?
internal functions can be directly invoked by `Column.internalFn`, so there is no need
to add them to `functions.scala`
### Does this PR introduce _any_ user-facing change?
no
### How was this patch tested?
ci
### Was this patch authored or co-authored using generative AI tooling?
no
Closes #48276 from zhengruifeng/move_private_func.
Authored-by: Ruifeng Zheng <[email protected]>
Signed-off-by: yangjie01 <[email protected]>
---
mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala | 5 ++++-
.../scala/org/apache/spark/ml/recommendation/CollectTopKSuite.scala | 3 ++-
sql/api/src/main/scala/org/apache/spark/sql/functions.scala | 3 ---
3 files changed, 6 insertions(+), 5 deletions(-)
diff --git a/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala
b/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala
index 1a004f71749e..5899bf891ec9 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala
@@ -517,7 +517,7 @@ class ALSModel private[ml] (
)
ratings.groupBy(srcOutputColumn)
- .agg(collect_top_k(struct(ratingColumn, dstOutputColumn), num, false))
+ .agg(ALSModel.collect_top_k(struct(ratingColumn, dstOutputColumn), num,
false))
.as[(Int, Seq[(Float, Int)])]
.map(t => (t._1, t._2.map(p => (p._2, p._1))))
.toDF(srcOutputColumn, recommendColumn)
@@ -546,6 +546,9 @@ object ALSModel extends MLReadable[ALSModel] {
private val Drop = "drop"
private[recommendation] final val supportedColdStartStrategies = Array(NaN,
Drop)
+ private[recommendation] def collect_top_k(e: Column, num: Int, reverse:
Boolean): Column =
+ Column.internalFn("collect_top_k", e, lit(num), lit(reverse))
+
@Since("1.6.0")
override def read: MLReader[ALSModel] = new ALSModelReader
diff --git
a/mllib/src/test/scala/org/apache/spark/ml/recommendation/CollectTopKSuite.scala
b/mllib/src/test/scala/org/apache/spark/ml/recommendation/CollectTopKSuite.scala
index b79e10d0d267..bd83d5498ae6 100644
---
a/mllib/src/test/scala/org/apache/spark/ml/recommendation/CollectTopKSuite.scala
+++
b/mllib/src/test/scala/org/apache/spark/ml/recommendation/CollectTopKSuite.scala
@@ -17,9 +17,10 @@
package org.apache.spark.ml.recommendation
+import org.apache.spark.ml.recommendation.ALSModel.collect_top_k
import org.apache.spark.ml.util.MLTest
import org.apache.spark.sql.DataFrame
-import org.apache.spark.sql.functions.{col, collect_top_k, struct}
+import org.apache.spark.sql.functions.{col, struct}
class CollectTopKSuite extends MLTest {
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/functions.scala
b/sql/api/src/main/scala/org/apache/spark/sql/functions.scala
index 93bff2262105..e6fd06f2ec63 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/functions.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/functions.scala
@@ -401,9 +401,6 @@ object functions {
def count_min_sketch(e: Column, eps: Column, confidence: Column): Column =
count_min_sketch(e, eps, confidence, lit(SparkClassUtils.random.nextLong))
- private[spark] def collect_top_k(e: Column, num: Int, reverse: Boolean):
Column =
- Column.internalFn("collect_top_k", e, lit(num), lit(reverse))
-
/**
* Aggregate function: returns the Pearson Correlation Coefficient for two
columns.
*
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]