This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
     new 8b1891438dd6 [SPARK-48510][CONNECT][FOLLOW-UP-MK2] Fix for UDAF `toColumn` API when running tests in Maven
8b1891438dd6 is described below

commit 8b1891438dd668010a4d939b0a1d6bfe8a5f0518
Author: Paddy Xu <xupa...@gmail.com>
AuthorDate: Thu Jul 18 09:52:46 2024 +0900

    [SPARK-48510][CONNECT][FOLLOW-UP-MK2] Fix for UDAF `toColumn` API when running tests in Maven

    ### What changes were proposed in this pull request?

    This PR follows https://github.com/apache/spark/pull/47368 as another attempt to fix the broken tests.
    The previous attempt failed with an NPE, caused by `Iterator.iterate` generating an **infinite** stream of values.
    I could not reproduce the issue locally, so this fix is based purely on the error message:
    https://github.com/apache/spark/actions/runs/9974746135/job/27562881993.

    ### Why are the changes needed?

    Because the previous attempt failed.

    ### Does this PR introduce _any_ user-facing change?

    No.

    ### How was this patch tested?

    Tested locally.

    ### Was this patch authored or co-authored using generative AI tooling?

    No.

    Closes #47387 from xupefei/udaf-tocolumn-fixup-mk2.

    Authored-by: Paddy Xu <xupa...@gmail.com>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 .../org/apache/spark/sql/expressions/Aggregator.scala | 14 ++++++++++++--
 1 file changed, 12 insertions(+), 2 deletions(-)

diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/expressions/Aggregator.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/expressions/Aggregator.scala
index 8ef7ccf22586..b177c110285d 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/expressions/Aggregator.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/expressions/Aggregator.scala
@@ -133,10 +133,20 @@ abstract class Aggregator[-IN, BUF, OUT] extends Serializable {
     import scala.reflect.api._
     def areCompatibleMirrors(one: Mirror[_], another: Mirror[_]): Boolean = {
+      def checkAllParents(target: JavaMirror, candidate: JavaMirror): Boolean = {
+        var current = candidate.classLoader
+        while (current != null) {
+          if (current == target.classLoader) {
+            return true
+          }
+          current = current.getParent
+        }
+        false
+      }
+
       (one, another) match {
         case (a: JavaMirror, b: JavaMirror) =>
-          Iterator.iterate(b.classLoader)(_.getParent).contains(a.classLoader) ||
-            Iterator.iterate(a.classLoader)(_.getParent).contains(b.classLoader)
+          a == b || checkAllParents(a, b) || checkAllParents(b, a)
         case _ => one == another
       }
     }

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
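For readers who want to see the failure mode described above in isolation, here is a minimal, self-contained Scala sketch. It is not part of the commit; the `ClassLoaderAncestryDemo` object and the `isAncestor` helper are illustrative names only. It shows why an `Iterator.iterate(_.getParent)`-based ancestry check can throw a NullPointerException once the class-loader parent chain is exhausted, and how an explicit null-terminated walk, shaped like the `checkAllParents` helper in the diff above, avoids it.

```scala
import java.net.{URL, URLClassLoader}

object ClassLoaderAncestryDemo {

  // Explicit parent walk, mirroring the shape of the fix: stop when the chain
  // reaches null (the bootstrap loader is represented as null).
  def isAncestor(target: ClassLoader, candidate: ClassLoader): Boolean = {
    var current = candidate
    while (current != null) {
      if (current == target) {
        return true
      }
      current = current.getParent
    }
    false
  }

  def main(args: Array[String]): Unit = {
    // A loader that is not anywhere in the application class loader's chain.
    val unrelated = new URLClassLoader(Array.empty[URL], null)
    val appLoader = getClass.getClassLoader

    // Original approach: Iterator.iterate is infinite, so when `unrelated` is
    // never found, the iterator eventually calls getParent on null and throws.
    val original =
      try {
        Iterator.iterate(appLoader)(_.getParent).contains(unrelated).toString
      } catch {
        case _: NullPointerException => "NullPointerException (walked past the bootstrap loader)"
      }
    println(s"Iterator.iterate check: $original")

    // Fixed approach: the walk terminates cleanly when the chain is exhausted.
    println(s"Explicit parent walk:   ${isAncestor(unrelated, appLoader)}")
  }
}
```

Running the sketch prints the NullPointerException message for the iterator-based check and `false` for the explicit walk, which is the terminating behaviour the fix relies on.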