This is an automated email from the ASF dual-hosted git repository.

imbruced pushed a commit to branch arrow-worker
in repository https://gitbox.apache.org/repos/asf/sedona.git

commit 7371c25a7d90c6582dcb4618d869ad7c15f93fce
Author: pawelkocinski <[email protected]>
AuthorDate: Thu Nov 13 18:48:45 2025 +0100

    SEDONA-748 add working example
---
 .../spark/sql/execution/python/SedonaPythonArrowOutput.scala   | 10 +++++++++-
 .../test/scala/org/apache/spark/sql/udf/StrategySuite.scala    |  1 +
 .../scala/org/apache/spark/sql/udf/TestScalarPandasUDF.scala   |  2 +-
 3 files changed, 11 insertions(+), 2 deletions(-)

diff --git a/spark/spark-3.5/src/main/scala/org/apache/spark/sql/execution/python/SedonaPythonArrowOutput.scala b/spark/spark-3.5/src/main/scala/org/apache/spark/sql/execution/python/SedonaPythonArrowOutput.scala
index f2c8543537..316cb32c3e 100644
--- a/spark/spark-3.5/src/main/scala/org/apache/spark/sql/execution/python/SedonaPythonArrowOutput.scala
+++ b/spark/spark-3.5/src/main/scala/org/apache/spark/sql/execution/python/SedonaPythonArrowOutput.scala
@@ -121,7 +121,15 @@ private[python] trait SedonaPythonArrowOutput[OUT <: AnyRef] { self: SedonaBaseP
                 null.asInstanceOf[OUT]
             }
           }
-        } catch handleException
+        } catch {
+          case e: Exception =>
+            // If an exception happens, make sure to close the reader to release resources.
+            if (reader != null) {
+              reader.close(false)
+            }
+            allocator.close()
+            throw e
+        }
       }
     }
   }
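For reference, the hunk above applies a standard cleanup-on-failure pattern: if consuming the Arrow stream throws, the reader and its allocator are closed before the exception is rethrown, so off-heap buffers are not leaked. A minimal, self-contained sketch of that pattern follows; the readWithCleanup helper, its parameters, and the allocator limit are illustrative, not Sedona API.

    import org.apache.arrow.memory.{BufferAllocator, RootAllocator}
    import org.apache.arrow.vector.ipc.ArrowStreamReader

    // Open an Arrow stream reader, hand it to `consume`, and guarantee that the
    // reader and allocator are released if anything throws before completion.
    def readWithCleanup[T](open: BufferAllocator => ArrowStreamReader)(
        consume: ArrowStreamReader => T): T = {
      val allocator: BufferAllocator = new RootAllocator(Long.MaxValue)
      var reader: ArrowStreamReader = null
      try {
        reader = open(allocator)
        consume(reader)
      } catch {
        case e: Exception =>
          // Mirror the patch: close the reader (keeping the underlying channel
          // open, hence close(false)) and the allocator, then rethrow.
          if (reader != null) {
            reader.close(false)
          }
          allocator.close()
          throw e
      }
    }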
diff --git a/spark/spark-3.5/src/test/scala/org/apache/spark/sql/udf/StrategySuite.scala b/spark/spark-3.5/src/test/scala/org/apache/spark/sql/udf/StrategySuite.scala
index 48e8e1c878..3396defd01 100644
--- a/spark/spark-3.5/src/test/scala/org/apache/spark/sql/udf/StrategySuite.scala
+++ b/spark/spark-3.5/src/test/scala/org/apache/spark/sql/udf/StrategySuite.scala
@@ -47,6 +47,7 @@ class StrategySuite extends AnyFunSuite with Matchers {
 //    spark.sql("select 1").show()
     val df = spark.read.format("geoparquet")
       .load("/Users/pawelkocinski/Desktop/projects/sedona-production/apache-sedona-book/data/warehouse/buildings")
+      .limit(10)
       .select(
 //        geometryToNonGeometryFunction(col("geometry")),
         geometryToGeometryFunction(col("geometry")),
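The StrategySuite change only bounds the scanned rows so the example runs quickly on a local GeoParquet dataset. A hedged sketch of the same read pattern, assuming a placeholder /tmp/buildings path (hypothetical) and the geometryToGeometryFunction UDF defined in this suite:

    import org.apache.spark.sql.functions.col

    // Read a GeoParquet dataset, cap the input at 10 rows, and apply the
    // geometry-to-geometry Pandas UDF to the geometry column.
    val sample = spark.read
      .format("geoparquet")
      .load("/tmp/buildings") // hypothetical path; the test points at a local absolute path
      .limit(10)
      .select(geometryToGeometryFunction(col("geometry")))
    sample.show()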
diff --git a/spark/spark-3.5/src/test/scala/org/apache/spark/sql/udf/TestScalarPandasUDF.scala b/spark/spark-3.5/src/test/scala/org/apache/spark/sql/udf/TestScalarPandasUDF.scala
index 2a3c1dbb2b..83f87f401e 100644
--- a/spark/spark-3.5/src/test/scala/org/apache/spark/sql/udf/TestScalarPandasUDF.scala
+++ b/spark/spark-3.5/src/test/scala/org/apache/spark/sql/udf/TestScalarPandasUDF.scala
@@ -146,7 +146,7 @@ object ScalarUDF {
 
   private val workerEnv = new java.util.HashMap[String, String]()
     workerEnv.put("PYTHONPATH", s"$pysparkPythonPath:$pythonPath")
-    SparkEnv.get.conf.set(PYTHON_WORKER_MODULE, "sedonaworker.worker")
+    SparkEnv.get.conf.set(PYTHON_WORKER_MODULE, "sedonaworker.work")
     SparkEnv.get.conf.set(PYTHON_USE_DAEMON, false)
 //
 //  val geometryToNonGeometryFunction: UserDefinedPythonFunction = UserDefinedPythonFunction(

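The TestScalarPandasUDF change points the Python worker module at sedonaworker.work and disables the Python daemon, so Spark launches the custom Arrow worker process directly. A rough sketch of that wiring, assuming the internal Spark config entries used by the test live in org.apache.spark.internal.config.Python and are accessible from the test's org.apache.spark package:

    import org.apache.spark.SparkEnv
    import org.apache.spark.internal.config.Python.{PYTHON_USE_DAEMON, PYTHON_WORKER_MODULE}

    // Route PySpark task execution through the custom worker module instead of
    // the default daemon-managed worker.
    SparkEnv.get.conf.set(PYTHON_WORKER_MODULE, "sedonaworker.work")
    SparkEnv.get.conf.set(PYTHON_USE_DAEMON, false)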