Yicong-Huang commented on code in PR #53035:
URL: https://github.com/apache/spark/pull/53035#discussion_r2561248634


##########
python/pyspark/sql/tests/arrow/test_arrow_udf_grouped_agg.py:
##########
@@ -1059,6 +1059,90 @@ def my_grouped_agg_arrow_udf(x):
                 ],
             )
 
+    def test_iterator_grouped_agg_single_column(self):
+        """
+        Test the iterator API for grouped aggregation with a single column.
+        """
+        import pyarrow as pa
+        from typing import Iterator
+
+        @arrow_udf("double")
+        def arrow_mean_iter(it: Iterator[pa.Array]) -> float:
+            sum_val = 0.0
+            cnt = 0
+            for v in it:
+                assert isinstance(v, pa.Array)
+                sum_val += pa.compute.sum(v).as_py()
+                cnt += len(v)
+            return sum_val / cnt if cnt > 0 else 0.0
+
+        df = self.spark.createDataFrame(
+            [(1, 1.0), (1, 2.0), (2, 3.0), (2, 5.0), (2, 10.0)], ("id", "v")
+        )
+
+        result = df.groupby("id").agg(arrow_mean_iter(df["v"]).alias("mean")).sort("id")
+        expected = df.groupby("id").agg(sf.mean(df["v"]).alias("mean")).sort("id").collect()
+
+        self.assertEqual(expected, result.collect())
+
+    @unittest.skipIf(not have_numpy, numpy_requirement_message)
+    def test_iterator_grouped_agg_multiple_columns(self):
+        """
+        Test the iterator API for grouped aggregation with multiple columns.
+        """
+        import pyarrow as pa
+        import numpy as np
+        from typing import Iterator, Tuple

Review Comment:
   moved
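
   For context on the hunk above: the multi-column test is cut off right after its imports. Purely as a hedged sketch, an iterator-based grouped-agg UDF over two columns could look like the following, assuming the iterator yields tuples of pa.Array (one entry per input column, mirroring the pandas iterator UDF convention) and that arrow_udf is importable from pyspark.sql.functions as used by the test module. The name arrow_weighted_mean_iter is illustrative, not from the PR:

       import pyarrow as pa
       import numpy as np
       from typing import Iterator, Tuple

       # Assumed import; the diff above uses arrow_udf directly.
       from pyspark.sql.functions import arrow_udf

       @arrow_udf("double")
       def arrow_weighted_mean_iter(it: Iterator[Tuple[pa.Array, pa.Array]]) -> float:
           # Weighted mean over two columns: accumulate sum(v * w) and sum(w)
           # across all batches for the group, then divide once at the end.
           weighted_sum = 0.0
           total_weight = 0.0
           for v, w in it:
               assert isinstance(v, pa.Array)
               assert isinstance(w, pa.Array)
               vals = v.to_numpy(zero_copy_only=False)
               wts = w.to_numpy(zero_copy_only=False)
               weighted_sum += float(np.dot(vals, wts))
               total_weight += float(np.sum(wts))
           return weighted_sum / total_weight if total_weight > 0 else 0.0

   As in the single-column test, the result would presumably be checked against an equivalent expression built from sf aggregate functions.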


