fangchenli commented on code in PR #53769:
URL: https://github.com/apache/spark/pull/53769#discussion_r2683575044
##########
python/pyspark/worker.py:
##########
@@ -110,6 +113,57 @@
has_memory_profiler = False
+class _ArrowUDFThreadPool:
+ """Singleton ThreadPoolExecutor for Arrow UDF concurrent execution."""
+
+ _instance: ClassVar[Optional[ThreadPoolExecutor]] = None
+ _lock: ClassVar[RLock] = RLock()
+ _max_workers: ClassVar[int] = 0
+
+ @classmethod
+ def get_pool(cls, max_workers: int) -> ThreadPoolExecutor:
+ """Get or create the singleton thread pool with the specified number
of workers."""
+ if cls._instance is not None and cls._max_workers == max_workers:
+ return cls._instance
+ with cls._lock:
+ if cls._instance is None or cls._max_workers != max_workers:
+ if cls._instance is not None:
+ cls._instance.shutdown(wait=False)
+ cls._max_workers = max_workers
+ cls._instance = ThreadPoolExecutor(max_workers=max_workers)
+ return cls._instance
+
+ @classmethod
+ def shutdown(cls, wait: bool = True) -> None:
+ """Shutdown the thread pool."""
+ with cls._lock:
+ if cls._instance is not None:
+ cls._instance.shutdown(wait=wait)
+ cls._instance = None
+ cls._max_workers = 0
+
+ @classmethod
+ def map_chunked(cls, func: Callable, data: Iterable, max_workers: int) -> list:
+ """Apply func to data using chunked parallel execution (one task per worker)."""
+ data_list = list(data)
+ if not data_list:
+ return []
+
+ chunk_size = (len(data_list) + max_workers - 1) // max_workers
+ chunks = [data_list[i : i + chunk_size] for i in range(0, len(data_list), chunk_size)]
Review Comment:
You are right, let me fix it.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]