Github user rdblue commented on a diff in the pull request:
https://github.com/apache/spark/pull/21977#discussion_r213777162
--- Diff: core/src/main/scala/org/apache/spark/api/python/PythonRunner.scala ---
@@ -62,14 +63,20 @@ private[spark] object PythonEvalType {
  */
 private[spark] abstract class BasePythonRunner[IN, OUT](
     funcs: Seq[ChainedPythonFunctions],
-    bufferSize: Int,
-    reuseWorker: Boolean,
     evalType: Int,
     argOffsets: Array[Array[Int]])
   extends Logging {
   require(funcs.length == argOffsets.length, "argOffsets should have the same length as funcs")
+  private val conf = SparkEnv.get.conf
+  private val bufferSize = conf.getInt("spark.buffer.size", 65536)
+  private val reuseWorker = conf.getBoolean("spark.python.worker.reuse", true)
+  // each python worker gets an equal part of the allocation. the worker pool will grow to the
+  // number of concurrent tasks, which is determined by the number of cores in this executor.
+  private val memoryMb = conf.get(PYSPARK_EXECUTOR_MEMORY)
+    .map(_ / conf.getInt("spark.executor.cores", 1))
--- End diff --
@HyukjinKwon, sorry, but it looks like this was merged before I could push a commit to update it.
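
[Editor's note: the memoryMb line in the diff above splits the total PySpark memory allotment evenly across the executor's cores, since the worker pool grows to one concurrent task per core. The following is a minimal standalone sketch of that arithmetic, not Spark's actual implementation. SparkConf, getInt, getOption, and set are real Spark API; the key "spark.executor.pyspark.memory" is, to the best of our knowledge, the setting behind PYSPARK_EXECUTOR_MEMORY, while WorkerMemorySketch, perWorkerMemoryMb, and the plain-MiB parsing are hypothetical simplifications for illustration.]

import org.apache.spark.SparkConf

object WorkerMemorySketch {
  // Each Python worker gets an equal share of the PySpark memory allotment,
  // because the worker pool grows to one concurrent task per executor core.
  def perWorkerMemoryMb(conf: SparkConf): Option[Long] = {
    val cores = conf.getInt("spark.executor.cores", 1)
    // Illustrative simplification: treat the value as a plain MiB count.
    // Real Spark reads PYSPARK_EXECUTOR_MEMORY as a size config with units.
    conf.getOption("spark.executor.pyspark.memory").map(_.toLong / cores)
  }

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf(loadDefaults = false)
      .set("spark.executor.pyspark.memory", "4096") // 4 GiB total, as MiB
      .set("spark.executor.cores", "4")
    println(perWorkerMemoryMb(conf)) // prints Some(1024): 1 GiB per worker
  }
}

[Dividing by the core count rather than a live worker count gives a static, conservative per-worker bound: even if every core runs a Python worker at once, the shares sum to the configured total.]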
---