Github user felixcheung commented on a diff in the pull request:
https://github.com/apache/spark/pull/21977#discussion_r207726897
--- Diff: python/pyspark/worker.py ---
@@ -259,6 +260,26 @@ def main(infile, outfile):
"PYSPARK_DRIVER_PYTHON are correctly set.") %
("%d.%d" % sys.version_info[:2], version))
+ # set up memory limits
+ memory_limit_mb = int(os.environ.get('PYSPARK_EXECUTOR_MEMORY_MB',
"-1"))
+ total_memory = resource.RLIMIT_AS
+ try:
+ (total_memory_limit, max_total_memory) =
resource.getrlimit(total_memory)
+ msg = "Current mem: {0} of max
{1}\n".format(total_memory_limit, max_total_memory)
+ print(msg, file=sys.stderr)
+
+ if memory_limit_mb > 0 and total_memory_limit ==
resource.RLIM_INFINITY:
+ # convert to bytes
+ total_memory_limit = memory_limit_mb * 1024 * 1024
+
+ msg = "Setting mem to {0} of max
{1}\n".format(total_memory_limit, max_total_memory)
+ print(msg, file=sys.stderr)
+ resource.setrlimit(total_memory, (total_memory_limit,
total_memory_limit))
+
+ except (resource.error, OSError, ValueError) as e:
+ # not all systems support resource limits, so warn instead of
failing
+ sys.stderr.write("WARN: Failed to set memory limit:
{0}\n".format(e))
--- End diff ---
ditto here
`print(msg, file=sys.stderr)`?
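
If it helps, a self-contained sketch of what that suggestion might look like for the except block (assuming the intent is only to mirror the print calls above; print supplies the trailing newline, so the explicit "\n" can be dropped):

```python
import sys
import resource

try:
    # same structure as the diff above: query the current address-space limit
    (total_memory_limit, max_total_memory) = resource.getrlimit(resource.RLIMIT_AS)
    print("Current mem: {0} of max {1}".format(total_memory_limit, max_total_memory),
          file=sys.stderr)
except (resource.error, OSError, ValueError) as e:
    # warn via print(..., file=sys.stderr) instead of sys.stderr.write
    print("WARN: Failed to set memory limit: {0}".format(e), file=sys.stderr)
```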
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]