Github user holdenk commented on a diff in the pull request: https://github.com/apache/spark/pull/13599#discussion_r160069473 --- Diff: python/pyspark/context.py --- @@ -189,6 +190,21 @@ def _do_init(self, master, appName, sparkHome, pyFiles, environment, batchSize, self._jsc.sc().register(self._javaAccumulator) self.pythonExec = os.environ.get("PYSPARK_PYTHON", 'python') + if self._conf.get("spark.pyspark.virtualenv.enabled") == "true": + self.pythonExec = self._conf.get("spark.pyspark.python", self.pythonExec) + requirements = self._conf.get("spark.pyspark.virtualenv.requirements") + virtualEnvBinPath = self._conf.get("spark.pyspark.virtualenv.bin.path") + virtualEnvType = self._conf.get("spark.pyspark.virtualenv.type", "native") + python_version = self._conf.get("spark.pyspark.virtualenv.python_version") + + if virtualEnvType == "conda" and (requirements is None) and python_version is None: --- End diff -- Could we just assume it's the same python version as the version which is running context.py?
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org