Github user ueshin commented on a diff in the pull request: https://github.com/apache/spark/pull/13599#discussion_r160088202 --- Diff: launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java --- @@ -299,20 +301,39 @@ // 4. environment variable PYSPARK_PYTHON // 5. python List<String> pyargs = new ArrayList<>(); - pyargs.add(firstNonEmpty(conf.get(SparkLauncher.PYSPARK_DRIVER_PYTHON), - conf.get(SparkLauncher.PYSPARK_PYTHON), - System.getenv("PYSPARK_DRIVER_PYTHON"), - System.getenv("PYSPARK_PYTHON"), - "python")); - String pyOpts = System.getenv("PYSPARK_DRIVER_PYTHON_OPTS"); - if (conf.containsKey(SparkLauncher.PYSPARK_PYTHON)) { - // pass conf spark.pyspark.python to python by environment variable. - env.put("PYSPARK_PYTHON", conf.get(SparkLauncher.PYSPARK_PYTHON)); - } - if (!isEmpty(pyOpts)) { - pyargs.addAll(parseOptionString(pyOpts)); - } + String pythonExec = firstNonEmpty(conf.get(SparkLauncher.PYSPARK_DRIVER_PYTHON), + conf.get(SparkLauncher.PYSPARK_PYTHON), + System.getenv("PYSPARK_DRIVER_PYTHON"), + System.getenv("PYSPARK_PYTHON"), + "python"); + if (conf.getOrDefault("spark.pyspark.virtualenv.enabled", "false").equals("true")) { + try { + // setup virtualenv in launcher when virtualenv is enabled in pyspark shell + Class virtualEnvClazz = getClass().forName("org.apache.spark.api.python.VirtualEnvFactory"); + Object virtualEnv = virtualEnvClazz.getConstructor(String.class, Map.class, Boolean.class) + .newInstance(pythonExec, conf, true); + Method virtualEnvMethod = virtualEnv.getClass().getMethod("setupVirtualEnv"); + pythonExec = (String) virtualEnvMethod.invoke(virtualEnv); + pyargs.add(pythonExec); + } catch (Exception e) { + throw new IOException(e); + } + } else { + pyargs.add(firstNonEmpty(conf.get(SparkLauncher.PYSPARK_DRIVER_PYTHON), + conf.get(SparkLauncher.PYSPARK_PYTHON), + System.getenv("PYSPARK_DRIVER_PYTHON"), + System.getenv("PYSPARK_PYTHON"), + "python")); --- End diff -- We can simplify as 
`pyargs.add(pythonExec);`?
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org