HyukjinKwon commented on a change in pull request #30735:
URL: https://github.com/apache/spark/pull/30735#discussion_r540934954
##########
File path:
resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/DepsTestsSuite.scala
##########
@@ -175,40 +175,81 @@ private[spark] trait DepsTestsSuite { k8sSuite:
KubernetesSuite =>
}
}
+ test(
+ "SPARK-33748: Launcher python client respecting PYSPARK_PYTHON",
k8sTestTag, MinikubeTag) {
+ val fileName = Utils.createTempFile(
+ """
+ |#!/usr/bin/env bash
+ |export IS_CUSTOM_PYTHON=1
+ |python3 "$@"
+ """.stripMargin, HOST_PATH)
+ Utils.createTarGzFile(s"$HOST_PATH/$fileName", s"$HOST_PATH/$fileName.tgz")
+ sparkAppConf.set("spark.archives", s"$HOST_PATH/$fileName.tgz#test_env")
+ val pySparkFiles =
Utils.getTestFileAbsolutePath("python_executable_check.py", sparkHomeDir)
+ testPython(pySparkFiles,
+ Seq(
+ s"PYSPARK_PYTHON: ./test_env/$fileName",
+ "PYSPARK_DRIVER_PYTHON: None",
+ "Custom Python used on executor: True",
+ "Custom Python used on driver: True"),
+ env = Map("PYSPARK_PYTHON" -> s"./test_env/$fileName"))
+ }
+
+ test(
+ "SPARK-33748: Launcher python client respecting " +
+ "PYSPARK_PYTHON and PYSPARK_DRIVER_PYTHON", k8sTestTag, MinikubeTag) {
+ val fileName = Utils.createTempFile(
+ """
+ |#!/usr/bin/env bash
+ |export IS_CUSTOM_PYTHON=1
+ |python3 "$@"
+ """.stripMargin, HOST_PATH)
+ Utils.createTarGzFile(s"$HOST_PATH/$fileName", s"$HOST_PATH/$fileName.tgz")
+ sparkAppConf.set("spark.archives", s"$HOST_PATH/$fileName.tgz#test_env")
+ val pySparkFiles =
Utils.getTestFileAbsolutePath("python_executable_check.py", sparkHomeDir)
+ testPython(pySparkFiles,
+ Seq(
+ s"PYSPARK_PYTHON: ./test_env/$fileName",
+ "PYSPARK_DRIVER_PYTHON: python3",
+ "Custom Python used on executor: True",
+ "Custom Python used on driver: False"),
+ env = Map(
+ "PYSPARK_PYTHON" -> s"./test_env/$fileName", "PYSPARK_DRIVER_PYTHON"
-> "python3"))
+ }
+
test("Launcher python client dependencies using a zip file", k8sTestTag,
MinikubeTag) {
+ val pySparkFiles = Utils.getTestFileAbsolutePath("pyfiles.py",
sparkHomeDir)
val inDepsFile = Utils.getTestFileAbsolutePath("py_container_checks.py",
sparkHomeDir)
val outDepsFile = s"${inDepsFile.substring(0,
inDepsFile.lastIndexOf("."))}.zip"
Utils.createZipFile(inDepsFile, outDepsFile)
- testPythonDeps(outDepsFile)
+ testPython(
+ pySparkFiles,
+ Seq(
+ "Python runtime version check is: True",
+ "Python environment version check is: True",
+ "Python runtime version check for executor is: True"),
+ Some(outDepsFile))
}
- private def testPythonDeps(depsFile: String): Unit = {
- tryDepsTest({
- val pySparkFiles = Utils.getTestFileAbsolutePath("pyfiles.py",
sparkHomeDir)
+ private def testPython(
+ pySparkFiles: String,
+ expectedDriverLogs: Seq[String],
+ depsFile: Option[String] = None,
+ env: Map[String, String] = Map.empty[String, String]): Unit = {
+ tryDepsTest {
setPythonSparkConfProperties(sparkAppConf)
runSparkApplicationAndVerifyCompletion(
appResource = pySparkFiles,
mainClass = "",
- expectedDriverLogOnCompletion = Seq(
- "Python runtime version check is: True",
- "Python environment version check is: True",
- "Python runtime version check for executor is: True"),
+ expectedDriverLogOnCompletion = expectedDriverLogs,
appArgs = Array("python3"),
driverPodChecker = doBasicDriverPyPodCheck,
executorPodChecker = doBasicExecutorPyPodCheck,
appLocator = appLocator,
isJVM = false,
- pyFiles = Option(depsFile)) })
- }
-
- private def extractS3Key(data: String, key: String): String = {
Review comment:
`extractS3Key` is not used either.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]