pan3793 commented on code in PR #5196:
URL: https://github.com/apache/kyuubi/pull/5196#discussion_r1332942832
##########
kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala:
##########
@@ -102,6 +102,25 @@ class SparkProcessBuilder(
}
}
+  private lazy val sparkCoreScalaVersion: String = {
+    Paths.get(sparkHome, "jars").toFile
+      .list((_, name) => name.matches("^spark-core_.*\\.jar$"))
+      .map { p => p.substring(p.indexOf("_") + 1, p.lastIndexOf("-")) }
+      .head
+  }
+
+  override protected def engineScalaBinaryVersion: String =
+    StringUtils.defaultIfBlank(System.getenv("SPARK_SCALA_VERSION"), sparkCoreScalaVersion)
+
+  override protected lazy val engineHomeDirFilter: FileFilter = (file: File) => {
+    val pattern = if (SemanticVersion(SCALA_COMPILE_VERSION) >= "2.13") {
+      "^spark-\\d+\\.\\d+\\.\\d+-bin-hadoop\\d(\\.\\d+)?+-scala\\d+(\\.\\d+)?$"
##########
Review Comment:
I think it's OK, since we only expect the official Spark binary release to be used here; users who want to integrate with a vendor's Spark distribution can set SPARK_HOME explicitly.
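
For illustration only (not part of the PR), a minimal standalone sketch that runs the Scala 2.13 branch of the directory pattern from the diff above against a few SPARK_HOME directory names. The first two names are real official Spark release directory names; the vendor-style name is hypothetical:

object EngineHomePatternCheck {
  def main(args: Array[String]): Unit = {
    // Same pattern as the Scala 2.13 branch of engineHomeDirFilter in the diff above.
    val scala213Pattern =
      "^spark-\\d+\\.\\d+\\.\\d+-bin-hadoop\\d(\\.\\d+)?+-scala\\d+(\\.\\d+)?$"

    val candidates = Seq(
      "spark-3.5.0-bin-hadoop3-scala2.13",   // official Scala 2.13 release name: matches
      "spark-3.4.1-bin-hadoop3",             // official default (Scala 2.12) release: no -scala suffix, rejected by this pattern
      "spark-3.3.2-vendor1.2.3-bin-hadoop3"  // hypothetical vendor build name: rejected, so SPARK_HOME must be set explicitly
    )

    candidates.foreach { name =>
      println(s"$name -> ${name.matches(scala213Pattern)}")
    }
  }
}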