yaooqinn commented on code in PR #2418:
URL: https://github.com/apache/incubator-kyuubi/pull/2418#discussion_r861492609
##########
kyuubi-server/src/main/scala/org/apache/kyuubi/engine/flink/FlinkProcessBuilder.scala:
##########
@@ -37,43 +41,75 @@ class FlinkProcessBuilder(
val extraEngineLog: Option[OperationLog] = None)
extends ProcBuilder with Logging {
- override protected def executable: String = {
- val flinkEngineHomeOpt = env.get("FLINK_ENGINE_HOME").orElse {
- val cwd = Utils.getCodeSourceLocation(getClass)
- .split("kyuubi-server")
- assert(cwd.length > 1)
- Option(
- Paths.get(cwd.head)
- .resolve("externals")
- .resolve("kyuubi-flink-sql-engine")
- .toFile)
- .map(_.getAbsolutePath)
- }
-
- flinkEngineHomeOpt.map { dir =>
- Paths.get(dir, "bin",
FLINK_ENGINE_BINARY_FILE).toAbsolutePath.toFile.getCanonicalPath
- } getOrElse {
- throw KyuubiSQLException("FLINK_ENGINE_HOME is not set! " +
- "For more detail information on installing and configuring Flink,
please visit " +
-
"https://kyuubi.apache.org/docs/latest/deployment/settings.html#environments")
- }
- }
-
override protected def module: String = "kyuubi-flink-sql-engine"
override protected def mainClass: String =
"org.apache.kyuubi.engine.flink.FlinkSQLEngine"
override protected def childProcEnv: Map[String, String] = conf.getEnvs +
("FLINK_HOME" -> FLINK_HOME) +
("FLINK_CONF_DIR" -> s"$FLINK_HOME/conf") +
- ("FLINK_SQL_ENGINE_JAR" -> mainResource.get) +
- ("FLINK_SQL_ENGINE_DYNAMIC_ARGS" ->
- conf.getAll.filter { case (k, _) =>
- k.startsWith("kyuubi.") || k.startsWith("flink.") ||
- k.startsWith("hadoop.") || k.startsWith("yarn.")
- }.map { case (k, v) => s"-D$k=$v" }.mkString(" "))
-
- override protected def commands: Array[String] = Array(executable)
+ ("_FLINK_HOME_DETERMINED" -> s"1")
+
+ override protected def commands: Array[String] = {
+ val buffer = new ArrayBuffer[String]()
+ buffer += s"bash"
+ buffer += s"-c"
+ val commandStr = new StringBuilder()
+
+ commandStr.append(s"source $FLINK_HOME${File.separator}bin" +
+ s"${File.separator}config.sh && $executable")
+
+ // TODO: How shall we deal with proxyUser,
+ // user.name
+ // kyuubi.session.user
+ // or just leave it, because we can handle it at operation layer
+ commandStr.append(s" -D$KYUUBI_SESSION_USER_KEY=$proxyUser ")
+
+ // TODO: add Kyuubi.engineEnv.FLINK_ENGINE_MEMORY or
kyuubi.engine.flink.memory to configure
+ // -Xmx5g
+ // java options
+ val confStr = conf.getAll.filter { case (k, _) =>
Review Comment:
Why do we not respect other configurations, e.g. dfs.xx, hive.xx, io.xx, etc.? They
are Hadoop-related configurations as well. @SteNicholas @deadwind4
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]