bowenliang123 commented on code in PR #5662:
URL: https://github.com/apache/kyuubi/pull/5662#discussion_r1389141951
##########
externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala:
##########
@@ -141,6 +156,42 @@ case class SparkSQLEngine(spark: SparkSession) extends
Serverable("SparkSQLEngin
}
}
+ private[kyuubi] def startMetricsReporter(): Unit = {
+ val interval = conf.get(ENGINE_REPORT_INTERVAL)
+ val engineSpace = conf.get(HA_NAMESPACE)
+ val statusTracker = spark.sparkContext.statusTracker
+ val metricsSpace = s"/metrics$engineSpace"
+ val report: Runnable = () => {
+ if (!shutdown.get) {
+ val openSessionCount =
backendService.sessionManager.getOpenSessionCount
+ val activeTask = statusTracker.getActiveStageIds()
+ .flatMap { stage =>
+ statusTracker.getStageInfo(stage).map(_.numActiveTasks)
+ }.sum
+ val engineMetrics = Map(
+ "openSessionCount" -> openSessionCount,
+ "activeTask" -> activeTask,
+ "poolId" -> engineSpace.split("-").last)
+ info(s"Spark engine has $openSessionCount open sessions and
$activeTask active tasks.")
+ DiscoveryClientProvider.withDiscoveryClient(conf) { client =>
+ if (client.pathNonExists(metricsSpace)) {
+ client.create(metricsSpace, "PERSISTENT")
+ }
+ client.setData(
+ s"/metrics$engineSpace",
+ engineMetrics.map { case (k, v) => s"$k=$v"
}.mkString(";").getBytes)
Review Comment:
Is there any convention for the values stored in ZooKeeper? How about using JSON
serialization, for readability and ease of parsing?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]