Github user liancheng commented on a diff in the pull request:

    https://github.com/apache/spark/pull/5730#discussion_r29211457
  
    --- Diff: 
sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala
 ---
    @@ -73,15 +94,146 @@ object HiveThriftServer2 extends Logging {
         }
       }
     
    +  private[thriftserver] class SessionInfo(
    +      val sessionId: String,
    +      val startTimestamp: Long,
    +      val ip: String,
    +      val userName: String) {
    +    var finishTimestamp: Long = 0L
    +    var totalExecute: Int = 0
    +    def totalTime: Long = {
    +      if (finishTimestamp == 0L) {
    +        System.currentTimeMillis() - startTimestamp
    +      } else {
    +        finishTimestamp - startTimestamp
    +      }
    +    }
    +  }
    +
    +  private[thriftserver] object ExecutionState extends Enumeration {
    +    val STARTED, COMPILED, FAILED, FINISHED = Value
    +    type ExecutionState = Value
    +  }
    +
    +  private[thriftserver] class ExecutionInfo(
    +      val statement: String,
    +      val sessionId: String,
    +      val startTimestamp: Long,
    +      val userName: String) {
    +    var finishTimestamp: Long = 0L
    +    var executePlan: String = ""
    +    var detail: String = ""
    +    var state: ExecutionState.Value = ExecutionState.STARTED
    +    val jobId: ArrayBuffer[String] = ArrayBuffer[String]()
    +    var groupId: String = ""
    +    def totalTime: Long = {
    +      if (finishTimestamp == 0L) {
    +        System.currentTimeMillis - startTimestamp
    +      } else {
    +        finishTimestamp - startTimestamp
    +      }
    +    }
    +  }
    +
    +
       /**
     * An inner SparkListener called in sc.stop to clean up the 
HiveThriftServer2
        */
    -  class HiveThriftServer2Listener(val server: HiveServer2) extends 
SparkListener {
    +  class HiveThriftServer2Listener(
    +      val server: HiveServer2,
    +      val conf: SparkConf) extends SparkListener {
    +
         override def onApplicationEnd(applicationEnd: 
SparkListenerApplicationEnd): Unit = {
           server.stop()
         }
    -  }
     
    +    val sessionList = new mutable.HashMap[String, SessionInfo]
    +    val executeList = new mutable.HashMap[String, ExecutionInfo]
    +    val retainedStatements =
    +      conf.getInt("spark.thriftserver.ui.retainedStatements", 200)
    +    val retainedSessions =
    +      conf.getInt("spark.thriftserver.ui.retainedSessions", 200)
    +    var totalRunning = 0
    +
    +    override def onJobStart(jobStart: SparkListenerJobStart): Unit = {
    +      val jobGroup = for (
    +        props <- Option(jobStart.properties);
    +        statement <- 
Option(props.getProperty(SparkContext.SPARK_JOB_GROUP_ID))
    +      ) yield statement
    +
    +      jobGroup.map( groupId => {
    +        val ret = executeList.find( _ match {
    +          case (id: String, info: ExecutionInfo) => info.groupId == groupId
    +        })
    +        if (ret.isDefined) {
    +          ret.get._2.jobId += jobStart.jobId.toString
    +          ret.get._2.groupId = groupId
    +        }
    +      })
    --- End diff --
    
    Please always use
    
    ```scala
    collection.map { element =>
      ...
    }
    ```
    
    instead of
    
    ```scala
    collection.map(element => {
      ...
    })
    ```


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to