Github user vanzin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20474#discussion_r165711425
  
    --- Diff: 
core/src/main/scala/org/apache/spark/status/api/v1/OneApplicationResource.scala 
---
    @@ -51,6 +52,43 @@ private[v1] class AbstractApplicationResource extends 
BaseAppResource {
       @Path("executors")
       def executorList(): Seq[ExecutorSummary] = 
withUI(_.store.executorList(true))
     
    +  @GET
    +  @Path("executors/{executorId}/threads")
    +  def threadDump(@PathParam("executorId") executorId: String): Response = 
withUI { ui =>
    +    uiRoot.executorThreadDumpsNotAvailableError().getOrElse {
    +      val safeExecutorId =
    +        Option(UIUtils.stripXSS(executorId)).map { executorId =>
    +          UIUtils.decodeURLParameter(executorId)
    +        }.getOrElse {
    +          throw new IllegalArgumentException(s"Missing executorId 
parameter")
    +        }
    +
    +      def isAllDigits(x: String) = x.forall(Character.isDigit)
    +
    +      if (executorId != SparkContext.DRIVER_IDENTIFIER && 
!isAllDigits(executorId)) {
    +        Response.serverError()
    +          .entity(s"Invalid executorId: neither 
'${SparkContext.DRIVER_IDENTIFIER}' nor number.")
    +          .status(Response.Status.BAD_REQUEST)
    +          .build()
    +      } else {
    +        if 
(ui.store.asOption(ui.store.executorSummary(executorId)).exists(!_.isActive)) {
    +          Response.serverError()
    +            .entity("Executor is already dead.")
    +            .status(Response.Status.BAD_REQUEST)
    +            .build()
    +        } else {
    +          ui.sc.flatMap(_.getExecutorThreadDump(safeExecutorId))
    --- End diff --
    
    This is still exposing an internal type through the public REST API.
    
    You need to create an API type for it and translate between the two, so 
that the type returned here is public and checked by mima. Or you could even 
just move that type into the api package and make it public.


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to