Github user tgravescs commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21688#discussion_r232832492
  
    --- Diff: core/src/main/scala/org/apache/spark/status/api/v1/StagesResource.scala ---
    @@ -162,26 +165,29 @@ private[v1] class StagesResource extends BaseAppResource {
       // Performs pagination on the server side
       def doPagination(queryParameters: MultivaluedMap[String, String], stageId: Int,
         stageAttemptId: Int, isSearch: Boolean, totalRecords: Int): Seq[TaskData] = {
    -    val queryParams = queryParameters.keySet()
         var columnNameToSort = queryParameters.getFirst("columnNameToSort")
    +    // Sorting on Logs column will default to Index column sort
         if (columnNameToSort.equalsIgnoreCase("Logs")) {
           columnNameToSort = "Index"
         }
         val isAscendingStr = queryParameters.getFirst("order[0][dir]")
         var pageStartIndex = 0
         var pageLength = totalRecords
    +    // We fetch only the desired rows up to the specified page length for all cases except when a
    +    // search query is present; in that case, we need to fetch all the rows to perform the search
    +    // on the entire table
         if (!isSearch) {
           pageStartIndex = queryParameters.getFirst("start").toInt
           pageLength = queryParameters.getFirst("length").toInt
         }
    -    return withUI(_.store.taskList(stageId, stageAttemptId, pageStartIndex, pageLength,
    +    withUI(_.store.taskList(stageId, stageAttemptId, pageStartIndex, pageLength,
           indexName(columnNameToSort), isAscendingStr.equalsIgnoreCase("asc")))
       }
     
       // Filters task list based on search parameter
       def filterTaskList(
         taskDataList: Seq[TaskData],
    -    searchValue: String): Seq[TaskData] = {
    +    searchValue: String): Option[Seq[TaskData]] = {
    --- End diff ---
    
    Sorry, my comment was confusing; don't make the return value an Option.
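    For illustration only (not part of the PR), here is a minimal, self-contained Scala sketch of what keeping the
    plain Seq[TaskData] return type could look like. The simplified TaskData case class and the containsValue helper
    below are hypothetical stand-ins for the real Spark types, and the rationale (an empty Seq already expresses
    "no matches") is an editorial assumption, not something stated by the reviewer:

        // Minimal sketch, NOT the actual Spark implementation: TaskData and
        // containsValue are simplified stand-ins for illustration only.
        object FilterTaskListSketch {

          // Simplified stand-in for org.apache.spark.status.api.v1.TaskData
          case class TaskData(taskId: Long, host: String, status: String)

          // Hypothetical helper: does any field of the task match the search value?
          private def containsValue(task: TaskData, searchValue: String): Boolean = {
            val needle = searchValue.toLowerCase
            Seq(task.taskId.toString, task.host, task.status)
              .exists(_.toLowerCase.contains(needle))
          }

          // Keeps the plain Seq[TaskData] return type, as requested above; an empty
          // Seq already conveys "no matching tasks", so callers never unwrap an Option.
          def filterTaskList(taskDataList: Seq[TaskData], searchValue: String): Seq[TaskData] =
            taskDataList.filter(containsValue(_, searchValue))

          def main(args: Array[String]): Unit = {
            val tasks = Seq(
              TaskData(0L, "host-a", "SUCCESS"),
              TaskData(1L, "host-b", "FAILED"))
            // Prints: List(TaskData(1,host-b,FAILED))
            println(filterTaskList(tasks, "failed"))
          }
        }

    With this shape, a caller could slice the filtered Seq for the requested page directly, without pattern
    matching on an Option.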

