Repository: spark Updated Branches: refs/heads/branch-1.0 ea1a455a7 -> fb61928cf
SPARK-2181:The keys for sorting the columns of Executor page in SparkUI are incorrect Author: witgo <[email protected]> Closes #1135 from witgo/SPARK-2181 and squashes the following commits: 39dad90 [witgo] The keys for sorting the columns of Executor page in SparkUI are incorrect (cherry picked from commit 18f29b96c7e0948f5f504e522e5aa8a8d1ab163e) Signed-off-by: Patrick Wendell <[email protected]> Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/fb61928c Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/fb61928c Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/fb61928c Branch: refs/heads/branch-1.0 Commit: fb61928cfb16be79effcdb48ad33fcb97d6cc8c8 Parents: ea1a455 Author: witgo <[email protected]> Authored: Thu Jun 26 21:59:21 2014 -0700 Committer: Patrick Wendell <[email protected]> Committed: Thu Jun 26 21:59:37 2014 -0700 ---------------------------------------------------------------------- .../scala/org/apache/spark/ui/exec/ExecutorsPage.scala | 8 +++++--- .../scala/org/apache/spark/ui/jobs/ExecutorTable.scala | 12 +++++++----- .../scala/org/apache/spark/ui/storage/StoragePage.scala | 8 +++++--- 3 files changed, 17 insertions(+), 11 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/spark/blob/fb61928c/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala ---------------------------------------------------------------------- diff --git a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala index 6cb43c0..2d8c3b9 100644 --- a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala +++ b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala @@ -79,6 +79,7 @@ private[ui] class ExecutorsPage(parent: ExecutorsTab) extends WebUIPage("") { val maximumMemory = values("Maximum Memory") val memoryUsed = 
values("Memory Used") val diskUsed = values("Disk Used") + // scalastyle:off <tr> <td>{values("Executor ID")}</td> <td>{values("Address")}</td> @@ -94,10 +95,11 @@ private[ui] class ExecutorsPage(parent: ExecutorsTab) extends WebUIPage("") { <td>{values("Failed Tasks")}</td> <td>{values("Complete Tasks")}</td> <td>{values("Total Tasks")}</td> - <td>{Utils.msDurationToString(values("Task Time").toLong)}</td> - <td>{Utils.bytesToString(values("Shuffle Read").toLong)}</td> - <td>{Utils.bytesToString(values("Shuffle Write").toLong)}</td> + <td sorttable_customkey={values("Task Time")}>{Utils.msDurationToString(values("Task Time").toLong)}</td> + <td sorttable_customkey={values("Shuffle Read")}>{Utils.bytesToString(values("Shuffle Read").toLong)}</td> + <td sorttable_customkey={values("Shuffle Write")} >{Utils.bytesToString(values("Shuffle Write").toLong)}</td> </tr> + // scalastyle:on } /** Represent an executor's info as a map given a storage status index */ http://git-wip-us.apache.org/repos/asf/spark/blob/fb61928c/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala ---------------------------------------------------------------------- diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala b/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala index c83e196..add0e98 100644 --- a/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala +++ b/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala @@ -67,18 +67,20 @@ private[ui] class ExecutorTable(stageId: Int, parent: JobProgressTab) { executorIdToSummary match { case Some(x) => x.toSeq.sortBy(_._1).map { case (k, v) => { + // scalastyle:off <tr> <td>{k}</td> <td>{executorIdToAddress.getOrElse(k, "CANNOT FIND ADDRESS")}</td> - <td>{UIUtils.formatDuration(v.taskTime)}</td> + <td sorttable_customkey={v.taskTime.toString}>{UIUtils.formatDuration(v.taskTime)}</td> <td>{v.failedTasks + v.succeededTasks}</td> <td>{v.failedTasks}</td> 
<td>{v.succeededTasks}</td> - <td>{Utils.bytesToString(v.shuffleRead)}</td> - <td>{Utils.bytesToString(v.shuffleWrite)}</td> - <td>{Utils.bytesToString(v.memoryBytesSpilled)}</td> - <td>{Utils.bytesToString(v.diskBytesSpilled)}</td> + <td sorttable_customkey={v.shuffleRead.toString}>{Utils.bytesToString(v.shuffleRead)}</td> + <td sorttable_customkey={v.shuffleWrite.toString}>{Utils.bytesToString(v.shuffleWrite)}</td> + <td sorttable_customkey={v.memoryBytesSpilled.toString} >{Utils.bytesToString(v.memoryBytesSpilled)}</td> + <td sorttable_customkey={v.diskBytesSpilled.toString} >{Utils.bytesToString(v.diskBytesSpilled)}</td> </tr> + // scalastyle:on } } case _ => Seq[Node]() http://git-wip-us.apache.org/repos/asf/spark/blob/fb61928c/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala ---------------------------------------------------------------------- diff --git a/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala b/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala index b66edd9..9813d93 100644 --- a/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala +++ b/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala @@ -49,6 +49,7 @@ private[ui] class StoragePage(parent: StorageTab) extends WebUIPage("") { /** Render an HTML row representing an RDD */ private def rddRow(rdd: RDDInfo): Seq[Node] = { + // scalastyle:off <tr> <td> <a href={"%s/storage/rdd?id=%s".format(UIUtils.prependBaseUri(basePath), rdd.id)}> @@ -59,9 +60,10 @@ private[ui] class StoragePage(parent: StorageTab) extends WebUIPage("") { </td> <td>{rdd.numCachedPartitions}</td> <td>{"%.0f%%".format(rdd.numCachedPartitions * 100.0 / rdd.numPartitions)}</td> - <td>{Utils.bytesToString(rdd.memSize)}</td> - <td>{Utils.bytesToString(rdd.tachyonSize)}</td> - <td>{Utils.bytesToString(rdd.diskSize)}</td> + <td sorttable_customkey={rdd.memSize.toString}>{Utils.bytesToString(rdd.memSize)}</td> + <td 
sorttable_customkey={rdd.tachyonSize.toString}>{Utils.bytesToString(rdd.tachyonSize)}</td> + <td sorttable_customkey={rdd.diskSize.toString} >{Utils.bytesToString(rdd.diskSize)}</td> </tr> + // scalastyle:on } }
