This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.5 by this push:
     new df02aa29ddc [MINOR][UI] Increasing the number of significant digits for Fraction Cached of RDD
df02aa29ddc is described below

commit df02aa29ddc9b03e4386128f26bfc98b869485e5
Author: Kent Yao <y...@apache.org>
AuthorDate: Tue Aug 8 08:10:09 2023 +0900

    [MINOR][UI] Increasing the number of significant digits for Fraction Cached of RDD
    
    ### What changes were proposed in this pull request?
    
    This PR is a minor improvement that increases the number of significant digits shown for the Fraction Cached of an RDD on the Storage tab.
    
    ### Why are the changes needed?
    
    The Fraction Cached percentage is currently rounded to a whole number, which hides small differences; showing two decimal places improves its accuracy and precision.
    
    
![image](https://github.com/apache/spark/assets/8326978/7106352c-b806-4953-8938-c4cba8ea1191)
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, the Fraction Cached column on the Storage page now shows two decimal places instead of none.
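    
    As a concrete illustration (a sketch for this description only, not part of the patch), using the partition counts from the updated test, where 56 of 1000 partitions are cached:
    
    ```scala
    val numCachedPartitions = 56
    val numPartitions = 1000
    val fraction = numCachedPartitions * 100.0 / numPartitions  // 5.6
    
    println("%.0f%%".format(fraction))  // old format string: renders as "6%"
    println("%.2f%%".format(fraction))  // new format string: renders as "5.60%"
    ```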
    
    ### How was this patch tested?
    
    locally verified
    
    Closes #42373 from yaooqinn/uiminor.
    
    Authored-by: Kent Yao <y...@apache.org>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
    (cherry picked from commit f47a2560e6e39ba8eac51a76290614b2fba4d65a)
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 .../scala/org/apache/spark/ui/storage/StoragePage.scala   |  2 +-
 .../org/apache/spark/ui/storage/StoragePageSuite.scala    | 15 ++++++++-------
 2 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala b/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
index c1708c320c5..72662267365 100644
--- a/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/storage/StoragePage.scala
@@ -98,7 +98,7 @@ private[ui] class StoragePage(parent: SparkUITab, store: AppStatusStore) extends
       <td>{rdd.storageLevel}
       </td>
       <td>{rdd.numCachedPartitions.toString}</td>
-      <td>{"%.0f%%".format(rdd.numCachedPartitions * 100.0 / 
rdd.numPartitions)}</td>
+      <td>{"%.2f%%".format(rdd.numCachedPartitions * 100.0 / 
rdd.numPartitions)}</td>
       <td 
sorttable_customkey={rdd.memoryUsed.toString}>{Utils.bytesToString(rdd.memoryUsed)}</td>
       <td sorttable_customkey={rdd.diskUsed.toString} 
>{Utils.bytesToString(rdd.diskUsed)}</td>
     </tr>
diff --git a/core/src/test/scala/org/apache/spark/ui/storage/StoragePageSuite.scala b/core/src/test/scala/org/apache/spark/ui/storage/StoragePageSuite.scala
index 718c6856cb3..d1e25bf8a23 100644
--- a/core/src/test/scala/org/apache/spark/ui/storage/StoragePageSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/storage/StoragePageSuite.scala
@@ -48,8 +48,8 @@ class StoragePageSuite extends SparkFunSuite {
 
     val rdd2 = new RDDStorageInfo(2,
       "rdd2",
-      10,
-      5,
+      1000,
+      56,
       StorageLevel.DISK_ONLY.description,
       0L,
       200L,
@@ -58,8 +58,8 @@ class StoragePageSuite extends SparkFunSuite {
 
     val rdd3 = new RDDStorageInfo(3,
       "rdd3",
-      10,
-      10,
+      1000,
+      103,
       StorageLevel.MEMORY_AND_DISK_SER.description,
       400L,
       500L,
@@ -94,19 +94,20 @@ class StoragePageSuite extends SparkFunSuite {
 
     assert((xmlNodes \\ "tr").size === 3)
     assert(((xmlNodes \\ "tr")(0) \\ "td").map(_.text.trim) ===
-      Seq("1", "rdd1", "Memory Deserialized 1x Replicated", "10", "100%", 
"100.0 B", "0.0 B"))
+      Seq("1", "rdd1", "Memory Deserialized 1x Replicated", "10", "100.00%", 
"100.0 B", "0.0 B"))
     // Check the url
     assert(((xmlNodes \\ "tr")(0) \\ "td" \ "a")(0).attribute("href").map(_.text) ===
       Some("http://localhost:4040/storage/rdd/?id=1"))
 
     assert(((xmlNodes \\ "tr")(1) \\ "td").map(_.text.trim) ===
-      Seq("2", "rdd2", "Disk Serialized 1x Replicated", "5", "50%", "0.0 B", 
"200.0 B"))
+      Seq("2", "rdd2", "Disk Serialized 1x Replicated", "56", "5.60%", "0.0 
B", "200.0 B"))
     // Check the url
     assert(((xmlNodes \\ "tr")(1) \\ "td" \ "a")(0).attribute("href").map(_.text) ===
       Some("http://localhost:4040/storage/rdd/?id=2"))
 
     assert(((xmlNodes \\ "tr")(2) \\ "td").map(_.text.trim) ===
-      Seq("3", "rdd3", "Disk Memory Serialized 1x Replicated", "10", "100%", 
"400.0 B", "500.0 B"))
+      Seq("3", "rdd3", "Disk Memory Serialized 1x Replicated", "103", 
"10.30%", "400.0 B",
+        "500.0 B"))
     // Check the url
     assert(((xmlNodes \\ "tr")(2) \\ "td" \ "a")(0).attribute("href").map(_.text) ===
       Some("http://localhost:4040/storage/rdd/?id=3"))

