This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 82bf3fcc81a [SPARK-43576][CORE] Remove unused declarations from Core module
82bf3fcc81a is described below

commit 82bf3fcc81ae0be8ce945242ae966cee4fae4104
Author: panbingkun <[email protected]>
AuthorDate: Fri May 26 10:19:46 2023 +0300

    [SPARK-43576][CORE] Remove unused declarations from Core module
    
    ### What changes were proposed in this pull request?
    This PR aims to remove unused declarations from the `Core` module.
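    
    As background (not stated in this PR), unused private members and local definitions like the ones removed here are the kind of thing scalac can flag with its unused-symbol warnings. A minimal, hypothetical build.sbt sketch, assuming Scala 2.13's -Wunused options (Spark's real build wires its compiler flags elsewhere):
    
    ```scala
    // Hypothetical sbt setting, for illustration only:
    // warn about unused private members and unused local vals/defs in all sub-projects.
    ThisBuild / scalacOptions ++= Seq("-Wunused:privates,locals")
    ```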
    
    ### Why are the changes needed?
    Make the code cleaner by dropping declarations that are no longer referenced.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Passes the existing GA (GitHub Actions) checks.
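    
    For a local sanity check (a hedged suggestion, not something the PR describes), the Core module can be recompiled and its test suites run through Spark's bundled sbt launcher; GA remains the authoritative gate:
    
    ```
    # Hypothetical local verification; GitHub Actions is the authoritative check.
    ./build/sbt "core/Test/compile"   # recompile core main and test sources
    ./build/sbt "core/test"           # run the core module's test suites
    ```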
    
    Closes #41218 from panbingkun/remove_unused_declaration_core.
    
    Authored-by: panbingkun <[email protected]>
    Signed-off-by: Max Gekk <[email protected]>
---
 .../src/main/resources/org/apache/spark/ui/static/executorspage.js | 1 -
 .../scala/org/apache/spark/deploy/history/ApplicationCache.scala   | 1 -
 core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala           | 3 ---
 core/src/main/scala/org/apache/spark/ui/JettyUtils.scala           | 5 -----
 core/src/main/scala/org/apache/spark/ui/ToolTips.scala             | 7 -------
 5 files changed, 17 deletions(-)

diff --git a/core/src/main/resources/org/apache/spark/ui/static/executorspage.js b/core/src/main/resources/org/apache/spark/ui/static/executorspage.js
index 8c2dc13c35b..92d75c18e49 100644
--- a/core/src/main/resources/org/apache/spark/ui/static/executorspage.js
+++ b/core/src/main/resources/org/apache/spark/ui/static/executorspage.js
@@ -126,7 +126,6 @@ function totalDurationAlpha(totalGCTime, totalDuration) {
     (Math.min(totalGCTime / totalDuration + 0.5, 1)) : 1;
 }
 
-// When GCTimePercent is edited change ToolTips.TASK_TIME to match
 var GCTimePercent = 0.1;
 
 function totalDurationStyle(totalGCTime, totalDuration) {
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala b/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala
index 829631a0454..909f5ea937c 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala
@@ -394,7 +394,6 @@ private[history] class ApplicationCacheCheckFilter(
     val httpRequest = request.asInstanceOf[HttpServletRequest]
     val httpResponse = response.asInstanceOf[HttpServletResponse]
     val requestURI = httpRequest.getRequestURI
-    val operation = httpRequest.getMethod
 
     // if the request is for an attempt, check to see if it is in need of delete/refresh
     // and have the cache update the UI if so
diff --git a/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
index 0d905b46953..cad107256c5 100644
--- a/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
@@ -404,9 +404,6 @@ private[spark] object HadoopRDD extends Logging {
    */
   val CONFIGURATION_INSTANTIATION_LOCK = new Object()
 
-  /** Update the input bytes read metric each time this number of records has been read */
-  val RECORDS_BETWEEN_BYTES_READ_METRIC_UPDATES = 256
-
   /**
   * The three methods below are helpers for accessing the local map, a property of the SparkEnv of
    * the local process.
diff --git a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
index d8119fb9498..9582bdbf526 100644
--- a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
@@ -590,11 +590,6 @@ private class ProxyRedirectHandler(_proxyUri: String) extends HandlerWrapper {
     override def sendRedirect(location: String): Unit = {
       val newTarget = if (location != null) {
         val target = new URI(location)
-        val path = if (target.getPath().startsWith("/")) {
-          target.getPath()
-        } else {
-          req.getRequestURI().stripSuffix("/") + "/" + target.getPath()
-        }
         // The target path should already be encoded, so don't re-encode it, just the
         // proxy address part.
         val proxyBase = UIUtils.uiRoot(req)
diff --git a/core/src/main/scala/org/apache/spark/ui/ToolTips.scala b/core/src/main/scala/org/apache/spark/ui/ToolTips.scala
index 587046676ff..b80fba396b3 100644
--- a/core/src/main/scala/org/apache/spark/ui/ToolTips.scala
+++ b/core/src/main/scala/org/apache/spark/ui/ToolTips.scala
@@ -35,10 +35,6 @@ private[spark] object ToolTips {
 
   val OUTPUT = "Bytes written to Hadoop."
 
-  val STORAGE_MEMORY =
-    "Memory used / total available memory for storage of data " +
-      "like RDD partitions cached in memory. "
-
   val SHUFFLE_WRITE =
     "Bytes and records written to disk in order to be read by a shuffle in a 
future stage."
 
@@ -88,9 +84,6 @@ private[spark] object ToolTips {
        also create multiple RDDs internally. Cached RDDs are shown in green.
     """
 
-  val TASK_TIME =
-  "Shaded red when garbage collection (GC) time is over 10% of task time"
-
   val APPLICATION_EXECUTOR_LIMIT =
     """Maximum number of executors that this application will use. This limit 
is finite only when
        dynamic allocation is enabled. The number of granted executors may 
exceed the limit


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
