This is an automated email from the ASF dual-hosted git repository.

sarutak pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b0089c3887e8 [SPARK-55041][CORE] Clean up some unused private `function/val` from the core module
b0089c3887e8 is described below

commit b0089c3887e8611b0c987c550c00e178337ffe05
Author: yangjie01 <[email protected]>
AuthorDate: Mon Jan 19 17:11:29 2026 +0900

    [SPARK-55041][CORE] Clean up some unused private `function/val` from the core module
    
    ### What changes were proposed in this pull request?
    This PR aims to clean up some unused private `function/val` members from the core module.
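    
    As a side note, one way to surface members like these at compile time (a tooling suggestion, not something this PR adds) is Scala 2.13's unused-symbol lints, e.g. in an sbt build:
    
        // build.sbt -- illustrative snippet; the flag names are standard Scala 2.13 options
        scalacOptions ++= Seq(
          "-Wunused:privates",  // warn on unused private members
          "-Wunused:imports"    // warn on unused imports
        )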
    
    ### Why are the changes needed?
    Code cleanup.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GitHub Actions
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #53809 from LuciferYang/core-unused.
    
    Authored-by: yangjie01 <[email protected]>
    Signed-off-by: Kousuke Saruta <[email protected]>
---
 .../org/apache/spark/deploy/history/ApplicationCache.scala | 14 --------------
 .../spark/deploy/rest/SubmitRestProtocolRequest.scala      |  3 ---
 .../org/apache/spark/deploy/worker/ui/WorkerWebUI.scala    |  3 ---
 .../org/apache/spark/input/WholeTextFileRecordReader.scala |  1 -
 .../scala/org/apache/spark/memory/StorageMemoryPool.scala  |  5 -----
 5 files changed, 26 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala b/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala
index c2fe0acec2e9..b9f4f4b974a5 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala
@@ -293,10 +293,6 @@ private[history] class CacheMetrics(prefix: String) extends Source {
     ("eviction.count", evictionCount),
     ("load.count", loadCount))
 
-  /** all metrics, including timers */
-  private val allMetrics = counters ++ Seq(
-    ("load.timer", loadTimer))
-
   /**
    * Name of metric source
    */
@@ -304,16 +300,6 @@ private[history] class CacheMetrics(prefix: String) extends Source {
 
   override val metricRegistry: MetricRegistry = new MetricRegistry
 
-  /**
-   * Startup actions.
-   * This includes registering metrics with [[metricRegistry]]
-   */
-  private def init(): Unit = {
-    allMetrics.foreach { case (name, metric) =>
-      metricRegistry.register(MetricRegistry.name(prefix, name), metric)
-    }
-  }
-
   override def toString: String = {
     val sb = new StringBuilder()
     counters.foreach { case (name, counter) =>
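
The init() removed above was never invoked, so the metrics it would have registered never actually reached the registry. For reference, a minimal self-contained sketch of the Dropwizard registration pattern it used (the prefix and metric names here are illustrative):

    import com.codahale.metrics.{Counter, Metric, MetricRegistry, Timer}

    val registry = new MetricRegistry()
    val prefix = "history.cache"  // illustrative prefix
    val allMetrics: Seq[(String, Metric)] = Seq(
      ("load.count", new Counter()),
      ("load.timer", new Timer()))

    // MetricRegistry.name joins its parts with dots, e.g. "history.cache.load.count"
    allMetrics.foreach { case (name, metric) =>
      registry.register(MetricRegistry.name(prefix, name), metric)
    }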
diff --git a/core/src/main/scala/org/apache/spark/deploy/rest/SubmitRestProtocolRequest.scala b/core/src/main/scala/org/apache/spark/deploy/rest/SubmitRestProtocolRequest.scala
index c6ff3dbb33cb..c21cd4bb1e47 100644
--- a/core/src/main/scala/org/apache/spark/deploy/rest/SubmitRestProtocolRequest.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/rest/SubmitRestProtocolRequest.scala
@@ -54,9 +54,6 @@ private[rest] class CreateSubmissionRequest extends SubmitRestProtocolRequest {
     assertPropertyIsMemory(config.EXECUTOR_MEMORY.key)
   }
 
-  private def assertPropertyIsSet(key: String): Unit =
-    assertFieldIsSet(sparkProperties.getOrElse(key, null), key)
-
   private def assertPropertyIsBoolean(key: String): Unit =
     assertProperty[Boolean](key, "boolean", _.toBoolean)
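
The removed assertPropertyIsSet only checked presence; the surviving assertProperty* helpers also check that the value parses as the expected type. A simplified stand-alone sketch of that pattern (not Spark's actual implementation):

    // Verify that a raw string property parses as the expected type.
    def assertProperty[T](props: Map[String, String], key: String,
        valueType: String, convert: String => T): Unit = {
      props.get(key).foreach { raw =>
        try convert(raw) catch {
          case _: Exception => throw new IllegalArgumentException(
            s"Property '$key' must be of type $valueType, got '$raw'")
        }
      }
    }

    // e.g. a boolean-typed property such as spark.driver.supervise:
    assertProperty(Map("spark.driver.supervise" -> "true"),
      "spark.driver.supervise", "boolean", _.toBoolean)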
 
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
index 4fd50378ffc2..1ce660e61f93 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala
@@ -25,7 +25,6 @@ import org.apache.spark.deploy.worker.Worker
 import org.apache.spark.internal.Logging
 import org.apache.spark.ui.{SparkUI, WebUI}
 import org.apache.spark.ui.JettyUtils._
-import org.apache.spark.util.RpcUtils
 
 /**
  * Web UI server for the standalone worker.
@@ -39,8 +38,6 @@ class WorkerWebUI(
     requestedPort, worker.conf, name = "WorkerUI")
   with Logging {
 
-  private[ui] val timeout = RpcUtils.askRpcTimeout(worker.conf)
-
   initialize()
 
   /** Initialize all components of the server. */
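
The timeout val removed above derived an RPC ask timeout from the worker's configuration but was never read. A generic sketch of that pattern (simplified; not Spark's actual RpcUtils, and the config keys are assumptions based on Spark's documented defaults):

    import scala.concurrent.duration.{Duration, FiniteDuration}

    // Read a timeout string such as "120s" from config, with a fallback key.
    def askTimeout(conf: Map[String, String]): FiniteDuration = {
      val raw = conf.get("spark.rpc.askTimeout")
        .orElse(conf.get("spark.network.timeout"))
        .getOrElse("120s")
      Duration(raw) match {
        case f: FiniteDuration => f
        case other => throw new IllegalArgumentException(s"Infinite timeout: $other")
      }
    }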
diff --git a/core/src/main/scala/org/apache/spark/input/WholeTextFileRecordReader.scala b/core/src/main/scala/org/apache/spark/input/WholeTextFileRecordReader.scala
index ba975237cb93..cb7350fc0fb3 100644
--- a/core/src/main/scala/org/apache/spark/input/WholeTextFileRecordReader.scala
+++ b/core/src/main/scala/org/apache/spark/input/WholeTextFileRecordReader.scala
@@ -50,7 +50,6 @@ private[spark] class WholeTextFileRecordReader(
   extends RecordReader[Text, Text] with Configurable {
 
   private[this] val path = split.getPath(index)
-  private[this] val fs = path.getFileSystem(context.getConfiguration)
 
   // True means the current file has been processed, then skip it.
   private[this] var processed = false
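
The fs val removed above obtained a Hadoop FileSystem handle that the reader never used. For reference, a minimal sketch of that standard Hadoop call (the path is hypothetical):

    import org.apache.hadoop.conf.Configuration
    import org.apache.hadoop.fs.{FileSystem, Path}

    // Resolve the FileSystem implementation (local, HDFS, ...) backing a path.
    val path = new Path("hdfs:///tmp/example.txt")  // hypothetical path
    val fs: FileSystem = path.getFileSystem(new Configuration())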
diff --git a/core/src/main/scala/org/apache/spark/memory/StorageMemoryPool.scala b/core/src/main/scala/org/apache/spark/memory/StorageMemoryPool.scala
index 7cde44b55332..0f15254f3a08 100644
--- a/core/src/main/scala/org/apache/spark/memory/StorageMemoryPool.scala
+++ b/core/src/main/scala/org/apache/spark/memory/StorageMemoryPool.scala
@@ -37,11 +37,6 @@ private[memory] class StorageMemoryPool(
     memoryMode: MemoryMode
   ) extends MemoryPool(lock) with Logging {
 
-  private[this] val poolName: String = memoryMode match {
-    case MemoryMode.ON_HEAP => "on-heap storage"
-    case MemoryMode.OFF_HEAP => "off-heap storage"
-  }
-
   @GuardedBy("lock")
   private[this] var _memoryUsed: Long = 0L
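
The removed poolName was a plain match on Spark's MemoryMode. A self-contained sketch of the same pattern (the enum is redefined here for illustration; in Spark it is a Java enum):

    object MemoryMode extends Enumeration { val ON_HEAP, OFF_HEAP = Value }

    def poolName(mode: MemoryMode.Value): String = mode match {
      case MemoryMode.ON_HEAP => "on-heap storage"
      case MemoryMode.OFF_HEAP => "off-heap storage"
    }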
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
