This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 48a81194351e [SPARK-50777][CORE] Remove redundant no-op `init/destroy` 
methods from `Filter` classes
48a81194351e is described below

commit 48a81194351e565b881be96d49fbdde0907d4f31
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Fri Jan 10 10:37:55 2025 +0800

    [SPARK-50777][CORE] Remove redundant no-op `init/destroy` methods from 
`Filter` classes
    
    ### What changes were proposed in this pull request?
    
    This PR aims to remove redundant no-op `init/destroy` methods from `Filter` 
classes.
    
    ### Why are the changes needed?
    
    `Filter` interface already provides the default no-op methods for `init` 
and `destroy`. So, we can clean them up.
    - 
https://github.com/jakartaee/servlet/blob/5.0.0-RELEASE/api/src/main/java/jakarta/servlet/Filter.java#L79
    - 
https://github.com/jakartaee/servlet/blob/5.0.0-RELEASE/api/src/main/java/jakarta/servlet/Filter.java#L133
    ```
    default public void init(FilterConfig filterConfig) throws ServletException 
{}
    default public void destroy() {}
    ```
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Pass the CIs.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #49429 from dongjoon-hyun/SPARK-50777.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: yangjie01 <[email protected]>
---
 .../scala/org/apache/spark/deploy/history/ApplicationCache.scala   | 7 +------
 core/src/main/scala/org/apache/spark/ui/HttpSecurityFilter.scala   | 4 ----
 .../scala/org/apache/spark/deploy/history/HistoryServerSuite.scala | 5 -----
 core/src/test/scala/org/apache/spark/ui/UISuite.scala              | 2 --
 4 files changed, 1 insertion(+), 17 deletions(-)

diff --git 
a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala 
b/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala
index 6e0fe69f3bfb..8caf67ff4680 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/ApplicationCache.scala
@@ -24,7 +24,7 @@ import scala.jdk.CollectionConverters._
 import com.codahale.metrics.{Counter, MetricRegistry, Timer}
 import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache, 
RemovalListener, RemovalNotification}
 import com.google.common.util.concurrent.UncheckedExecutionException
-import jakarta.servlet.{DispatcherType, Filter, FilterChain, FilterConfig, 
ServletException, ServletRequest, ServletResponse}
+import jakarta.servlet.{DispatcherType, Filter, FilterChain, ServletException, 
ServletRequest, ServletResponse}
 import jakarta.servlet.http.{HttpServletRequest, HttpServletResponse}
 import org.eclipse.jetty.servlet.FilterHolder
 
@@ -428,9 +428,4 @@ private[history] class ApplicationCacheCheckFilter(
       httpResponse.sendRedirect(redirectUrl)
     }
   }
-
-  override def init(config: FilterConfig): Unit = { }
-
-  override def destroy(): Unit = { }
-
 }
diff --git a/core/src/main/scala/org/apache/spark/ui/HttpSecurityFilter.scala 
b/core/src/main/scala/org/apache/spark/ui/HttpSecurityFilter.scala
index 551f0eb98cb8..cf881b6ea990 100644
--- a/core/src/main/scala/org/apache/spark/ui/HttpSecurityFilter.scala
+++ b/core/src/main/scala/org/apache/spark/ui/HttpSecurityFilter.scala
@@ -44,10 +44,6 @@ private class HttpSecurityFilter(
     conf: SparkConf,
     securityMgr: SecurityManager) extends Filter {
 
-  override def destroy(): Unit = { }
-
-  override def init(config: FilterConfig): Unit = { }
-
   override def doFilter(req: ServletRequest, res: ServletResponse, chain: 
FilterChain): Unit = {
     val hreq = req.asInstanceOf[HttpServletRequest]
     val hres = res.asInstanceOf[HttpServletResponse]
diff --git 
a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala 
b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
index 6b2bd90cd431..10092f416f9e 100644
--- 
a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
+++ 
b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala
@@ -794,11 +794,6 @@ object HistoryServerSuite {
  * A filter used for auth tests; sets the request's user to the value of the 
"HTTP_USER" header.
  */
 class FakeAuthFilter extends Filter {
-
-  override def destroy(): Unit = { }
-
-  override def init(config: FilterConfig): Unit = { }
-
   override def doFilter(req: ServletRequest, res: ServletResponse, chain: 
FilterChain): Unit = {
     val hreq = req.asInstanceOf[HttpServletRequest]
     val wrapped = new HttpServletRequestWrapper(hreq) {
diff --git a/core/src/test/scala/org/apache/spark/ui/UISuite.scala 
b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
index 1b68ed301fb9..6d12e88e8efa 100644
--- a/core/src/test/scala/org/apache/spark/ui/UISuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
@@ -504,8 +504,6 @@ private[spark] class TestFilter extends Filter {
 
   private var rc: Int = HttpServletResponse.SC_OK
 
-  override def destroy(): Unit = { }
-
   override def init(config: FilterConfig): Unit = {
     if (config.getInitParameter("responseCode") != null) {
       rc = config.getInitParameter("responseCode").toInt


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to