This is an automated email from the ASF dual-hosted git repository.

yumwang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 0996a1588fe [SPARK-40401][CORE] Remove support for deprecated `spark.akka.*` configs
0996a1588fe is described below

commit 0996a1588feabea0aa3578bd551dcc1a1627eab7
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Sat Sep 10 12:17:25 2022 +0800

    [SPARK-40401][CORE] Remove support for deprecated `spark.akka.*` configs
    
    ### What changes were proposed in this pull request?
    
    This PR aims to remove support for `spark.akka.*` configs and the related warnings.
    
    ### Why are the changes needed?
    
    - Apache Spark 2.0+ does not use Akka, but some `spark.akka.*` configs are still accepted as deprecated alternatives to the current configs (see the sketch after this list).
    - Due to Akka's license change, we should make it clear that Apache Spark is unrelated to Akka, even in config names.
      - https://www.lightbend.com/blog/why-we-are-changing-the-license-for-akka
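
    For context, `SparkConf` keeps a map from each current key to the deprecated keys it still honors, and the first hunk below deletes the `spark.akka.*` entries from that map. A minimal, self-contained sketch of the fallback lookup, with simplified names (the real resolution logic lives in `SparkConf.scala`; `spark.rpc.askTimeout` is the current key behind `RPC_ASK_TIMEOUT`):
    ```scala
    object AlternateConfigSketch {
      // A current key may have older, deprecated spellings that are still read.
      case class AlternateConfig(key: String, version: String)

      // Before this PR, this map carried `spark.akka.*` entries, e.g.
      // "spark.rpc.askTimeout" -> Seq(AlternateConfig("spark.akka.askTimeout", "1.4")).
      val configsWithAlternatives: Map[String, Seq[AlternateConfig]] = Map(
        "spark.rpc.askTimeout" -> Seq(AlternateConfig("spark.akka.askTimeout", "1.4")))

      // Reading a key falls back to its deprecated alternates when the key is unset.
      def get(settings: Map[String, String], key: String): Option[String] =
        settings.get(key).orElse {
          configsWithAlternatives.getOrElse(key, Nil)
            .flatMap(alt => settings.get(alt.key)).headOption
        }

      def main(args: Array[String]): Unit = {
        val settings = Map("spark.akka.askTimeout" -> "3")
        // With the entry above: Some(3). Once this PR drops the entry: None.
        println(get(settings, "spark.rpc.askTimeout"))
      }
    }
    ```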
    
    After this PR, the word `spark.akka` no longer appears in the codebase; the following search returns no matches:
    ```
    $ git grep spark.akka
    ```

    ### Does this PR introduce _any_ user-facing change?
    
    Yes, `spark.akka.*` configs will now be ignored, but this is necessary to remove user confusion caused by the `spark.akka.*` naming.
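
    Concretely, a hypothetical before/after sketch of the behavior change, as in a REPL session (based on the test case removed below; `RpcUtils.askRpcTimeout` reads the ask timeout from the conf):
    ```scala
    import org.apache.spark.SparkConf

    val conf = new SparkConf()

    // Before this PR: the deprecated key was still honored (with a warning),
    // so this used to produce a 3-second ask timeout.
    conf.set("spark.akka.askTimeout", "3")

    // After this PR: only the current key is recognized.
    conf.set("spark.rpc.askTimeout", "3")
    ```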
    
    ### How was this patch tested?
    
    Manual review.
    
    Closes #37849 from dongjoon-hyun/SPARK-40401.
    
    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Yuming Wang <yumw...@ebay.com>
---
 core/src/main/scala/org/apache/spark/SparkConf.scala     | 11 -----------
 .../src/test/scala/org/apache/spark/SparkConfSuite.scala | 16 +---------------
 2 files changed, 1 insertion(+), 26 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala
index f296b1408fa..08344d8e547 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -682,18 +682,12 @@ private[spark] object SparkConf extends Logging {
       AlternateConfig("spark.io.compression.snappy.block.size", "1.4")),
     IO_COMPRESSION_LZ4_BLOCKSIZE.key -> Seq(
       AlternateConfig("spark.io.compression.lz4.block.size", "1.4")),
-    RPC_ASK_TIMEOUT.key -> Seq(
-      AlternateConfig("spark.akka.askTimeout", "1.4")),
-    RPC_LOOKUP_TIMEOUT.key -> Seq(
-      AlternateConfig("spark.akka.lookupTimeout", "1.4")),
     "spark.streaming.fileStream.minRememberDuration" -> Seq(
       AlternateConfig("spark.streaming.minRememberDuration", "1.5")),
     "spark.yarn.max.executor.failures" -> Seq(
       AlternateConfig("spark.yarn.max.worker.failures", "1.5")),
     MEMORY_OFFHEAP_ENABLED.key -> Seq(
       AlternateConfig("spark.unsafe.offHeap", "1.6")),
-    RPC_MESSAGE_MAX_SIZE.key -> Seq(
-      AlternateConfig("spark.akka.frameSize", "1.6")),
     "spark.yarn.jars" -> Seq(
       AlternateConfig("spark.yarn.jar", "2.0")),
     MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM.key -> Seq(
@@ -780,11 +774,6 @@ private[spark] object SparkConf extends Logging {
         s"may be removed in the future. Please use the new key '$newKey' 
instead.")
       return
     }
-    if (key.startsWith("spark.akka") || key.startsWith("spark.ssl.akka")) {
-      logWarning(
-        s"The configuration key $key is not supported anymore " +
-          s"because Spark doesn't use Akka since 2.0")
-    }
   }
 
   /**
diff --git a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
index 7ae1eac1db1..74fd7816221 100644
--- a/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -20,7 +20,6 @@ package org.apache.spark
 import java.util.concurrent.{Executors, TimeUnit}
 
 import scala.collection.JavaConverters._
-import scala.concurrent.duration._
 import scala.util.{Random, Try}
 
 import com.esotericsoftware.kryo.Kryo
@@ -34,7 +33,7 @@ import org.apache.spark.resource.ResourceID
 import org.apache.spark.resource.ResourceUtils._
 import org.apache.spark.resource.TestResourceIDs._
 import org.apache.spark.serializer.{JavaSerializer, KryoRegistrator, KryoSerializer}
-import org.apache.spark.util.{ResetSystemProperties, RpcUtils, Utils}
+import org.apache.spark.util.{ResetSystemProperties, Utils}
 
 class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSystemProperties {
   test("Test byteString conversion") {
@@ -281,19 +280,6 @@ class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSyst
     assert(conf.get(KERBEROS_FILESYSTEMS_TO_ACCESS) === Array("testNode"))
   }
 
-  test("akka deprecated configs") {
-    val conf = new SparkConf()
-
-    assert(!conf.contains(RPC_ASK_TIMEOUT))
-    assert(!conf.contains(RPC_LOOKUP_TIMEOUT))
-
-    conf.set("spark.akka.askTimeout", "3")
-    assert(RpcUtils.askRpcTimeout(conf).duration === 3.seconds)
-
-    conf.set("spark.akka.lookupTimeout", "4")
-    assert(RpcUtils.lookupRpcTimeout(conf).duration === 4.seconds)
-  }
-
   test("SPARK-13727") {
     val conf = new SparkConf()
     // set the conf in the deprecated way

