This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.5 by this push:
     new c831bd79fba [SPARK-44982][CONNECT] Mark Spark Connect server configurations as static
c831bd79fba is described below

commit c831bd79fba036d121a5d1f24cfc75be4006f4c9
Author: Hyukjin Kwon <gurwls...@apache.org>
AuthorDate: Mon Aug 28 17:36:20 2023 +0900

    [SPARK-44982][CONNECT] Mark Spark Connect server configurations as static
    
    ### What changes were proposed in this pull request?

    This PR proposes to mark all Spark Connect server configurations as static configurations.
    
    ### Why are the changes needed?

    They are already static configurations and cannot be set in the runtime configuration (by default); see also https://github.com/apache/spark/blob/4a4856207d414ba88a8edabeb70e20765460ef1a/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala#L164-L167
    
    ### Does this PR introduce any user-facing change?

    No, they are already static configurations.

    ### How was this patch tested?

    Existing unit tests.

    ### Was this patch authored or co-authored using generative AI tooling?

    No.
    
    Closes #42695 from HyukjinKwon/SPARK-44982.
    
    Authored-by: Hyukjin Kwon <gurwls...@apache.org>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
    (cherry picked from commit 5b69dfd67e35f8be742a58cbd55f33088b4c7704)
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
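
For context, the behavior this commit relies on is observable through the public RuntimeConfig API. The following is a minimal sketch (illustrative only; the builder-time config call and the exact exception message are assumptions based on standard Spark behavior, not part of this patch) of how a Connect configuration behaves once it is registered as static via buildStaticConf:

import org.apache.spark.sql.SparkSession

// Static configurations are honored only when the session is built.
val spark = SparkSession.builder()
  .master("local[*]")
  .config("spark.connect.grpc.binding.port", "15002")
  .getOrCreate()

// Mutating the entry afterwards is rejected by the RuntimeConfig guard
// linked above; the call throws an AnalysisException along the lines of
// "Cannot modify the value of a static config: spark.connect.grpc.binding.port"
// (the exact wording depends on the Spark version).
spark.conf.set("spark.connect.grpc.binding.port", "15003")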
---
 .../apache/spark/sql/connect/config/Connect.scala  | 37 +++++++++++-----------
 1 file changed, 18 insertions(+), 19 deletions(-)

diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/config/Connect.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/config/Connect.scala
index 054ccbe6707..7b8b05ce11a 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/config/Connect.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/config/Connect.scala
@@ -18,7 +18,6 @@ package org.apache.spark.sql.connect.config
 
 import java.util.concurrent.TimeUnit
 
-import org.apache.spark.internal.config.ConfigBuilder
 import org.apache.spark.network.util.ByteUnit
 import org.apache.spark.sql.connect.common.config.ConnectCommon
 
@@ -26,13 +25,13 @@ object Connect {
   import org.apache.spark.sql.internal.SQLConf.buildStaticConf
 
   val CONNECT_GRPC_BINDING_PORT =
-    ConfigBuilder("spark.connect.grpc.binding.port")
+    buildStaticConf("spark.connect.grpc.binding.port")
       .version("3.4.0")
       .intConf
       .createWithDefault(ConnectCommon.CONNECT_GRPC_BINDING_PORT)
 
   val CONNECT_GRPC_INTERCEPTOR_CLASSES =
-    ConfigBuilder("spark.connect.grpc.interceptor.classes")
+    buildStaticConf("spark.connect.grpc.interceptor.classes")
       .doc(
         "Comma separated list of class names that must " +
           "implement the io.grpc.ServerInterceptor interface.")
@@ -41,7 +40,7 @@ object Connect {
       .createOptional
 
   val CONNECT_GRPC_ARROW_MAX_BATCH_SIZE =
-    ConfigBuilder("spark.connect.grpc.arrow.maxBatchSize")
+    buildStaticConf("spark.connect.grpc.arrow.maxBatchSize")
       .doc(
         "When using Apache Arrow, limit the maximum size of one arrow batch, 
in bytes unless " +
           "otherwise specified, that can be sent from server side to client 
side. Currently, we " +
@@ -51,7 +50,7 @@ object Connect {
       .createWithDefault(4 * 1024 * 1024)
 
   val CONNECT_GRPC_MAX_INBOUND_MESSAGE_SIZE =
-    ConfigBuilder("spark.connect.grpc.maxInboundMessageSize")
+    buildStaticConf("spark.connect.grpc.maxInboundMessageSize")
       .doc("Sets the maximum inbound message in bytes size for the gRPC 
requests." +
         "Requests with a larger payload will fail.")
       .version("3.4.0")
@@ -59,7 +58,7 @@ object Connect {
       .createWithDefault(ConnectCommon.CONNECT_GRPC_MAX_MESSAGE_SIZE)
 
   val CONNECT_GRPC_MARSHALLER_RECURSION_LIMIT =
-    ConfigBuilder("spark.connect.grpc.marshallerRecursionLimit")
+    buildStaticConf("spark.connect.grpc.marshallerRecursionLimit")
       .internal()
       .doc("""
           |Sets the recursion limit to grpc protobuf messages.
@@ -69,7 +68,7 @@ object Connect {
       .createWithDefault(1024)
 
   val CONNECT_EXECUTE_MANAGER_DETACHED_TIMEOUT =
-    ConfigBuilder("spark.connect.execute.manager.detachedTimeout")
+    buildStaticConf("spark.connect.execute.manager.detachedTimeout")
       .internal()
       .doc("Timeout after which executions without an attached RPC will be 
removed.")
       .version("3.5.0")
@@ -77,7 +76,7 @@ object Connect {
       .createWithDefaultString("5m")
 
   val CONNECT_EXECUTE_MANAGER_MAINTENANCE_INTERVAL =
-    ConfigBuilder("spark.connect.execute.manager.maintenanceInterval")
+    buildStaticConf("spark.connect.execute.manager.maintenanceInterval")
       .internal()
       .doc("Interval at which execution manager will search for abandoned 
executions to remove.")
       .version("3.5.0")
@@ -85,7 +84,7 @@ object Connect {
       .createWithDefaultString("30s")
 
   val CONNECT_EXECUTE_MANAGER_ABANDONED_TOMBSTONES_SIZE =
-    ConfigBuilder("spark.connect.execute.manager.abandonedTombstonesSize")
+    buildStaticConf("spark.connect.execute.manager.abandonedTombstonesSize")
       .internal()
       .doc("Maximum size of the cache of abandoned executions.")
       .version("3.5.0")
@@ -93,7 +92,7 @@ object Connect {
       .createWithDefaultString("10000")
 
   val CONNECT_EXECUTE_REATTACHABLE_ENABLED =
-    ConfigBuilder("spark.connect.execute.reattachable.enabled")
+    buildStaticConf("spark.connect.execute.reattachable.enabled")
       .internal()
       .doc("Enables reattachable execution on the server. If disabled and a 
client requests it, " +
         "non-reattachable execution will follow and should run until query 
completion. This will " +
@@ -104,7 +103,7 @@ object Connect {
       .createWithDefault(true)
 
   val CONNECT_EXECUTE_REATTACHABLE_SENDER_MAX_STREAM_DURATION =
-    ConfigBuilder("spark.connect.execute.reattachable.senderMaxStreamDuration")
+    buildStaticConf("spark.connect.execute.reattachable.senderMaxStreamDuration")
       .internal()
       .doc("For reattachable execution, after this amount of time the response 
stream will be " +
         "automatically completed and client needs to send a new 
ReattachExecute RPC to continue. " +
@@ -114,7 +113,7 @@ object Connect {
       .createWithDefaultString("2m")
 
   val CONNECT_EXECUTE_REATTACHABLE_SENDER_MAX_STREAM_SIZE =
-    ConfigBuilder("spark.connect.execute.reattachable.senderMaxStreamSize")
+    buildStaticConf("spark.connect.execute.reattachable.senderMaxStreamSize")
       .internal()
       .doc(
         "For reattachable execution, after total responses size exceeds this 
value, the " +
@@ -125,7 +124,7 @@ object Connect {
       .createWithDefaultString("1g")
 
   val CONNECT_EXECUTE_REATTACHABLE_OBSERVER_RETRY_BUFFER_SIZE =
-    ConfigBuilder("spark.connect.execute.reattachable.observerRetryBufferSize")
+    buildStaticConf("spark.connect.execute.reattachable.observerRetryBufferSize")
       .internal()
       .doc(
         "For reattachable execution, the total size of responses that were 
already sent to be " +
@@ -137,7 +136,7 @@ object Connect {
       .createWithDefaultString("1m")
 
   val CONNECT_EXTENSIONS_RELATION_CLASSES =
-    ConfigBuilder("spark.connect.extensions.relation.classes")
+    buildStaticConf("spark.connect.extensions.relation.classes")
       .doc("""
           |Comma separated list of classes that implement the trait
           |org.apache.spark.sql.connect.plugin.RelationPlugin to support custom
@@ -149,7 +148,7 @@ object Connect {
       .createWithDefault(Nil)
 
   val CONNECT_EXTENSIONS_EXPRESSION_CLASSES =
-    ConfigBuilder("spark.connect.extensions.expression.classes")
+    buildStaticConf("spark.connect.extensions.expression.classes")
       .doc("""
           |Comma separated list of classes that implement the trait
          |org.apache.spark.sql.connect.plugin.ExpressionPlugin to support custom
@@ -161,7 +160,7 @@ object Connect {
       .createWithDefault(Nil)
 
   val CONNECT_EXTENSIONS_COMMAND_CLASSES =
-    ConfigBuilder("spark.connect.extensions.command.classes")
+    buildStaticConf("spark.connect.extensions.command.classes")
       .doc("""
              |Comma separated list of classes that implement the trait
             |org.apache.spark.sql.connect.plugin.CommandPlugin to support custom
@@ -173,7 +172,7 @@ object Connect {
       .createWithDefault(Nil)
 
   val CONNECT_JVM_STACK_TRACE_MAX_SIZE =
-    ConfigBuilder("spark.connect.jvmStacktrace.maxSize")
+    buildStaticConf("spark.connect.jvmStacktrace.maxSize")
       .doc("""
           |Sets the maximum stack trace size to display when
           |`spark.sql.pyspark.jvmStacktrace.enabled` is true.
@@ -197,13 +196,13 @@ object Connect {
       .createWithDefault(false)
 
   val CONNECT_UI_STATEMENT_LIMIT =
-    ConfigBuilder("spark.sql.connect.ui.retainedStatements")
+    buildStaticConf("spark.sql.connect.ui.retainedStatements")
       .doc("The number of statements kept in the Spark Connect UI history.")
       .version("3.5.0")
       .intConf
       .createWithDefault(200)
 
-  val CONNECT_UI_SESSION_LIMIT = ConfigBuilder("spark.sql.connect.ui.retainedSessions")
+  val CONNECT_UI_SESSION_LIMIT = buildStaticConf("spark.sql.connect.ui.retainedSessions")
     .doc("The number of client sessions kept in the Spark Connect UI history.")
     .version("3.5.0")
     .intConf

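The change itself is mechanical: every entry previously created with the generic ConfigBuilder is now created through SQLConf.buildStaticConf, which registers the key as static so the runtime guard referenced above applies to it. As a rough self-contained model of that registration-plus-guard pattern (simplified names and types, not Spark's actual classes):

import scala.collection.mutable

object StaticConfGuardSketch {
  // Keys created via buildStaticConf land in a static-key registry.
  private val staticKeys = mutable.Set[String]()
  private val values = mutable.Map[String, String]()

  def buildStaticConf(key: String): String = {
    staticKeys += key
    key
  }

  // Models the RuntimeConfig check: static entries cannot be set at runtime.
  def set(key: String, value: String): Unit = {
    if (staticKeys.contains(key)) {
      throw new IllegalArgumentException(
        s"Cannot modify the value of a static config: $key")
    }
    values(key) = value
  }

  def main(args: Array[String]): Unit = {
    buildStaticConf("spark.connect.grpc.binding.port")
    set("spark.sql.shuffle.partitions", "8")        // runtime conf: accepted
    set("spark.connect.grpc.binding.port", "15003") // static conf: throws
  }
}

Running main accepts the runtime key and throws for the Connect key, which is the contract this patch makes explicit for all Connect server configurations.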

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
