This is an automated email from the ASF dual-hosted git repository.

mingliang pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-gluten.git


The following commit(s) were added to refs/heads/main by this push:
     new 75891c0494 [GLUTEN-10950][VL] Add time unit to 'spark.hadoop.fs.s3a.connection.timeout' (#10951)
75891c0494 is described below

commit 75891c0494c8d2b1b5007ad6746230f6e577af11
Author: Mingliang Zhu <[email protected]>
AuthorDate: Thu Oct 30 09:14:20 2025 +0800

    [GLUTEN-10950][VL] Add time unit to 'spark.hadoop.fs.s3a.connection.timeout' (#10951)
---
 .../org/apache/gluten/backendsapi/velox/VeloxTransformerApi.scala | 8 +++++++-
 .../java/org/apache/gluten/init/NativeBackendInitializer.java     | 5 +++++
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git 
a/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxTransformerApi.scala
 
b/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxTransformerApi.scala
index 5222265bea..330086c1e6 100644
--- 
a/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxTransformerApi.scala
+++ 
b/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxTransformerApi.scala
@@ -39,6 +39,7 @@ import org.apache.spark.task.TaskResources
 import org.apache.spark.util.collection.BitSet
 
 import com.google.protobuf.{Any, Message}
+import org.apache.commons.lang3.math.NumberUtils
 
 import java.util.{Map => JMap}
 
@@ -69,7 +70,12 @@ class VeloxTransformerApi extends TransformerApi with 
Logging {
   override def postProcessNativeConfig(
       nativeConfMap: JMap[String, String],
       backendPrefix: String): Unit = {
-    // TODO: IMPLEMENT SPECIAL PROCESS FOR VELOX BACKEND
+    // 'spark.hadoop.fs.s3a.connection.timeout' by velox requires time unit, 
hadoop-aws versions
+    // before 3.4 do not have time unit.
+    val s3sConnectionTimeout = 
nativeConfMap.get("spark.hadoop.fs.s3a.connection.timeout")
+    if (NumberUtils.isCreatable(s3sConnectionTimeout)) {
+      nativeConfMap.put("spark.hadoop.fs.s3a.connection.timeout", 
s"${s3sConnectionTimeout}ms")
+    }
   }
 
   override def createCheckOverflowExprNode(
diff --git 
a/gluten-arrow/src/main/java/org/apache/gluten/init/NativeBackendInitializer.java
 
b/gluten-arrow/src/main/java/org/apache/gluten/init/NativeBackendInitializer.java
index c0ae2199c5..bfbb9fd51e 100644
--- 
a/gluten-arrow/src/main/java/org/apache/gluten/init/NativeBackendInitializer.java
+++ 
b/gluten-arrow/src/main/java/org/apache/gluten/init/NativeBackendInitializer.java
@@ -16,6 +16,7 @@
  */
 package org.apache.gluten.init;
 
+import org.apache.gluten.backendsapi.BackendsApiManager;
 import org.apache.gluten.config.GlutenConfig;
 import org.apache.gluten.memory.listener.ReservationListener;
 import org.apache.gluten.utils.ConfigUtil;
@@ -68,6 +69,10 @@ public final class NativeBackendInitializer {
   private void initialize0(ReservationListener rl, 
scala.collection.Map<String, String> conf) {
     try {
       Map<String, String> nativeConfMap = 
GlutenConfig.getNativeBackendConf(backendName, conf);
+      // Get the customer config from SparkConf for each backend.
+      BackendsApiManager.getTransformerApiInstance()
+          .postProcessNativeConfig(
+              nativeConfMap, 
GlutenConfig.prefixOf(BackendsApiManager.getBackendName()));
       initialize(rl, ConfigUtil.serialize(nativeConfMap));
     } catch (Exception e) {
       LOG.error("Failed to call native backend's initialize method", e);


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to