This is an automated email from the ASF dual-hosted git repository.

mbutrovich pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git


The following commit(s) were added to refs/heads/main by this push:
     new b5e8316cf fix: skip calculating per-task memory limit when in off-heap mode (#2462)
b5e8316cf is described below

commit b5e8316cf6674199949045b4adb0ebd2be304cbb
Author: Andy Grove <[email protected]>
AuthorDate: Fri Sep 26 14:41:35 2025 -0600

    fix: skip calculating per-task memory limit when in off-heap mode (#2462)
---
 spark/src/main/scala/org/apache/comet/CometExecIterator.scala | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/spark/src/main/scala/org/apache/comet/CometExecIterator.scala b/spark/src/main/scala/org/apache/comet/CometExecIterator.scala
index 67d044f8c..0bd41d05e 100644
--- a/spark/src/main/scala/org/apache/comet/CometExecIterator.scala
+++ b/spark/src/main/scala/org/apache/comet/CometExecIterator.scala
@@ -93,6 +93,14 @@ class CometExecIterator(
     }
     val protobufSparkConfigs = builder.build().toByteArray
 
+    val memoryLimitPerTask = if (offHeapMode) {
+      // this per-task limit is not used in native code when using unified memory
+      // so we can skip calculating it and avoid logging irrelevant information
+      0
+    } else {
+      getMemoryLimitPerTask(conf)
+    }
+
     nativeLib.createPlan(
       id,
       cometBatchIterators,
@@ -107,7 +115,7 @@ class CometExecIterator(
       offHeapMode,
       memoryPoolType = COMET_EXEC_MEMORY_POOL_TYPE.get(),
       memoryLimit,
-      memoryLimitPerTask = getMemoryLimitPerTask(conf),
+      memoryLimitPerTask,
       taskAttemptId = TaskContext.get().taskAttemptId,
       debug = COMET_DEBUG_ENABLED.get(),
       explain = COMET_EXPLAIN_NATIVE_ENABLED.get(),


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to