kazuyukitanimura commented on code in PR #1379:
URL: https://github.com/apache/datafusion-comet/pull/1379#discussion_r1951820736


##########
spark/src/main/scala/org/apache/spark/Plugins.scala:
##########
@@ -62,7 +62,13 @@ class CometDriverPlugin extends DriverPlugin with Logging with ShimCometDriverPl
         Math.max((executorMemory * memoryOverheadFactor).toLong, memoryOverheadMinMib)
       }
 
-      val cometMemOverhead = CometSparkSessionExtensions.getCometMemoryOverheadInMiB(sc.getConf)
+      val cometMemOverhead =
+        if (!CometSparkSessionExtensions.cometUnifiedMemoryManagerEnabled(sc.getConf)) {
+          CometSparkSessionExtensions.getCometMemoryOverheadInMiB(sc.getConf)

Review Comment:
   Is it intentional to put the negation `!` here?
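   For clarity, this is how I read the new branch. A minimal sketch of my interpretation only; the `else` side is not visible in this hunk, so the zero fallback is an assumption, not the PR code:

```scala
import org.apache.comet.CometSparkSessionExtensions
import org.apache.spark.SparkConf

object NegationReadingSketch {
  // With the `!`, the Comet-specific overhead is requested only when Spark's
  // off-heap (unified) memory manager is disabled.
  def cometMemOverhead(conf: SparkConf): Long =
    if (!CometSparkSessionExtensions.cometUnifiedMemoryManagerEnabled(conf)) {
      CometSparkSessionExtensions.getCometMemoryOverheadInMiB(conf)
    } else {
      0L // assumption: no extra overhead when memory is unified/off-heap
    }
}
```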



##########
spark/src/test/scala/org/apache/spark/CometPluginsSuite.scala:
##########
@@ -126,6 +126,8 @@ class CometPluginsNonOverrideSuite extends CometTestBase {
     conf.set("spark.executor.memoryOverheadFactor", "0.5")
     conf.set("spark.plugins", "org.apache.spark.CometPlugin")
     conf.set("spark.comet.enabled", "true")
+    conf.set("spark.comet.exec.shuffle.enabled", "false")

Review Comment:
   Where is this used?



##########
spark/src/main/scala/org/apache/comet/CometSparkSessionExtensions.scala:
##########
@@ -1399,6 +1404,16 @@ object CometSparkSessionExtensions extends Logging {
     }
   }
 
+  def cometUnifiedMemoryManagerEnabled(sparkConf: SparkConf): Boolean = {
+    sparkConf.getBoolean("spark.memory.offHeap.enabled", false)
+  }
+
+  def cometShuffleUnifiedMemoryManagerEnabled(sparkConf: SparkConf): Boolean = {

Review Comment:
   Perhaps it is best to add `Test` to the name of this function to indicate that it is only for test purposes.
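   Something along these lines, for example. The name and the body here are only illustrative; the real implementation is not shown in this hunk:

```scala
import org.apache.spark.SparkConf

object RenameSketch {
  // Hypothetical "ForTesting" suffix to flag test-only use; the body combines the
  // off-heap check above with the shuffle flag and is my guess, not the PR code.
  // The default values are illustrative only.
  def cometShuffleUnifiedMemoryManagerEnabledForTesting(sparkConf: SparkConf): Boolean =
    sparkConf.getBoolean("spark.memory.offHeap.enabled", false) &&
      sparkConf.getBoolean("spark.comet.exec.shuffle.enabled", true)
}
```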



##########
spark/src/main/scala/org/apache/comet/CometSparkSessionExtensions.scala:
##########
@@ -1354,9 +1354,14 @@ object CometSparkSessionExtensions extends Logging {
 
   /** Calculates required memory overhead in MB per executor process for Comet. */
   def getCometMemoryOverheadInMiB(sparkConf: SparkConf): Long = {
-    // `spark.executor.memory` default value is 1g
-    val executorMemoryMiB = ConfigHelpers
      .byteFromString(sparkConf.get("spark.executor.memory", "1024MB"), ByteUnit.MiB)
+    val executorMemoryMiB = if (cometUnifiedMemoryManagerEnabled(sparkConf)) {

Review Comment:
   Hmm, it is a bit weird to assign the off-heap memory amount to `executorMemoryMiB`.
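   As a sketch of what I mean, giving the off-heap amount its own name might read better. The names and the `spark.memory.offHeap.size` source below are my assumptions, not the PR code:

```scala
import org.apache.spark.SparkConf

object OverheadNamingSketch {
  // Keep the off-heap amount and the executor heap size in separately named vals
  // instead of reusing `executorMemoryMiB` for both.
  def memoryBaseForOverheadMiB(conf: SparkConf): Long =
    if (conf.getBoolean("spark.memory.offHeap.enabled", false)) {
      val offHeapSizeMiB = conf.getSizeAsMb("spark.memory.offHeap.size", "0")
      offHeapSizeMiB
    } else {
      // `spark.executor.memory` defaults to 1g
      val executorHeapMiB = conf.getSizeAsMb("spark.executor.memory", "1024MB")
      executorHeapMiB
    }
}
```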



