kazuyukitanimura commented on code in PR #1209:
URL: https://github.com/apache/datafusion-comet/pull/1209#discussion_r1909581449


##########
spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala:
##########
@@ -78,6 +78,7 @@ abstract class CometTestBase
     conf.set(CometConf.COMET_ENABLED.key, "true")
     conf.set(CometConf.COMET_EXEC_ENABLED.key, "true")
     conf.set(CometConf.COMET_EXEC_SHUFFLE_ENABLED.key, "true")
+    conf.set(CometConf.COMET_SHUFFLE_FALLBACK_TO_COLUMNAR.key, "true")

Review Comment:
   Do we have a plan to enable this in Spark tests? I guess we need to fix some bugs first, but it might be a good idea to file an issue to track that...
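
   For reference, a minimal sketch of what scoping the flag to a dedicated suite could look like until those bugs are fixed (the suite and test names below are hypothetical, and this assumes `withSQLConf` from Spark's `SQLTestUtils` is available in `CometTestBase`):
   ```
   // Hypothetical suite: enables the fallback only for the tests that exercise it,
   // leaving the rest of the Spark test suites unaffected until the bugs are fixed.
   class CometShuffleFallbackSuite extends CometTestBase {
     test("columnar shuffle fallback when native shuffle is unsupported") {
       withSQLConf(CometConf.COMET_SHUFFLE_FALLBACK_TO_COLUMNAR.key -> "true") {
         // run queries known to hit the native-shuffle fallback path
       }
     }
   }
   ```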



##########
spark/src/main/scala/org/apache/comet/CometSparkSessionExtensions.scala:
##########
@@ -788,68 +788,93 @@ class CometSparkSessionExtensions
           }
 
         // Native shuffle for Comet operators
-        case s: ShuffleExchangeExec
-            if isCometShuffleEnabled(conf) &&
-              isCometNativeShuffleMode(conf) &&
-              QueryPlanSerde.supportPartitioning(s.child.output, s.outputPartitioning)._1 =>
-          logInfo("Comet extension enabled for Native Shuffle")
+        case s: ShuffleExchangeExec =>
+          val nativePrecondition = isCometShuffleEnabled(conf) &&
+            isCometNativeShuffleMode(conf) &&
+            QueryPlanSerde.supportPartitioning(s.child.output, s.outputPartitioning)._1
 
-          val newOp = transform1(s)
-          newOp match {
-            case Some(nativeOp) =>
-              // Switch to use Decimal128 regardless of precision, since Arrow native execution
-              // doesn't support Decimal32 and Decimal64 yet.
-              conf.setConfString(CometConf.COMET_USE_DECIMAL_128.key, "true")
-              val cometOp = CometShuffleExchangeExec(s, shuffleType = CometNativeShuffle)
-              CometSinkPlaceHolder(nativeOp, s, cometOp)
-            case None =>
-              s
-          }
+          val nativeShuffle: Option[SparkPlan] =
+            if (nativePrecondition) {
+              val newOp = transform1(s)
+              newOp match {
+                case Some(nativeOp) =>
+                  // Switch to use Decimal128 regardless of precision, since Arrow native execution
+                  // doesn't support Decimal32 and Decimal64 yet.
+                  conf.setConfString(CometConf.COMET_USE_DECIMAL_128.key, "true")
+                  val cometOp = CometShuffleExchangeExec(s, shuffleType = CometNativeShuffle)
+                  Some(CometSinkPlaceHolder(nativeOp, s, cometOp))
+                case None =>
+                  None
+              }
+            } else {
+              None
+            }
 
-        // Columnar shuffle for regular Spark operators (not Comet) and Comet operators
-        // (if configured).
-        // If the child of ShuffleExchangeExec is also a ShuffleExchangeExec, we should not
-        // convert it to CometColumnarShuffle,
-        case s: ShuffleExchangeExec
-            if isCometShuffleEnabled(conf) && isCometJVMShuffleMode(conf) &&
+          // this is a temporary workaround because some Spark SQL tests fail
+          // when we enable COMET_SHUFFLE_FALLBACK_TO_COLUMNAR due to valid bugs
+          // that we had not previously seen
+          val tryColumnarNext =
+            !nativePrecondition || (nativePrecondition && nativeShuffle.isEmpty &&
+              COMET_SHUFFLE_FALLBACK_TO_COLUMNAR.get(conf))

Review Comment:
   nit: this can be shorter
   ```
   val tryColumnarNext =
    !nativePrecondition || (nativeShuffle.isEmpty && COMET_SHUFFLE_FALLBACK_TO_COLUMNAR.get(conf))
   ```
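
   The shorter form is equivalent by plain Boolean algebra: `!a || (a && b)` reduces to `!a || b`. A quick sanity check, purely for illustration (not part of the PR):
   ```
   // Verifies the equivalence over all Boolean inputs.
   for (a <- Seq(true, false); b <- Seq(true, false)) {
     assert((!a || (a && b)) == (!a || b))
   }
   ```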



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

