This is an automated email from the ASF dual-hosted git repository.

richox pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/auron.git


The following commit(s) were added to refs/heads/master by this push:
     new b7c88300 [AURON #1378] add spark.auron.enable.broadcastexchange to
control whether BroadcastExchangeExec is converted to Native or not (#1379)
b7c88300 is described below

commit b7c88300748dc562fbae96b572273af854251c4c
Author: guixiaowen <[email protected]>
AuthorDate: Thu Oct 9 14:58:28 2025 +0800

    [AURON #1378] add spark.auron.enable.broadcastexchange to control
whether BroadcastExchangeExec is converted to Native or not (#1379)
    
    * [AURON #1378] Do not convert BroadcastExchangeExec into Native when
spark.auron.enable.bhj or spark.auron.enable.bnlj is false
    
    * [AURON #1378] add spark.auron.enable.broadcastexchange to control
whether BroadcastExchangeExec is converted to Native or not #1378
    
    * [AURON #1378] add spark.auron.enable.broadcastexchange to control
whether BroadcastExchangeExec is converted to Native or not #1378
    
    * [AURON #1378] add spark.auron.enable.broadcastexchange to control
whether BroadcastExchangeExec is converted to Native or not #1378
    
    * [AURON #1378] add spark.auron.enable.broadcastexchange to control
whether BroadcastExchangeExec is converted to Native or not
    
    * [AURON #1378] add spark.auron.enable.broadcastexchange to control
whether BroadcastExchangeExec is converted to Native or not
    
    * [AURON #1378] add spark.auron.enable.broadcastexchange to control
whether BroadcastExchangeExec is converted to Native or not #1379
    
    ---------
    
    Co-authored-by: guihuawen <[email protected]>
---
 .../AuronCheckConvertBroadcastExchangeSuite.scala  | 170 +++++++++++++++++++++
 .../apache/spark/sql/auron/AuronConverters.scala   |   5 +-
 2 files changed, 174 insertions(+), 1 deletion(-)

diff --git 
a/spark-extension-shims-spark3/src/test/scala/org/apache/spark/sql/auron/AuronCheckConvertBroadcastExchangeSuite.scala
 
b/spark-extension-shims-spark3/src/test/scala/org/apache/spark/sql/auron/AuronCheckConvertBroadcastExchangeSuite.scala
new file mode 100644
index 00000000..e04d4bea
--- /dev/null
+++ 
b/spark-extension-shims-spark3/src/test/scala/org/apache/spark/sql/auron/AuronCheckConvertBroadcastExchangeSuite.scala
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.auron
+
+import org.apache.spark.sql.{QueryTest, Row, SparkSession}
+import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec
+import org.apache.spark.sql.execution.auron.plan.NativeBroadcastExchangeExec
+import org.apache.spark.sql.execution.exchange.BroadcastExchangeExec
+import org.apache.spark.sql.test.SharedSparkSession
+
+class AuronCheckConvertBroadcastExchangeSuite
+    extends QueryTest
+    with SharedSparkSession
+    with AuronSQLTestHelper
+    with org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper {
+  import testImplicits._
+
+  test(
+    "test bhj broadcastExchange to native where 
spark.auron.enable.broadcastexchange is true") {
+    val spark = SparkSession
+      .builder()
+      .master("local[2]")
+      .appName("checkConvertToBroadcast")
+      .config("spark.sql.shuffle.partitions", "4")
+      .config("spark.sql.autoBroadcastJoinThreshold", -1)
+      .config("spark.sql.extensions", 
"org.apache.spark.sql.auron.AuronSparkSessionExtension")
+      .config(
+        "spark.shuffle.manager",
+        "org.apache.spark.sql.execution.auron.shuffle.AuronShuffleManager")
+      .config("spark.memory.offHeap.enabled", "false")
+      .config("spark.auron.enable", "true")
+      .getOrCreate()
+
+    Seq((1, 2, "test test")).toDF("c1", "c2", 
"part").createOrReplaceTempView("broad_cast_table1")
+    Seq((1, 2, "test test")).toDF("c1", "c2", 
"part").createOrReplaceTempView("broad_cast_table2")
+    val executePlan =
+      spark.sql(
+        "select /*+ broadcast(a)*/ a.c1, a.c2 from broad_cast_table1 a inner 
join broad_cast_table2 b on a.c1 = b.c1")
+
+    val plan = 
executePlan.queryExecution.executedPlan.asInstanceOf[AdaptiveSparkPlanExec]
+    val broadcastExchangeExec =
+      plan.executedPlan
+        .collectFirst { case broadcastExchangeExec: BroadcastExchangeExec =>
+          broadcastExchangeExec
+        }
+
+    val afterConvertPlan = 
AuronConverters.convertSparkPlan(broadcastExchangeExec.get)
+    assert(afterConvertPlan.isInstanceOf[NativeBroadcastExchangeExec])
+    checkAnswer(executePlan, Seq(Row(1, 2)))
+  }
+
+  test(
+    "test bnlj broadcastExchange to native where 
spark.auron.enable.broadcastexchange is true") {
+    val spark = SparkSession
+      .builder()
+      .master("local[2]")
+      .appName("checkConvertToBroadcast")
+      .config("spark.sql.shuffle.partitions", "4")
+      .config("spark.sql.autoBroadcastJoinThreshold", -1)
+      .config("spark.sql.extensions", 
"org.apache.spark.sql.auron.AuronSparkSessionExtension")
+      .config(
+        "spark.shuffle.manager",
+        "org.apache.spark.sql.execution.auron.shuffle.AuronShuffleManager")
+      .config("spark.memory.offHeap.enabled", "false")
+      .config("spark.auron.enable", "true")
+      .getOrCreate()
+
+    Seq((1, 2, "test test")).toDF("c1", "c2", 
"part").createOrReplaceTempView("broad_cast_table1")
+    Seq((1, 2, "test test")).toDF("c1", "c2", 
"part").createOrReplaceTempView("broad_cast_table2")
+    val executePlan =
+      spark.sql(
+        "select /*+ broadcast(a)*/ a.c1, a.c2 from broad_cast_table1 a inner 
join broad_cast_table2 b ")
+
+    val plan = 
executePlan.queryExecution.executedPlan.asInstanceOf[AdaptiveSparkPlanExec]
+    val broadcastExchangeExec =
+      plan.executedPlan
+        .collectFirst { case broadcastExchangeExec: BroadcastExchangeExec =>
+          broadcastExchangeExec
+        }
+
+    val afterConvertPlan = 
AuronConverters.convertSparkPlan(broadcastExchangeExec.get)
+    assert(afterConvertPlan.isInstanceOf[NativeBroadcastExchangeExec])
+    checkAnswer(executePlan, Seq(Row(1, 2)))
+  }
+
+  test(
+    "test do not convert broadcastExchange to native when set 
spark.auron.enable.broadcastexchange is false") {
+    val spark = SparkSession
+      .builder()
+      .master("local[2]")
+      .appName("checkConvertToBroadcast")
+      .config("spark.sql.shuffle.partitions", "4")
+      .config("spark.sql.autoBroadcastJoinThreshold", -1)
+      .config("spark.sql.extensions", 
"org.apache.spark.sql.auron.AuronSparkSessionExtension")
+      .config(
+        "spark.shuffle.manager",
+        "org.apache.spark.sql.execution.auron.shuffle.AuronShuffleManager")
+      .config("spark.memory.offHeap.enabled", "false")
+      .config("spark.auron.enable.broadcastExchange", "false")
+      .config("spark.auron.enable", "true")
+      .getOrCreate()
+
+    Seq((1, 2, "test test")).toDF("c1", "c2", 
"part").createOrReplaceTempView("broad_cast_table1")
+    Seq((1, 2, "test test")).toDF("c1", "c2", 
"part").createOrReplaceTempView("broad_cast_table2")
+    val executePlan =
+      spark.sql(
+        "select /*+ broadcast(a)*/ a.c1, a.c2 from broad_cast_table1 a inner 
join broad_cast_table2 b on a.c1 = b.c1")
+
+    val plan = 
executePlan.queryExecution.executedPlan.asInstanceOf[AdaptiveSparkPlanExec]
+    val broadcastExchangeExec =
+      plan.executedPlan
+        .collectFirst { case broadcastExchangeExec: BroadcastExchangeExec =>
+          broadcastExchangeExec
+        }
+
+    val afterConvertPlan = 
AuronConverters.convertSparkPlan(broadcastExchangeExec.get)
+    assert(afterConvertPlan.isInstanceOf[BroadcastExchangeExec])
+    checkAnswer(executePlan, Seq(Row(1, 2)))
+  }
+
+  test(
+    "test bnlj broadcastExchange to native where 
spark.auron.enable.broadcastexchange is false") {
+    val spark = SparkSession
+      .builder()
+      .master("local[2]")
+      .appName("checkConvertToBroadcast")
+      .config("spark.sql.shuffle.partitions", "4")
+      .config("spark.sql.autoBroadcastJoinThreshold", -1)
+      .config("spark.sql.extensions", 
"org.apache.spark.sql.auron.AuronSparkSessionExtension")
+      .config(
+        "spark.shuffle.manager",
+        "org.apache.spark.sql.execution.auron.shuffle.AuronShuffleManager")
+      .config("spark.memory.offHeap.enabled", "false")
+      .config("spark.auron.enable.broadcastExchange", "false")
+      .config("spark.auron.enable", "true")
+      .getOrCreate()
+
+    Seq((1, 2, "test test")).toDF("c1", "c2", 
"part").createOrReplaceTempView("broad_cast_table1")
+    Seq((1, 2, "test test")).toDF("c1", "c2", 
"part").createOrReplaceTempView("broad_cast_table2")
+    val executePlan =
+      spark.sql(
+        "select /*+ broadcast(a)*/ a.c1, a.c2 from broad_cast_table1 a inner 
join broad_cast_table2 b ")
+
+    val plan = 
executePlan.queryExecution.executedPlan.asInstanceOf[AdaptiveSparkPlanExec]
+    val broadcastExchangeExec =
+      plan.executedPlan
+        .collectFirst { case broadcastExchangeExec: BroadcastExchangeExec =>
+          broadcastExchangeExec
+        }
+
+    val afterConvertPlan = 
AuronConverters.convertSparkPlan(broadcastExchangeExec.get)
+    assert(afterConvertPlan.isInstanceOf[BroadcastExchangeExec])
+    checkAnswer(executePlan, Seq(Row(1, 2)))
+  }
+
+}
diff --git 
a/spark-extension/src/main/scala/org/apache/spark/sql/auron/AuronConverters.scala
 
b/spark-extension/src/main/scala/org/apache/spark/sql/auron/AuronConverters.scala
index 8f075ac4..df2608b4 100644
--- 
a/spark-extension/src/main/scala/org/apache/spark/sql/auron/AuronConverters.scala
+++ 
b/spark-extension/src/main/scala/org/apache/spark/sql/auron/AuronConverters.scala
@@ -133,6 +133,8 @@ object AuronConverters extends Logging {
     getBooleanConf("spark.auron.enable.scan.parquet", defaultValue = true)
   def enableScanOrc: Boolean =
     getBooleanConf("spark.auron.enable.scan.orc", defaultValue = true)
+  def enableBroadcastExchange: Boolean =
+    getBooleanConf("spark.auron.enable.broadcastExchange", defaultValue = true)
 
   private val extConvertProviders = 
ServiceLoader.load(classOf[AuronConvertProvider]).asScala
   def extConvertSupported(exec: SparkPlan): Boolean = {
@@ -173,7 +175,8 @@ object AuronConverters extends Logging {
   def convertSparkPlan(exec: SparkPlan): SparkPlan = {
     exec match {
       case e: ShuffleExchangeExec => tryConvert(e, convertShuffleExchangeExec)
-      case e: BroadcastExchangeExec => tryConvert(e, 
convertBroadcastExchangeExec)
+      case e: BroadcastExchangeExec if enableBroadcastExchange =>
+        tryConvert(e, convertBroadcastExchangeExec)
       case e: FileSourceScanExec if enableScan => // scan
         tryConvert(e, convertFileSourceScanExec)
       case e: ProjectExec if enableProject => // project

Reply via email to