viirya commented on code in PR #92:
URL: https://github.com/apache/arrow-datafusion-comet/pull/92#discussion_r1499585072
##########
spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala:
##########
@@ -59,22 +59,28 @@ class CometExecSuite extends CometTestBase {
     withSQLConf(CometConf.COMET_EXEC_BROADCAST_ENABLED.key -> "true") {
       withParquetTable((0 until 5).map(i => (i, i + 1)), "tbl_a") {
         withParquetTable((0 until 5).map(i => (i, i + 1)), "tbl_b") {
-          val df = sql(
-            "SELECT tbl_a._1, tbl_b._2 FROM tbl_a JOIN tbl_b " +
-              "WHERE tbl_a._1 > tbl_a._2 LIMIT 2")
-
-          val nativeBroadcast = find(df.queryExecution.executedPlan) {
-            case _: CometBroadcastExchangeExec => true
-            case _ => false
-          }.get.asInstanceOf[CometBroadcastExchangeExec]
-
-          val numParts = nativeBroadcast.executeColumnar().getNumPartitions
-
-          val rows = nativeBroadcast.executeCollect().toSeq.sortBy(row => row.getInt(0))
-          val rowContents = rows.map(row => row.getInt(0))
-          val expected = (0 until numParts).flatMap(_ => (0 until 5).map(i => i + 1)).sorted
-
-          assert(rowContents === expected)
+          Seq(true, false).foreach { emptyBroadcast =>
Review Comment:
Hmm, this change makes the test noticeably more complicated. Could you add a new test for
the empty broadcast case instead of modifying the current one?
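
For example, something along these lines could live as its own test (an untested sketch: the test name, the `BROADCAST` hint, and the `tbl_b._2 < 0` predicate used to force an empty broadcast side are placeholders, not code from this PR):

```scala
test("CometBroadcastExchangeExec with empty broadcast side") {
  withSQLConf(CometConf.COMET_EXEC_BROADCAST_ENABLED.key -> "true") {
    withParquetTable((0 until 5).map(i => (i, i + 1)), "tbl_a") {
      withParquetTable((0 until 5).map(i => (i, i + 1)), "tbl_b") {
        // The predicate on the broadcast side matches no rows, so the
        // exchange should broadcast an empty relation.
        val df = sql(
          "SELECT /*+ BROADCAST(tbl_b) */ tbl_a._1, tbl_b._2 FROM tbl_a JOIN tbl_b " +
            "ON tbl_a._1 = tbl_b._1 WHERE tbl_b._2 < 0")

        val nativeBroadcast = find(df.queryExecution.executedPlan) {
          case _: CometBroadcastExchangeExec => true
          case _ => false
        }.get.asInstanceOf[CometBroadcastExchangeExec]

        // An empty input should still produce a valid broadcast with no rows.
        assert(nativeBroadcast.executeCollect().isEmpty)
      }
    }
  }
}
```

That way the existing happy-path test stays unchanged.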
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]