This is an automated email from the ASF dual-hosted git repository.

viirya pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git


The following commit(s) were added to refs/heads/main by this push:
     new 7fe20b13 test: Fix explain with extended info comet test (#436)
7fe20b13 is described below

commit 7fe20b136bb949a8e51f7f8d01ce548455ce0a9f
Author: KAZUYUKI TANIMURA <[email protected]>
AuthorDate: Thu May 16 12:36:45 2024 -0700

    test: Fix explain with extended info comet test (#436)
    
    * test: Fix explain with extended info comet test
    
    * address review comments
    
    * address review comments
---
 .../org/apache/comet/shims/ShimCometSparkSessionExtensions.scala    | 5 +++++
 spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala    | 2 +-
 spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala       | 6 ++----
 3 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/spark/src/main/spark-3.x/org/apache/comet/shims/ShimCometSparkSessionExtensions.scala b/spark/src/main/spark-3.x/org/apache/comet/shims/ShimCometSparkSessionExtensions.scala
index ffec1bd4..eb04c68a 100644
--- a/spark/src/main/spark-3.x/org/apache/comet/shims/ShimCometSparkSessionExtensions.scala
+++ b/spark/src/main/spark-3.x/org/apache/comet/shims/ShimCometSparkSessionExtensions.scala
@@ -40,6 +40,11 @@ trait ShimCometSparkSessionExtensions {
    */
   def getOffset(limit: LimitExec): Int = getOffsetOpt(limit).getOrElse(0)
 
+  /**
+   * TODO: delete after dropping Spark 3.x support and directly call
+   *       SQLConf.EXTENDED_EXPLAIN_PROVIDERS.key
+   */
+  protected val EXTENDED_EXPLAIN_PROVIDERS_KEY = "spark.sql.extendedExplainProviders"
 }
 
 object ShimCometSparkSessionExtensions {
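
The shim above exists because Spark 3.x does not expose this config key as a
constant, so the key is hardcoded on that branch. The TODO anticipates the end
state once 3.x support is dropped; a minimal sketch of that, assuming the
SQLConf.EXTENDED_EXPLAIN_PROVIDERS constant that newer Spark versions define
(this is an illustration, not code from this commit):

    import org.apache.spark.sql.internal.SQLConf

    trait ShimCometSparkSessionExtensions {
      // With Spark 3.x support gone, the hardcoded string can be replaced by
      // the constant on SQLConf, so the key is defined in exactly one place.
      protected val EXTENDED_EXPLAIN_PROVIDERS_KEY: String =
        SQLConf.EXTENDED_EXPLAIN_PROVIDERS.key
    }
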
diff --git a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
index dbb46e07..f3fd50e9 100644
--- a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
@@ -1399,7 +1399,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
       CometConf.COMET_EXEC_ENABLED.key -> "true",
       CometConf.COMET_SHUFFLE_ENFORCE_MODE_ENABLED.key -> "true",
       CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key -> "true",
-      "spark.sql.extendedExplainProvider" -> 
"org.apache.comet.ExtendedExplainInfo") {
+      EXTENDED_EXPLAIN_PROVIDERS_KEY -> 
"org.apache.comet.ExtendedExplainInfo") {
       val table = "test"
       withTable(table) {
         sql(s"create table $table(c0 int, c1 int , c2 float) using parquet")
diff --git a/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala b/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala
index 8e05bf26..112d35b1 100644
--- a/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala
@@ -247,9 +247,7 @@ abstract class CometTestBase
       expectedInfo: Set[String]): Unit = {
     var expected: Array[Row] = Array.empty
     var dfSpark: Dataset[Row] = null
-    withSQLConf(
-      CometConf.COMET_ENABLED.key -> "false",
-      "spark.sql.extendedExplainProvider" -> "") {
+    withSQLConf(CometConf.COMET_ENABLED.key -> "false", EXTENDED_EXPLAIN_PROVIDERS_KEY -> "") {
       dfSpark = Dataset.ofRows(spark, df.logicalPlan)
       expected = dfSpark.collect()
     }
@@ -259,7 +257,7 @@ abstract class CometTestBase
       dfSpark.queryExecution.explainString(ExtendedMode),
       dfComet.queryExecution.explainString(ExtendedMode))
     if (supportsExtendedExplainInfo(dfSpark.queryExecution)) {
-      assert(diff.contains(expectedInfo))
+      assert(expectedInfo.forall(s => diff.contains(s)))
     }
     val extendedInfo =
      new ExtendedExplainInfo().generateExtendedInfo(dfComet.queryExecution.executedPlan)
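
The assertion change matters because expectedInfo is a Set[String]: the old
diff.contains(expectedInfo) searched for the set itself as a single element,
which can never match, while the new form checks each expected message
individually. A self-contained illustration, assuming diff is the string
difference of the two explain outputs (its exact type in CometTestBase may
differ):

    // Hypothetical values; `diff` stands in for the computed plan difference.
    val diff: String = "Comet: expression not supported"
    val expectedInfo: Set[String] = Set("Comet: expression not supported")

    // Old form, diff.contains(expectedInfo), compared the whole Set against
    // the diff's elements and so was always false. The new form requires
    // every expected fragment to appear somewhere in the diff.
    assert(expectedInfo.forall(s => diff.contains(s)))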

