This is an automated email from the ASF dual-hosted git repository.
kazuyukitanimura pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git
The following commit(s) were added to refs/heads/main by this push:
new 5f1e99830 test: Enable Comet by default except some tests in SparkSessionExtensionSuite (#1201)
5f1e99830 is described below
commit 5f1e99830caec96ce5889656cb8fca68a6d6d5e0
Author: KAZUYUKI TANIMURA <[email protected]>
AuthorDate: Fri Jan 3 16:29:49 2025 -0800
test: Enable Comet by default except some tests in SparkSessionExtensionSuite (#1201)
## Which issue does this PR close?
Part of https://github.com/apache/datafusion-comet/issues/1197
## Rationale for this change
Since `loadCometExtension` in the diffs was not using `isCometEnabled`, `SparkSessionExtensionSuite` was not exercising Comet. Once Comet was enabled, some test failures were discovered.
## What changes are included in this PR?
`loadCometExtension` now uses `isCometEnabled`, which enables Comet by default.
Temporarily ignore the failing tests in `SparkSessionExtensionSuite`.
## How are these changes tested?
Existing tests.
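For reference, the core of this change is the new default in the patched `SparkSession` object. The sketch below is condensed from the hunks in this commit; the wrapper object `CometDefaults` exists only to make the snippet self-contained and is not part of the patch:

```scala
import org.apache.spark.SparkContext

object CometDefaults {
  /**
   * Whether the Comet extension is enabled. Comet is now on by default:
   * if ENABLE_COMET is unset this returns true. The old test-helper logic
   * (`v != null && v.toBoolean`) required ENABLE_COMET=true instead.
   */
  def isCometEnabled: Boolean = {
    val v = System.getenv("ENABLE_COMET")
    v == null || v.toBoolean
  }

  /** An explicit spark.comet.enabled conf still overrides the environment default. */
  def loadCometExtension(sparkContext: SparkContext): Seq[String] = {
    if (sparkContext.getConf.getBoolean("spark.comet.enabled", isCometEnabled)) {
      Seq("org.apache.comet.CometSparkSessionExtensions")
    } else {
      Seq.empty
    }
  }
}
```

Tests in `SparkSessionExtensionSuite` that install their own columnar rules opt out per session via `session.conf.set("spark.comet.enabled", false)` until https://github.com/apache/datafusion-comet/issues/1197 is resolved.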
---
dev/diffs/3.4.3.diff | 66 ++++++++++++++++++++++++++++++--------
dev/diffs/3.5.1.diff | 74 +++++++++++++++++++++++++++++++++----------
dev/diffs/4.0.0-preview1.diff | 66 ++++++++++++++++++++++++++++++--------
3 files changed, 163 insertions(+), 43 deletions(-)
diff --git a/dev/diffs/3.4.3.diff b/dev/diffs/3.4.3.diff
index 8be1cdcaa..b78a24a4c 100644
--- a/dev/diffs/3.4.3.diff
+++ b/dev/diffs/3.4.3.diff
@@ -53,7 +53,7 @@ index b386d135da1..46449e3f3f1 100644
<!--
This spark-tags test-dep is needed even though it isn't used in this
module, otherwise testing-cmds that exclude
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
-index c595b50950b..6b60213e775 100644
+index c595b50950b..3abb6cb9441 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -102,7 +102,7 @@ class SparkSession private(
@@ -79,7 +79,7 @@ index c595b50950b..6b60213e775 100644
}
+ private def loadCometExtension(sparkContext: SparkContext): Seq[String] = {
-+ if (sparkContext.getConf.getBoolean("spark.comet.enabled", false)) {
++ if (sparkContext.getConf.getBoolean("spark.comet.enabled",
isCometEnabled)) {
+ Seq("org.apache.comet.CometSparkSessionExtensions")
+ } else {
+ Seq.empty
@@ -100,6 +100,19 @@ index c595b50950b..6b60213e775 100644
try {
val extensionConfClass = Utils.classForName(extensionConfClassName)
val extensionConf = extensionConfClass.getConstructor().newInstance()
+@@ -1323,4 +1333,12 @@ object SparkSession extends Logging {
+ }
+ }
+ }
++
++ /**
++ * Whether Comet extension is enabled
++ */
++ def isCometEnabled: Boolean = {
++ val v = System.getenv("ENABLE_COMET")
++ v == null || v.toBoolean
++ }
+ }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala
index db587dd9868..aac7295a53d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala
@@ -957,6 +970,37 @@ index 525d97e4998..8a3e7457618 100644
AccumulatorSuite.verifyPeakExecutionMemorySet(sparkContext, "external
sort") {
sql("SELECT * FROM testData2 ORDER BY a ASC, b ASC").collect()
}
+diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
+index 48ad10992c5..51d1ee65422 100644
+--- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
++++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
+@@ -221,6 +221,8 @@ class SparkSessionExtensionSuite extends SparkFunSuite with SQLHelper {
+ withSession(extensions) { session =>
+ session.conf.set(SQLConf.ADAPTIVE_EXECUTION_ENABLED, true)
+ session.conf.set(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key, "-1")
++ // https://github.com/apache/datafusion-comet/issues/1197
++ session.conf.set("spark.comet.enabled", false)
+ assert(session.sessionState.columnarRules.contains(
+ MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule())))
+ import session.sqlContext.implicits._
+@@ -279,6 +281,8 @@ class SparkSessionExtensionSuite extends SparkFunSuite with SQLHelper {
+ }
+ withSession(extensions) { session =>
+ session.conf.set(SQLConf.ADAPTIVE_EXECUTION_ENABLED, enableAQE)
++ // https://github.com/apache/datafusion-comet/issues/1197
++ session.conf.set("spark.comet.enabled", false)
+ assert(session.sessionState.columnarRules.contains(
+ MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule())))
+ import session.sqlContext.implicits._
+@@ -317,6 +321,8 @@ class SparkSessionExtensionSuite extends SparkFunSuite with SQLHelper {
+ val session = SparkSession.builder()
+ .master("local[1]")
+ .config(COLUMN_BATCH_SIZE.key, 2)
++ // https://github.com/apache/datafusion-comet/issues/1197
++ .config("spark.comet.enabled", false)
+ .withExtensions { extensions =>
+ extensions.injectColumnar(session =>
+ MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule()))
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
index 75eabcb96f2..36e3318ad7e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
@@ -2746,7 +2790,7 @@ index abe606ad9c1..2d930b64cca 100644
val tblTargetName = "tbl_target"
val tblSourceQualified = s"default.$tblSourceName"
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
-index dd55fcfe42c..aa9b0be8e68 100644
+index dd55fcfe42c..2702f87c1f1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
@@ -41,6 +41,7 @@ import org.apache.spark.sql.catalyst.plans.PlanTest
@@ -2770,17 +2814,14 @@ index dd55fcfe42c..aa9b0be8e68 100644
}
}
-@@ -242,6 +247,41 @@ private[sql] trait SQLTestUtilsBase
+@@ -242,6 +247,38 @@ private[sql] trait SQLTestUtilsBase
protected override def _sqlContext: SQLContext = self.spark.sqlContext
}
+ /**
+ * Whether Comet extension is enabled
+ */
-+ protected def isCometEnabled: Boolean = {
-+ val v = System.getenv("ENABLE_COMET")
-+ v != null && v.toBoolean
-+ }
++ protected def isCometEnabled: Boolean = SparkSession.isCometEnabled
+
+ /**
+ * Whether to enable ansi mode This is only effective when
@@ -2812,7 +2853,7 @@ index dd55fcfe42c..aa9b0be8e68 100644
protected override def withSQLConf(pairs: (String, String)*)(f: => Unit):
Unit = {
SparkSession.setActiveSession(spark)
super.withSQLConf(pairs: _*)(f)
-@@ -434,6 +474,8 @@ private[sql] trait SQLTestUtilsBase
+@@ -434,6 +471,8 @@ private[sql] trait SQLTestUtilsBase
val schema = df.schema
val withoutFilters = df.queryExecution.executedPlan.transform {
case FilterExec(_, child) => child
@@ -2910,10 +2951,10 @@ index 1966e1e64fd..cde97a0aafe 100644
spark.sql(
"""
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
-index 07361cfdce9..6673c141c9a 100644
+index 07361cfdce9..e40c59a4207 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
-@@ -55,25 +55,53 @@ object TestHive
+@@ -55,25 +55,52 @@ object TestHive
new SparkContext(
System.getProperty("spark.sql.test.master", "local[1]"),
"TestSQLContext",
@@ -2955,8 +2996,7 @@ index 07361cfdce9..6673c141c9a 100644
+ // ConstantPropagation etc.
+ .set(SQLConf.OPTIMIZER_EXCLUDED_RULES.key, ConvertToLocalRelation.ruleName)
+
-+ val v = System.getenv("ENABLE_COMET")
-+ if (v != null && v.toBoolean) {
++ if (SparkSession.isCometEnabled) {
+ conf
+ .set("spark.sql.extensions",
"org.apache.comet.CometSparkSessionExtensions")
+ .set("spark.comet.enabled", "true")
diff --git a/dev/diffs/3.5.1.diff b/dev/diffs/3.5.1.diff
index 5e3433c68..fff50ecd3 100644
--- a/dev/diffs/3.5.1.diff
+++ b/dev/diffs/3.5.1.diff
@@ -53,7 +53,7 @@ index c46ab7b8fce..13357e8c7a6 100644
<!--
This spark-tags test-dep is needed even though it isn't used in this
module, otherwise testing-cmds that exclude
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
-index 27ae10b3d59..064cbc252ea 100644
+index 27ae10b3d59..78e69902dfd 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -1353,6 +1353,14 @@ object SparkSession extends Logging {
@@ -61,11 +61,11 @@ index 27ae10b3d59..064cbc252ea 100644
}
+ private def loadCometExtension(sparkContext: SparkContext): Seq[String] = {
-+ if (sparkContext.getConf.getBoolean("spark.comet.enabled", false)) {
-+ Seq("org.apache.comet.CometSparkSessionExtensions")
-+ } else {
-+ Seq.empty
-+ }
++ if (sparkContext.getConf.getBoolean("spark.comet.enabled",
isCometEnabled)) {
++ Seq("org.apache.comet.CometSparkSessionExtensions")
++ } else {
++ Seq.empty
++ }
+ }
+
/**
@@ -79,6 +79,19 @@ index 27ae10b3d59..064cbc252ea 100644
extensionConfClassNames.foreach { extensionConfClassName =>
try {
val extensionConfClass = Utils.classForName(extensionConfClassName)
+@@ -1396,4 +1405,12 @@ object SparkSession extends Logging {
+ }
+ }
+ }
++
++ /**
++ * Whether Comet extension is enabled
++ */
++ def isCometEnabled: Boolean = {
++ val v = System.getenv("ENABLE_COMET")
++ v == null || v.toBoolean
++ }
+ }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala
index db587dd9868..aac7295a53d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala
@@ -959,6 +972,37 @@ index cfeccbdf648..803d8734cc4 100644
AccumulatorSuite.verifyPeakExecutionMemorySet(sparkContext, "external
sort") {
sql("SELECT * FROM testData2 ORDER BY a ASC, b ASC").collect()
}
+diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
+index 8b4ac474f87..3f79f20822f 100644
+--- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
++++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
+@@ -223,6 +223,8 @@ class SparkSessionExtensionSuite extends SparkFunSuite with SQLHelper with Adapt
+ withSession(extensions) { session =>
+ session.conf.set(SQLConf.ADAPTIVE_EXECUTION_ENABLED, true)
+ session.conf.set(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key, "-1")
++ // https://github.com/apache/datafusion-comet/issues/1197
++ session.conf.set("spark.comet.enabled", false)
+ assert(session.sessionState.columnarRules.contains(
+ MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule())))
+ import session.sqlContext.implicits._
+@@ -281,6 +283,8 @@ class SparkSessionExtensionSuite extends SparkFunSuite with SQLHelper with Adapt
+ }
+ withSession(extensions) { session =>
+ session.conf.set(SQLConf.ADAPTIVE_EXECUTION_ENABLED, enableAQE)
++ // https://github.com/apache/datafusion-comet/issues/1197
++ session.conf.set("spark.comet.enabled", false)
+ assert(session.sessionState.columnarRules.contains(
+ MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule())))
+ import session.sqlContext.implicits._
+@@ -319,6 +323,8 @@ class SparkSessionExtensionSuite extends SparkFunSuite with SQLHelper with Adapt
+ val session = SparkSession.builder()
+ .master("local[1]")
+ .config(COLUMN_BATCH_SIZE.key, 2)
++ // https://github.com/apache/datafusion-comet/issues/1197
++ .config("spark.comet.enabled", false)
+ .withExtensions { extensions =>
+ extensions.injectColumnar(session =>
+ MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule()))
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
index fbc256b3396..0821999c7c2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
@@ -2731,7 +2775,7 @@ index abe606ad9c1..2d930b64cca 100644
val tblTargetName = "tbl_target"
val tblSourceQualified = s"default.$tblSourceName"
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
-index dd55fcfe42c..aa9b0be8e68 100644
+index dd55fcfe42c..2702f87c1f1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
@@ -41,6 +41,7 @@ import org.apache.spark.sql.catalyst.plans.PlanTest
@@ -2755,17 +2799,14 @@ index dd55fcfe42c..aa9b0be8e68 100644
}
}
-@@ -242,6 +247,41 @@ private[sql] trait SQLTestUtilsBase
+@@ -242,6 +247,38 @@ private[sql] trait SQLTestUtilsBase
protected override def _sqlContext: SQLContext = self.spark.sqlContext
}
+ /**
+ * Whether Comet extension is enabled
+ */
-+ protected def isCometEnabled: Boolean = {
-+ val v = System.getenv("ENABLE_COMET")
-+ v != null && v.toBoolean
-+ }
++ protected def isCometEnabled: Boolean = SparkSession.isCometEnabled
+
+ /**
+ * Whether to enable ansi mode This is only effective when
@@ -2797,7 +2838,7 @@ index dd55fcfe42c..aa9b0be8e68 100644
protected override def withSQLConf(pairs: (String, String)*)(f: => Unit):
Unit = {
SparkSession.setActiveSession(spark)
super.withSQLConf(pairs: _*)(f)
-@@ -434,6 +474,8 @@ private[sql] trait SQLTestUtilsBase
+@@ -434,6 +471,8 @@ private[sql] trait SQLTestUtilsBase
val schema = df.schema
val withoutFilters = df.queryExecution.executedPlan.transform {
case FilterExec(_, child) => child
@@ -2895,10 +2936,10 @@ index dc8b184fcee..dd69a989d40 100644
spark.sql(
"""
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
-index 9284b35fb3e..e8984be5ebc 100644
+index 9284b35fb3e..2a0269bdc16 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
-@@ -53,25 +53,53 @@ object TestHive
+@@ -53,25 +53,52 @@ object TestHive
new SparkContext(
System.getProperty("spark.sql.test.master", "local[1]"),
"TestSQLContext",
@@ -2940,8 +2981,7 @@ index 9284b35fb3e..e8984be5ebc 100644
+ // ConstantPropagation etc.
+ .set(SQLConf.OPTIMIZER_EXCLUDED_RULES.key, ConvertToLocalRelation.ruleName)
+
-+ val v = System.getenv("ENABLE_COMET")
-+ if (v != null && v.toBoolean) {
++ if (SparkSession.isCometEnabled) {
+ conf
+ .set("spark.sql.extensions",
"org.apache.comet.CometSparkSessionExtensions")
+ .set("spark.comet.enabled", "true")
diff --git a/dev/diffs/4.0.0-preview1.diff b/dev/diffs/4.0.0-preview1.diff
index 5f9bf9d80..2de366f33 100644
--- a/dev/diffs/4.0.0-preview1.diff
+++ b/dev/diffs/4.0.0-preview1.diff
@@ -53,7 +53,7 @@ index 19f6303be36..6c0e77882e6 100644
<!--
This spark-tags test-dep is needed even though it isn't used in this
module, otherwise testing-cmds that exclude
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
-index 466e4cf8131..7ead2715b5d 100644
+index 466e4cf8131..798f118464c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -1376,6 +1376,14 @@ object SparkSession extends Logging {
@@ -61,7 +61,7 @@ index 466e4cf8131..7ead2715b5d 100644
}
+ private def loadCometExtension(sparkContext: SparkContext): Seq[String] = {
-+ if (sparkContext.getConf.getBoolean("spark.comet.enabled", false)) {
++ if (sparkContext.getConf.getBoolean("spark.comet.enabled",
isCometEnabled)) {
+ Seq("org.apache.comet.CometSparkSessionExtensions")
+ } else {
+ Seq.empty
@@ -81,6 +81,19 @@ index 466e4cf8131..7ead2715b5d 100644
try {
val extensionConfClass = Utils.classForName(extensionConfClassName)
val extensionConf = extensionConfClass.getConstructor().newInstance()
+@@ -1420,4 +1429,12 @@ object SparkSession extends Logging {
+ }
+ }
+ }
++
++ /**
++ * Whether Comet extension is enabled
++ */
++ def isCometEnabled: Boolean = {
++ val v = System.getenv("ENABLE_COMET")
++ v == null || v.toBoolean
++ }
+ }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala
index 7c45b02ee84..9f2b608c9f5 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala
@@ -1031,6 +1044,37 @@ index 56c364e2084..fc3abd7cdc4 100644
withSQLConf(SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "false",
SQLConf.ANSI_ENABLED.key -> "true") {
withTable("t") {
+diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
+index 4d38e360f43..3c272af0b62 100644
+--- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
++++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionExtensionSuite.scala
+@@ -223,6 +223,8 @@ class SparkSessionExtensionSuite extends SparkFunSuite with SQLHelper with Adapt
+ withSession(extensions) { session =>
+ session.conf.set(SQLConf.ADAPTIVE_EXECUTION_ENABLED, true)
+ session.conf.set(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key, "-1")
++ // https://github.com/apache/datafusion-comet/issues/1197
++ session.conf.set("spark.comet.enabled", false)
+ assert(session.sessionState.columnarRules.contains(
+ MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule())))
+ import session.sqlContext.implicits._
+@@ -281,6 +283,8 @@ class SparkSessionExtensionSuite extends SparkFunSuite with SQLHelper with Adapt
+ }
+ withSession(extensions) { session =>
+ session.conf.set(SQLConf.ADAPTIVE_EXECUTION_ENABLED, enableAQE)
++ // https://github.com/apache/datafusion-comet/issues/1197
++ session.conf.set("spark.comet.enabled", false)
+ assert(session.sessionState.columnarRules.contains(
+ MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule())))
+ import session.sqlContext.implicits._
+@@ -319,6 +323,8 @@ class SparkSessionExtensionSuite extends SparkFunSuite with SQLHelper with Adapt
+ val session = SparkSession.builder()
+ .master("local[1]")
+ .config(COLUMN_BATCH_SIZE.key, 2)
++ // https://github.com/apache/datafusion-comet/issues/1197
++ .config("spark.comet.enabled", false)
+ .withExtensions { extensions =>
+ extensions.injectColumnar(session =>
+ MyColumnarRule(PreRuleReplaceAddWithBrokenVersion(), MyPostRule()))
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
index 68f14f13bbd..174636cefb5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SubquerySuite.scala
@@ -2936,7 +2980,7 @@ index af07aceaed1..ed0b5e6d9be 100644
val tblTargetName = "tbl_target"
val tblSourceQualified = s"default.$tblSourceName"
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
-index 5fbf379644f..51debb8c481 100644
+index 5fbf379644f..1f2e5d7cfa0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
@@ -41,6 +41,7 @@ import org.apache.spark.sql.catalyst.plans.PlanTest
@@ -2960,17 +3004,14 @@ index 5fbf379644f..51debb8c481 100644
}
}
-@@ -243,6 +248,42 @@ private[sql] trait SQLTestUtilsBase
+@@ -243,6 +248,39 @@ private[sql] trait SQLTestUtilsBase
protected override def _sqlContext: SQLContext = self.spark.sqlContext
}
+ /**
+ * Whether Comet extension is enabled
+ */
-+ protected def isCometEnabled: Boolean = {
-+ val v = System.getenv("ENABLE_COMET")
-+ v != null && v.toBoolean
-+ }
++ protected def isCometEnabled: Boolean = SparkSession.isCometEnabled
+
+ /**
+ * Whether to enable ansi mode This is only effective when
@@ -3003,7 +3044,7 @@ index 5fbf379644f..51debb8c481 100644
protected override def withSQLConf[T](pairs: (String, String)*)(f: => T): T
= {
SparkSession.setActiveSession(spark)
super.withSQLConf(pairs: _*)(f)
-@@ -434,6 +475,8 @@ private[sql] trait SQLTestUtilsBase
+@@ -434,6 +472,8 @@ private[sql] trait SQLTestUtilsBase
val schema = df.schema
val withoutFilters = df.queryExecution.executedPlan.transform {
case FilterExec(_, child) => child
@@ -3087,10 +3128,10 @@ index 52abd248f3a..7a199931a08 100644
case d: DynamicPruningExpression => d.child
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
-index 3f8de93b330..fc06e81f41a 100644
+index 3f8de93b330..2912f98cc5b 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala
-@@ -53,24 +53,53 @@ object TestHive
+@@ -53,24 +53,52 @@ object TestHive
new SparkContext(
System.getProperty("spark.sql.test.master", "local[1]"),
"TestSQLContext",
@@ -3132,8 +3173,7 @@ index 3f8de93b330..fc06e81f41a 100644
+ // ConstantPropagation etc.
+ .set(SQLConf.OPTIMIZER_EXCLUDED_RULES.key, ConvertToLocalRelation.ruleName)
+
-+ val v = System.getenv("ENABLE_COMET")
-+ if (v != null && v.toBoolean) {
++ if (SparkSession.isCometEnabled) {
+ conf
+ .set("spark.sql.extensions",
"org.apache.comet.CometSparkSessionExtensions")
+ .set("spark.comet.enabled", "true")