This is an automated email from the ASF dual-hosted git repository.

agrove pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git


The following commit(s) were added to refs/heads/main by this push:
     new f35d80c39 chore: Remove COMET_EXPR_ALLOW_INCOMPATIBLE config (#2786)
f35d80c39 is described below

commit f35d80c39db2a5173ee923b77eb8840feab472f7
Author: Andy Grove <[email protected]>
AuthorDate: Sat Nov 15 11:25:05 2025 -0700

    chore: Remove COMET_EXPR_ALLOW_INCOMPATIBLE config (#2786)
---
 .../main/scala/org/apache/comet/CometConf.scala    |  8 -----
 dev/benchmarks/comet-tpcds.sh                      |  2 +-
 dev/benchmarks/comet-tpch.sh                       |  2 +-
 docs/source/user-guide/latest/compatibility.md     |  8 ++---
 docs/source/user-guide/latest/configs.md           | 35 +++++++++++-----------
 docs/source/user-guide/latest/expressions.md       |  3 --
 docs/source/user-guide/latest/kubernetes.md        |  1 -
 .../org/apache/comet/expressions/CometCast.scala   |  7 ++---
 .../org/apache/comet/serde/QueryPlanSerde.scala    | 20 ++++++-------
 .../apache/comet/exec/CometAggregateSuite.scala    |  3 +-
 .../spark/sql/comet/CometPlanStabilitySuite.scala  |  9 ++++--
 11 files changed, 43 insertions(+), 55 deletions(-)
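
For anyone updating jobs after this change, a hedged migration sketch: the
removed global flag gives way to the per-expression pattern
`spark.comet.expression.EXPRNAME.allowIncompatible` documented in the diff
below. The session setup and the choice of Cast/Average/Sum here are
illustrative assumptions, not part of this commit:

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .appName("comet-allow-incompat-example") // hypothetical app name
      // Removed by this commit (no longer recognized):
      //   .config("spark.comet.expression.allowIncompatible", "true")
      // Replacement: opt in per expression, where the key suffix is the
      // Spark expression class name (EXPRNAME):
      .config("spark.comet.expression.Cast.allowIncompatible", "true")
      .config("spark.comet.expression.Average.allowIncompatible", "true")
      .config("spark.comet.expression.Sum.allowIncompatible", "true")
      .getOrCreate()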

diff --git a/common/src/main/scala/org/apache/comet/CometConf.scala b/common/src/main/scala/org/apache/comet/CometConf.scala
index 53ae060c0..acecced28 100644
--- a/common/src/main/scala/org/apache/comet/CometConf.scala
+++ b/common/src/main/scala/org/apache/comet/CometConf.scala
@@ -666,14 +666,6 @@ object CometConf extends ShimCometConf {
       .booleanConf
       .createWithDefault(false)
 
-  val COMET_EXPR_ALLOW_INCOMPATIBLE: ConfigEntry[Boolean] =
-    conf("spark.comet.expression.allowIncompatible")
-      .category(CATEGORY_EXEC)
-      .doc("Comet is not currently fully compatible with Spark for all expressions. " +
-        s"Set this config to true to allow them anyway. $COMPAT_GUIDE.")
-      .booleanConf
-      .createWithDefault(false)
-
   val COMET_EXEC_STRICT_FLOATING_POINT: ConfigEntry[Boolean] =
     conf("spark.comet.exec.strictFloatingPoint")
       .category(CATEGORY_EXEC)
diff --git a/dev/benchmarks/comet-tpcds.sh b/dev/benchmarks/comet-tpcds.sh
index cd44bb50a..86cc12b02 100755
--- a/dev/benchmarks/comet-tpcds.sh
+++ b/dev/benchmarks/comet-tpcds.sh
@@ -40,7 +40,7 @@ $SPARK_HOME/bin/spark-submit \
     --conf spark.executor.extraClassPath=$COMET_JAR \
     --conf spark.plugins=org.apache.spark.CometPlugin \
     --conf spark.shuffle.manager=org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager \
-    --conf spark.comet.expression.allowIncompatible=true \
+    --conf spark.comet.expression.Cast.allowIncompatible=true \
     --conf spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem \
     --conf spark.hadoop.fs.s3a.aws.credentials.provider=com.amazonaws.auth.DefaultAWSCredentialsProviderChain \
     tpcbench.py \
diff --git a/dev/benchmarks/comet-tpch.sh b/dev/benchmarks/comet-tpch.sh
index df95565fe..385719e16 100755
--- a/dev/benchmarks/comet-tpch.sh
+++ b/dev/benchmarks/comet-tpch.sh
@@ -41,7 +41,7 @@ $SPARK_HOME/bin/spark-submit \
     --conf spark.plugins=org.apache.spark.CometPlugin \
     --conf spark.shuffle.manager=org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager \
     --conf spark.comet.exec.replaceSortMergeJoin=true \
-    --conf spark.comet.expression.allowIncompatible=true \
+    --conf spark.comet.expression.Cast.allowIncompatible=true \
     --conf spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem \
     --conf spark.hadoop.fs.s3a.aws.credentials.provider=com.amazonaws.auth.DefaultAWSCredentialsProviderChain \
     tpcbench.py \
diff --git a/docs/source/user-guide/latest/compatibility.md b/docs/source/user-guide/latest/compatibility.md
index 73d27d10a..17a951578 100644
--- a/docs/source/user-guide/latest/compatibility.md
+++ b/docs/source/user-guide/latest/compatibility.md
@@ -32,8 +32,9 @@ Comet has the following limitations when reading Parquet files:
 
 ## ANSI Mode
 
-Comet will fall back to Spark for the following expressions when ANSI mode is enabled, unless
-`spark.comet.expression.allowIncompatible=true`.
+Comet will fall back to Spark for the following expressions when ANSI mode is enabled. These expressions can be enabled by setting
+`spark.comet.expression.EXPRNAME.allowIncompatible=true`, where `EXPRNAME` is the Spark expression class name. See
+the [Comet Supported Expressions Guide](expressions.md) for more information on this configuration setting.
 
 - Average
 - Sum
@@ -58,9 +59,6 @@ Expressions that are not 100% Spark-compatible will fall back to Spark by defaul
 `spark.comet.expression.EXPRNAME.allowIncompatible=true`, where `EXPRNAME` is the Spark expression class name. See
 the [Comet Supported Expressions Guide](expressions.md) for more information on this configuration setting.
 
-It is also possible to specify `spark.comet.expression.allowIncompatible=true` to enable all
-incompatible expressions.
-
 ## Regular Expressions
 
 Comet uses the Rust regexp crate for evaluating regular expressions, and this has different behavior from Java's
diff --git a/docs/source/user-guide/latest/configs.md b/docs/source/user-guide/latest/configs.md
index ea8589e94..7e3d2a79f 100644
--- a/docs/source/user-guide/latest/configs.md
+++ b/docs/source/user-guide/latest/configs.md
@@ -58,21 +58,20 @@ Comet provides the following configuration settings.
 <!-- WARNING! DO NOT MANUALLY MODIFY CONTENT BETWEEN THE BEGIN AND END TAGS -->
 <!--BEGIN:CONFIG_TABLE[exec]-->
 
-| Config                                     | Description [...]
-| ------------------------------------------ | ----------- [...]
-| `spark.comet.caseConversion.enabled`       | Java uses locale-specific rules when converting strings to upper or lower case and Rust does not, so we disable upper and lower by default. [...]
-| `spark.comet.debug.enabled`                | Whether to enable debug mode for Comet. When enabled, Comet will do additional checks for debugging purpose. For example, validating array when importing arrays from JVM at native side. Note that these checks may be expensive in performance and should only be enabled for debugging purpose. [...]
-| `spark.comet.dppFallback.enabled`          | Whether to fall back to Spark for queries that use DPP. [...]
-| `spark.comet.enabled`                      | Whether to enable Comet extension for Spark. When this is turned on, Spark will use Comet to read Parquet data source. Note that to enable native vectorized execution, both this config and `spark.comet.exec.enabled` need to be enabled. Can be overridden by environment variable `ENABLE_COMET`. [...]
-| `spark.comet.exceptionOnDatetimeRebase`    | Whether to throw exception when seeing dates/timestamps from the legacy hybrid (Julian + Gregorian) calendar. Since Spark 3, dates/timestamps were written according to the Proleptic Gregorian calendar. When this is true, Comet will throw exceptions when seeing these dates/timestamps that were written by Spark version before 3.0. If this is false, these dates/timestamps will be read as if they were written to the Proleptic Gregorian calendar [...]
-| `spark.comet.exec.enabled`                 | Whether to enable Comet native vectorized execution for Spark. This controls whether Spark should convert operators into their Comet counterparts and execute them in native space. Note: each operator is associated with a separate config in the format of `spark.comet.exec.<operator_name>.enabled` at the moment, and both the config and this need to be turned on, in order for the operator to be executed in native. [...]
-| `spark.comet.exec.replaceSortMergeJoin`    | Experimental feature to force Spark to replace SortMergeJoin with ShuffledHashJoin for improved performance. This feature is not stable yet. For more information, refer to the [Comet Tuning Guide](https://datafusion.apache.org/comet/user-guide/tuning.html). [...]
-| `spark.comet.exec.strictFloatingPoint`     | When enabled, fall back to Spark for floating-point operations that may differ from Spark, such as when comparing or sorting -0.0 and 0.0. For more information, refer to the [Comet Compatibility Guide](https://datafusion.apache.org/comet/user-guide/compatibility.html). [...]
-| `spark.comet.expression.allowIncompatible` | Comet is not currently fully compatible with Spark for all expressions. Set this config to true to allow them anyway. For more information, refer to the [Comet Compatibility Guide](https://datafusion.apache.org/comet/user-guide/compatibility.html). [...]
-| `spark.comet.maxTempDirectorySize`         | The maximum amount of data (in bytes) stored inside the temporary directories. [...]
-| `spark.comet.metrics.updateInterval`       | The interval in milliseconds to update metrics. If interval is negative, metrics will be updated upon task completion. [...]
-| `spark.comet.nativeLoadRequired`           | Whether to require Comet native library to load successfully when Comet is enabled. If not, Comet will silently fallback to Spark when it fails to load the native lib. Otherwise, an error will be thrown and the Spark job will be aborted. [...]
-| `spark.comet.regexp.allowIncompatible`     | Comet is not currently fully compatible with Spark for all regular expressions. Set this config to true to allow them anyway. For more information, refer to the [Comet Compatibility Guide](https://datafusion.apache.org/comet/user-guide/compatibility.html). [...]
+| Config                                  | Description [...]
+| --------------------------------------- | ----------- [...]
+| `spark.comet.caseConversion.enabled`    | Java uses locale-specific rules when converting strings to upper or lower case and Rust does not, so we disable upper and lower by default. [...]
+| `spark.comet.debug.enabled`             | Whether to enable debug mode for Comet. When enabled, Comet will do additional checks for debugging purpose. For example, validating array when importing arrays from JVM at native side. Note that these checks may be expensive in performance and should only be enabled for debugging purpose. [...]
+| `spark.comet.dppFallback.enabled`       | Whether to fall back to Spark for queries that use DPP. [...]
+| `spark.comet.enabled`                   | Whether to enable Comet extension for Spark. When this is turned on, Spark will use Comet to read Parquet data source. Note that to enable native vectorized execution, both this config and `spark.comet.exec.enabled` need to be enabled. It can be overridden by the environment variable `ENABLE_COMET`. [...]
+| `spark.comet.exceptionOnDatetimeRebase` | Whether to throw exception when seeing dates/timestamps from the legacy hybrid (Julian + Gregorian) calendar. Since Spark 3, dates/timestamps were written according to the Proleptic Gregorian calendar. When this is true, Comet will throw exceptions when seeing these dates/timestamps that were written by Spark version before 3.0. If this is false, these dates/timestamps will be read as if they were written to the Proleptic Gregorian calendar and [...]
+| `spark.comet.exec.enabled`              | Whether to enable Comet native vectorized execution for Spark. This controls whether Spark should convert operators into their Comet counterparts and execute them in native space. Note: each operator is associated with a separate config in the format of `spark.comet.exec.<operator_name>.enabled` at the moment, and both the config and this need to be turned on, in order for the operator to be executed in native. [...]
+| `spark.comet.exec.replaceSortMergeJoin` | Experimental feature to force Spark to replace SortMergeJoin with ShuffledHashJoin for improved performance. This feature is not stable yet. For more information, refer to the [Comet Tuning Guide](https://datafusion.apache.org/comet/user-guide/tuning.html). [...]
+| `spark.comet.exec.strictFloatingPoint`  | When enabled, fall back to Spark for floating-point operations that may differ from Spark, such as when comparing or sorting -0.0 and 0.0. For more information, refer to the [Comet Compatibility Guide](https://datafusion.apache.org/comet/user-guide/compatibility.html). [...]
+| `spark.comet.maxTempDirectorySize`      | The maximum amount of data (in bytes) stored inside the temporary directories. [...]
+| `spark.comet.metrics.updateInterval`    | The interval in milliseconds to update metrics. If interval is negative, metrics will be updated upon task completion. [...]
+| `spark.comet.nativeLoadRequired`        | Whether to require Comet native library to load successfully when Comet is enabled. If not, Comet will silently fallback to Spark when it fails to load the native lib. Otherwise, an error will be thrown and the Spark job will be aborted. [...]
+| `spark.comet.regexp.allowIncompatible`  | Comet is not currently fully compatible with Spark for all regular expressions. Set this config to true to allow them anyway. For more information, refer to the [Comet Compatibility Guide](https://datafusion.apache.org/comet/user-guide/compatibility.html). [...]
 
 <!--END:CONFIG_TABLE-->
 
@@ -89,7 +88,7 @@ These settings can be used to determine which parts of the plan are accelerated
 | `spark.comet.explain.native.enabled`     | When this setting is enabled, Comet will provide a tree representation of the native query plan before execution and again after execution, with metrics. | false         |
 | `spark.comet.explain.rules`              | When this setting is enabled, Comet will log all plan transformations performed in physical optimizer rules. Default: false | false         |
 | `spark.comet.explainFallback.enabled`    | When this setting is enabled, Comet will provide logging explaining the reason(s) why a query stage cannot be executed natively. Set this to false to reduce the amount of logging. | false         |
-| `spark.comet.logFallbackReasons.enabled` | When this setting is enabled, Comet will log warnings for all fallback reasons. Can be overridden by environment variable `ENABLE_COMET_LOG_FALLBACK_REASONS`. | false         |
+| `spark.comet.logFallbackReasons.enabled` | When this setting is enabled, Comet will log warnings for all fallback reasons. It can be overridden by the environment variable `ENABLE_COMET_LOG_FALLBACK_REASONS`. | false         |
 
 <!--END:CONFIG_TABLE-->
 
@@ -139,12 +138,12 @@ These settings can be used to determine which parts of the plan are accelerated
 | `spark.comet.convert.csv.enabled`                   | When enabled, data from Spark (non-native) CSV v1 and v2 scans will be converted to Arrow format. This is an experimental feature and has known issues with non-UTC timezones. | false                           |
 | `spark.comet.convert.json.enabled`                  | When enabled, data from Spark (non-native) JSON v1 and v2 scans will be converted to Arrow format. This is an experimental feature and has known issues with non-UTC timezones. | false                           |
 | `spark.comet.convert.parquet.enabled`               | When enabled, data from Spark (non-native) Parquet v1 and v2 scans will be converted to Arrow format. This is an experimental feature and has known issues with non-UTC timezones. | false                           |
-| `spark.comet.exec.onHeap.enabled`                   | Whether to allow Comet to run in on-heap mode. Required for running Spark SQL tests. Can be overridden by environment variable `ENABLE_COMET_ONHEAP`. | false                           |
+| `spark.comet.exec.onHeap.enabled`                   | Whether to allow Comet to run in on-heap mode. Required for running Spark SQL tests. It can be overridden by the environment variable `ENABLE_COMET_ONHEAP`. | false                           |
 | `spark.comet.exec.onHeap.memoryPool`                | The type of memory pool to be used for Comet native execution when running Spark in on-heap mode. Available pool types are `greedy`, `fair_spill`, `greedy_task_shared`, `fair_spill_task_shared`, `greedy_global`, `fair_spill_global`, and `unbounded`. | greedy_task_shared              |
 | `spark.comet.memoryOverhead`                        | The amount of additional memory to be allocated per executor process for Comet, in MiB, when running Spark in on-heap mode. | 1024 MiB                        |
 | `spark.comet.sparkToColumnar.enabled`               | Whether to enable Spark to Arrow columnar conversion. When this is turned on, Comet will convert operators in `spark.comet.sparkToColumnar.supportedOperatorList` into Arrow columnar format before processing. This is an experimental feature and has known issues with non-UTC timezones. | false                           |
 | `spark.comet.sparkToColumnar.supportedOperatorList` | A comma-separated list of operators that will be converted to Arrow columnar format when `spark.comet.sparkToColumnar.enabled` is true. | Range,InMemoryTableScan,RDDScan |
-| `spark.comet.testing.strict`                        | Experimental option to enable strict testing, which will fail tests that could be more comprehensive, such as checking for a specific fallback reason. Can be overridden by environment variable `ENABLE_COMET_STRICT_TESTING`. | false                           |
+| `spark.comet.testing.strict`                        | Experimental option to enable strict testing, which will fail tests that could be more comprehensive, such as checking for a specific fallback reason. It can be overridden by the environment variable `ENABLE_COMET_STRICT_TESTING`. | false                           |
 
 <!--END:CONFIG_TABLE-->
 
diff --git a/docs/source/user-guide/latest/expressions.md b/docs/source/user-guide/latest/expressions.md
index f56fe1975..d58fc8a90 100644
--- a/docs/source/user-guide/latest/expressions.md
+++ b/docs/source/user-guide/latest/expressions.md
@@ -31,9 +31,6 @@ of expressions that can be disabled.
 Expressions that are not Spark-compatible will fall back to Spark by default and can be enabled by setting
 `spark.comet.expression.EXPRNAME.allowIncompatible=true`.
 
-It is also possible to specify `spark.comet.expression.allowIncompatible=true` to enable all
-incompatible expressions.
-
 ## Conditional Expressions
 
 | Expression | SQL                                         | Spark-Compatible? |
diff --git a/docs/source/user-guide/latest/kubernetes.md b/docs/source/user-guide/latest/kubernetes.md
index 4aa5a88ad..2fb037d63 100644
--- a/docs/source/user-guide/latest/kubernetes.md
+++ b/docs/source/user-guide/latest/kubernetes.md
@@ -79,7 +79,6 @@ spec:
     "spark.plugins": "org.apache.spark.CometPlugin"
     "spark.comet.enabled": "true"
     "spark.comet.exec.enabled": "true"
-    "spark.comet.expression.allowIncompatible": "true"
     "spark.comet.exec.shuffle.enabled": "true"
     "spark.comet.exec.shuffle.mode": "auto"
     "spark.shuffle.manager": 
"org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager"
diff --git a/spark/src/main/scala/org/apache/comet/expressions/CometCast.scala b/spark/src/main/scala/org/apache/comet/expressions/CometCast.scala
index 7c4ffa73e..98ce8ac44 100644
--- a/spark/src/main/scala/org/apache/comet/expressions/CometCast.scala
+++ b/spark/src/main/scala/org/apache/comet/expressions/CometCast.scala
@@ -93,10 +93,9 @@ object CometCast extends CometExpressionSerde[Cast] with CometExprShim {
         castBuilder.setDatatype(dataType)
         castBuilder.setEvalMode(evalModeToProto(evalMode))
         castBuilder.setAllowIncompat(
-          CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.get() ||
-            SQLConf.get
-              .getConfString(CometConf.getExprAllowIncompatConfigKey(classOf[Cast]), "false")
-              .toBoolean)
+          SQLConf.get
+            .getConfString(CometConf.getExprAllowIncompatConfigKey(classOf[Cast]), "false")
+            .toBoolean)
         castBuilder.setTimezone(timeZoneId.getOrElse("UTC"))
         Some(
           ExprOuterClass.Expr
diff --git a/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala b/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
index 3f62cd7f9..6bf3776a2 100644
--- a/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
+++ b/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
@@ -404,10 +404,11 @@ object QueryPlanSerde extends Logging with CometExprShim {
             None
           case Incompatible(notes) =>
             val exprAllowIncompat = CometConf.isExprAllowIncompat(exprConfName)
-            if (exprAllowIncompat || CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.get()) {
+            if (exprAllowIncompat) {
               if (notes.isDefined) {
                 logWarning(
-                  s"Comet supports $fn when ${CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key}=true " +
+                  s"Comet supports $fn when " +
+                    s"${CometConf.getExprAllowIncompatConfigKey(exprConfName)}=true " +
                     s"but has notes: ${notes.get}")
               }
               aggHandler.convert(aggExpr, fn, inputs, binding, conf)
@@ -416,9 +417,8 @@ object QueryPlanSerde extends Logging with CometExprShim {
               withInfo(
                 fn,
                 s"$fn is not fully compatible with Spark$optionalNotes. To 
enable it anyway, " +
-                  s"set ${CometConf.getExprAllowIncompatConfigKey(exprConfName)}=true, or set " +
-                  s"${CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key}=true to enable all " +
-                  s"incompatible expressions. ${CometConf.COMPAT_GUIDE}.")
+                  s"set ${CometConf.getExprAllowIncompatConfigKey(exprConfName)}=true. " +
+                  s"${CometConf.COMPAT_GUIDE}.")
               None
             }
           case Compatible(notes) =>
@@ -509,10 +509,11 @@ object QueryPlanSerde extends Logging with CometExprShim {
           None
         case Incompatible(notes) =>
           val exprAllowIncompat = CometConf.isExprAllowIncompat(exprConfName)
-          if (exprAllowIncompat || CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.get()) {
+          if (exprAllowIncompat) {
             if (notes.isDefined) {
               logWarning(
-                s"Comet supports $expr when ${CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key}=true " +
+                s"Comet supports $expr when " +
+                  s"${CometConf.getExprAllowIncompatConfigKey(exprConfName)}=true " +
                   s"but has notes: ${notes.get}")
             }
             handler.convert(expr, inputs, binding)
@@ -521,9 +522,8 @@ object QueryPlanSerde extends Logging with CometExprShim {
             withInfo(
               expr,
               s"$expr is not fully compatible with Spark$optionalNotes. To 
enable it anyway, " +
-                s"set ${CometConf.getExprAllowIncompatConfigKey(exprConfName)}=true, or set " +
-                s"${CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key}=true to enable all " +
-                s"incompatible expressions. ${CometConf.COMPAT_GUIDE}.")
+                s"set ${CometConf.getExprAllowIncompatConfigKey(exprConfName)}=true. " +
+                s"${CometConf.COMPAT_GUIDE}.")
             None
           }
         case Compatible(notes) =>
diff --git a/spark/src/test/scala/org/apache/comet/exec/CometAggregateSuite.scala b/spark/src/test/scala/org/apache/comet/exec/CometAggregateSuite.scala
index 9ed023a71..7e577c5fd 100644
--- a/spark/src/test/scala/org/apache/comet/exec/CometAggregateSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/exec/CometAggregateSuite.scala
@@ -23,6 +23,7 @@ import scala.util.Random
 
 import org.apache.hadoop.fs.Path
 import org.apache.spark.sql.{CometTestBase, DataFrame, Row}
+import org.apache.spark.sql.catalyst.expressions.Cast
 import org.apache.spark.sql.catalyst.optimizer.EliminateSorts
 import org.apache.spark.sql.comet.CometHashAggregateExec
 import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
@@ -1073,7 +1074,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
         withSQLConf(
          CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> nativeShuffleEnabled.toString,
           CometConf.COMET_SHUFFLE_MODE.key -> "native",
-          CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+          CometConf.getExprAllowIncompatConfigKey(classOf[Cast]) -> "true") {
           withTempDir { dir =>
             val path = new Path(dir.toURI.toString, "test")
             makeParquetFile(path, 1000, 20, dictionaryEnabled)
diff --git a/spark/src/test/scala/org/apache/spark/sql/comet/CometPlanStabilitySuite.scala b/spark/src/test/scala/org/apache/spark/sql/comet/CometPlanStabilitySuite.scala
index c4c79e952..8f260e2ca 100644
--- a/spark/src/test/scala/org/apache/spark/sql/comet/CometPlanStabilitySuite.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/comet/CometPlanStabilitySuite.scala
@@ -28,7 +28,8 @@ import org.apache.commons.io.FileUtils
 import org.apache.spark.SparkContext
 import org.apache.spark.internal.config.{MEMORY_OFFHEAP_ENABLED, MEMORY_OFFHEAP_SIZE}
 import org.apache.spark.sql.TPCDSBase
-import org.apache.spark.sql.catalyst.expressions.AttributeSet
+import org.apache.spark.sql.catalyst.expressions.{AttributeSet, Cast}
+import org.apache.spark.sql.catalyst.expressions.aggregate.{Average, Sum}
 import org.apache.spark.sql.catalyst.util.resourceToString
 import org.apache.spark.sql.execution.{FormattedMode, ReusedSubqueryExec, SparkPlan, SubqueryBroadcastExec, SubqueryExec}
 import org.apache.spark.sql.execution.adaptive.DisableAdaptiveExecutionSuite
@@ -223,9 +224,11 @@ trait CometPlanStabilitySuite extends DisableAdaptiveExecutionSuite with TPCDSBa
       CometConf.COMET_DPP_FALLBACK_ENABLED.key -> "false",
       CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
       CometConf.COMET_EXEC_SORT_MERGE_JOIN_WITH_JOIN_FILTER_ENABLED.key -> "true",
-      // COMET_EXPR_ALLOW_INCOMPATIBLE is needed for Spark 4.0.0 / ANSI support
+      // Allow Incompatible is needed for Sum + Average for Spark 4.0.0 / ANSI support
+      CometConf.getExprAllowIncompatConfigKey(classOf[Average]) -> "true",
+      CometConf.getExprAllowIncompatConfigKey(classOf[Sum]) -> "true",
       // as well as for v1.4/q9, v1.4/q44, v2.7.0/q6, v2.7.0/q64
-      CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true",
+      CometConf.getExprAllowIncompatConfigKey(classOf[Cast]) -> "true",
       SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "10MB") {
       val qe = sql(queryString).queryExecution
       val plan = qe.executedPlan
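
A usage note on the test changes above: the per-expression key can also be
derived from the expression class instead of being hard-coded. A minimal
sketch, assuming a suite that (like CometTestBase) mixes in Spark's
SQLTestUtils so that withSQLConf is in scope:

    import org.apache.spark.sql.catalyst.expressions.Cast
    import org.apache.comet.CometConf

    // Derives "spark.comet.expression.Cast.allowIncompatible" from the class,
    // mirroring CometAggregateSuite and CometPlanStabilitySuite above:
    withSQLConf(CometConf.getExprAllowIncompatConfigKey(classOf[Cast]) -> "true") {
      // run queries that exercise Cast behavior Comet marks as incompatible
    }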


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

