This is an automated email from the ASF dual-hosted git repository.
agrove pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git
The following commit(s) were added to refs/heads/main by this push:
new 551574108 chore: Remove many references to `COMET_EXPR_ALLOW_INCOMPATIBLE` (#2775)
551574108 is described below
commit 55157410813fd62e9022a709e09f5dfbc6c07186
Author: Andy Grove <[email protected]>
AuthorDate: Fri Nov 14 12:58:58 2025 -0700
chore: Remove many references to `COMET_EXPR_ALLOW_INCOMPATIBLE` (#2775)
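The global `COMET_EXPR_ALLOW_INCOMPATIBLE` flag opted every supported
expression into potentially incompatible native behavior at once. The
changes below move the Cast serde and the test suites onto per-expression
keys derived via `CometConf.getExprAllowIncompatConfigKey`. A minimal
usage sketch (not part of this patch; it assumes a `SparkSession` named
`spark` is in scope):

    import org.apache.spark.sql.catalyst.expressions.Cast
    import org.apache.comet.CometConf

    // Before: a single global opt-in covering all expressions
    spark.conf.set(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key, "true")

    // After: opt in one expression class at a time, here Cast only
    spark.conf.set(CometConf.getExprAllowIncompatConfigKey(classOf[Cast]), "true")

As the CometCast change below shows, the serde still honors the global
flag as a fallback alongside the per-expression key.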
---
.../main/scala/org/apache/comet/GenerateDocs.scala | 2 +-
.../org/apache/comet/expressions/CometCast.scala | 7 ++-
.../apache/comet/CometArrayExpressionSuite.scala | 70 +++++++++++-----------
.../scala/org/apache/comet/CometCastSuite.scala | 16 ++---
.../org/apache/comet/CometExpressionSuite.scala | 48 +++++----------
.../apache/comet/CometStringExpressionSuite.scala | 28 ++++-----
.../apache/comet/exec/CometAggregateSuite.scala | 25 ++++----
.../org/apache/comet/exec/CometExecSuite.scala | 7 +--
.../apache/comet/exec/CometWindowExecSuite.scala | 1 -
9 files changed, 91 insertions(+), 113 deletions(-)
diff --git a/spark/src/main/scala/org/apache/comet/GenerateDocs.scala b/spark/src/main/scala/org/apache/comet/GenerateDocs.scala
index 8abd09241..6f3c05d50 100644
--- a/spark/src/main/scala/org/apache/comet/GenerateDocs.scala
+++ b/spark/src/main/scala/org/apache/comet/GenerateDocs.scala
@@ -77,7 +77,7 @@ object GenerateDocs {
// append env var info if present
val docWithEnvVar = conf.envVar match {
case Some(envVarName) =>
- s"$doc Can be overridden by environment variable `$envVarName`."
+ s"$doc It can be overridden by the environment variable `$envVarName`."
case None => doc
}
if (conf.defaultValue.isEmpty) {
diff --git a/spark/src/main/scala/org/apache/comet/expressions/CometCast.scala b/spark/src/main/scala/org/apache/comet/expressions/CometCast.scala
index ee7216686..7c4ffa73e 100644
--- a/spark/src/main/scala/org/apache/comet/expressions/CometCast.scala
+++ b/spark/src/main/scala/org/apache/comet/expressions/CometCast.scala
@@ -20,6 +20,7 @@
package org.apache.comet.expressions
import org.apache.spark.sql.catalyst.expressions.{Attribute, Cast, Expression, Literal}
+import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{ArrayType, DataType, DataTypes, DecimalType, NullType, StructType}
import org.apache.comet.CometConf
@@ -91,7 +92,11 @@ object CometCast extends CometExpressionSerde[Cast] with CometExprShim {
castBuilder.setChild(childExpr)
castBuilder.setDatatype(dataType)
castBuilder.setEvalMode(evalModeToProto(evalMode))
- castBuilder.setAllowIncompat(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.get())
+ castBuilder.setAllowIncompat(
+ CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.get() ||
+ SQLConf.get
+ .getConfString(CometConf.getExprAllowIncompatConfigKey(classOf[Cast]), "false")
+ .toBoolean)
castBuilder.setTimezone(timeZoneId.getOrElse("UTC"))
Some(
ExprOuterClass.Expr
diff --git a/spark/src/test/scala/org/apache/comet/CometArrayExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometArrayExpressionSuite.scala
index a8f5c66dd..c5060382e 100644
--- a/spark/src/test/scala/org/apache/comet/CometArrayExpressionSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometArrayExpressionSuite.scala
@@ -23,6 +23,7 @@ import scala.util.Random
import org.apache.hadoop.fs.Path
import org.apache.spark.sql.CometTestBase
+import org.apache.spark.sql.catalyst.expressions.{ArrayAppend, ArrayDistinct, ArrayExcept, ArrayInsert, ArrayIntersect, ArrayJoin, ArrayRepeat, ArraysOverlap, ArrayUnion}
import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
import org.apache.spark.sql.functions._
@@ -135,7 +136,12 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
}
test("array_append") {
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ val incompatKey = if (isSpark40Plus) {
+ classOf[ArrayInsert]
+ } else {
+ classOf[ArrayAppend]
+ }
+ withSQLConf(CometConf.getExprAllowIncompatConfigKey(incompatKey) -> "true") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
withTempView("t1") {
@@ -164,7 +170,7 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
test("array_prepend") {
assume(isSpark35Plus) // in Spark 3.5 array_prepend is implemented via array_insert
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf(CometConf.getExprAllowIncompatConfigKey(classOf[ArrayInsert]) -> "true") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
withTempView("t1") {
@@ -193,7 +199,7 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
}
test("ArrayInsert") {
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf(CometConf.getExprAllowIncompatConfigKey(classOf[ArrayInsert]) -> "true") {
Seq(true, false).foreach(dictionaryEnabled =>
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "test.parquet")
@@ -218,7 +224,7 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
test("ArrayInsertUnsupportedArgs") {
// This test checks that the else branch in ArrayInsert
// mapping to the comet is valid and fallback to spark is working fine.
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf(CometConf.getExprAllowIncompatConfigKey(classOf[ArrayInsert]) -> "true") {
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "test.parquet")
makeParquetFileAllPrimitiveTypes(path, dictionaryEnabled = false, 10000)
@@ -227,7 +233,9 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
.withColumn("arr", array(col("_4"), lit(null), col("_4")))
.withColumn("idx", udf((_: Int) => 1).apply(col("_4")))
.withColumn("arrUnsupportedArgs", expr("array_insert(arr, idx, 1)"))
- checkSparkAnswer(df.select("arrUnsupportedArgs"))
+ checkSparkAnswerAndFallbackReasons(
+ df.select("arrUnsupportedArgs"),
+ Set("scalaudf is not supported", "unsupported arguments for ArrayInsert"))
}
}
}
@@ -346,7 +354,7 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
}
test("array_distinct") {
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf(CometConf.getExprAllowIncompatConfigKey(classOf[ArrayDistinct]) -> "true") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
withTempView("t1") {
@@ -375,7 +383,7 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
}
test("array_union") {
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf(CometConf.getExprAllowIncompatConfigKey(classOf[ArrayUnion]) -> "true") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
withTempView("t1") {
@@ -446,7 +454,7 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
}
test("array_intersect") {
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf(CometConf.getExprAllowIncompatConfigKey(classOf[ArrayIntersect]) -> "true") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
withTempView("t1") {
@@ -466,7 +474,7 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
}
test("array_join") {
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf(CometConf.getExprAllowIncompatConfigKey(classOf[ArrayJoin]) -> "true") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
withTempView("t1") {
@@ -488,7 +496,7 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
}
test("arrays_overlap") {
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf(CometConf.getExprAllowIncompatConfigKey(classOf[ArraysOverlap]) -> "true") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
withTempView("t1") {
@@ -512,31 +520,26 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
test("array_compact") {
// TODO fix for Spark 4.0.0
assume(!isSpark40Plus)
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
- Seq(true, false).foreach { dictionaryEnabled =>
- withTempDir { dir =>
- withTempView("t1") {
- val path = new Path(dir.toURI.toString, "test.parquet")
- makeParquetFileAllPrimitiveTypes(
- path,
- dictionaryEnabled = dictionaryEnabled,
- n = 10000)
- spark.read.parquet(path.toString).createOrReplaceTempView("t1")
+ Seq(true, false).foreach { dictionaryEnabled =>
+ withTempDir { dir =>
+ withTempView("t1") {
+ val path = new Path(dir.toURI.toString, "test.parquet")
+ makeParquetFileAllPrimitiveTypes(path, dictionaryEnabled = dictionaryEnabled, n = 10000)
+ spark.read.parquet(path.toString).createOrReplaceTempView("t1")
- checkSparkAnswerAndOperator(
- sql("SELECT array_compact(array(_2)) FROM t1 WHERE _2 IS NULL"))
- checkSparkAnswerAndOperator(
- sql("SELECT array_compact(array(_2)) FROM t1 WHERE _2 IS NOT NULL"))
- checkSparkAnswerAndOperator(
- sql("SELECT array_compact(array(_2, _3, null)) FROM t1 WHERE _2 IS NOT NULL"))
- }
+ checkSparkAnswerAndOperator(
+ sql("SELECT array_compact(array(_2)) FROM t1 WHERE _2 IS NULL"))
+ checkSparkAnswerAndOperator(
+ sql("SELECT array_compact(array(_2)) FROM t1 WHERE _2 IS NOT NULL"))
+ checkSparkAnswerAndOperator(
+ sql("SELECT array_compact(array(_2, _3, null)) FROM t1 WHERE _2 IS NOT NULL"))
}
}
}
}
test("array_except - basic test (only integer values)") {
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf(CometConf.getExprAllowIncompatConfigKey(classOf[ArrayExcept]) -> "true") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
withTempView("t1") {
@@ -571,7 +574,7 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
SchemaGenOptions(generateArray = false, generateStruct = false, generateMap = false),
DataGenOptions(allowNull = true, generateNegativeZero = true))
}
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf(CometConf.getExprAllowIncompatConfigKey(classOf[ArrayExcept]) -> "true") {
withTempView("t1", "t2") {
val table = spark.read.parquet(filename)
table.createOrReplaceTempView("t1")
@@ -609,8 +612,7 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
withSQLConf(
CometConf.COMET_NATIVE_SCAN_ENABLED.key -> "false",
CometConf.COMET_SPARK_TO_ARROW_ENABLED.key -> "true",
- CometConf.COMET_CONVERT_FROM_PARQUET_ENABLED.key -> "true",
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ CometConf.COMET_CONVERT_FROM_PARQUET_ENABLED.key -> "true") {
withTempView("t1", "t2") {
val table = spark.read.parquet(filename)
table.createOrReplaceTempView("t1")
@@ -631,7 +633,7 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
test("array_repeat") {
withSQLConf(
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true",
+ CometConf.getExprAllowIncompatConfigKey(classOf[ArrayRepeat]) -> "true",
CometConf.COMET_EXPLAIN_FALLBACK_ENABLED.key -> "true") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
@@ -722,9 +724,7 @@ class CometArrayExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelp
}
test("array literals") {
- withSQLConf(
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true",
- CometConf.COMET_EXPLAIN_FALLBACK_ENABLED.key -> "true") {
+ withSQLConf(CometConf.COMET_EXPLAIN_FALLBACK_ENABLED.key -> "true") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
withTempView("t1") {
diff --git a/spark/src/test/scala/org/apache/comet/CometCastSuite.scala b/spark/src/test/scala/org/apache/comet/CometCastSuite.scala
index 6e9b4ec70..1912e982b 100644
--- a/spark/src/test/scala/org/apache/comet/CometCastSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometCastSuite.scala
@@ -433,7 +433,7 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("cast FloatType to DecimalType(10,2) - allow incompat") {
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf(CometConf.getExprAllowIncompatConfigKey(classOf[Cast]) -> "true") {
castTest(generateFloats(), DataTypes.createDecimalType(10, 2))
}
}
@@ -493,7 +493,7 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("cast DoubleType to DecimalType(10,2) - allow incompat") {
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf(CometConf.getExprAllowIncompatConfigKey(classOf[Cast]) -> "true") {
castTest(generateDoubles(), DataTypes.createDecimalType(10, 2))
}
}
@@ -659,7 +659,7 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("cast StringType to FloatType (partial support)") {
withSQLConf(
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true",
+ CometConf.getExprAllowIncompatConfigKey(classOf[Cast]) -> "true",
SQLConf.ANSI_ENABLED.key -> "false") {
castTest(
gen.generateStrings(dataSize, "0123456789.", 8).toDF("a"),
@@ -675,7 +675,7 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("cast StringType to DoubleType (partial support)") {
withSQLConf(
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true",
+ CometConf.getExprAllowIncompatConfigKey(classOf[Cast]) -> "true",
SQLConf.ANSI_ENABLED.key -> "false") {
castTest(
gen.generateStrings(dataSize, "0123456789.", 8).toDF("a"),
@@ -692,7 +692,7 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("cast StringType to DecimalType(10,2) (partial support)") {
withSQLConf(
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true",
+ CometConf.getExprAllowIncompatConfigKey(classOf[Cast]) -> "true",
SQLConf.ANSI_ENABLED.key -> "false") {
val values = gen
.generateStrings(dataSize, "0123456789.", 8)
@@ -781,7 +781,7 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
ignore("cast StringType to TimestampType") {
// https://github.com/apache/datafusion-comet/issues/328
- withSQLConf((CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key, "true")) {
+ withSQLConf((CometConf.getExprAllowIncompatConfigKey(classOf[Cast]), "true")) {
val values = Seq("2020-01-01T12:34:56.123456", "T2") ++
gen.generateStrings(
dataSize,
timestampPattern,
@@ -832,7 +832,7 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
// test for invalid inputs
withSQLConf(
SQLConf.SESSION_LOCAL_TIMEZONE.key -> "UTC",
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ CometConf.getExprAllowIncompatConfigKey(classOf[Cast]) -> "true") {
val values = Seq("-9?", "1-", "0.5")
castTimestampTest(values.toDF("a"), DataTypes.TimestampType)
}
@@ -1268,7 +1268,7 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
// with ANSI enabled, we should produce the same exception as Spark
withSQLConf(
SQLConf.ANSI_ENABLED.key -> "true",
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ CometConf.getExprAllowIncompatConfigKey(classOf[Cast]) -> "true") {
// cast() should throw exception on invalid inputs when ansi mode is enabled
val df = data.withColumn("converted", col("a").cast(toType))
diff --git a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
index 7d2f03513..0a50f1272 100644
--- a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
@@ -28,7 +28,7 @@ import org.scalatest.Tag
import org.apache.hadoop.fs.Path
import org.apache.spark.sql.{CometTestBase, DataFrame, Row}
-import org.apache.spark.sql.catalyst.expressions.{Alias, Cast, Literal, TruncDate, TruncTimestamp}
+import org.apache.spark.sql.catalyst.expressions.{Alias, Cast, FromUnixTime, Literal, TruncDate, TruncTimestamp}
import org.apache.spark.sql.catalyst.optimizer.SimplifyExtractValueOps
import org.apache.spark.sql.comet.{CometColumnarToRowExec, CometProjectExec}
import org.apache.spark.sql.execution.{InputAdapter, ProjectExec, SparkPlan, WholeStageCodegenExec}
@@ -191,13 +191,11 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("Integral Division Overflow Handling Matches Spark Behavior") {
withTable("t1") {
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
- val value = Long.MinValue
- sql("create table t1(c1 long, c2 short) using parquet")
- sql(s"insert into t1 values($value, -1)")
- val res = sql("select c1 div c2 from t1 order by c1")
- checkSparkAnswerAndOperator(res)
- }
+ val value = Long.MinValue
+ sql("create table t1(c1 long, c2 short) using parquet")
+ sql(s"insert into t1 values($value, -1)")
+ val res = sql("select c1 div c2 from t1 order by c1")
+ checkSparkAnswerAndOperator(res)
}
}
@@ -473,9 +471,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("test coalesce lazy eval") {
- withSQLConf(
- SQLConf.ANSI_ENABLED.key -> "true",
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
val data = Seq((9999999999999L, 0))
withParquetTable(data, "t1") {
val res = spark.sql("""
@@ -625,7 +621,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("cast timestamp and timestamp_ntz") {
withSQLConf(
SESSION_LOCAL_TIMEZONE.key -> "Asia/Kathmandu",
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ CometConf.getExprAllowIncompatConfigKey(classOf[Cast]) -> "true") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "timestamp_trunc.parquet")
@@ -647,7 +643,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("cast timestamp and timestamp_ntz to string") {
withSQLConf(
SESSION_LOCAL_TIMEZONE.key -> "Asia/Kathmandu",
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ CometConf.getExprAllowIncompatConfigKey(classOf[Cast]) -> "true") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "timestamp_trunc.parquet")
@@ -669,7 +665,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("cast timestamp and timestamp_ntz to long, date") {
withSQLConf(
SESSION_LOCAL_TIMEZONE.key -> "Asia/Kathmandu",
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ CometConf.getExprAllowIncompatConfigKey(classOf[Cast]) -> "true") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "timestamp_trunc.parquet")
@@ -759,7 +755,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("date_trunc with timestamp_ntz") {
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf(CometConf.getExprAllowIncompatConfigKey(classOf[Cast]) -> "true") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "timestamp_trunc.parquet")
@@ -1438,9 +1434,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("ceil and floor") {
Seq("true", "false").foreach { dictionary =>
- withSQLConf(
- "parquet.enable.dictionary" -> dictionary,
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf("parquet.enable.dictionary" -> dictionary) {
withParquetTable(
(-5 until 5).map(i => (i.toDouble + 0.3, i.toDouble + 0.8)),
"tbl",
@@ -1759,7 +1753,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
Seq(false, true).foreach { dictionary =>
withSQLConf(
"parquet.enable.dictionary" -> dictionary.toString,
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ CometConf.getExprAllowIncompatConfigKey(classOf[FromUnixTime]) -> "true") {
val table = "test"
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "test.parquet")
@@ -1992,9 +1986,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("hash functions") {
Seq(true, false).foreach { dictionary =>
- withSQLConf(
- "parquet.enable.dictionary" -> dictionary.toString,
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
val table = "test"
withTable(table) {
sql(s"create table $table(col string, a int, b float) using parquet")
@@ -2021,7 +2013,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
def withAnsiMode(enabled: Boolean)(f: => Unit): Unit = {
withSQLConf(
SQLConf.ANSI_ENABLED.key -> enabled.toString,
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> enabled.toString,
CometConf.COMET_ENABLED.key -> "true",
CometConf.COMET_EXEC_ENABLED.key -> "true")(f)
}
@@ -2099,9 +2090,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
val whitespaceChars = " \t\r\n"
val timestampPattern = "0123456789/:T" + whitespaceChars
Seq(true, false).foreach { dictionary =>
- withSQLConf(
- "parquet.enable.dictionary" -> dictionary.toString,
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
val table = "test"
withTable(table) {
sql(s"create table $table(col string, a int, b float) using parquet")
@@ -2176,7 +2165,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
def withAnsiMode(enabled: Boolean)(f: => Unit): Unit = {
withSQLConf(
SQLConf.ANSI_ENABLED.key -> enabled.toString,
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> enabled.toString,
CometConf.COMET_ENABLED.key -> "true",
CometConf.COMET_EXEC_ENABLED.key -> "true")(f)
}
@@ -2239,7 +2227,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
withSQLConf(
"spark.sql.optimizer.excludedRules" ->
"org.apache.spark.sql.catalyst.optimizer.ConstantFolding",
SQLConf.ANSI_ENABLED.key -> "true",
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true",
CometConf.COMET_ENABLED.key -> "true",
CometConf.COMET_EXEC_ENABLED.key -> "true") {
for (n <- Seq("2147483647", "-2147483648")) {
@@ -2723,7 +2710,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
Seq(true, false).foreach { ansiEnabled =>
withSQLConf(
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true",
SQLConf.ANSI_ENABLED.key -> ansiEnabled.toString(),
// Prevent the optimizer from collapsing an extract value of a create array
SQLConf.OPTIMIZER_EXCLUDED_RULES.key -> SimplifyExtractValueOps.ruleName) {
@@ -2800,9 +2786,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("test integral divide") {
// this test requires native_comet scan due to unsigned u8/u16 issue
- withSQLConf(
- CometConf.COMET_NATIVE_SCAN_IMPL.key -> CometConf.SCAN_NATIVE_COMET,
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf(CometConf.COMET_NATIVE_SCAN_IMPL.key -> CometConf.SCAN_NATIVE_COMET) {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
val path1 = new Path(dir.toURI.toString, "test1.parquet")
diff --git a/spark/src/test/scala/org/apache/comet/CometStringExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometStringExpressionSuite.scala
index 2479a41a3..f9882780c 100644
--- a/spark/src/test/scala/org/apache/comet/CometStringExpressionSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometStringExpressionSuite.scala
@@ -174,27 +174,23 @@ class CometStringExpressionSuite extends CometTestBase {
}
test("Chr") {
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
- val table = "test"
- withTable(table) {
- sql(s"create table $table(col varchar(20)) using parquet")
- sql(
- s"insert into $table values('65'), ('66'), ('67'), ('68'), ('65'), ('66'), ('67'), ('68')")
- checkSparkAnswerAndOperator(s"SELECT chr(col) FROM $table")
- }
+ val table = "test"
+ withTable(table) {
+ sql(s"create table $table(col varchar(20)) using parquet")
+ sql(
+ s"insert into $table values('65'), ('66'), ('67'), ('68'), ('65'), ('66'), ('67'), ('68')")
+ checkSparkAnswerAndOperator(s"SELECT chr(col) FROM $table")
}
}
test("Chr with null character") {
// test compatibility with Spark, spark supports chr(0)
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
- val table = "test0"
- withTable(table) {
- sql(s"create table $table(c9 int, c4 int) using parquet")
- sql(s"insert into $table values(0, 0), (66, null), (null, 70), (null, null)")
- val query = s"SELECT chr(c9), chr(c4) FROM $table"
- checkSparkAnswerAndOperator(query)
- }
+ val table = "test0"
+ withTable(table) {
+ sql(s"create table $table(c9 int, c4 int) using parquet")
+ sql(s"insert into $table values(0, 0), (66, null), (null, 70), (null, null)")
+ val query = s"SELECT chr(c9), chr(c4) FROM $table"
+ checkSparkAnswerAndOperator(query)
}
}
diff --git a/spark/src/test/scala/org/apache/comet/exec/CometAggregateSuite.scala b/spark/src/test/scala/org/apache/comet/exec/CometAggregateSuite.scala
index 1a61a0ddb..9ed023a71 100644
--- a/spark/src/test/scala/org/apache/comet/exec/CometAggregateSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/exec/CometAggregateSuite.scala
@@ -964,19 +964,17 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("avg/sum overflow on decimal(38, _)") {
- withSQLConf(CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
- val table = "overflow_decimal_38"
- withTable(table) {
- sql(s"create table $table(a decimal(38, 2), b INT) using parquet")
- sql(s"insert into $table values(42.00, 1), (999999999999999999999999999999999999.99, 1)")
- checkSparkAnswerAndNumOfAggregates(s"select sum(a) from $table", 2)
- sql(s"insert into $table values(42.00, 2), (99999999999999999999999999999999.99, 2)")
- sql(s"insert into $table values(999999999999999999999999999999999999.99, 3)")
- sql(s"insert into $table values(99999999999999999999999999999999.99, 4)")
- checkSparkAnswerAndNumOfAggregates(
- s"select avg(a), sum(a) from $table group by b order by b",
- 2)
- }
+ val table = "overflow_decimal_38"
+ withTable(table) {
+ sql(s"create table $table(a decimal(38, 2), b INT) using parquet")
+ sql(s"insert into $table values(42.00, 1), (999999999999999999999999999999999999.99, 1)")
+ checkSparkAnswerAndNumOfAggregates(s"select sum(a) from $table", 2)
+ sql(s"insert into $table values(42.00, 2), (99999999999999999999999999999999.99, 2)")
+ sql(s"insert into $table values(999999999999999999999999999999999999.99, 3)")
+ sql(s"insert into $table values(99999999999999999999999999999999.99, 4)")
+ checkSparkAnswerAndNumOfAggregates(
+ s"select avg(a), sum(a) from $table group by b order by b",
+ 2)
}
}
@@ -1005,7 +1003,6 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("final decimal avg") {
withSQLConf(
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "native") {
Seq(true, false).foreach { dictionaryEnabled =>
withSQLConf("parquet.enable.dictionary" -> dictionaryEnabled.toString)
{
diff --git a/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala b/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala
index be9e80347..64d7663c0 100644
--- a/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala
@@ -527,8 +527,7 @@ class CometExecSuite extends CometTestBase {
dataTypes.map { subqueryType =>
withSQLConf(
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
- CometConf.COMET_SHUFFLE_MODE.key -> "jvm",
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
withParquetTable((0 until 5).map(i => (i, i + 1)), "tbl") {
var column1 = s"CAST(max(_1) AS $subqueryType)"
if (subqueryType == "BINARY") {
@@ -1438,9 +1437,7 @@ class CometExecSuite extends CometTestBase {
}
test("SPARK-33474: Support typed literals as partition spec values") {
- withSQLConf(
- SESSION_LOCAL_TIMEZONE.key -> "Asia/Kathmandu",
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "true") {
+ withSQLConf(SESSION_LOCAL_TIMEZONE.key -> "Asia/Kathmandu") {
withTable("t1") {
val binaryStr = "Spark SQL"
val binaryHexStr = Hex.hex(UTF8String.fromString(binaryStr).getBytes).toString
diff --git a/spark/src/test/scala/org/apache/comet/exec/CometWindowExecSuite.scala b/spark/src/test/scala/org/apache/comet/exec/CometWindowExecSuite.scala
index 424ed5252..a1a24f4c2 100644
--- a/spark/src/test/scala/org/apache/comet/exec/CometWindowExecSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/exec/CometWindowExecSuite.scala
@@ -42,7 +42,6 @@ class CometWindowExecSuite extends CometTestBase {
withSQLConf(
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_EXEC_WINDOW_ENABLED.key -> "true",
- CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.key -> "false",
CometConf.COMET_NATIVE_SCAN_IMPL.key -> CometConf.SCAN_AUTO) {
testFun
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]