This is an automated email from the ASF dual-hosted git repository.
agrove pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git
The following commit(s) were added to refs/heads/main by this push:
new 614fe1356 fix: enable cast tests for Spark 4.0 (#2919)
614fe1356 is described below
commit 614fe135694ef166eecf1b3ef9968dd038787815
Author: Manu Zhang <[email protected]>
AuthorDate: Tue Dec 23 01:52:11 2025 +0800
fix: enable cast tests for Spark 4.0 (#2919)
---
.../scala/org/apache/comet/CometCastSuite.scala | 33 ++++++++++------------
1 file changed, 15 insertions(+), 18 deletions(-)
diff --git a/spark/src/test/scala/org/apache/comet/CometCastSuite.scala b/spark/src/test/scala/org/apache/comet/CometCastSuite.scala
index 1912e982b..90386a979 100644
--- a/spark/src/test/scala/org/apache/comet/CometCastSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometCastSuite.scala
@@ -33,7 +33,6 @@ import org.apache.spark.sql.functions.col
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{ArrayType, BooleanType, ByteType, DataType, DataTypes, DecimalType, IntegerType, LongType, ShortType, StringType, StructField, StructType}
-import org.apache.comet.CometSparkSessionExtensions.isSpark40Plus
import org.apache.comet.expressions.{CometCast, CometEvalMode}
import org.apache.comet.rules.CometScanTypeChecker
import org.apache.comet.serde.Compatible
@@ -575,8 +574,6 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
// CAST from StringType
test("cast StringType to BooleanType") {
- // TODO fix for Spark 4.0.0
- assume(!isSpark40Plus)
val testValues =
(Seq("TRUE", "True", "true", "FALSE", "False", "false", "1", "0", "",
null) ++
gen.generateStrings(dataSize, "truefalseTRUEFALSEyesno10" +
whitespaceChars, 8)).toDF("a")
@@ -617,8 +614,6 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
)
test("cast StringType to ByteType") {
- // TODO fix for Spark 4.0.0
- assume(!isSpark40Plus)
// test with hand-picked values
castTest(castStringToIntegralInputs.toDF("a"), DataTypes.ByteType)
// fuzz test
@@ -626,8 +621,6 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("cast StringType to ShortType") {
- // TODO fix for Spark 4.0.0
- assume(!isSpark40Plus)
// test with hand-picked values
castTest(castStringToIntegralInputs.toDF("a"), DataTypes.ShortType)
// fuzz test
@@ -635,8 +628,6 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("cast StringType to IntegerType") {
- // TODO fix for Spark 4.0.0
- assume(!isSpark40Plus)
// test with hand-picked values
castTest(castStringToIntegralInputs.toDF("a"), DataTypes.IntegerType)
// fuzz test
@@ -644,8 +635,6 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("cast StringType to LongType") {
- // TODO fix for Spark 4.0.0
- assume(!isSpark40Plus)
// test with hand-picked values
castTest(castStringToIntegralInputs.toDF("a"), DataTypes.LongType)
// fuzz test
@@ -707,8 +696,6 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("cast StringType to DateType") {
- // TODO fix for Spark 4.0.0
- assume(!isSpark40Plus)
val validDates = Seq(
"262142-01-01",
"262142-01-01 ",
@@ -1295,10 +1282,21 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
} else {
if (CometSparkSessionExtensions.isSpark40Plus) {
// for Spark 4 we expect the sparkException to carry the message
- assert(
- sparkException.getMessage
- .replace(".WITH_SUGGESTION] ", "]")
- .startsWith(cometMessage))
+ assert(sparkMessage.contains("SQLSTATE"))
+ if (sparkMessage.startsWith("[NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION]")) {
+ assert(
+ sparkMessage.replace(".WITH_SUGGESTION] ",
"]").startsWith(cometMessage))
+ } else if (cometMessage.startsWith("[CAST_INVALID_INPUT]") || cometMessage
+ .startsWith("[CAST_OVERFLOW]")) {
+ assert(
+ sparkMessage.startsWith(
+ cometMessage
+ .replace(
+ "If necessary set \"spark.sql.ansi.enabled\" to
\"false\" to bypass this error.",
+ "")))
+ } else {
+ assert(sparkMessage.startsWith(cometMessage))
+ }
} else {
// for Spark 3.4 we expect to reproduce the error message exactly
assert(cometMessage == sparkMessage)
@@ -1325,5 +1323,4 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
df.write.mode(SaveMode.Overwrite).parquet(filename)
spark.read.parquet(filename)
}
-
}
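As context for the new assertions, here is a standalone Scala sketch of the message-matching rules the Spark 4 branch of the diff now encodes. It is not part of the commit; the object name, the matches helper, and the sample messages are hypothetical, and only the normalization steps mirror the code above.

object Spark4CastMessageCheck {

  // Returns true when a Spark 4 error message is considered to match the
  // message produced by Comet, after normalizing the known differences
  // exercised by CometCastSuite above.
  def matches(sparkMessage: String, cometMessage: String): Boolean = {
    // Spark 4 messages are expected to carry an SQLSTATE suffix.
    require(sparkMessage.contains("SQLSTATE"))
    if (sparkMessage.startsWith("[NUMERIC_VALUE_OUT_OF_RANGE.WITH_SUGGESTION]")) {
      // Spark 4 reports the ".WITH_SUGGESTION" error subclass; drop it before comparing.
      sparkMessage.replace(".WITH_SUGGESTION] ", "]").startsWith(cometMessage)
    } else if (cometMessage.startsWith("[CAST_INVALID_INPUT]") ||
      cometMessage.startsWith("[CAST_OVERFLOW]")) {
      // Comet appends the ANSI-mode hint that Spark 4 messages no longer carry.
      val hint =
        "If necessary set \"spark.sql.ansi.enabled\" to \"false\" to bypass this error."
      sparkMessage.startsWith(cometMessage.replace(hint, ""))
    } else {
      sparkMessage.startsWith(cometMessage)
    }
  }

  def main(args: Array[String]): Unit = {
    // Hypothetical messages, for illustration only.
    val spark = "[CAST_OVERFLOW] The value 128 of the type \"INT\" cannot be cast to " +
      "\"TINYINT\" due to an overflow. SQLSTATE: 22003"
    val comet = "[CAST_OVERFLOW] The value 128 of the type \"INT\" cannot be cast to " +
      "\"TINYINT\" due to an overflow. If necessary set \"spark.sql.ansi.enabled\" " +
      "to \"false\" to bypass this error."
    println(matches(spark, comet)) // prints: true
  }
}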