This is an automated email from the ASF dual-hosted git repository.
rluvaton pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git
The following commit(s) were added to refs/heads/main by this push:
new bc05ef82e test: extract conditional expression tests (#2807)
bc05ef82e is described below
commit bc05ef82e5bc418d605914766a5b2abbc3866781
Author: Raz Luvaton <[email protected]>
AuthorDate: Sun Nov 23 14:12:12 2025 +0200
test: extract conditional expression tests (#2807)
---
.github/workflows/pr_build_linux.yml | 21 +++---
.github/workflows/pr_build_macos.yml | 21 +++---
.../org/apache/comet/CometExpressionSuite.scala | 76 ---------------------
.../conditional/CometCaseWhenSuite.scala | 65 ++++++++++++++++++
.../conditional/CometCoalesceSuite.scala | 77 ++++++++++++++++++++++
.../expressions/conditional/CometIfSuite.scala | 58 ++++++++++++++++
6 files changed, 226 insertions(+), 92 deletions(-)
diff --git a/.github/workflows/pr_build_linux.yml b/.github/workflows/pr_build_linux.yml
index 8570c7e7a..d06ccd441 100644
--- a/.github/workflows/pr_build_linux.yml
+++ b/.github/workflows/pr_build_linux.yml
@@ -133,28 +133,33 @@ jobs:
org.apache.comet.exec.CometExecSuite
org.apache.comet.exec.CometWindowExecSuite
org.apache.comet.exec.CometJoinSuite
- org.apache.comet.CometArrayExpressionSuite
- org.apache.comet.CometCastSuite
- org.apache.comet.CometExpressionSuite
- org.apache.comet.CometExpressionCoverageSuite
- org.apache.comet.CometMathExpressionSuite
org.apache.comet.CometNativeSuite
org.apache.comet.CometSparkSessionExtensionsSuite
- org.apache.comet.CometStringExpressionSuite
org.apache.spark.CometPluginsSuite
org.apache.spark.CometPluginsDefaultSuite
org.apache.spark.CometPluginsNonOverrideSuite
org.apache.spark.CometPluginsUnifiedModeOverrideSuite
- org.apache.comet.CometTemporalExpressionSuite
org.apache.spark.sql.CometTPCDSQuerySuite
org.apache.spark.sql.CometTPCDSQueryTestSuite
org.apache.spark.sql.CometTPCHQuerySuite
org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite
org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite
org.apache.spark.sql.comet.CometTaskMetricsSuite
+ org.apache.comet.objectstore.NativeConfigSuite
+ - name: "expressions"
+ value: |
+ org.apache.comet.CometExpressionSuite
+ org.apache.comet.CometExpressionCoverageSuite
+ org.apache.comet.CometTemporalExpressionSuite
+ org.apache.comet.CometArrayExpressionSuite
+ org.apache.comet.CometCastSuite
+ org.apache.comet.CometMathExpressionSuite
+ org.apache.comet.CometStringExpressionSuite
org.apache.comet.CometBitwiseExpressionSuite
org.apache.comet.CometMapExpressionSuite
- org.apache.comet.objectstore.NativeConfigSuite
+ org.apache.comet.expressions.conditional.CometIfSuite
+ org.apache.comet.expressions.conditional.CometCoalesceSuite
+ org.apache.comet.expressions.conditional.CometCaseWhenSuite
- name: "sql"
value: |
org.apache.spark.sql.CometToPrettyStringSuite
diff --git a/.github/workflows/pr_build_macos.yml b/.github/workflows/pr_build_macos.yml
index 5eb6be4ee..d57800f4f 100644
--- a/.github/workflows/pr_build_macos.yml
+++ b/.github/workflows/pr_build_macos.yml
@@ -98,28 +98,33 @@ jobs:
org.apache.comet.exec.CometExecSuite
org.apache.comet.exec.CometWindowExecSuite
org.apache.comet.exec.CometJoinSuite
- org.apache.comet.CometArrayExpressionSuite
- org.apache.comet.CometCastSuite
- org.apache.comet.CometExpressionSuite
- org.apache.comet.CometExpressionCoverageSuite
- org.apache.comet.CometMathExpressionSuite
org.apache.comet.CometNativeSuite
org.apache.comet.CometSparkSessionExtensionsSuite
- org.apache.comet.CometStringExpressionSuite
org.apache.spark.CometPluginsSuite
org.apache.spark.CometPluginsDefaultSuite
org.apache.spark.CometPluginsNonOverrideSuite
org.apache.spark.CometPluginsUnifiedModeOverrideSuite
- org.apache.comet.CometTemporalExpressionSuite
org.apache.spark.sql.CometTPCDSQuerySuite
org.apache.spark.sql.CometTPCDSQueryTestSuite
org.apache.spark.sql.CometTPCHQuerySuite
org.apache.spark.sql.comet.CometTPCDSV1_4_PlanStabilitySuite
org.apache.spark.sql.comet.CometTPCDSV2_7_PlanStabilitySuite
org.apache.spark.sql.comet.CometTaskMetricsSuite
+ org.apache.comet.objectstore.NativeConfigSuite
+ - name: "expressions"
+ value: |
+ org.apache.comet.CometExpressionSuite
+ org.apache.comet.CometExpressionCoverageSuite
+ org.apache.comet.CometTemporalExpressionSuite
+ org.apache.comet.CometArrayExpressionSuite
+ org.apache.comet.CometCastSuite
+ org.apache.comet.CometMathExpressionSuite
+ org.apache.comet.CometStringExpressionSuite
org.apache.comet.CometBitwiseExpressionSuite
org.apache.comet.CometMapExpressionSuite
- org.apache.comet.objectstore.NativeConfigSuite
+ org.apache.comet.expressions.conditional.CometIfSuite
+ org.apache.comet.expressions.conditional.CometCoalesceSuite
+ org.apache.comet.expressions.conditional.CometCaseWhenSuite
- name: "sql"
value: |
org.apache.spark.sql.CometToPrettyStringSuite
diff --git a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
index b0c718a2b..4fb4b02fa 100644
--- a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
@@ -156,19 +156,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
}
- test("coalesce should return correct datatype") {
- Seq(true, false).foreach { dictionaryEnabled =>
- withTempDir { dir =>
- val path = new Path(dir.toURI.toString, "test.parquet")
- makeParquetFileAllPrimitiveTypes(path, dictionaryEnabled = dictionaryEnabled, 10000)
- withParquetTable(path.toString, "tbl") {
- checkSparkAnswerAndOperator(
- "SELECT coalesce(cast(_18 as date), cast(_19 as date), _20) FROM
tbl")
- }
- }
- }
- }
-
test("decimals divide by zero") {
Seq(true, false).foreach { dictionary =>
withSQLConf(
@@ -470,18 +457,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
}
- test("test coalesce lazy eval") {
- withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
- val data = Seq((9999999999999L, 0))
- withParquetTable(data, "t1") {
- val res = spark.sql("""
- |SELECT coalesce(_1, CAST(_1 AS TINYINT)) from t1;
- | """.stripMargin)
- checkSparkAnswerAndOperator(res)
- }
- }
- }
-
test("dictionary arithmetic") {
// TODO: test ANSI mode
withSQLConf(SQLConf.ANSI_ENABLED.key -> "false", "parquet.enable.dictionary" -> "true") {
@@ -522,15 +497,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
}
- test("string with coalesce") {
- withParquetTable(
- (0 until 10).map(i => (i.toString, if (i > 5) None else Some((i + 100).toString))),
- "tbl") {
- checkSparkAnswerAndOperator(
- "SELECT coalesce(_1), coalesce(_1, 1), coalesce(null, _1),
coalesce(null, 1), coalesce(_2, _1), coalesce(null) FROM tbl")
- }
- }
-
test("substring with dictionary") {
val data = (0 until 1000)
.map(_ % 5) // reduce value space to trigger dictionary encoding
@@ -1635,30 +1601,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
}
- test("case_when") {
- Seq(false, true).foreach { dictionary =>
- withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
- val table = "test"
- withTable(table) {
- sql(s"create table $table(id int) using parquet")
- sql(s"insert into $table values(1), (NULL), (2), (2), (3), (3), (4),
(5), (NULL)")
- checkSparkAnswerAndOperator(
- s"SELECT CASE WHEN id > 2 THEN 3333 WHEN id > 1 THEN 2222 ELSE
1111 END FROM $table")
- checkSparkAnswerAndOperator(
- s"SELECT CASE WHEN id > 2 THEN NULL WHEN id > 1 THEN 2222 ELSE
1111 END FROM $table")
- checkSparkAnswerAndOperator(
- s"SELECT CASE id WHEN 1 THEN 1111 WHEN 2 THEN 2222 ELSE 3333 END
FROM $table")
- checkSparkAnswerAndOperator(
- s"SELECT CASE id WHEN 1 THEN 1111 WHEN 2 THEN 2222 ELSE NULL END
FROM $table")
- checkSparkAnswerAndOperator(
- s"SELECT CASE id WHEN 1 THEN 1111 WHEN 2 THEN 2222 WHEN 3 THEN
3333 WHEN 4 THEN 4444 END FROM $table")
- checkSparkAnswerAndOperator(
- s"SELECT CASE id WHEN NULL THEN 0 WHEN 1 THEN 1111 WHEN 2 THEN
2222 ELSE 3333 END FROM $table")
- }
- }
- }
- }
-
test("not") {
Seq(false, true).foreach { dictionary =>
withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
@@ -1685,24 +1627,6 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
}
- test("conditional expressions") {
- Seq(false, true).foreach { dictionary =>
- withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
- val table = "test1"
- withTable(table) {
- sql(s"create table $table(c1 int, c2 string, c3 int) using parquet")
- sql(
- s"insert into $table values(1, 'comet', 1), (2, 'comet', 3),
(null, 'spark', 4)," +
- " (null, null, 4), (2, 'spark', 3), (2, 'comet', 3)")
- checkSparkAnswerAndOperator(s"SELECT if (c1 < 2, 1111, 2222) FROM
$table")
- checkSparkAnswerAndOperator(s"SELECT if (c1 < c3, 1111, 2222) FROM
$table")
- checkSparkAnswerAndOperator(
- s"SELECT if (c2 == 'comet', 'native execution', 'non-native
execution') FROM $table")
- }
- }
- }
- }
-
test("basic arithmetic") {
withSQLConf("parquet.enable.dictionary" -> "false") {
withParquetTable((1 until 10).map(i => (i, i + 1)), "tbl", false) {
diff --git a/spark/src/test/scala/org/apache/comet/expressions/conditional/CometCaseWhenSuite.scala b/spark/src/test/scala/org/apache/comet/expressions/conditional/CometCaseWhenSuite.scala
new file mode 100644
index 000000000..baae51698
--- /dev/null
+++ b/spark/src/test/scala/org/apache/comet/expressions/conditional/CometCaseWhenSuite.scala
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.comet.expressions.conditional
+
+import org.scalactic.source.Position
+import org.scalatest.Tag
+
+import org.apache.spark.sql.CometTestBase
+import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
+
+import org.apache.comet.CometConf
+
+class CometCaseWhenSuite extends CometTestBase with AdaptiveSparkPlanHelper {
+
+ override protected def test(testName: String, testTags: Tag*)(testFun: => Any)(implicit
+ pos: Position): Unit = {
+ super.test(testName, testTags: _*) {
+ withSQLConf(CometConf.COMET_NATIVE_SCAN_IMPL.key -> CometConf.SCAN_AUTO) {
+ testFun
+ }
+ }
+ }
+
+ test("case_when") {
+ Seq(false, true).foreach { dictionary =>
+ withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
+ val table = "test"
+ withTable(table) {
+ sql(s"create table $table(id int) using parquet")
+ sql(s"insert into $table values(1), (NULL), (2), (2), (3), (3), (4),
(5), (NULL)")
+ checkSparkAnswerAndOperator(
+ s"SELECT CASE WHEN id > 2 THEN 3333 WHEN id > 1 THEN 2222 ELSE
1111 END FROM $table")
+ checkSparkAnswerAndOperator(
+ s"SELECT CASE WHEN id > 2 THEN NULL WHEN id > 1 THEN 2222 ELSE
1111 END FROM $table")
+ checkSparkAnswerAndOperator(
+ s"SELECT CASE id WHEN 1 THEN 1111 WHEN 2 THEN 2222 ELSE 3333 END
FROM $table")
+ checkSparkAnswerAndOperator(
+ s"SELECT CASE id WHEN 1 THEN 1111 WHEN 2 THEN 2222 ELSE NULL END
FROM $table")
+ checkSparkAnswerAndOperator(
+ s"SELECT CASE id WHEN 1 THEN 1111 WHEN 2 THEN 2222 WHEN 3 THEN
3333 WHEN 4 THEN 4444 END FROM $table")
+ checkSparkAnswerAndOperator(
+ s"SELECT CASE id WHEN NULL THEN 0 WHEN 1 THEN 1111 WHEN 2 THEN
2222 ELSE 3333 END FROM $table")
+ }
+ }
+ }
+ }
+
+}
diff --git a/spark/src/test/scala/org/apache/comet/expressions/conditional/CometCoalesceSuite.scala b/spark/src/test/scala/org/apache/comet/expressions/conditional/CometCoalesceSuite.scala
new file mode 100644
index 000000000..40f2b45dd
--- /dev/null
+++ b/spark/src/test/scala/org/apache/comet/expressions/conditional/CometCoalesceSuite.scala
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.comet.expressions.conditional
+
+import org.scalactic.source.Position
+import org.scalatest.Tag
+
+import org.apache.hadoop.fs.Path
+import org.apache.spark.sql.CometTestBase
+import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
+import org.apache.spark.sql.internal.SQLConf
+
+import org.apache.comet.CometConf
+
+class CometCoalesceSuite extends CometTestBase with AdaptiveSparkPlanHelper {
+
+ override protected def test(testName: String, testTags: Tag*)(testFun: => Any)(implicit
+ pos: Position): Unit = {
+ super.test(testName, testTags: _*) {
+ withSQLConf(CometConf.COMET_NATIVE_SCAN_IMPL.key -> CometConf.SCAN_AUTO) {
+ testFun
+ }
+ }
+ }
+
+ test("coalesce should return correct datatype") {
+ Seq(true, false).foreach { dictionaryEnabled =>
+ withTempDir { dir =>
+ val path = new Path(dir.toURI.toString, "test.parquet")
+ makeParquetFileAllPrimitiveTypes(path, dictionaryEnabled = dictionaryEnabled, 10000)
+ withParquetTable(path.toString, "tbl") {
+ checkSparkAnswerAndOperator(
+ "SELECT coalesce(cast(_18 as date), cast(_19 as date), _20) FROM
tbl")
+ }
+ }
+ }
+ }
+
+ test("test coalesce lazy eval") {
+ withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
+ val data = Seq((9999999999999L, 0))
+ withParquetTable(data, "t1") {
+ val res = spark.sql("""
+ |SELECT coalesce(_1, CAST(_1 AS TINYINT)) from t1;
+ | """.stripMargin)
+ checkSparkAnswerAndOperator(res)
+ }
+ }
+ }
+
+ test("string with coalesce") {
+ withParquetTable(
+ (0 until 10).map(i => (i.toString, if (i > 5) None else Some((i + 100).toString))),
+ "tbl") {
+ checkSparkAnswerAndOperator(
+ "SELECT coalesce(_1), coalesce(_1, 1), coalesce(null, _1),
coalesce(null, 1), coalesce(_2, _1), coalesce(null) FROM tbl")
+ }
+ }
+
+}
diff --git a/spark/src/test/scala/org/apache/comet/expressions/conditional/CometIfSuite.scala b/spark/src/test/scala/org/apache/comet/expressions/conditional/CometIfSuite.scala
new file mode 100644
index 000000000..a206b21ad
--- /dev/null
+++ b/spark/src/test/scala/org/apache/comet/expressions/conditional/CometIfSuite.scala
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.comet.expressions.conditional
+
+import org.scalactic.source.Position
+import org.scalatest.Tag
+
+import org.apache.spark.sql.CometTestBase
+import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
+
+import org.apache.comet.CometConf
+
+class CometIfSuite extends CometTestBase with AdaptiveSparkPlanHelper {
+
+ override protected def test(testName: String, testTags: Tag*)(testFun: => Any)(implicit
+ pos: Position): Unit = {
+ super.test(testName, testTags: _*) {
+ withSQLConf(CometConf.COMET_NATIVE_SCAN_IMPL.key -> CometConf.SCAN_AUTO) {
+ testFun
+ }
+ }
+ }
+
+ test("if expression") {
+ Seq(false, true).foreach { dictionary =>
+ withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
+ val table = "test1"
+ withTable(table) {
+ sql(s"create table $table(c1 int, c2 string, c3 int) using parquet")
+ sql(
+ s"insert into $table values(1, 'comet', 1), (2, 'comet', 3),
(null, 'spark', 4)," +
+ " (null, null, 4), (2, 'spark', 3), (2, 'comet', 3)")
+ checkSparkAnswerAndOperator(s"SELECT if (c1 < 2, 1111, 2222) FROM
$table")
+ checkSparkAnswerAndOperator(s"SELECT if (c1 < c3, 1111, 2222) FROM
$table")
+ checkSparkAnswerAndOperator(
+ s"SELECT if (c2 == 'comet', 'native execution', 'non-native
execution') FROM $table")
+ }
+ }
+ }
+ }
+}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]