This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
     new ce5762649435 [SPARK-49837][SQL][TESTS] Add more tests for NULLIF function
ce5762649435 is described below
commit ce5762649435086f0eeacbfa721d5f4686135abc
Author: ivanjevtic-db <[email protected]>
AuthorDate: Thu Oct 3 08:59:18 2024 +0200
[SPARK-49837][SQL][TESTS] Add more tests for NULLIF function
### What changes were proposed in this pull request?
In this pull request, the proposed changes include introducing tests for
the **NULLIF** function. These tests will help prevent potential regressions by
ensuring that future modifications do not unintentionally break the behavior of
**NULLIF**. I have written several tests, along with queries that combine
NULLIF with GROUP BY to cover more complex use cases.
### Why are the changes needed?
Currently, there is a lack of tests for the NULLIF function. We should add
tests to prevent regressions.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Tests.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #48302 from ivanjevtic-db/nullif-tests.
Authored-by: ivanjevtic-db <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
.../apache/spark/sql/DataFrameFunctionsSuite.scala | 47 +++++++++++++++++-----
1 file changed, 38 insertions(+), 9 deletions(-)
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
index 016803635ff6..47691e1ccd40 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
@@ -315,6 +315,44 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
checkAnswer(df.select(isnotnull(col("a"))), Seq(Row(false)))
}
+  // Exercises NULLIF both with the common-expression inlining optimization
+  // enabled and disabled, covering literals, NULL operands, strings, and
+  // use inside GROUP BY / HAVING, plus the MISSING_AGGREGATION error case.
+  test("nullif function") {
+    Seq(true, false).foreach { alwaysInlineCommonExpr =>
+      withSQLConf(SQLConf.ALWAYS_INLINE_COMMON_EXPR.key -> alwaysInlineCommonExpr.toString) {
+        Seq(
+          "SELECT NULLIF(1, 1)" -> Seq(Row(null)),
+          "SELECT NULLIF(1, 2)" -> Seq(Row(1)),
+          "SELECT NULLIF(NULL, 1)" -> Seq(Row(null)),
+          "SELECT NULLIF(1, NULL)" -> Seq(Row(1)),
+          "SELECT NULLIF(NULL, NULL)" -> Seq(Row(null)),
+          "SELECT NULLIF('abc', 'abc')" -> Seq(Row(null)),
+          "SELECT NULLIF('abc', 'xyz')" -> Seq(Row("abc")),
+          "SELECT NULLIF(id, 1) " +
+            "FROM range(10) " +
+            "GROUP BY NULLIF(id, 1)" -> Seq(Row(null), Row(2), Row(3), Row(4), Row(5), Row(6),
+            Row(7), Row(8), Row(9), Row(0)),
+          // A trailing space after COUNT(*) is required so the concatenated SQL
+          // reads "... COUNT(*) FROM ..." instead of the invalid "COUNT(*)FROM".
+          "SELECT NULLIF(id, 1), COUNT(*) " +
+            "FROM range(10) " +
+            "GROUP BY NULLIF(id, 1) " +
+            "HAVING COUNT(*) > 1" -> Seq.empty[Row]
+        ).foreach {
+          case (sqlText, expected) => checkAnswer(sql(sqlText), expected)
+        }
+
+        // Grouping by NULLIF(id, 2) while selecting NULLIF(id, 1) leaves the
+        // bare `id` unaggregated, which must raise MISSING_AGGREGATION.
+        checkError(
+          exception = intercept[AnalysisException] {
+            sql("SELECT NULLIF(id, 1), COUNT(*) " +
+              "FROM range(10) " +
+              "GROUP BY NULLIF(id, 2)")
+          },
+          condition = "MISSING_AGGREGATION",
+          parameters = Map(
+            "expression" -> "\"id\"",
+            "expressionAnyValue" -> "\"any_value(id)\"")
+        )
+      }
+    }
+  }
+
+
test("equal_null function") {
val df = Seq[(Integer, Integer)]((null, 8)).toDF("a", "b")
checkAnswer(df.selectExpr("equal_null(a, b)"), Seq(Row(false)))
@@ -324,15 +362,6 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
checkAnswer(df.select(equal_null(col("a"), col("a"))), Seq(Row(true)))
}
- test("nullif function") {
- val df = Seq((5, 8)).toDF("a", "b")
- checkAnswer(df.selectExpr("nullif(5, 8)"), Seq(Row(5)))
- checkAnswer(df.select(nullif(lit(5), lit(8))), Seq(Row(5)))
-
- checkAnswer(df.selectExpr("nullif(a, a)"), Seq(Row(null)))
- checkAnswer(df.select(nullif(lit(5), lit(5))), Seq(Row(null)))
- }
-
test("nullifzero function") {
withTable("t") {
// Here we exercise a non-nullable, non-foldable column.
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]