stefankandic commented on code in PR #48585:
URL: https://github.com/apache/spark/pull/48585#discussion_r1812115111
##########
sql/core/src/test/scala/org/apache/spark/sql/CollationSQLExpressionsSuite.scala:
##########
@@ -3260,6 +3260,72 @@ class CollationSQLExpressionsSuite
}
}
+ test("SPARK-50060: set operators with conflicting and non-conflicting
collations") {
+ val setOperators = Seq[(String, Seq[Row])](
+ ("UNION", Seq(Row("a"))),
+ ("INTERSECT", Seq(Row("a"))),
+ ("EXCEPT", Seq()),
+ ("UNION ALL", Seq(Row("A"), Row("a"))),
+ ("INTERSECT ALL", Seq(Row("a"))),
+ ("EXCEPT ALL", Seq()),
+ ("UNION DISTINCT", Seq(Row("a"))),
+ ("INTERSECT DISTINCT", Seq(Row("a"))),
+ ("EXCEPT DISTINCT", Seq()))
+
+ Seq[Boolean](true, false).foreach{ ansi_enabled =>
+ withSQLConf(SQLConf.ANSI_ENABLED.key -> ansi_enabled.toString,
+ SqlApiConf.DEFAULT_COLLATION -> "UNICODE") {
+ setOperators.foreach { case (operator, result) =>
Review Comment:
Sorry for not catching this earlier, but in cases where we have multiple
nested `foreach` calls we can just wrap all of them in a single for comprehension, like:
```scala
for {
  ansiEnabled <- Seq(true, false)
  (operator, result) <- setOperators
} {
  withSQLConf(
    SQLConf.ANSI_ENABLED.key -> ansiEnabled.toString,
    SqlApiConf.DEFAULT_COLLATION -> "UNICODE"
  ) {
    // some code
  }
}
```
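
For context, a minimal sketch of the full test body with the for comprehension applied is below. The query string and the `sql`/`checkAnswer` calls are assumptions on my part, since the quoted hunk is truncated before the loop body:
```scala
// Minimal sketch only: assumes the loop body builds one query per operator
// and checks it with QueryTest.checkAnswer; the real body is not shown in
// the quoted hunk above.
for {
  ansiEnabled <- Seq(true, false)
  (operator, result) <- setOperators
} {
  withSQLConf(
    SQLConf.ANSI_ENABLED.key -> ansiEnabled.toString,
    SqlApiConf.DEFAULT_COLLATION -> "UNICODE"
  ) {
    // Hypothetical query: compare a lower/upper case pair under an explicit
    // case-insensitive collation so the expected rows in setOperators line up.
    val query = s"SELECT 'a' COLLATE UTF8_LCASE $operator SELECT 'A' COLLATE UTF8_LCASE"
    checkAnswer(sql(query), result)
  }
}
```
Either way, the single for comprehension keeps the ANSI flag and the operator table at the same nesting level, which reads more clearly than the nested `foreach` calls.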