HyukjinKwon commented on code in PR #38685:
URL: https://github.com/apache/spark/pull/38685#discussion_r1029966254
##########
sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala:
##########
@@ -1759,24 +1763,25 @@ class DataFrameSuite extends QueryTest
test("SPARK-8072: Better Exception for Duplicate Columns") {
// only one duplicate column present
- val e = intercept[org.apache.spark.sql.AnalysisException] {
+ val e = intercept[AnalysisException] {
Seq((1, 2, 3), (2, 3, 4), (3, 4, 5)).toDF("column1", "column2",
"column1")
.write.format("parquet").save("temp")
}
- assert(e.getMessage.contains("Found duplicate column(s) when inserting
into"))
- assert(e.getMessage.contains("column1"))
- assert(!e.getMessage.contains("column2"))
+ checkError(
+ exception = e,
+ errorClass = "COLUMN_ALREADY_EXISTS",
+ parameters = Map("columnName" -> "`column1`"))
Review Comment:
Several tests fixed here seem flaky because of the entry iteration order of the map (which is unspecified for hash-based maps), e.g.:
```
2022-11-22T20:16:18.0679384Z [0m[[0m[0minfo[0m] [0m[0m[31m-
SPARK-8072: Better Exception for Duplicate Columns *** FAILED *** (42
milliseconds)[0m[0m
2022-11-22T20:16:18.0680329Z [0m[[0m[0minfo[0m] [0m[0m[31m
Map("columnName" -> "`column3`") did not equal Map("columnName" -> "`column1`")
(SparkFunSuite.scala:317)[0m[0m
2022-11-22T20:16:18.0680867Z [0m[[0m[0minfo[0m] [0m[0m[31m
Analysis:[0m[0m
2022-11-22T20:16:18.0681437Z [0m[[0m[0minfo[0m] [0m[0m[31m
JavaCollectionWrappers$JMapWrapper(columnName: `column3` -> `column1`)[0m[0m
2022-11-22T20:16:18.0682045Z [0m[[0m[0minfo[0m] [0m[0m[31m
org.scalatest.exceptions.TestFailedException:[0m[0m
2022-11-22T20:16:18.0682715Z [0m[[0m[0minfo[0m] [0m[0m[31m at
org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:472)[0m[0m
2022-11-22T20:16:18.0683422Z [0m[[0m[0minfo[0m] [0m[0m[31m at
org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:471)[0m[0m
2022-11-22T20:16:18.0684101Z [0m[[0m[0minfo[0m] [0m[0m[31m at
org.scalatest.Assertions$.newAssertionFailedException(Assertions.scala:1231)[0m[0m
2022-11-22T20:16:18.0684760Z [0m[[0m[0minfo[0m] [0m[0m[31m at
org.scalatest.Assertions$AssertionsHelper.macroAssert(Assertions.scala:1295)[0m[0m
2022-11-22T20:16:18.0685415Z [0m[[0m[0minfo[0m] [0m[0m[31m at
org.apache.spark.SparkFunSuite.checkError(SparkFunSuite.scala:317)[0m[0m
2022-11-22T20:16:18.0686092Z [0m[[0m[0minfo[0m] [0m[0m[31m at
org.apache.spark.sql.DataFrameSuite.$anonfun$new$368(DataFrameSuite.scala:1781)[0m[0m
2022-11-22T20:16:18.0686748Z [0m[[0m[0minfo[0m] [0m[0m[31m at
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)[0m[0m
2022-11-22T20:16:18.0687352Z [0m[[0m[0minfo[0m] [0m[0m[31m at
org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)[0m[0m
2022-11-22T20:16:18.0687936Z [0m[[0m[0minfo[0m] [0m[0m[31m at
org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)[0m[0m
2022-11-22T20:16:18.0688514Z [0m[[0m[0minfo[0m] [0m[0m[31m at
org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)[0m[0m
2022-11-22T20:16:18.0689084Z [0m[[0m[0minfo[0m] [0m[0m[31m at
org.scalatest.Transformer.apply(Transformer.scala:22)[0m[0m
2022-11-22T20:16:18.0689662Z [0m[[0m[0minfo[0m] [0m[0m[31m at
org.scalatest.Transformer.apply(Transformer.scala:20)[0m[0m
2022-11-22T20:16:18.0690294Z [0m[[0m[0minfo[0m] [0m[0m[31m at
org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)[0m[0m
2022-11-22T20:16:18.0690957Z [0m[[0m[0minfo[0m] [0m[0m[31m at
org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:207)[0m[0m
2022-11-22T20:16:18.0691669Z [0m[[0m[0minfo[0m] [0m[0m[31m at
org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)[0m[0m
```
https://github.com/apache/spark/actions/runs/3525051044/jobs/5911287739 and
https://github.com/apache/spark/actions/runs/3526328003, which happen under a
different JDK or Scala version.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]