fe2s commented on code in PR #39099:
URL: https://github.com/apache/spark/pull/39099#discussion_r1057810842
##########
sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala:
##########
@@ -384,4 +387,51 @@ class DecimalSuite extends SparkFunSuite with PrivateMethodTester with SQLHelper
}
}
}
+
+  // 18 is the max number of digits in Decimal's compact long
+  test("SPARK-41554: decrease/increase scale by 18 and more on compact decimal") {
+    val unscaledNums = Seq(
+      0L, 1L, 10L, 51L, 123L, 523L,
+      // 18 digits
+      912345678901234567L,
+      112345678901234567L,
+      512345678901234567L
+    )
+    val precision = 38
+    // generate some (from, to) scale pairs, e.g. (38, 18), (-20, -2), etc.
+    val scalePairs = for {
+      scale <- Seq(38, 20, 19, 18)
+      delta <- Seq(38, 20, 19, 18)
+      a = scale
+      b = scale - delta
+    } yield {
+      Seq((a, b), (-a, -b), (b, a), (-b, -a))
+    }
+
+    for {
+      unscaled <- unscaledNums
+      mode <- allSupportedRoundModes
+      (scaleFrom, scaleTo) <- scalePairs.flatten
+      sign <- Seq(1L, -1L)
+    } {
+      val unscaledWithSign = unscaled * sign
+      if (scaleFrom < 0 || scaleTo < 0) {
+        withSQLConf(SQLConf.LEGACY_ALLOW_NEGATIVE_SCALE_OF_DECIMAL_ENABLED.key -> "true") {
Review Comment:
It's a legacy behaviour, but it's still something that Spark supports, right? We have use cases in production where `allowNegativeScaleOfDecimal = true`, so I would like to make sure the bug is fixed for negative scales as well.