cloud-fan commented on code in PR #41072:
URL: https://github.com/apache/spark/pull/41072#discussion_r1392240967
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala:
##########
@@ -3122,6 +3122,34 @@ case class Sequence(
}
object Sequence {
+ private def prettyName: String = "sequence"
+
+ def sequenceLength(start: Long, stop: Long, step: Long): Int = {
+ try {
+ val delta = Math.subtractExact(stop, start)
+ if (delta == Long.MinValue && step == -1L) {
+ // We must special-case division of Long.MinValue by -1 to catch potential unchecked
+ // overflow in the next operation. Division does not have a builtin overflow check. We
+ // previously special-cased div-by-zero.
+ throw new ArithmeticException("Long overflow (Long.MinValue / -1)")
+ }
+ val len = if (stop == start) 1L else Math.addExact(1L, (delta / step))
+ if (len > ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH) {
+ throw QueryExecutionErrors.createArrayWithElementsExceedLimitError(prettyName, len)
+ }
+ len.toInt
+ } catch {
+ // We handle overflows in the previous try block by raising an appropriate exception.
+ case _: ArithmeticException =>
+ val safeLen =
+ BigInt(1) + (BigInt(stop) - BigInt(start)) / BigInt(step)
+ if (safeLen > ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH) {
Review Comment:
maybe just use an assert? Assertion errors are also treated as internal errors.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]