vinodkc commented on code in PR #53370:
URL: https://github.com/apache/spark/pull/53370#discussion_r2616433239


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala:
##########
@@ -897,6 +897,102 @@ object DateTimeUtils extends SparkDateTimeUtils {
     }
   }
 
+  private def withTimeConversionErrorHandling(f: => Long): Long = {
+    try {
+      val nanos = f
+      if (nanos < 0 || nanos >= NANOS_PER_DAY) {
+        throw new DateTimeException(
+          s"Invalid TIME value: must be between 00:00:00 and 23:59:59.999999999, " +
+          s"but got $nanos nanoseconds")
+      }
+      nanos
+    } catch {
+      case e: DateTimeException =>
+        throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRangeWithoutSuggestion(e)
+      case e: ArithmeticException =>
+        throw QueryExecutionErrors.ansiDateTimeArgumentOutOfRangeWithoutSuggestion(
+          new DateTimeException("Overflow in TIME conversion", e))
+    }
+  }
+
+  /**
+   * Creates a TIME value from seconds since midnight (integral types).
+   * @param seconds Seconds (0 to 86399)
+   * @return Nanoseconds since midnight
+   */
+  def timeFromSeconds(seconds: Long): Long = withTimeConversionErrorHandling {
+    Math.multiplyExact(seconds, NANOS_PER_SECOND)
+  }
+
+  /**
+   * Creates a TIME value from seconds since midnight (decimal type).
+   * @param seconds Seconds (0 to 86399.999999)
+   * @return Nanoseconds since midnight
+   */
+  def timeFromSeconds(seconds: Decimal): Long = withTimeConversionErrorHandling {
+    val operand = new java.math.BigDecimal(NANOS_PER_SECOND)
+    seconds.toJavaBigDecimal.multiply(operand).longValueExact()
+  }
+
+  /**
+   * Creates a TIME value from seconds since midnight (floating point type).
+   * @param seconds Seconds (0 to 86399.999999)
+   * @return Nanoseconds since midnight
+   */
+  def timeFromSeconds(seconds: Double): Long = withTimeConversionErrorHandling {
+    if (seconds.isNaN || seconds.isInfinite) {
+      throw new DateTimeException("Cannot convert NaN or Infinite value to TIME")
+    }
+    (seconds * NANOS_PER_SECOND).toLong

Review Comment:
   `Double.toLong` can silently overflow when the `Double` is too large or too small:
   - large values saturate to `Long.MaxValue`
   - small values saturate to `Long.MinValue`
   
   How it's handled: the range check `(nanos < 0 || nanos >= NANOS_PER_DAY)` in `withTimeConversionErrorHandling` detects these saturated values and throws `SparkDateTimeException`.
   
   I tested with `-1e199` and `1e200`:
   ```
   scala> spark.conf.set("spark.sql.timeType.enabled", "true")
   scala> spark.sql("SELECT time_from_seconds(-1e199)").show()
   ```
   
   ```
   org.apache.spark.SparkDateTimeException: [DATETIME_FIELD_OUT_OF_BOUNDS.WITHOUT_SUGGESTION] Invalid TIME value: must be between 00:00:00 and 23:59:59.999999999, but got -9223372036854775808 nanoseconds. SQLSTATE: 22023
   ```
   
   ```
   scala> spark.sql("SELECT time_from_seconds(1e200)").show()
   ```
   
   ```
   org.apache.spark.SparkDateTimeException: [DATETIME_FIELD_OUT_OF_BOUNDS.WITHOUT_SUGGESTION] Invalid TIME value: must be between 00:00:00 and 23:59:59.999999999, but got 9223372036854775807 nanoseconds. SQLSTATE: 22023
   ```
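   
   For reference, the saturation itself comes from the JVM's standard `double`-to-`long` narrowing conversion, not from anything Spark-specific. A minimal standalone sketch (plain Scala REPL, with the two constants inlined here as assumptions) of the behavior the range check relies on:
   
   ```
   val NANOS_PER_SECOND = 1000000000L
   val NANOS_PER_DAY = 86400L * NANOS_PER_SECOND
   
   // (Double).toLong saturates rather than wrapping (JLS 5.1.3):
   assert((1e200 * NANOS_PER_SECOND).toLong == Long.MaxValue)
   assert((-1e199 * NANOS_PER_SECOND).toLong == Long.MinValue)
   
   // Both saturated values fall outside [0, NANOS_PER_DAY), so the
   // range check in withTimeConversionErrorHandling rejects them.
   val nanos = (1e200 * NANOS_PER_SECOND).toLong
   assert(nanos < 0 || nanos >= NANOS_PER_DAY)
   ```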


