MaxGekk commented on a change in pull request #25716: [SPARK-29012][SQL] 
Support special timestamp values
URL: https://github.com/apache/spark/pull/25716#discussion_r322592582
 
 

 ##########
 File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
 ##########
 @@ -848,4 +852,46 @@ object DateTimeUtils {
     val sinceEpoch = BigDecimal(timestamp) / MICROS_PER_SECOND + offset
     new Decimal().set(sinceEpoch, 20, 6)
   }
+
+  def currentTimestamp(): SQLTimestamp = instantToMicros(Instant.now())
+
+  private def today(zoneId: ZoneId): ZonedDateTime = {
+    Instant.now().atZone(zoneId).`with`(LocalTime.MIDNIGHT)
+  }
+
+  private val specialValue = 
"""(EPOCH|NOW|TODAY|TOMORROW|YESTERDAY)\p{Blank}*(.*)""".r
+
+  /**
+   * Converts notational shorthands into ordinary timestamps.
+   * @param input - a trimmed string
+   * @param zoneId - zone identifier used to get the current date.
+   * @return Some(microseconds since the epoch) if the conversion completed
+   *         successfully, otherwise None.
+   */
+  def convertSpecialTimestamp(input: String, zoneId: ZoneId): 
Option[SQLTimestamp] = {
+    def isValidZoneId(z: String): Boolean = {
+      z == "" || Try { getZoneId(z) }.isSuccess
+    }
+
+    if (input.length < 3 || !input(0).isLetter) return None
+    input.toUpperCase(Locale.US) match {
+      case specialValue("EPOCH", z) if isValidZoneId(z) => Some(0)
+      case specialValue("NOW", "") => Some(currentTimestamp())
 
 Review comment:
   I have already fixed that.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to