This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 0670e4f7945a [SPARK-51541][SQL] Support the `TIME` data type in `Literal` methods
0670e4f7945a is described below

commit 0670e4f7945ae4935261ed1f45db7ede79aca127
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Tue Mar 18 11:06:38 2025 +0300

    [SPARK-51541][SQL] Support the `TIME` data type in `Literal` methods
    
    ### What changes were proposed in this pull request?
    In the PR, I propose to support the new `TIME` data type in `Literal`'s methods such as the following (a short sketch of the resulting behavior appears after the list):
    - `default()`
    - `sql()`
    - `toString()`
    - `componentTypeToDataType()`
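    
    A minimal sketch of the resulting behavior (not part of the patch itself; it mirrors the expectations in the updated `LiteralExpressionSuite`):
    ```scala
    import java.time.LocalTime

    import org.apache.spark.sql.catalyst.expressions.Literal
    import org.apache.spark.sql.types.TimeType

    // `default()` now produces a TIME literal (midnight) rendered in readable form.
    Literal.default(TimeType()).toString                   // "00:00:00"
    // Literals built from java.time.LocalTime print as SQL TIME literals.
    Literal(LocalTime.NOON).sql                            // "TIME '12:00:00'"
    Literal(LocalTime.of(23, 59, 59, 100000000)).toString  // "23:59:59.1"
    ```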
    
    ### Why are the changes needed?
    To output literals of the TIME data type in a human-readable format. In particular, column names are formatted by default using the internal representation. See the example below, where `43200000000` is a column name:
    ```sql
    spark-sql (default)> desc select time'12:00';
    43200000000             time(6)
    ```
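    
    For context, a back-of-the-envelope check (assuming the internal TIME representation is microseconds since midnight, which the example values are consistent with):
    ```scala
    // 12:00 as microseconds since midnight: 12 h * 3600 s/h * 1,000,000 us/s
    val noonMicros = 12L * 3600 * 1000 * 1000  // 43200000000, the column name above
    ```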
    
    ### Does this PR introduce _any_ user-facing change?
    Yes. After the changes, TIME literals are printed in a human-readable format. For the example above:
    ```sql
    spark-sql (default)> desc select time'12:00';
    TIME '12:00:00'         time(6)
    ```
    
    ### How was this patch tested?
    By running the modified test suites:
    ```
    $ build/sbt "test:testOnly *LiteralExpressionSuite"
    $ build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite -- -z 
time.sql"
    ```
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #50299 from MaxGekk/time-in-literals.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../spark/sql/catalyst/expressions/literals.scala  |  8 +++++-
 .../expressions/LiteralExpressionSuite.scala       | 30 +++++++++++++++++++++-
 .../sql-tests/analyzer-results/time.sql.out        |  2 +-
 .../test/resources/sql-tests/results/time.sql.out  |  2 +-
 4 files changed, 38 insertions(+), 4 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index 6968b8a4cb2e..e3ed2c4a0b0b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -127,6 +127,7 @@ object Literal {
     // java classes
     case _ if clz == classOf[LocalDate] => DateType
     case _ if clz == classOf[Date] => DateType
+    case _ if clz == classOf[LocalTime] => TimeType()
     case _ if clz == classOf[Instant] => TimestampType
     case _ if clz == classOf[Timestamp] => TimestampType
     case _ if clz == classOf[LocalDateTime] => TimestampNTZType
@@ -199,6 +200,7 @@ object Literal {
     case DateType => create(0, DateType)
     case TimestampType => create(0L, TimestampType)
     case TimestampNTZType => create(0L, TimestampNTZType)
+    case t: TimeType => create(0L, t)
     case it: DayTimeIntervalType => create(0L, it)
     case it: YearMonthIntervalType => create(0, it)
     case CharType(length) =>
@@ -433,6 +435,8 @@ case class Literal (value: Any, dataType: DataType) extends LeafExpression {
       dataType match {
         case DateType =>
           DateFormatter().format(value.asInstanceOf[Int])
+        case _: TimeType =>
+          new FractionTimeFormatter().format(value.asInstanceOf[Long])
        case TimestampType =>
          TimestampFormatter.getFractionFormatter(timeZoneId).format(value.asInstanceOf[Long])
         case TimestampNTZType =>
@@ -479,7 +483,7 @@ case class Literal (value: Any, dataType: DataType) extends LeafExpression {
     val jsonValue = (value, dataType) match {
       case (null, _) => JNull
       case (i: Int, DateType) => JString(toString)
-      case (l: Long, TimestampType) => JString(toString)
+      case (l: Long, TimestampType | _: TimeType) => JString(toString)
       case (other, _) => JString(other.toString)
     }
     ("value" -> jsonValue) :: ("dataType" -> dataType.jsonValue) :: Nil
@@ -563,6 +567,8 @@ case class Literal (value: Any, dataType: DataType) extends LeafExpression {
     case (v: Decimal, t: DecimalType) => s"${v}BD"
     case (v: Int, DateType) =>
       s"DATE '$toString'"
+    case (_: Long, _: TimeType) =>
+      s"TIME '$toString'"
     case (v: Long, TimestampType) =>
       s"TIMESTAMP '$toString'"
     case (v: Long, TimestampNTZType) =>
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
index 5da5c6ac412c..9ed0b48680c6 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.sql.catalyst.expressions
 
 import java.nio.charset.StandardCharsets
-import java.time.{Duration, Instant, LocalDate, LocalDateTime, Period, ZoneOffset}
+import java.time.{Duration, Instant, LocalDate, LocalDateTime, LocalTime, Period, ZoneOffset}
 import java.time.temporal.ChronoUnit
 import java.util.TimeZone
 
@@ -30,6 +30,7 @@ import org.apache.spark.sql.Row
 import org.apache.spark.sql.catalyst.{CatalystTypeConverters, ScalaReflection}
 import org.apache.spark.sql.catalyst.encoders.ExamplePointUDT
 import org.apache.spark.sql.catalyst.util.DateTimeConstants._
+import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.localTime
 import org.apache.spark.sql.catalyst.util.DateTimeUtils
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
@@ -51,6 +52,7 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
     checkEvaluation(Literal.create(null, BinaryType), null)
     checkEvaluation(Literal.create(null, DecimalType.USER_DEFAULT), null)
     checkEvaluation(Literal.create(null, DateType), null)
+    checkEvaluation(Literal.create(null, TimeType()), null)
     checkEvaluation(Literal.create(null, TimestampType), null)
     checkEvaluation(Literal.create(null, CalendarIntervalType), null)
     checkEvaluation(Literal.create(null, YearMonthIntervalType()), null)
@@ -81,6 +83,7 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
       checkEvaluation(Literal.default(DateType), LocalDate.ofEpochDay(0))
       checkEvaluation(Literal.default(TimestampType), Instant.ofEpochSecond(0))
     }
+    checkEvaluation(Literal.default(TimeType()), LocalTime.MIDNIGHT)
     checkEvaluation(Literal.default(CalendarIntervalType), new CalendarInterval(0, 0, 0L))
     checkEvaluation(Literal.default(YearMonthIntervalType()), 0)
     checkEvaluation(Literal.default(DayTimeIntervalType()), 0L)
@@ -313,6 +316,13 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
     }
   }
 
+  test("construct literals from arrays of java.time.LocalTime") {
+    val localTime0 = LocalTime.of(1, 2, 3)
+    checkEvaluation(Literal(Array(localTime0)), Array(localTime0))
+    val localTime1 = LocalTime.of(23, 59, 59, 999999000)
+    checkEvaluation(Literal(Array(localTime0, localTime1)), Array(localTime0, localTime1))
+  }
+
   test("construct literals from java.time.Instant") {
     Seq(
       Instant.parse("0001-01-01T00:00:00Z"),
@@ -497,6 +507,11 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
       }
       checkEvaluation(Literal.create(duration, dt), result)
     }
+
+    val time = LocalTime.of(12, 13, 14)
+    DataTypeTestUtils.timeTypes.foreach { tt =>
+      checkEvaluation(Literal.create(time, tt), localTime(12, 13, 14))
+    }
   }
 
   test("SPARK-37967: Literal.create support ObjectType") {
@@ -531,4 +546,17 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
     checkEvaluation(Literal.create(immArraySeq), expected)
     checkEvaluation(Literal.create(immArraySeq, ArrayType(DoubleType)), expected)
   }
+
+  test("TimeType toString and sql") {
+    Seq(
+      Literal.default(TimeType()) -> "00:00:00",
+      Literal(LocalTime.NOON) -> "12:00:00",
+      Literal(LocalTime.of(23, 59, 59, 100 * 1000 * 1000)) -> "23:59:59.1",
+      Literal(LocalTime.of(23, 59, 59, 10000)) -> "23:59:59.00001",
+      Literal(LocalTime.of(23, 59, 59, 999999000)) -> "23:59:59.999999"
+    ).foreach { case (lit, str) =>
+      assert(lit.toString === str)
+      assert(lit.sql === s"TIME '$str'")
+    }
+  }
 }
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/time.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/time.sql.out
index 02608683007e..6dc6cb817840 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/time.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/time.sql.out
@@ -10,7 +10,7 @@ CreateViewCommand `time_view`, select '11:53:26.038344' time_str, 'HH:mm:ss.SSSS
 -- !query
 select time '16:39:45\t'
 -- !query analysis
-Project [59985000000 AS 59985000000#x]
+Project [16:39:45 AS TIME '16:39:45'#x]
 +- OneRowRelation
 
 
diff --git a/sql/core/src/test/resources/sql-tests/results/time.sql.out b/sql/core/src/test/resources/sql-tests/results/time.sql.out
index d21665aeeed8..806cd7262e71 100644
--- a/sql/core/src/test/resources/sql-tests/results/time.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/time.sql.out
@@ -10,7 +10,7 @@ struct<>
 -- !query
 select time '16:39:45\t'
 -- !query schema
-struct<59985000000:time(6)>
+struct<TIME '16:39:45':time(6)>
 -- !query output
 16:39:45
 

