This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 234163f  [SPARK-35732][SQL] Parse DayTimeIntervalType from JSON
234163f is described below

commit 234163fbe08c8ac51b2ce52094c38baaa4e06709
Author: Angerszhuuuu <angers....@gmail.com>
AuthorDate: Thu Jun 17 12:54:34 2021 +0300

    [SPARK-35732][SQL] Parse DayTimeIntervalType from JSON
    
    ### What changes were proposed in this pull request?
    Support parsing DayTimeIntervalType from JSON
    
    ### Why are the changes needed?
    This will allow storing day-second intervals as table columns in the Hive external catalog.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Added UT
    
    Closes #32930 from AngersZhuuuu/SPARK-35732.
    
    Lead-authored-by: Angerszhuuuu <angers....@gmail.com>
    Co-authored-by: AngersZhuuuu <angers....@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../main/scala/org/apache/spark/sql/types/DataType.scala    | 13 +++++++++++--
 .../scala/org/apache/spark/sql/types/DataTypeSuite.scala    |  2 +-
 2 files changed, 12 insertions(+), 3 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
index 1c4ad88..d781b05 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala
@@ -37,6 +37,7 @@ import org.apache.spark.sql.catalyst.util.StringUtils.StringConcat
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.SQLConf.StoreAssignmentPolicy
 import org.apache.spark.sql.internal.SQLConf.StoreAssignmentPolicy.{ANSI, STRICT}
+import org.apache.spark.sql.types.DayTimeIntervalType._
 import org.apache.spark.sql.types.YearMonthIntervalType._
 import org.apache.spark.util.Utils
 
@@ -172,8 +173,16 @@ object DataType {
   private val otherTypes = {
     Seq(NullType, DateType, TimestampType, BinaryType, IntegerType, 
BooleanType, LongType,
       DoubleType, FloatType, ShortType, ByteType, StringType, 
CalendarIntervalType,
-      // TODO(SPARK-35732): Parse DayTimeIntervalType from JSON
-      DayTimeIntervalType(),
+      DayTimeIntervalType(DAY, DAY),
+      DayTimeIntervalType(DAY, HOUR),
+      DayTimeIntervalType(DAY, MINUTE),
+      DayTimeIntervalType(DAY, SECOND),
+      DayTimeIntervalType(HOUR, HOUR),
+      DayTimeIntervalType(HOUR, MINUTE),
+      DayTimeIntervalType(HOUR, SECOND),
+      DayTimeIntervalType(MINUTE, MINUTE),
+      DayTimeIntervalType(MINUTE, SECOND),
+      DayTimeIntervalType(SECOND, SECOND),
       YearMonthIntervalType(YEAR, YEAR),
       YearMonthIntervalType(MONTH, MONTH),
       YearMonthIntervalType(YEAR, MONTH),
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
index d620415..3761833 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
@@ -256,7 +256,7 @@ class DataTypeSuite extends SparkFunSuite {
   checkDataTypeFromJson(VarcharType(10))
   checkDataTypeFromDDL(VarcharType(11))
 
-
+  dayTimeIntervalTypes.foreach(checkDataTypeFromJson)
   yearMonthIntervalTypes.foreach(checkDataTypeFromJson)
 
   yearMonthIntervalTypes.foreach(checkDataTypeFromDDL)

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to