MaxGekk commented on a change in pull request #32601:
URL: https://github.com/apache/spark/pull/32601#discussion_r651144800



##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala
##########
@@ -345,6 +345,104 @@ case class MakeInterval(
     )
 }
 
+// scalastyle:off line.size.limit
+@ExpressionDescription(
+  usage = "_FUNC_(days, hours, mins, secs) - Make DayTimeIntervalType duration from days, hours, mins and secs.",
+  arguments = """
+    Arguments:
+      * days - the number of days, positive or negative
+      * hours - the number of hours, positive or negative
+      * mins - the number of minutes, positive or negative
+      * secs - the number of seconds with the fractional part in microsecond precision.
+  """,
+  examples = """
+    Examples:
+      > SELECT _FUNC_(1, 12, 30, 01.001001);
+       1 12:30:01.001001000
+      > SELECT _FUNC_(100, null, 3);
+       NULL
+  """,
+  since = "3.2.0",
+  group = "datetime_funcs")
+// scalastyle:on line.size.limit
+case class MakeDTInterval(
+    days: Expression,
+    hours: Expression,
+    mins: Expression,
+    secs: Expression,
+    failOnError: Boolean = SQLConf.get.ansiEnabled)
+    extends QuaternaryExpression with ImplicitCastInputTypes with NullIntolerant {

Review comment:
       Please remove 2 gaps. See sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala
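
For reference, a minimal, self-contained indentation sketch (placeholder traits and class name, not the Spark ones), assuming the two extra spaces in question are the ones before the `extends` clause: constructor parameters keep the 4-space continuation indent, while `extends` drops to a 2-space indent, as in the rest of intervalExpressions.scala.

```scala
// Placeholder traits standing in for QuaternaryExpression, ImplicitCastInputTypes
// and NullIntolerant; only the indentation pattern matters here.
trait BaseExpr
trait CastsInputs
trait NullSafe

case class ExampleIntervalExpr(
    days: Int,       // constructor parameters: 4-space indent
    hours: Int,
    mins: Int,
    secs: BigDecimal)
  extends BaseExpr with CastsInputs with NullSafe  // extends clause: 2-space indent
```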

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
##########
@@ -853,6 +853,19 @@ object IntervalUtils {
     new CalendarInterval(totalMonths, totalDays, micros)
   }
 
+  def makeMicrosInterval(

Review comment:
       Let's rename it to `makeDayTimeInterval`

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
##########
@@ -853,6 +853,19 @@ object IntervalUtils {
     new CalendarInterval(totalMonths, totalDays, micros)
   }
 
+  def makeMicrosInterval(
+      days: Int,
+      hours: Int,
+      mins: Int,
+      secs: Decimal): Long = {
+    assert(secs.scale == 6, "Seconds fractional must have 6 digits for microseconds")
+    var micros = secs.toUnscaledLong
+    micros = Math.addExact(micros, Math.multiplyExact(days, MICROS_PER_DAY))
+    micros = Math.addExact(micros, Math.multiplyExact(hours, MICROS_PER_HOUR))

Review comment:
       Should hours (and other fields) be in some range? See the table from SQL:2016:
   [Screenshot 2021-06-14 at 20 41 45: https://user-images.githubusercontent.com/1580697/121935371-07b0bb80-cd51-11eb-851d-000c865f99c4.png]
   If not, please explain.
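
For illustration, a standalone sketch (assumed names and constants, not the PR's IntervalUtils code) of how the non-leading fields could be checked against SQL:2016-style bounds before accumulating microseconds; it also uses the `makeDayTimeInterval` name suggested above. Whether such checks belong here is exactly the open question, so they are marked as hypothetical.

```scala
// Standalone sketch: overflow-checked accumulation of a day-time interval in
// microseconds, with hypothetical per-field range checks following the SQL:2016
// table referenced above. Constants are spelled out here for self-containment.
object DayTimeIntervalSketch {
  private val MicrosPerSecond = 1000000L
  private val MicrosPerMinute = 60L * MicrosPerSecond
  private val MicrosPerHour   = 60L * MicrosPerMinute
  private val MicrosPerDay    = 24L * MicrosPerHour

  def makeDayTimeInterval(days: Int, hours: Int, mins: Int, secs: BigDecimal): Long = {
    require(secs.scale <= 6, "Seconds fractional part must fit in microsecond precision")
    // Hypothetical SQL:2016-style bounds; only the leading field (days) is unconstrained.
    require(math.abs(hours) <= 23, "hours must be within [-23, 23]")
    require(math.abs(mins) <= 59, "minutes must be within [-59, 59]")
    require(secs.abs < 60, "seconds must be within (-60, 60)")

    var micros = (secs * BigDecimal(MicrosPerSecond)).bigDecimal.longValueExact()
    micros = Math.addExact(micros, Math.multiplyExact(days.toLong, MicrosPerDay))
    micros = Math.addExact(micros, Math.multiplyExact(hours.toLong, MicrosPerHour))
    micros = Math.addExact(micros, Math.multiplyExact(mins.toLong, MicrosPerMinute))
    micros
  }
}
```

For the docstring example above, `makeDayTimeInterval(1, 12, 30, BigDecimal("1.001001"))` would return 131401001001 microseconds, i.e. 1 12:30:01.001001.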

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala
##########
@@ -345,6 +345,109 @@ case class MakeInterval(
     )
 }
 
+// scalastyle:off line.size.limit
+@ExpressionDescription(
+  usage = "_FUNC_(days, hours, mins, secs) - Make DayTimeIntervalType duration from days, hours, mins and secs.",
+  arguments = """
+    Arguments:
+      * days - the number of days, positive or negative
+      * hours - the number of hours, positive or negative
+      * mins - the number of minutes, positive or negative
+      * secs - the number of seconds with the fractional part in microsecond precision.
+  """,
+  examples = """
+    Examples:
+      > SELECT _FUNC_(1, 12, 30, 01.001001);
+       1 12:30:01.001001000
+      > SELECT _FUNC_(100, null, 3);
+       NULL
+  """,
+  since = "3.2.0",
+  group = "datetime_funcs")
+// scalastyle:on line.size.limit
+case class MakeDuration(
+    days: Expression,
+    hours: Expression,
+    mins: Expression,
+    secs: Expression,
+    failOnError: Boolean = SQLConf.get.ansiEnabled)

Review comment:
       I don't see any reason to support non-ANSI behavior for ANSI intervals. Let's remove this parameter.
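
A brief standalone illustration of the point (hypothetical helper names, not the PR's code): `MakeInterval` carries a `failOnError` flag so that, outside ANSI mode, an arithmetic overflow can yield null instead of an error; since `DayTimeIntervalType` is itself an ANSI interval type, overflow can always be treated as an error and the flag has nothing left to switch.

```scala
object AnsiIntervalArithmetic {
  // Legacy-style path: swallow the overflow and return None (i.e. null) when ANSI mode is off.
  def addWithFlag(a: Long, b: Long, failOnError: Boolean): Option[Long] =
    try Some(Math.addExact(a, b))
    catch { case e: ArithmeticException => if (failOnError) throw e else None }

  // ANSI-interval path: overflow always propagates as an error, so no flag is needed.
  def addAnsi(a: Long, b: Long): Long = Math.addExact(a, b)
}
```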

##########
File path: sql/core/src/test/scala/org/apache/spark/sql/ExpressionsSchemaSuite.scala
##########
@@ -170,7 +170,7 @@ class ExpressionsSchemaSuite extends QueryTest with SharedSparkSession {
           funcName = segments(2).trim,
           sql = segments(3).trim,
           schema = segments(4).trim)
-      }
+      }.sortBy(_.className)

Review comment:
       Why do you need this?



