cloud-fan commented on a change in pull request #32999:
URL: https://github.com/apache/spark/pull/32999#discussion_r655546772



##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
##########
@@ -349,8 +349,18 @@ class Analyzer(override val catalogManager: CatalogManager)
       case p: LogicalPlan => p.transformExpressionsUpWithPruning(
         _.containsPattern(BINARY_ARITHMETIC), ruleId) {
        case a @ Add(l, r, f) if a.childrenResolved => (l.dataType, r.dataType) match {
-          case (DateType, _: DayTimeIntervalType) => TimeAdd(Cast(l, TimestampType), r)
-          case (_: DayTimeIntervalType, DateType) => TimeAdd(Cast(r, TimestampType), l)
+          case (DateType, dit: DayTimeIntervalType) =>

Review comment:
       I agree with the change, but I think it's a separate thing that can be done in a new PR.
   
   The impl detail can also be improved: we can convert the interval to the number of days (`ExtractANSIIntervalDays`) and then use the `DateAdd` expression.
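   For illustration only, a minimal sketch of that suggestion (hypothetical follow-up code, assuming `l` and `r` are the `Add` operands from the diff above):
   
   ```scala
   // Hedged sketch of the suggested rewrite, not this PR's code:
   // extract the day component of the ANSI interval and reuse DateAdd,
   // instead of casting the date operand to TimestampType for TimeAdd.
   case (DateType, _: DayTimeIntervalType) =>
     DateAdd(l, ExtractANSIIntervalDays(r))
   case (_: DayTimeIntervalType, DateType) =>
     DateAdd(r, ExtractANSIIntervalDays(l))
   ```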

##########
File path: sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
##########
@@ -2790,7 +2790,9 @@ class ColumnExpressionSuite extends QueryTest with SharedSparkSession {
          ).foreach { case (end, start) =>
            val df = Seq((end, start)).toDF("end", "start")
            val daysBetween = Duration.ofDays(ChronoUnit.DAYS.between(start, end))
-            checkAnswer(df.select($"end" - $"start"), Row(daysBetween))
+            val r = df.select($"end" - $"start").toDF("diff")
+            checkAnswer(r, Row(daysBetween))
+            assert(r.schema === new StructType().add("diff", DayTimeIntervalType(0, 0)))

Review comment:
       Let's exclude the `Date +/- Interval` changes from this PR.

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
##########
@@ -349,8 +349,18 @@ class Analyzer(override val catalogManager: CatalogManager)
       case p: LogicalPlan => p.transformExpressionsUpWithPruning(
         _.containsPattern(BINARY_ARITHMETIC), ruleId) {
        case a @ Add(l, r, f) if a.childrenResolved => (l.dataType, r.dataType) match {
-          case (DateType, _: DayTimeIntervalType) => TimeAdd(Cast(l, TimestampType), r)
-          case (_: DayTimeIntervalType, DateType) => TimeAdd(Cast(r, TimestampType), l)
+          case (DateType, dit: DayTimeIntervalType) =>

Review comment:
       Yes, please.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]


