cloud-fan commented on a change in pull request #28650:
URL: https://github.com/apache/spark/pull/28650#discussion_r431680313



##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
##########
@@ -859,127 +865,80 @@ case class UnixTimestamp(timeExp: Expression, format: Expression, timeZoneId: Op
 }
 
 abstract class ToTimestamp
-  extends BinaryExpression with TimeZoneAwareExpression with ExpectsInputTypes {
+  extends BinaryExpression with TimestampFormatterHelper with ExpectsInputTypes {
 
   // The result of the conversion to timestamp is microseconds divided by this factor.
   // For example if the factor is 1000000, the result of the expression is in seconds.
   protected def downScaleFactor: Long
 
+  override protected def formatString: Expression = right
+  override protected def isParsing = true
+
   override def inputTypes: Seq[AbstractDataType] =
     Seq(TypeCollection(StringType, DateType, TimestampType), StringType)
 
   override def dataType: DataType = LongType
   override def nullable: Boolean = true
 
-  private lazy val constFormat: UTF8String = right.eval().asInstanceOf[UTF8String]
-  private lazy val formatter: TimestampFormatter =
-    try {
-      TimestampFormatter(
-        constFormat.toString,
-        zoneId,
-        legacyFormat = SIMPLE_DATE_FORMAT,
-        needVarLengthSecondFraction = true)
-    } catch {
-      case e: SparkUpgradeException => throw e
-      case NonFatal(_) => null
-    }
-
   override def eval(input: InternalRow): Any = {
-    val t = left.eval(input)
-    if (t == null) {
-      null
-    } else {
+    Option(left.eval(input)).map { t =>
       left.dataType match {
-        case DateType =>
-          epochDaysToMicros(t.asInstanceOf[Int], zoneId) / downScaleFactor
-        case TimestampType =>
-          t.asInstanceOf[Long] / downScaleFactor
-        case StringType if right.foldable =>
-          if (constFormat == null || formatter == null) {
-            null
-          } else {
-            try {
-              formatter.parse(
-                t.asInstanceOf[UTF8String].toString) / downScaleFactor
-            } catch {
-              case e: SparkUpgradeException => throw e
-              case NonFatal(_) => null
-            }
-          }
+        case DateType => epochDaysToMicros(t.asInstanceOf[Int], zoneId) / downScaleFactor
+        case TimestampType => t.asInstanceOf[Long] / downScaleFactor
         case StringType =>
-          val f = right.eval(input)
-          if (f == null) {
-            null

Review comment:
       ditto
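
For context, the refactor in this hunk replaces an explicit `if (t == null) null else ...` check with Option-based null propagation. Below is a minimal, self-contained sketch of that pattern only; `NullSafeEvalSketch` and `downScale` are hypothetical names for illustration, not Spark's API.

object NullSafeEvalSketch {
  // Mirrors the shape of the new eval: a null input short-circuits to null,
  // otherwise the microsecond value is divided down to the target precision.
  def downScale(value: Any, downScaleFactor: Long): Any =
    Option(value)                                               // null -> None, so map is skipped
      .map[Any] { case micros: Long => micros / downScaleFactor }
      .orNull                                                   // Some(x) -> x, None -> null

  def main(args: Array[String]): Unit = {
    println(downScale(3000000L, 1000000L)) // 3 (microseconds -> seconds)
    println(downScale(null, 1000000L))     // null
  }
}

The version in the diff additionally dispatches on left.dataType inside the map; the point here is only that Option(...).map(...).orNull keeps the null-in/null-out semantics without nested if/else.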



