maropu commented on a change in pull request #28979:
URL: https://github.com/apache/spark/pull/28979#discussion_r450548418



##########
File path: sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
##########
@@ -504,23 +508,34 @@ class UDFSuite extends QueryTest with SharedSparkSession {
   }
 
   test("Using java.time.Instant in UDF") {
-    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
-      val expected = java.time.Instant.parse("2019-02-27T00:00:00Z")
-      val plusSec = udf((i: java.time.Instant) => i.plusSeconds(1))
-      val df = spark.sql("SELECT TIMESTAMP '2019-02-26 23:59:59Z' as t")
-        .select(plusSec('t))
-      assert(df.collect().toSeq === Seq(Row(expected)))
-    }
+    val dtf = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
+    val expected = java.time.Instant.parse("2019-02-27T00:00:00Z")
+      .atZone(DateTimeUtils.getZoneId(conf.sessionLocalTimeZone))
+      .toLocalDateTime
+      .format(dtf)
+    val plusSec = udf((i: java.time.Instant) => i.plusSeconds(1))
+    val df = spark.sql("SELECT TIMESTAMP '2019-02-26 23:59:59Z' as t")
+      .select(plusSec('t).cast(StringType))
+    assert(df.collect().toSeq === Seq(Row(expected)))
   }
 
   test("Using java.time.LocalDate in UDF") {
-    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
-      val expected = java.time.LocalDate.parse("2019-02-27")
-      val plusDay = udf((i: java.time.LocalDate) => i.plusDays(1))
-      val df = spark.sql("SELECT DATE '2019-02-26' as d")
-        .select(plusDay('d))
-      assert(df.collect().toSeq === Seq(Row(expected)))
-    }
+    val expected = java.time.LocalDate.parse("2019-02-27").toString
+    val plusDay = udf((i: java.time.LocalDate) => i.plusDays(1))
+    val df = spark.sql("SELECT DATE '2019-02-26' as d")
+      .select(plusDay('d).cast(StringType))
+    assert(df.collect().toSeq === Seq(Row(expected)))
+  }
+
+  test("Using combined types of Instant/LocalDate in UDF") {
+    val ts = "2019-02-26T23:59:59Z"
+    val date = "2019-02-26"
+    val expectedDate = sql(s"SELECT CAST(DATE '$date' AS STRING)").collect().head.getString(0)
+    val expectedIns = sql(s"SELECT CAST(TIMESTAMP '$ts' AS STRING)").collect().head.getString(0)
+    spark.udf.register("toDateTime", udf((d: LocalDate, i: Instant) => DateTimeResult(d, i)))

Review comment:
       The previous behaviour looks like a bug? In v3.0, registering a UDF whose return type is inferred as `scala.Null` silently produces a null column, but with this PR the same call throws `ClassNotFoundException: scala.Null`. cc: @cloud-fan
   ```
   // v3.0
   scala> val dtf = java.time.format.DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
   scala> val timestamp = java.sql.Timestamp.valueOf("2019-02-26 23:59:59")
   scala> val df = Seq((timestamp)).toDF("t")
   scala> spark.udf.register("scalaUdf", udf((t: java.sql.Timestamp) => null))
   scala> df.selectExpr("scalaUdf(t)").show()
   +------+
   |UDF(t)|
   +------+
   |  null|
   +------+
   
   // This PR
   scala> val dtf = java.time.format.DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
   scala> val timestamp = java.sql.Timestamp.valueOf("2019-02-26 23:59:59")
   scala> val df = Seq((timestamp)).toDF("t")
   scala> spark.udf.register("scalaUdf", udf((t: java.sql.Timestamp) => null))
   java.lang.ClassNotFoundException: scala.Null
     at scala.reflect.internal.util.AbstractFileClassLoader.findClass(AbstractFileClassLoader.scala:72)
     at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:588)
     at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:521)
     at java.base/java.lang.Class.forName0(Native Method)
     at java.base/java.lang.Class.forName(Class.java:398)
     at scala.reflect.runtime.JavaMirrors$JavaMirror.javaClass(JavaMirrors.scala:589)
     at scala.reflect.runtime.JavaMirrors$JavaMirror.$anonfun$classToJava$1(JavaMirrors.scala:1265)
     at scala.reflect.runtime.TwoWayCaches$TwoWayCache.$anonfun$toJava$1(TwoWayCaches.scala:61)
     at scala.reflect.runtime.TwoWayCaches$TwoWayCache.toJava(TwoWayCaches.scala:57)
     at scala.reflect.runtime.JavaMirrors$JavaMirror.classToJava(JavaMirrors.scala:1257)
     at scala.reflect.runtime.JavaMirrors$JavaMirror.typeToJavaClass(JavaMirrors.scala:1351)
     at scala.reflect.runtime.JavaMirrors$JavaMirror.runtimeClass(JavaMirrors.scala:227)
     at scala.reflect.runtime.JavaMirrors$JavaMirror.runtimeClass(JavaMirrors.scala:68)
     at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder$.apply(ExpressionEncoder.scala:56)
     at org.apache.spark.sql.functions$.udf(functions.scala:4542)
     ... 47 elided
   ```
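
   FWIW, here is a possible workaround sketch (my assumption, not verified against this PR): if the failure really comes from `ExpressionEncoder` trying to resolve the inferred `scala.Null` return type, giving the lambda an explicit return type should keep the old observable behaviour. Continuing the REPL session above (`spark`, `df`), with a hypothetical helper name `toNullString`:
   ```
   // Hypothetical workaround: ascribe a concrete return type so the encoder
   // resolves String instead of scala.Null.
   scala> val toNullString = (t: java.sql.Timestamp) => null.asInstanceOf[String]
   scala> spark.udf.register("scalaUdf", udf(toNullString))
   scala> df.selectExpr("scalaUdf(t)").show()  // should show a null column, as in v3.0
   ```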




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
