cloud-fan commented on a change in pull request #28650:
URL: https://github.com/apache/spark/pull/28650#discussion_r437231685
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
##########
@@ -56,6 +54,26 @@ trait TimeZoneAwareExpression extends Expression {
@transient lazy val zoneId: ZoneId = DateTimeUtils.getZoneId(timeZoneId.get)
}
+trait TimestampFormatterHelper extends TimeZoneAwareExpression {
Review comment:
do we have expressions that create DateFormatter?
##########
File path:
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
##########
@@ -1171,34 +1165,28 @@ class DateExpressionsSuite extends SparkFunSuite with
ExpressionEvalHelper {
test("Disable week-based date fields and quarter fields for parsing") {
- def checkSparkUpgrade(c: Char): Unit = {
- checkExceptionInExpression[SparkUpgradeException](
- new ParseToTimestamp(Literal("1"), Literal(c.toString)).child, "3.0")
- checkExceptionInExpression[SparkUpgradeException](
- new ParseToDate(Literal("1"), Literal(c.toString)).child, "3.0")
- checkExceptionInExpression[SparkUpgradeException](
- ToUnixTimestamp(Literal("1"), Literal(c.toString)), "3.0")
- checkExceptionInExpression[SparkUpgradeException](
- UnixTimestamp(Literal("1"), Literal(c.toString)), "3.0")
+ def checkException[T <: Exception : ClassTag](c: String, onlyParsing:
Boolean = false): Unit = {
Review comment:
do we need the `onlyParsing` flag? we can get it by `Seq('E', 'F', 'q',
'Q').contains(c)`
##########
File path: sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
##########
@@ -689,8 +689,9 @@ class DateFunctionsSuite extends QueryTest with
SharedSparkSession {
Row(secs(ts5.getTime)), Row(null)))
// invalid format
- checkAnswer(df1.selectExpr(s"to_unix_timestamp(x, 'yyyy-MM-dd
bb:HH:ss')"), Seq(
- Row(null), Row(null), Row(null), Row(null)))
+ val invalid = df1.selectExpr(s"to_unix_timestamp(x, 'yyyy-MM-dd
bb:HH:ss')")
+ val e = intercept[IllegalArgumentException](invalid.collect())
+ assert(e.getMessage.contains('b'))
Review comment:
can we check more of the error message?
##########
File path: sql/core/src/test/resources/sql-tests/inputs/datetime.sql
##########
@@ -138,25 +138,11 @@ select to_timestamp("2019 40", "yyyy mm");
select to_timestamp("2019 10:10:10", "yyyy hh:mm:ss");
-- Unsupported narrow text style
-select date_format(date '2020-05-23', 'GGGGG');
Review comment:
why do we remove these tests?
##########
File path: sql/core/src/test/resources/sql-tests/inputs/datetime.sql
##########
@@ -138,25 +138,11 @@ select to_timestamp("2019 40", "yyyy mm");
select to_timestamp("2019 10:10:10", "yyyy hh:mm:ss");
-- Unsupported narrow text style
-select date_format(date '2020-05-23', 'GGGGG');
Review comment:
so it's not related to this PR? maybe better to open a new PR to remove
duplicated test cases.
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
##########
@@ -1044,7 +1009,8 @@ abstract class UnixTime extends ToTimestamp {
since = "1.5.0")
// scalastyle:on line.size.limit
case class FromUnixTime(sec: Expression, format: Expression, timeZoneId:
Option[String] = None)
- extends BinaryExpression with TimeZoneAwareExpression with
ImplicitCastInputTypes {
+ extends BinaryExpression with TimestampFormatterHelper with
ImplicitCastInputTypes
+ with NullIntolerant {
Review comment:
shall we add `NullIntolerant` to the base class
`TimestampFormatterHelper`? and it seems better to do it in a new PR.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]