bart-samwel commented on a change in pull request #28576:
URL: https://github.com/apache/spark/pull/28576#discussion_r427850722
##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeFormatterHelper.scala
##########
@@ -31,17 +31,50 @@ import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy._
trait DateTimeFormatterHelper {
+  private def getOrDefault(accessor: TemporalAccessor, field: ChronoField, default: Int): Int = {
+    if (accessor.isSupported(field)) {
+      accessor.get(field)
+    } else {
+      default
+    }
+  }
+
+  protected def toLocalDate(accessor: TemporalAccessor, allowMissingYear: Boolean): LocalDate = {
+    val year = if (accessor.isSupported(ChronoField.YEAR)) {
+      accessor.get(ChronoField.YEAR)
+    } else if (allowMissingYear) {
+      // To keep backward compatibility with Spark 2.x, we pick 1970 as the default value of year.
+      1970
+    } else {
+      throw new SparkUpgradeException("3.0",
+        "Year must be given in the date/timestamp string to be parsed. You can set " +
+          SQLConf.LEGACY_ALLOW_MISSING_YEAR_DURING_PARSING.key + " to true, to pick 1970 as " +
+          "the default value of year.", null)
+    }
+    val month = getOrDefault(accessor, ChronoField.MONTH_OF_YEAR, 1)
Review comment:
I guess we could even allow the default year, and if people hit Feb 29
then they'll get an error at runtime. No need for a legacy config in that
case...
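
A minimal, self-contained sketch of the idea in the comment above (not the PR's code): always default the year to 1970 when the pattern omits it, and let java.time reject the impossible combination when the LocalDate is built, since 1970 is not a leap year. The object name DefaultYearSketch, the helper toLocalDateWithDefaultYear, and the "MM-dd" pattern are made up for this illustration.

import java.time.LocalDate
import java.time.format.DateTimeFormatter
import java.time.temporal.{ChronoField, TemporalAccessor}

object DefaultYearSketch {
  // Same shape as the helper in the diff: read a field if the parsed text
  // supplied it, otherwise fall back to a default.
  private def getOrDefault(accessor: TemporalAccessor, field: ChronoField, default: Int): Int =
    if (accessor.isSupported(field)) accessor.get(field) else default

  // Hypothetical helper mirroring the suggestion: fall back to 1970 for a
  // missing year and rely on LocalDate.of to validate the result.
  def toLocalDateWithDefaultYear(text: String, pattern: String): LocalDate = {
    val accessor = DateTimeFormatter.ofPattern(pattern).parse(text)
    val year = getOrDefault(accessor, ChronoField.YEAR, 1970)
    val month = getOrDefault(accessor, ChronoField.MONTH_OF_YEAR, 1)
    val day = getOrDefault(accessor, ChronoField.DAY_OF_MONTH, 1)
    // LocalDate.of validates the combination, so "02-29" with the default
    // year 1970 (not a leap year) throws java.time.DateTimeException here.
    LocalDate.of(year, month, day)
  }

  def main(args: Array[String]): Unit = {
    println(toLocalDateWithDefaultYear("02-28", "MM-dd")) // prints 1970-02-28
    println(toLocalDateWithDefaultYear("02-29", "MM-dd")) // throws DateTimeException
  }
}

In this sketch the year branch is unconditional; in the PR's code the same fallback is gated behind allowMissingYear and the legacy config, which is exactly what the comment questions.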