yaooqinn commented on a change in pull request #28674:
URL: https://github.com/apache/spark/pull/28674#discussion_r433111137



##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeFormatterHelper.scala
##########
@@ -39,15 +39,22 @@ trait DateTimeFormatterHelper {
     }
   }
 
-  protected def toLocalDate(accessor: TemporalAccessor): LocalDate = {
+  protected def toLocalDate(accessor: TemporalAccessor, locale: Locale): LocalDate = {
     val localDate = accessor.query(TemporalQueries.localDate())
     // If all the date fields are specified, return the local date directly.
     if (localDate != null) return localDate
 
+    lazy val weekBasedYearField = WeekFields.of(locale).weekBasedYear()
     // Users may want to parse only a few datetime fields from a string and extract these fields
     // later, and we should provide default values for missing fields.
     // To be compatible with Spark 2.4, we pick 1970 as the default value of year.
-    val year = getOrDefault(accessor, ChronoField.YEAR, 1970)
+    val year = if (accessor.isSupported(ChronoField.YEAR)) {
+      accessor.get(ChronoField.YEAR)
+    } else if (accessor.isSupported(weekBasedYearField)) {
+      val year = accessor.get(weekBasedYearField) - 1
+      return LocalDate.of(year, 12, 1).`with`(TemporalAdjusters.lastInMonth(DayOfWeek.SUNDAY))

Review comment:
       This was a mistake I made; it is fixed now.
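
       For anyone following along, here is a minimal standalone sketch (outside the patch; `Locale.US` and the sample dates are only illustrative assumptions) of the two java.time pieces the new branch relies on: the locale-dependent week-based-year field and the `TemporalAdjusters.lastInMonth` adjuster.

       ```scala
       // Illustrative only; not part of the patch. Locale.US and the dates are assumptions.
       import java.time.{DayOfWeek, LocalDate}
       import java.time.temporal.{TemporalAdjusters, WeekFields}
       import java.util.Locale

       object WeekBasedYearSketch {
         def main(args: Array[String]): Unit = {
           // Which week-based year a date belongs to depends on the locale's week rules.
           val weekBasedYear = WeekFields.of(Locale.US).weekBasedYear()
           println(LocalDate.of(2020, 1, 1).get(weekBasedYear)) // 2020 under US week rules

           // The adjuster used in the diff: last Sunday of a given month,
           // here applied to December of the preceding year.
           val lastSunday = LocalDate.of(2019, 12, 1)
             .`with`(TemporalAdjusters.lastInMonth(DayOfWeek.SUNDAY))
           println(lastSunday) // 2019-12-29
         }
       }
       ```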



