cloud-fan commented on a change in pull request #28592:
URL: https://github.com/apache/spark/pull/28592#discussion_r429180213



##########
File path: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeFormatterHelper.scala
##########
@@ -117,6 +117,33 @@ trait DateTimeFormatterHelper {
         s"set ${SQLConf.LEGACY_TIME_PARSER_POLICY.key} to LEGACY to restore 
the behavior " +
         s"before Spark 3.0, or set to CORRECTED and treat it as an invalid 
datetime string.", e)
   }
+
+  /**
+   * When the new DateTimeFormatter fails to initialize because of an invalid 
datetime pattern, it
+   * will throw IllegalArgumentException. If the pattern can be recognized by 
the legacy formatter
+   * it will raise SparkUpgradeException to tell users to restore the previous 
behavior via LEGACY
+   * policy or follow our guide to correct their pattern. Otherwise, the 
original
+   * IllegalArgumentException will be thrown.
+   *
+   * @param pattern the date time pattern
+   * @param tryLegacyFormatter a by-name block that forces a legacy datetime 
formatter to be
+   *                           initialized, used to capture any exception it throws
+   */
+
+  protected def checkLegacyFormatter(
+      pattern: String,
+      tryLegacyFormatter: => Unit): PartialFunction[Throwable, 
DateTimeFormatter] = {
+    case e: IllegalArgumentException =>
+      try {
+        tryLegacyFormatter
+      } catch {
+        case _: Throwable => throw e
+      }
+      throw new SparkUpgradeException("3.0", s"Fail to recognize '$pattern' 
pattern in the" +
+        s" new parser. 1) You can set ${SQLConf.LEGACY_TIME_PARSER_POLICY.key} 
to LEGACY to" +

Review comment:
       `new parser` -> `DateTimeFormatter`




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to