cloud-fan commented on code in PR #52762:
URL: https://github.com/apache/spark/pull/52762#discussion_r2471467833


##########
sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala:
##########
@@ -269,6 +270,84 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest
     )
   }
 
+  test("INVALID_DATETIME_PATTERN with constant pattern (constant folding 
path)") {
+    // Test that invalid pattern letters (like 'I') are properly wrapped with 
error code
+    // when the pattern is a constant literal. This triggers the 
formatterOption lazy val
+    // during constant folding optimization phase.
+    checkError(
+      exception = intercept[SparkRuntimeException] {
+        sql("select to_timestamp('20231225143045', 
'yyyyMMddHHMIss')").collect()
+      },
+      condition = "INVALID_DATETIME_PATTERN.WITH_SUGGESTION",
+      parameters = Map(
+        "pattern" -> "'yyyyMMddHHMIss'",
+        "docroot" -> SparkBuildInfo.spark_doc_root),
+      sqlState = "22007"
+    )
+  }
+
+  test("INVALID_DATETIME_PATTERN with non-constant pattern") {
+    // Test that invalid patterns are properly wrapped when the pattern is NOT a constant
+    // (e.g., from a column or variable). This exercises the runtime getFormatter() path.
+    withTempView("patterns") {
+      sql("select 'yyyyMMddHHMIss' as 
pattern").createOrReplaceTempView("patterns")
+      checkError(
+        exception = intercept[SparkRuntimeException] {
+          sql("select to_timestamp('20231225143045', pattern) from 
patterns").collect()
+        },
+        condition = "INVALID_DATETIME_PATTERN.WITH_SUGGESTION",
+        parameters = Map(
+          "pattern" -> "'yyyyMMddHHMIss'",
+          "docroot" -> SparkBuildInfo.spark_doc_root),
+        sqlState = "22007"
+      )
+    }
+  }
+
+  test("INVALID_DATETIME_PATTERN with various invalid pattern letters") {
+    // Test multiple invalid pattern letters that throw IllegalArgumentException
+    // Using clearly invalid letters like I, P, R which are not valid datetime pattern letters
+    val invalidPatterns = Seq(
+      ("yyyyMMddHHMIss", "I"),   // Invalid 'I' - unknown pattern letter
+      ("yyyyMMddHHPmss", "P"),   // Invalid 'P' - unknown pattern letter
+      ("yyyyMMddRHmmss", "R")    // Invalid 'R' - unknown pattern letter
+    )
+
+    invalidPatterns.foreach { case (pattern, _) =>
+      checkError(
+        exception = intercept[SparkRuntimeException] {
+          sql(s"select to_timestamp('20231225143045', '$pattern')").collect()
+        },
+        condition = "INVALID_DATETIME_PATTERN.WITH_SUGGESTION",
+        parameters = Map(
+          "pattern" -> s"'$pattern'",
+          "docroot" -> SparkBuildInfo.spark_doc_root),
+        sqlState = "22007"
+      )
+    }
+  }
+
+  test("Valid patterns should still work correctly") {
+    // Ensure our fix doesn't break valid patterns in constant folding path

Review Comment:
   We have plenty of tests for `to_timestamp` already...
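
   For context, a minimal sketch of the two paths the new tests exercise (not part of the PR; assumes a live `SparkSession` available as `spark`, and reuses the SQL strings from the diff above):

   ```scala
   // Constant pattern: the literal is folded during optimization, so the
   // invalid letter 'I' is rejected in the constant-folding path.
   spark.sql("select to_timestamp('20231225143045', 'yyyyMMddHHMIss')").collect()

   // Non-constant pattern: the pattern comes from a column, so the formatter
   // is only built at runtime (the getFormatter() path).
   spark.sql("select 'yyyyMMddHHMIss' as pattern").createOrReplaceTempView("patterns")
   spark.sql("select to_timestamp('20231225143045', pattern) from patterns").collect()

   // Per the tests above, both statements are expected to fail with
   // INVALID_DATETIME_PATTERN.WITH_SUGGESTION (SQLSTATE 22007).
   ```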



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
