asfgit closed pull request #23455: [SPARK-26246][SQL][FOLLOWUP] Inferring 
TimestampType from JSON
URL: https://github.com/apache/spark/pull/23455
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):

diff --git a/docs/sql-migration-guide-upgrade.md 
b/docs/sql-migration-guide-upgrade.md
index c4d2157de8b60..7e6a0c097d242 100644
--- a/docs/sql-migration-guide-upgrade.md
+++ b/docs/sql-migration-guide-upgrade.md
@@ -40,6 +40,9 @@ displayTitle: Spark SQL Upgrading Guide
   - In Spark version 2.4 and earlier, JSON datasource and JSON functions like 
`from_json` convert a bad JSON record to a row with all `null`s in the 
PERMISSIVE mode when specified schema is `StructType`. Since Spark 3.0, the 
returned row can contain non-`null` fields if some of JSON column values were 
parsed and converted to desired types successfully.
 
   - Since Spark 3.0, the `unix_timestamp`, `date_format`, `to_unix_timestamp`, 
`from_unixtime`, `to_date`, `to_timestamp` functions use java.time API for 
parsing and formatting dates/timestamps from/to strings by using ISO chronology 
(https://docs.oracle.com/javase/8/docs/api/java/time/chrono/IsoChronology.html) 
based on Proleptic Gregorian calendar. In Spark version 2.4 and earlier, 
java.text.SimpleDateFormat and java.util.GregorianCalendar (hybrid calendar 
that supports both the Julian and Gregorian calendar systems, see 
https://docs.oracle.com/javase/7/docs/api/java/util/GregorianCalendar.html) is 
used for the same purpose. The new implementation supports pattern formats as 
described here 
https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html
 and performs strict checking of its input. For example, the `2015-07-22 
10:00:00` timestamp cannot be parsed if the pattern is `yyyy-MM-dd` because the 
parser does not consume whole input. Another example is the `31/01/2015 00:00` 
input cannot be parsed by the `dd/MM/yyyy hh:mm` pattern because `hh` expects 
hours in the range `1-12`. To switch back to the implementation used in Spark 
2.4 and earlier, set `spark.sql.legacy.timeParser.enabled` to `true`.
+
+  - Since Spark 3.0, JSON datasource and JSON function `schema_of_json` infer 
TimestampType from string values if they match the pattern defined by the 
JSON option `timestampFormat`. Set JSON option `inferTimestamp` to `false` to 
disable such type inference.
+
 ## Upgrading From Spark SQL 2.3 to 2.4
 
   - In Spark version 2.3 and earlier, the second parameter to array_contains 
function is implicitly promoted to the element type of first array type 
parameter. This type promotion can be lossy and may cause `array_contains` 
function to return wrong result. This problem has been addressed in 2.4 by 
employing a safer type promotion mechanism. This can cause some change in 
behavior and are illustrated in the table below.
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
index eaff3fa7bec25..1ec9d5093a789 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
@@ -117,6 +117,12 @@ private[sql] class JSONOptions(
    */
   val pretty: Boolean = 
parameters.get("pretty").map(_.toBoolean).getOrElse(false)
 
+  /**
+   * Enables inferring of TimestampType from strings matched to the timestamp 
pattern
+   * defined by the timestampFormat option.
+   */
+  val inferTimestamp: Boolean = 
parameters.get("inferTimestamp").map(_.toBoolean).getOrElse(true)
+
   /** Sets config options on a Jackson [[JsonFactory]]. */
   def setJacksonOptions(factory: JsonFactory): Unit = {
     factory.configure(JsonParser.Feature.ALLOW_COMMENTS, allowComments)
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JsonInferSchema.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JsonInferSchema.scala
index 3203e626ea400..0bf3f03cdb72d 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JsonInferSchema.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JsonInferSchema.scala
@@ -128,7 +128,8 @@ private[sql] class JsonInferSchema(options: JSONOptions) 
extends Serializable {
         }
         if (options.prefersDecimal && decimalTry.isDefined) {
           decimalTry.get
-        } else if ((allCatch opt timestampFormatter.parse(field)).isDefined) {
+        } else if (options.inferTimestamp &&
+            (allCatch opt timestampFormatter.parse(field)).isDefined) {
           TimestampType
         } else {
           StringType
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/json/JsonInferSchemaSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/json/JsonInferSchemaSuite.scala
index 9307f9b47b807..9a6f4f5f9b0cb 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/json/JsonInferSchemaSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/json/JsonInferSchemaSuite.scala
@@ -99,4 +99,10 @@ class JsonInferSchemaSuite extends SparkFunSuite with 
SQLHelper {
       }
     }
   }
+
+  test("disable timestamp inferring") {
+    val json = """{"a": "2019-01-04T21:11:10.123Z"}"""
+    checkType(Map("inferTimestamp" -> "true"), json, TimestampType)
+    checkType(Map("inferTimestamp" -> "false"), json, StringType)
+  }
 }


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to