MaxGekk commented on a change in pull request #23196: [SPARK-26243][SQL] Use 
java.time API for parsing timestamps and dates from JSON
URL: https://github.com/apache/spark/pull/23196#discussion_r241809225
 
 

 ##########
 File path: 
sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
 ##########
 @@ -125,56 +126,58 @@ abstract class HadoopFsRelationTest extends QueryTest 
with SQLTestUtils with Tes
     } else {
       Seq(false)
     }
-    for (dataType <- supportedDataTypes) {
-      for (parquetDictionaryEncodingEnabled <- 
parquetDictionaryEncodingEnabledConfs) {
-        val extraMessage = if (isParquetDataSource) {
-          s" with parquet.enable.dictionary = 
$parquetDictionaryEncodingEnabled"
-        } else {
-          ""
-        }
-        logInfo(s"Testing $dataType data type$extraMessage")
-
-        val extraOptions = Map[String, String](
-          "parquet.enable.dictionary" -> 
parquetDictionaryEncodingEnabled.toString
-        )
-
-        withTempPath { file =>
-          val path = file.getCanonicalPath
-
-          val dataGenerator = RandomDataGenerator.forType(
-            dataType = dataType,
-            nullable = true,
-            new Random(System.nanoTime())
-          ).getOrElse {
-            fail(s"Failed to create data generator for schema $dataType")
+    withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> "UTC") {
 
 Review comment:
   > see which seed can reproduce the bug and debug it locally.
   
   I ran it locally many times. It is almost 100% reproducible for any seed.  

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to