This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new c5c1927d6a1 [SPARK-42345][SQL] Rename TimestampNTZ inference conf as spark.sql.sources.timestampNTZTypeInference.enabled
c5c1927d6a1 is described below

commit c5c1927d6a137c0e92417a0efad5da62ab253137
Author: Gengliang Wang <gengli...@apache.org>
AuthorDate: Sun Feb 5 10:43:34 2023 +0300

    [SPARK-42345][SQL] Rename TimestampNTZ inference conf as spark.sql.sources.timestampNTZTypeInference.enabled
    
    ### What changes were proposed in this pull request?
    
    Rename the TimestampNTZ data source inference configuration from `spark.sql.inferTimestampNTZInDataSources.enabled` to `spark.sql.sources.timestampNTZTypeInference.enabled`.
    For more context on this configuration:
    https://github.com/apache/spark/pull/39777
    https://github.com/apache/spark/pull/39812
    https://github.com/apache/spark/pull/39868

    ### Why are the changes needed?
    
    Since the configuration is for data sources, we can put it under the prefix `spark.sql.sources`. The new naming is consistent with another configuration, `spark.sql.sources.partitionColumnTypeInference.enabled`.
    
    ### Does this PR introduce _any_ user-facing change?
    
    ### How was this patch tested?
    
    Closes #39885 from gengliangwang/renameConf.
    
    Authored-by: Gengliang Wang <gengli...@apache.org>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../org/apache/spark/sql/internal/SQLConf.scala      | 20 ++++++++++----------
 .../execution/datasources/PartitioningUtils.scala    |  2 +-
 2 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 363e763be4f..2f05c356160 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -1416,6 +1416,16 @@ object SQLConf {
       .booleanConf
       .createWithDefault(true)
 
+  val INFER_TIMESTAMP_NTZ_IN_DATA_SOURCES =
+    buildConf("spark.sql.sources.timestampNTZTypeInference.enabled")
+      .doc("For the schema inference of JSON/CSV/JDBC data sources and 
partition directories, " +
+        "this config determines whether to choose the TimestampNTZ type if a 
column can be " +
+        "either TimestampNTZ or TimestampLTZ type. If set to true, the 
inference result of " +
+        "the column will be TimestampNTZ type. Otherwise, the result will be 
TimestampLTZ type.")
+      .version("3.4.0")
+      .booleanConf
+      .createWithDefault(false)
+
   val BUCKETING_ENABLED = buildConf("spark.sql.sources.bucketing.enabled")
     .doc("When false, we will treat bucketed table as normal table")
     .version("2.0.0")
@@ -3518,16 +3528,6 @@ object SQLConf {
       .checkValues(TimestampTypes.values.map(_.toString))
       .createWithDefault(TimestampTypes.TIMESTAMP_LTZ.toString)
 
-  val INFER_TIMESTAMP_NTZ_IN_DATA_SOURCES =
-    buildConf("spark.sql.inferTimestampNTZInDataSources.enabled")
-      .doc("For the schema inference of JSON/CSV/JDBC data sources and 
partition directories, " +
-        "this config determines whether to choose the TimestampNTZ type if a 
column can be " +
-        "either TimestampNTZ or TimestampLTZ type. If set to true, the 
inference result of " +
-        "the column will be TimestampNTZ type. Otherwise, the result will be 
TimestampLTZ type.")
-      .version("3.4.0")
-      .booleanConf
-      .createWithDefault(false)
-
   val DATETIME_JAVA8API_ENABLED = buildConf("spark.sql.datetime.java8API.enabled")
     .doc("If the configuration property is set to true, java.time.Instant and " +
       "java.time.LocalDate classes of Java 8 API are used as external types for " +
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningUtils.scala
index 38c3f71ab49..90c45fd11dd 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningUtils.scala
@@ -490,7 +490,7 @@ object PartitioningUtils extends SQLConfHelper {
       val unescapedRaw = unescapePathName(raw)
      // try and parse the date, if no exception occurs this is a candidate to be resolved as
      // TimestampType or TimestampNTZType. The inference timestamp typ is controlled by the conf
-      // "spark.sql.inferTimestampNTZInDataSources.enabled".
+      // "spark.sql.sources.timestampNTZTypeInference.enabled".
       val timestampType = conf.timestampTypeInSchemaInference
       timestampType match {
         case TimestampType => timestampFormatter.parse(unescapedRaw)

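For context, below is a minimal, hypothetical sketch (not part of this commit) of how an application might use the renamed configuration. It assumes a local SparkSession on Spark 3.4+; the object name, temp path, and sample CSV row are illustrative only, while the conf key is the one introduced by this change.

import java.nio.charset.StandardCharsets
import java.nio.file.Files

import org.apache.spark.sql.SparkSession

object TimestampNTZInferenceSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("TimestampNTZInferenceSketch")
      // The conf renamed by this commit: when true, ambiguous timestamp strings seen
      // during schema inference (JSON/CSV/JDBC sources and partition directories)
      // are inferred as TimestampNTZType instead of TimestampType.
      .config("spark.sql.sources.timestampNTZTypeInference.enabled", "true")
      .getOrCreate()

    // Hypothetical input file: a timestamp-like value with no zone offset.
    val path = Files.createTempDirectory("ntz-example").resolve("data.csv")
    Files.write(path, "event_time\n2023-02-05 10:43:34\n".getBytes(StandardCharsets.UTF_8))

    val df = spark.read
      .option("header", "true")
      .option("inferSchema", "true")
      .csv(path.toString)

    // With the conf enabled, the column is expected to be inferred as timestamp_ntz;
    // with it disabled (the default is false), it would be inferred as timestamp.
    df.printSchema()

    spark.stop()
  }
}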

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
