This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new ae9f612  [SPARK-36257][SQL] Updated the version of TimestampNTZ related changes as 3.3.0
ae9f612 is described below

commit ae9f6126fbbf0cf5fe5a7ece8d074d7c0e11fd93
Author: Gengliang Wang <gengli...@apache.org>
AuthorDate: Thu Jul 22 21:01:29 2021 +0800

    [SPARK-36257][SQL] Updated the version of TimestampNTZ related changes as 3.3.0
    
    ### What changes were proposed in this pull request?
    
    As we decided to release the TimestampNTZ type in Spark 3.3, we should update the versions of the TimestampNTZ related changes to 3.3.0.
    
    ### Why are the changes needed?
    
    Correct the versions in documentation and code comments.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    Existing UT
    
    Closes #33478 from gengliangwang/updateVersion.
    
    Authored-by: Gengliang Wang <gengli...@apache.org>
    Signed-off-by: Gengliang Wang <gengli...@apache.org>
---
 .../src/main/scala/org/apache/spark/sql/Encoders.scala         |  2 +-
 .../spark/sql/catalyst/expressions/datetimeExpressions.scala   | 10 +++++-----
 .../src/main/scala/org/apache/spark/sql/internal/SQLConf.scala |  4 ++--
 .../scala/org/apache/spark/sql/types/TimestampNTZType.scala    |  4 ++--
 .../src/main/scala/org/apache/spark/sql/SQLImplicits.scala     |  2 +-
 sql/core/src/main/scala/org/apache/spark/sql/functions.scala   |  2 +-
 6 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala
index f23f3c6..98d2f1a5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/Encoders.scala
@@ -118,7 +118,7 @@ object Encoders {
    * Creates an encoder that serializes instances of the `java.time.LocalDateTime` class
    * to the internal representation of nullable Catalyst's TimestampNTZType.
    *
-   * @since 3.2.0
+   * @since 3.3.0
    */
   def LOCALDATETIME: Encoder[java.time.LocalDateTime] = ExpressionEncoder()
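
A minimal sketch of how the LOCALDATETIME encoder is used; the session setup and the literal value are illustrative, not part of this commit:

    import java.time.LocalDateTime
    import org.apache.spark.sql.{Encoders, SparkSession}

    val spark = SparkSession.builder().master("local[*]").appName("ntz-encoder-demo").getOrCreate()
    // Encoders.LOCALDATETIME maps LocalDateTime values to a TimestampNTZType column.
    val ds = spark.createDataset(Seq(LocalDateTime.parse("2021-07-22T21:01:29")))(Encoders.LOCALDATETIME)
    ds.printSchema()  // the single "value" column is expected to be of type timestamp_ntz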
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index bc2e33b..5d42b31 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -217,7 +217,7 @@ case class Now() extends CurrentTimestampLike {
        2020-04-25 15:49:11.914
   """,
   group = "datetime_funcs",
-  since = "3.2.0")
+  since = "3.3.0")
 case class LocalTimestamp(timeZoneId: Option[String] = None) extends LeafExpression
   with TimeZoneAwareExpression with CodegenFallback {
 
@@ -1096,7 +1096,7 @@ case class GetTimestamp(
        2016-12-31 00:00:00
   """,
   group = "datetime_funcs",
-  since = "3.2.0")
+  since = "3.3.0")
 // scalastyle:on line.size.limit
 case class ParseToTimestampNTZ(
     left: Expression,
@@ -1143,7 +1143,7 @@ case class ParseToTimestampNTZ(
        2016-12-31 00:00:00
   """,
   group = "datetime_funcs",
-  since = "3.2.0")
+  since = "3.3.0")
 // scalastyle:on line.size.limit
 case class ParseToTimestampLTZ(
     left: Expression,
@@ -2381,7 +2381,7 @@ case class MakeDate(
        NULL
   """,
   group = "datetime_funcs",
-  since = "3.2.0")
+  since = "3.3.0")
 // scalastyle:on line.size.limit
 case class MakeTimestampNTZ(
     year: Expression,
@@ -2440,7 +2440,7 @@ case class MakeTimestampNTZ(
        NULL
   """,
   group = "datetime_funcs",
-  since = "3.2.0")
+  since = "3.3.0")
 // scalastyle:on line.size.limit
 case class MakeTimestampLTZ(
     year: Expression,
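
For context, a hedged sketch of the SQL surface these expressions back; it assumes an active SparkSession `spark` on a 3.3.0 build and that the expressions are registered under the names localtimestamp and make_timestamp_ntz:

    // localtimestamp(): the current timestamp without time zone (LocalTimestamp above).
    spark.sql("SELECT localtimestamp()").show(truncate = false)
    // make_timestamp_ntz builds a TIMESTAMP_NTZ value from individual fields (MakeTimestampNTZ above).
    spark.sql("SELECT make_timestamp_ntz(2021, 7, 22, 21, 1, 29)").show(truncate = false)
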
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index bf421f0..189e59a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -2892,9 +2892,9 @@ object SQLConf {
         s"and type literal. Setting the configuration as 
${TimestampTypes.TIMESTAMP_NTZ} will " +
         "use TIMESTAMP WITHOUT TIME ZONE as the default type while putting it 
as " +
         s"${TimestampTypes.TIMESTAMP_LTZ} will use TIMESTAMP WITH LOCAL TIME 
ZONE. " +
-        "Before the 3.2.0 release, Spark only supports the TIMESTAMP WITH " +
+        "Before the 3.3.0 release, Spark only supports the TIMESTAMP WITH " +
         "LOCAL TIME ZONE type.")
-      .version("3.2.0")
+      .version("3.3.0")
       .stringConf
       .transform(_.toUpperCase(Locale.ROOT))
       .checkValues(TimestampTypes.values.map(_.toString))
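
The configuration entry touched above selects the default timestamp type. A short sketch, assuming the entry is registered under the key spark.sql.timestampType and an existing session `spark`:

    // With TIMESTAMP_NTZ selected, plain TIMESTAMP literals and DDL resolve to
    // TIMESTAMP WITHOUT TIME ZONE; the shipped default stays TIMESTAMP_LTZ.
    spark.conf.set("spark.sql.timestampType", "TIMESTAMP_NTZ")
    spark.sql("SELECT TIMESTAMP'2021-07-22 21:01:29' AS ts").printSchema()
    // expected: ts resolves to timestamp_ntz rather than the default timestamp
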
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampNTZType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampNTZType.scala
index f7d20a0..99c29e4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampNTZType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/TimestampNTZType.scala
@@ -29,7 +29,7 @@ import org.apache.spark.annotation.Unstable
  * To represent an absolute point in time, use `TimestampType` instead.
  *
  * Please use the singleton `DataTypes.TimestampNTZType` to refer the type.
- * @since 3.2.0
+ * @since 3.3.0
  */
 @Unstable
 class TimestampNTZType private() extends AtomicType {
@@ -59,7 +59,7 @@ class TimestampNTZType private() extends AtomicType {
  * "TimestampNTZType" in byte code. Defined with a private constructor so the 
companion
  * object is the only possible instantiation.
  *
- * @since 3.2.0
+ * @since 3.3.0
  */
 @Unstable
 case object TimestampNTZType extends TimestampNTZType
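
A small sketch of referring to the singleton when building a schema by hand (the field name is illustrative):

    import org.apache.spark.sql.types.{StructField, StructType, TimestampNTZType}

    // The case object is the singleton instance referenced throughout Catalyst.
    val schema = StructType(Seq(StructField("event_time", TimestampNTZType, nullable = true)))
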
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLImplicits.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLImplicits.scala
index a3004ca..044231f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLImplicits.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLImplicits.scala
@@ -82,7 +82,7 @@ abstract class SQLImplicits extends LowPrioritySQLImplicits {
   /** @since 3.0.0 */
   implicit def newLocalDateEncoder: Encoder[java.time.LocalDate] = Encoders.LOCALDATE
 
-  /** @since 3.2.0 */
+  /** @since 3.3.0 */
   implicit def newLocalDateTimeEncoder: Encoder[java.time.LocalDateTime] = Encoders.LOCALDATETIME
 
   /** @since 2.2.0 */
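
With the session implicits in scope, the new encoder is picked up automatically; a sketch assuming an active session `spark`:

    import java.time.LocalDateTime
    import spark.implicits._

    // newLocalDateTimeEncoder supplies the Encoder[LocalDateTime] that toDS() needs.
    val ds = Seq(LocalDateTime.of(2021, 7, 22, 21, 1, 29)).toDS()
    ds.printSchema()  // the "value" column is expected to be timestamp_ntz
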
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
index 74fef63..10ff3d4 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/functions.scala
@@ -2981,7 +2981,7 @@ object functions {
    * All calls of localtimestamp within the same query return the same value.
    *
    * @group datetime_funcs
-   * @since 3.2.0
+   * @since 3.3.0
    */
   def localtimestamp(): Column = withExpr { LocalTimestamp() }
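
And the DataFrame API counterpart, again assuming an active session `spark`:

    import org.apache.spark.sql.functions.localtimestamp

    // All references to localtimestamp() within one query evaluate to the same value.
    spark.range(1).select(localtimestamp().as("now_ntz")).printSchema()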
 
