Repository: spark
Updated Branches:
refs/heads/branch-1.5 b767ceeb2 -> bf79a171e
[SPARK-11752] [SQL] fix timezone problem for DateTimeUtils.getSeconds
Code snippet to reproduce it (imports added here for completeness; `===` assumes ScalaTest's assertions are in scope, as in `DateTimeUtilsSuite`):
```
import java.sql.Timestamp
import java.util.TimeZone
import org.apache.spark.sql.catalyst.util.DateTimeUtils._

TimeZone.setDefault(TimeZone.getTimeZone("Asia/Shanghai"))
val t = Timestamp.valueOf("1900-06-11 12:14:50.789")
val us = fromJavaTimestamp(t)
assert(getSeconds(us) === t.getSeconds)
```
It would be good to add a regression test for this, but the reproducing code needs
to change the default timezone, and even if we change it back afterwards, the
`lazy val defaultTimeZone` in `DateTimeUtils` has already been evaluated and stays
fixed, as sketched below.
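A minimal sketch of why the cached lazy val gets in the way of such a test
(`TimeZoneHolder` is a hypothetical stand-in, not Spark code): a JVM-wide
default-timezone change made after the lazy val has been forced is never observed.
```
import java.util.TimeZone

// Illustrative stand-in for the pattern in DateTimeUtils: the default timezone
// is captured once, the first time the lazy val is forced.
object TimeZoneHolder {
  lazy val defaultTimeZone: TimeZone = TimeZone.getDefault
}

TimeZone.setDefault(TimeZone.getTimeZone("Asia/Shanghai"))
println(TimeZoneHolder.defaultTimeZone.getID)   // Asia/Shanghai -- first access caches it

TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
println(TimeZoneHolder.defaultTimeZone.getID)   // still Asia/Shanghai -- later changes are ignored
```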
Author: Wenchen Fan <[email protected]>
Closes #9728 from cloud-fan/seconds.
(cherry picked from commit 06f1fdba6d1425afddfc1d45a20dbe9bede15e7a)
Signed-off-by: Davies Liu <[email protected]>
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/bf79a171
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/bf79a171
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/bf79a171
Branch: refs/heads/branch-1.5
Commit: bf79a171e93c63d48dcc2fe066440f6ed61c9cb1
Parents: b767cee
Author: Wenchen Fan <[email protected]>
Authored: Mon Nov 16 08:58:40 2015 -0800
Committer: Davies Liu <[email protected]>
Committed: Mon Nov 16 09:00:29 2015 -0800
----------------------------------------------------------------------
.../spark/sql/catalyst/util/DateTimeUtils.scala | 14 ++++++++------
.../spark/sql/catalyst/util/DateTimeUtilsSuite.scala | 2 +-
2 files changed, 9 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/bf79a171/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index e9f0689..c6a2780 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -402,16 +402,19 @@ object DateTimeUtils {
/**
* Returns the microseconds since year zero (-17999) from microseconds since epoch.
*/
- def absoluteMicroSecond(microsec: SQLTimestamp): SQLTimestamp = {
+ private def absoluteMicroSecond(microsec: SQLTimestamp): SQLTimestamp = {
microsec + toYearZero * MICROS_PER_DAY
}
+ private def localTimestamp(microsec: SQLTimestamp): SQLTimestamp = {
+ absoluteMicroSecond(microsec) + defaultTimeZone.getOffset(microsec / 1000) * 1000L
+ }
+
/**
* Returns the hour value of a given timestamp value. The timestamp is expressed in microseconds.
*/
def getHours(microsec: SQLTimestamp): Int = {
- val localTs = absoluteMicroSecond(microsec) + defaultTimeZone.getOffset(microsec / 1000) * 1000L
- ((localTs / MICROS_PER_SECOND / 3600) % 24).toInt
+ ((localTimestamp(microsec) / MICROS_PER_SECOND / 3600) % 24).toInt
}
/**
@@ -419,8 +422,7 @@ object DateTimeUtils {
* microseconds.
*/
def getMinutes(microsec: SQLTimestamp): Int = {
- val localTs = absoluteMicroSecond(microsec) + defaultTimeZone.getOffset(microsec / 1000) * 1000L
- ((localTs / MICROS_PER_SECOND / 60) % 60).toInt
+ ((localTimestamp(microsec) / MICROS_PER_SECOND / 60) % 60).toInt
}
/**
@@ -428,7 +430,7 @@ object DateTimeUtils {
* microseconds.
*/
def getSeconds(microsec: SQLTimestamp): Int = {
- ((absoluteMicroSecond(microsec) / MICROS_PER_SECOND) % 60).toInt
+ ((localTimestamp(microsec) / MICROS_PER_SECOND) % 60).toInt
}
private[this] def isLeapYear(year: Int): Boolean = {
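To summarize the change above: `getSeconds` used to read the seconds field straight off the
UTC-based `absoluteMicroSecond` value, which only matters when the default timezone's offset is
not a whole number of minutes (as with Asia/Shanghai's pre-1901 local mean time); the patch adds
a shared `localTimestamp` helper that applies the offset first, and `getHours`/`getMinutes` are
refactored to use it as well. A standalone sketch of that logic, with names and constants
mirroring the patch but reproduced here purely for illustration (the sketch takes the timezone as
a parameter instead of the cached lazy val):
```
import java.util.TimeZone

// Illustrative constant; matches the value used by DateTimeUtils.
val MICROS_PER_SECOND = 1000000L

// Apply the timezone offset before extracting time fields, as the patched
// localTimestamp helper does.
def localMicros(utcMicros: Long, tz: TimeZone): Long =
  utcMicros + tz.getOffset(utcMicros / 1000) * 1000L

// floorDiv/floorMod keep the result in 0..59 even for pre-1970 (negative) micros;
// the real code achieves this by shifting to "microseconds since year zero" instead.
def secondsOf(utcMicros: Long, tz: TimeZone): Int =
  Math.floorMod(Math.floorDiv(localMicros(utcMicros, tz), MICROS_PER_SECOND), 60L).toInt

// Why dropping the offset was wrong: for pre-1901 instants Asia/Shanghai used local
// mean time, whose offset has a non-zero sub-minute component, so ignoring the
// offset shifts the seconds field.
val shanghai = TimeZone.getTimeZone("Asia/Shanghai")
val mid1900Millis = -2195000000000L                   // roughly June 1900 in epoch milliseconds
println(shanghai.getOffset(mid1900Millis) % 60000)    // non-zero sub-minute component
```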
http://git-wip-us.apache.org/repos/asf/spark/blob/bf79a171/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index d6d860f..b35d400 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -326,7 +326,7 @@ class DateTimeUtilsSuite extends SparkFunSuite {
assert(getSeconds(c.getTimeInMillis * 1000) === 9)
}
- test("hours / miniute / seconds") {
+ test("hours / minutes / seconds") {
Seq(Timestamp.valueOf("2015-06-11 10:12:35.789"),
Timestamp.valueOf("2015-06-11 20:13:40.789"),
Timestamp.valueOf("1900-06-11 12:14:50.789"),