MaxGekk commented on code in PR #39239: URL: https://github.com/apache/spark/pull/39239#discussion_r1137571293
########## python/pyspark/sql/types.py: ########## @@ -276,7 +276,18 @@ def toInternal(self, dt: datetime.datetime) -> int: def fromInternal(self, ts: int) -> datetime.datetime: if ts is not None: # using int to avoid precision loss in float - return datetime.datetime.fromtimestamp(ts // 1000000).replace(microsecond=ts % 1000000) + return ( + datetime.datetime + # Set the time zone to UTC because the TIMESTAMP type stores timestamps + # as the number of microseconds from the epoch of 1970-01-01T00:00:00.000000Z + # in the UTC time zone. + .fromtimestamp(ts // 1000000, tz=datetime.timezone.utc) + .replace(microsecond=ts % 1000000) + # Convert to the local time zone and remove the time zone info + # to have the result type `datetime64[us]` for compatibility. + .astimezone(None) + .replace(tzinfo=None) Review Comment: Let me check this, and add a roundtrip test. -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For queries about this service, please contact Infrastructure at: us...@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org