itholic commented on code in PR #40420:
URL: https://github.com/apache/spark/pull/40420#discussion_r1310660048
##########
python/pyspark/pandas/datetimes.py:
##########
@@ -116,26 +117,55 @@ def pandas_microsecond(s) -> ps.Series[np.int32]:  # type: ignore[no-untyped-def]
def nanosecond(self) -> "ps.Series":
raise NotImplementedError()
- # TODO(SPARK-42617): Support isocalendar.week and replace it.
- # See also https://github.com/pandas-dev/pandas/pull/33595.
- @property
- def week(self) -> "ps.Series":
+ def isocalendar(self) -> "ps.DataFrame":
"""
- The week ordinal of the year.
+ Calculate year, week, and day according to the ISO 8601 standard.
- .. deprecated:: 3.4.0
- """
- warnings.warn(
- "weekofyear and week have been deprecated.",
- FutureWarning,
- )
- return self._data.spark.transform(lambda c: F.weekofyear(c).cast(LongType()))
+ .. versionadded:: 4.0.0
- @property
- def weekofyear(self) -> "ps.Series":
- return self.week
+ Returns
+ -------
+ DataFrame
+ With columns year, week and day.
- weekofyear.__doc__ = week.__doc__
+ Examples
+ --------
+ >>> dfs = ps.from_pandas(pd.date_range(start='2019-12-29', freq='D', periods=4).to_series())
+ >>> dfs.dt.isocalendar()
+             year  week  day
+ 2019-12-29  2019    52    7
+ 2019-12-30  2020     1    1
+ 2019-12-31  2020     1    2
+ 2020-01-01  2020     1    3
Review Comment:
nit: Could you add a new line between each example so they are split when displayed in the documentation?
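
For illustration, a sketch of what the requested change might look like in the Examples section (assuming the nit asks for a blank line between the two doctest statements so they render as separate blocks in the generated docs):

    Examples
    --------
    >>> dfs = ps.from_pandas(pd.date_range(start='2019-12-29', freq='D', periods=4).to_series())

    >>> dfs.dt.isocalendar()
                year  week  day
    2019-12-29  2019    52    7
    2019-12-30  2020     1    1
    2019-12-31  2020     1    2
    2020-01-01  2020     1    3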