MaxGekk commented on code in PR #48580:
URL: https://github.com/apache/spark/pull/48580#discussion_r1822786912
##########
sql/api/src/main/scala/org/apache/spark/sql/functions.scala:
##########
@@ -8073,6 +8161,16 @@ object functions {
def make_interval(years: Column): Column =
Column.fn("make_interval", years)
+ /**
+ * This is a special version of `make_interval` that performs the same operation, but returns a
+ * NULL value instead of raising an error if the interval cannot be created.
+ *
+ * @group datetime_funcs
+ * @since 4.0.0
+ */
+ def try_make_interval(): Column =
Review Comment:
Why do you need this one? In which cases does `make_interval()` fail?
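For example, is it only arithmetic overflow? Something like this, presumably (an untested sketch):

    >>> import pyspark.sql.functions as sf
    >>> df = spark.range(1)
    >>> # presumably raises an overflow error when the years are converted to months,
    >>> # while try_make_interval would return NULL:
    >>> df.select(sf.make_interval(sf.lit(2147483647), sf.lit(12))).show()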
##########
python/pyspark/sql/functions/builtin.py:
##########
@@ -21152,6 +21152,162 @@ def make_dt_interval(
return _invoke_function_over_columns("make_dt_interval", _days, _hours, _mins, _secs)
+@_try_remote_functions
+def try_make_interval(
+ years: Optional["ColumnOrName"] = None,
+ months: Optional["ColumnOrName"] = None,
+ weeks: Optional["ColumnOrName"] = None,
+ days: Optional["ColumnOrName"] = None,
+ hours: Optional["ColumnOrName"] = None,
+ mins: Optional["ColumnOrName"] = None,
+ secs: Optional["ColumnOrName"] = None,
+) -> Column:
+ """
+ This is a special version of `make_interval` that performs the same operation, but returns a
+ NULL value instead of raising an error if the interval cannot be created.
+
+ .. versionadded:: 4.0.0
+
+ Parameters
+ ----------
+ years : :class:`~pyspark.sql.Column` or str, optional
+ The number of years, positive or negative.
+ months : :class:`~pyspark.sql.Column` or str, optional
+ The number of months, positive or negative.
+ weeks : :class:`~pyspark.sql.Column` or str, optional
+ The number of weeks, positive or negative.
+ days : :class:`~pyspark.sql.Column` or str, optional
+ The number of days, positive or negative.
+ hours : :class:`~pyspark.sql.Column` or str, optional
+ The number of hours, positive or negative.
+ mins : :class:`~pyspark.sql.Column` or str, optional
+ The number of minutes, positive or negative.
+ secs : :class:`~pyspark.sql.Column` or str, optional
+ The number of seconds with the fractional part in microsecond precision.
+
+ Returns
+ -------
+ :class:`~pyspark.sql.Column`
+ A new column that contains an interval.
+
+ Examples
+ --------
+
+ Example 1: Try to make an interval from years, months, weeks, days, hours, mins and secs.
+
+ >>> import pyspark.sql.functions as sf
+ >>> df = spark.createDataFrame([[100, 11, 1, 1, 12, 30, 01.001001]],
+ ... ["year", "month", "week", "day", "hour", "min", "sec"])
+ >>> df.select(sf.try_make_interval(
+ ... df.year, df.month, df.week, df.day, df.hour, df.min, df.sec)
+ ... ).show(truncate=False)
+ +---------------------------------------------------------------+
+ |try_make_interval(year, month, week, day, hour, min, sec) |
+ +---------------------------------------------------------------+
+ |100 years 11 months 8 days 12 hours 30 minutes 1.001001 seconds|
+ +---------------------------------------------------------------+
+
+ Example 2: Try to make an interval from years, months, weeks, days, hours and mins.
+
+ >>> import pyspark.sql.functions as sf
+ >>> df = spark.createDataFrame([[100, 11, 1, 1, 12, 30, 01.001001]],
+ ... ["year", "month", "week", "day", "hour", "min", "sec"])
+ >>> df.select(sf.try_make_interval(
+ ... df.year, df.month, df.week, df.day, df.hour, df.min)
+ ... ).show(truncate=False)
+ +-------------------------------------------------------+
+ |try_make_interval(year, month, week, day, hour, min, 0)|
+ +-------------------------------------------------------+
+ |100 years 11 months 8 days 12 hours 30 minutes |
+ +-------------------------------------------------------+
+
+ Example 3: Try to make an interval from years, months, weeks, days and hours.
+
+ >>> import pyspark.sql.functions as sf
+ >>> df = spark.createDataFrame([[100, 11, 1, 1, 12, 30, 01.001001]],
+ ... ["year", "month", "week", "day", "hour", "min", "sec"])
+ >>> df.select(sf.try_make_interval(
+ ... df.year, df.month, df.week, df.day, df.hour)
+ ... ).show(truncate=False)
+ +-----------------------------------------------------+
+ |try_make_interval(year, month, week, day, hour, 0, 0)|
+ +-----------------------------------------------------+
+ |100 years 11 months 8 days 12 hours |
+ +-----------------------------------------------------+
+
+ Example 4: Try to make an interval from years, months, weeks and days.
+
+ >>> import pyspark.sql.functions as sf
+ >>> df = spark.createDataFrame([[100, 11, 1, 1, 12, 30, 01.001001]],
+ ... ["year", "month", "week", "day", "hour", "min", "sec"])
+ >>> df.select(sf.try_make_interval(df.year, df.month, df.week, df.day)).show(truncate=False)
+ +--------------------------------------------------+
+ |try_make_interval(year, month, week, day, 0, 0, 0)|
+ +--------------------------------------------------+
+ |100 years 11 months 8 days |
+ +--------------------------------------------------+
+
+ Example 5: Try to make an interval from years, months and weeks.
+
+ >>> import pyspark.sql.functions as sf
+ >>> df = spark.createDataFrame([[100, 11, 1, 1, 12, 30, 01.001001]],
+ ... ["year", "month", "week", "day", "hour", "min", "sec"])
+ >>> df.select(sf.try_make_interval(df.year, df.month, df.week)).show(truncate=False)
+ +------------------------------------------------+
+ |try_make_interval(year, month, week, 0, 0, 0, 0)|
+ +------------------------------------------------+
+ |100 years 11 months 7 days |
+ +------------------------------------------------+
+
+ Example 6: Try to make an interval from years and months.
+
+ >>> import pyspark.sql.functions as sf
+ >>> df = spark.createDataFrame([[100, 11, 1, 1, 12, 30, 01.001001]],
+ ... ["year", "month", "week", "day", "hour", "min", "sec"])
+ >>> df.select(sf.try_make_interval(df.year, df.month)).show(truncate=False)
+ +---------------------------------------------+
+ |try_make_interval(year, month, 0, 0, 0, 0, 0)|
+ +---------------------------------------------+
+ |100 years 11 months |
+ +---------------------------------------------+
+
+ Example 7: Try to make an interval from years.
+
+ >>> import pyspark.sql.functions as sf
+ >>> df = spark.createDataFrame([[100, 11, 1, 1, 12, 30, 01.001001]],
+ ... ["year", "month", "week", "day", "hour", "min", "sec"])
+ >>> df.select(sf.try_make_interval(df.year)).show(truncate=False)
+ +-----------------------------------------+
+ |try_make_interval(year, 0, 0, 0, 0, 0, 0)|
+ +-----------------------------------------+
+ |100 years |
+ +-----------------------------------------+
+
+ Example 8: Try to make an interval.
Review Comment:
Could you add an example that shows the difference between the try and non-try versions?
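For instance, an overflowing input (a rough, untested sketch; the exact error class may differ):

    >>> import pyspark.sql.functions as sf
    >>> df = spark.createDataFrame([[2147483647, 12]], ["year", "month"])
    >>> df.select(sf.make_interval(df.year, df.month)).show()      # fails with an arithmetic overflow error
    >>> df.select(sf.try_make_interval(df.year, df.month)).show()  # shows NULL instead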
##########
python/pyspark/sql/functions/builtin.py:
##########
@@ -21152,6 +21152,162 @@ def make_dt_interval(
return _invoke_function_over_columns("make_dt_interval", _days, _hours, _mins, _secs)
+@_try_remote_functions
+def try_make_interval(
+ years: Optional["ColumnOrName"] = None,
+ months: Optional["ColumnOrName"] = None,
+ weeks: Optional["ColumnOrName"] = None,
+ days: Optional["ColumnOrName"] = None,
+ hours: Optional["ColumnOrName"] = None,
+ mins: Optional["ColumnOrName"] = None,
+ secs: Optional["ColumnOrName"] = None,
Review Comment:
Please fix the indentation.
##########
python/pyspark/sql/connect/functions/builtin.py:
##########
@@ -3712,6 +3712,31 @@ def make_dt_interval(
make_dt_interval.__doc__ = pysparkfuncs.make_dt_interval.__doc__
+def try_make_interval(
+ years: Optional["ColumnOrName"] = None,
+ months: Optional["ColumnOrName"] = None,
+ weeks: Optional["ColumnOrName"] = None,
+ days: Optional["ColumnOrName"] = None,
+ hours: Optional["ColumnOrName"] = None,
+ mins: Optional["ColumnOrName"] = None,
+ secs: Optional["ColumnOrName"] = None,
Review Comment:
Please fix the indentation here.
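I.e., presumably the standard four-space, one-parameter-per-line layout (illustrative only):

    def try_make_interval(
        years: Optional["ColumnOrName"] = None,
        months: Optional["ColumnOrName"] = None,
        ...
        secs: Optional["ColumnOrName"] = None,
    ) -> Column: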
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]