HyukjinKwon commented on code in PR #40525:
URL: https://github.com/apache/spark/pull/40525#discussion_r1155889925
##########
python/pyspark/pandas/data_type_ops/num_ops.py:
##########
@@ -119,29 +127,36 @@ def radd(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
if not isinstance(right, numbers.Number):
raise TypeError("Addition can not be applied to given types.")
right = transform_boolean_operand_to_numeric(right)
+ Column: Type[GenericColumn] = ConnectColumn if is_remote() else PySparkColumn
return column_op(Column.__radd__)(left, right)
def rsub(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
_sanitize_list_like(right)
if not isinstance(right, numbers.Number):
raise TypeError("Subtraction can not be applied to given types.")
right = transform_boolean_operand_to_numeric(right)
+ Column: Type[GenericColumn] = ConnectColumn if is_remote() else PySparkColumn
return column_op(Column.__rsub__)(left, right)
def rmul(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
_sanitize_list_like(right)
if not isinstance(right, numbers.Number):
raise TypeError("Multiplication can not be applied to given types.")
right = transform_boolean_operand_to_numeric(right)
+ Column: Type[GenericColumn] = ConnectColumn if is_remote() else PySparkColumn
return column_op(Column.__rmul__)(left, right)
def rpow(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
_sanitize_list_like(right)
if not isinstance(right, numbers.Number):
raise TypeError("Exponentiation can not be applied to given types.")
- def rpow_func(left: Column, right: Any) -> Column:
- return F.when(F.lit(right == 1), right).otherwise(Column.__rpow__(left, right))
+ Column = ConnectColumn if is_remote() else PySparkColumn
+
+ def rpow_func(left: GenericColumn, right: Any) -> GenericColumn:
+ return F.when(F.lit(right == 1), right).otherwise(
+ Column.__rpow__(left, right) # type: ignore
Review Comment:
ditto. `left.__rpow__`
##########
python/pyspark/pandas/data_type_ops/num_ops.py:
##########
@@ -167,18 +182,22 @@ def abs(self, operand: IndexOpsLike) -> IndexOpsLike:
def lt(self, left: IndexOpsLike, right: Any) -> SeriesOrIndex:
_sanitize_list_like(right)
+ Column: Type[GenericColumn] = ConnectColumn if is_remote() else PySparkColumn
return column_op(Column.__lt__)(left, right)
Review Comment:
ditto for this and next three
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]