This is an automated email from the ASF dual-hosted git repository.

ruifengz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new bd31a080fa0 [SPARK-41898][CONNECT][PYTHON] Window.rowsBetween, Window.rangeBetween parameters typechecking parity with pyspark
bd31a080fa0 is described below

commit bd31a080fa08b08c323c6c6921405bcd86d0a0cc
Author: Sandeep Singh <sand...@techaddict.me>
AuthorDate: Sat Jan 7 09:17:51 2023 +0800

    [SPARK-41898][CONNECT][PYTHON] Window.rowsBetween, Window.rangeBetween parameters typechecking parity with pyspark
    
    ### What changes were proposed in this pull request?
    Window.rowsBetween and Window.rangeBetween now accept bounds outside the int range, such as float("-inf") and float("+inf"); such values are clamped to Window.unboundedPreceding and Window.unboundedFollowing, matching the behavior of classic PySpark.
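    
    For example, both of the following now build a window spec in the Connect client just as they do in classic PySpark (a minimal sketch; a Spark Connect session is assumed when the spec is later applied, and the partition/order column names are illustrative):
    
        from pyspark.sql.connect.window import Window
    
        # Non-int bounds such as float("-inf") / float("inf") are no longer rejected
        # with a TypeError; they are clamped to Window.unboundedPreceding /
        # Window.unboundedFollowing.
        rows_spec = Window.partitionBy("key").orderBy("value").rowsBetween(
            float("-inf"), float("inf")
        )
        range_spec = Window.partitionBy("key").orderBy("value").rangeBetween(
            Window.unboundedPreceding, 0  # 0 = current row
        )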
    
    ### Why are the changes needed?
    Bug Fix
    
    ### Does this PR introduce _any_ user-facing change?
    Yes
    
    ### How was this patch tested?
    Existing tests (the previously skipped parity tests are re-enabled)
    
    Closes #39433 from techaddict/SPARK-41898.
    
    Authored-by: Sandeep Singh <sand...@techaddict.me>
    Signed-off-by: Ruifeng Zheng <ruife...@apache.org>
---
 python/pyspark/sql/connect/window.py                      | 10 ----------
 python/pyspark/sql/tests/connect/test_parity_functions.py | 10 ----------
 2 files changed, 20 deletions(-)

diff --git a/python/pyspark/sql/connect/window.py b/python/pyspark/sql/connect/window.py
index 24b057022bf..315d74709fb 100644
--- a/python/pyspark/sql/connect/window.py
+++ b/python/pyspark/sql/connect/window.py
@@ -148,11 +148,6 @@ class WindowSpec:
         )
 
     def rowsBetween(self, start: int, end: int) -> "WindowSpec":
-        if not isinstance(start, int):
-            raise TypeError(f"start must be a int, but got {type(start).__name__}")
-        if not isinstance(end, int):
-            raise TypeError(f"end must be a int, but got {type(end).__name__}")
-
         if start <= Window._PRECEDING_THRESHOLD:
             start = Window.unboundedPreceding
         if end >= Window._FOLLOWING_THRESHOLD:
@@ -165,11 +160,6 @@ class WindowSpec:
         )
 
     def rangeBetween(self, start: int, end: int) -> "WindowSpec":
-        if not isinstance(start, int):
-            raise TypeError(f"start must be a int, but got {type(start).__name__}")
-        if not isinstance(end, int):
-            raise TypeError(f"end must be a int, but got {type(end).__name__}")
-
         if start <= Window._PRECEDING_THRESHOLD:
             start = Window.unboundedPreceding
         if end >= Window._FOLLOWING_THRESHOLD:
diff --git a/python/pyspark/sql/tests/connect/test_parity_functions.py b/python/pyspark/sql/tests/connect/test_parity_functions.py
index f94203ac977..c5add102c6a 100644
--- a/python/pyspark/sql/tests/connect/test_parity_functions.py
+++ b/python/pyspark/sql/tests/connect/test_parity_functions.py
@@ -147,16 +147,6 @@ class FunctionsParityTests(ReusedSQLTestCase, FunctionsTestsMixin):
     def test_sorting_functions_with_column(self):
         super().test_sorting_functions_with_column()
 
-    # TODO(SPARK-41898): support float("-inf") Window.rowsBetween
-    @unittest.skip("Fails in Spark Connect, should enable.")
-    def test_window_functions(self):
-        super().test_window_functions()
-
-    # TODO(SPARK-41898): support float("-inf") Window.rowsBetween
-    @unittest.skip("Fails in Spark Connect, should enable.")
-    def test_window_functions_without_partitionBy(self):
-        super().test_window_functions_without_partitionBy()
-
     # TODO(SPARK-41907): sampleby returning wrong output
     @unittest.skip("Fails in Spark Connect, should enable.")
     def test_sampleby(self):

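The clamping retained above follows classic PySpark's Window: any bound at or below Window._PRECEDING_THRESHOLD becomes Window.unboundedPreceding, and any bound at or above Window._FOLLOWING_THRESHOLD becomes Window.unboundedFollowing. Because float("-inf") and float("inf") compare correctly against those integer thresholds, the removed isinstance checks are unnecessary. A standalone sketch of that pattern (the constants are restated here only for illustration and approximate those in pyspark.sql.window.Window):

    import sys

    # Illustrative stand-ins for Window's sentinel values and thresholds.
    JAVA_MIN_LONG = -(1 << 63)        # plays the role of Window.unboundedPreceding
    JAVA_MAX_LONG = (1 << 63) - 1     # plays the role of Window.unboundedFollowing
    PRECEDING_THRESHOLD = max(-sys.maxsize, JAVA_MIN_LONG)
    FOLLOWING_THRESHOLD = min(sys.maxsize, JAVA_MAX_LONG)

    def clamp_bounds(start, end):
        """Clamp frame bounds the way WindowSpec.rowsBetween/rangeBetween now do."""
        if start <= PRECEDING_THRESHOLD:
            start = JAVA_MIN_LONG
        if end >= FOLLOWING_THRESHOLD:
            end = JAVA_MAX_LONG
        return start, end

    print(clamp_bounds(float("-inf"), float("inf")))  # clamps to an unbounded frame
    print(clamp_bounds(-3, 3))                        # small bounds pass through unchanged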

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
