Github user icexelloss commented on a diff in the pull request: https://github.com/apache/spark/pull/22305#discussion_r232393452 --- Diff: python/pyspark/sql/tests.py --- @@ -6323,6 +6333,33 @@ def ordered_window(self): def unpartitioned_window(self): return Window.partitionBy() + @property + def sliding_row_window(self): + return Window.partitionBy('id').orderBy('v').rowsBetween(-2, 1) + + @property + def sliding_range_window(self): + from pyspark.sql.functions import lit + return Window.partitionBy('id').orderBy('v').rangeBetween(lit(-2.0), lit(4.0)) + + @property + def growing_row_window(self): + return Window.partitionBy('id').orderBy('v').rowsBetween(Window.unboundedPreceding, 3) + + @property + def growing_range_window(self): + return Window.partitionBy('id').orderBy('v') \ + .rangeBetween(F.unboundedPreceding(), F.lit(4.0)) --- End diff -- Thanks! I fixed this to use the rangeBetween(Long, Long) API
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org