This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 58d5dc3d573 [SPARK-41891][CONNECT][TESTS] Enable
test_add_months_function, test_array_repeat, test_dayofweek,
test_first_last_ignorenulls, test_inline, test_window_time,
test_reciprocal_trig_functions
58d5dc3d573 is described below
commit 58d5dc3d573dfbb6d21ea41d101550146756f45b
Author: Sandeep Singh <[email protected]>
AuthorDate: Thu Jan 5 16:59:19 2023 +0900
[SPARK-41891][CONNECT][TESTS] Enable test_add_months_function,
test_array_repeat, test_dayofweek, test_first_last_ignorenulls, test_inline,
test_window_time, test_reciprocal_trig_functions
### What changes were proposed in this pull request?
Enabling tests in connect/test_parity_functions.py
### Why are the changes needed?
Improved test coverage for Spark Connect function parity.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
New tests (previously skipped parity tests are now enabled and run).
Closes #39400 from techaddict/SPARK-41891.
Authored-by: Sandeep Singh <[email protected]>
Signed-off-by: Hyukjin Kwon <[email protected]>
---
.../sql/tests/connect/test_parity_functions.py | 28 ----------------------
1 file changed, 28 deletions(-)
diff --git a/python/pyspark/sql/tests/connect/test_parity_functions.py
b/python/pyspark/sql/tests/connect/test_parity_functions.py
index 78ccbd49148..3c616b5c864 100644
--- a/python/pyspark/sql/tests/connect/test_parity_functions.py
+++ b/python/pyspark/sql/tests/connect/test_parity_functions.py
@@ -44,14 +44,6 @@ class FunctionsParityTests(ReusedSQLTestCase,
FunctionsTestsMixin):
cls.spark = cls._spark.stop()
del os.environ["SPARK_REMOTE"]
- @unittest.skip("Fails in Spark Connect, should enable.")
- def test_add_months_function(self):
- super().test_add_months_function()
-
- @unittest.skip("Fails in Spark Connect, should enable.")
- def test_array_repeat(self):
- super().test_array_repeat()
-
@unittest.skip("Fails in Spark Connect, should enable.")
def test_assert_true(self):
super().test_assert_true()
@@ -68,18 +60,10 @@ class FunctionsParityTests(ReusedSQLTestCase,
FunctionsTestsMixin):
def test_date_sub_function(self):
super().test_date_sub_function()
- @unittest.skip("Fails in Spark Connect, should enable.")
- def test_dayofweek(self):
- super().test_dayofweek()
-
@unittest.skip("Fails in Spark Connect, should enable.")
def test_explode(self):
super().test_explode()
- @unittest.skip("Fails in Spark Connect, should enable.")
- def test_first_last_ignorenulls(self):
- super().test_first_last_ignorenulls()
-
@unittest.skip("Fails in Spark Connect, should enable.")
def test_function_parity(self):
super().test_function_parity()
@@ -88,10 +72,6 @@ class FunctionsParityTests(ReusedSQLTestCase,
FunctionsTestsMixin):
def test_functions_broadcast(self):
super().test_functions_broadcast()
- @unittest.skip("Fails in Spark Connect, should enable.")
- def test_inline(self):
- super().test_inline()
-
@unittest.skip("Fails in Spark Connect, should enable.")
def test_input_file_name_reset_for_rdd(self):
super().test_input_file_name_reset_for_rdd()
@@ -160,18 +140,10 @@ class FunctionsParityTests(ReusedSQLTestCase,
FunctionsTestsMixin):
def test_window_functions_without_partitionBy(self):
super().test_window_functions_without_partitionBy()
- @unittest.skip("Fails in Spark Connect, should enable.")
- def test_window_time(self):
- super().test_window_time()
-
@unittest.skip("Fails in Spark Connect, should enable.")
def test_rand_functions(self):
super().test_rand_functions()
- @unittest.skip("Fails in Spark Connect, should enable.")
- def test_reciprocal_trig_functions(self):
- super().test_reciprocal_trig_functions()
-
@unittest.skip("Fails in Spark Connect, should enable.")
def test_sampleby(self):
super().test_sampleby()
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]