This is an automated email from the ASF dual-hosted git repository.

kabhwan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 7dbd17d5ea23 [SPARK-50275][SS][PYTHON] Enable 
test_pandas_transform_with_state unit test
7dbd17d5ea23 is described below

commit 7dbd17d5ea23075fa0e090a6c59e0522084ccb97
Author: bogao007 <[email protected]>
AuthorDate: Mon Nov 11 17:25:20 2024 +0900

    [SPARK-50275][SS][PYTHON] Enable test_pandas_transform_with_state unit test
    
    ### What changes were proposed in this pull request?
    
    - Enabled test_pandas_transform_with_state unit test.
    - Added some sleep time between resource creation steps to fix flaky tests.
    
    ### Why are the changes needed?
    
    Improve Python test coverage.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    Test only change.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #48805 from bogao007/python-test.
    
    Authored-by: bogao007 <[email protected]>
    Signed-off-by: Jungtaek Lim <[email protected]>
---
 dev/sparktestsupport/modules.py                                     | 1 +
 python/pyspark/sql/tests/pandas/test_pandas_transform_with_state.py | 6 ++++++
 2 files changed, 7 insertions(+)

diff --git a/dev/sparktestsupport/modules.py b/dev/sparktestsupport/modules.py
index 6849ce1f3590..701ebb54dbbf 100644
--- a/dev/sparktestsupport/modules.py
+++ b/dev/sparktestsupport/modules.py
@@ -526,6 +526,7 @@ pyspark_sql = Module(
         "pyspark.sql.tests.pandas.test_pandas_grouped_map",
         "pyspark.sql.tests.pandas.test_pandas_grouped_map_with_state",
         "pyspark.sql.tests.pandas.test_pandas_map",
+        "pyspark.sql.tests.pandas.test_pandas_transform_with_state",
         "pyspark.sql.tests.test_arrow_map",
         "pyspark.sql.tests.pandas.test_pandas_udf",
         "pyspark.sql.tests.pandas.test_pandas_udf_grouped_agg",
diff --git 
a/python/pyspark/sql/tests/pandas/test_pandas_transform_with_state.py 
b/python/pyspark/sql/tests/pandas/test_pandas_transform_with_state.py
index 384920f03f1a..8901f09e9272 100644
--- a/python/pyspark/sql/tests/pandas/test_pandas_transform_with_state.py
+++ b/python/pyspark/sql/tests/pandas/test_pandas_transform_with_state.py
@@ -109,6 +109,7 @@ class TransformWithStateInPandasTestsMixin:
         input_path = tempfile.mkdtemp()
         self._prepare_test_resource1(input_path)
         if not single_batch:
+            time.sleep(2)
             self._prepare_test_resource2(input_path)
 
         df = self._build_test_df(input_path)
@@ -389,7 +390,9 @@ class TransformWithStateInPandasTestsMixin:
     def _test_transform_with_state_in_pandas_proc_timer(self, 
stateful_processor, check_results):
         input_path = tempfile.mkdtemp()
         self._prepare_test_resource3(input_path)
+        time.sleep(2)
         self._prepare_test_resource1(input_path)
+        time.sleep(2)
         self._prepare_test_resource2(input_path)
 
         df = self._build_test_df(input_path)
@@ -501,7 +504,9 @@ class TransformWithStateInPandasTestsMixin:
                 fw.write("a, 15\n")
 
         prepare_batch1(input_path)
+        time.sleep(2)
         prepare_batch2(input_path)
+        time.sleep(2)
         prepare_batch3(input_path)
 
         df = self._build_test_df(input_path)
@@ -559,6 +564,7 @@ class TransformWithStateInPandasTestsMixin:
     def _test_transform_with_state_init_state_in_pandas(self, 
stateful_processor, check_results):
         input_path = tempfile.mkdtemp()
         self._prepare_test_resource1(input_path)
+        time.sleep(2)
         self._prepare_input_data(input_path + "/text-test2.txt", [0, 3], [67, 
12])
 
         df = self._build_test_df(input_path)


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to