HyukjinKwon commented on PR #46417:
URL: https://github.com/apache/spark/pull/46417#issuecomment-2100237805
```
======================================================================
ERROR [9.997s]: test_map (pyspark.pandas.tests.connect.indexes.test_parity_base.IndexesParityTests)
----------------------------------------------------------------------
Traceback (most recent call last):
  File "/home/runner/work/spark/spark-3.5/python/pyspark/pandas/tests/indexes/test_base.py", line 2642, in test_map
    self.assert_eq(
  File "/home/runner/work/spark/spark-3.5/python/pyspark/testing/pandasutils.py", line 536, in assert_eq
    robj = self._to_pandas(right)
  File "/home/runner/work/spark/spark-3.5/python/pyspark/testing/pandasutils.py", line 557, in _to_pandas
    return obj.to_pandas()
  File "/home/runner/work/spark/spark-3.5/python/pyspark/pandas/indexes/base.py", line 524, in to_pandas
    return self._to_pandas()
  File "/home/runner/work/spark/spark-3.5/python/pyspark/pandas/indexes/base.py", line 530, in _to_pandas
    return self._to_internal_pandas().copy()
  File "/home/runner/work/spark/spark-3.5/python/pyspark/pandas/indexes/base.py", line 503, in _to_internal_pandas
    return self._psdf._internal.to_pandas_frame.index
  File "/home/runner/work/spark/spark-3.5/python/pyspark/pandas/utils.py", line 600, in wrapped_lazy_property
    setattr(self, attr_name, fn(self))
  File "/home/runner/work/spark/spark-3.5/python/pyspark/pandas/internal.py", line 1115, in to_pandas_frame
    pdf = sdf.toPandas()
  File "/home/runner/work/spark/spark-3.5/python/pyspark/sql/connect/dataframe.py", line 1663, in toPandas
    return self._session.client.to_pandas(query)
  File "/home/runner/work/spark/spark-3.5/python/pyspark/sql/connect/client/core.py", line 873, in to_pandas
    table, schema, metrics, observed_metrics, _ = self._execute_and_fetch(
  File "/home/runner/work/spark/spark-3.5/python/pyspark/sql/connect/client/core.py", line 1283, in _execute_and_fetch
    for response in self._execute_and_fetch_as_iterator(req):
  File "/home/runner/work/spark/spark-3.5/python/pyspark/sql/connect/client/core.py", line 1264, in _execute_and_fetch_as_iterator
    self._handle_error(error)
  File "/home/runner/work/spark/spark-3.5/python/pyspark/sql/connect/client/core.py", line 1503, in _handle_error
    self._handle_rpc_error(error)
  File "/home/runner/work/spark/spark-3.5/python/pyspark/sql/connect/client/core.py", line 1539, in _handle_rpc_error
    raise convert_exception(info, status.message) from None
pyspark.errors.exceptions.connect.PythonException:
  An exception was thrown from the Python worker. Please see the stack trace below.
Traceback (most recent call last):
  File "/home/runner/work/spark/spark/python/lib/pyspark.zip/pyspark/worker.py", line 1818, in main
    func, profiler, deserializer, serializer = read_udfs(pickleSer, infile, eval_type)
  File "/home/runner/work/spark/spark/python/lib/pyspark.zip/pyspark/worker.py", line 1728, in read_udfs
    read_single_udf(
  File "/home/runner/work/spark/spark/python/lib/pyspark.zip/pyspark/worker.py", line 783, in read_single_udf
    f, return_type = read_command(pickleSer, infile)
  File "/home/runner/work/spark/spark/python/lib/pyspark.zip/pyspark/worker_util.py", line 64, in read_command
    command = serializer._read_with_length(file)
  File "/home/runner/work/spark/spark/python/lib/pyspark.zip/pyspark/serializers.py", line 173, in _read_with_length
    return self.loads(obj)
  File "/home/runner/work/spark/spark/python/lib/pyspark.zip/pyspark/serializers.py", line 473, in loads
    return cloudpickle.loads(obj, encoding=encoding)
ModuleNotFoundError: No module named 'pandas.core.indexes.numeric'
----------------------------------------------------------------------
```
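For context (a reading of the failure, not something this log proves on its own): the worker dies while cloudpickle-loading a command that references `pandas.core.indexes.numeric`, an internal module that exists in pandas 1.x but is gone in pandas 2.x, which points to a pandas version mismatch between where the command was pickled and where the Python worker runs. A minimal sketch of that check, assuming only stock `pandas` is installed:

```python
import importlib

import pandas as pd

# 'pandas.core.indexes.numeric' (where Int64Index/Float64Index used to live)
# is importable under pandas 1.x but was removed in pandas 2.x, so a pickle
# created against pandas 1.x that references it cannot be loaded there.
print("pandas", pd.__version__)
try:
    importlib.import_module("pandas.core.indexes.numeric")
    print("module importable -> pandas 1.x-style environment")
except ModuleNotFoundError as exc:
    print("module missing -> pandas 2.x-style environment:", exc)
```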