itholic commented on code in PR #45699:
URL: https://github.com/apache/spark/pull/45699#discussion_r1538570645
##########
python/pyspark/sql/connect/session.py:
##########
@@ -418,6 +426,28 @@ def createDataFrame(
            # If no schema supplied by user then get the names of columns only
            if schema is None:
                _cols = [str(x) if not isinstance(x, str) else x for x in data.columns]
+                infer_pandas_dict_as_map = (
+                    str(self.conf.get("spark.sql.execution.pandas.inferPandasDictAsMap")).lower()
+                    == "true"
+                )
+                if infer_pandas_dict_as_map:
+                    fields = []
+                    pa_schema = pa.Schema.from_pandas(data)
+                    spark_type: Union[MapType, DataType]
+                    for field in pa_schema:
+                        field_type = field.type
+                        if isinstance(field_type, pa.StructType):
+                            if len(field_type) == 0:
+                                raise PySparkValueError(
+                                    error_class="CANNOT_INFER_EMPTY_SCHEMA",
+                                    message_parameters={},
+                                )
+                            arrow_type = field_type.field(0).type
Review Comment:
If the dictionary contains values of different types, Arrow automatically tries to convert all values to the type of the first item, and raises an exception if that is not possible:
```python
>>> pdf = pd.DataFrame({"str_col": ['second'], "dict_col": [{"1": 0.7, "2": "1"}]})
>>> sdf = spark.createDataFrame(pdf)
Traceback (most recent call last):
  File "/.../spark/python/pyspark/sql/pandas/serializers.py", line 302, in _create_array
    return pa.Array.from_pandas(
  File "pyarrow/array.pxi", line 1116, in pyarrow.lib.Array.from_pandas
  File "pyarrow/array.pxi", line 340, in pyarrow.lib.array
  File "pyarrow/array.pxi", line 86, in pyarrow.lib._ndarray_to_array
  File "pyarrow/error.pxi", line 91, in pyarrow.lib.check_status
pyarrow.lib.ArrowInvalid: Could not convert '1' with type str: tried to convert to double
```
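
For reference, a minimal sketch of the same behavior with plain pandas and PyArrow, outside of Spark. This assumes the value type is forced to `double` (the type of the first struct field, which is what the new code would pick via `field_type.field(0).type`); the `pdf` data and the explicit `pa.map_(pa.string(), pa.float64())` type are illustrative only:
```python
import pandas as pd
import pyarrow as pa

# Same shape of data as above: the first dict value is a float, the second a string.
pdf = pd.DataFrame({"dict_col": [{"1": 0.7, "2": "1"}]})

# Arrow's own inference keeps per-key types, e.g. struct<1: double, 2: string>.
print(pa.Schema.from_pandas(pdf))

# Forcing a single value type (double, taken from the first struct field)
# reproduces the error, since the string '1' cannot be converted to double.
pa.Array.from_pandas(pdf["dict_col"], type=pa.map_(pa.string(), pa.float64()))
# pyarrow.lib.ArrowInvalid: Could not convert '1' with type str: tried to convert to double
```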