HyukjinKwon commented on a change in pull request #34389:
URL: https://github.com/apache/spark/pull/34389#discussion_r736137408
##########
File path: python/pyspark/pandas/namespace.py
##########
@@ -775,39 +795,46 @@ def read_parquet(
index_names = None
- if index_col is None and pandas_metadata:
- # Try to read pandas metadata
-
- @no_type_check
- @pandas_udf("index_col array<string>, index_names array<string>")
- def read_index_metadata(pser: pd.Series) -> pd.DataFrame:
- binary = pser.iloc[0]
-             metadata = pq.ParquetFile(pa.BufferReader(binary)).metadata.metadata
- if b"pandas" in metadata:
-                 pandas_metadata = json.loads(metadata[b"pandas"].decode("utf8"))
-                 if all(isinstance(col, str) for col in pandas_metadata["index_columns"]):
- index_col = []
- index_names = []
- for col in pandas_metadata["index_columns"]:
- index_col.append(col)
- for column in pandas_metadata["columns"]:
- if column["field_name"] == col:
- index_names.append(column["name"])
- break
- else:
- index_names.append(None)
-                     return pd.DataFrame({"index_col": [index_col], "index_names": [index_names]})
- return pd.DataFrame({"index_col": [None], "index_names": [None]})
-
- index_col, index_names = (
- default_session()
- .read.format("binaryFile")
- .load(path)
- .limit(1)
- .select(read_index_metadata("content").alias("index_metadata"))
- .select("index_metadata.*")
- .head()
+ if index_col is None:
Review comment:
could we avoid touching the other code and instead just add a simple:
```
if index_col is None:
raise_advice_warning( ...
```
alone? BTW, I think we're fine when `pandas_metadata` is specified, because
the pandas metadata contains information about the index. cc @ueshin
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]