stefankandic commented on code in PR #46280:
URL: https://github.com/apache/spark/pull/46280#discussion_r1601839139
##########
python/pyspark/sql/types.py:
##########
@@ -876,30 +894,86 @@ def __init__(
         self.dataType = dataType
         self.nullable = nullable
         self.metadata = metadata or {}
+        self._collationMetadata: Optional[Dict[str, str]] = None

     def simpleString(self) -> str:
         return "%s:%s" % (self.name, self.dataType.simpleString())

     def __repr__(self) -> str:
         return "StructField('%s', %s, %s)" % (self.name, self.dataType, str(self.nullable))

+    def __eq__(self, other: Any) -> bool:
+        # since collationMetadata is lazy evaluated we should not use it in equality check
Review Comment:
I agree, recomputing this on every `json` call is annoying, but since we don't have
the safety of immutable data like in Scala, perhaps we should avoid lazy evaluation here.
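
For illustration only, a minimal sketch of the two options being weighed; this is not PySpark's actual implementation. `StructFieldSketch`, `_computeCollationMetadata`, and the two `jsonValue*` variants are hypothetical names, while `_collationMetadata` follows the diff above. The lazy variant keeps `__eq__` limited to the declared fields so the mutable cache never affects equality; the eager variant recomputes the metadata on every call and needs no cache at all:

```python
from typing import Any, Dict, Optional


class StructFieldSketch:
    """Simplified stand-in for StructField, just to illustrate the trade-off."""

    def __init__(self, name: str, dataType: Any, nullable: bool = True,
                 metadata: Optional[Dict[str, Any]] = None):
        self.name = name
        self.dataType = dataType
        self.nullable = nullable
        self.metadata = metadata or {}
        # Option A: lazily cached collation metadata (mutable state).
        self._collationMetadata: Optional[Dict[str, str]] = None

    def __eq__(self, other: Any) -> bool:
        # The cache is deliberately excluded: two otherwise-identical fields
        # must compare equal whether or not the cache has been populated yet.
        return (
            isinstance(other, StructFieldSketch)
            and self.name == other.name
            and self.dataType == other.dataType
            and self.nullable == other.nullable
            and self.metadata == other.metadata
        )

    def _computeCollationMetadata(self) -> Dict[str, str]:
        # Hypothetical helper: walk dataType and collect collation info.
        return {}

    def jsonValueLazy(self) -> Dict[str, Any]:
        # Option A: compute once and reuse; cheap on repeated calls, but the
        # cached value can go stale if dataType is mutated afterwards.
        if self._collationMetadata is None:
            self._collationMetadata = self._computeCollationMetadata()
        return {"name": self.name,
                "metadata": {**self.metadata, **self._collationMetadata}}

    def jsonValueEager(self) -> Dict[str, Any]:
        # Option B (what the comment leans toward): recompute on every call,
        # so there is no mutable cache to keep consistent or exclude from __eq__.
        return {"name": self.name,
                "metadata": {**self.metadata, **self._computeCollationMetadata()}}
```

The eager variant trades repeated computation for having no mutable state to reason about, which is the concern raised above about Python lacking Scala's immutability guarantees.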