Github user BryanCutler commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20373#discussion_r163420127
  
    --- Diff: python/pyspark/cloudpickle.py ---
    @@ -420,20 +440,18 @@ def save_dynamic_class(self, obj):
             from global modules.
             """
             clsdict = dict(obj.__dict__)  # copy dict proxy to a dict
    -        if not isinstance(clsdict.get('__dict__', None), property):
    -            # don't extract dict that are properties
    -            clsdict.pop('__dict__', None)
    -            clsdict.pop('__weakref__', None)
    -
    -        # hack as __new__ is stored differently in the __dict__
    -        new_override = clsdict.get('__new__', None)
    -        if new_override:
    -            clsdict['__new__'] = obj.__new__
    -
    -        # namedtuple is a special case for Spark where we use the _load_namedtuple function
    -        if getattr(obj, '_is_namedtuple_', False):
    -            self.save_reduce(_load_namedtuple, (obj.__name__, obj._fields))
    -            return
    +        clsdict.pop('__weakref__', None)
    +
    +        # On PyPy, __doc__ is a readonly attribute, so we need to include it in
    +        # the initial skeleton class.  This is safe because we know that the
    +        # doc can't participate in a cycle with the original class.
    +        type_kwargs = {'__doc__': clsdict.pop('__doc__', None)}
    +
    +        # If type overrides __dict__ as a property, include it in the type kwargs.
    +        # In Python 2, we can't set this attribute after construction.
    +        __dict__ = clsdict.pop('__dict__', None)
    +        if isinstance(__dict__, property):
    +            type_kwargs['__dict__'] = __dict__
    --- End diff ---
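
    For context, a hypothetical sketch of the skeleton-class construction the new comments in this hunk describe (`make_skeleton_class` is illustrative only, not the actual cloudpickle code):

        # Illustrative sketch: read-only attributes (__doc__, and a __dict__
        # property on Python 2) must be passed to type() when the skeleton
        # class is created, because they cannot be assigned afterwards.
        def make_skeleton_class(name, bases, doc, dict_property=None):
            type_kwargs = {'__doc__': doc}
            if isinstance(dict_property, property):
                type_kwargs['__dict__'] = dict_property
            return type(name, bases, type_kwargs)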
    
    BUG: Fixes a bug when pickling namedtuples, from the upstream cloudpickle commit https://github.com/cloudpipe/cloudpickle/commit/28070bba79cf71e5719ab8d7c1d6cbc72cd95a0c
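
    For reference, the failure mode this fix addresses is round-tripping a dynamically defined namedtuple. A minimal sketch, assuming PySpark's bundled `cloudpickle` module and the standard `pickle` for loading:

        import collections
        import pickle

        from pyspark import cloudpickle  # PySpark's bundled copy

        # Defined at runtime (e.g. inside a function or the REPL), so it cannot
        # be pickled by reference to a module attribute.
        Point = collections.namedtuple('Point', ['x', 'y'])

        payload = cloudpickle.dumps(Point(1, 2))
        restored = pickle.loads(payload)
        print(restored)  # expected: Point(x=1, y=2)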


---
