Hello community,
here is the log from the commit of package python-cloudpickle for openSUSE:Leap:15.2 checked in at 2020-04-05 17:07:47
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Leap:15.2/python-cloudpickle (Old)
and /work/SRC/openSUSE:Leap:15.2/.python-cloudpickle.new.3248 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-cloudpickle"
Sun Apr 5 17:07:47 2020 rev:13 rq:791358 version:1.3.0
Changes:
--------
--- /work/SRC/openSUSE:Leap:15.2/python-cloudpickle/python-cloudpickle.changes 2020-03-27 16:48:40.415952693 +0100
+++ /work/SRC/openSUSE:Leap:15.2/.python-cloudpickle.new.3248/python-cloudpickle.changes 2020-04-05 17:07:51.082263302 +0200
@@ -1,0 +2,7 @@
+Tue Mar 31 14:59:31 UTC 2020 - Marketa Calabkova <[email protected]>
+
+- Update to version 1.3.0
+ * mostly bugfix release
+ * Add support for out-of-band pickling (Python 3.8 and later).
+
+-------------------------------------------------------------------
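For reference, a minimal sketch of the out-of-band pickling mentioned in the changelog above, mirroring the test added further down in this diff (requires Python 3.8+ and pickle protocol 5; numpy is used only for illustration and is not a dependency of this package):

    import pickle
    import cloudpickle
    import numpy as np

    class LocallyDefinedClass:
        # defined outside an importable module, so cloudpickle serializes
        # the class, including its array attribute, by value
        data = np.zeros(10)

    obj = LocallyDefinedClass()
    buffers = []
    # with protocol 5, large buffers are handed to buffer_callback instead
    # of being copied into the pickle stream
    payload = cloudpickle.dumps(obj, protocol=5, buffer_callback=buffers.append)
    restored = pickle.loads(payload, buffers=buffers)
    assert (restored.data == obj.data).all()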
Old:
----
cloudpickle-1.2.2.tar.gz
New:
----
cloudpickle-1.3.0.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-cloudpickle.spec ++++++
--- /var/tmp/diff_new_pack.uOjqNJ/_old 2020-04-05 17:07:51.570263823 +0200
+++ /var/tmp/diff_new_pack.uOjqNJ/_new 2020-04-05 17:07:51.574263827 +0200
@@ -19,7 +19,7 @@
%{?!python_module:%define python_module() python-%{**} python3-%{**}}
%bcond_without python2
Name: python-cloudpickle
-Version: 1.2.2
+Version: 1.3.0
Release: 0
Summary: Extended pickling support for Python objects
License: BSD-3-Clause
++++++ cloudpickle-1.2.2.tar.gz -> cloudpickle-1.3.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cloudpickle-1.2.2/PKG-INFO new/cloudpickle-1.3.0/PKG-INFO
--- old/cloudpickle-1.2.2/PKG-INFO 2019-09-10 14:27:06.000000000 +0200
+++ new/cloudpickle-1.3.0/PKG-INFO 2020-02-10 15:30:46.000000000 +0100
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: cloudpickle
-Version: 1.2.2
+Version: 1.3.0
Summary: Extended pickling support for Python objects
Home-page: https://github.com/cloudpipe/cloudpickle
Author: Cloudpipe
@@ -8,8 +8,7 @@
License: BSD 3-Clause License
Description: # cloudpickle
- [](https://travis-ci.org/cloudpipe/cloudpickle)
+ [](https://github.com/cloudpipe/cloudpickle/actions)
[](https://codecov.io/github/cloudpipe/cloudpickle?branch=master)
`cloudpickle` makes it possible to serialize Python constructs not supported
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cloudpickle-1.2.2/README.md new/cloudpickle-1.3.0/README.md
--- old/cloudpickle-1.2.2/README.md 2019-09-10 14:19:13.000000000 +0200
+++ new/cloudpickle-1.3.0/README.md 2020-02-10 15:13:49.000000000 +0100
@@ -1,7 +1,6 @@
# cloudpickle
-[](https://travis-ci.org/cloudpipe/cloudpickle)
+[](https://github.com/cloudpipe/cloudpickle/actions)
[](https://codecov.io/github/cloudpipe/cloudpickle?branch=master)
`cloudpickle` makes it possible to serialize Python constructs not supported
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cloudpickle-1.2.2/cloudpickle/__init__.py new/cloudpickle-1.3.0/cloudpickle/__init__.py
--- old/cloudpickle-1.2.2/cloudpickle/__init__.py 2019-09-10 14:19:34.000000000 +0200
+++ new/cloudpickle-1.3.0/cloudpickle/__init__.py 2020-02-10 15:15:40.000000000 +0100
@@ -8,4 +8,4 @@
if sys.version_info[:2] >= (3, 8):
from cloudpickle.cloudpickle_fast import CloudPickler, dumps, dump
-__version__ = '1.2.2'
+__version__ = '1.3.0'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cloudpickle-1.2.2/cloudpickle/cloudpickle.py new/cloudpickle-1.3.0/cloudpickle/cloudpickle.py
--- old/cloudpickle-1.2.2/cloudpickle/cloudpickle.py 2019-09-10 14:19:13.000000000 +0200
+++ new/cloudpickle-1.3.0/cloudpickle/cloudpickle.py 2020-02-10 15:13:49.000000000 +0100
@@ -42,6 +42,7 @@
"""
from __future__ import print_function
+import abc
import dis
from functools import partial
import io
@@ -90,17 +91,18 @@
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
+ import __builtin__ as builtins
string_types = (basestring,) # noqa
PY3 = False
PY2 = True
else:
- types.ClassType = type
from pickle import _Pickler as Pickler
from io import BytesIO as StringIO
string_types = (str,)
PY3 = True
PY2 = False
from importlib._bootstrap import _find_spec
+ import builtins
_extract_code_globals_cache = weakref.WeakKeyDictionary()
@@ -151,10 +153,17 @@
module_name = getattr(obj, '__module__', None)
if module_name is not None:
return module_name
- # Protect the iteration by using a list copy of sys.modules against dynamic
- # modules that trigger imports of other modules upon calls to getattr.
- for module_name, module in list(sys.modules.items()):
- if module_name == '__main__' or module is None:
+ # Protect the iteration by using a copy of sys.modules against dynamic
+ # modules that trigger imports of other modules upon calls to getattr or
+ # other threads importing at the same time.
+ for module_name, module in sys.modules.copy().items():
+ # Some modules such as coverage can inject non-module objects inside
+ # sys.modules
+ if (
+ module_name == '__main__' or
+ module is None or
+ not isinstance(module, types.ModuleType)
+ ):
continue
try:
if _getattribute(module, name)[0] is obj:
@@ -503,6 +512,7 @@
Save a module as an import
"""
if _is_dynamic(obj):
+ obj.__dict__.pop('__builtins__', None)
self.save_reduce(dynamic_subimport, (obj.__name__, vars(obj)),
obj=obj)
else:
@@ -617,21 +627,33 @@
clsdict = _extract_class_dict(obj)
clsdict.pop('__weakref__', None)
- # For ABCMeta in python3.7+, remove _abc_impl as it is not picklable.
- # This is a fix which breaks the cache but this only makes the first
- # calls to issubclass slower.
- if "_abc_impl" in clsdict:
- import abc
- (registry, _, _, _) = abc._get_dump(obj)
- clsdict["_abc_impl"] = [subclass_weakref()
- for subclass_weakref in registry]
+ if issubclass(type(obj), abc.ABCMeta):
+ # If obj is an instance of an ABCMeta subclass, don't pickle the
+ # cache/negative caches populated during isinstance/issubclass
+ # checks, but pickle the list of registered subclasses of obj.
+ clsdict.pop('_abc_cache', None)
+ clsdict.pop('_abc_negative_cache', None)
+ clsdict.pop('_abc_negative_cache_version', None)
+ registry = clsdict.pop('_abc_registry', None)
+ if registry is None:
+ # in Python3.7+, the abc caches and registered subclasses of a
+ # class are bundled into the single _abc_impl attribute
+ clsdict.pop('_abc_impl', None)
+ (registry, _, _, _) = abc._get_dump(obj)
+
+ clsdict["_abc_impl"] = [subclass_weakref()
+ for subclass_weakref in registry]
+ else:
+ # In the above if clause, registry is a set of weakrefs -- in
+ # this case, registry is a WeakSet
+ clsdict["_abc_impl"] = [type_ for type_ in registry]
# On PyPy, __doc__ is a readonly attribute, so we need to include it in
# the initial skeleton class. This is safe because we know that the
# doc can't participate in a cycle with the original class.
type_kwargs = {'__doc__': clsdict.pop('__doc__', None)}
- if hasattr(obj, "__slots__"):
+ if "__slots__" in clsdict:
type_kwargs['__slots__'] = obj.__slots__
# pickle string length optimization: member descriptors of obj are
# created automatically from obj's __slots__ attribute, no need to
@@ -879,7 +901,8 @@
Pickler.save_global(self, obj, name=name)
dispatch[type] = save_global
- dispatch[types.ClassType] = save_global
+ if PY2:
+ dispatch[types.ClassType] = save_global
def save_instancemethod(self, obj):
# Memoization rarely is ever useful due to python bounding
@@ -1142,6 +1165,7 @@
def dynamic_subimport(name, vars):
mod = types.ModuleType(name)
mod.__dict__.update(vars)
+ mod.__dict__['__builtins__'] = builtins.__dict__
return mod
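Below is a minimal sketch (not part of the packaged diff) of the dynamic-module round trip that the __builtins__ handling above keeps working: the '__builtins__' entry is dropped before pickling and dynamic_subimport re-attaches the interpreter's builtins on load.

    import types
    import cloudpickle

    # a "dynamic" module: it exists only in memory and cannot be re-imported
    mod = types.ModuleType("dyn_mod")
    exec("f = lambda x: abs(x)", mod.__dict__)

    restored = cloudpickle.loads(cloudpickle.dumps(mod))
    # the restored module gets working builtins, so abs() still resolves
    assert restored.f(-1) == 1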
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cloudpickle-1.2.2/cloudpickle/cloudpickle_fast.py new/cloudpickle-1.3.0/cloudpickle/cloudpickle_fast.py
--- old/cloudpickle-1.2.2/cloudpickle/cloudpickle_fast.py 2019-08-02 21:50:49.000000000 +0200
+++ new/cloudpickle-1.3.0/cloudpickle/cloudpickle_fast.py 2020-02-10 15:13:49.000000000 +0100
@@ -34,7 +34,7 @@
# Shorthands similar to pickle.dump/pickle.dumps
-def dump(obj, file, protocol=None):
+def dump(obj, file, protocol=None, buffer_callback=None):
"""Serialize obj as bytes streamed into file
protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to
@@ -44,10 +44,10 @@
Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure
compatibility with older versions of Python.
"""
- CloudPickler(file, protocol=protocol).dump(obj)
+ CloudPickler(file, protocol=protocol, buffer_callback=buffer_callback).dump(obj)
-def dumps(obj, protocol=None):
+def dumps(obj, protocol=None, buffer_callback=None):
"""Serialize obj as a string of bytes allocated in memory
protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to
@@ -58,7 +58,7 @@
compatibility with older versions of Python.
"""
with io.BytesIO() as file:
- cp = CloudPickler(file, protocol=protocol)
+ cp = CloudPickler(file, protocol=protocol, buffer_callback=buffer_callback)
cp.dump(obj)
return file.getvalue()
@@ -68,7 +68,7 @@
def _class_getnewargs(obj):
type_kwargs = {}
- if hasattr(obj, "__slots__"):
+ if "__slots__" in obj.__dict__:
type_kwargs["__slots__"] = obj.__slots__
__dict__ = obj.__dict__.get('__dict__', None)
@@ -136,14 +136,16 @@
clsdict = _extract_class_dict(obj)
clsdict.pop('__weakref__', None)
- # For ABCMeta in python3.7+, remove _abc_impl as it is not picklable.
- # This is a fix which breaks the cache but this only makes the first
- # calls to issubclass slower.
- if "_abc_impl" in clsdict:
+ if issubclass(type(obj), abc.ABCMeta):
+ # If obj is an instance of an ABCMeta subclass, don't pickle the
+ # cache/negative caches populated during isinstance/issubclass
+ # checks, but pickle the list of registered subclasses of obj.
+ clsdict.pop('_abc_impl', None)
(registry, _, _, _) = abc._get_dump(obj)
clsdict["_abc_impl"] = [subclass_weakref()
for subclass_weakref in registry]
- if hasattr(obj, "__slots__"):
+
+ if "__slots__" in clsdict:
# pickle string length optimization: member descriptors of obj are
# created automatically from obj's __slots__ attribute, no need to
# save them in obj's state
@@ -274,6 +276,7 @@
def _module_reduce(obj):
if _is_dynamic(obj):
+ obj.__dict__.pop('__builtins__', None)
return dynamic_subimport, (obj.__name__, vars(obj))
else:
return subimport, (obj.__name__,)
@@ -291,6 +294,10 @@
return logging.getLogger, ()
+def _property_reduce(obj):
+ return property, (obj.fget, obj.fset, obj.fdel, obj.__doc__)
+
+
def _weakset_reduce(obj):
return weakref.WeakSet, (list(obj),)
@@ -406,6 +413,7 @@
dispatch[logging.Logger] = _logger_reduce
dispatch[logging.RootLogger] = _root_logger_reduce
dispatch[memoryview] = _memoryview_reduce
+ dispatch[property] = _property_reduce
dispatch[staticmethod] = _classmethod_reduce
dispatch[types.CellType] = _cell_reduce
dispatch[types.CodeType] = _code_reduce
@@ -415,10 +423,10 @@
dispatch[types.MappingProxyType] = _mappingproxy_reduce
dispatch[weakref.WeakSet] = _weakset_reduce
- def __init__(self, file, protocol=None):
+ def __init__(self, file, protocol=None, buffer_callback=None):
if protocol is None:
protocol = DEFAULT_PROTOCOL
- Pickler.__init__(self, file, protocol=protocol)
+ Pickler.__init__(self, file, protocol=protocol, buffer_callback=buffer_callback)
# map functions __globals__ attribute ids, to ensure that functions
# sharing the same global namespace at pickling time also share their
# global namespace at unpickling time.
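The new _property_reduce above makes property objects of dynamically defined classes picklable on the Python 3.8 fast path. A minimal sketch of the resulting behaviour, using hypothetical names:

    import cloudpickle

    class Point:
        def __init__(self, x):
            self._x = x

        @property
        def x(self):
            "read-only view of _x"
            return self._x

    p = cloudpickle.loads(cloudpickle.dumps(Point(3)))
    assert p.x == 3
    # fget/fset/fdel and the docstring are rebuilt via property(...)
    assert type(p).x.__doc__ == "read-only view of _x"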
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cloudpickle-1.2.2/cloudpickle.egg-info/PKG-INFO new/cloudpickle-1.3.0/cloudpickle.egg-info/PKG-INFO
--- old/cloudpickle-1.2.2/cloudpickle.egg-info/PKG-INFO 2019-09-10 14:27:06.000000000 +0200
+++ new/cloudpickle-1.3.0/cloudpickle.egg-info/PKG-INFO 2020-02-10 15:30:46.000000000 +0100
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: cloudpickle
-Version: 1.2.2
+Version: 1.3.0
Summary: Extended pickling support for Python objects
Home-page: https://github.com/cloudpipe/cloudpickle
Author: Cloudpipe
@@ -8,8 +8,7 @@
License: BSD 3-Clause License
Description: # cloudpickle
- [](https://travis-ci.org/cloudpipe/cloudpickle)
+ [](https://github.com/cloudpipe/cloudpickle/actions)
[](https://codecov.io/github/cloudpipe/cloudpickle?branch=master)
`cloudpickle` makes it possible to serialize Python constructs not supported
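The test changes below exercise the ABCMeta handling shown earlier: the isinstance/issubclass caches are dropped while registered virtual subclasses survive the round trip. A minimal illustration with hypothetical names:

    import abc
    import cloudpickle

    MyABC = abc.ABCMeta("MyABC", (), {})

    class Registered:
        pass

    MyABC.register(Registered)     # recorded in the abc registry, kept
    issubclass(Registered, MyABC)  # populates the abc caches, dropped

    clone = cloudpickle.loads(cloudpickle.dumps(MyABC))
    assert issubclass(Registered, clone)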
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/cloudpickle-1.2.2/tests/cloudpickle_test.py new/cloudpickle-1.3.0/tests/cloudpickle_test.py
--- old/cloudpickle-1.2.2/tests/cloudpickle_test.py 2019-09-10 14:19:13.000000000 +0200
+++ new/cloudpickle-1.3.0/tests/cloudpickle_test.py 2020-02-10 15:13:49.000000000 +0100
@@ -42,7 +42,7 @@
import cloudpickle
from cloudpickle.cloudpickle import _is_dynamic
from cloudpickle.cloudpickle import _make_empty_cell, cell_set
-from cloudpickle.cloudpickle import _extract_class_dict
+from cloudpickle.cloudpickle import _extract_class_dict, _whichmodule
from .testutils import subprocess_pickle_echo
from .testutils import assert_run_python_script
@@ -108,6 +108,28 @@
def tearDown(self):
shutil.rmtree(self.tmpdir)
+ @pytest.mark.skipif(
+ platform.python_implementation() != "CPython" or
+ (sys.version_info >= (3, 8, 0) and sys.version_info < (3, 8, 2)),
+ reason="Underlying bug fixed upstream starting Python 3.8.2")
+ def test_reducer_override_reference_cycle(self):
+ # Early versions of Python 3.8 introduced a reference cycle between a
+ # Pickler and its reducer_override method. Because a Pickler
+ # object references every object it has pickled through its memo, this
+ # cycle prevented the garbage-collection of those external pickled
+ # objects. See #327 as well as https://bugs.python.org/issue39492
+ # This bug was fixed in Python 3.8.2, but is still present using
+ # cloudpickle and Python 3.8.0/1, hence the skipif directive.
+ class MyClass:
+ pass
+
+ my_object = MyClass()
+ wr = weakref.ref(my_object)
+
+ cloudpickle.dumps(my_object)
+ del my_object
+ assert wr() is None, "'del'-ed my_object has not been collected"
+
def test_itemgetter(self):
d = range(10)
getter = itemgetter(1)
@@ -528,6 +550,46 @@
finally:
os.unlink(pickled_func_path)
+ def test_dynamic_module_with_unpicklable_builtin(self):
+ # Reproducer of https://github.com/cloudpipe/cloudpickle/issues/316
+ # Some modules such as scipy inject some unpicklable objects into the
+ # __builtins__ module, which appears in every module's __dict__ under
+ # the '__builtins__' key. In such cases, cloudpickle used to fail
+ # when pickling dynamic modules.
+ class UnpickleableObject(object):
+ def __reduce__(self):
+ raise ValueError('Unpicklable object')
+
+ mod = types.ModuleType("mod")
+
+ exec('f = lambda x: abs(x)', mod.__dict__)
+ assert mod.f(-1) == 1
+ assert '__builtins__' in mod.__dict__
+
+ unpicklable_obj = UnpickleableObject()
+ with pytest.raises(ValueError):
+ cloudpickle.dumps(unpicklable_obj)
+
+ # Emulate the behavior of scipy by injecting an unpickleable object
+ # into mod's builtins.
+ # The __builtins__ entry of mod's __dict__ can either be the
+ # __builtins__ module, or the __builtins__ module's __dict__. #316
+ # happens only in the latter case.
+ if isinstance(mod.__dict__['__builtins__'], dict):
+ mod.__dict__['__builtins__']['unpickleable_obj'] = unpicklable_obj
+ elif isinstance(mod.__dict__['__builtins__'], types.ModuleType):
+ mod.__dict__['__builtins__'].unpickleable_obj = unpicklable_obj
+
+ depickled_mod = pickle_depickle(mod, protocol=self.protocol)
+ assert '__builtins__' in depickled_mod.__dict__
+
+ if isinstance(depickled_mod.__dict__['__builtins__'], dict):
+ assert "abs" in depickled_mod.__builtins__
+ elif isinstance(
+ depickled_mod.__dict__['__builtins__'], types.ModuleType):
+ assert hasattr(depickled_mod.__builtins__, "abs")
+ assert depickled_mod.f(-1) == 1
+
def test_load_dynamic_module_in_grandchild_process(self):
# Make sure that when loaded, a dynamic module preserves its dynamic
# property. Otherwise, this will lead to an ImportError if pickled in
@@ -988,6 +1050,32 @@
depickled_descriptor = pickle_depickle(float.real)
self.assertIs(depickled_descriptor, float.real)
+ def test_abc_cache_not_pickled(self):
+ # cloudpickle issue #302: make sure that cloudpickle does not pickle
+ # the caches populated during instance/subclass checks of abc.ABCMeta
+ # instances.
+ MyClass = abc.ABCMeta('MyClass', (), {})
+
+ class MyUnrelatedClass:
+ pass
+
+ class MyRelatedClass:
+ pass
+
+ MyClass.register(MyRelatedClass)
+
+ assert not issubclass(MyUnrelatedClass, MyClass)
+ assert issubclass(MyRelatedClass, MyClass)
+
+ s = cloudpickle.dumps(MyClass)
+
+ assert b"MyUnrelatedClass" not in s
+ assert b"MyRelatedClass" in s
+
+ depickled_class = cloudpickle.loads(s)
+ assert not issubclass(MyUnrelatedClass, depickled_class)
+ assert issubclass(MyRelatedClass, depickled_class)
+
def test_abc(self):
@abc.abstractmethod
@@ -1048,35 +1136,94 @@
self.assertEqual(set(weakset), {depickled1, depickled2})
- def test_faulty_module(self):
- for module_name in ['_missing_module', None]:
- class FaultyModule(object):
- def __getattr__(self, name):
- # This throws an exception while looking up within
- # pickle.whichmodule or getattr(module, name, None)
- raise Exception()
+ def test_non_module_object_passing_whichmodule_test(self):
+ # https://github.com/cloudpipe/cloudpickle/pull/326: cloudpickle should
+ # not try to introspect non-module objects when trying to discover the
+ # module of a function/class. This happened because codecov injects
+ # tuples (and not modules) into sys.modules, but type checks were not
+ # carried out on the entries of sys.modules, causing cloudpickle to
+ # then error out in unexpected ways
+ def func(x):
+ return x ** 2
- class Foo(object):
- __module__ = module_name
+ # Trigger a loop during the execution of whichmodule(func) by
+ # explicitly setting the function's module to None
+ func.__module__ = None
- def foo(self):
- return "it works!"
+ class NonModuleObject(object):
+ def __getattr__(self, name):
+ # We whitelist func so that a _whichmodule(func, None) call returns
+ # the NonModuleObject instance if a type check on the entries
+ # of sys.modules is not carried out, but manipulating this
+ # instance thinking it really is a module later on in the
+ # pickling process of func errors out
+ if name == 'func':
+ return func
+ else:
+ raise AttributeError
+
+ non_module_object = NonModuleObject()
+
+ assert func(2) == 4
+ assert func is non_module_object.func
+
+ # Any manipulation of non_module_object relying on attribute access
+ # will raise an Exception
+ with pytest.raises(AttributeError):
+ _is_dynamic(non_module_object)
+
+ try:
+ sys.modules['NonModuleObject'] = non_module_object
- def foo():
- return "it works!"
+ func_module_name = _whichmodule(func, None)
+ assert func_module_name != 'NonModuleObject'
+ assert func_module_name is None
- foo.__module__ = module_name
+ depickled_func = pickle_depickle(func, protocol=self.protocol)
+ assert depickled_func(2) == 4
- sys.modules["_faulty_module"] = FaultyModule()
- try:
- # Test whichmodule in save_global.
- self.assertEqual(pickle_depickle(Foo()).foo(), "it works!")
-
- # Test whichmodule in save_function.
- cloned = pickle_depickle(foo, protocol=self.protocol)
- self.assertEqual(cloned(), "it works!")
- finally:
- sys.modules.pop("_faulty_module", None)
+ finally:
+ sys.modules.pop('NonModuleObject')
+
+ def test_unrelated_faulty_module(self):
+ # Check that pickling a dynamically defined function or class does not
+ # fail when introspecting the currently loaded modules in sys.modules
+ # as long as those faulty modules are unrelated to the class or
+ # function we are currently pickling.
+ for base_class in (object, types.ModuleType):
+ for module_name in ['_missing_module', None]:
+ class FaultyModule(base_class):
+ def __getattr__(self, name):
+ # This throws an exception while looking up within
+ # pickle.whichmodule or getattr(module, name, None)
+ raise Exception()
+
+ class Foo(object):
+ __module__ = module_name
+
+ def foo(self):
+ return "it works!"
+
+ def foo():
+ return "it works!"
+
+ foo.__module__ = module_name
+
+ if base_class is types.ModuleType: # noqa
+ faulty_module = FaultyModule('_faulty_module')
+ else:
+ faulty_module = FaultyModule()
+ sys.modules["_faulty_module"] = faulty_module
+
+ try:
+ # Test whichmodule in save_global.
+ self.assertEqual(pickle_depickle(Foo()).foo(), "it works!")
+
+ # Test whichmodule in save_function.
+ cloned = pickle_depickle(foo, protocol=self.protocol)
+ self.assertEqual(cloned(), "it works!")
+ finally:
+ sys.modules.pop("_faulty_module", None)
def test_dynamic_pytest_module(self):
# Test case for pull request https://github.com/cloudpipe/cloudpickle/pull/116
@@ -1119,6 +1266,52 @@
cloned = pickle_depickle(func, protocol=self.protocol)
self.assertEqual(cloned.__qualname__, func.__qualname__)
+ def test_property(self):
+ # Note that the @property decorator only has an effect on new-style
+ # classes.
+ class MyObject(object):
+ _read_only_value = 1
+ _read_write_value = 1
+
+ @property
+ def read_only_value(self):
+ "A read-only attribute"
+ return self._read_only_value
+
+ @property
+ def read_write_value(self):
+ return self._read_write_value
+
+ @read_write_value.setter
+ def read_write_value(self, value):
+ self._read_write_value = value
+
+
+
+ my_object = MyObject()
+
+ assert my_object.read_only_value == 1
+ assert MyObject.read_only_value.__doc__ == "A read-only attribute"
+
+ with pytest.raises(AttributeError):
+ my_object.read_only_value = 2
+ my_object.read_write_value = 2
+
+ depickled_obj = pickle_depickle(my_object)
+
+ assert depickled_obj.read_only_value == 1
+ assert depickled_obj.read_write_value == 2
+
+ # make sure the depickled read_only_value attribute is still read-only
+ with pytest.raises(AttributeError):
+ my_object.read_only_value = 2
+
+ # make sure the depickled read_write_value attribute is writeable
+ depickled_obj.read_write_value = 3
+ assert depickled_obj.read_write_value == 3
+ assert type(depickled_obj).read_only_value.__doc__ == "A read-only attribute"
+
+
def test_namedtuple(self):
MyTuple = collections.namedtuple('MyTuple', ['a', 'b', 'c'])
t1 = MyTuple(1, 2, 3)
@@ -1533,7 +1726,16 @@
# grown by more than a few MB as closures are garbage collected at
# the end of each remote function call.
growth = w.memsize() - reference_size
- assert growth < 1e7, growth
+
+ # For some reason, the memory growth after processing 100MB of
+ # data is ~10MB on MacOS, and ~1MB on Linux, so the upper bound on
+ # memory growth we use is only tight for MacOS. However,
+ # - 10MB is still 10x lower than the expected memory growth in case
+ # of a leak (which would be the total size of the processed data,
+ # 100MB)
+ # - the memory usage growth does not increase if using 10000
+ # iterations instead of 100 as used now (100x more data)
+ assert growth < 1.5e7, growth
""".format(protocol=self.protocol)
assert_run_python_script(code)
@@ -1666,6 +1868,17 @@
with pytest.raises(AttributeError):
obj.non_registered_attribute = 1
+ class SubclassWithSlots(ClassWithSlots):
+ def __init__(self):
+ self.unregistered_attribute = 1
+
+ obj = SubclassWithSlots()
+ s = cloudpickle.dumps(obj, protocol=self.protocol)
+ del SubclassWithSlots
+ depickled_obj = cloudpickle.loads(s)
+ assert depickled_obj.unregistered_attribute == 1
+
+
@unittest.skipIf(not hasattr(types, "MappingProxyType"),
"Old versions of Python do not have this type.")
def test_mappingproxy(self):
@@ -1876,6 +2089,22 @@
with pytest.raises(pickle.PicklingError, match='recursion'):
cloudpickle.dumps(a)
+ def test_out_of_band_buffers(self):
+ if self.protocol < 5:
+ pytest.skip("Need Pickle Protocol 5 or later")
+ np = pytest.importorskip("numpy")
+
+ class LocallyDefinedClass:
+ data = np.zeros(10)
+
+ data_instance = LocallyDefinedClass()
+ buffers = []
+ pickle_bytes = cloudpickle.dumps(data_instance, protocol=self.protocol, buffer_callback=buffers.append)
+ assert len(buffers) == 1
+ reconstructed = pickle.loads(pickle_bytes, buffers=buffers)
+ np.testing.assert_allclose(reconstructed.data, data_instance.data)
+
class Protocol2CloudPickleTest(CloudPickleTest):
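As a closing illustration of the _whichmodule hardening exercised by test_non_module_object_passing_whichmodule_test above, the sketch below uses made-up names to emulate a coverage-style tool injecting a non-module object into sys.modules:

    import sys
    import cloudpickle

    def square(x):
        return x ** 2

    # the function's __module__ gives no hint, forcing a sys.modules scan
    square.__module__ = None
    sys.modules["fake_entry"] = ("not", "a", "module")
    try:
        clone = cloudpickle.loads(cloudpickle.dumps(square))
    finally:
        del sys.modules["fake_entry"]

    assert clone(3) == 9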