Hello community,

here is the log from the commit of package python-cloudpickle for 
openSUSE:Factory checked in at 2019-02-06 15:48:18
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-cloudpickle (Old)
 and      /work/SRC/openSUSE:Factory/.python-cloudpickle.new.28833 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-cloudpickle"

Wed Feb  6 15:48:18 2019 rev:4 rq:671952 version:0.7.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-cloudpickle/python-cloudpickle.changes    
2018-07-18 22:58:07.014129736 +0200
+++ 
/work/SRC/openSUSE:Factory/.python-cloudpickle.new.28833/python-cloudpickle.changes
 2019-02-06 15:48:19.751226078 +0100
@@ -1,0 +2,29 @@
+Tue Feb  5 15:42:55 UTC 2019 - [email protected]
+
+- Update to version 0.7.0:
+  * Correctly serialize dynamically defined classes that have a __slots__
+    attribute. (issue #225)
+- Update to version 0.6.1:
+  * Fix regression in 0.6.0 which breaks the pickling of local function
+    defined in a module, making it impossible to access builtins. (issue #211)
+- Update to version 0.6.0:
+  * Ensure that unpickling a function defined in a dynamic module several
+    times sequentially does not reset the values of global variables.
+    (issue #187)
+  * Restrict the ability to pickle annotations to python3.7+ (issue #193
+    and issue #196)
+  * Stop using the deprecated imp module under Python 3. (issue #207)
+  * Fixed pickling issue with singleton types NoneType, type(...) and
+    type(NotImplemented) (issue #209)
+- Update to version 0.5.6:
+  * Ensure that unpickling a locally defined function that accesses the global
+    variables of a module does not reset the values of the global variables if
+    they are already initialized. (issue #187)
+- Update to version 0.5.5:
+  * Fixed inconsistent version in cloudpickle.__version__.
+- Update to version 0.5.4:
+  * Fixed a pickling issue for ABC in python3.7+ (issue #180).
+  * Fixed a bug when pickling functions in __main__ that access global
+    variables (issue #187).
+
+-------------------------------------------------------------------

Old:
----
  cloudpickle-0.5.3.tar.gz

New:
----
  cloudpickle-0.7.0.tar.gz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-cloudpickle.spec ++++++
--- /var/tmp/diff_new_pack.6zDuqw/_old  2019-02-06 15:48:20.339225723 +0100
+++ /var/tmp/diff_new_pack.6zDuqw/_new  2019-02-06 15:48:20.343225720 +0100
@@ -1,7 +1,7 @@
 #
 # spec file for package python-cloudpickle
 #
-# Copyright (c) 2018 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2019 SUSE LINUX GmbH, Nuernberg, Germany.
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -12,13 +12,13 @@
 # license that conforms to the Open Source Definition (Version 1.9)
 # published by the Open Source Initiative.
 
-# Please submit bugfixes or comments via http://bugs.opensuse.org/
+# Please submit bugfixes or comments via https://bugs.opensuse.org/
 #
 
 
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
 Name:           python-cloudpickle
-Version:        0.5.3
+Version:        0.7.0
 Release:        0
 Summary:        Extended pickling support for Python objects
 License:        BSD-3-Clause

++++++ cloudpickle-0.5.3.tar.gz -> cloudpickle-0.7.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/cloudpickle-0.5.3/PKG-INFO 
new/cloudpickle-0.7.0/PKG-INFO
--- old/cloudpickle-0.5.3/PKG-INFO      2018-05-14 18:24:48.000000000 +0200
+++ new/cloudpickle-0.7.0/PKG-INFO      2019-01-23 17:36:06.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: cloudpickle
-Version: 0.5.3
+Version: 0.7.0
 Summary: Extended pickling support for Python objects
 Home-page: https://github.com/cloudpipe/cloudpickle
 Author: Cloudpipe
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/cloudpickle-0.5.3/cloudpickle/__init__.py 
new/cloudpickle-0.7.0/cloudpickle/__init__.py
--- old/cloudpickle-0.5.3/cloudpickle/__init__.py       2018-05-14 
18:21:31.000000000 +0200
+++ new/cloudpickle-0.7.0/cloudpickle/__init__.py       2019-01-23 
17:34:22.000000000 +0100
@@ -2,4 +2,4 @@
 
 from cloudpickle.cloudpickle import *
 
-__version__ = '0.5.3'
+__version__ = '0.7.0'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/cloudpickle-0.5.3/cloudpickle/cloudpickle.py 
new/cloudpickle-0.7.0/cloudpickle/cloudpickle.py
--- old/cloudpickle-0.5.3/cloudpickle/cloudpickle.py    2018-05-13 
06:52:50.000000000 +0200
+++ new/cloudpickle-0.7.0/cloudpickle/cloudpickle.py    2019-01-23 
17:32:14.000000000 +0100
@@ -44,7 +44,7 @@
 
 import dis
 from functools import partial
-import imp
+import importlib
 import io
 import itertools
 import logging
@@ -57,7 +57,6 @@
 import types
 import weakref
 
-
 # cloudpickle is meant for inter process communication: we expect all
 # communicating processes to run the same Python version hence we favor
 # communication speed over compatibility:
@@ -70,14 +69,32 @@
         from cStringIO import StringIO
     except ImportError:
         from StringIO import StringIO
+    string_types = (basestring,)  # noqa
     PY3 = False
 else:
     types.ClassType = type
     from pickle import _Pickler as Pickler
     from io import BytesIO as StringIO
+    string_types = (str,)
     PY3 = True
 
 
+# Container for the global namespace to ensure consistent unpickling of
+# functions defined in dynamic modules (modules not registed in sys.modules).
+_dynamic_modules_globals = weakref.WeakValueDictionary()
+
+
+class _DynamicModuleFuncGlobals(dict):
+    """Global variables referenced by a function defined in a dynamic module
+
+    To avoid leaking references we store such context in a WeakValueDictionary
+    instance.  However instances of python builtin types such as dict cannot
+    be used directly as values in such a construct, hence the need for a
+    derived class.
+    """
+    pass
+
+
 def _make_cell_set_template_code():
     """Get the Python compiler to emit LOAD_FAST(arg); STORE_DEREF
 
@@ -96,7 +113,7 @@
 
            return _stub
 
-        _cell_set_template_code = f()
+        _cell_set_template_code = f().__code__
 
     This function is _only_ a LOAD_FAST(arg); STORE_DEREF, but that is
     invalid syntax on Python 2. If we use this function we also don't need
@@ -257,8 +274,6 @@
         if protocol is None:
             protocol = DEFAULT_PROTOCOL
         Pickler.__init__(self, file, protocol=protocol)
-        # set of modules to unpickle
-        self.modules = set()
         # map ids to dictionary. used to ensure that functions can share 
global env
         self.globals_ref = {}
 
@@ -288,20 +303,9 @@
         """
         Save a module as an import
         """
-        mod_name = obj.__name__
-        # If module is successfully found then it is not a dynamically created 
module
-        if hasattr(obj, '__file__'):
-            is_dynamic = False
-        else:
-            try:
-                _find_module(mod_name)
-                is_dynamic = False
-            except ImportError:
-                is_dynamic = True
-
-        self.modules.add(obj)
-        if is_dynamic:
-            self.save_reduce(dynamic_subimport, (obj.__name__, vars(obj)), 
obj=obj)
+        if _is_dynamic(obj):
+            self.save_reduce(dynamic_subimport, (obj.__name__, vars(obj)),
+                             obj=obj)
         else:
             self.save_reduce(subimport, (obj.__name__,), obj=obj)
 
@@ -378,7 +382,6 @@
             lookedup_by_name = None
 
         if themodule:
-            self.modules.add(themodule)
             if lookedup_by_name is obj:
                 return self.save_global(obj, name)
 
@@ -428,9 +431,33 @@
 
     def _save_subimports(self, code, top_level_dependencies):
         """
-        Ensure de-pickler imports any package child-modules that
-        are needed by the function
+        Save submodules used by a function but not listed in its globals.
+
+        In the example below:
+
+        ```
+        import concurrent.futures
+        import cloudpickle
+
+
+        def func():
+            x = concurrent.futures.ThreadPoolExecutor
+
+
+        if __name__ == '__main__':
+            cloudpickle.dumps(func)
+        ```
+
+        the globals extracted by cloudpickle in the function's state include
+        the concurrent module, but not its submodule (here,
+        concurrent.futures), which is the module used by func.
+
+        To ensure that calling the depickled function does not raise an
+        AttributeError, this function looks for any currently loaded submodule
+        that the function uses and whose parent is present in the function
+        globals, and saves it before saving the function.
         """
+
         # check if any known dependency is an imported package
         for x in top_level_dependencies:
             if isinstance(x, types.ModuleType) and hasattr(x, '__package__') 
and x.__package__:
@@ -460,11 +487,31 @@
         clsdict = dict(obj.__dict__)  # copy dict proxy to a dict
         clsdict.pop('__weakref__', None)
 
+        # For ABCMeta in python3.7+, remove _abc_impl as it is not picklable.
+        # This is a fix which breaks the cache but this only makes the first
+        # calls to issubclass slower.
+        if "_abc_impl" in clsdict:
+            import abc
+            (registry, _, _, _) = abc._get_dump(obj)
+            clsdict["_abc_impl"] = [subclass_weakref()
+                                    for subclass_weakref in registry]
+
         # On PyPy, __doc__ is a readonly attribute, so we need to include it in
         # the initial skeleton class.  This is safe because we know that the
         # doc can't participate in a cycle with the original class.
         type_kwargs = {'__doc__': clsdict.pop('__doc__', None)}
 
+        if hasattr(obj, "__slots__"):
+            type_kwargs['__slots__'] = obj.__slots__
+            # pickle string length optimization: member descriptors of obj are
+            # created automatically from obj's __slots__ attribute, no need to
+            # save them in obj's state
+            if isinstance(obj.__slots__, string_types):
+                clsdict.pop(obj.__slots__)
+            else:
+                for k in obj.__slots__:
+                    clsdict.pop(k, None)
+
         # If type overrides __dict__ as a property, include it in the type 
kwargs.
         # In Python 2, we can't set this attribute after construction.
         __dict__ = clsdict.pop('__dict__', None)
@@ -551,9 +598,13 @@
             'globals': f_globals,
             'defaults': defaults,
             'dict': dct,
-            'module': func.__module__,
             'closure_values': closure_values,
+            'module': func.__module__,
+            'name': func.__name__,
+            'doc': func.__doc__,
         }
+        if hasattr(func, '__annotations__') and sys.version_info >= (3, 7):
+            state['annotations'] = func.__annotations__
         if hasattr(func, '__qualname__'):
             state['qualname'] = func.__qualname__
         save(state)
@@ -619,7 +670,16 @@
         # save the dict
         dct = func.__dict__
 
-        base_globals = self.globals_ref.get(id(func.__globals__), {})
+        base_globals = self.globals_ref.get(id(func.__globals__), None)
+        if base_globals is None:
+            # For functions defined in a well behaved module use
+            # vars(func.__module__) for base_globals. This is necessary to
+            # share the global variables across multiple pickled functions from
+            # this module.
+            if func.__module__ is not None:
+                base_globals = func.__module__
+            else:
+                base_globals = {}
         self.globals_ref[id(func.__globals__)] = base_globals
 
         return (code, f_globals, defaults, closure, dct, base_globals)
@@ -638,6 +698,13 @@
         The name of this method is somewhat misleading: all types get
         dispatched here.
         """
+        if obj is type(None):
+            return self.save_reduce(type, (None,), obj=obj)
+        elif obj is type(Ellipsis):
+            return self.save_reduce(type, (Ellipsis,), obj=obj)
+        elif obj is type(NotImplemented):
+            return self.save_reduce(type, (NotImplemented,), obj=obj)
+
         if obj.__module__ == "__main__":
             return self.save_dynamic_class(obj)
 
@@ -910,9 +977,8 @@
 
 
 def dynamic_subimport(name, vars):
-    mod = imp.new_module(name)
+    mod = types.ModuleType(name)
     mod.__dict__.update(vars)
-    sys.modules[name] = mod
     return mod
 
 
@@ -1024,9 +1090,19 @@
     else:
         raise ValueError('Unexpected _fill_value arguments: %r' % (args,))
 
-    func.__globals__.update(state['globals'])
+    # Only set global variables that do not exist.
+    for k, v in state['globals'].items():
+        if k not in func.__globals__:
+            func.__globals__[k] = v
+
     func.__defaults__ = state['defaults']
     func.__dict__ = state['dict']
+    if 'annotations' in state:
+        func.__annotations__ = state['annotations']
+    if 'doc' in state:
+        func.__doc__  = state['doc']
+    if 'name' in state:
+        func.__name__ = state['name']
     if 'module' in state:
         func.__module__ = state['module']
     if 'qualname' in state:
@@ -1057,6 +1133,20 @@
     """
     if base_globals is None:
         base_globals = {}
+    elif isinstance(base_globals, str):
+        base_globals_name = base_globals
+        try:
+            # First try to reuse the globals from the module containing the
+            # function. If it is not possible to retrieve it, fallback to an
+            # empty dictionary.
+            base_globals = vars(importlib.import_module(base_globals))
+        except ImportError:
+            base_globals = _dynamic_modules_globals.get(
+                    base_globals_name, None)
+            if base_globals is None:
+                base_globals = _DynamicModuleFuncGlobals()
+            _dynamic_modules_globals[base_globals_name] = base_globals
+
     base_globals['__builtins__'] = __builtins__
 
     closure = (
@@ -1072,24 +1162,44 @@
 
     See CloudPickler.save_dynamic_class for more info.
     """
+    registry = None
     for attrname, attr in class_dict.items():
-        setattr(skeleton_class, attrname, attr)
+        if attrname == "_abc_impl":
+            registry = attr
+        else:
+            setattr(skeleton_class, attrname, attr)
+    if registry is not None:
+        for subclass in registry:
+            skeleton_class.register(subclass)
+
     return skeleton_class
 
 
-def _find_module(mod_name):
+def _is_dynamic(module):
     """
-    Iterate over each part instead of calling imp.find_module directly.
-    This function is able to find submodules (e.g. scikit.tree)
+    Return True if the module is special module that cannot be imported by its
+    name.
     """
-    path = None
-    for part in mod_name.split('.'):
-        if path is not None:
-            path = [path]
-        file, path, description = imp.find_module(part, path)
-        if file is not None:
-            file.close()
-    return path, description
+    # Quick check: module that have __file__ attribute are not dynamic modules.
+    if hasattr(module, '__file__'):
+        return False
+
+    if hasattr(module, '__spec__'):
+        return module.__spec__ is None
+    else:
+        # Backward compat for Python 2
+        import imp
+        try:
+            path = None
+            for part in module.__name__.split('.'):
+                if path is not None:
+                    path = [path]
+                f, path, description = imp.find_module(part, path)
+                if f is not None:
+                    f.close()
+        except ImportError:
+            return True
+        return False
 
 
 """Constructors for 3rd party libraries
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/cloudpickle-0.5.3/cloudpickle.egg-info/PKG-INFO 
new/cloudpickle-0.7.0/cloudpickle.egg-info/PKG-INFO
--- old/cloudpickle-0.5.3/cloudpickle.egg-info/PKG-INFO 2018-05-14 
18:24:48.000000000 +0200
+++ new/cloudpickle-0.7.0/cloudpickle.egg-info/PKG-INFO 2019-01-23 
17:36:05.000000000 +0100
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: cloudpickle
-Version: 0.5.3
+Version: 0.7.0
 Summary: Extended pickling support for Python objects
 Home-page: https://github.com/cloudpipe/cloudpickle
 Author: Cloudpipe
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/cloudpickle-0.5.3/setup.py 
new/cloudpickle-0.7.0/setup.py
--- old/cloudpickle-0.5.3/setup.py      2018-05-14 18:21:31.000000000 +0200
+++ new/cloudpickle-0.7.0/setup.py      2019-01-23 17:29:10.000000000 +0100
@@ -1,14 +1,29 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+import os
+import re
 
 try:
     from setuptools import setup
 except ImportError:
     from distutils.core import setup
 
+
+# Function to parse __version__ in `cloudpickle/__init__.py`
+def find_version():
+    here = os.path.abspath(os.path.dirname(__file__))
+    with open(os.path.join(here, 'cloudpickle', '__init__.py'), 'r') as fp:
+        version_file = fp.read()
+    version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
+                              version_file, re.M)
+    if version_match:
+        return version_match.group(1)
+    raise RuntimeError("Unable to find version string.")
+
+
 dist = setup(
     name='cloudpickle',
-    version='0.5.3',
+    version=find_version(),
     description='Extended pickling support for Python objects',
     author='Cloudpipe',
     author_email='[email protected]',
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/cloudpickle-0.5.3/tests/cloudpickle_file_test.py 
new/cloudpickle-0.7.0/tests/cloudpickle_file_test.py
--- old/cloudpickle-0.5.3/tests/cloudpickle_file_test.py        2018-05-13 
06:41:45.000000000 +0200
+++ new/cloudpickle-0.7.0/tests/cloudpickle_file_test.py        2019-01-22 
14:46:57.000000000 +0100
@@ -1,13 +1,13 @@
-import unittest
-import tempfile
+from __future__ import unicode_literals
+
 import os
-import shutil
 import pickle
+import shutil
 import sys
-from io import StringIO
+import tempfile
+import unittest
 
 import pytest
-from mock import patch, mock_open
 
 import cloudpickle
 
@@ -19,7 +19,7 @@
     def setUp(self):
         self.tmpdir = tempfile.mkdtemp()
         self.tmpfilepath = os.path.join(self.tmpdir, 'testfile')
-        self.teststring = u'Hello world!'
+        self.teststring = 'Hello world!'
 
     def tearDown(self):
         shutil.rmtree(self.tmpdir)
@@ -99,18 +99,6 @@
         self.assertRaises(pickle.PicklingError,
                           lambda: cloudpickle.dumps(sys.stdin))
 
-    def NOT_WORKING_test_tty(self):
-        # FIXME: Mocking 'file' is not trivial... and fails for now
-        from sys import version_info
-        if version_info.major == 2:
-            import __builtin__ as builtins  # pylint:disable=import-error
-        else:
-            import builtins  # pylint:disable=import-error
-
-        with patch.object(builtins, 'open', mock_open(), create=True):
-            with open('foo', 'w+') as handle:
-                cloudpickle.dumps(handle)
-
 
 if __name__ == '__main__':
     unittest.main()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/cloudpickle-0.5.3/tests/cloudpickle_test.py 
new/cloudpickle-0.7.0/tests/cloudpickle_test.py
--- old/cloudpickle-0.5.3/tests/cloudpickle_test.py     2018-05-13 
06:52:50.000000000 +0200
+++ new/cloudpickle-0.7.0/tests/cloudpickle_test.py     2019-01-23 
10:51:18.000000000 +0100
@@ -4,24 +4,23 @@
 import collections
 import base64
 import functools
-import imp
-from io import BytesIO
+import io
 import itertools
 import logging
+import math
 from operator import itemgetter, attrgetter
 import pickle
 import platform
 import random
+import shutil
 import subprocess
 import sys
+import tempfile
 import textwrap
+import types
 import unittest
 import weakref
-
-try:
-    from StringIO import StringIO
-except ImportError:
-    from io import StringIO
+import os
 
 import pytest
 
@@ -41,12 +40,16 @@
     tornado = None
 
 import cloudpickle
-from cloudpickle.cloudpickle import _find_module, _make_empty_cell, cell_set
+from cloudpickle.cloudpickle import _is_dynamic
+from cloudpickle.cloudpickle import _make_empty_cell, cell_set
 
 from .testutils import subprocess_pickle_echo
 from .testutils import assert_run_python_script
 
 
+_TEST_GLOBAL_VARIABLE = "default_value"
+
+
 class RaiserOnPickle(object):
 
     def __init__(self, exc):
@@ -63,16 +66,21 @@
     return pickle.loads(cloudpickle.dumps(obj, protocol=protocol))
 
 
-class CloudPicklerTest(unittest.TestCase):
-    def setUp(self):
-        self.file_obj = StringIO()
-        self.cloudpickler = cloudpickle.CloudPickler(self.file_obj, 2)
+def _escape(raw_filepath):
+    # Ugly hack to embed filepaths in code templates for windows
+    return raw_filepath.replace("\\", r"\\\\")
 
 
 class CloudPickleTest(unittest.TestCase):
 
     protocol = cloudpickle.DEFAULT_PROTOCOL
 
+    def setUp(self):
+        self.tmpdir = tempfile.mkdtemp(prefix="tmp_cloudpickle_test_")
+
+    def tearDown(self):
+        shutil.rmtree(self.tmpdir)
+
     def test_itemgetter(self):
         d = range(10)
         getter = itemgetter(1)
@@ -107,7 +115,7 @@
     # Regression test for SPARK-3415
     def test_pickling_file_handles(self):
         out1 = sys.stderr
-        out2 = pickle.loads(cloudpickle.dumps(out1))
+        out2 = pickle.loads(cloudpickle.dumps(out1, protocol=self.protocol))
         self.assertEqual(out1, out2)
 
     def test_func_globals(self):
@@ -118,7 +126,8 @@
         global exit
         exit = Unpicklable()
 
-        self.assertRaises(Exception, lambda: cloudpickle.dumps(exit))
+        self.assertRaises(Exception, lambda: cloudpickle.dumps(
+            exit, protocol=self.protocol))
 
         def foo():
             sys.exit(0)
@@ -160,7 +169,8 @@
                          buffer_obj.tobytes())
 
     def test_lambda(self):
-        self.assertEqual(pickle_depickle(lambda: 1)(), 1)
+        self.assertEqual(
+                pickle_depickle(lambda: 1, protocol=self.protocol)(), 1)
 
     def test_nested_lambdas(self):
         a, b = 1, 2
@@ -179,10 +189,10 @@
                 return base if n <= 1 else n * g(n - 1)
             return g
 
-        g1 = pickle_depickle(f1())
+        g1 = pickle_depickle(f1(), protocol=self.protocol)
         self.assertEqual(g1(), g1)
 
-        g2 = pickle_depickle(f2(2))
+        g2 = pickle_depickle(f2(2), protocol=self.protocol)
         self.assertEqual(g2(5), 240)
 
     def test_closure_none_is_preserved(self):
@@ -229,7 +239,7 @@
 
             return g
 
-        g = pickle_depickle(f())
+        g = pickle_depickle(f(), protocol=self.protocol)
         self.assertEqual(g(), 2)
 
     def test_dynamically_generated_class_that_uses_super(self):
@@ -284,6 +294,8 @@
         LOCAL_CONSTANT = 42
 
         def some_function(x, y):
+            # Make sure the __builtins__ are not broken (see #211)
+            sum(range(10))
             return (x + y) / LOCAL_CONSTANT
 
         # pickle the function definition
@@ -337,24 +349,27 @@
 
         if np:
             # simple ufunc: np.add
-            self.assertEqual(pickle_depickle(np.add), np.add)
+            self.assertEqual(pickle_depickle(np.add, protocol=self.protocol),
+                             np.add)
         else:  # skip if numpy is not available
             pass
 
         if spp:
             # custom ufunc: scipy.special.iv
-            self.assertEqual(pickle_depickle(spp.iv), spp.iv)
+            self.assertEqual(pickle_depickle(spp.iv, protocol=self.protocol),
+                             spp.iv)
         else:  # skip if scipy is not available
             pass
 
     def test_loads_namespace(self):
         obj = 1, 2, 3, 4
-        returned_obj = cloudpickle.loads(cloudpickle.dumps(obj))
+        returned_obj = cloudpickle.loads(cloudpickle.dumps(
+            obj, protocol=self.protocol))
         self.assertEqual(obj, returned_obj)
 
     def test_load_namespace(self):
         obj = 1, 2, 3, 4
-        bio = BytesIO()
+        bio = io.BytesIO()
         cloudpickle.dump(obj, bio)
         bio.seek(0)
         returned_obj = cloudpickle.load(bio)
@@ -398,7 +413,7 @@
             def f(self, x):
                 return x + 1
 
-        g = pickle_depickle(F.f)
+        g = pickle_depickle(F.f, protocol=self.protocol)
         self.assertEqual(g.__name__, F.f.__name__)
         if sys.version_info[0] < 3:
             self.assertEqual(g.im_class.__name__, F.f.im_class.__name__)
@@ -409,7 +424,7 @@
         self.assertEqual(pickle, pickle_clone)
 
     def test_dynamic_module(self):
-        mod = imp.new_module('mod')
+        mod = types.ModuleType('mod')
         code = '''
         x = 1
         def f(y):
@@ -436,16 +451,199 @@
         mod1, mod2 = pickle_depickle([mod, mod])
         self.assertEqual(id(mod1), id(mod2))
 
-    def test_find_module(self):
-        import pickle  # ensure this test is decoupled from global imports
-        _find_module('pickle')
-
-        with pytest.raises(ImportError):
-            _find_module('invalid_module')
-
-        with pytest.raises(ImportError):
-            valid_module = imp.new_module('valid_module')
-            _find_module('valid_module')
+    def test_dynamic_modules_globals(self):
+        # _dynamic_modules_globals is a WeakValueDictionary, so if a value
+        # in this dict (containing a set of global variables from a dynamic
+        # module created in the parent process) has no other reference than in
+        # this dict in the child process, it will be garbage collected.
+
+        # We first create a module
+        mod = types.ModuleType('mod')
+        code = '''
+        x = 1
+        def func():
+            return
+        '''
+        exec(textwrap.dedent(code), mod.__dict__)
+
+        pickled_module_path = os.path.join(self.tmpdir, 'mod_f.pkl')
+        child_process_script = '''
+        import pickle
+        from cloudpickle.cloudpickle import _dynamic_modules_globals
+        import gc
+        with open("{pickled_module_path}", 'rb') as f:
+            func = pickle.load(f)
+
+        # A dictionnary storing the globals of the newly unpickled function
+        # should have been created
+        assert list(_dynamic_modules_globals.keys()) == ['mod']
+
+        # func.__globals__ is the only non-weak reference to
+        # _dynamic_modules_globals['mod']. By deleting func, we delete also
+        # _dynamic_modules_globals['mod']
+        del func
+        gc.collect()
+
+        # There is no reference to the globals of func since func has been
+        # deleted and _dynamic_modules_globals is a WeakValueDictionary,
+        # so _dynamic_modules_globals should now be empty
+        assert list(_dynamic_modules_globals.keys()) == []
+        '''
+
+        child_process_script = child_process_script.format(
+                pickled_module_path=_escape(pickled_module_path))
+
+        try:
+            with open(pickled_module_path, 'wb') as f:
+                cloudpickle.dump(mod.func, f, protocol=self.protocol)
+
+            assert_run_python_script(textwrap.dedent(child_process_script))
+
+        finally:
+            os.unlink(pickled_module_path)
+
+    def test_module_locals_behavior(self):
+        # Makes sure that a local function defined in another module is
+        # correctly serialized. This notably checks that the globals are
+        # accessible and that there is no issue with the builtins (see #211)
+
+        pickled_func_path = os.path.join(self.tmpdir, 'local_func_g.pkl')
+
+        child_process_script = '''
+        import pickle
+        import gc
+        with open("{pickled_func_path}", 'rb') as f:
+            func = pickle.load(f)
+
+        assert func(range(10)) == 45
+        '''
+
+        child_process_script = child_process_script.format(
+                pickled_func_path=_escape(pickled_func_path))
+
+        try:
+
+            from .testutils import make_local_function
+
+            g = make_local_function()
+            with open(pickled_func_path, 'wb') as f:
+                cloudpickle.dump(g, f, protocol=self.protocol)
+
+            assert_run_python_script(textwrap.dedent(child_process_script))
+
+        finally:
+            os.unlink(pickled_func_path)
+
+    def test_load_dynamic_module_in_grandchild_process(self):
+        # Make sure that when loaded, a dynamic module preserves its dynamic
+        # property. Otherwise, this will lead to an ImportError if pickled in
+        # the child process and reloaded in another one.
+
+        # We create a new dynamic module
+        mod = types.ModuleType('mod')
+        code = '''
+        x = 1
+        '''
+        exec(textwrap.dedent(code), mod.__dict__)
+
+        # This script will be ran in a separate child process. It will import
+        # the pickled dynamic module, and then re-pickle it under a new name.
+        # Finally, it will create a child process that will load the re-pickled
+        # dynamic module.
+        parent_process_module_file = os.path.join(
+            self.tmpdir, 'dynamic_module_from_parent_process.pkl')
+        child_process_module_file = os.path.join(
+            self.tmpdir, 'dynamic_module_from_child_process.pkl')
+        child_process_script = '''
+            import pickle
+            import textwrap
+
+            import cloudpickle
+            from testutils import assert_run_python_script
+
+
+            child_of_child_process_script = {child_of_child_process_script}
+
+            with open('{parent_process_module_file}', 'rb') as f:
+                mod = pickle.load(f)
+
+            with open('{child_process_module_file}', 'wb') as f:
+                cloudpickle.dump(mod, f, protocol={protocol})
+
+            
assert_run_python_script(textwrap.dedent(child_of_child_process_script))
+            '''
+
+        # The script ran by the process created by the child process
+        child_of_child_process_script = """ '''
+                import pickle
+                with open('{child_process_module_file}','rb') as fid:
+                    mod = pickle.load(fid)
+                ''' """
+
+        # Filling the two scripts with the pickled modules filepaths and,
+        # for the first child process, the script to be executed by its
+        # own child process.
+        child_of_child_process_script = child_of_child_process_script.format(
+                child_process_module_file=child_process_module_file)
+
+        child_process_script = child_process_script.format(
+            parent_process_module_file=_escape(parent_process_module_file),
+            child_process_module_file=_escape(child_process_module_file),
+            
child_of_child_process_script=_escape(child_of_child_process_script),
+            protocol=self.protocol)
+
+        try:
+            with open(parent_process_module_file, 'wb') as fid:
+                cloudpickle.dump(mod, fid, protocol=self.protocol)
+
+            assert_run_python_script(textwrap.dedent(child_process_script))
+
+        finally:
+            # Remove temporary created files
+            if os.path.exists(parent_process_module_file):
+                os.unlink(parent_process_module_file)
+            if os.path.exists(child_process_module_file):
+                os.unlink(child_process_module_file)
+
+    def test_correct_globals_import(self):
+        def nested_function(x):
+            return x + 1
+
+        def unwanted_function(x):
+            return math.exp(x)
+
+        def my_small_function(x, y):
+            return nested_function(x) + y
+
+        b = cloudpickle.dumps(my_small_function, protocol=self.protocol)
+
+        # Make sure that the pickle byte string only includes the definition
+        # of my_small_function and its dependency nested_function while
+        # extra functions and modules such as unwanted_function and the math
+        # module are not included so as to keep the pickle payload as
+        # lightweight as possible.
+
+        assert b'my_small_function' in b
+        assert b'nested_function' in b
+
+        assert b'unwanted_function' not in b
+        assert b'math' not in b
+
+    def test_is_dynamic_module(self):
+        import pickle  # decouple this test from global imports
+        import os.path
+        import distutils
+        import distutils.ccompiler
+
+        assert not _is_dynamic(pickle)
+        assert not _is_dynamic(os.path)  # fake (aliased) module
+        assert not _is_dynamic(distutils)  # package
+        assert not _is_dynamic(distutils.ccompiler)  # module in package
+
+        # user-created module without using the import machinery are also
+        # dynamic
+        dynamic_module = types.ModuleType('dynamic_module')
+        assert _is_dynamic(dynamic_module)
 
     def test_Ellipsis(self):
         self.assertEqual(Ellipsis,
@@ -455,6 +653,18 @@
         ExcClone = pickle_depickle(NotImplemented, protocol=self.protocol)
         self.assertEqual(NotImplemented, ExcClone)
 
+    def test_NoneType(self):
+        res = pickle_depickle(type(None), protocol=self.protocol)
+        self.assertEqual(type(None), res)
+
+    def test_EllipsisType(self):
+        res = pickle_depickle(type(Ellipsis), protocol=self.protocol)
+        self.assertEqual(type(Ellipsis), res)
+
+    def test_NotImplementedType(self):
+        res = pickle_depickle(type(NotImplemented), protocol=self.protocol)
+        self.assertEqual(type(NotImplemented), res)
+
     def test_builtin_function_without_module(self):
         on = object.__new__
         on_depickled = pickle_depickle(on, protocol=self.protocol)
@@ -462,7 +672,7 @@
 
         fi = itertools.chain.from_iterable
         fi_depickled = pickle_depickle(fi, protocol=self.protocol)
-        self.assertEqual(list(fi([[1, 2], [3, 4]])), [1, 2, 3, 4])
+        self.assertEqual(list(fi_depickled([[1, 2], [3, 4]])), [1, 2, 3, 4])
 
     @pytest.mark.skipif(tornado is None,
                         reason="test needs Tornado installed")
@@ -480,7 +690,7 @@
             res = yield f(0.01, y)
             raise gen.Return(res + 1)
 
-        data = cloudpickle.dumps([g, g])
+        data = cloudpickle.dumps([g, g], protocol=self.protocol)
         f = g = None
         g2, g3 = pickle.loads(data)
         self.assertTrue(g2 is g3)
@@ -508,7 +718,7 @@
         exec(textwrap.dedent(code), d, d)
         f = d['f']
         res = f()
-        data = cloudpickle.dumps([f, f])
+        data = cloudpickle.dumps([f, f], protocol=self.protocol)
         d = f = None
         f2, f3 = pickle.loads(data)
         self.assertTrue(f2 is f3)
@@ -526,7 +736,7 @@
         def example():
             x = xml.etree.ElementTree.Comment # potential AttributeError
 
-        s = cloudpickle.dumps(example)
+        s = cloudpickle.dumps(example, protocol=self.protocol)
 
         # refresh the environment, i.e., unimport the dependency
         del xml
@@ -547,7 +757,7 @@
             return example
         example = scope()
 
-        s = cloudpickle.dumps(example)
+        s = cloudpickle.dumps(example, protocol=self.protocol)
 
         # refresh the environment (unimport dependency)
         for item in list(sys.modules):
@@ -560,16 +770,14 @@
     def test_multiprocess(self):
         # running a function pickled by another process (a la dask.distributed)
         def scope():
-            import curses.textpad
             def example():
                 x = xml.etree.ElementTree.Comment
-                x = curses.textpad.Textbox
             return example
         global xml
         import xml.etree.ElementTree
         example = scope()
 
-        s = cloudpickle.dumps(example)
+        s = cloudpickle.dumps(example, protocol=self.protocol)
 
         # choose "subprocess" rather than "multiprocessing" because the latter
         # library uses fork to preserve the parent environment.
@@ -584,15 +792,15 @@
         # (unlike test_submodule)
         global etree
         def scope():
-            import curses.textpad as foobar
+            import xml.etree as foobar
             def example():
                 x = etree.Comment
-                x = foobar.Textbox
+                x = foobar.ElementTree
             return example
         example = scope()
         import xml.etree.ElementTree as etree
 
-        s = cloudpickle.dumps(example)
+        s = cloudpickle.dumps(example, protocol=self.protocol)
 
         command = ("import pickle, base64; "
                    "pickle.loads(base64.b32decode('" +
@@ -621,12 +829,12 @@
         dumped = cloudpickle.dumps(logger)
 
         code = """if 1:
-            import cloudpickle, logging
+            import base64, cloudpickle, logging
 
             logging.basicConfig(level=logging.INFO)
-            logger = cloudpickle.loads(%(dumped)r)
+            logger = cloudpickle.loads(base64.b32decode(b'{}'))
             logger.info('hello')
-            """ % locals()
+            """.format(base64.b32encode(dumped).decode('ascii'))
         proc = subprocess.Popen([sys.executable, "-c", code],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.STDOUT)
@@ -655,16 +863,22 @@
             def foo(self):
                 return 'it works!'
 
+        # This class is local so we can safely register tuple in it to verify
+        # the unpickled class also registers tuple.
+        AbstractClass.register(tuple)
+
         depickled_base = pickle_depickle(AbstractClass, protocol=self.protocol)
         depickled_class = pickle_depickle(ConcreteClass,
                                           protocol=self.protocol)
         depickled_instance = pickle_depickle(ConcreteClass())
 
+        assert issubclass(tuple, AbstractClass)
+        assert issubclass(tuple, depickled_base)
+
         self.assertEqual(depickled_class().foo(), 'it works!')
         self.assertEqual(depickled_instance.foo(), 'it works!')
 
-        # assertRaises doesn't return a contextmanager in python 2.6 :(.
-        self.failUnlessRaises(TypeError, depickled_base)
+        self.assertRaises(TypeError, depickled_base)
 
         class DepickledBaseSubclass(depickled_base):
             def foo(self):
@@ -737,7 +951,7 @@
         # serializable.
         from cloudpickle import CloudPickler
         CloudPickler.dispatch[type(py.builtin)] = CloudPickler.save_module
-        g = cloudpickle.loads(cloudpickle.dumps(f))
+        g = cloudpickle.loads(cloudpickle.dumps(f, protocol=self.protocol))
 
         result = g()
         self.assertEqual(1, result)
@@ -841,6 +1055,168 @@
         """.format(protocol=self.protocol)
         assert_run_python_script(textwrap.dedent(code))
 
+    def test_interactively_defined_global_variable(self):
+        # Check that callables defined in the __main__ module of a Python
+        # script (or jupyter kernel) correctly retrieve global variables.
+        code_template = """\
+        from testutils import subprocess_pickle_echo
+        from cloudpickle import dumps, loads
+
+        def local_clone(obj, protocol=None):
+            return loads(dumps(obj, protocol=protocol))
+
+        VARIABLE = "default_value"
+
+        def f0():
+            global VARIABLE
+            VARIABLE = "changed_by_f0"
+
+        def f1():
+            return VARIABLE
+
+        cloned_f0 = {clone_func}(f0, protocol={protocol})
+        cloned_f1 = {clone_func}(f1, protocol={protocol})
+        pickled_f1 = dumps(f1, protocol={protocol})
+
+        # Change the value of the global variable
+        cloned_f0()
+
+        # Ensure that the global variable is the same for another function
+        result_f1 = cloned_f1()
+        assert result_f1 == "changed_by_f0", result_f1
+
+        # Ensure that unpickling the global variable does not change its value
+        result_pickled_f1 = loads(pickled_f1)()
+        assert result_pickled_f1 == "changed_by_f0", result_pickled_f1
+        """
+        for clone_func in ['local_clone', 'subprocess_pickle_echo']:
+            code = code_template.format(protocol=self.protocol,
+                                        clone_func=clone_func)
+            assert_run_python_script(textwrap.dedent(code))
+
+    def test_closure_interacting_with_a_global_variable(self):
+        global _TEST_GLOBAL_VARIABLE
+        assert _TEST_GLOBAL_VARIABLE == "default_value"
+        orig_value = _TEST_GLOBAL_VARIABLE
+        try:
+            def f0():
+                global _TEST_GLOBAL_VARIABLE
+                _TEST_GLOBAL_VARIABLE = "changed_by_f0"
+
+            def f1():
+                return _TEST_GLOBAL_VARIABLE
+
+            cloned_f0 = cloudpickle.loads(cloudpickle.dumps(
+                f0, protocol=self.protocol))
+            cloned_f1 = cloudpickle.loads(cloudpickle.dumps(
+                f1, protocol=self.protocol))
+            pickled_f1 = cloudpickle.dumps(f1, protocol=self.protocol)
+
+            # Change the value of the global variable
+            cloned_f0()
+            assert _TEST_GLOBAL_VARIABLE == "changed_by_f0"
+
+            # Ensure that the global variable is the same for another function
+            result_cloned_f1 = cloned_f1()
+            assert result_cloned_f1 == "changed_by_f0", result_cloned_f1
+            assert f1() == result_cloned_f1
+
+            # Ensure that unpickling the global variable does not change its
+            # value
+            result_pickled_f1 = cloudpickle.loads(pickled_f1)()
+            assert result_pickled_f1 == "changed_by_f0", result_pickled_f1
+        finally:
+            _TEST_GLOBAL_VARIABLE = orig_value
+
+    def test_function_from_dynamic_module_with_globals_modifications(self):
+        # This test verifies that the global variable state of a function
+        # defined in a dynamic module in a child process is not reset by
+        # subsequent unpickling.
+
+        # first, we create a dynamic module in the parent process
+        mod = types.ModuleType('mod')
+        code = '''
+        GLOBAL_STATE = "initial value"
+
+        def func_defined_in_dynamic_module(v=None):
+            global GLOBAL_STATE
+            if v is not None:
+                GLOBAL_STATE = v
+            return GLOBAL_STATE
+        '''
+        exec(textwrap.dedent(code), mod.__dict__)
+
+        with_initial_globals_file = os.path.join(
+            self.tmpdir, 'function_with_initial_globals.pkl')
+        with_modified_globals_file = os.path.join(
+            self.tmpdir, 'function_with_modified_globals.pkl')
+
+        try:
+            # Simple sanity check on the function's output
+            assert mod.func_defined_in_dynamic_module() == "initial value"
+
+            # The function of mod is pickled two times, with two different
+            # values for the global variable GLOBAL_STATE.
+            # Then we launch a child process that sequentially unpickles the
+            # two functions. Those unpickled functions should share the same
+            # global variables in the child process:
+            # Once the first function gets unpickled, mod is created and
+            # tracked in the child environment. This state is preserved
+            # when unpickling the second function whatever the global variable
+            # GLOBAL_STATE's value at the time of pickling.
+
+            with open(with_initial_globals_file, 'wb') as f:
+                cloudpickle.dump(mod.func_defined_in_dynamic_module, f)
+
+            # Change the mod's global variable
+            mod.GLOBAL_STATE = 'changed value'
+
+            # At this point, mod.func_defined_in_dynamic_module()
+            # returns the updated value. Let's pickle it again.
+            assert mod.func_defined_in_dynamic_module() == 'changed value'
+            with open(with_modified_globals_file, 'wb') as f:
+                cloudpickle.dump(mod.func_defined_in_dynamic_module, f,
+                                 protocol=self.protocol)
+
+            child_process_code = """
+                import pickle
+
+                with open({with_initial_globals_file!r},'rb') as f:
+                    func_with_initial_globals = pickle.load(f)
+
+                # At this point, a module called 'mod' should exist in
+                # _dynamic_modules_globals. Further function loading
+                # will use the globals living in mod.
+
+                assert func_with_initial_globals() == 'initial value'
+
+                # Load a function with initial global variable that was
+                # pickled after a change in the global variable
+                # NOTE(review): the open() below reads the "initial globals"
+                # file again; given this comment it presumably should load the
+                # "modified globals" pickle instead -- confirm upstream.
+                with open({with_initial_globals_file!r},'rb') as f:
+                    func_with_modified_globals = pickle.load(f)
+
+                # assert that this unpickling did not modify the value of
+                # the local
+                assert func_with_modified_globals() == 'initial value'
+
+                # Update the value from the child process and check that
+                # unpickling again does not reset our change.
+                assert func_with_initial_globals('new value') == 'new value'
+                assert func_with_modified_globals() == 'new value'
+
+                with open({with_initial_globals_file!r},'rb') as f:
+                    func_with_initial_globals = pickle.load(f)
+                assert func_with_initial_globals() == 'new value'
+                assert func_with_modified_globals() == 'new value'
+            """.format(
+                with_initial_globals_file=_escape(with_initial_globals_file),
+                with_modified_globals_file=_escape(with_modified_globals_file))
+            assert_run_python_script(textwrap.dedent(child_process_code))
+
+        finally:
+            os.unlink(with_initial_globals_file)
+            os.unlink(with_modified_globals_file)
+
     @pytest.mark.skipif(sys.version_info >= (3, 0),
                         reason="hardcoded pickle bytes for 2.7")
     def test_function_pickle_compat_0_4_0(self):
@@ -852,7 +1228,7 @@
             b'\x01K\x01K\x01KCU\x04|\x00\x00Sq\x06N\x85q\x07)U\x01xq\x08\x85q'
             b'\tU\x07<stdin>q\nU\x08<lambda>q\x0bK\x01U\x00q\x0c))tq\rRq\x0eJ'
             b'\xff\xff\xff\xff}q\x0f\x87q\x10Rq\x11}q\x12N}q\x13NtR.')
-        self.assertEquals(42, cloudpickle.loads(pickled)(42))
+        self.assertEqual(42, cloudpickle.loads(pickled)(42))
 
     @pytest.mark.skipif(sys.version_info >= (3, 0),
                         reason="hardcoded pickle bytes for 2.7")
@@ -866,19 +1242,19 @@
             b'\tU\x07<stdin>q\nU\x08<lambda>q\x0bK\x01U\x00q\x0c))tq\rRq\x0eJ'
             b'\xff\xff\xff\xff}q\x0f\x87q\x10Rq\x11}q\x12N}q\x13U\x08__main__q'
             b'\x14NtR.')
-        self.assertEquals(42, cloudpickle.loads(pickled)(42))
+        self.assertEqual(42, cloudpickle.loads(pickled)(42))
 
     def test_pickle_reraise(self):
         for exc_type in [Exception, ValueError, TypeError, RuntimeError]:
             obj = RaiserOnPickle(exc_type("foo"))
             with pytest.raises((exc_type, pickle.PicklingError)):
-                cloudpickle.dumps(obj)
+                cloudpickle.dumps(obj, protocol=self.protocol)
 
     def test_unhashable_function(self):
         d = {'a': 1}
-        depickled_method = pickle_depickle(d.get)
-        self.assertEquals(depickled_method('a'), 1)
-        self.assertEquals(depickled_method('b'), None)
+        depickled_method = pickle_depickle(d.get, protocol=self.protocol)
+        self.assertEqual(depickled_method('a'), 1)
+        self.assertEqual(depickled_method('b'), None)
 
     def test_itertools_count(self):
         counter = itertools.count(1, step=2)
@@ -894,6 +1270,71 @@
         for _ in range(10):
             self.assertEqual(next(counter), next(new_counter))
 
+    def test_wraps_preserves_function_name(self):
+        from functools import wraps
+
+        def f():
+            pass
+
+        @wraps(f)
+        def g():
+            f()
+
+        f2 = pickle_depickle(g, protocol=self.protocol)
+
+        self.assertEqual(f2.__name__, f.__name__)
+
+    def test_wraps_preserves_function_doc(self):
+        from functools import wraps
+
+        def f():
+            """42"""
+            pass
+
+        @wraps(f)
+        def g():
+            f()
+
+        f2 = pickle_depickle(g, protocol=self.protocol)
+
+        self.assertEqual(f2.__doc__, f.__doc__)
+
+    @unittest.skipIf(sys.version_info < (3, 7),
+                     """This syntax won't work on py2 and pickling annotations
+                     isn't supported for py36 and below.""")
+    def test_wraps_preserves_function_annotations(self):
+        from functools import wraps
+
+        def f(x):
+            pass
+
+        f.__annotations__ = {'x': 1, 'return': float}
+
+        @wraps(f)
+        def g(x):
+            f(x)
+
+        f2 = pickle_depickle(g, protocol=self.protocol)
+
+        self.assertEqual(f2.__annotations__, f.__annotations__)
+
+    def test_instance_with_slots(self):
+        for slots in [["registered_attribute"], "registered_attribute"]:
+            class ClassWithSlots(object):
+                __slots__ = slots
+
+                def __init__(self):
+                    self.registered_attribute = 42
+
+            initial_obj = ClassWithSlots()
+            depickled_obj = pickle_depickle(
+                initial_obj, protocol=self.protocol)
+
+            for obj in [initial_obj, depickled_obj]:
+                self.assertEqual(obj.registered_attribute, 42)
+                with pytest.raises(AttributeError):
+                    obj.non_registered_attribute = 1
+
 
 class Protocol2CloudPickleTest(CloudPickleTest):
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/cloudpickle-0.5.3/tests/testutils.py 
new/cloudpickle-0.7.0/tests/testutils.py
--- old/cloudpickle-0.5.3/tests/testutils.py    2018-05-13 06:52:50.000000000 
+0200
+++ new/cloudpickle-0.7.0/tests/testutils.py    2019-01-23 16:35:51.000000000 
+0100
@@ -2,13 +2,14 @@
 import os
 import os.path as op
 import tempfile
+import base64
 from subprocess import Popen, check_output, PIPE, STDOUT, CalledProcessError
-
 from cloudpickle import dumps
 from pickle import loads
 
+TIMEOUT = 60
 try:
-    from suprocess import TimeoutExpired
+    from subprocess import TimeoutExpired
     timeout_supported = True
 except ImportError:
     # no support for timeout in Python 2
@@ -17,7 +18,31 @@
     timeout_supported = False
 
 
-def subprocess_pickle_echo(input_data, protocol=None):
+TEST_GLOBALS = "a test value"
+
+
+def make_local_function():
+    def g(x):
+        # this function checks that the globals are correctly handled and that
+        # the builtins are available
+        assert TEST_GLOBALS == "a test value"
+        return sum(range(10))
+
+    return g
+
+
+def _make_cwd_env():
+    """Helper to prepare environment for the child processes"""
+    cloudpickle_repo_folder = op.normpath(
+        op.join(op.dirname(__file__), '..'))
+    env = os.environ.copy()
+    pythonpath = "{src}{sep}tests{pathsep}{src}".format(
+        src=cloudpickle_repo_folder, sep=os.sep, pathsep=os.pathsep)
+    env['PYTHONPATH'] = pythonpath
+    return cloudpickle_repo_folder, env
+
+
+def subprocess_pickle_echo(input_data, protocol=None, timeout=TIMEOUT):
     """Echo function with a child Python process
 
     Pickle the input data into a buffer, send it to a subprocess via
@@ -29,23 +54,27 @@
 
     """
     pickled_input_data = dumps(input_data, protocol=protocol)
-    cmd = [sys.executable, __file__]  # run then pickle_echo() in __main__
-    cloudpickle_repo_folder = op.normpath(
-        op.join(op.dirname(__file__), '..'))
-    cwd = cloudpickle_repo_folder
-    pythonpath = "{src}/tests:{src}".format(src=cloudpickle_repo_folder)
-    env = {'PYTHONPATH': pythonpath}
-    proc = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, cwd=cwd, env=env)
+    # Under Windows + Python 2.7, subprocess / communicate truncate the data
+    # on some specific bytes. To avoid this issue, let's use the pure ASCII
+    # Base32 encoding to encapsulate the pickle message sent to the child
+    # process.
+    pickled_b32 = base64.b32encode(pickled_input_data)
+
+    # run then pickle_echo(protocol=protocol) in __main__:
+    cmd = [sys.executable, __file__, "--protocol", str(protocol)]
+    cwd, env = _make_cwd_env()
+    proc = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, cwd=cwd, env=env,
+                 bufsize=4096)
     try:
         comm_kwargs = {}
         if timeout_supported:
-            comm_kwargs['timeout'] = 5
-        out, err = proc.communicate(pickled_input_data, **comm_kwargs)
+            comm_kwargs['timeout'] = timeout
+        out, err = proc.communicate(pickled_b32, **comm_kwargs)
         if proc.returncode != 0 or len(err):
             message = "Subprocess returned %d: " % proc.returncode
             message += err.decode('utf-8')
             raise RuntimeError(message)
-        return loads(out)
+        return loads(base64.b32decode(out))
     except TimeoutExpired:
         proc.kill()
         out, err = proc.communicate()
@@ -53,6 +82,16 @@
         raise RuntimeError(message)
 
 
+def _read_all_bytes(stream_in, chunk_size=4096):
+    all_data = b""
+    while True:
+        data = stream_in.read(chunk_size)
+        all_data += data
+        if len(data) < chunk_size:
+            break
+    return all_data
+
+
 def pickle_echo(stream_in=None, stream_out=None, protocol=None):
     """Read a pickle from stdin and pickle it back to stdout"""
     if stream_in is None:
@@ -66,14 +105,15 @@
     if hasattr(stream_out, 'buffer'):
         stream_out = stream_out.buffer
 
-    input_bytes = stream_in.read()
+    input_bytes = base64.b32decode(_read_all_bytes(stream_in))
     stream_in.close()
-    unpickled_content = loads(input_bytes)
-    stream_out.write(dumps(unpickled_content, protocol=protocol))
+    obj = loads(input_bytes)
+    repickled_bytes = dumps(obj, protocol=protocol)
+    stream_out.write(base64.b32encode(repickled_bytes))
     stream_out.close()
 
 
-def assert_run_python_script(source_code, timeout=5):
+def assert_run_python_script(source_code, timeout=TIMEOUT):
     """Utility to help check pickleability of objects defined in __main__
 
     The script provided in the source code should return 0 and not print
@@ -85,14 +125,16 @@
         with open(source_file, 'wb') as f:
             f.write(source_code.encode('utf-8'))
         cmd = [sys.executable, source_file]
-        cloudpickle_repo_folder = op.normpath(
-            op.join(op.dirname(__file__), '..'))
-        pythonpath = "{src}/tests:{src}".format(src=cloudpickle_repo_folder)
+        cwd, env = _make_cwd_env()
         kwargs = {
-            'cwd': cloudpickle_repo_folder,
+            'cwd': cwd,
             'stderr': STDOUT,
-            'env': {'PYTHONPATH': pythonpath},
+            'env': env,
         }
+        # If coverage is running, pass the config file to the subprocess
+        coverage_rc = os.environ.get("COVERAGE_PROCESS_START")
+        if coverage_rc:
+            kwargs['env']['COVERAGE_PROCESS_START'] = coverage_rc
         if timeout_supported:
             kwargs['timeout'] = timeout
         try:
@@ -111,4 +153,5 @@
 
 
 if __name__ == '__main__':
-    pickle_echo()
+    protocol = int(sys.argv[sys.argv.index('--protocol') + 1])
+    pickle_echo(protocol=protocol)


Reply via email to