[Python-modules-commits] [cloudpickle] 01/07: New upstream version 0.5.2

Diane Trout diane at moszumanska.debian.org
Fri Dec 8 23:19:12 UTC 2017


This is an automated email from the git hooks/post-receive script.

diane pushed a commit to branch master
in repository cloudpickle.

commit 2789e57a35e68d9c0d535aed10ad23a9e3305711
Author: Diane Trout <diane at ghic.org>
Date:   Thu Dec 7 16:26:31 2017 -0800

    New upstream version 0.5.2
---
 PKG-INFO                       |   8 +-
 README.md                      |   3 +
 cloudpickle.egg-info/PKG-INFO  |   8 +-
 cloudpickle/__init__.py        |   2 +-
 cloudpickle/cloudpickle.py     | 335 +++++++++++++++++++++--------------------
 setup.cfg                      |   1 -
 setup.py                       |   4 +-
 tests/cloudpickle_file_test.py |   6 +-
 tests/cloudpickle_test.py      | 317 ++++++++++++++++++++++++++++++--------
 tests/testutils.py             |  60 ++++++--
 10 files changed, 490 insertions(+), 254 deletions(-)

diff --git a/PKG-INFO b/PKG-INFO
index 75e76c5..9cb2204 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,11 +1,12 @@
 Metadata-Version: 1.1
 Name: cloudpickle
-Version: 0.4.0
+Version: 0.5.2
 Summary: Extended pickling support for Python objects
 Home-page: https://github.com/cloudpipe/cloudpickle
 Author: Cloudpipe
 Author-email: cloudpipe at googlegroups.com
 License: LICENSE.txt
+Description-Content-Type: UNKNOWN
 Description: # cloudpickle
         
         [![Build Status](https://travis-ci.org/cloudpipe/cloudpickle.svg?branch=master
@@ -22,6 +23,9 @@ Description: # cloudpickle
         Among other things, `cloudpickle` supports pickling for lambda expressions,
         functions and classes defined interactively in the `__main__` module.
         
+        `cloudpickle` uses `pickle.HIGHEST_PROTOCOL` by default: it is meant to
+        send objects between processes running the same version of Python.
+        Using `cloudpickle` for long-term storage is discouraged.
         
         Installation
         ------------
@@ -104,9 +108,7 @@ Classifier: License :: OSI Approved :: BSD License
 Classifier: Operating System :: POSIX
 Classifier: Operating System :: Microsoft :: Windows
 Classifier: Operating System :: MacOS :: MacOS X
-Classifier: Programming Language :: Python :: 2.6
 Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
diff --git a/README.md b/README.md
index b6588ac..3ab9abd 100644
--- a/README.md
+++ b/README.md
@@ -14,6 +14,9 @@ close to the data.
 Among other things, `cloudpickle` supports pickling for lambda expressions,
 functions and classes defined interactively in the `__main__` module.
 
+`cloudpickle` uses `pickle.HIGHEST_PROTOCOL` by default: it is meant to
+send objects between processes running the same version of Python.
+Using `cloudpickle` for long-term storage is discouraged.
 
 Installation
 ------------
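
For illustration (this snippet is not part of the diff), the new default can be
overridden when the receiving side may run an older Python; a minimal sketch
assuming cloudpickle 0.5.2 is importable on both ends:

    import pickle
    import cloudpickle

    square = lambda x: x ** 2

    # Default: pickle.HIGHEST_PROTOCOL, fastest for same-version exchange.
    payload = cloudpickle.dumps(square)
    assert pickle.loads(payload)(3) == 9

    # Pin a lower protocol if cross-version compatibility matters more.
    portable = cloudpickle.dumps(square, protocol=2)
    assert pickle.loads(portable)(3) == 9
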
diff --git a/cloudpickle.egg-info/PKG-INFO b/cloudpickle.egg-info/PKG-INFO
index 75e76c5..9cb2204 100644
--- a/cloudpickle.egg-info/PKG-INFO
+++ b/cloudpickle.egg-info/PKG-INFO
@@ -1,11 +1,12 @@
 Metadata-Version: 1.1
 Name: cloudpickle
-Version: 0.4.0
+Version: 0.5.2
 Summary: Extended pickling support for Python objects
 Home-page: https://github.com/cloudpipe/cloudpickle
 Author: Cloudpipe
 Author-email: cloudpipe at googlegroups.com
 License: LICENSE.txt
+Description-Content-Type: UNKNOWN
 Description: # cloudpickle
         
         [![Build Status](https://travis-ci.org/cloudpipe/cloudpickle.svg?branch=master
@@ -22,6 +23,9 @@ Description: # cloudpickle
         Among other things, `cloudpickle` supports pickling for lambda expressions,
         functions and classes defined interactively in the `__main__` module.
         
+        `cloudpickle` uses `pickle.HIGHEST_PROTOCOL` by default: it is meant to
+        send objects between processes running the same version of Python.
+        Using `cloudpickle` for long-term storage is discouraged.
         
         Installation
         ------------
@@ -104,9 +108,7 @@ Classifier: License :: OSI Approved :: BSD License
 Classifier: Operating System :: POSIX
 Classifier: Operating System :: Microsoft :: Windows
 Classifier: Operating System :: MacOS :: MacOS X
-Classifier: Programming Language :: Python :: 2.6
 Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
 Classifier: Programming Language :: Python :: 3.5
 Classifier: Programming Language :: Python :: 3.6
diff --git a/cloudpickle/__init__.py b/cloudpickle/__init__.py
index 0cbfd9e..cc509ad 100644
--- a/cloudpickle/__init__.py
+++ b/cloudpickle/__init__.py
@@ -2,4 +2,4 @@ from __future__ import absolute_import
 
 from cloudpickle.cloudpickle import *
 
-__version__ = '0.4.0'
+__version__ = '0.5.2'
diff --git a/cloudpickle/cloudpickle.py b/cloudpickle/cloudpickle.py
index 17a1f95..e5aab05 100644
--- a/cloudpickle/cloudpickle.py
+++ b/cloudpickle/cloudpickle.py
@@ -58,6 +58,12 @@ import types
 import weakref
 
 
+# cloudpickle is meant for inter-process communication: we expect all
+# communicating processes to run the same Python version, hence we favor
+# communication speed over compatibility:
+DEFAULT_PROTOCOL = pickle.HIGHEST_PROTOCOL
+
+
 if sys.version < '3':
     from pickle import Pickler
     try:
@@ -180,6 +186,32 @@ def _builtin_type(name):
     return getattr(types, name)
 
 
+def _make__new__factory(type_):
+    def _factory():
+        return type_.__new__
+    return _factory
+
+
+# NOTE: These need to be module globals so that they're pickleable as globals.
+_get_dict_new = _make__new__factory(dict)
+_get_frozenset_new = _make__new__factory(frozenset)
+_get_list_new = _make__new__factory(list)
+_get_set_new = _make__new__factory(set)
+_get_tuple_new = _make__new__factory(tuple)
+_get_object_new = _make__new__factory(object)
+
+# Pre-defined set of builtin_function_or_method instances that can be
+# serialized.
+_BUILTIN_TYPE_CONSTRUCTORS = {
+    dict.__new__: _get_dict_new,
+    frozenset.__new__: _get_frozenset_new,
+    set.__new__: _get_set_new,
+    list.__new__: _get_list_new,
+    tuple.__new__: _get_tuple_new,
+    object.__new__: _get_object_new,
+}
+
+
 if sys.version_info < (3, 4):
     def _walk_global_ops(code):
         """
@@ -222,7 +254,9 @@ class CloudPickler(Pickler):
     dispatch = Pickler.dispatch.copy()
 
     def __init__(self, file, protocol=None):
-        Pickler.__init__(self, file, protocol)
+        if protocol is None:
+            protocol = DEFAULT_PROTOCOL
+        Pickler.__init__(self, file, protocol=protocol)
         # set of modules to unpickle
         self.modules = set()
         # map ids to dictionary. used to ensure that functions can share global env
@@ -238,15 +272,12 @@ class CloudPickler(Pickler):
                 raise pickle.PicklingError(msg)
 
     def save_memoryview(self, obj):
-        """Fallback to save_string"""
-        Pickler.save_string(self, str(obj))
+        self.save(obj.tobytes())
+    dispatch[memoryview] = save_memoryview
 
-    def save_buffer(self, obj):
-        """Fallback to save_string"""
-        Pickler.save_string(self,str(obj))
-    if PY3:
-        dispatch[memoryview] = save_memoryview
-    else:
+    if not PY3:
+        def save_buffer(self, obj):
+            self.save(str(obj))
         dispatch[buffer] = save_buffer
 
     def save_unsupported(self, obj):
@@ -306,6 +337,18 @@ class CloudPickler(Pickler):
         Determines what kind of function obj is (e.g. lambda, defined at
         interactive prompt, etc) and handles the pickling appropriately.
         """
+        if obj in _BUILTIN_TYPE_CONSTRUCTORS:
+            # We keep a special-cased cache of built-in type constructors at
+            # global scope, because these functions are structured very
+            # differently in different python versions and implementations (for
+            # example, they're instances of types.BuiltinFunctionType in
+            # CPython, but they're ordinary types.FunctionType instances in
+            # PyPy).
+            #
+            # If the function we've received is in that cache, we just
+            # serialize it as a lookup into the cache.
+            return self.save_reduce(_BUILTIN_TYPE_CONSTRUCTORS[obj], (), obj=obj)
+
         write = self.write
 
         if name is None:
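
A hedged sketch of what this cache buys (not part of the diff): builtin
constructors such as object.__new__ now survive a cloudpickle round trip on
both CPython and PyPy.

    import pickle
    import cloudpickle

    ctor = object.__new__
    restored = pickle.loads(cloudpickle.dumps(ctor))

    # The restored callable is the genuine constructor again.
    assert type(restored(object)) is object
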
@@ -326,13 +369,18 @@ class CloudPickler(Pickler):
         if modname == '__main__':
             themodule = None
 
+        try:
+            lookedup_by_name = getattr(themodule, name, None)
+        except Exception:
+            lookedup_by_name = None
+
         if themodule:
             self.modules.add(themodule)
-            if getattr(themodule, name, None) is obj:
+            if lookedup_by_name is obj:
                 return self.save_global(obj, name)
 
         # a builtin_function_or_method which comes in as an attribute of some
-        # object (e.g., object.__new__, itertools.chain.from_iterable) will end
+        # object (e.g., itertools.chain.from_iterable) will end
         # up with modname "__main__" and so end up here. But these functions
         # have no __code__ attribute in CPython, so the handling for
         # user-defined functions below will fail.
@@ -340,16 +388,13 @@ class CloudPickler(Pickler):
         # for different python versions.
         if not hasattr(obj, '__code__'):
             if PY3:
-                if sys.version_info < (3, 4):
-                    raise pickle.PicklingError("Can't pickle %r" % obj)
-                else:
-                    rv = obj.__reduce_ex__(self.proto)
+                rv = obj.__reduce_ex__(self.proto)
             else:
                 if hasattr(obj, '__self__'):
                     rv = (getattr, (obj.__self__, name))
                 else:
                     raise pickle.PicklingError("Can't pickle %r" % obj)
-            return Pickler.save_reduce(self, obj=obj, *rv)
+            return self.save_reduce(obj=obj, *rv)
 
         # if func is lambda, def'ed at prompt, is in main, or is nested, then
         # we'll pickle the actual function object rather than simply saving a
@@ -361,8 +406,7 @@ class CloudPickler(Pickler):
             return
         else:
             # func is nested
-            klass = getattr(themodule, name, None)
-            if klass is None or klass is not obj:
+            if lookedup_by_name is None or lookedup_by_name is not obj:
                 self.save_function_tuple(obj)
                 return
 
@@ -385,7 +429,7 @@ class CloudPickler(Pickler):
         """
         # check if any known dependency is an imported package
         for x in top_level_dependencies:
-            if isinstance(x, types.ModuleType) and x.__package__:
+            if isinstance(x, types.ModuleType) and hasattr(x, '__package__') and x.__package__:
                 # check if the package has any currently loaded sub-imports
                 prefix = x.__name__ + '.'
                 for name, module in sys.modules.items():
@@ -408,15 +452,18 @@ class CloudPickler(Pickler):
         from global modules.
         """
         clsdict = dict(obj.__dict__)  # copy dict proxy to a dict
-        if not isinstance(clsdict.get('__dict__', None), property):
-            # don't extract dict that are properties
-            clsdict.pop('__dict__', None)
-            clsdict.pop('__weakref__', None)
+        clsdict.pop('__weakref__', None)
 
-        # hack as __new__ is stored differently in the __dict__
-        new_override = clsdict.get('__new__', None)
-        if new_override:
-            clsdict['__new__'] = obj.__new__
+        # On PyPy, __doc__ is a readonly attribute, so we need to include it in
+        # the initial skeleton class.  This is safe because we know that the
+        # doc can't participate in a cycle with the original class.
+        type_kwargs = {'__doc__': clsdict.pop('__doc__', None)}
+
+        # If type overrides __dict__ as a property, include it in the type kwargs.
+        # In Python 2, we can't set this attribute after construction.
+        __dict__ = clsdict.pop('__dict__', None)
+        if isinstance(__dict__, property):
+            type_kwargs['__dict__'] = __dict__
 
         save = self.save
         write = self.write
@@ -436,23 +483,12 @@ class CloudPickler(Pickler):
         # Push the rehydration function.
         save(_rehydrate_skeleton_class)
 
-        # Mark the start of the args for the rehydration function.
+        # Mark the start of the args tuple for the rehydration function.
         write(pickle.MARK)
 
-        # On PyPy, __doc__ is a readonly attribute, so we need to include it in
-        # the initial skeleton class.  This is safe because we know that the
-        # doc can't participate in a cycle with the original class.
-        doc_dict = {'__doc__': clsdict.pop('__doc__', None)}
-
-        # Create and memoize an empty class with obj's name and bases.
-        save(type(obj))
-        save((
-            obj.__name__,
-            obj.__bases__,
-            doc_dict,
-        ))
-        write(pickle.REDUCE)
-        self.memoize(obj)
+        # Create and memoize a skeleton class with obj's name and bases.
+        tp = type(obj)
+        self.save_reduce(tp, (obj.__name__, obj.__bases__, type_kwargs), obj=obj)
 
         # Now save the rest of obj's __dict__. Any references to obj
         # encountered while saving will point to the skeleton class.
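
For illustration (not part of the diff), the kind of dynamically defined class
this path rehydrates, docstring included; a sketch assuming the class lives in
__main__ or an interactive session:

    import pickle
    import cloudpickle

    class Greeter(object):
        """Says hello."""
        def hello(self):
            return "hello"

    CloneGreeter = pickle.loads(cloudpickle.dumps(Greeter))
    assert CloneGreeter().hello() == "hello"
    assert CloneGreeter.__doc__ == "Says hello."
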
@@ -505,16 +541,22 @@ class CloudPickler(Pickler):
         self.memoize(func)
 
         # save the rest of the func data needed by _fill_function
-        save(f_globals)
-        save(defaults)
-        save(dct)
-        save(closure_values)
+        state = {
+            'globals': f_globals,
+            'defaults': defaults,
+            'dict': dct,
+            'module': func.__module__,
+            'closure_values': closure_values,
+        }
+        if hasattr(func, '__qualname__'):
+            state['qualname'] = func.__qualname__
+        save(state)
         write(pickle.TUPLE)
         write(pickle.REDUCE)  # applies _fill_function on the tuple
 
     _extract_code_globals_cache = (
         weakref.WeakKeyDictionary()
-        if sys.version_info >= (2, 7) and not hasattr(sys, "pypy_version_info")
+        if not hasattr(sys, "pypy_version_info")
         else {})
 
     @classmethod
@@ -590,37 +632,22 @@ class CloudPickler(Pickler):
         The name of this method is somewhat misleading: all types get
         dispatched here.
         """
-        if obj.__module__ == "__builtin__" or obj.__module__ == "builtins":
-            if obj in _BUILTIN_TYPE_NAMES:
-                return self.save_reduce(_builtin_type, (_BUILTIN_TYPE_NAMES[obj],), obj=obj)
+        if obj.__module__ == "__main__":
+            return self.save_dynamic_class(obj)
 
-        if name is None:
-            name = obj.__name__
-
-        modname = getattr(obj, "__module__", None)
-        if modname is None:
-            try:
-                # whichmodule() could fail, see
-                # https://bitbucket.org/gutworth/six/issues/63/importing-six-breaks-pickling
-                modname = pickle.whichmodule(obj, name)
-            except Exception:
-                modname = '__main__'
-
-        if modname == '__main__':
-            themodule = None
-        else:
-            __import__(modname)
-            themodule = sys.modules[modname]
-            self.modules.add(themodule)
+        try:
+            return Pickler.save_global(self, obj, name=name)
+        except Exception:
+            if obj.__module__ == "__builtin__" or obj.__module__ == "builtins":
+                if obj in _BUILTIN_TYPE_NAMES:
+                    return self.save_reduce(
+                        _builtin_type, (_BUILTIN_TYPE_NAMES[obj],), obj=obj)
 
-        if hasattr(themodule, name) and getattr(themodule, name) is obj:
-            return Pickler.save_global(self, obj, name)
+            typ = type(obj)
+            if typ is not obj and isinstance(obj, (type, types.ClassType)):
+                return self.save_dynamic_class(obj)
 
-        typ = type(obj)
-        if typ is not obj and isinstance(obj, (type, types.ClassType)):
-            self.save_dynamic_class(obj)
-        else:
-            raise pickle.PicklingError("Can't pickle %r" % obj)
+            raise
 
     dispatch[type] = save_global
     dispatch[types.ClassType] = save_global
@@ -691,12 +718,7 @@ class CloudPickler(Pickler):
     dispatch[property] = save_property
 
     def save_classmethod(self, obj):
-        try:
-            orig_func = obj.__func__
-        except AttributeError:  # Python 2.6
-            orig_func = obj.__get__(None, object)
-            if isinstance(obj, classmethod):
-                orig_func = orig_func.__func__  # Unbind
+        orig_func = obj.__func__
         self.save_reduce(type(obj), (orig_func,), obj=obj)
     dispatch[classmethod] = save_classmethod
     dispatch[staticmethod] = save_classmethod
@@ -736,64 +758,6 @@ class CloudPickler(Pickler):
     if type(operator.attrgetter) is type:
         dispatch[operator.attrgetter] = save_attrgetter
 
-    def save_reduce(self, func, args, state=None,
-                    listitems=None, dictitems=None, obj=None):
-        # Assert that args is a tuple or None
-        if not isinstance(args, tuple):
-            raise pickle.PicklingError("args from reduce() should be a tuple")
-
-        # Assert that func is callable
-        if not hasattr(func, '__call__'):
-            raise pickle.PicklingError("func from reduce should be callable")
-
-        save = self.save
-        write = self.write
-
-        # Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
-        if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
-            cls = args[0]
-            if not hasattr(cls, "__new__"):
-                raise pickle.PicklingError(
-                    "args[0] from __newobj__ args has no __new__")
-            if obj is not None and cls is not obj.__class__:
-                raise pickle.PicklingError(
-                    "args[0] from __newobj__ args has the wrong class")
-            args = args[1:]
-            save(cls)
-
-            save(args)
-            write(pickle.NEWOBJ)
-        else:
-            save(func)
-            save(args)
-            write(pickle.REDUCE)
-
-        if obj is not None:
-            self.memoize(obj)
-
-        # More new special cases (that work with older protocols as
-        # well): when __reduce__ returns a tuple with 4 or 5 items,
-        # the 4th and 5th item should be iterators that provide list
-        # items and dict items (as (key, value) tuples), or None.
-
-        if listitems is not None:
-            self._batch_appends(listitems)
-
-        if dictitems is not None:
-            self._batch_setitems(dictitems)
-
-        if state is not None:
-            save(state)
-            write(pickle.BUILD)
-
-    def save_partial(self, obj):
-        """Partial objects do not serialize correctly in python2.x -- this fixes the bugs"""
-        self.save_reduce(_genpartial, (obj.func, obj.args, obj.keywords))
-
-    if sys.version_info < (2,7):  # 2.7 supports partial pickling
-        dispatch[partial] = save_partial
-
-
     def save_file(self, obj):
         """Save a file"""
         try:
@@ -849,23 +813,21 @@ class CloudPickler(Pickler):
     dispatch[type(Ellipsis)] = save_ellipsis
     dispatch[type(NotImplemented)] = save_not_implemented
 
-    # WeakSet was added in 2.7.
-    if hasattr(weakref, 'WeakSet'):
-        def save_weakset(self, obj):
-            self.save_reduce(weakref.WeakSet, (list(obj),))
+    def save_weakset(self, obj):
+        self.save_reduce(weakref.WeakSet, (list(obj),))
 
-        dispatch[weakref.WeakSet] = save_weakset
-
-    """Special functions for Add-on libraries"""
-    def inject_addons(self):
-        """Plug in system. Register additional pickling functions if modules already loaded"""
-        pass
+    dispatch[weakref.WeakSet] = save_weakset
 
     def save_logger(self, obj):
         self.save_reduce(logging.getLogger, (obj.name,), obj=obj)
 
     dispatch[logging.Logger] = save_logger
 
+    """Special functions for Add-on libraries"""
+    def inject_addons(self):
+        """Plug in system. Register additional pickling functions if modules already loaded"""
+        pass
+
 
 # Tornado support
 
@@ -882,6 +844,7 @@ def is_tornado_coroutine(func):
         return False
     return gen.is_coroutine_function(func)
 
+
 def _rebuild_tornado_coroutine(func):
     from tornado import gen
     return gen.coroutine(func)
@@ -889,24 +852,44 @@ def _rebuild_tornado_coroutine(func):
 
 # Shorthands for legacy support
 
-def dump(obj, file, protocol=2):
-    CloudPickler(file, protocol).dump(obj)
+def dump(obj, file, protocol=None):
+    """Serialize obj as bytes streamed into file
 
+    protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to
+    pickle.HIGHEST_PROTOCOL. This setting favors maximum communication speed
+    between processes running the same Python version.
 
-def dumps(obj, protocol=2):
-    file = StringIO()
+    Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure
+    compatibility with older versions of Python.
+    """
+    CloudPickler(file, protocol=protocol).dump(obj)
 
-    cp = CloudPickler(file,protocol)
-    cp.dump(obj)
 
-    return file.getvalue()
+def dumps(obj, protocol=None):
+    """Serialize obj as a string of bytes allocated in memory
+
+    protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to
+    pickle.HIGHEST_PROTOCOL. This setting favors maximum communication speed
+    between processes running the same Python version.
+
+    Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure
+    compatibility with older versions of Python.
+    """
+    file = StringIO()
+    try:
+        cp = CloudPickler(file, protocol=protocol)
+        cp.dump(obj)
+        return file.getvalue()
+    finally:
+        file.close()
+
 
 # including pickles unloading functions in this namespace
 load = pickle.load
 loads = pickle.loads
 
 
-#hack for __import__ not working as desired
+# hack for __import__ not working as desired
 def subimport(name):
     __import__(name)
     return sys.modules[name]
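
A short usage sketch of both entry points above under the new default protocol
(illustrative only; any writable binary file-like object works for dump):

    import io
    import pickle
    import cloudpickle

    data = {"increment": lambda x: x + 1}

    # dumps: serialize to an in-memory byte string.
    blob = cloudpickle.dumps(data)
    assert pickle.loads(blob)["increment"](1) == 2

    # dump: stream the same payload into a file-like object.
    buf = io.BytesIO()
    cloudpickle.dump(data, buf)
    assert pickle.loads(buf.getvalue())["increment"](1) == 2
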
@@ -918,6 +901,7 @@ def dynamic_subimport(name, vars):
     sys.modules[name] = mod
     return mod
 
+
 # restores function attributes
 def _restore_attr(obj, attr):
     for key, val in attr.items():
@@ -1001,17 +985,40 @@ class _empty_cell_value(object):
         return cls.__name__
 
 
-def _fill_function(func, globals, defaults, dict, closure_values):
-    """ Fills in the rest of function data into the skeleton function object
-        that were created via _make_skel_func().
+def _fill_function(*args):
+    """Fills in the rest of function data into the skeleton function object
+
+    The skeleton itself is created by _make_skel_func().
     """
-    func.__globals__.update(globals)
-    func.__defaults__ = defaults
-    func.__dict__ = dict
+    if len(args) == 2:
+        func = args[0]
+        state = args[1]
+    elif len(args) == 5:
+        # Backwards compat for cloudpickle v0.4.0, after which the `module`
+        # argument was introduced
+        func = args[0]
+        keys = ['globals', 'defaults', 'dict', 'closure_values']
+        state = dict(zip(keys, args[1:]))
+    elif len(args) == 6:
+        # Backwards compat for cloudpickle v0.4.1, after which the function
+        # state was passed as a dict to _fill_function itself.
+        func = args[0]
+        keys = ['globals', 'defaults', 'dict', 'module', 'closure_values']
+        state = dict(zip(keys, args[1:]))
+    else:
+        raise ValueError('Unexpected _fill_function arguments: %r' % (args,))
+
+    func.__globals__.update(state['globals'])
+    func.__defaults__ = state['defaults']
+    func.__dict__ = state['dict']
+    if 'module' in state:
+        func.__module__ = state['module']
+    if 'qualname' in state:
+        func.__qualname__ = state['qualname']
 
     cells = func.__closure__
     if cells is not None:
-        for cell, value in zip(cells, closure_values):
+        for cell, value in zip(cells, state['closure_values']):
             if value is not _empty_cell_value:
                 cell_set(cell, value)
 
diff --git a/setup.cfg b/setup.cfg
index 6c71b61..1e3eb36 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -4,5 +4,4 @@ universal = 1
 [egg_info]
 tag_build = 
 tag_date = 0
-tag_svn_revision = 0
 
diff --git a/setup.py b/setup.py
index 8987560..735596c 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,7 @@ except ImportError:
 
 dist = setup(
     name='cloudpickle',
-    version='0.4.0',
+    version='0.5.2',
     description='Extended pickling support for Python objects',
     author='Cloudpipe',
     author_email='cloudpipe at googlegroups.com',
@@ -23,9 +23,7 @@ dist = setup(
         'Operating System :: POSIX',
         'Operating System :: Microsoft :: Windows',
         'Operating System :: MacOS :: MacOS X',
-        'Programming Language :: Python :: 2.6',
         'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3.3',
         'Programming Language :: Python :: 3.4',
         'Programming Language :: Python :: 3.5',
         'Programming Language :: Python :: 3.6',
diff --git a/tests/cloudpickle_file_test.py b/tests/cloudpickle_file_test.py
index 4799359..7c27fed 100644
--- a/tests/cloudpickle_file_test.py
+++ b/tests/cloudpickle_file_test.py
@@ -4,11 +4,7 @@ import os
 import shutil
 import pickle
 import sys
-try:
-    from io import StringIO
-except ImportError:
-    # compat for Python 2.6
-    from StringIO import StringIO
+from io import StringIO
 
 import pytest
 from mock import patch, mock_open
diff --git a/tests/cloudpickle_test.py b/tests/cloudpickle_test.py
index aa33ce4..23cbb3d 100644
--- a/tests/cloudpickle_test.py
+++ b/tests/cloudpickle_test.py
@@ -1,7 +1,7 @@
 from __future__ import division
 
 import abc
-
+import collections
 import base64
 import functools
 import imp
@@ -44,16 +44,14 @@ import cloudpickle
 from cloudpickle.cloudpickle import _find_module, _make_empty_cell, cell_set
 
 from .testutils import subprocess_pickle_echo
+from .testutils import assert_run_python_script
 
 
-HAVE_WEAKSET = hasattr(weakref, 'WeakSet')
-
-
-def pickle_depickle(obj):
+def pickle_depickle(obj, protocol=cloudpickle.DEFAULT_PROTOCOL):
     """Helper function to test whether object pickled with cloudpickle can be
     depickled with pickle
     """
-    return pickle.loads(cloudpickle.dumps(obj))
+    return pickle.loads(cloudpickle.dumps(obj, protocol=protocol))
 
 
 class CloudPicklerTest(unittest.TestCase):
@@ -64,15 +62,17 @@ class CloudPicklerTest(unittest.TestCase):
 
 class CloudPickleTest(unittest.TestCase):
 
+    protocol = cloudpickle.DEFAULT_PROTOCOL
+
     def test_itemgetter(self):
         d = range(10)
         getter = itemgetter(1)
 
-        getter2 = pickle_depickle(getter)
+        getter2 = pickle_depickle(getter, protocol=self.protocol)
         self.assertEqual(getter(d), getter2(d))
 
         getter = itemgetter(0, 3)
-        getter2 = pickle_depickle(getter)
+        getter2 = pickle_depickle(getter, protocol=self.protocol)
         self.assertEqual(getter(d), getter2(d))
 
     def test_attrgetter(self):
@@ -81,18 +81,18 @@ class CloudPickleTest(unittest.TestCase):
                 return item
         d = C()
         getter = attrgetter("a")
-        getter2 = pickle_depickle(getter)
+        getter2 = pickle_depickle(getter, protocol=self.protocol)
         self.assertEqual(getter(d), getter2(d))
         getter = attrgetter("a", "b")
-        getter2 = pickle_depickle(getter)
+        getter2 = pickle_depickle(getter, protocol=self.protocol)
         self.assertEqual(getter(d), getter2(d))
 
         d.e = C()
         getter = attrgetter("e.a")
-        getter2 = pickle_depickle(getter)
+        getter2 = pickle_depickle(getter, protocol=self.protocol)
         self.assertEqual(getter(d), getter2(d))
         getter = attrgetter("e.a", "e.b")
-        getter2 = pickle_depickle(getter)
+        getter2 = pickle_depickle(getter, protocol=self.protocol)
         self.assertEqual(getter(d), getter2(d))
 
     # Regression test for SPARK-3415
@@ -124,12 +124,32 @@ class CloudPickleTest(unittest.TestCase):
     def test_buffer(self):
         try:
             buffer_obj = buffer("Hello")
-            self.assertEqual(pickle_depickle(buffer_obj), str(buffer_obj))
+            buffer_clone = pickle_depickle(buffer_obj, protocol=self.protocol)
+            self.assertEqual(buffer_clone, str(buffer_obj))
             buffer_obj = buffer("Hello", 2, 3)
-            self.assertEqual(pickle_depickle(buffer_obj), str(buffer_obj))
+            buffer_clone = pickle_depickle(buffer_obj, protocol=self.protocol)
+            self.assertEqual(buffer_clone, str(buffer_obj))
         except NameError:  # Python 3 no longer supports buffers
             pass
 
+    def test_memoryview(self):
+        buffer_obj = memoryview(b"Hello")
+        self.assertEqual(pickle_depickle(buffer_obj, protocol=self.protocol),
+                         buffer_obj.tobytes())
+
+    @pytest.mark.skipif(sys.version_info < (3, 4),
+                        reason="non-contiguous memoryview not implemented in "
+                               "old Python versions")
+    def test_sliced_and_non_contiguous_memoryview(self):
+        buffer_obj = memoryview(b"Hello!" * 3)[2:15:2]
+        self.assertEqual(pickle_depickle(buffer_obj, protocol=self.protocol),
+                         buffer_obj.tobytes())
+
+    def test_large_memoryview(self):
+        buffer_obj = memoryview(b"Hello!" * int(1e7))
+        self.assertEqual(pickle_depickle(buffer_obj, protocol=self.protocol),
+                         buffer_obj.tobytes())
+
     def test_lambda(self):
         self.assertEqual(pickle_depickle(lambda: 1)(), 1)
 
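
The behavior these new tests pin down, sketched outside the diff: on Python 3,
a memoryview round-trips as the plain bytes it views.

    import pickle
    import cloudpickle

    view = memoryview(b"Hello")
    restored = pickle.loads(cloudpickle.dumps(view))

    # cloudpickle serializes the view's contents, so bytes come back.
    assert restored == view.tobytes()
    assert isinstance(restored, bytes)
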
@@ -137,7 +157,7 @@ class CloudPickleTest(unittest.TestCase):
         a, b = 1, 2
         f1 = lambda x: x + a
         f2 = lambda x: f1(x) // b
-        self.assertEqual(pickle_depickle(f2)(1), 1)
+        self.assertEqual(pickle_depickle(f2, protocol=self.protocol)(1), 1)
 
     def test_recursive_closure(self):
         def f1():
@@ -166,7 +186,7 @@ class CloudPickleTest(unittest.TestCase):
             msg='f actually has closure cells!',
         )
 
-        g = pickle_depickle(f)
+        g = pickle_depickle(f, protocol=self.protocol)
 
         self.assertTrue(
             g.__closure__ is None,
@@ -187,7 +207,7 @@ class CloudPickleTest(unittest.TestCase):
         with pytest.raises(NameError):
             g1()
 
-        g2 = pickle_depickle(g1)
+        g2 = pickle_depickle(g1, protocol=self.protocol)
         with pytest.raises(NameError):
             g2()
 
@@ -217,7 +237,7 @@ class CloudPickleTest(unittest.TestCase):
         self.assertEqual(Derived().method(), 2)
 
         # Pickle and unpickle the class.
-        UnpickledDerived = pickle_depickle(Derived)
+        UnpickledDerived = pickle_depickle(Derived, protocol=self.protocol)
         self.assertEqual(UnpickledDerived().method(), 2)
 
         # We have special logic for handling __doc__ because it's a readonly
@@ -226,7 +246,7 @@ class CloudPickleTest(unittest.TestCase):
 
         # Pickle and unpickle an instance.
         orig_d = Derived()
-        d = pickle_depickle(orig_d)
+        d = pickle_depickle(orig_d, protocol=self.protocol)
         self.assertEqual(d.method(), 2)
 
     def test_cycle_in_classdict_globals(self):
@@ -239,7 +259,7 @@ class CloudPickleTest(unittest.TestCase):
         C.C_again = C
         C.instance_of_C = C()
 
-        depickled_C = pickle_depickle(C)
+        depickled_C = pickle_depickle(C, protocol=self.protocol)
         depickled_instance = pickle_depickle(C())
 
         # Test instance of depickled class.
@@ -258,8 +278,8 @@ class CloudPickleTest(unittest.TestCase):
             return (x + y) / LOCAL_CONSTANT
 
         # pickle the function definition
-        self.assertEqual(pickle_depickle(some_function)(41, 1), 1)
-        self.assertEqual(pickle_depickle(some_function)(81, 3), 2)
+        self.assertEqual(pickle_depickle(some_function, protocol=self.protocol)(41, 1), 1)
+        self.assertEqual(pickle_depickle(some_function, protocol=self.protocol)(81, 3), 2)
 
         hidden_constant = lambda: LOCAL_CONSTANT
 
@@ -275,26 +295,30 @@ class CloudPickleTest(unittest.TestCase):
                 return self.one() + some_function(x, 1) + self.value
 
         # pickle the class definition
-        self.assertEqual(pickle_depickle(SomeClass)(1).one(), 1)
-        self.assertEqual(pickle_depickle(SomeClass)(5).some_method(41), 7)
-        new_class = subprocess_pickle_echo(SomeClass)
-        self.assertEqual(new_class(5).some_method(41), 7)
+        clone_class = pickle_depickle(SomeClass, protocol=self.protocol)
+        self.assertEqual(clone_class(1).one(), 1)
+        self.assertEqual(clone_class(5).some_method(41), 7)
+        clone_class = subprocess_pickle_echo(SomeClass, protocol=self.protocol)
+        self.assertEqual(clone_class(5).some_method(41), 7)
 
         # pickle the class instances
         self.assertEqual(pickle_depickle(SomeClass(1)).one(), 1)
         self.assertEqual(pickle_depickle(SomeClass(5)).some_method(41), 7)
-        new_instance = subprocess_pickle_echo(SomeClass(5))
+        new_instance = subprocess_pickle_echo(SomeClass(5),
+                                              protocol=self.protocol)
         self.assertEqual(new_instance.some_method(41), 7)
 
         # pickle the method instances
         self.assertEqual(pickle_depickle(SomeClass(1).one)(), 1)
         self.assertEqual(pickle_depickle(SomeClass(5).some_method)(41), 7)
-        new_method = subprocess_pickle_echo(SomeClass(5).some_method)
+        new_method = subprocess_pickle_echo(SomeClass(5).some_method,
+                                            protocol=self.protocol)
         self.assertEqual(new_method(41), 7)
 
     def test_partial(self):
         partial_obj = functools.partial(min, 1)
-        self.assertEqual(pickle_depickle(partial_obj)(4), 1)
+        partial_clone = pickle_depickle(partial_obj, protocol=self.protocol)
+        self.assertEqual(partial_clone(4), 1)
 
     @pytest.mark.skipif(platform.python_implementation() == 'PyPy',
                         reason="Skip numpy and scipy tests on PyPy")
@@ -342,7 +366,7 @@ class CloudPickleTest(unittest.TestCase):
             for i in range(cnt):
                 yield i
 
-        gen2 = pickle_depickle(some_generator)
+        gen2 = pickle_depickle(some_generator, protocol=self.protocol)
 
         assert type(gen2(3)) == type(some_generator(3))
         assert list(gen2(3)) == list(range(3))
@@ -359,8 +383,8 @@ class CloudPickleTest(unittest.TestCase):
         sm = A.__dict__["test_sm"]
         cm = A.__dict__["test_cm"]
 
-        A.test_sm = pickle_depickle(sm)
-        A.test_cm = pickle_depickle(cm)
+        A.test_sm = pickle_depickle(sm, protocol=self.protocol)
+        A.test_cm = pickle_depickle(cm, protocol=self.protocol)
 
         self.assertEqual(A.test_sm(), "sm")
         self.assertEqual(A.test_cm(), "cm")
@@ -381,7 +405,8 @@ class CloudPickleTest(unittest.TestCase):
         # self.assertEqual(g(F(), 1), 2)  # still fails
 
     def test_module(self):
-        self.assertEqual(pickle, pickle_depickle(pickle))
+        pickle_clone = pickle_depickle(pickle, protocol=self.protocol)
+        self.assertEqual(pickle, pickle_clone)
 
     def test_dynamic_module(self):
         mod = imp.new_module('mod')
@@ -389,11 +414,23 @@ class CloudPickleTest(unittest.TestCase):
         x = 1
         def f(y):
             return x + y
+
+        class Foo:
+            def method(self, x):
+                return f(x)
         '''
         exec(textwrap.dedent(code), mod.__dict__)
-        mod2 = pickle_depickle(mod)
+        mod2 = pickle_depickle(mod, protocol=self.protocol)
         self.assertEqual(mod.x, mod2.x)
         self.assertEqual(mod.f(5), mod2.f(5))
+        self.assertEqual(mod.Foo().method(5), mod2.Foo().method(5))
+
+        if platform.python_implementation() != 'PyPy':
+            # XXX: this fails with excessive recursion on PyPy.
+            mod3 = subprocess_pickle_echo(mod, protocol=self.protocol)
+            self.assertEqual(mod.x, mod3.x)
+            self.assertEqual(mod.f(5), mod3.f(5))
+            self.assertEqual(mod.Foo().method(5), mod3.Foo().method(5))
 
         # Test dynamic modules when imported back are singletons
         mod1, mod2 = pickle_depickle([mod, mod])
@@ -411,20 +448,20 @@ class CloudPickleTest(unittest.TestCase):
             _find_module('valid_module')
 
     def test_Ellipsis(self):
-        self.assertEqual(Ellipsis, pickle_depickle(Ellipsis))
+        self.assertEqual(Ellipsis,
+                         pickle_depickle(Ellipsis, protocol=self.protocol))
 
     def test_NotImplemented(self):
-        self.assertEqual(NotImplemented, pickle_depickle(NotImplemented))
+        ExcClone = pickle_depickle(NotImplemented, protocol=self.protocol)
+        self.assertEqual(NotImplemented, ExcClone)
 
-    @pytest.mark.skipif((3, 0) < sys.version_info < (3, 4),
-                        reason="fails due to pickle behavior in Python 3.0-3.3")
     def test_builtin_function_without_module(self):
         on = object.__new__
-        on_depickled = pickle_depickle(on)
+        on_depickled = pickle_depickle(on, protocol=self.protocol)
         self.assertEqual(type(on_depickled(object)), type(object()))
 
         fi = itertools.chain.from_iterable
-        fi_depickled = pickle_depickle(fi)
+        fi_depickled = pickle_depickle(fi, protocol=self.protocol)
         self.assertEqual(list(fi([[1, 2], [3, 4]])), [1, 2, 3, 4])
 
     @pytest.mark.skipif(tornado is None,
@@ -578,7 +615,7 @@ class CloudPickleTest(unittest.TestCase):
 
     def test_logger(self):
         logger = logging.getLogger('cloudpickle.dummy_test_logger')
-        pickled = pickle_depickle(logger)
+        pickled = pickle_depickle(logger, protocol=self.protocol)
         self.assertTrue(pickled is logger, (pickled, logger))
 
         dumped = cloudpickle.dumps(logger)
@@ -612,8 +649,9 @@ class CloudPickleTest(unittest.TestCase):
             def foo(self):
                 return 'it works!'
 
-        depickled_base = pickle_depickle(AbstractClass)
... 328 lines suppressed ...

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/cloudpickle.git


