[Python-modules-commits] [cloudpickle] 01/04: Import cloudpickle_0.2.2.orig.tar.gz

Diane Trout diane@moszumanska.debian.org
Tue Apr 25 03:02:04 UTC 2017


This is an automated email from the git hooks/post-receive script.

diane pushed a commit to branch master
in repository cloudpickle.

commit 63b00b2a864c5d6dcea5e3750a2d8e6914a81c47
Author: Diane Trout <diane@ghic.org>
Date:   Fri Apr 21 17:12:51 2017 -0700

    Import cloudpickle_0.2.2.orig.tar.gz
---
 PKG-INFO                         |   6 +-
 README.md                        |   2 +-
 cloudpickle.egg-info/PKG-INFO    |   6 +-
 cloudpickle.egg-info/SOURCES.txt |   1 -
 cloudpickle.egg-info/pbr.json    |   1 -
 cloudpickle/__init__.py          |   2 +-
 cloudpickle/cloudpickle.py       | 161 +++++++++++++++++++++++++++++----------
 setup.py                         |   4 +-
 tests/cloudpickle_file_test.py   |   2 +-
 tests/cloudpickle_test.py        |  70 +++++++++++++++++
 10 files changed, 203 insertions(+), 52 deletions(-)

diff --git a/PKG-INFO b/PKG-INFO
index 6aba4a1..412a7c0 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: cloudpickle
-Version: 0.2.1
+Version: 0.2.2
 Summary: Extended pickling support for Python objects
 Home-page: https://github.com/cloudpipe/cloudpickle
 Author: Cloudpipe
@@ -85,7 +85,7 @@ Description: # cloudpickle
         History
         -------
         
-        `cloudpickle` was initially developed by picloud.com and shipped as part of
+        `cloudpickle` was initially developed by [picloud.com](http://web.archive.org/web/20140721022102/http://blog.picloud.com/2013/11/17/picloud-has-joined-dropbox/) and shipped as part of
         the client SDK.
         
         A copy of `cloudpickle.py` was included as part of PySpark, the Python
@@ -108,6 +108,8 @@ Classifier: Programming Language :: Python :: 2.6
 Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff --git a/README.md b/README.md
index 4706d85..b6588ac 100644
--- a/README.md
+++ b/README.md
@@ -77,7 +77,7 @@ Running the tests
 History
 -------
 
-`cloudpickle` was initially developed by picloud.com and shipped as part of
+`cloudpickle` was initially developed by [picloud.com](http://web.archive.org/web/20140721022102/http://blog.picloud.com/2013/11/17/picloud-has-joined-dropbox/) and shipped as part of
 the client SDK.
 
 A copy of `cloudpickle.py` was included as part of PySpark, the Python
diff --git a/cloudpickle.egg-info/PKG-INFO b/cloudpickle.egg-info/PKG-INFO
index 6aba4a1..412a7c0 100644
--- a/cloudpickle.egg-info/PKG-INFO
+++ b/cloudpickle.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: cloudpickle
-Version: 0.2.1
+Version: 0.2.2
 Summary: Extended pickling support for Python objects
 Home-page: https://github.com/cloudpipe/cloudpickle
 Author: Cloudpipe
@@ -85,7 +85,7 @@ Description: # cloudpickle
         History
         -------
         
-        `cloudpickle` was initially developed by picloud.com and shipped as part of
+        `cloudpickle` was initially developed by [picloud.com](http://web.archive.org/web/20140721022102/http://blog.picloud.com/2013/11/17/picloud-has-joined-dropbox/) and shipped as part of
         the client SDK.
         
         A copy of `cloudpickle.py` was included as part of PySpark, the Python
@@ -108,6 +108,8 @@ Classifier: Programming Language :: Python :: 2.6
 Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
diff --git a/cloudpickle.egg-info/SOURCES.txt b/cloudpickle.egg-info/SOURCES.txt
index 8063c75..acc2dcd 100644
--- a/cloudpickle.egg-info/SOURCES.txt
+++ b/cloudpickle.egg-info/SOURCES.txt
@@ -8,7 +8,6 @@ cloudpickle/cloudpickle.py
 cloudpickle.egg-info/PKG-INFO
 cloudpickle.egg-info/SOURCES.txt
 cloudpickle.egg-info/dependency_links.txt
-cloudpickle.egg-info/pbr.json
 cloudpickle.egg-info/top_level.txt
 tests/__init__.py
 tests/cloudpickle_file_test.py
diff --git a/cloudpickle.egg-info/pbr.json b/cloudpickle.egg-info/pbr.json
deleted file mode 100644
index 1784f4a..0000000
--- a/cloudpickle.egg-info/pbr.json
+++ /dev/null
@@ -1 +0,0 @@
-{"is_release": false, "git_version": "4e34fd2"}
\ No newline at end of file
diff --git a/cloudpickle/__init__.py b/cloudpickle/__init__.py
index 891a802..46b2f1a 100644
--- a/cloudpickle/__init__.py
+++ b/cloudpickle/__init__.py
@@ -2,4 +2,4 @@ from __future__ import absolute_import
 
 from cloudpickle.cloudpickle import *
 
-__version__ = '0.2.1'
+__version__ = '0.2.2'
diff --git a/cloudpickle/cloudpickle.py b/cloudpickle/cloudpickle.py
index 306c859..e8f4223 100644
--- a/cloudpickle/cloudpickle.py
+++ b/cloudpickle/cloudpickle.py
@@ -9,10 +9,10 @@ The goals of it follow:
 It does not include an unpickler, as standard python unpickling suffices.
 
 This module was extracted from the `cloud` package, developed by `PiCloud, Inc.
-<http://www.picloud.com>`_.
+<https://web.archive.org/web/20140626004012/http://www.picloud.com/>`_.
 
 Copyright (c) 2012, Regents of the University of California.
-Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
+Copyright (c) 2009 `PiCloud, Inc. <https://web.archive.org/web/20140626004012/http://www.picloud.com/>`_.
 All rights reserved.
 
 Redistribution and use in source and binary forms, with or without
@@ -42,17 +42,19 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 """
 from __future__ import print_function
 
-import operator
-import io
+import dis
+from functools import partial
 import imp
+import io
+import itertools
+import opcode
+import operator
 import pickle
 import struct
 import sys
-import types
-from functools import partial
-import itertools
-import dis
 import traceback
+import types
+import weakref
 
 if sys.version < '3':
     from pickle import Pickler
@@ -68,10 +70,10 @@ else:
     PY3 = True
 
 #relevant opcodes
-STORE_GLOBAL = dis.opname.index('STORE_GLOBAL')
-DELETE_GLOBAL = dis.opname.index('DELETE_GLOBAL')
-LOAD_GLOBAL = dis.opname.index('LOAD_GLOBAL')
-GLOBAL_OPS = [STORE_GLOBAL, DELETE_GLOBAL, LOAD_GLOBAL]
+STORE_GLOBAL = opcode.opmap['STORE_GLOBAL']
+DELETE_GLOBAL = opcode.opmap['DELETE_GLOBAL']
+LOAD_GLOBAL = opcode.opmap['LOAD_GLOBAL']
+GLOBAL_OPS = (STORE_GLOBAL, DELETE_GLOBAL, LOAD_GLOBAL)
 HAVE_ARGUMENT = dis.HAVE_ARGUMENT
 EXTENDED_ARG = dis.EXTENDED_ARG
 
@@ -90,6 +92,43 @@ def _builtin_type(name):
     return getattr(types, name)
 
 
+if sys.version_info < (3, 4):
+    def _walk_global_ops(code):
+        """
+        Yield (opcode, argument number) tuples for all
+        global-referencing instructions in *code*.
+        """
+        code = getattr(code, 'co_code', b'')
+        if not PY3:
+            code = map(ord, code)
+
+        n = len(code)
+        i = 0
+        extended_arg = 0
+        while i < n:
+            op = code[i]
+            i += 1
+            if op >= HAVE_ARGUMENT:
+                oparg = code[i] + code[i + 1] * 256 + extended_arg
+                extended_arg = 0
+                i += 2
+                if op == EXTENDED_ARG:
+                    extended_arg = oparg * 65536
+                if op in GLOBAL_OPS:
+                    yield op, oparg
+
+else:
+    def _walk_global_ops(code):
+        """
+        Yield (opcode, argument number) tuples for all
+        global-referencing instructions in *code*.
+        """
+        for instr in dis.get_instructions(code):
+            op = instr.opcode
+            if op in GLOBAL_OPS:
+                yield op, instr.arg
+
+
 class CloudPickler(Pickler):
 
     dispatch = Pickler.dispatch.copy()
@@ -196,6 +235,26 @@ class CloudPickler(Pickler):
             if getattr(themodule, name, None) is obj:
                 return self.save_global(obj, name)
 
+        # a builtin_function_or_method which comes in as an attribute of some
+        # object (e.g., object.__new__, itertools.chain.from_iterable) will end
+        # up with modname "__main__" and so end up here. But these functions
+        # have no __code__ attribute in CPython, so the handling for 
+        # user-defined functions below will fail.
+        # So we pickle them here using save_reduce; have to do it differently
+        # for different python versions.
+        if not hasattr(obj, '__code__'):
+            if PY3:
+                if sys.version_info < (3, 4):
+                    raise pickle.PicklingError("Can't pickle %r" % obj)
+                else:
+                    rv = obj.__reduce_ex__(self.proto)
+            else:
+                if hasattr(obj, '__self__'):
+                    rv = (getattr, (obj.__self__, name))
+                else:
+                    raise pickle.PicklingError("Can't pickle %r" % obj)
+            return Pickler.save_reduce(self, obj=obj, *rv)
+
         # if func is lambda, def'ed at prompt, is in main, or is nested, then
         # we'll pickle the actual function object rather than simply saving a
         # reference (as is done in default pickler), via save_function_tuple.
@@ -235,6 +294,11 @@ class CloudPickler(Pickler):
         safe, since this won't contain a ref to the func), and memoize it as
         soon as it's created.  The other stuff can then be filled in later.
         """
+        if is_tornado_coroutine(func):
+            self.save_reduce(_rebuild_tornado_coroutine, (func.__wrapped__,),
+                             obj=func)
+            return
+
         save = self.save
         write = self.write
 
@@ -256,41 +320,34 @@ class CloudPickler(Pickler):
         write(pickle.TUPLE)
         write(pickle.REDUCE)  # applies _fill_function on the tuple
 
-    @staticmethod
-    def extract_code_globals(co):
+    _extract_code_globals_cache = (
+        weakref.WeakKeyDictionary()
+        if sys.version_info >= (2, 7) and not hasattr(sys, "pypy_version_info")
+        else {})
+
+    @classmethod
+    def extract_code_globals(cls, co):
         """
         Find all globals names read or written to by codeblock co
         """
+        out_names = cls._extract_code_globals_cache.get(co)
+        if out_names is None:
+            try:
+                names = co.co_names
+            except AttributeError:
+                # PyPy "builtin-code" object
+                out_names = set()
+            else:
+                out_names = set(names[oparg]
+                                for op, oparg in _walk_global_ops(co))
 
-        code = getattr(co, 'co_code', None)
-        if code is None:
-            return set()
-        if not PY3:
-            code = [ord(c) for c in code]
-        names = co.co_names
-        out_names = set()
-
-        n = len(code)
-        i = 0
-        extended_arg = 0
-        while i < n:
-            op = code[i]
-
-            i += 1
-            if op >= HAVE_ARGUMENT:
-                oparg = code[i] + code[i+1] * 256 + extended_arg
-                extended_arg = 0
-                i += 2
-                if op == EXTENDED_ARG:
-                    extended_arg = oparg*65536
-                if op in GLOBAL_OPS:
-                    out_names.add(names[oparg])
+                # see if nested function have any global refs
+                if co.co_consts:
+                    for const in co.co_consts:
+                        if type(const) is types.CodeType:
+                            out_names |= cls.extract_code_globals(const)
 
-        # see if nested function have any global refs
-        if co.co_consts:
-            for const in co.co_consts:
-                if type(const) is types.CodeType:
-                    out_names |= CloudPickler.extract_code_globals(const)
+            cls._extract_code_globals_cache[co] = out_names
 
         return out_names
 
@@ -616,6 +673,26 @@ class CloudPickler(Pickler):
         pass
 
 
+# Tornado support
+
+def is_tornado_coroutine(func):
+    """
+    Return whether *func* is a Tornado coroutine function.
+    Running coroutines are not supported.
+    """
+    if 'tornado.gen' not in sys.modules:
+        return False
+    gen = sys.modules['tornado.gen']
+    if not hasattr(gen, "is_coroutine_function"):
+        # Tornado version is too old
+        return False
+    return gen.is_coroutine_function(func)
+
+def _rebuild_tornado_coroutine(func):
+    from tornado import gen
+    return gen.coroutine(func)
+
+
 # Shorthands for legacy support
 
 def dump(obj, file, protocol=2):
diff --git a/setup.py b/setup.py
index 613ed8a..d32149e 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,7 @@ except ImportError:
 
 dist = setup(
     name='cloudpickle',
-    version='0.2.1',
+    version='0.2.2',
     description='Extended pickling support for Python objects',
     author='Cloudpipe',
     author_email='cloudpipe at googlegroups.com',
@@ -27,6 +27,8 @@ dist = setup(
         'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3.3',
         'Programming Language :: Python :: 3.4',
+        'Programming Language :: Python :: 3.5',
+        'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: Implementation :: CPython',
         'Programming Language :: Python :: Implementation :: PyPy',
         'Topic :: Software Development :: Libraries :: Python Modules',
diff --git a/tests/cloudpickle_file_test.py b/tests/cloudpickle_file_test.py
index a5b13fc..f98cf07 100644
--- a/tests/cloudpickle_file_test.py
+++ b/tests/cloudpickle_file_test.py
@@ -85,7 +85,7 @@ class CloudPickleFileTests(unittest.TestCase):
             self.assertEquals(self.teststring, unpickled.read())
         os.remove(self.tmpfilepath)
 
-    @pytest.mark.skipif(sys.version_info > (2, 7),
+    @pytest.mark.skipif(sys.version_info >= (3,),
                         reason="only works on Python 2.x")
     def test_temp_file(self):
         with tempfile.NamedTemporaryFile(mode='ab+') as fp:
diff --git a/tests/cloudpickle_test.py b/tests/cloudpickle_test.py
index e0ff4a2..a1dec11 100644
--- a/tests/cloudpickle_test.py
+++ b/tests/cloudpickle_test.py
@@ -4,7 +4,9 @@ import unittest
 import pytest
 import pickle
 import sys
+import random
 import functools
+import itertools
 import platform
 import textwrap
 
@@ -17,6 +19,12 @@ except ImportError:
     np = None
     spp = None
 
+try:
+    # Ditto for Tornado
+    import tornado
+except ImportError:
+    tornado = None
+
 
 from operator import itemgetter, attrgetter
 
@@ -291,5 +299,67 @@ class CloudPickleTest(unittest.TestCase):
     def test_NotImplemented(self):
         self.assertEqual(NotImplemented, pickle_depickle(NotImplemented))
 
+    @pytest.mark.skipif((3, 0) < sys.version_info < (3, 4),
+                        reason="fails due to pickle behavior in Python 3.0-3.3")
+    def test_builtin_function_without_module(self):
+        on = object.__new__
+        on_depickled = pickle_depickle(on)
+        self.assertEqual(type(on_depickled(object)), type(object()))
+
+        fi = itertools.chain.from_iterable
+        fi_depickled = pickle_depickle(fi)
+        self.assertEqual(list(fi([[1, 2], [3, 4]])), [1, 2, 3, 4])
+
+    @pytest.mark.skipif(tornado is None,
+                        reason="test needs Tornado installed")
+    def test_tornado_coroutine(self):
+        # Pickling a locally defined coroutine function
+        from tornado import gen, ioloop
+
+        @gen.coroutine
+        def f(x, y):
+            yield gen.sleep(x)
+            raise gen.Return(y + 1)
+
+        @gen.coroutine
+        def g(y):
+            res = yield f(0.01, y)
+            raise gen.Return(res + 1)
+
+        data = cloudpickle.dumps([g, g])
+        f = g = None
+        g2, g3 = pickle.loads(data)
+        self.assertTrue(g2 is g3)
+        loop = ioloop.IOLoop.current()
+        res = loop.run_sync(functools.partial(g2, 5))
+        self.assertEqual(res, 7)
+
+    def test_extended_arg(self):
+        # Functions with more than 65535 global vars prefix some global
+        # variable references with the EXTENDED_ARG opcode.
+        nvars = 65537 + 258
+        names = ['g%d' % i for i in range(1, nvars)]
+        r = random.Random(42)
+        d = dict([(name, r.randrange(100)) for name in names])
+        # def f(x):
+        #     x = g1, g2, ...
+        #     return zlib.crc32(bytes(bytearray(x)))
+        code = """
+        import zlib
+
+        def f():
+            x = {tup}
+            return zlib.crc32(bytes(bytearray(x)))
+        """.format(tup=', '.join(names))
+        exec(textwrap.dedent(code), d, d)
+        f = d['f']
+        res = f()
+        data = cloudpickle.dumps([f, f])
+        d = f = None
+        f2, f3 = pickle.loads(data)
+        self.assertTrue(f2 is f3)
+        self.assertEqual(f2(), res)
+
+
 if __name__ == '__main__':
     unittest.main()
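
For context on the _walk_global_ops / extract_code_globals changes above: on
Python 3.4+ the same idea can be sketched with the standard-library dis module
alone, since dis.get_instructions() folds EXTENDED_ARG into each instruction
and exposes the resolved name as argval. The helper below is a hypothetical
standalone sketch, not part of cloudpickle's API; it collects the global names
referenced by a code object and by any code objects nested in its constants.

    import dis
    import types

    GLOBAL_OPNAMES = {'STORE_GLOBAL', 'DELETE_GLOBAL', 'LOAD_GLOBAL'}

    def global_names(code):
        # Names read or written as globals by *code*, plus those used by
        # code objects nested in co_consts (inner functions, lambdas).
        names = {instr.argval for instr in dis.get_instructions(code)
                 if instr.opname in GLOBAL_OPNAMES}
        for const in code.co_consts:
            if isinstance(const, types.CodeType):
                names |= global_names(const)
        return names

    CONST = 42

    def example(seq):
        def inner():
            return CONST               # global read from a nested function
        return len(seq) + inner()      # len is resolved as a global too

    print(sorted(global_names(example.__code__)))   # ['CONST', 'len']

The pre-3.4 branch in the hunk above walks the raw bytecode and tracks
EXTENDED_ARG by hand because dis.get_instructions() only exists from
Python 3.4 onward.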

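The new Tornado support (is_tornado_coroutine / _rebuild_tornado_coroutine)
makes @gen.coroutine-decorated functions picklable by reducing them to a call
that re-applies gen.coroutine to the wrapped function on load. A minimal usage
sketch, assuming Tornado is installed and recent enough to provide
gen.is_coroutine_function (otherwise cloudpickle falls back to its regular
function handling):

    import pickle
    import cloudpickle
    from tornado import gen, ioloop

    @gen.coroutine
    def add_one(x):
        yield gen.sleep(0.01)
        raise gen.Return(x + 1)

    data = cloudpickle.dumps(add_one)    # reduced to _rebuild_tornado_coroutine
    restored = pickle.loads(data)        # plain pickle can load the result

    result = ioloop.IOLoop.current().run_sync(lambda: restored(41))
    print(result)                        # 42
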
-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/cloudpickle.git