[Python-modules-commits] [jsonpickle] 01/09: Import jsonpickle_0.9.5.orig.tar.gz

Sophie Brun sbrun-guest at moszumanska.debian.org
Tue Aug 22 06:56:30 UTC 2017


This is an automated email from the git hooks/post-receive script.

sbrun-guest pushed a commit to branch master
in repository jsonpickle.

commit 659aaa267c2e16abb6d26cffeab07578fba72a0f
Author: Sophie Brun <sophie at freexian.com>
Date:   Mon Aug 21 11:19:23 2017 +0200

    Import jsonpickle_0.9.5.orig.tar.gz
---
 PKG-INFO                        |   4 +-
 README.rst                      |   9 ++
 docs/source/changelog.rst       |  15 +++
 docs/source/contrib.rst         |  36 ++++--
 docs/source/extensions.rst      |  12 ++
 docs/source/index.rst           |   9 +-
 jsonpickle.egg-info/PKG-INFO    |   4 +-
 jsonpickle.egg-info/SOURCES.txt |   3 +-
 jsonpickle/__init__.py          |  33 +++---
 jsonpickle/backend.py           |   5 +-
 jsonpickle/compat.py            |   8 +-
 jsonpickle/ext/numpy.py         | 246 ++++++++++++++++++++++++++++++++++++----
 jsonpickle/handlers.py          |  88 ++++----------
 jsonpickle/pickler.py           | 128 +++++++++++----------
 jsonpickle/tags.py              |   5 +-
 jsonpickle/unpickler.py         | 101 ++++++++++++-----
 jsonpickle/util.py              |  97 +++++++++++-----
 jsonpickle/version.py           |   2 +-
 requirements-2.txt              |   6 +-
 requirements-3.txt              |   1 +
 requirements-test.txt           |   5 +-
 requirements.txt                |   5 -
 setup.cfg                       |   1 -
 setup.py                        |   4 +-
 tests/jsonpickle_test.py        |   2 +-
 tests/numpy_test.py             | 212 ++++++++++++++++++++++++++++++++--
 tests/object_test.py            |  60 +++++++---
 tests/sqlalchemy_test.py        | 102 +++++++++++++++++
 tests/stdlib_test.py            |  22 +++-
 29 files changed, 931 insertions(+), 294 deletions(-)

diff --git a/PKG-INFO b/PKG-INFO
index b6dd592..d144231 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,10 +1,10 @@
 Metadata-Version: 1.1
 Name: jsonpickle
-Version: 0.9.3
+Version: 0.9.5
 Summary: Python library for serializing any arbitrary object graph into JSON
 Home-page: http://jsonpickle.github.io/
 Author: David Aguilar
-Author-email: davvid -at- gmail.com
+Author-email: davvid at gmail.com
 License: BSD
 Description: jsonpickle converts complex Python objects to and from JSON.
 Keywords: json pickle,json,pickle,marshal,serialization,JavaScript Object Notation
diff --git a/README.rst b/README.rst
index b4fd724..8e647d4 100644
--- a/README.rst
+++ b/README.rst
@@ -34,6 +34,15 @@ If you have the files checked out for development:
     python setup.py develop
 
 
+Numpy Support
+=============
+jsonpickle includes a built-in numpy extension.  If you would like to encode
+sklearn models, numpy arrays, and other numpy-based data, then you must
+enable the numpy extension by registering its handlers::
+
+    >>> import jsonpickle.ext.numpy as jsonpickle_numpy
+    >>> jsonpickle_numpy.register_handlers()
+
 jsonpickleJS
 ============
 `jsonpickleJS <https://github.com/cuthbertLab/jsonpickleJS>`_
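
For context on the numpy extension added above, a round trip looks like this
once the handlers are registered (a minimal sketch against the 0.9.5 API;
the array values are arbitrary):

    >>> import numpy as np
    >>> import jsonpickle
    >>> import jsonpickle.ext.numpy as jsonpickle_numpy
    >>> jsonpickle_numpy.register_handlers()
    >>> frozen = jsonpickle.encode(np.arange(4))
    >>> np.array_equal(jsonpickle.decode(frozen), np.arange(4))
    True
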
diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst
index f137cc6..b856311 100644
--- a/docs/source/changelog.rst
+++ b/docs/source/changelog.rst
@@ -1,6 +1,21 @@
 Change Log
 ==========
 
+Version 0.9.5 - July 16, 2017
+-----------------------------
+    * Better support for objects that implement the reduce protocol.
+      (`#170 <https://github.com/jsonpickle/jsonpickle/pull/170>`_).
+
+Version 0.9.4 - January 10, 2017
+--------------------------------
+    * Arbitrary byte streams are now better supported.
+      (`#143 <https://github.com/jsonpickle/jsonpickle/issues/143>`_).
+
+    * Better support for NumPy data types.  The Python3 NumPy support
+      is especially robust.
+
+    * Fortran-ordered NumPy arrays are now properly serialized.
+
 Version 0.9.3 - March 9, 2016
 -----------------------------
     * UUID objects can now be serialized
diff --git a/docs/source/contrib.rst b/docs/source/contrib.rst
index 0935c9a..626fc4a 100644
--- a/docs/source/contrib.rst
+++ b/docs/source/contrib.rst
@@ -21,26 +21,24 @@ Before code is pulled into the master jsonpickle branch, all tests should pass.
 If you are contributing an addition or a change in behavior, we ask that you
 document the change in the form of test cases.
 
-The jsonpickle test suite uses several JSON encoding libraries as well as 
+The jsonpickle test suite uses several JSON encoding libraries as well as
 several libraries for sample objects.  To simplify the process of setting up
-these libraries we recommend creating a virtualenv_ and using a pip_ 
-requirements file to install the dependencies.  In the base jsonpickle 
+these libraries we recommend creating a virtualenv_ and using a pip_
+requirements file to install the dependencies.  In the base jsonpickle
 directory::
 
-    # create a virtualenv that is completely isolated from the 
+    # create a virtualenv that is completely isolated from the
     # site-wide python install
     virtualenv --no-site-packages env
 
-    # activate the virtualenv
-    source env/bin/activate
-
     # use pip to install the dependencies listed in the requirements file
-    pip install --upgrade -r requirements.txt
-    pip install --upgrade -r requirements-test.txt
+    ./env/bin/pip install --upgrade -r requirements-2.txt  # Python2
+    ./env/bin/pip install --upgrade -r requirements-3.txt  # Python3
+    ./env/bin/pip install --upgrade -r requirements-test.txt
 
 To run the suite, simply invoke :file:`tests/runtests.py`::
 
-    $ tests/runtests.py
+    $ ./env/bin/python tests/runtests.py
     test_None (util_tests.IsPrimitiveTestCase) ... ok
     test_bool (util_tests.IsPrimitiveTestCase) ... ok
     test_dict (util_tests.IsPrimitiveTestCase) ... ok
@@ -50,6 +48,24 @@ To run the suite, simply invoke :file:`tests/runtests.py`::
 .. _virtualenv: http://pypi.python.org/pypi/virtualenv
 .. _pip: http://pypi.python.org/pypi/pip
 
+Testing with Tox
+================
+jsonpickle supports many versions of Python.  To make it easy to test
+multiple versions of Python, you should install the tox_ testing tool,
+e.g. on Debian::
+
+    $ sudo apt-get install tox
+
+Once tox_ is installed you can run the test suite against multiple Python
+interpreters::
+
+    $ make tox
+
+It is recommended that you install at least one Python2 and one Python3
+interpreter for use by tox_.
+
+.. _tox: https://tox.readthedocs.io/
+
 Generate Documentation
 ======================
 
diff --git a/docs/source/extensions.rst b/docs/source/extensions.rst
new file mode 100644
index 0000000..9a22ac4
--- /dev/null
+++ b/docs/source/extensions.rst
@@ -0,0 +1,12 @@
+=====================
+jsonpickle extensions
+=====================
+
+NumPy
+-----
+jsonpickle includes a built-in numpy extension.  If you would like to encode
+sklearn models, numpy arrays, and other numpy-based data, then you must
+enable the numpy extension by registering its handlers::
+
+    >>> import jsonpickle.ext.numpy as jsonpickle_numpy
+    >>> jsonpickle_numpy.register_handlers()
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 64be8b0..834822c 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -47,15 +47,20 @@ API Reference
 =============
 
 .. toctree::
-   :maxdepth: 3
 
    api
 
+Extensions
+==========
+
+.. toctree::
+
+   extensions
+
 Contributing
 ============
 
 .. toctree::
-   :maxdepth: 3
 
    contrib
 
diff --git a/jsonpickle.egg-info/PKG-INFO b/jsonpickle.egg-info/PKG-INFO
index b6dd592..d144231 100644
--- a/jsonpickle.egg-info/PKG-INFO
+++ b/jsonpickle.egg-info/PKG-INFO
@@ -1,10 +1,10 @@
 Metadata-Version: 1.1
 Name: jsonpickle
-Version: 0.9.3
+Version: 0.9.5
 Summary: Python library for serializing any arbitrary object graph into JSON
 Home-page: http://jsonpickle.github.io/
 Author: David Aguilar
-Author-email: davvid -at- gmail.com
+Author-email: davvid at gmail.com
 License: BSD
 Description: jsonpickle converts complex Python objects to and from JSON.
 Keywords: json pickle,json,pickle,marshal,serialization,JavaScript Object Notation
diff --git a/jsonpickle.egg-info/SOURCES.txt b/jsonpickle.egg-info/SOURCES.txt
index 8190ada..1e7c639 100644
--- a/jsonpickle.egg-info/SOURCES.txt
+++ b/jsonpickle.egg-info/SOURCES.txt
@@ -4,12 +4,12 @@ README.rst
 requirements-2.txt
 requirements-3.txt
 requirements-test.txt
-requirements.txt
 setup.py
 docs/source/api.rst
 docs/source/changelog.rst
 docs/source/conf.py
 docs/source/contrib.rst
+docs/source/extensions.rst
 docs/source/index.rst
 jsonpickle/__init__.py
 jsonpickle/backend.py
@@ -49,5 +49,6 @@ tests/jsonpickle_test.py
 tests/numpy_test.py
 tests/object_test.py
 tests/runtests.py
+tests/sqlalchemy_test.py
 tests/stdlib_test.py
 tests/util_test.py
\ No newline at end of file
diff --git a/jsonpickle/__init__.py b/jsonpickle/__init__.py
index e735da8..3b0e61a 100644
--- a/jsonpickle/__init__.py
+++ b/jsonpickle/__init__.py
@@ -53,13 +53,15 @@ added to JSON::
     assert obj.name == result['name'] == 'Awesome'
 
 """
-from jsonpickle import pickler
-from jsonpickle import unpickler
-from jsonpickle.backend import JSONBackend
-from jsonpickle.version import VERSION
+from __future__ import absolute_import, division, unicode_literals
+
+from . import pickler
+from . import unpickler
+from .backend import JSONBackend
+from .version import VERSION
 
 # ensure built-in handlers are loaded
-__import__('jsonpickle.handlers')
+from . import handlers as _
 
 __all__ = ('encode', 'decode')
 __version__ = VERSION
@@ -106,19 +108,14 @@ def encode(value,
     :param max_iter: If set to a non-negative integer then jsonpickle will
         consume at most `max_iter` items when pickling iterators.
 
-    >>> encode('my string')
-    '"my string"'
-    >>> encode(36)
-    '36'
-
-    >>> encode({'foo': True})
-    '{"foo": true}'
-
-    >>> encode({'foo': True}, max_depth=0)
-    '"{\\'foo\\': True}"'
-
-    >>> encode({'foo': True}, max_depth=1)
-    '{"foo": "True"}'
+    >>> encode('my string') == '"my string"'
+    True
+    >>> encode(36) == '36'
+    True
+    >>> encode({'foo': True}) == '{"foo": true}'
+    True
+    >>> encode({'foo': True}, max_depth=1) == '{"foo": "True"}'
+    True
 
 
     """
diff --git a/jsonpickle/backend.py b/jsonpickle/backend.py
index 67e4f88..2820d27 100644
--- a/jsonpickle/backend.py
+++ b/jsonpickle/backend.py
@@ -1,7 +1,8 @@
 # -*- coding: utf-8 -*-
+from __future__ import absolute_import, division, unicode_literals
 
-from jsonpickle.compat import PY32
-from jsonpickle.compat import unicode
+from .compat import PY32
+from .compat import unicode
 
 
 class JSONBackend(object):
diff --git a/jsonpickle/compat.py b/jsonpickle/compat.py
index b03a3e2..d30efa0 100644
--- a/jsonpickle/compat.py
+++ b/jsonpickle/compat.py
@@ -1,7 +1,10 @@
 # -*- coding: utf-8 -*-
-
+from __future__ import absolute_import, division, unicode_literals
 import sys
 
+
+__all__ = ('bytes', 'set', 'unicode', 'long', 'unichr', 'queue')
+
 PY_MAJOR = sys.version_info[0]
 PY_MINOR = sys.version_info[1]
 PY2 = PY_MAJOR == 2
@@ -43,6 +46,3 @@ try:
 except ImportError:
     # Python2
     import Queue as queue
-
-
-__all__ = ['bytes', 'set', 'unicode', 'long', 'unichr', 'queue']
diff --git a/jsonpickle/ext/numpy.py b/jsonpickle/ext/numpy.py
index c4a8a79..4a20c0e 100644
--- a/jsonpickle/ext/numpy.py
+++ b/jsonpickle/ext/numpy.py
@@ -2,25 +2,30 @@
 
 from __future__ import absolute_import
 
+import sys
+import zlib
+import warnings
+
 import numpy as np
 
 import ast
-import jsonpickle
-from jsonpickle.compat import unicode
+from ..handlers import BaseHandler, register, unregister
+from ..compat import unicode
+from ..util import b64decode, b64encode
 
 __all__ = ['register_handlers', 'unregister_handlers']
 
+native_byteorder = '<' if sys.byteorder == 'little' else '>'
 
-class NumpyBaseHandler(jsonpickle.handlers.BaseHandler):
+def get_byteorder(arr):
+    """translate equals sign to native order"""
+    byteorder = arr.dtype.byteorder
+    return native_byteorder if byteorder == '=' else byteorder
 
-    def restore_dtype(self, data):
-        dtype = data['dtype']
-        if dtype.startswith(('{', '[')):
-            return ast.literal_eval(dtype)
-        return np.dtype(dtype)
 
-    def flatten_dtype(self, dtype, data):
+class NumpyBaseHandler(BaseHandler):
 
+    def flatten_dtype(self, dtype, data):
         if hasattr(dtype, 'tostring'):
             data['dtype'] = dtype.tostring()
         else:
@@ -30,6 +35,12 @@ class NumpyBaseHandler(jsonpickle.handlers.BaseHandler):
                 dtype = dtype[len(prefix):-1]
             data['dtype'] = dtype
 
+    def restore_dtype(self, data):
+        dtype = data['dtype']
+        if dtype.startswith(('{', '[')):
+            dtype = ast.literal_eval(dtype)
+        return np.dtype(dtype)
+
 
 class NumpyDTypeHandler(NumpyBaseHandler):
 
@@ -44,7 +55,7 @@ class NumpyDTypeHandler(NumpyBaseHandler):
 class NumpyGenericHandler(NumpyBaseHandler):
 
     def flatten(self, obj, data):
-        self.flatten_dtype(obj.dtype, data)
+        self.flatten_dtype(obj.dtype.newbyteorder('N'), data)
         data['value'] = self.context.flatten(obj.tolist(), reset=False)
         return data
 
@@ -54,25 +65,220 @@ class NumpyGenericHandler(NumpyBaseHandler):
 
 
 class NumpyNDArrayHandler(NumpyBaseHandler):
+    """Stores arrays as text representation, without regard for views
+    """
+    def flatten_flags(self, obj, data):
+        if obj.flags.writeable is False:
+            data['writeable'] = False
+
+    def restore_flags(self, data, arr):
+        if not data.get('writeable', True):
+            arr.flags.writeable = False
 
     def flatten(self, obj, data):
-        self.flatten_dtype(obj.dtype, data)
+        self.flatten_dtype(obj.dtype.newbyteorder('N'), data)
+        self.flatten_flags(obj, data)
         data['values'] = self.context.flatten(obj.tolist(), reset=False)
+        if 0 in obj.shape:
+            # add shape information explicitly as it cannot be inferred from an empty list
+            data['shape'] = obj.shape
         return data
 
     def restore(self, data):
-        dtype = self.restore_dtype(data)
-        return np.array(self.context.restore(data['values'], reset=False),
-                        dtype=dtype)
+        values = self.context.restore(data['values'], reset=False)
+        arr = np.array(
+            values,
+            dtype=self.restore_dtype(data),
+            order=data.get('order', 'C')
+        )
+        shape = data.get('shape', None)
+        if shape is not None:
+            arr = arr.reshape(shape)
+
+        self.restore_flags(data, arr)
+        return arr
+
+
+class NumpyNDArrayHandlerBinary(NumpyNDArrayHandler):
+    """stores arrays with size greater than 'size_treshold' as (optionally) compressed base64
+
+    Notes
+    -----
+    This would be easier to implement using np.save/np.load, but that would be less language-agnostic
+    """
+
+    def __init__(self, size_treshold=16, compression=zlib):
+        """
+        :param size_treshold: nonnegative int or None
+            valid values for 'size_treshold' are all nonnegative integers and None
+            if size_treshold is None, values are always stored as nested lists
+        :param compression: a compression module or None
+            valid values for 'compression' are {zlib, bz2, None}
+            if compression is None, no compression is applied
+        """
+        self.size_treshold = size_treshold
+        self.compression = compression
+
+    def flatten_byteorder(self, obj, data):
+        byteorder = obj.dtype.byteorder
+        if byteorder != '|':
+            data['byteorder'] = get_byteorder(obj)
+
+    def restore_byteorder(self, data, arr):
+        byteorder = data.get('byteorder', None)
+        if byteorder:
+            arr.dtype = arr.dtype.newbyteorder(byteorder)
+
+    def flatten(self, obj, data):
+        """encode numpy to json"""
+        if self.size_treshold is None or self.size_treshold >= obj.size:
+            # encode as text
+            data = super(NumpyNDArrayHandlerBinary, self).flatten(obj, data)
+        else:
+            # encode as binary
+            buffer = obj.tobytes(order='a')  # numpy docstring is lacking as of 1.11.2, but this is the option we need
+            if self.compression:
+                buffer = self.compression.compress(buffer)
+            data['values'] = b64encode(buffer)
+            data['shape'] = obj.shape
+            self.flatten_dtype(obj.dtype.newbyteorder('N'), data)
+            self.flatten_byteorder(obj, data)
+            self.flatten_flags(obj, data)
+
+            if not obj.flags.c_contiguous:
+                data['order'] = 'F'
+
+        return data
+
+    def restore(self, data):
+        """decode numpy from json"""
+        values = data['values']
+        if isinstance(values, list):
+            # decode text representation
+            arr = super(NumpyNDArrayHandlerBinary, self).restore(data)
+        else:
+            # decode binary representation
+            buffer = b64decode(values)
+            if self.compression:
+                buffer = self.compression.decompress(buffer)
+            arr = np.ndarray(
+                buffer=buffer,
+                dtype=self.restore_dtype(data),
+                shape=data.get('shape'),
+                order=data.get('order', 'C')
+            ).copy() # make a copy, to force the result to own the data
+            self.restore_byteorder(data, arr)
+            self.restore_flags(data, arr)
+
+        return arr
+
+
+class NumpyNDArrayHandlerView(NumpyNDArrayHandlerBinary):
+    """Pickles references inside ndarrays, or array-views
+
+    Notes
+    -----
+    The current implementation has some restrictions.
+
+    'base' arrays, or arrays which are viewed by other arrays, must be f-or-c-contiguous.
+    This is not such a large restriction in practice, because all numpy array creation is c-contiguous by default.
+    Relaxing this restriction would be nice though; especially if it can be done without bloating the design too much.
+
+    Furthermore, ndarrays which are views of array-like objects implementing __array_interface__,
+    but which are not themselves nd-arrays, are deepcopied with a warning (by default),
+    as we cannot guarantee whatever custom logic such classes implement is correctly reproduced.
+    """
+    def __init__(self, mode='warn', size_treshold=16, compression=zlib):
+        """
+        :param mode: {'warn', 'raise', 'ignore'}
+            How to react when encountering array-like objects whose references we cannot safely serialize
+        :param size_treshold: nonnegative int or None
+            valid values for 'size_treshold' are all nonnegative integers and None
+            if size_treshold is None, values are always stored as nested lists
+        :param compression: a compression module or None
+            valid values for 'compression' are {zlib, bz2, None}
+            if compression is None, no compression is applied
+        """
+        super(NumpyNDArrayHandlerView, self).__init__(size_treshold, compression)
+        self.mode = mode
+
+    def flatten(self, obj, data):
+        """encode numpy to json"""
+        base = obj.base
+        if base is None and obj.flags.forc:
+            # store by value
+            data = super(NumpyNDArrayHandlerView, self).flatten(obj, data)
+            # ensure that views on arrays stored as text are interpreted correctly
+            if not obj.flags.c_contiguous:
+                data['order'] = 'F'
+        elif isinstance(base, np.ndarray) and base.flags.forc:
+            # store by reference
+            data['base'] = self.context.flatten(base, reset=False)
+
+            offset = obj.ctypes.data - base.ctypes.data
+            if offset:
+                data['offset'] = offset
+
+            if not obj.flags.c_contiguous:
+                data['strides'] = obj.strides
+
+            data['shape'] = obj.shape
+            self.flatten_dtype(obj.dtype.newbyteorder('N'), data)
+            self.flatten_flags(obj, data)
+
+            if get_byteorder(obj) != '|':
+                byteorder = 'S' if get_byteorder(obj) != get_byteorder(base) else None
+                if byteorder:
+                    data['byteorder'] = byteorder
+
+            if self.size_treshold is None or self.size_treshold >= obj.size:
+                # not used in restore since base is present, but include values for human-readability
+                super(NumpyNDArrayHandlerBinary, self).flatten(obj, data)
+        else:
+            # store a deepcopy or fail
+            if self.mode == 'warn':
+                msg = "ndarray is defined by reference to an object we do not know how to serialize. " \
+                      "A deep copy is serialized instead, breaking memory aliasing."
+                warnings.warn(msg)
+            elif self.mode == 'raise':
+                msg = "ndarray is defined by reference to an object we do not know how to serialize."
+                raise ValueError(msg)
+            data = super(NumpyNDArrayHandlerView, self).flatten(obj.copy(), data)
+
+        return data
+
+    def restore(self, data):
+        """decode numpy from json"""
+        base = data.get('base', None)
+        if base is None:
+            # decode array with owndata=True
+            arr = super(NumpyNDArrayHandlerView, self).restore(data)
+        else:
+            # decode array view, which references the data of another array
+            base = self.context.restore(base, reset=False)
+            assert base.flags.forc, \
+                "Current implementation assumes base is C or F contiguous"
+
+            arr = np.ndarray(
+                buffer=base.data,
+                dtype=self.restore_dtype(data).newbyteorder(data.get('byteorder', '|')),
+                shape=data.get('shape'),
+                offset=data.get('offset', 0),
+                strides=data.get('strides', None)
+            )
+
+            self.restore_flags(data, arr)
+
+        return arr
 
 
 def register_handlers():
-    jsonpickle.handlers.register(np.dtype, NumpyDTypeHandler, base=True)
-    jsonpickle.handlers.register(np.generic, NumpyGenericHandler, base=True)
-    jsonpickle.handlers.register(np.ndarray, NumpyNDArrayHandler, base=True)
+    register(np.dtype, NumpyDTypeHandler, base=True)
+    register(np.generic, NumpyGenericHandler, base=True)
+    register(np.ndarray, NumpyNDArrayHandlerView(), base=True)
 
 
 def unregister_handlers():
-    jsonpickle.handlers.unregister(np.dtype)
-    jsonpickle.handlers.unregister(np.generic)
-    jsonpickle.handlers.unregister(np.ndarray)
+    unregister(np.dtype)
+    unregister(np.generic)
+    unregister(np.ndarray)
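
Since register_handlers() now installs a configured instance
(NumpyNDArrayHandlerView()) rather than a class, a differently-tuned instance
can be swapped in. A hedged sketch using the constructor parameters defined in
this file ('size_treshold' is spelled as in the source; bz2 is one of the
documented compression choices):

    import bz2

    import numpy as np
    import jsonpickle.ext.numpy as jsonpickle_numpy
    from jsonpickle.handlers import register

    jsonpickle_numpy.register_handlers()  # install the defaults first
    # then overwrite the ndarray registration with a stricter,
    # bz2-compressed handler instance
    register(np.ndarray,
             jsonpickle_numpy.NumpyNDArrayHandlerView(
                 mode='raise', size_treshold=128, compression=bz2),
             base=True)
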
diff --git a/jsonpickle/handlers.py b/jsonpickle/handlers.py
index 53f12b9..44d6740 100644
--- a/jsonpickle/handlers.py
+++ b/jsonpickle/handlers.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-
 """
 Custom handlers may be created to handle other objects. Each custom handler
 must derive from :class:`jsonpickle.handlers.BaseHandler` and
@@ -21,7 +20,7 @@ objects that implement the reduce protocol::
     handlers.register(MyCustomObject, handlers.SimpleReduceHandler)
 
 """
-
+from __future__ import absolute_import, division, unicode_literals
 import collections
 import copy
 import datetime
@@ -31,9 +30,9 @@ import sys
 import time
 import uuid
 
-from jsonpickle import util
-from jsonpickle.compat import unicode
-from jsonpickle.compat import queue
+from . import util
+from .compat import queue
+from .compat import unicode
 
 
 class Registry(object):
@@ -64,11 +63,12 @@ class Registry(object):
         :param handler: The custom handler class (if None, a decorator wrapper is returned)
         :param base: Indicates whether the handler should be registered for all subclasses
 
-        This function can be also used as a decorator by omitting the `handler` argument:
+        This function can also be used as a decorator by omitting the `handler` argument::
+
+            @jsonpickle.handlers.register(Foo, base=True)
+            class FooHandler(jsonpickle.handlers.BaseHandler):
+                pass
 
-        @jsonpickle.handlers.register(Foo, base=True)
-        class FooHandler(jsonpickle.handlers.BaseHandler):
-            pass
         """
         if handler is None:
             def _register(handler_cls):
@@ -108,6 +108,15 @@ class BaseHandler(object):
         """
         self.context = context
 
+    def __call__(self, context):
+        """This permits registering either Handler instances or classes
+
+        :Parameters:
+          - `context`: reference to pickler/unpickler
+        """
+        self.context = context
+        return self
+
     def flatten(self, obj, data):
         """
         Flatten `obj` into a json-friendly form and write result to `data`.
@@ -188,67 +197,8 @@ class RegexHandler(BaseHandler):
     def restore(self, data):
         return re.compile(data['pattern'])
 
-RegexHandler.handles(type(re.compile('')))
-
-
-class SimpleReduceHandler(BaseHandler):
-    """Follow the __reduce__ protocol to pickle an object.
-
-    As long as the factory and its arguments are pickleable, this should
-    pickle any object that implements the reduce protocol.
-
-    """
-    def flatten(self, obj, data):
-        flatten = self.context.flatten
-        data['__reduce__'] = [flatten(i, reset=False) for i in obj.__reduce__()]
-        return data
-
-    def restore(self, data):
-        restore = self.context.restore
-        factory, args = [restore(i, reset=False) for i in data['__reduce__']]
-        return factory(*args)
-
-
-class OrderedDictReduceHandler(SimpleReduceHandler):
-    """Serialize OrderedDict on Python 3.4+
 
-    Python 3.4+ returns multiple entries in an OrderedDict's
-    reduced form.  Previous versions return a two-item tuple.
-    OrderedDictReduceHandler makes the formats compatible.
-
-    """
-    def flatten(self, obj, data):
-        # __reduce__() on older pythons returned a list of
-        # [key, value] list pairs inside a tuple.
-        # Recreate that structure so that the file format
-        # is consistent between python versions.
-        flatten = self.context.flatten
-        reduced = obj.__reduce__()
-        factory = flatten(reduced[0], reset=False)
-        pairs = [list(x) for x in reduced[-1]]
-        args = flatten((pairs,), reset=False)
-        data['__reduce__'] = [factory, args]
-        return data
-
-
-SimpleReduceHandler.handles(time.struct_time)
-SimpleReduceHandler.handles(datetime.timedelta)
-SimpleReduceHandler.handles(collections.deque)
-if sys.version_info >= (2, 7):
-    SimpleReduceHandler.handles(collections.Counter)
-    if sys.version_info >= (3, 4):
-        OrderedDictReduceHandler.handles(collections.OrderedDict)
-    else:
-        SimpleReduceHandler.handles(collections.OrderedDict)
-
-if sys.version_info >= (3, 0):
-    SimpleReduceHandler.handles(decimal.Decimal)
-
-try:
-    import posix
-    SimpleReduceHandler.handles(posix.stat_result)
-except ImportError:
-    pass
+RegexHandler.handles(type(re.compile('')))
 
 
 class QueueHandler(BaseHandler):
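
The new BaseHandler.__call__ above is what makes instance registration work:
when the pickler "instantiates" a registered handler, an instance simply
rebinds its context and returns itself. A minimal sketch (Point and
PointHandler are hypothetical names, not part of jsonpickle):

    from jsonpickle import handlers

    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y

    class PointHandler(handlers.BaseHandler):
        def flatten(self, obj, data):
            data['xy'] = [obj.x, obj.y]
            return data

        def restore(self, data):
            return Point(*data['xy'])

    # registering a pre-built instance now works, not only the class
    handlers.register(Point, PointHandler())
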
diff --git a/jsonpickle/pickler.py b/jsonpickle/pickler.py
index f616f4e..b1e1d4c 100644
--- a/jsonpickle/pickler.py
+++ b/jsonpickle/pickler.py
@@ -1,23 +1,22 @@
 # -*- coding: utf-8 -*-
 #
 # Copyright (C) 2008 John Paulett (john -at- paulett.org)
-# Copyright (C) 2009, 2011, 2013 David Aguilar (davvid -at- gmail.com)
+# Copyright (C) 2009, 2011, 2013 David Aguilar (davvid -at- gmail.com) and contributors
 # All rights reserved.
 #
 # This software is licensed as described in the file COPYING, which
 # you should have received as part of this distribution.
-
+from __future__ import absolute_import, division, unicode_literals
+import base64
 import warnings
 import sys
-import quopri
 from itertools import chain, islice
 
-import jsonpickle.util as util
-import jsonpickle.tags as tags
-import jsonpickle.handlers as handlers
-
-from jsonpickle.backend import JSONBackend
-from jsonpickle.compat import numeric_types, unicode, PY3, PY2
+from . import util
+from . import tags
+from . import handlers
+from .backend import JSONBackend
+from .compat import numeric_types, unicode, PY3, PY2
 
 
 def encode(value,
@@ -251,7 +250,7 @@ class Pickler(object):
                 return obj.decode('utf-8')
             except:
                 pass
-        return {tags.BYTES: quopri.encodestring(obj).decode('utf-8')}
+        return {tags.B64: base64.encodestring(obj).decode('utf-8')}
 
     def _flatten_obj_instance(self, obj):
         """Recursively flatten an instance and return a json-friendly dict
@@ -284,48 +283,79 @@ class Pickler(object):
             return handler(self).flatten(obj, data)
 
         reduce_val = None
-        if has_class and not util.is_module(obj):
-            if self.unpicklable:
-                class_name = util.importable_name(cls)
-                data[tags.OBJECT] = class_name
 
-            # test for a reduce implementation, and redirect before doing anything else
-            # if that is what reduce requests
-            if has_reduce_ex:
+        if self.unpicklable:
+            if has_reduce and not has_reduce_ex:
                 try:
-                    # we're implementing protocol 2
-                    reduce_val = obj.__reduce_ex__(2)
+                    reduce_val = obj.__reduce__()
                 except TypeError:
-                    # A lot of builtin types have a reduce which just raises a TypeError
+                    # A lot of builtin types have a reduce which
+                    # just raises a TypeError
                     # we ignore those
                     pass
 
-            if has_reduce and not reduce_val:
+            # test for a reduce implementation, and redirect before
+            # doing anything else if that is what reduce requests
+            elif has_reduce_ex:
                 try:
-                    reduce_val = obj.__reduce__()
+                    # we're implementing protocol 2
+                    reduce_val = obj.__reduce_ex__(2)
                 except TypeError:
-                    # A lot of builtin types have a reduce which just raises a TypeError
+                    # A lot of builtin types have a reduce which
+                    # just raises a TypeError
                     # we ignore those
                     pass
 
-            if reduce_val:
+            if reduce_val and isinstance(reduce_val, (str, unicode)):
                 try:
-                    # At this stage, we only handle the case where __reduce__ returns a string
-                    # other reduce functionality is implemented further down
-                    if isinstance(reduce_val, (str, unicode)):
-                        varpath = iter(reduce_val.split('.'))
-                        # curmod will be transformed by the loop into the value to pickle
-                        curmod = sys.modules[next(varpath)]
-                        for modname in varpath:
-                            curmod = getattr(curmod, modname)
-                            # replace obj with value retrieved
+                    varpath = iter(reduce_val.split('.'))
+                    # curmod will be transformed by the
+                    # loop into the value to pickle
+                    curmod = sys.modules[next(varpath)]
+                    for modname in varpath:
+                        curmod = getattr(curmod, modname)
+                        # replace obj with value retrieved
                         return self._flatten(curmod)
                 except KeyError:
                     # well, we can't do anything with that, so we ignore it
                     pass
 
+            elif reduce_val:
+                # at this point, reduce_val should be some kind of iterable
+                # pad out to len 5
+                rv_as_list = list(reduce_val)
+                insufficiency = 5 - len(rv_as_list)
+                if insufficiency:
+                    rv_as_list += [None] * insufficiency
+
+                if rv_as_list[0].__name__ == '__newobj__':
+                    rv_as_list[0] = tags.NEWOBJ
+
+                f, args, state, listitems, dictitems = rv_as_list
+
+                # check that getstate/setstate is sane
+                if not (state and hasattr(obj, '__getstate__')
+                            and not hasattr(obj, '__setstate__')
+                            and not isinstance(obj, dict)):
+                    # turn iterators to iterables for convenient serialization
+                    if rv_as_list[3]:
+                        rv_as_list[3] = tuple(rv_as_list[3])
+
+                    if rv_as_list[4]:
+                        rv_as_list[4] = tuple(rv_as_list[4])
+
+                    data[tags.REDUCE] = list(map(self._flatten, rv_as_list))
+
+                    return data
+
+        if has_class and not util.is_module(obj):
+            if self.unpicklable:
+                class_name = util.importable_name(cls)
+                data[tags.OBJECT] = class_name
+
             if has_getnewargs_ex:
-                data[tags.NEWARGSEX] = list(map(self._flatten, obj.__getnewargs_ex__()))
+                data[tags.NEWARGSEX] = list(
+                    map(self._flatten, obj.__getnewargs_ex__()))
 
             if has_getnewargs and not has_getnewargs_ex:
                 data[tags.NEWARGS] = self._flatten(obj.__getnewargs__())
@@ -359,37 +389,11 @@ class Pickler(object):
         if util.is_sequence_subclass(obj):
             return self._flatten_sequence_obj(obj, data)
 
-        if util.is_noncomplex(obj):
-            return [self._flatten(v) for v in obj]
-
         if util.is_iterator(obj):
             # force list in python 3
             data[tags.ITERATOR] = list(map(self._flatten, islice(obj, self._max_iter)))
             return data
 
-        if reduce_val and not isinstance(reduce_val, (str, unicode)):
-            # at this point, reduce_val should be some kind of iterable
-            # pad out to len 5
-            rv_as_list = list(reduce_val)
-            insufficiency = 5 - len(rv_as_list)
-            if insufficiency:
-                rv_as_list += [None] * insufficiency
-
-            if rv_as_list[0].__name__ == '__newobj__':
-                rv_as_list[0] = tags.NEWOBJ
-
-            data[tags.REDUCE] = list(map(self._flatten, rv_as_list))
-
-            # lift out iterators, so we don't have to iterator and uniterator their content
-            # on unpickle
-            if data[tags.REDUCE][3]:
-                data[tags.REDUCE][3] = data[tags.REDUCE][3][tags.ITERATOR]
-
-            if data[tags.REDUCE][4]:
-                data[tags.REDUCE][4] = data[tags.REDUCE][4][tags.ITERATOR]
-
-            return data
-
         if has_dict:
             # Support objects that subclasses list and set
             if util.is_sequence_subclass(obj):
@@ -532,8 +536,8 @@ class Pickler(object):
 def _mktyperef(obj):
     """Return a typeref dictionary
 
-    >>> _mktyperef(AssertionError)
-    {'py/type': '__builtin__.AssertionError'}
+    >>> _mktyperef(AssertionError) == {'py/type': '__builtin__.AssertionError'}
+    True
 
     """
     return {tags.TYPE: util.importable_name(obj)}
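
The rework above probes __reduce__/__reduce_ex__ before the generic instance
path, so reduce-based round trips no longer depend on the later sequence/dict
branches. A hedged sketch (Interval is a hypothetical class; for decode to
find it, it must live in an importable module; the tag name follows
tags.REDUCE):

    import jsonpickle

    class Interval(object):
        def __init__(self, lo, hi):
            self.lo, self.hi = lo, hi

        def __reduce__(self):
            # callable plus argument tuple, per the pickle protocol
            return (Interval, (self.lo, self.hi))

    frozen = jsonpickle.encode(Interval(1, 5))  # stored under the py/reduce tag
    thawed = jsonpickle.decode(frozen)
    assert (thawed.lo, thawed.hi) == (1, 5)
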
diff --git a/jsonpickle/tags.py b/jsonpickle/tags.py
index 2350f79..4885fea 100644
--- a/jsonpickle/tags.py
+++ b/jsonpickle/tags.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-
 """The jsonpickle.tags module provides the custom tags
 used for pickling and unpickling Python objects.
 
@@ -8,10 +7,12 @@ created by the Pickler class.  The Unpickler uses
 these custom key names to identify dictionaries
 that need to be specially handled.
 """
-from jsonpickle.compat import set
+from __future__ import absolute_import, division, unicode_literals
+from .compat import set
 
 
 BYTES = 'py/bytes'
+B64 = 'py/b64'
 FUNCTION = 'py/function'
 ID = 'py/id'
 INITARGS = 'py/initargs'
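
Paired with the pickler change from quopri to base64, the new B64 tag is what
non-UTF-8 byte payloads serialize under. A hedged Python 3 sketch (the exact
JSON layout can vary by backend):

    >>> import jsonpickle
    >>> frozen = jsonpickle.encode(b'\xff\x00')  # not decodable as UTF-8
    >>> '"py/b64"' in frozen
    True
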
diff --git a/jsonpickle/unpickler.py b/jsonpickle/unpickler.py
index 737cadd..164118c 100644
--- a/jsonpickle/unpickler.py
+++ b/jsonpickle/unpickler.py
... 978 lines suppressed ...

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/python-modules/packages/jsonpickle.git


