Hello community,
here is the log from the commit of package python-jsonpickle for
openSUSE:Factory checked in at 2019-06-22 11:22:57
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-jsonpickle (Old)
and /work/SRC/openSUSE:Factory/.python-jsonpickle.new.4615 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "python-jsonpickle"
Sat Jun 22 11:22:57 2019 rev:4 rq:710340 version:1.2
Changes:
--------
--- /work/SRC/openSUSE:Factory/python-jsonpickle/python-jsonpickle.changes
2019-03-20 13:19:26.929341552 +0100
+++
/work/SRC/openSUSE:Factory/.python-jsonpickle.new.4615/python-jsonpickle.changes
2019-06-22 11:22:58.505308569 +0200
@@ -1,0 +2,7 @@
+Mon Jun 17 11:54:50 UTC 2019 - Tomáš Chvátal <[email protected]>
+
+- Update to 1.2:
+ * support new sqlalchemy
+- Remove merged patch sqlalchemy13.patch
+
+-------------------------------------------------------------------
Old:
----
jsonpickle-1.1.tar.gz
sqlalchemy13.patch
New:
----
jsonpickle-1.2.tar.gz
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Other differences:
------------------
++++++ python-jsonpickle.spec ++++++
--- /var/tmp/diff_new_pack.iz2PU8/_old 2019-06-22 11:22:59.121309241 +0200
+++ /var/tmp/diff_new_pack.iz2PU8/_new 2019-06-22 11:22:59.125309245 +0200
@@ -18,14 +18,13 @@
%{?!python_module:%define python_module() python-%{**} python3-%{**}}
Name: python-jsonpickle
-Version: 1.1
+Version: 1.2
Release: 0
Summary:        Python library for serializing any arbitrary object graph into JSON
License: BSD-3-Clause
Group: Development/Languages/Python
URL: https://github.com/jsonpickle/jsonpickle
Source:
https://files.pythonhosted.org/packages/source/j/jsonpickle/jsonpickle-%{version}.tar.gz
-Patch0: sqlalchemy13.patch
BuildRequires: %{python_module setuptools}
BuildRequires: fdupes
BuildRequires: python-rpm-macros
@@ -57,7 +56,6 @@
%prep
%setup -q -n jsonpickle-%{version}
-%patch0 -p1
%build
%python_build
++++++ jsonpickle-1.1.tar.gz -> jsonpickle-1.2.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/PKG-INFO new/jsonpickle-1.2/PKG-INFO
--- old/jsonpickle-1.1/PKG-INFO 2019-01-23 08:29:21.000000000 +0100
+++ new/jsonpickle-1.2/PKG-INFO 2019-05-24 07:17:33.000000000 +0200
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: jsonpickle
-Version: 1.1
+Version: 1.2
Summary: Python library for serializing any arbitrary object graph into JSON
Home-page: https://jsonpickle.github.io/
Author: David Aguilar
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/docs/source/changelog.rst
new/jsonpickle-1.2/docs/source/changelog.rst
--- old/jsonpickle-1.1/docs/source/changelog.rst 2019-01-23
08:29:08.000000000 +0100
+++ new/jsonpickle-1.2/docs/source/changelog.rst 2019-05-24
07:17:18.000000000 +0200
@@ -1,6 +1,38 @@
Change Log
==========
+Version 1.2 - May 15, 2019
+--------------------------
+ * Simplified JSON representation for `__reduce__` values.
+ (`#261 <https://github.com/jsonpickle/jsonpickle/pull/261>`_)
+
+ * Improved Pandas support with new handlers for more Pandas data types.
+ (`#256 <https://github.com/jsonpickle/jsonpickle/pull/256>`_)
+
+ * Prevent stack overflows caused by bugs in user-defined `__getstate__`
+ functions which cause infinite recursion.
+ (`#260 <https://github.com/jsonpickle/jsonpickle/pull/260>`_)
+ (`#259 <https://github.com/jsonpickle/jsonpickle/issues/259>`_)
+
+ * Improved support for objects that contain dicts with Integer keys.
+ Previously, jsonpickle could not restore objects that contained
+ dicts with integer keys and provided getstate only.
+ These objects are now handled robustly.
+ (`#247 <https://github.com/jsonpickle/jsonpickle/issues/247>`_).
+
+    * Support for encoding binary data in `base85`_ instead of base64 has been
+      added on Python 3. Base85 produces payloads about 10% smaller than base64,
+      albeit at the cost of lower throughput. For performance and backwards
+      compatibility with Python 2 the pickler uses base64 by default, but it can
+      be configured to use ``base85`` with the new ``use_base85`` argument.
+      (`#251 <https://github.com/jsonpickle/jsonpickle/issues/251>`_).
+
+ * Dynamic SQLAlchemy tables in SQLAlchemy >= 1.3 are now supported.
+ (`#254 <https://github.com/jsonpickle/jsonpickle/issues/254>`_).
+
+.. _base85: https://en.wikipedia.org/wiki/Ascii85
+
+
Version 1.1 - January 22, 2019
------------------------------
* Python 3.7 `collections.Iterator` deprecation warnings have been fixed.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/jsonpickle/backend.py
new/jsonpickle-1.2/jsonpickle/backend.py
--- old/jsonpickle-1.1/jsonpickle/backend.py 2019-01-23 08:29:08.000000000
+0100
+++ new/jsonpickle-1.2/jsonpickle/backend.py 2019-05-24 07:17:18.000000000
+0200
@@ -122,8 +122,8 @@
self._decoder_exceptions[name] = loads_exc
# Setup the default args and kwargs for this encoder/decoder
- self._encoder_options[name] = ([], {})
- self._decoder_options[name] = ([], {})
+ self._encoder_options.setdefault(name, ([], {}))
+ self._decoder_options.setdefault(name, ([], {}))
# Add this backend to the list of candidate backends
self._backend_names.append(name)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/jsonpickle/ext/pandas.py
new/jsonpickle-1.2/jsonpickle/ext/pandas.py
--- old/jsonpickle-1.1/jsonpickle/ext/pandas.py 2019-01-23 08:29:08.000000000
+0100
+++ new/jsonpickle-1.2/jsonpickle/ext/pandas.py 2019-05-24 07:17:18.000000000
+0200
@@ -4,9 +4,11 @@
from io import StringIO
import zlib
+from .. import encode, decode
from ..handlers import BaseHandler, register, unregister
from ..util import b64decode, b64encode
from ..backend import json
+from .numpy import register_handlers as register_numpy_handlers,
unregister_handlers as unregister_numpy_handlers
__all__ = ['register_handlers', 'unregister_handlers']
@@ -75,21 +77,18 @@
def flatten(self, obj, data):
dtype = obj.dtypes.to_dict()
- # Handles named multi-indexes
- index_col = list(obj.index.names) if list(obj.index.names) != [None]
else 0
-
meta = {'dtypes': {k: str(dtype[k]) for k in dtype},
- 'index_col': index_col}
+ 'index': encode(obj.index)}
- data = self.pp.flatten_pandas(obj.to_csv(), data, meta)
+ data =
self.pp.flatten_pandas(obj.reset_index(drop=True).to_csv(index=False), data,
meta)
return data
def restore(self, data):
csv, meta = self.pp.restore_pandas(data)
params = make_read_csv_params(meta)
df = pd.read_csv(StringIO(csv),
- index_col=meta.get('index_col', None),
**params)
+ df.set_index(decode(meta["index"]), inplace=True)
return df
@@ -117,27 +116,102 @@
class PandasIndexHandler(BaseHandler):
pp = PandasProcessor()
+ index_constructor = pd.Index
+ name_bundler = lambda _, obj: {'name': obj.name}
+
def flatten(self, obj, data):
- meta = {'dtype': str(obj.dtype), 'name': obj.name}
- buf = json.dumps(obj.tolist())
+ name_bundle = self.name_bundler(obj)
+ meta = dict(dtype= str(obj.dtype), **name_bundle)
+ buf = encode(obj.tolist())
data = self.pp.flatten_pandas(buf, data, meta)
return data
def restore(self, data):
buf, meta = self.pp.restore_pandas(data)
dtype = meta.get('dtype', None)
- name = meta.get('name', None)
- idx = pd.Index(json.loads(buf), dtype=dtype, name=name)
+ name_bundle = {k: v for k, v in meta.items() if k in {'name', 'names'}}
+ idx = self.index_constructor(decode(buf), dtype=dtype, **name_bundle)
return idx
+class PandasPeriodIndexHandler(PandasIndexHandler):
+ index_constructor = pd.PeriodIndex
+
+
+class PandasMultiIndexHandler(PandasIndexHandler):
+ name_bundler = lambda _, obj: {'names': obj.names}
+
+
+class PandasTimestampHandler(BaseHandler):
+ pp = PandasProcessor()
+
+ def flatten(self, obj, data):
+ meta = {"isoformat": obj.isoformat()}
+ buf = ""
+ data = self.pp.flatten_pandas(buf, data, meta)
+ return data
+
+ def restore(self, data):
+ _, meta = self.pp.restore_pandas(data)
+ isoformat = meta['isoformat']
+ obj = pd.Timestamp(isoformat)
+ return obj
+
+
+class PandasPeriodHandler(BaseHandler):
+ pp = PandasProcessor()
+
+ def flatten(self, obj, data):
+ meta = {"start_time": encode(obj.start_time), "freqstr": obj.freqstr}
+ buf = ""
+ data = self.pp.flatten_pandas(buf, data, meta)
+ return data
+
+ def restore(self, data):
+ _, meta = self.pp.restore_pandas(data)
+ start_time = decode(meta['start_time'])
+ freqstr = meta['freqstr']
+ obj = pd.Period(start_time, freqstr)
+ return obj
+
+
+class PandasIntervalHandler(BaseHandler):
+ pp = PandasProcessor()
+
+ def flatten(self, obj, data):
+ meta = {"left": encode(obj.left), "right": encode(obj.right),
"closed": obj.closed}
+ buf = ""
+ data = self.pp.flatten_pandas(buf, data, meta)
+ return data
+
+ def restore(self, data):
+ _, meta = self.pp.restore_pandas(data)
+ left = decode(meta['left'])
+ right = decode(meta['right'])
+ closed = str(meta['closed'])
+ obj = pd.Interval(left, right, closed=closed)
+ return obj
+
+
def register_handlers():
+ register_numpy_handlers()
register(pd.DataFrame, PandasDfHandler, base=True)
register(pd.Series, PandasSeriesHandler, base=True)
register(pd.Index, PandasIndexHandler, base=True)
+ register(pd.PeriodIndex, PandasPeriodIndexHandler, base=True)
+ register(pd.MultiIndex, PandasMultiIndexHandler, base=True)
+ register(pd.Timestamp, PandasTimestampHandler, base=True)
+ register(pd.Period, PandasPeriodHandler, base=True)
+ register(pd.Interval, PandasIntervalHandler, base=True)
def unregister_handlers():
+ unregister_numpy_handlers()
unregister(pd.DataFrame)
unregister(pd.Series)
unregister(pd.Index)
+ unregister(pd.PeriodIndex)
+ unregister(pd.MultiIndex)
+ unregister(pd.Timestamp)
+ unregister(pd.Period)
+ unregister(pd.Interval)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/jsonpickle/pickler.py
new/jsonpickle-1.2/jsonpickle/pickler.py
--- old/jsonpickle-1.1/jsonpickle/pickler.py 2019-01-23 08:29:08.000000000
+0100
+++ new/jsonpickle-1.2/jsonpickle/pickler.py 2019-05-24 07:17:18.000000000
+0200
@@ -5,6 +5,7 @@
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
from __future__ import absolute_import, division, unicode_literals
+import decimal
import warnings
import sys
import types
@@ -15,7 +16,7 @@
from . import tags
from . import handlers
from .backend import json
-from .compat import numeric_types, string_types, PY3, PY2, encodebytes
+from .compat import numeric_types, string_types, PY3, PY2
def encode(value,
@@ -28,7 +29,9 @@
warn=False,
context=None,
max_iter=None,
- numeric_keys=False):
+ use_decimal=False,
+ numeric_keys=False,
+ use_base85=False):
"""Return a JSON formatted representation of value, a Python object.
:param unpicklable: If set to False then the output will not contain the
@@ -50,6 +53,19 @@
(e.g. file descriptors).
:param max_iter: If set to a non-negative integer then jsonpickle will
consume at most `max_iter` items when pickling iterators.
+ :param use_decimal: If set to True jsonpickle will allow Decimal
+ instances to pass-through, with the assumption that the simplejson
+ backend will be used in `use_decimal` mode. In order to use this mode
+ you will need to configure simplejson::
+
+ jsonpickle.set_encoder_options('simplejson',
+ use_decimal=True, sort_keys=True)
+ jsonpickle.set_decoder_options('simplejson',
+ use_decimal=True)
+ jsonpickle.set_preferred_backend('simplejson')
+
+ NOTE: A side-effect of the above settings is that float values will be
+ converted to Decimal when converting to json.
>>> encode('my string') == '"my string"'
True
@@ -60,7 +76,10 @@
>>> encode({'foo': [1, 2, [3, 4]]}, max_depth=1)
'{"foo": "[1, 2, [3, 4]]"}'
-
+ :param use_base85:
+ If possible, use base85 to encode binary data. Base85 bloats binary
data
+ by 1/4 as opposed to base64, which expands it by 1/3. This argument is
+ ignored on Python 2 because it doesn't support it.
"""
backend = backend or json
context = context or Pickler(
@@ -71,7 +90,9 @@
max_depth=max_depth,
warn=warn,
max_iter=max_iter,
- numeric_keys=numeric_keys)
+ numeric_keys=numeric_keys,
+ use_decimal=use_decimal,
+ use_base85=use_base85)
return backend.encode(context.flatten(value, reset=reset))
@@ -85,13 +106,16 @@
keys=False,
warn=False,
max_iter=None,
- numeric_keys=False):
+ numeric_keys=False,
+ use_decimal=False,
+ use_base85=False):
self.unpicklable = unpicklable
self.make_refs = make_refs
self.backend = backend or json
self.keys = keys
self.warn = warn
self.numeric_keys = numeric_keys
+ self.use_base85 = use_base85 and (not PY2)
# The current recursion depth
self._depth = -1
# The maximal recursion depth
@@ -102,6 +126,15 @@
self._seen = []
# maximum amount of items to take from a pickled iterator
self._max_iter = max_iter
+ # Whether to allow decimals to pass-through
+ self._use_decimal = use_decimal
+
+ if self.use_base85:
+ self._bytes_tag = tags.B85
+ self._bytes_encoder = util.b85encode
+ else:
+ self._bytes_tag = tags.B64
+ self._bytes_encoder = util.b64encode
def reset(self):
self._objs = {}
@@ -219,6 +252,10 @@
if util.is_primitive(obj):
return lambda obj: obj
+ # Decimal is a primitive when use_decimal is True
+ if self._use_decimal and isinstance(obj, decimal.Decimal):
+ return lambda obj: obj
+
list_recurse = self._list_recurse
if util.is_list(obj):
@@ -278,9 +315,9 @@
if PY2:
try:
return obj.decode('utf-8')
- except Exception:
+ except UnicodeDecodeError:
pass
- return {tags.B64: encodebytes(obj).decode('utf-8')}
+ return {self._bytes_tag: self._bytes_encoder(obj)}
def _flatten_obj_instance(self, obj):
"""Recursively flatten an instance and return a json-friendly dict
@@ -374,13 +411,16 @@
if rv_as_list[4]:
rv_as_list[4] = tuple(rv_as_list[4])
- data[tags.REDUCE] = list(map(self._flatten, rv_as_list))
+ reduce_args = list(map(self._flatten, rv_as_list))
+ last_index = len(reduce_args) - 1
+ while last_index >= 2 and reduce_args[last_index] is None:
+ last_index -= 1
+ data[tags.REDUCE] = reduce_args[:last_index+1]
return data
if has_class and not util.is_module(obj):
if self.unpicklable:
- class_name = util.importable_name(cls)
data[tags.OBJECT] = class_name
if has_getnewargs_ex:
@@ -556,7 +596,7 @@
make_refs=self.make_refs)
def _getstate(self, obj, data):
- state = self._flatten_obj(obj)
+ state = self._flatten(obj)
if self.unpicklable:
data[tags.STATE] = state
else:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/jsonpickle/tags.py
new/jsonpickle-1.2/jsonpickle/tags.py
--- old/jsonpickle-1.1/jsonpickle/tags.py 2019-01-23 08:29:08.000000000
+0100
+++ new/jsonpickle-1.2/jsonpickle/tags.py 2019-05-24 07:17:18.000000000
+0200
@@ -11,6 +11,7 @@
BYTES = 'py/bytes'
B64 = 'py/b64'
+B85 = 'py/b85'
FUNCTION = 'py/function'
ID = 'py/id'
INITARGS = 'py/initargs'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/jsonpickle/unpickler.py
new/jsonpickle-1.2/jsonpickle/unpickler.py
--- old/jsonpickle-1.1/jsonpickle/unpickler.py 2019-01-23 08:29:08.000000000
+0100
+++ new/jsonpickle-1.2/jsonpickle/unpickler.py 2019-05-24 07:17:18.000000000
+0200
@@ -8,10 +8,11 @@
import quopri
import sys
+from . import compat
from . import util
from . import tags
from . import handlers
-from .compat import numeric_types, decodebytes
+from .compat import numeric_types
from .backend import json
@@ -41,8 +42,12 @@
def _safe_hasattr(obj, attr):
- """A safe (but slow) hasattr() that avoids hasattr"""
- return attr in dir(obj)
+ """Workaround unreliable hasattr() availability on sqlalchemy objects"""
+ try:
+ object.__getattribute__(obj, attr)
+ return True
+ except AttributeError:
+ return False
class _Proxy(object):
@@ -168,6 +173,8 @@
def _restore(self, obj):
if has_tag(obj, tags.B64):
restore = self._restore_base64
+ elif has_tag(obj, tags.B85):
+ restore = self._restore_base85
elif has_tag(obj, tags.BYTES): # Backwards compatibility
restore = self._restore_quopri
elif has_tag(obj, tags.ID):
@@ -200,7 +207,10 @@
return restore(obj)
def _restore_base64(self, obj):
- return decodebytes(obj[tags.B64].encode('utf-8'))
+ return util.b64decode(obj[tags.B64].encode('utf-8'))
+
+ def _restore_base85(self, obj):
+ return util.b85decode(obj[tags.B85].encode('utf-8'))
#: For backwards compatibility with bytes data produced by older versions
def _restore_quopri(self, obj):
@@ -217,8 +227,10 @@
"""
proxy = _Proxy()
self._mkref(proxy)
- reduce_val = obj[tags.REDUCE]
- f, args, state, listitems, dictitems = map(self._restore, reduce_val)
+ reduce_val = list(map(self._restore, obj[tags.REDUCE]))
+ if len(reduce_val) < 5:
+ reduce_val.extend([None] * (5 - len(reduce_val)))
+ f, args, state, listitems, dictitems = reduce_val
if f == tags.NEWOBJ or getattr(f, '__name__', '') == '__newobj__':
# mandated special case
@@ -527,7 +539,9 @@
return restore_key
def _restore_pickled_key(self, key):
- if key.startswith(tags.JSON_KEY):
+ """Restore a possibly pickled key"""
+ if (isinstance(key, compat.string_types) and
+ key.startswith(tags.JSON_KEY)):
key = decode(key[len(tags.JSON_KEY):],
backend=self.backend, context=self,
keys=True, reset=False)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/jsonpickle/util.py
new/jsonpickle-1.2/jsonpickle/util.py
--- old/jsonpickle-1.1/jsonpickle/util.py 2019-01-23 08:29:08.000000000
+0100
+++ new/jsonpickle-1.2/jsonpickle/util.py 2019-05-24 07:17:18.000000000
+0200
@@ -511,7 +511,25 @@
"""
Decode payload - must be ascii text.
"""
- return base64.b64decode(payload.encode('ascii'))
+ return base64.b64decode(payload)
+
+
+def b85encode(data):
+ """
+ Encode binary data to ascii text in base85. Data must be bytes.
+ """
+ if PY2:
+ raise NotImplementedError("Python 2 can't encode data in base85.")
+ return base64.b85encode(data).decode('ascii')
+
+
+def b85decode(payload):
+ """
+ Decode payload - must be ascii text.
+ """
+ if PY2:
+ raise NotImplementedError("Python 2 can't decode base85-encoded data.")
+ return base64.b85decode(payload)
def itemgetter(obj, getter=operator.itemgetter(0)):
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/jsonpickle/version.py
new/jsonpickle-1.2/jsonpickle/version.py
--- old/jsonpickle-1.1/jsonpickle/version.py 2019-01-23 08:29:08.000000000
+0100
+++ new/jsonpickle-1.2/jsonpickle/version.py 2019-05-24 07:17:18.000000000
+0200
@@ -1 +1 @@
-__version__ = '1.1'
+__version__ = '1.2'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/jsonpickle.egg-info/PKG-INFO
new/jsonpickle-1.2/jsonpickle.egg-info/PKG-INFO
--- old/jsonpickle-1.1/jsonpickle.egg-info/PKG-INFO 2019-01-23
08:29:21.000000000 +0100
+++ new/jsonpickle-1.2/jsonpickle.egg-info/PKG-INFO 2019-05-24
07:17:32.000000000 +0200
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: jsonpickle
-Version: 1.1
+Version: 1.2
Summary: Python library for serializing any arbitrary object graph into JSON
Home-page: https://jsonpickle.github.io/
Author: David Aguilar
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/requirements-dev.txt
new/jsonpickle-1.2/requirements-dev.txt
--- old/jsonpickle-1.1/requirements-dev.txt 2019-01-23 08:29:08.000000000
+0100
+++ new/jsonpickle-1.2/requirements-dev.txt 2019-05-24 07:17:18.000000000
+0200
@@ -13,4 +13,4 @@
simplejson
sqlalchemy
ujson
-yajl
+yajl; sys_platform != 'win32'
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/tests/backend_test.py
new/jsonpickle-1.2/tests/backend_test.py
--- old/jsonpickle-1.1/tests/backend_test.py 2019-01-23 08:29:08.000000000
+0100
+++ new/jsonpickle-1.2/tests/backend_test.py 2019-05-24 07:17:18.000000000
+0200
@@ -1,4 +1,5 @@
from __future__ import absolute_import, division, unicode_literals
+import decimal
import unittest
from warnings import warn
@@ -89,6 +90,35 @@
']}')
self.assertEncodeDecode(expected_pickled)
+ def test_decimal(self):
+ # Default behavior: Decimal is preserved
+ obj = decimal.Decimal(0.5)
+ as_json = jsonpickle.dumps(obj)
+ clone = jsonpickle.loads(as_json)
+ self.assertTrue(isinstance(clone, decimal.Decimal))
+ self.assertEqual(obj, clone)
+
+ # Custom behavior: we want to use simplejson's Decimal support.
+ jsonpickle.set_encoder_options('simplejson',
+ use_decimal=True, sort_keys=True)
+
+ jsonpickle.set_decoder_options('simplejson',
+ use_decimal=True)
+
+        # use_decimal mode allows Decimal objects to pass-through to simplejson.
+        # The end result is we get a simple '0.5' value as our json string.
+ # The end result is we get a simple '0.5' value as our json string.
+ as_json = jsonpickle.dumps(obj, unpicklable=True, use_decimal=True)
+ self.assertEqual(as_json, '0.5')
+ # But when loading we get back a Decimal.
+ clone = jsonpickle.loads(as_json)
+ self.assertTrue(isinstance(clone, decimal.Decimal))
+
+ # side-effect: floats become decimals too!
+ obj = 0.5
+ as_json = jsonpickle.dumps(obj)
+ clone = jsonpickle.loads(as_json)
+ self.assertTrue(isinstance(clone, decimal.Decimal))
+
def has_module(module):
try:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/tests/jsonpickle_test.py
new/jsonpickle-1.2/tests/jsonpickle_test.py
--- old/jsonpickle-1.1/tests/jsonpickle_test.py 2019-01-23 08:29:08.000000000
+0100
+++ new/jsonpickle-1.2/tests/jsonpickle_test.py 2019-05-24 07:17:18.000000000
+0200
@@ -98,11 +98,56 @@
def setUp(self):
self.pickler = jsonpickle.pickler.Pickler()
self.unpickler = jsonpickle.unpickler.Unpickler()
+ self.b85_pickler = jsonpickle.pickler.Pickler(use_base85=True)
def tearDown(self):
self.pickler.reset()
self.unpickler.reset()
+ @unittest.skipIf(not PY2, 'Python 2-specific base85 test')
+ def test_base85_always_false_on_py2(self):
+ self.assertFalse(self.b85_pickler.use_base85)
+
+ @unittest.skipIf(PY2, 'Base85 not supported on Python 2')
+ def test_base85_override_py3(self):
+ """Ensure the Python 2 check still lets us set use_base85 on Python
3"""
+ self.assertTrue(self.b85_pickler.use_base85)
+
+ @unittest.skipIf(PY2, 'Base85 not supported on Python 2')
+ def test_bytes_default_base85(self):
+ data = os.urandom(16)
+ encoded = util.b85encode(data)
+ self.assertEqual({tags.B85: encoded}, self.b85_pickler.flatten(data))
+
+ @unittest.skipIf(PY2, 'Base85 not supported on Python 2')
+ def test_py3_bytes_base64_default(self):
+ data = os.urandom(16)
+ encoded = util.b64encode(data)
+ self.assertEqual({tags.B64: encoded}, self.pickler.flatten(data))
+
+ @unittest.skipIf(not PY2, 'Python 2-specific base64 test')
+ def test_py2_default_base64(self):
+ data = os.urandom(16)
+ encoded = util.b64encode(data)
+ self.assertEqual({tags.B64: encoded}, self.pickler.flatten(data))
+
+ @unittest.skipIf(PY2, 'Base85 not supported on Python 2')
+ def test_decode_base85(self):
+ pickled = {tags.B85: 'P{Y4;Xv4O{u^=-c'}
+ expected = u'P\u00ffth\u00f6\u00f1 3!'.encode('utf-8')
+ self.assertEqual(expected, self.unpickler.restore(pickled))
+
+ @unittest.skipIf(PY2, 'Base85 not supported on Python 2')
+ def test_base85_still_handles_base64(self):
+ pickled = {tags.B64: 'UMO/dGjDtsOxIDMh'}
+ expected = u'P\u00ffth\u00f6\u00f1 3!'.encode('utf-8')
+ self.assertEqual(expected, self.unpickler.restore(pickled))
+
+ @unittest.skipIf(not PY2, 'Python 2-specific base85 test')
+ def test_base85_crashes_py2(self):
+ with self.assertRaises(NotImplementedError):
+ self.unpickler.restore({tags.B85: 'P{Y4;Xv4O{u^=-c'})
+
def test_string(self):
self.assertEqual('a string', self.pickler.flatten('a string'))
self.assertEqual('a string', self.unpickler.restore('a string'))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/tests/object_test.py
new/jsonpickle-1.2/tests/object_test.py
--- old/jsonpickle-1.1/tests/object_test.py 2019-01-23 08:29:08.000000000
+0100
+++ new/jsonpickle-1.2/tests/object_test.py 2019-05-24 07:17:18.000000000
+0200
@@ -11,7 +11,7 @@
from jsonpickle import handlers
from jsonpickle import tags
from jsonpickle import util
-from jsonpickle.compat import queue, PY2, encodebytes
+from jsonpickle.compat import queue, PY2
from helper import SkippableTest
@@ -76,6 +76,10 @@
def __setstate__(self, state):
self.x, self.y = state[0], state[1]
+class GetstateRecursesInfintely(object):
+ def __getstate__(self):
+ return GetstateRecursesInfintely()
+
class ListSubclassWithInit(list):
@@ -233,6 +237,15 @@
return self.offset,
+class IntKeysObject(object):
+
+ def __init__(self):
+ self.data = {0: 0}
+
+ def __getstate__(self):
+ return self.__dict__
+
+
class AdvancedObjectsTestCase(SkippableTest):
def setUp(self):
@@ -503,10 +516,10 @@
self.assertEqual(restored.data, data)
def test_decimal(self):
- obj = decimal.Decimal(1)
+ obj = decimal.Decimal('0.5')
flattened = self.pickler.flatten(obj)
inflated = self.unpickler.restore(flattened)
- self.assertEqual(type(inflated), decimal.Decimal)
+ self.assertTrue(isinstance(inflated, decimal.Decimal))
def test_repr_using_jsonpickle(self):
thing = ObjWithJsonPickleRepr()
@@ -547,6 +560,12 @@
self.assertTrue(util.is_reducible(d))
self.assertTrue(has_reduce or has_reduce_ex)
+ def test_int_keys_in_object_with_getstate_only(self):
+ obj = IntKeysObject()
+ encoded = jsonpickle.encode(obj, keys=True)
+ decoded = jsonpickle.decode(encoded, keys=True)
+ self.assertEqual(obj.data, decoded.data)
+
def test_ordered_dict_int_keys(self):
d = {
1: collections.OrderedDict([(2, -2), (3, -3)]),
@@ -702,6 +721,11 @@
restored = self.unpickler.restore(flat)
self.assertEqual(expect, restored)
+ def test_getstate_does_not_recurse_infinitely(self):
+ obj = GetstateRecursesInfintely()
+ pickler = jsonpickle.pickler.Pickler(max_depth=5)
+ pickler.flatten(obj)
+
def test_thing_with_queue(self):
obj = ThingWithQueue()
flattened = self.pickler.flatten(obj)
@@ -794,8 +818,8 @@
self.assertTrue(isinstance(encoded, compat.ustr))
else:
self.assertNotEqual(encoded, u1)
- b64ustr = encodebytes(b'foo').decode('utf-8')
- self.assertEqual({tags.B64: b64ustr}, encoded)
+ encoded_ustr = util.b64encode(b'foo')
+ self.assertEqual({tags.B64: encoded_ustr}, encoded)
self.assertTrue(isinstance(encoded[tags.B64], compat.ustr))
decoded = self.unpickler.restore(encoded)
self.assertTrue(decoded == b1)
@@ -807,8 +831,8 @@
# bytestrings that we can't decode to UTF-8 will always be wrapped
encoded = self.pickler.flatten(b2)
self.assertNotEqual(encoded, b2)
- b64ustr = encodebytes(b'foo\xff').decode('utf-8')
- self.assertEqual({tags.B64: b64ustr}, encoded)
+ encoded_ustr = util.b64encode(b'foo\xff')
+ self.assertEqual({tags.B64: encoded_ustr}, encoded)
self.assertTrue(isinstance(encoded[tags.B64], compat.ustr))
decoded = self.unpickler.restore(encoded)
self.assertEqual(decoded, b2)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/tests/pandas_test.py
new/jsonpickle-1.2/tests/pandas_test.py
--- old/jsonpickle-1.1/tests/pandas_test.py 2019-01-23 08:29:08.000000000
+0100
+++ new/jsonpickle-1.2/tests/pandas_test.py 2019-05-24 07:17:18.000000000
+0200
@@ -8,7 +8,7 @@
try:
import pandas as pd
import numpy as np
- from pandas.testing import assert_series_equal, assert_frame_equal
+ from pandas.testing import assert_series_equal, assert_frame_equal,
assert_index_equal
except ImportError:
np = None
@@ -91,6 +91,128 @@
decoded_df = self.roundtrip(df)
assert_frame_equal(decoded_df, df)
+ def test_dataframe_with_interval_index_roundtrip(self):
+ if self.should_skip:
+ return self.skip('pandas is not importable')
+
+ df = pd.DataFrame({"a": [1, 2], "b": [3, 4]},
+ index=pd.IntervalIndex.from_breaks([1,2,4]))
+
+ decoded_df = self.roundtrip(df)
+ assert_frame_equal(decoded_df, df)
+
+ def test_index_roundtrip(self):
+ if self.should_skip:
+ return self.skip('pandas is not importable')
+
+ idx = pd.Index(range(5, 10))
+ decoded_idx = self.roundtrip(idx)
+ assert_index_equal(decoded_idx, idx)
+
+ def test_datetime_index_roundtrip(self):
+ if self.should_skip:
+ return self.skip('pandas is not importable')
+
+ idx = pd.date_range(start='2019-01-01', end='2019-02-01', freq='D')
+ decoded_idx = self.roundtrip(idx)
+ assert_index_equal(decoded_idx, idx)
+
+ def test_ragged_datetime_index_roundtrip(self):
+ if self.should_skip:
+ return self.skip('pandas is not importable')
+
+ idx = pd.DatetimeIndex(['2019-01-01', '2019-01-02', '2019-01-05',])
+ decoded_idx = self.roundtrip(idx)
+ assert_index_equal(decoded_idx, idx)
+
+ def test_timedelta_index_roundtrip(self):
+ if self.should_skip:
+ return self.skip('pandas is not importable')
+
+ idx = pd.timedelta_range(start='1 day', periods=4, closed='right')
+ decoded_idx = self.roundtrip(idx)
+ assert_index_equal(decoded_idx, idx)
+
+ def test_period_index_roundtrip(self):
+ if self.should_skip:
+ return self.skip('pandas is not importable')
+
+ idx = pd.period_range(start='2017-01-01', end='2018-01-01', freq='M')
+ decoded_idx = self.roundtrip(idx)
+ assert_index_equal(decoded_idx, idx)
+
+ def test_int64_index_roundtrip(self):
+ if self.should_skip:
+ return self.skip('pandas is not importable')
+
+ idx = pd.Int64Index([-1, 0, 3, 4])
+ decoded_idx = self.roundtrip(idx)
+ assert_index_equal(decoded_idx, idx)
+
+ def test_uint64_index_roundtrip(self):
+ if self.should_skip:
+ return self.skip('pandas is not importable')
+
+ idx = pd.UInt64Index([0, 3, 4])
+ decoded_idx = self.roundtrip(idx)
+ assert_index_equal(decoded_idx, idx)
+
+ def test_float64_index_roundtrip(self):
+ if self.should_skip:
+ return self.skip('pandas is not importable')
+
+ idx = pd.Float64Index([0.1, 3.7, 4.2])
+ decoded_idx = self.roundtrip(idx)
+ assert_index_equal(decoded_idx, idx)
+
+ def test_interval_index_roundtrip(self):
+ if self.should_skip:
+ return self.skip('pandas is not importable')
+
+ idx = pd.IntervalIndex.from_breaks(range(5))
+ decoded_idx = self.roundtrip(idx)
+ assert_index_equal(decoded_idx, idx)
+
+ def test_datetime_interval_index_roundtrip(self):
+ if self.should_skip:
+ return self.skip('pandas is not importable')
+
+ idx = pd.IntervalIndex.from_breaks(pd.date_range('2019-01-01',
'2019-01-10'))
+ decoded_idx = self.roundtrip(idx)
+ assert_index_equal(decoded_idx, idx)
+
+ def test_multi_index_roundtrip(self):
+ if self.should_skip:
+ return self.skip('pandas is not importable')
+
+ idx = pd.MultiIndex.from_product(((1,2,3), ("a", "b")))
+ decoded_idx = self.roundtrip(idx)
+ assert_index_equal(decoded_idx, idx)
+
+ def test_timestamp_roundtrip(self):
+ if self.should_skip:
+ return self.skip('pandas is not importable')
+
+ obj = pd.Timestamp('2019-01-01')
+ decoded_obj = self.roundtrip(obj)
+ assert decoded_obj == obj
+
+ def test_period_roundtrip(self):
+ if self.should_skip:
+ return self.skip('pandas is not importable')
+
+ obj = pd.Timestamp('2019-01-01')
+ decoded_obj = self.roundtrip(obj)
+ assert decoded_obj == obj
+
+ def test_interval_roundtrip(self):
+ if self.should_skip:
+ return self.skip('pandas is not importable')
+
+ obj = pd.Interval(2, 4, closed=str('left'))
+ decoded_obj = self.roundtrip(obj)
+ assert decoded_obj == obj
+
def test_b64(self):
"""Test the binary encoding"""
if self.should_skip:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/tests/runtests.py
new/jsonpickle-1.2/tests/runtests.py
--- old/jsonpickle-1.1/tests/runtests.py 2019-01-23 08:29:08.000000000
+0100
+++ new/jsonpickle-1.2/tests/runtests.py 2019-05-24 07:17:18.000000000
+0200
@@ -26,6 +26,7 @@
import feedparser_test
import bson_test
import numpy_test
+import pandas_test
def suite():
@@ -41,6 +42,7 @@
suite.addTest(feedparser_test.suite())
suite.addTest(numpy_test.suite())
suite.addTest(bson_test.suite())
+ suite.addTest(pandas_test.suite())
return suite
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn'
'--exclude=.svnignore' old/jsonpickle-1.1/tests/util_test.py
new/jsonpickle-1.2/tests/util_test.py
--- old/jsonpickle-1.1/tests/util_test.py 2019-01-23 08:29:08.000000000
+0100
+++ new/jsonpickle-1.2/tests/util_test.py 2019-05-24 07:17:18.000000000
+0200
@@ -55,6 +55,16 @@
class UtilTestCase(unittest.TestCase):
+ @unittest.skipIf(not compat.PY2, 'Python 2-specific Base85 test')
+ def test_b85encode_crashes_on_python2(self):
+ with self.assertRaises(NotImplementedError):
+ util.b85encode(b'')
+
+ @unittest.skipIf(not compat.PY2, 'Python 2-specific Base85 test')
+ def test_b85decode_crashes_on_python2(self):
+ with self.assertRaises(NotImplementedError):
+ util.b85decode(u'RC2?pb0AN3baKO~')
+
def test_is_primitive_int(self):
self.assertTrue(util.is_primitive(0))
self.assertTrue(util.is_primitive(3))