Hello community,

here is the log from the commit of package python-simplejson for
openSUSE:Factory, checked in at 2018-05-22 17:01:23.
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-simplejson (Old)
 and      /work/SRC/openSUSE:Factory/.python-simplejson.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-simplejson"

Tue May 22 17:01:23 2018 rev:39 rq:610880 version:3.15.0

Changes:
--------
--- /work/SRC/openSUSE:Factory/python-simplejson/python-simplejson.changes      2018-05-11 11:26:13.600860187 +0200
+++ /work/SRC/openSUSE:Factory/.python-simplejson.new/python-simplejson.changes 2018-05-22 17:01:30.333222321 +0200
@@ -1,0 +2,11 @@
+Mon May 21 04:02:31 UTC 2018 - a...@gmx.de
+
+- update to version 3.15.0:
+  * Clean up the C code
+  * Bypass the decode() method in bytes subclasses
+  * Support builds without cStringIO
+  * Allow to disable serializing bytes by default in Python 3
+  * Simplify the compatibility code
+  * Fix tests in Python 2.5
+
+-------------------------------------------------------------------

Old:
----
  simplejson-3.14.0.tar.gz

New:
----
  simplejson-3.15.0.tar.gz
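
The headline change in 3.15.0 is that passing encoding=None to dump()/dumps()
disables the implicit decoding of bytes on Python 3 and routes them to the
default() hook instead. A minimal sketch of that behaviour, based on the
upstream changelog and the new tests further below (assumes Python 3 and
simplejson 3.15.0):

    import simplejson as json

    # Unchanged default: bytes are decoded as UTF-8 before JSON-encoding.
    json.dumps(b'\xe2\x82\xac')                    # -> '"\\u20ac"'

    # New: with encoding=None, bytes are no longer decoded implicitly and
    # raise TypeError unless a default() hook handles them.
    try:
        json.dumps(b'\xe2\x82\xac', encoding=None)
    except TypeError:
        pass

    # A default() hook can take over the bytes-to-text conversion explicitly.
    json.dumps(b'\xa4', encoding=None,
               default=lambda o: o.decode('iso-8859-15'))   # -> '"\\u20ac"'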

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-simplejson.spec ++++++
--- /var/tmp/diff_new_pack.VHo2OA/_old  2018-05-22 17:01:31.229189717 +0200
+++ /var/tmp/diff_new_pack.VHo2OA/_new  2018-05-22 17:01:31.233189570 +0200
@@ -18,7 +18,7 @@
 
 %{?!python_module:%define python_module() python-%{**} python3-%{**}}
 Name:           python-simplejson
-Version:        3.14.0
+Version:        3.15.0
 Release:        0
 Summary:        Extensible JSON encoder/decoder for Python
 License:        MIT OR AFL-2.1

++++++ simplejson-3.14.0.tar.gz -> simplejson-3.15.0.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/CHANGES.txt new/simplejson-3.15.0/CHANGES.txt
--- old/simplejson-3.14.0/CHANGES.txt   2018-04-22 00:57:00.000000000 +0200
+++ new/simplejson-3.15.0/CHANGES.txt   2018-05-12 20:58:17.000000000 +0200
@@ -1,3 +1,18 @@
+Version 3.15.0 released 2018-05-12
+
+* Clean up the C code
+  https://github.com/simplejson/simplejson/pull/220
+* Bypass the decode() method in bytes subclasses
+  https://github.com/simplejson/simplejson/pull/219
+* Support builds without cStringIO
+  https://github.com/simplejson/simplejson/pull/217
+* Allow to disable serializing bytes by default in Python 3
+  https://github.com/simplejson/simplejson/pull/216
+* Simplify the compatibility code
+  https://github.com/simplejson/simplejson/pull/215
+* Fix tests in Python 2.5
+  https://github.com/simplejson/simplejson/pull/214
+
 Version 3.14.0 released 2018-04-21
 
 * Defer is_raw_json test (performance improvement)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/PKG-INFO new/simplejson-3.15.0/PKG-INFO
--- old/simplejson-3.14.0/PKG-INFO      2018-04-22 00:57:23.000000000 +0200
+++ new/simplejson-3.15.0/PKG-INFO      2018-05-12 20:58:44.000000000 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: simplejson
-Version: 3.14.0
+Version: 3.15.0
 Summary: Simple, fast, extensible JSON encoder/decoder for Python
 Home-page: http://github.com/simplejson/simplejson
 Author: Bob Ippolito
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/conf.py new/simplejson-3.15.0/conf.py
--- old/simplejson-3.14.0/conf.py       2018-04-22 00:57:00.000000000 +0200
+++ new/simplejson-3.15.0/conf.py       2018-05-12 20:58:17.000000000 +0200
@@ -42,9 +42,9 @@
 # other places throughout the built documents.
 #
 # The short X.Y version.
-version = '3.14'
+version = '3.15'
 # The full version, including alpha/beta/rc tags.
-release = '3.14.0'
+release = '3.15.0'
 
 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/index.rst new/simplejson-3.15.0/index.rst
--- old/simplejson-3.14.0/index.rst     2018-04-22 00:57:00.000000000 +0200
+++ new/simplejson-3.15.0/index.rst     2018-05-12 20:58:17.000000000 +0200
@@ -192,8 +192,16 @@
    .. versionchanged:: 2.1.4
       Use ``(',', ': ')`` as default if *indent* is not ``None``.
 
-   *encoding* is the character encoding for str instances, default is
-   ``'utf-8'``.
+   If *encoding* is not ``None``, then all input :class:`bytes` objects in
+   Python 3 and 8-bit strings in Python 2 will be transformed
+   into unicode using that encoding prior to JSON-encoding.  The default is
+   ``'utf-8'``.  If *encoding* is ``None``, then all :class:`bytes` objects
+   will be passed to the *default* function in Python 3
+
+   .. versionchanged:: 3.15.0
+      ``encoding=None`` disables serializing :class:`bytes` by default in
+      Python 3.
+
 
    *default(obj)* is a function that should return a serializable version of
    *obj* or raise :exc:`TypeError`. The default simply raises :exc:`TypeError`.
@@ -656,9 +664,15 @@
    that can't otherwise be serialized.  It should return a JSON encodable
    version of the object or raise a :exc:`TypeError`.
 
-   If *encoding* is not ``None``, then all input strings will be transformed
+   If *encoding* is not ``None``, then all input :class:`bytes` objects in
+   Python 3 and 8-bit strings in Python 2 will be transformed
    into unicode using that encoding prior to JSON-encoding.  The default is
-   ``'utf-8'``.
+   ``'utf-8'``.  If *encoding* is ``None``, then all :class:`bytes` objects
+   will be passed to the :meth:`default` method in Python 3
+
+   .. versionchanged:: 3.15.0
+      ``encoding=None`` disables serializing :class:`bytes` by default in
+      Python 3.
 
    If *namedtuple_as_object* is true (default: ``True``),
    objects with ``_asdict()`` methods will be encoded
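
The documented encoding=None behaviour above also applies to JSONEncoder
subclasses, whose default() method then receives the raw bytes. A hedged
sketch (the BytesAsHex class and its hex representation are illustrative only;
assumes Python 3.5+ with simplejson 3.15.0):

    import simplejson

    class BytesAsHex(simplejson.JSONEncoder):
        def __init__(self, **kw):
            # 3.15.0+: encoding=None routes bytes to default() instead of
            # decoding them implicitly.
            kw.setdefault('encoding', None)
            super(BytesAsHex, self).__init__(**kw)

        def default(self, o):
            if isinstance(o, bytes):
                return o.hex()          # serialize raw bytes as a hex string
            return super(BytesAsHex, self).default(o)

    BytesAsHex().encode({'payload': b'\x00\xff'})   # -> '{"payload": "00ff"}'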
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/setup.py new/simplejson-3.15.0/setup.py
--- old/simplejson-3.14.0/setup.py      2018-04-22 00:57:00.000000000 +0200
+++ new/simplejson-3.15.0/setup.py      2018-05-12 20:58:17.000000000 +0200
@@ -12,7 +12,7 @@
     DistutilsPlatformError
 
 IS_PYPY = hasattr(sys, 'pypy_translation_info')
-VERSION = '3.14.0'
+VERSION = '3.15.0'
 DESCRIPTION = "Simple, fast, extensible JSON encoder/decoder for Python"
 
 with open('README.rst', 'r') as f:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/simplejson/__init__.py new/simplejson-3.15.0/simplejson/__init__.py
--- old/simplejson-3.14.0/simplejson/__init__.py        2018-04-22 00:57:00.000000000 +0200
+++ new/simplejson-3.15.0/simplejson/__init__.py        2018-05-12 20:58:17.000000000 +0200
@@ -98,7 +98,7 @@
     Expecting property name: line 1 column 3 (char 2)
 """
 from __future__ import absolute_import
-__version__ = '3.14.0'
+__version__ = '3.15.0'
 __all__ = [
     'dump', 'dumps', 'load', 'loads',
     'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/simplejson/_speedups.c new/simplejson-3.15.0/simplejson/_speedups.c
--- old/simplejson-3.14.0/simplejson/_speedups.c        2018-04-22 00:57:00.000000000 +0200
+++ new/simplejson-3.15.0/simplejson/_speedups.c        2018-05-12 20:58:17.000000000 +0200
@@ -5,36 +5,24 @@
 #if PY_MAJOR_VERSION >= 3
 #define PyInt_FromSsize_t PyLong_FromSsize_t
 #define PyInt_AsSsize_t PyLong_AsSsize_t
-#define PyString_Check PyBytes_Check
-#define PyString_GET_SIZE PyBytes_GET_SIZE
-#define PyString_AS_STRING PyBytes_AS_STRING
-#define PyString_FromStringAndSize PyBytes_FromStringAndSize
 #define PyInt_Check(obj) 0
 #define PyInt_CheckExact(obj) 0
 #define JSON_UNICHR Py_UCS4
 #define JSON_InternFromString PyUnicode_InternFromString
-#define JSON_Intern_GET_SIZE PyUnicode_GET_SIZE
-#define JSON_ASCII_Check PyUnicode_Check
-#define JSON_ASCII_AS_STRING PyUnicode_AsUTF8
-#define PyInt_Type PyLong_Type
-#define PyInt_FromString PyLong_FromString
+#define PyString_GET_SIZE PyUnicode_GET_LENGTH
 #define PY2_UNUSED
 #define PY3_UNUSED UNUSED
-#define JSON_NewEmptyUnicode() PyUnicode_New(0, 127)
 #else /* PY_MAJOR_VERSION >= 3 */
 #define PY2_UNUSED UNUSED
 #define PY3_UNUSED
+#define PyBytes_Check PyString_Check
 #define PyUnicode_READY(obj) 0
 #define PyUnicode_KIND(obj) (sizeof(Py_UNICODE))
 #define PyUnicode_DATA(obj) ((void *)(PyUnicode_AS_UNICODE(obj)))
 #define PyUnicode_READ(kind, data, index) ((JSON_UNICHR)((const Py_UNICODE *)(data))[(index)])
-#define PyUnicode_GetLength PyUnicode_GET_SIZE
+#define PyUnicode_GET_LENGTH PyUnicode_GET_SIZE
 #define JSON_UNICHR Py_UNICODE
-#define JSON_ASCII_Check PyString_Check
-#define JSON_ASCII_AS_STRING PyString_AS_STRING
 #define JSON_InternFromString PyString_InternFromString
-#define JSON_Intern_GET_SIZE PyString_GET_SIZE
-#define JSON_NewEmptyUnicode() PyUnicode_FromUnicode(NULL, 0)
 #endif /* PY_MAJOR_VERSION < 3 */
 
 #if PY_VERSION_HEX < 0x02070000
@@ -84,6 +72,11 @@
 #define JSON_ALLOW_NAN 1
 #define JSON_IGNORE_NAN 2
 
+static PyObject *JSON_Infinity = NULL;
+static PyObject *JSON_NegInfinity = NULL;
+static PyObject *JSON_NaN = NULL;
+static PyObject *JSON_EmptyUnicode = NULL;
+
 static PyTypeObject PyScannerType;
 static PyTypeObject PyEncoderType;
 
@@ -188,8 +181,6 @@
 static PyObject *
 JSON_ParseEncoding(PyObject *encoding);
 static PyObject *
-JSON_UnicodeFromChar(JSON_UNICHR c);
-static PyObject *
 maybe_quote_bigint(PyEncoderObject* s, PyObject *encoded, PyObject *obj);
 static Py_ssize_t
 ascii_char_size(JSON_UNICHR c);
@@ -318,7 +309,7 @@
 #if PY_MAJOR_VERSION >= 3
     assert(PyUnicode_Check(unicode));
 #else /* PY_MAJOR_VERSION >= 3 */
-    assert(JSON_ASCII_Check(unicode) || PyUnicode_Check(unicode));
+    assert(PyString_Check(unicode) || PyUnicode_Check(unicode));
 #endif /* PY_MAJOR_VERSION < 3 */
 
     if (PyList_Append(acc->small_strings, unicode))
@@ -369,19 +360,6 @@
 }
 
 static PyObject *
-JSON_UnicodeFromChar(JSON_UNICHR c)
-{
-#if PY_MAJOR_VERSION >= 3
-    PyObject *rval = PyUnicode_New(1, c);
-    if (rval)
-        PyUnicode_WRITE(PyUnicode_KIND(rval), PyUnicode_DATA(rval), 0, c);
-    return rval;
-#else /* PY_MAJOR_VERSION >= 3 */
-    return PyUnicode_FromUnicode(&c, 1);
-#endif /* PY_MAJOR_VERSION < 3 */
-}
-
-static PyObject *
 maybe_quote_bigint(PyEncoderObject* s, PyObject *encoded, PyObject *obj)
 {
     if (s->max_long_size != Py_None && s->min_long_size != Py_None) {
@@ -466,7 +444,7 @@
             case '\r': output[chars++] = 'r'; break;
             case '\t': output[chars++] = 't'; break;
             default:
-#if defined(Py_UNICODE_WIDE) || PY_MAJOR_VERSION >= 3
+#if PY_MAJOR_VERSION >= 3 || defined(Py_UNICODE_WIDE)
                 if (c >= 0x10000) {
                     /* UTF-16 surrogate pair */
                     JSON_UNICHR v = c - 0x10000;
@@ -505,7 +483,7 @@
                c == '\t') {
         return 2;
     }
-#if defined(Py_UNICODE_WIDE) || PY_MAJOR_VERSION >= 3
+#if PY_MAJOR_VERSION >= 3 || defined(Py_UNICODE_WIDE)
     else if (c >= 0x10000U) {
         return 2 * MIN_EXPANSION;
     }
@@ -520,20 +498,14 @@
 {
     /* Take a PyUnicode pystr and return a new ASCII-only escaped PyString */
     Py_ssize_t i;
-    Py_ssize_t input_chars;
-    Py_ssize_t output_size;
+    Py_ssize_t input_chars = PyUnicode_GET_LENGTH(pystr);
+    Py_ssize_t output_size = 2;
     Py_ssize_t chars;
-    PY2_UNUSED int kind;
-    void *data;
+    PY2_UNUSED int kind = PyUnicode_KIND(pystr);
+    void *data = PyUnicode_DATA(pystr);
     PyObject *rval;
     char *output;
 
-    if (PyUnicode_READY(pystr))
-        return NULL;
-
-    kind = PyUnicode_KIND(pystr);
-    data = PyUnicode_DATA(pystr);
-    input_chars = PyUnicode_GetLength(pystr);
     output_size = 2;
     for (i = 0; i < input_chars; i++) {
         output_size += ascii_char_size(PyUnicode_READ(kind, data, i));
@@ -568,7 +540,7 @@
 ascii_escape_str(PyObject *pystr)
 {
     PyObject *rval;
-    PyObject *input = PyUnicode_DecodeUTF8(PyString_AS_STRING(pystr), PyString_GET_SIZE(pystr), NULL);
+    PyObject *input = PyUnicode_DecodeUTF8(PyBytes_AS_STRING(pystr), PyBytes_GET_SIZE(pystr), NULL);
     if (input == NULL)
         return NULL;
     rval = ascii_escape_unicode(input);
@@ -634,21 +606,23 @@
         Py_INCREF(key);
         return key;
     }
-    else if (PyString_Check(key)) {
 #if PY_MAJOR_VERSION >= 3
-        const char *encoding = JSON_ASCII_AS_STRING(s->encoding);
+    else if (PyBytes_Check(key) && s->encoding != NULL) {
+        const char *encoding = PyUnicode_AsUTF8(s->encoding);
         if (encoding == NULL)
             return NULL;
         return PyUnicode_Decode(
-            PyString_AS_STRING(key),
-            PyString_GET_SIZE(key),
+            PyBytes_AS_STRING(key),
+            PyBytes_GET_SIZE(key),
             encoding,
             NULL);
+    }
 #else /* PY_MAJOR_VERSION >= 3 */
+    else if (PyString_Check(key)) {
         Py_INCREF(key);
         return key;
-#endif /* PY_MAJOR_VERSION < 3 */
     }
+#endif /* PY_MAJOR_VERSION < 3 */
     else if (PyFloat_Check(key)) {
         return encoder_encode_float(s, key);
     }
@@ -676,7 +650,7 @@
     else if (s->use_decimal && PyObject_TypeCheck(key, (PyTypeObject *)s->Decimal)) {
         return PyObject_Str(key);
     }
-    else if (s->skipkeys) {
+    if (s->skipkeys) {
         Py_INCREF(Py_None);
         return Py_None;
     }
@@ -799,18 +773,7 @@
 join_list_unicode(PyObject *lst)
 {
     /* return u''.join(lst) */
-    static PyObject *joinfn = NULL;
-    if (joinfn == NULL) {
-        PyObject *ustr = JSON_NewEmptyUnicode();
-        if (ustr == NULL)
-            return NULL;
-
-        joinfn = PyObject_GetAttrString(ustr, "join");
-        Py_DECREF(ustr);
-        if (joinfn == NULL)
-            return NULL;
-    }
-    return PyObject_CallFunctionObjArgs(joinfn, lst, NULL);
+    return PyUnicode_Join(JSON_EmptyUnicode, lst);
 }
 
 #if PY_MAJOR_VERSION >= 3
@@ -932,17 +895,6 @@
         /* Pick up this chunk if it's not zero length */
         if (next != end) {
             APPEND_OLD_CHUNK
-#if PY_MAJOR_VERSION >= 3
-            if (!has_unicode) {
-                chunk = PyUnicode_DecodeASCII(&buf[end], next - end, NULL);
-            }
-            else {
-            chunk = PyUnicode_Decode(&buf[end], next - end, encoding, NULL);
-            }
-            if (chunk == NULL) {
-                goto bail;
-            }
-#else /* PY_MAJOR_VERSION >= 3 */
             strchunk = PyString_FromStringAndSize(&buf[end], next - end);
             if (strchunk == NULL) {
                 goto bail;
@@ -957,7 +909,6 @@
             else {
                 chunk = strchunk;
             }
-#endif /* PY_MAJOR_VERSION < 3 */
         }
         next++;
         if (c == '"') {
@@ -1015,7 +966,7 @@
                         goto bail;
                 }
             }
-#if (PY_MAJOR_VERSION >= 3 || defined(Py_UNICODE_WIDE))
+#if defined(Py_UNICODE_WIDE)
             /* Surrogate pair */
             if ((c & 0xfc00) == 0xd800) {
                 if (end + 6 < len && buf[next] == '\\' && buf[next+1] == 'u') {
@@ -1050,20 +1001,14 @@
                     }
                 }
             }
-#endif /* PY_MAJOR_VERSION >= 3 || Py_UNICODE_WIDE */
+#endif /* Py_UNICODE_WIDE */
         }
         if (c > 0x7f) {
             has_unicode = 1;
         }
         APPEND_OLD_CHUNK
-#if PY_MAJOR_VERSION >= 3
-        chunk = JSON_UnicodeFromChar(c);
-        if (chunk == NULL) {
-            goto bail;
-        }
-#else /* PY_MAJOR_VERSION >= 3 */
         if (has_unicode) {
-            chunk = JSON_UnicodeFromChar(c);
+            chunk = PyUnicode_FromOrdinal(c);
             if (chunk == NULL) {
                 goto bail;
             }
@@ -1075,14 +1020,15 @@
                 goto bail;
             }
         }
-#endif
     }
 
     if (chunks == NULL) {
         if (chunk != NULL)
             rval = chunk;
-        else
-            rval = JSON_NewEmptyUnicode();
+        else {
+            rval = JSON_EmptyUnicode;
+            Py_INCREF(rval);
+        }
     }
     else {
         APPEND_OLD_CHUNK
@@ -1118,7 +1064,7 @@
     Py_ssize_t begin = end - 1;
     Py_ssize_t next = begin;
     PY2_UNUSED int kind = PyUnicode_KIND(pystr);
-    Py_ssize_t len = PyUnicode_GetLength(pystr);
+    Py_ssize_t len = PyUnicode_GET_LENGTH(pystr);
     void *buf = PyUnicode_DATA(pystr);
     PyObject *chunks = NULL;
     PyObject *chunk = NULL;
@@ -1256,7 +1202,7 @@
 #endif
         }
         APPEND_OLD_CHUNK
-        chunk = JSON_UnicodeFromChar(c);
+        chunk = PyUnicode_FromOrdinal(c);
         if (chunk == NULL) {
             goto bail;
         }
@@ -1265,8 +1211,10 @@
     if (chunks == NULL) {
         if (chunk != NULL)
             rval = chunk;
-        else
-            rval = JSON_NewEmptyUnicode();
+        else {
+            rval = JSON_EmptyUnicode;
+            Py_INCREF(rval);
+        }
     }
     else {
         APPEND_OLD_CHUNK
@@ -1314,6 +1262,8 @@
         encoding = DEFAULT_ENCODING;
     }
     if (PyUnicode_Check(pystr)) {
+        if (PyUnicode_READY(pystr))
+            return NULL;
         rval = scanstring_unicode(pystr, end, strict, &next_end);
     }
 #if PY_MAJOR_VERSION < 3
@@ -1343,10 +1293,12 @@
 {
     /* Return an ASCII-only JSON representation of a Python string */
     /* METH_O */
-    if (PyString_Check(pystr)) {
+    if (PyBytes_Check(pystr)) {
         return ascii_escape_str(pystr);
     }
     else if (PyUnicode_Check(pystr)) {
+        if (PyUnicode_READY(pystr))
+            return NULL;
         return ascii_escape_unicode(pystr);
     }
     else {
@@ -1419,7 +1371,7 @@
     PyObject *item;
     PyObject *key = NULL;
     PyObject *val = NULL;
-    char *encoding = JSON_ASCII_AS_STRING(s->encoding);
+    char *encoding = PyString_AS_STRING(s->encoding);
     int has_pairs_hook = (s->pairs_hook != Py_None);
     int did_parse = 0;
     Py_ssize_t next_idx;
@@ -1573,7 +1525,7 @@
     Returns a new PyObject (usually a dict, but object_hook can change that)
     */
     void *str = PyUnicode_DATA(pystr);
-    Py_ssize_t end_idx = PyUnicode_GetLength(pystr) - 1;
+    Py_ssize_t end_idx = PyUnicode_GET_LENGTH(pystr) - 1;
     PY2_UNUSED int kind = PyUnicode_KIND(pystr);
     PyObject *rval = NULL;
     PyObject *pairs = NULL;
@@ -1818,7 +1770,7 @@
     */
     PY2_UNUSED int kind = PyUnicode_KIND(pystr);
     void *str = PyUnicode_DATA(pystr);
-    Py_ssize_t end_idx = PyUnicode_GetLength(pystr) - 1;
+    Py_ssize_t end_idx = PyUnicode_GET_LENGTH(pystr) - 1;
     PyObject *val = NULL;
     PyObject *rval = PyList_New(0);
     Py_ssize_t next_idx;
@@ -1887,10 +1839,10 @@
 }
 
 static PyObject *
-_parse_constant(PyScannerObject *s, char *constant, Py_ssize_t idx, Py_ssize_t *next_idx_ptr)
+_parse_constant(PyScannerObject *s, PyObject *constant, Py_ssize_t idx, Py_ssize_t *next_idx_ptr)
 {
     /* Read a JSON constant from PyString pystr.
-    constant is the constant string that was found
+    constant is the Python string that was found
         ("NaN", "Infinity", "-Infinity").
     idx is the index of the first character of the constant
     *next_idx_ptr is a return-by-reference index to the first character after
@@ -1898,17 +1850,11 @@
 
     Returns the result of parse_constant
     */
-    PyObject *cstr;
     PyObject *rval;
-    /* constant is "NaN", "Infinity", or "-Infinity" */
-    cstr = JSON_InternFromString(constant);
-    if (cstr == NULL)
-        return NULL;
 
     /* rval = parse_constant(constant) */
-    rval = PyObject_CallFunctionObjArgs(s->parse_constant, cstr, NULL);
-    idx += JSON_Intern_GET_SIZE(cstr);
-    Py_DECREF(cstr);
+    rval = PyObject_CallFunctionObjArgs(s->parse_constant, constant, NULL);
+    idx += PyString_GET_SIZE(constant);
     *next_idx_ptr = idx;
     return rval;
 }
@@ -2033,7 +1979,7 @@
     */
     PY2_UNUSED int kind = PyUnicode_KIND(pystr);
     void *str = PyUnicode_DATA(pystr);
-    Py_ssize_t end_idx = PyUnicode_GetLength(pystr) - 1;
+    Py_ssize_t end_idx = PyUnicode_GET_LENGTH(pystr) - 1;
     Py_ssize_t idx = start;
     int is_float = 0;
     JSON_UNICHR c;
@@ -2153,7 +2099,7 @@
         case '"':
             /* string */
             rval = scanstring_str(pystr, idx + 1,
-                JSON_ASCII_AS_STRING(s->encoding),
+                PyString_AS_STRING(s->encoding),
                 s->strict,
                 next_idx_ptr);
             break;
@@ -2206,7 +2152,7 @@
         case 'N':
             /* NaN */
             if ((idx + 2 < length) && str[idx + 1] == 'a' && str[idx + 2] == 'N') {
-                rval = _parse_constant(s, "NaN", idx, next_idx_ptr);
+                rval = _parse_constant(s, JSON_NaN, idx, next_idx_ptr);
             }
             else
                 fallthrough = 1;
@@ -2214,7 +2160,7 @@
         case 'I':
             /* Infinity */
             if ((idx + 7 < length) && str[idx + 1] == 'n' && str[idx + 2] == 'f' && str[idx + 3] == 'i' && str[idx + 4] == 'n' && str[idx + 5] == 'i' && str[idx + 6] == 't' && str[idx + 7] == 'y') {
-                rval = _parse_constant(s, "Infinity", idx, next_idx_ptr);
+                rval = _parse_constant(s, JSON_Infinity, idx, next_idx_ptr);
             }
             else
                 fallthrough = 1;
@@ -2222,7 +2168,7 @@
         case '-':
             /* -Infinity */
             if ((idx + 8 < length) && str[idx + 1] == 'I' && str[idx + 2] == 'n' && str[idx + 3] == 'f' && str[idx + 4] == 'i' && str[idx + 5] == 'n' && str[idx + 6] == 'i' && str[idx + 7] == 't' && str[idx + 8] == 'y') {
-                rval = _parse_constant(s, "-Infinity", idx, next_idx_ptr);
+                rval = _parse_constant(s, JSON_NegInfinity, idx, next_idx_ptr);
             }
             else
                 fallthrough = 1;
@@ -2250,7 +2196,7 @@
     */
     PY2_UNUSED int kind = PyUnicode_KIND(pystr);
     void *str = PyUnicode_DATA(pystr);
-    Py_ssize_t length = PyUnicode_GetLength(pystr);
+    Py_ssize_t length = PyUnicode_GET_LENGTH(pystr);
     PyObject *rval = NULL;
     int fallthrough = 0;
     if (idx < 0 || idx >= length) {
@@ -2325,7 +2271,7 @@
             if ((idx + 2 < length) &&
                 PyUnicode_READ(kind, str, idx + 1) == 'a' &&
                 PyUnicode_READ(kind, str, idx + 2) == 'N') {
-                rval = _parse_constant(s, "NaN", idx, next_idx_ptr);
+                rval = _parse_constant(s, JSON_NaN, idx, next_idx_ptr);
             }
             else
                 fallthrough = 1;
@@ -2340,7 +2286,7 @@
                 PyUnicode_READ(kind, str, idx + 5) == 'i' &&
                 PyUnicode_READ(kind, str, idx + 6) == 't' &&
                 PyUnicode_READ(kind, str, idx + 7) == 'y') {
-                rval = _parse_constant(s, "Infinity", idx, next_idx_ptr);
+                rval = _parse_constant(s, JSON_Infinity, idx, next_idx_ptr);
             }
             else
                 fallthrough = 1;
@@ -2356,7 +2302,7 @@
                 PyUnicode_READ(kind, str, idx + 6) == 'i' &&
                 PyUnicode_READ(kind, str, idx + 7) == 't' &&
                 PyUnicode_READ(kind, str, idx + 8) == 'y') {
-                rval = _parse_constant(s, "-Infinity", idx, next_idx_ptr);
+                rval = _parse_constant(s, JSON_NegInfinity, idx, next_idx_ptr);
             }
             else
                 fallthrough = 1;
@@ -2386,6 +2332,8 @@
         return NULL;
 
     if (PyUnicode_Check(pystr)) {
+        if (PyUnicode_READY(pystr))
+            return NULL;
         rval = scan_once_unicode(s, pystr, idx, &next_idx);
     }
 #if PY_MAJOR_VERSION < 3
@@ -2406,18 +2354,24 @@
 static PyObject *
 JSON_ParseEncoding(PyObject *encoding)
 {
-    if (encoding == NULL)
-        return NULL;
     if (encoding == Py_None)
         return JSON_InternFromString(DEFAULT_ENCODING);
-#if PY_MAJOR_VERSION < 3
-    if (PyUnicode_Check(encoding))
-        return PyUnicode_AsEncodedString(encoding, NULL, NULL);
-#endif
-    if (JSON_ASCII_Check(encoding)) {
+#if PY_MAJOR_VERSION >= 3
+    if (PyUnicode_Check(encoding)) {
+        if (PyUnicode_AsUTF8(encoding) == NULL) {
+            return NULL;
+        }
+        Py_INCREF(encoding);
+        return encoding;
+    }
+#else /* PY_MAJOR_VERSION >= 3 */
+    if (PyString_Check(encoding)) {
         Py_INCREF(encoding);
         return encoding;
     }
+    if (PyUnicode_Check(encoding))
+        return PyUnicode_AsEncodedString(encoding, NULL, NULL);
+#endif /* PY_MAJOR_VERSION >= 3 */
     PyErr_SetString(PyExc_TypeError, "encoding must be a string");
     return NULL;
 }
@@ -2444,8 +2398,9 @@
             goto bail;
     }
 
-    /* JSON_ASCII_AS_STRING is used on encoding */
     encoding = PyObject_GetAttrString(ctx, "encoding");
+    if (encoding == NULL)
+        goto bail;
     s->encoding = JSON_ParseEncoding(encoding);
     Py_XDECREF(encoding);
     if (s->encoding == NULL)
@@ -2578,11 +2533,17 @@
     s->defaultfn = defaultfn;
     Py_INCREF(encoder);
     s->encoder = encoder;
-    s->encoding = JSON_ParseEncoding(encoding);
-    if (s->encoding == NULL)
-        goto bail;
-    if (JSON_ASCII_AS_STRING(s->encoding) == NULL)
-        goto bail;
+#if PY_MAJOR_VERSION >= 3
+    if (encoding == Py_None) {
+        s->encoding = NULL;
+    }
+    else
+#endif /* PY_MAJOR_VERSION >= 3 */
+    {
+        s->encoding = JSON_ParseEncoding(encoding);
+        if (s->encoding == NULL)
+            goto bail;
+    }
     Py_INCREF(indent);
     s->indent = indent;
     Py_INCREF(key_separator);
@@ -2768,28 +2729,16 @@
         }
         /* JSON_ALLOW_NAN is set */
         else if (i > 0) {
-            static PyObject *sInfinity = NULL;
-            if (sInfinity == NULL)
-                sInfinity = JSON_InternFromString("Infinity");
-            if (sInfinity)
-                Py_INCREF(sInfinity);
-            return sInfinity;
+            Py_INCREF(JSON_Infinity);
+            return JSON_Infinity;
         }
         else if (i < 0) {
-            static PyObject *sNegInfinity = NULL;
-            if (sNegInfinity == NULL)
-                sNegInfinity = JSON_InternFromString("-Infinity");
-            if (sNegInfinity)
-                Py_INCREF(sNegInfinity);
-            return sNegInfinity;
+            Py_INCREF(JSON_NegInfinity);
+            return JSON_NegInfinity;
         }
         else {
-            static PyObject *sNaN = NULL;
-            if (sNaN == NULL)
-                sNaN = JSON_InternFromString("NaN");
-            if (sNaN)
-                Py_INCREF(sNaN);
-            return sNaN;
+            Py_INCREF(JSON_NaN);
+            return JSON_NaN;
         }
     }
     /* Use a better float format here? */
@@ -2821,7 +2770,7 @@
     encoded = PyObject_CallFunctionObjArgs(s->encoder, obj, NULL);
     if (encoded != NULL &&
 #if PY_MAJOR_VERSION < 3
-        !JSON_ASCII_Check(encoded) &&
+        !PyString_Check(encoded) &&
 #endif /* PY_MAJOR_VERSION < 3 */
         !PyUnicode_Check(encoded))
     {
@@ -2854,7 +2803,8 @@
             if (cstr != NULL)
                 rv = _steal_accumulate(rval, cstr);
         }
-        else if (PyString_Check(obj) || PyUnicode_Check(obj))
+        else if ((PyBytes_Check(obj) && s->encoding != NULL) ||
+                 PyUnicode_Check(obj))
         {
             PyObject *encoded = encoder_encode_string(s, obj);
             if (encoded != NULL)
@@ -3366,6 +3316,29 @@
     return rval;
 }
 
+static int
+init_constants(void)
+{
+    JSON_NaN = JSON_InternFromString("NaN");
+    if (JSON_NaN == NULL)
+        return 0;
+    JSON_Infinity = JSON_InternFromString("Infinity");
+    if (JSON_Infinity == NULL)
+        return 0;
+    JSON_NegInfinity = JSON_InternFromString("-Infinity");
+    if (JSON_NegInfinity == NULL)
+        return 0;
+#if PY_MAJOR_VERSION >= 3
+    JSON_EmptyUnicode = PyUnicode_New(0, 127);
+#else /* PY_MAJOR_VERSION >= 3 */
+    JSON_EmptyUnicode = PyUnicode_FromUnicode(NULL, 0);
+#endif /* PY_MAJOR_VERSION >= 3 */
+    if (JSON_EmptyUnicode == NULL)
+        return 0;
+
+    return 1;
+}
+
 static PyObject *
 moduleinit(void)
 {
@@ -3374,6 +3347,8 @@
         return NULL;
     if (PyType_Ready(&PyEncoderType) < 0)
         return NULL;
+    if (!init_constants())
+        return NULL;
 
 #if PY_MAJOR_VERSION >= 3
     m = PyModule_Create(&moduledef);
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/simplejson/compat.py new/simplejson-3.15.0/simplejson/compat.py
--- old/simplejson-3.14.0/simplejson/compat.py  2018-04-22 00:57:00.000000000 +0200
+++ new/simplejson-3.15.0/simplejson/compat.py  2018-05-12 20:58:17.000000000 +0200
@@ -5,10 +5,11 @@
     PY3 = False
     def b(s):
         return s
-    def u(s):
-        return unicode(s, 'unicode_escape')
-    import cStringIO as StringIO
-    StringIO = BytesIO = StringIO.StringIO
+    try:
+        from cStringIO import StringIO
+    except ImportError:
+        from StringIO import StringIO
+    BytesIO = StringIO
     text_type = unicode
     binary_type = str
     string_types = (basestring,)
@@ -21,20 +22,13 @@
         from importlib import reload as reload_module
     else:
         from imp import reload as reload_module
-    import codecs
     def b(s):
-        return codecs.latin_1_encode(s)[0]
-    def u(s):
-        return s
-    import io
-    StringIO = io.StringIO
-    BytesIO = io.BytesIO
+        return bytes(s, 'latin1')
+    from io import StringIO, BytesIO
     text_type = str
     binary_type = bytes
     string_types = (str,)
     integer_types = (int,)
-
-    def unichr(s):
-        return u(chr(s))
+    unichr = chr
 
 long_type = integer_types[-1]
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/simplejson/decoder.py new/simplejson-3.15.0/simplejson/decoder.py
--- old/simplejson-3.14.0/simplejson/decoder.py 2018-04-22 00:57:00.000000000 +0200
+++ new/simplejson-3.15.0/simplejson/decoder.py 2018-05-12 20:58:17.000000000 +0200
@@ -4,7 +4,7 @@
 import re
 import sys
 import struct
-from .compat import u, text_type, binary_type, PY3, unichr
+from .compat import PY3, unichr
 from .scanner import make_scanner, JSONDecodeError
 
 def _import_c_scanstring():
@@ -40,14 +40,14 @@
 
 STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
 BACKSLASH = {
-    '"': u('"'), '\\': u('\u005c'), '/': u('/'),
-    'b': u('\b'), 'f': u('\f'), 'n': u('\n'), 'r': u('\r'), 't': u('\t'),
+    '"': u'"', '\\': u'\\', '/': u'/',
+    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
 }
 
 DEFAULT_ENCODING = "utf-8"
 
 def py_scanstring(s, end, encoding=None, strict=True,
-        _b=BACKSLASH, _m=STRINGCHUNK.match, _join=u('').join,
+        _b=BACKSLASH, _m=STRINGCHUNK.match, _join=u''.join,
         _PY3=PY3, _maxunicode=sys.maxunicode):
     """Scan the string s for a JSON string. End is the index of the
     character in s after the quote that started the JSON string.
@@ -71,8 +71,8 @@
         content, terminator = chunk.groups()
         # Content is contains zero or more unescaped string characters
         if content:
-            if not _PY3 and not isinstance(content, text_type):
-                content = text_type(content, encoding)
+            if not _PY3 and not isinstance(content, unicode):
+                content = unicode(content, encoding)
             _append(content)
         # Terminator is the end of string, a literal control character,
         # or a backslash denoting that an escape sequence follows
@@ -365,8 +365,8 @@
         instance containing a JSON document)
 
         """
-        if _PY3 and isinstance(s, binary_type):
-            s = s.decode(self.encoding)
+        if _PY3 and isinstance(s, bytes):
+            s = str(s, self.encoding)
         obj, end = self.raw_decode(s)
         end = _w(s, end).end()
         if end != len(s):
@@ -388,7 +388,7 @@
             # Ensure that raw_decode bails on negative indexes, the regex
             # would otherwise mask this behavior. #98
             raise JSONDecodeError('Expecting value', s, idx)
-        if _PY3 and not isinstance(s, text_type):
+        if _PY3 and not isinstance(s, str):
             raise TypeError("Input string must be text, not bytes")
         # strip UTF-8 bom
         if len(s) > idx:
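
The switch from s.decode(self.encoding) to str(s, self.encoding) means a bytes
subclass with an overridden decode() can no longer change what the decoder
sees, which is what the new test_bytes_decode test exercises. An illustrative
sketch (the SneakyBytes name is made up; assumes Python 3):

    import simplejson as json

    class SneakyBytes(bytes):
        # decode() is bypassed: JSONDecoder.decode() now uses str(s, encoding).
        def decode(self, encoding=None, errors='strict'):
            return '"not the real payload"'

    data = SneakyBytes(b'"\xe2\x82\xac"')
    json.decoder.JSONDecoder().decode(data)                   # -> u'\u20ac'
    json.decoder.JSONDecoder(encoding='latin1').decode(data)  # -> u'\xe2\x82\xac'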
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/simplejson/encoder.py new/simplejson-3.15.0/simplejson/encoder.py
--- old/simplejson-3.14.0/simplejson/encoder.py 2018-04-22 00:57:00.000000000 +0200
+++ new/simplejson-3.15.0/simplejson/encoder.py 2018-05-12 20:58:17.000000000 +0200
@@ -5,7 +5,7 @@
 from operator import itemgetter
 # Do not import Decimal directly to avoid reload issues
 import decimal
-from .compat import u, unichr, binary_type, text_type, string_types, integer_types, PY3
+from .compat import unichr, binary_type, text_type, string_types, integer_types, PY3
 def _import_speedups():
     try:
         from . import _speedups
@@ -35,19 +35,24 @@
 
 FLOAT_REPR = repr
 
-def encode_basestring(s, _PY3=PY3, _q=u('"')):
+def encode_basestring(s, _PY3=PY3, _q=u'"'):
     """Return a JSON representation of a Python string
 
     """
     if _PY3:
-        if isinstance(s, binary_type):
-            s = s.decode('utf-8')
-        if type(s) is not text_type:
-            s = text_type.__str__(s)
+        if isinstance(s, bytes):
+            s = str(s, 'utf-8')
+        elif type(s) is not str:
+            # convert an str subclass instance to exact str
+            # raise a TypeError otherwise
+            s = str.__str__(s)
     else:
         if isinstance(s, str) and HAS_UTF8.search(s) is not None:
-            s = s.decode('utf-8')
-        if type(s) not in (str, unicode):
+            s = unicode(s, 'utf-8')
+        elif type(s) not in (str, unicode):
+            # convert an str subclass instance to exact str
+            # convert a unicode subclass instance to exact unicode
+            # raise a TypeError otherwise
             if isinstance(s, str):
                 s = str.__str__(s)
             else:
@@ -62,14 +67,19 @@
 
     """
     if _PY3:
-        if isinstance(s, binary_type):
-            s = s.decode('utf-8')
-        if type(s) is not text_type:
-            s = text_type.__str__(s)
+        if isinstance(s, bytes):
+            s = str(s, 'utf-8')
+        elif type(s) is not str:
+            # convert an str subclass instance to exact str
+            # raise a TypeError otherwise
+            s = str.__str__(s)
     else:
         if isinstance(s, str) and HAS_UTF8.search(s) is not None:
-            s = s.decode('utf-8')
-        if type(s) not in (str, unicode):
+            s = unicode(s, 'utf-8')
+        elif type(s) not in (str, unicode):
+            # convert an str subclass instance to exact str
+            # convert a unicode subclass instance to exact unicode
+            # raise a TypeError otherwise
             if isinstance(s, str):
                 s = str.__str__(s)
             else:
@@ -274,7 +284,7 @@
         if isinstance(o, binary_type):
             _encoding = self.encoding
             if (_encoding is not None and not (_encoding == 'utf-8')):
-                o = o.decode(_encoding)
+                o = text_type(o, _encoding)
         if isinstance(o, string_types):
             if self.ensure_ascii:
                 return encode_basestring_ascii(o)
@@ -309,10 +319,10 @@
             _encoder = encode_basestring_ascii
         else:
             _encoder = encode_basestring
-        if self.encoding != 'utf-8':
+        if self.encoding != 'utf-8' and self.encoding is not None:
             def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                 if isinstance(o, binary_type):
-                    o = o.decode(_encoding)
+                    o = text_type(o, _encoding)
                 return _orig_encoder(o)
 
         def floatstr(o, allow_nan=self.allow_nan, ignore_nan=self.ignore_nan,
@@ -482,8 +492,9 @@
                 first = False
             else:
                 buf = separator
-            if (isinstance(value, string_types) or
-                (_PY3 and isinstance(value, binary_type))):
+            if isinstance(value, string_types):
+                yield buf + _encoder(value)
+            elif _PY3 and isinstance(value, bytes) and _encoding is not None:
                 yield buf + _encoder(value)
             elif isinstance(value, RawJSON):
                 yield buf + value.encoded_json
@@ -533,8 +544,8 @@
     def _stringify_key(key):
         if isinstance(key, string_types): # pragma: no cover
             pass
-        elif isinstance(key, binary_type):
-            key = key.decode(_encoding)
+        elif _PY3 and isinstance(key, bytes) and _encoding is not None:
+            key = str(key, _encoding)
         elif isinstance(key, float):
             key = _floatstr(key)
         elif key is True:
@@ -603,8 +614,9 @@
                 yield item_separator
             yield _encoder(key)
             yield _key_separator
-            if (isinstance(value, string_types) or
-                (_PY3 and isinstance(value, binary_type))):
+            if isinstance(value, string_types):
+                yield _encoder(value)
+            elif _PY3 and isinstance(value, bytes) and _encoding is not None:
                 yield _encoder(value)
             elif isinstance(value, RawJSON):
                 yield value.encoded_json
@@ -647,8 +659,9 @@
             del markers[markerid]
 
     def _iterencode(o, _current_indent_level):
-        if (isinstance(o, string_types) or
-            (_PY3 and isinstance(o, binary_type))):
+        if isinstance(o, string_types):
+            yield _encoder(o)
+        elif _PY3 and isinstance(o, bytes) and _encoding is not None:
             yield _encoder(o)
         elif isinstance(o, RawJSON):
             yield o.encoded_json
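
The encoder applies the same idea: encode_basestring*() now call str.__str__()
and str(s, 'utf-8') directly, so overriding __str__() or decode() in a text or
bytes subclass cannot alter the serialized output. A short sketch mirroring
the new test_misbehaving_bytes_subtype test (the LoudBytes name is
illustrative; assumes Python 3):

    import simplejson as json

    class LoudBytes(bytes):
        # Neither decode() nor __str__() is consulted during serialization.
        def decode(self, encoding=None, errors='strict'):
            return 'not the real payload'
        def __str__(self):
            return 'not the real payload'

    json.dumps(LoudBytes(b'caf\xc3\xa9')) == json.dumps(b'caf\xc3\xa9')  # True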
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/simplejson/tests/__init__.py new/simplejson-3.15.0/simplejson/tests/__init__.py
--- old/simplejson-3.14.0/simplejson/tests/__init__.py  2018-04-22 00:57:00.000000000 +0200
+++ new/simplejson-3.15.0/simplejson/tests/__init__.py  2018-05-12 20:58:17.000000000 +0200
@@ -1,6 +1,5 @@
 from __future__ import absolute_import
 import unittest
-import doctest
 import sys
 import os
 
@@ -28,6 +27,13 @@
     import simplejson.decoder
     if suite is None:
         suite = unittest.TestSuite()
+    try:
+        import doctest
+    except ImportError:
+        if sys.version_info < (2, 7):
+            # doctests in 2.6 depends on cStringIO
+            return suite
+        raise
     for mod in (simplejson, simplejson.encoder, simplejson.decoder):
         suite.addTest(doctest.DocTestSuite(mod))
     suite.addTest(doctest.DocFileSuite('../../index.rst'))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/simplejson/tests/test_decode.py new/simplejson-3.15.0/simplejson/tests/test_decode.py
--- old/simplejson-3.14.0/simplejson/tests/test_decode.py       2018-04-22 00:57:00.000000000 +0200
+++ new/simplejson-3.15.0/simplejson/tests/test_decode.py       2018-05-12 20:58:17.000000000 +0200
@@ -3,9 +3,17 @@
 from unittest import TestCase
 
 import simplejson as json
-from simplejson.compat import StringIO
+from simplejson.compat import StringIO, b, binary_type
 from simplejson import OrderedDict
 
+class MisbehavingBytesSubtype(binary_type):
+    def decode(self, encoding=None):
+        return "bad decode"
+    def __str__(self):
+        return "bad __str__"
+    def __bytes__(self):
+        return b("bad __bytes__")
+
 class TestDecode(TestCase):
     if not hasattr(TestCase, 'assertIs'):
         def assertIs(self, a, b):
@@ -87,6 +95,18 @@
             ({'a': {}}, 11),
             cls().raw_decode(" \n{\"a\": {}}"))
 
+    def test_bytes_decode(self):
+        cls = json.decoder.JSONDecoder
+        data = b('"\xe2\x82\xac"')
+        self.assertEqual(cls().decode(data), u'\u20ac')
+        self.assertEqual(cls(encoding='latin1').decode(data), u'\xe2\x82\xac')
+        self.assertEqual(cls(encoding=None).decode(data), u'\u20ac')
+
+        data = MisbehavingBytesSubtype(b('"\xe2\x82\xac"'))
+        self.assertEqual(cls().decode(data), u'\u20ac')
+        self.assertEqual(cls(encoding='latin1').decode(data), u'\xe2\x82\xac')
+        self.assertEqual(cls(encoding=None).decode(data), u'\u20ac')
+
     def test_bounds_checking(self):
         # https://github.com/simplejson/simplejson/issues/98
         j = json.decoder.JSONDecoder()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/simplejson/tests/test_dump.py new/simplejson-3.15.0/simplejson/tests/test_dump.py
--- old/simplejson-3.14.0/simplejson/tests/test_dump.py 2018-04-22 00:57:00.000000000 +0200
+++ new/simplejson-3.15.0/simplejson/tests/test_dump.py 2018-05-12 20:58:17.000000000 +0200
@@ -6,11 +6,22 @@
     def __str__(self):
         return "FAIL!"
 
+class MisbehavingBytesSubtype(binary_type):
+    def decode(self, encoding=None):
+        return "bad decode"
+    def __str__(self):
+        return "bad __str__"
+    def __bytes__(self):
+        return b("bad __bytes__")
+
 def as_text_type(s):
-    if PY3 and isinstance(s, binary_type):
+    if PY3 and isinstance(s, bytes):
         return s.decode('ascii')
     return s
 
+def decode_iso_8859_15(b):
+    return b.decode('iso-8859-15')
+
 class TestDump(TestCase):
     def test_dump(self):
         sio = StringIO()
@@ -140,3 +151,99 @@
             json.dumps(MisbehavingTextSubtype(text)),
             json.dumps(text)
         )
+        self.assertEqual(
+            json.dumps([MisbehavingTextSubtype(text)]),
+            json.dumps([text])
+        )
+        self.assertEqual(
+            json.dumps({MisbehavingTextSubtype(text): 42}),
+            json.dumps({text: 42})
+        )
+
+    def test_misbehaving_bytes_subtype(self):
+        data = b("this is some data \xe2\x82\xac")
+        self.assertEqual(
+            json.dumps(MisbehavingBytesSubtype(data)),
+            json.dumps(data)
+        )
+        self.assertEqual(
+            json.dumps([MisbehavingBytesSubtype(data)]),
+            json.dumps([data])
+        )
+        self.assertEqual(
+            json.dumps({MisbehavingBytesSubtype(data): 42}),
+            json.dumps({data: 42})
+        )
+
+    def test_bytes_toplevel(self):
+        self.assertEqual(json.dumps(b('\xe2\x82\xac')), r'"\u20ac"')
+        self.assertRaises(UnicodeDecodeError, json.dumps, b('\xa4'))
+        self.assertEqual(json.dumps(b('\xa4'), encoding='iso-8859-1'),
+                         r'"\u00a4"')
+        self.assertEqual(json.dumps(b('\xa4'), encoding='iso-8859-15'),
+                         r'"\u20ac"')
+        if PY3:
+            self.assertRaises(TypeError, json.dumps, b('\xe2\x82\xac'),
+                              encoding=None)
+            self.assertRaises(TypeError, json.dumps, b('\xa4'),
+                              encoding=None)
+            self.assertEqual(json.dumps(b('\xa4'), encoding=None,
+                                        default=decode_iso_8859_15),
+                            r'"\u20ac"')
+        else:
+            self.assertEqual(json.dumps(b('\xe2\x82\xac'), encoding=None),
+                             r'"\u20ac"')
+            self.assertRaises(UnicodeDecodeError, json.dumps, b('\xa4'),
+                              encoding=None)
+            self.assertRaises(UnicodeDecodeError, json.dumps, b('\xa4'),
+                              encoding=None, default=decode_iso_8859_15)
+
+    def test_bytes_nested(self):
+        self.assertEqual(json.dumps([b('\xe2\x82\xac')]), r'["\u20ac"]')
+        self.assertRaises(UnicodeDecodeError, json.dumps, [b('\xa4')])
+        self.assertEqual(json.dumps([b('\xa4')], encoding='iso-8859-1'),
+                         r'["\u00a4"]')
+        self.assertEqual(json.dumps([b('\xa4')], encoding='iso-8859-15'),
+                         r'["\u20ac"]')
+        if PY3:
+            self.assertRaises(TypeError, json.dumps, [b('\xe2\x82\xac')],
+                              encoding=None)
+            self.assertRaises(TypeError, json.dumps, [b('\xa4')],
+                              encoding=None)
+            self.assertEqual(json.dumps([b('\xa4')], encoding=None,
+                                        default=decode_iso_8859_15),
+                             r'["\u20ac"]')
+        else:
+            self.assertEqual(json.dumps([b('\xe2\x82\xac')], encoding=None),
+                             r'["\u20ac"]')
+            self.assertRaises(UnicodeDecodeError, json.dumps, [b('\xa4')],
+                              encoding=None)
+            self.assertRaises(UnicodeDecodeError, json.dumps, [b('\xa4')],
+                              encoding=None, default=decode_iso_8859_15)
+
+    def test_bytes_key(self):
+        self.assertEqual(json.dumps({b('\xe2\x82\xac'): 42}), r'{"\u20ac": 42}')
+        self.assertRaises(UnicodeDecodeError, json.dumps, {b('\xa4'): 42})
+        self.assertEqual(json.dumps({b('\xa4'): 42}, encoding='iso-8859-1'),
+                         r'{"\u00a4": 42}')
+        self.assertEqual(json.dumps({b('\xa4'): 42}, encoding='iso-8859-15'),
+                         r'{"\u20ac": 42}')
+        if PY3:
+            self.assertRaises(TypeError, json.dumps, {b('\xe2\x82\xac'): 42},
+                              encoding=None)
+            self.assertRaises(TypeError, json.dumps, {b('\xa4'): 42},
+                              encoding=None)
+            self.assertRaises(TypeError, json.dumps, {b('\xa4'): 42},
+                              encoding=None, default=decode_iso_8859_15)
+            self.assertEqual(json.dumps({b('\xa4'): 42}, encoding=None,
+                                        skipkeys=True),
+                             r'{}')
+        else:
+            self.assertEqual(json.dumps({b('\xe2\x82\xac'): 42}, encoding=None),
+                             r'{"\u20ac": 42}')
+            self.assertRaises(UnicodeDecodeError, json.dumps, {b('\xa4'): 42},
+                              encoding=None)
+            self.assertRaises(UnicodeDecodeError, json.dumps, {b('\xa4'): 42},
+                              encoding=None, default=decode_iso_8859_15)
+            self.assertRaises(UnicodeDecodeError, json.dumps, {b('\xa4'): 42},
+                              encoding=None, skipkeys=True)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/simplejson/tests/test_errors.py new/simplejson-3.15.0/simplejson/tests/test_errors.py
--- old/simplejson-3.14.0/simplejson/tests/test_errors.py       2018-04-22 00:57:00.000000000 +0200
+++ new/simplejson-3.15.0/simplejson/tests/test_errors.py       2018-05-12 20:58:17.000000000 +0200
@@ -2,7 +2,7 @@
 from unittest import TestCase
 
 import simplejson as json
-from simplejson.compat import u, b
+from simplejson.compat import text_type, b
 
 class TestErrors(TestCase):
     def test_string_keys_error(self):
@@ -41,7 +41,7 @@
 
     def test_scan_error(self):
         err = None
-        for t in (u, b):
+        for t in (text_type, b):
             try:
                 json.loads(t('{"asdf": "'))
             except json.JSONDecodeError:
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/simplejson/tests/test_str_subclass.py new/simplejson-3.15.0/simplejson/tests/test_str_subclass.py
--- old/simplejson-3.14.0/simplejson/tests/test_str_subclass.py 2018-04-22 00:57:00.000000000 +0200
+++ new/simplejson-3.15.0/simplejson/tests/test_str_subclass.py 2018-05-12 20:58:17.000000000 +0200
@@ -1,7 +1,7 @@
 from unittest import TestCase
 
 import simplejson
-from simplejson.compat import text_type, u
+from simplejson.compat import text_type
 
 # Tests for issue demonstrated in https://github.com/simplejson/simplejson/issues/144
 class WonkyTextSubclass(text_type):
@@ -10,7 +10,7 @@
 
 class TestStrSubclass(TestCase):
     def test_dump_load(self):
-        for s in ['', '"hello"', 'text', u('\u005c')]:
+        for s in ['', '"hello"', 'text', u'\u005c']:
             self.assertEqual(
                 s,
                 simplejson.loads(simplejson.dumps(WonkyTextSubclass(s))))
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/simplejson/tests/test_tool.py new/simplejson-3.15.0/simplejson/tests/test_tool.py
--- old/simplejson-3.14.0/simplejson/tests/test_tool.py 2018-04-22 00:57:00.000000000 +0200
+++ new/simplejson-3.15.0/simplejson/tests/test_tool.py 2018-05-12 20:58:17.000000000 +0200
@@ -21,6 +21,15 @@
                 "".encode(),
                 stderr).strip()
 
+def open_temp_file():
+    if sys.version_info >= (2, 6):
+        file = tempfile.NamedTemporaryFile(delete=False)
+        filename = file.name
+    else:
+        fd, filename = tempfile.mkstemp()
+        file = os.fdopen(fd, 'w+b')
+    return file, filename
+
 class TestTool(unittest.TestCase):
     data = """
 
@@ -71,35 +80,35 @@
             self.expect.splitlines())
 
     def test_infile_stdout(self):
-        infile = tempfile.NamedTemporaryFile(delete=False)
+        infile, infile_name = open_temp_file()
         try:
             infile.write(self.data.encode())
             infile.close()
             self.assertEqual(
-                self.runTool(args=[infile.name]),
+                self.runTool(args=[infile_name]),
                 self.expect.splitlines())
         finally:
-            os.unlink(infile.name)
+            os.unlink(infile_name)
 
     def test_infile_outfile(self):
-        infile = tempfile.NamedTemporaryFile(delete=False)
+        infile, infile_name = open_temp_file()
         try:
             infile.write(self.data.encode())
             infile.close()
             # outfile will get overwritten by tool, so the delete
             # may not work on some platforms. Do it manually.
-            outfile = tempfile.NamedTemporaryFile(delete=False)
+            outfile, outfile_name = open_temp_file()
             try:
                 outfile.close()
                 self.assertEqual(
-                    self.runTool(args=[infile.name, outfile.name]),
+                    self.runTool(args=[infile_name, outfile_name]),
                     [])
-                with open(outfile.name, 'rb') as f:
+                with open(outfile_name, 'rb') as f:
                     self.assertEqual(
                         f.read().decode('utf8').splitlines(),
                         self.expect.splitlines()
                     )
             finally:
-                os.unlink(outfile.name)
+                os.unlink(outfile_name)
         finally:
-            os.unlink(infile.name)
+            os.unlink(infile_name)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/simplejson/tests/test_unicode.py new/simplejson-3.15.0/simplejson/tests/test_unicode.py
--- old/simplejson-3.14.0/simplejson/tests/test_unicode.py      2018-04-22 00:57:00.000000000 +0200
+++ new/simplejson-3.15.0/simplejson/tests/test_unicode.py      2018-05-12 20:58:17.000000000 +0200
@@ -3,7 +3,7 @@
 from unittest import TestCase
 
 import simplejson as json
-from simplejson.compat import unichr, text_type, b, u, BytesIO
+from simplejson.compat import unichr, text_type, b, BytesIO
 
 class TestUnicode(TestCase):
     def test_encoding1(self):
@@ -93,7 +93,7 @@
     def test_ensure_ascii_false_bytestring_encoding(self):
         # http://code.google.com/p/simplejson/issues/detail?id=48
         doc1 = {u'quux': b('Arr\xc3\xaat sur images')}
-        doc2 = {u'quux': u('Arr\xeat sur images')}
+        doc2 = {u'quux': u'Arr\xeat sur images'}
         doc_ascii = '{"quux": "Arr\\u00eat sur images"}'
         doc_unicode = u'{"quux": "Arr\xeat sur images"}'
         self.assertEqual(json.dumps(doc1), doc_ascii)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/simplejson-3.14.0/simplejson.egg-info/PKG-INFO new/simplejson-3.15.0/simplejson.egg-info/PKG-INFO
--- old/simplejson-3.14.0/simplejson.egg-info/PKG-INFO  2018-04-22 00:57:23.000000000 +0200
+++ new/simplejson-3.15.0/simplejson.egg-info/PKG-INFO  2018-05-12 20:58:44.000000000 +0200
@@ -1,6 +1,6 @@
 Metadata-Version: 1.1
 Name: simplejson
-Version: 3.14.0
+Version: 3.15.0
 Summary: Simple, fast, extensible JSON encoder/decoder for Python
 Home-page: http://github.com/simplejson/simplejson
 Author: Bob Ippolito

