dabo Commit
Revision 5234
Date: 2009-05-26 21:18:09 -0700 (Tue, 26 May 2009)
Author: Ed
Trac: http://trac.dabodev.com/changeset/5234

Changed:
U   trunk/dabo/dApp.py
U   trunk/dabo/lib/__init__.py
A   trunk/dabo/lib/simplejson/
A   trunk/dabo/lib/simplejson/__init__.py
A   trunk/dabo/lib/simplejson/_speedups.py
A   trunk/dabo/lib/simplejson/_speedups.so
A   trunk/dabo/lib/simplejson/decoder.py
A   trunk/dabo/lib/simplejson/encoder.py
A   trunk/dabo/lib/simplejson/scanner.py
A   trunk/dabo/lib/simplejson/tests/
A   trunk/dabo/lib/simplejson/tests/__init__.py
A   trunk/dabo/lib/simplejson/tests/test_decode.py
A   trunk/dabo/lib/simplejson/tests/test_default.py
A   trunk/dabo/lib/simplejson/tests/test_dump.py
A   trunk/dabo/lib/simplejson/tests/test_encode_basestring_ascii.py
A   trunk/dabo/lib/simplejson/tests/test_fail.py
A   trunk/dabo/lib/simplejson/tests/test_float.py
A   trunk/dabo/lib/simplejson/tests/test_indent.py
A   trunk/dabo/lib/simplejson/tests/test_pass1.py
A   trunk/dabo/lib/simplejson/tests/test_pass2.py
A   trunk/dabo/lib/simplejson/tests/test_pass3.py
A   trunk/dabo/lib/simplejson/tests/test_recursion.py
A   trunk/dabo/lib/simplejson/tests/test_scanstring.py
A   trunk/dabo/lib/simplejson/tests/test_separators.py
A   trunk/dabo/lib/simplejson/tests/test_unicode.py
A   trunk/dabo/lib/simplejson/tool.py

Log:
Included the simplejson module as part of the distribution, as it is now 
required for Web Update. Please test for compatibility with all versions of 
Python, as well as any platforms you may run on.
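
If you want to verify the bundled copy on your setup, something along these lines should do it (an untested sketch; it assumes a trunk checkout of dabo is on sys.path):

    # Quick round-trip check of the JSON helpers that Web Update relies on.
    # Run under each Python interpreter / OS you care about.
    import sys
    import dabo.lib

    sample = {"name": u"caf\u00e9", "values": [1, 2.5, True, None]}
    encoded = dabo.lib.jsonEncode(sample)
    decoded = dabo.lib.jsonDecode(encoded)
    assert decoded["values"] == [1, 2.5, True, None]
    print "jsonEncode/jsonDecode round-trip OK on", sys.version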


Diff:
Modified: trunk/dabo/dApp.py
===================================================================
--- trunk/dabo/dApp.py  2009-05-27 02:28:29 UTC (rev 5233)
+++ trunk/dabo/dApp.py  2009-05-27 04:18:09 UTC (rev 5234)
@@ -12,7 +12,6 @@
 import urllib2
 import shutil
 import logging
-import simplejson
 from cStringIO import StringIO
 from zipfile import ZipFile
 import dabo
@@ -28,6 +27,11 @@
 from dabo import dUserSettingProvider
 from dabo.lib.RemoteConnector import RemoteConnector
 
+try:
+       import simplejson
+except ImportError:
+       # Not installed on the user's Python; use the included version
+       from dabo.lib import simplejson as simplejson
 
 
 class Collection(list):
@@ -503,12 +507,8 @@
 
 
        def checkForUpdates(self, evt=None):
-               """Public interface to the web updates mechanism. Returns a 2-tuple
-               consisting of a boolean and a list of projects available for update. The boolean
-               indicates whether this is the first time that the framework is being run and the list
-               contains the updatable project names; e.g., if both project 'Foo' and the framework
-               have updates, it will return ["Dabo", "Foo"]; if only the framework has updates, it will
-               return ["Dabo"]. If there are no updates available, an empty list will be returned.
+               """Public interface to the web updates mechanism. Returns a boolean
+               indicating whether the update was successful.
                """
                return self.uiApp.checkForUpdates(force=True)
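
A note on the changed contract: callers of checkForUpdates() now get back a single boolean rather than the old 2-tuple, so calling code reduces to a truth test. A sketch (app setup/start omitted; adapt to however you create your app object):

    # Hypothetical caller, adjusted for the new single-boolean return value.
    from dabo.dApp import dApp

    app = dApp()
    # ... normal app setup/start would happen before this point ...
    if app.checkForUpdates():
        print "Web Update reported success"
    else:
        print "no update was applied"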
 

Modified: trunk/dabo/lib/__init__.py
===================================================================
--- trunk/dabo/lib/__init__.py  2009-05-27 02:28:29 UTC (rev 5233)
+++ trunk/dabo/lib/__init__.py  2009-05-27 04:18:09 UTC (rev 5234)
@@ -7,6 +7,8 @@
 #      from dabo.lib.ListSorter import ListSorter
 #      import dabo.lib.ofFunctions as oFox
 
+import os
+import sys
 import uuid
 import dabo
 
@@ -21,31 +23,30 @@
 
 try:
        import simplejson
-except:
-       jsonConverter = None
-       def jsonEncode(val):
-               raise ImportError("The simplejson module is not installed")
-       def jsonDecode(val):
-               raise ImportError("The simplejson module is not installed")
-else:
-       import dejavuJSON
-       jsonConverter = dejavuJSON.Converter()
-       def jsonEncode(val):
-               return jsonConverter.dumps(val)
-       
-       def jsonDecode(val):
-               ret = None
-               try:
-                       ret = jsonConverter.loads(val)
-               except UnicodeDecodeError:
-                       # Try typical encodings, starting with the default.
-                       for enctype in (dabo.defaultEncoding, "utf-8", "latin-1"):
-                               try:
-                                       ret = jsonConverter.loads(val, enctype)
-                                       break
-                               except UnicodeDecodeError:
-                                       continue
-               if ret is None:
-                       raise
-               return ret
+except ImportError:
+       # Not installed in site-packages; use the included version
+       pth = os.path.split(dabo.__file__)[0]
+       sys.path.append("%s/lib" % pth)
+       import simplejson
 
+import dejavuJSON
+jsonConverter = dejavuJSON.Converter()
+def jsonEncode(val):
+       return jsonConverter.dumps(val)
+
+def jsonDecode(val):
+       ret = None
+       try:
+               ret = jsonConverter.loads(val)
+       except UnicodeDecodeError:
+               # Try typical encodings, starting with the default.
+               for enctype in (dabo.defaultEncoding, "utf-8", "latin-1"):
+                       try:
+                               ret = jsonConverter.loads(val, enctype)
+                               break
+                       except UnicodeDecodeError:
+                               continue
+       if ret is None:
+               raise
+       return ret
+
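
For reference, the rewritten helpers behave roughly as sketched below; the latin-1 string is only an illustration of the UnicodeDecodeError fallback, and the exact output depends on dejavuJSON:

    from dabo.lib import jsonEncode, jsonDecode

    # Plain data goes through dejavuJSON on top of simplejson.
    print jsonEncode({"city": u"K\u00f8benhavn"})

    # A str that is latin-1 rather than valid UTF-8 still decodes, because
    # jsonDecode retries with dabo.defaultEncoding, "utf-8" and "latin-1".
    latin1_doc = '{"city": "K\xf8benhavn"}'
    print jsonDecode(latin1_doc)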

Added: trunk/dabo/lib/simplejson/__init__.py
===================================================================
--- trunk/dabo/lib/simplejson/__init__.py                               (rev 0)
+++ trunk/dabo/lib/simplejson/__init__.py       2009-05-27 04:18:09 UTC (rev 5234)
@@ -0,0 +1,376 @@
+r"""
+A simple, fast, extensible JSON encoder and decoder
+
+JSON (JavaScript Object Notation) <http://json.org> is a subset of
+JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
+interchange format.
+
+simplejson exposes an API familiar to users of the standard library
+marshal and pickle modules.
+
+Encoding basic Python object hierarchies::
+    
+    >>> import simplejson
+    >>> simplejson.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
+    '["foo", {"bar": ["baz", null, 1.0, 2]}]'
+    >>> print simplejson.dumps("\"foo\bar")
+    "\"foo\bar"
+    >>> print simplejson.dumps(u'\u1234')
+    "\u1234"
+    >>> print simplejson.dumps('\\')
+    "\\"
+    >>> print simplejson.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
+    {"a": 0, "b": 0, "c": 0}
+    >>> from StringIO import StringIO
+    >>> io = StringIO()
+    >>> simplejson.dump(['streaming API'], io)
+    >>> io.getvalue()
+    '["streaming API"]'
+
+Compact encoding::
+
+    >>> import simplejson
+    >>> simplejson.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
+    '[1,2,3,{"4":5,"6":7}]'
+
+Pretty printing::
+
+    >>> import simplejson
+    >>> print simplejson.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
+    {
+        "4": 5, 
+        "6": 7
+    }
+
+Decoding JSON::
+    
+    >>> import simplejson
+    >>> simplejson.loads('["foo", {"bar":["baz", null, 1.0, 2]}]')
+    [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
+    >>> simplejson.loads('"\\"foo\\bar"')
+    u'"foo\x08ar'
+    >>> from StringIO import StringIO
+    >>> io = StringIO('["streaming API"]')
+    >>> simplejson.load(io)
+    [u'streaming API']
+
+Specializing JSON object decoding::
+
+    >>> import simplejson
+    >>> def as_complex(dct):
+    ...     if '__complex__' in dct:
+    ...         return complex(dct['real'], dct['imag'])
+    ...     return dct
+    ... 
+    >>> simplejson.loads('{"__complex__": true, "real": 1, "imag": 2}',
+    ...     object_hook=as_complex)
+    (1+2j)
+    >>> import decimal
+    >>> simplejson.loads('1.1', parse_float=decimal.Decimal)
+    Decimal("1.1")
+
+Extending JSONEncoder::
+    
+    >>> import simplejson
+    >>> class ComplexEncoder(simplejson.JSONEncoder):
+    ...     def default(self, obj):
+    ...         if isinstance(obj, complex):
+    ...             return [obj.real, obj.imag]
+    ...         return simplejson.JSONEncoder.default(self, obj)
+    ... 
+    >>> dumps(2 + 1j, cls=ComplexEncoder)
+    '[2.0, 1.0]'
+    >>> ComplexEncoder().encode(2 + 1j)
+    '[2.0, 1.0]'
+    >>> list(ComplexEncoder().iterencode(2 + 1j))
+    ['[', '2.0', ', ', '1.0', ']']
+    
+
+Using simplejson from the shell to validate and
+pretty-print::
+    
+    $ echo '{"json":"obj"}' | python -msimplejson.tool
+    {
+        "json": "obj"
+    }
+    $ echo '{ 1.2:3.4}' | python -msimplejson.tool
+    Expecting property name: line 1 column 2 (char 2)
+
+Note that the JSON produced by this module's default settings
+is a subset of YAML, so it may be used as a serializer for that as well.
+"""
+__version__ = '1.9.2'
+__all__ = [
+    'dump', 'dumps', 'load', 'loads',
+    'JSONDecoder', 'JSONEncoder',
+]
+
+if __name__ == '__main__':
+    import warnings
+    warnings.warn('python -msimplejson is deprecated, use python -msimplejson.tool', DeprecationWarning)
+    from simplejson.decoder import JSONDecoder
+    from simplejson.encoder import JSONEncoder
+else:
+    from decoder import JSONDecoder
+    from encoder import JSONEncoder
+
+_default_encoder = JSONEncoder(
+    skipkeys=False,
+    ensure_ascii=True,
+    check_circular=True,
+    allow_nan=True,
+    indent=None,
+    separators=None,
+    encoding='utf-8',
+    default=None,
+)
+
+def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
+        allow_nan=True, cls=None, indent=None, separators=None,
+        encoding='utf-8', default=None, **kw):
+    """
+    Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
+    ``.write()``-supporting file-like object).
+
+    If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types
+    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) 
+    will be skipped instead of raising a ``TypeError``.
+
+    If ``ensure_ascii`` is ``False``, then some chunks written to ``fp``
+    may be ``unicode`` instances, subject to normal Python ``str`` to
+    ``unicode`` coercion rules. Unless ``fp.write()`` explicitly
+    understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
+    to cause an error.
+
+    If ``check_circular`` is ``False``, then the circular reference check
+    for container types will be skipped and a circular reference will
+    result in an ``OverflowError`` (or worse).
+
+    If ``allow_nan`` is ``False``, then it will be a ``ValueError`` to
+    serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
+    in strict compliance of the JSON specification, instead of using the
+    JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
+
+    If ``indent`` is a non-negative integer, then JSON array elements and object
+    members will be pretty-printed with that indent level. An indent level
+    of 0 will only insert newlines. ``None`` is the most compact representation.
+
+    If ``separators`` is an ``(item_separator, dict_separator)`` tuple
+    then it will be used instead of the default ``(', ', ': ')`` separators.
+    ``(',', ':')`` is the most compact JSON representation.
+
+    ``encoding`` is the character encoding for str instances, default is UTF-8.
+
+    ``default(obj)`` is a function that should return a serializable version
+    of obj or raise TypeError. The default simply raises TypeError.
+
+    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
+    ``.default()`` method to serialize additional types), specify it with
+    the ``cls`` kwarg.
+    """
+    # cached encoder
+    if (skipkeys is False and ensure_ascii is True and
+        check_circular is True and allow_nan is True and
+        cls is None and indent is None and separators is None and
+        encoding == 'utf-8' and default is None and not kw):
+        iterable = _default_encoder.iterencode(obj)
+    else:
+        if cls is None:
+            cls = JSONEncoder
+        iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
+            check_circular=check_circular, allow_nan=allow_nan, indent=indent,
+            separators=separators, encoding=encoding,
+            default=default, **kw).iterencode(obj)
+    # could accelerate with writelines in some versions of Python, at
+    # a debuggability cost
+    for chunk in iterable:
+        fp.write(chunk)
+
+
+def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
+        allow_nan=True, cls=None, indent=None, separators=None,
+        encoding='utf-8', default=None, **kw):
+    """
+    Serialize ``obj`` to a JSON formatted ``str``.
+
+    If ``skipkeys`` is ``True`` then ``dict`` keys that are not basic types
+    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) 
+    will be skipped instead of raising a ``TypeError``.
+
+    If ``ensure_ascii`` is ``False``, then the return value will be a
+    ``unicode`` instance subject to normal Python ``str`` to ``unicode``
+    coercion rules instead of being escaped to an ASCII ``str``.
+
+    If ``check_circular`` is ``False``, then the circular reference check
+    for container types will be skipped and a circular reference will
+    result in an ``OverflowError`` (or worse).
+
+    If ``allow_nan`` is ``False``, then it will be a ``ValueError`` to
+    serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
+    strict compliance of the JSON specification, instead of using the
+    JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
+
+    If ``indent`` is a non-negative integer, then JSON array elements and
+    object members will be pretty-printed with that indent level. An indent
+    level of 0 will only insert newlines. ``None`` is the most compact
+    representation.
+
+    If ``separators`` is an ``(item_separator, dict_separator)`` tuple
+    then it will be used instead of the default ``(', ', ': ')`` separators.
+    ``(',', ':')`` is the most compact JSON representation.
+
+    ``encoding`` is the character encoding for str instances, default is UTF-8.
+
+    ``default(obj)`` is a function that should return a serializable version
+    of obj or raise TypeError. The default simply raises TypeError.
+
+    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
+    ``.default()`` method to serialize additional types), specify it with
+    the ``cls`` kwarg.
+    """
+    # cached encoder
+    if (skipkeys is False and ensure_ascii is True and
+        check_circular is True and allow_nan is True and
+        cls is None and indent is None and separators is None and
+        encoding == 'utf-8' and default is None and not kw):
+        return _default_encoder.encode(obj)
+    if cls is None:
+        cls = JSONEncoder
+    return cls(
+        skipkeys=skipkeys, ensure_ascii=ensure_ascii,
+        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
+        separators=separators, encoding=encoding, default=default,
+        **kw).encode(obj)
+
+
+_default_decoder = JSONDecoder(encoding=None, object_hook=None)
+
+
+def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
+        parse_int=None, parse_constant=None, **kw):
+    """
+    Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
+    a JSON document) to a Python object.
+
+    If the contents of ``fp`` is encoded with an ASCII based encoding other
+    than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
+    be specified. Encodings that are not ASCII based (such as UCS-2) are
+    not allowed, and should be wrapped with
+    ``codecs.getreader(fp)(encoding)``, or simply decoded to a ``unicode``
+    object and passed to ``loads()``
+
+    ``object_hook`` is an optional function that will be called with the
+    result of any object literal decode (a ``dict``). The return value of
+    ``object_hook`` will be used instead of the ``dict``. This feature
+    can be used to implement custom decoders (e.g. JSON-RPC class hinting).
+    
+    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
+    kwarg.
+    """
+    return loads(fp.read(),
+        encoding=encoding, cls=cls, object_hook=object_hook,
+        parse_float=parse_float, parse_int=parse_int,
+        parse_constant=parse_constant, **kw)
+
+
+def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
+        parse_int=None, parse_constant=None, **kw):
+    """
+    Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
+    document) to a Python object.
+
+    If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
+    other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
+    must be specified. Encodings that are not ASCII based (such as UCS-2)
+    are not allowed and should be decoded to ``unicode`` first.
+
+    ``object_hook`` is an optional function that will be called with the
+    result of any object literal decode (a ``dict``). The return value of
+    ``object_hook`` will be used instead of the ``dict``. This feature
+    can be used to implement custom decoders (e.g. JSON-RPC class hinting).
+
+    ``parse_float``, if specified, will be called with the string
+    of every JSON float to be decoded. By default this is equivalent to
+    float(num_str). This can be used to use another datatype or parser
+    for JSON floats (e.g. decimal.Decimal).
+
+    ``parse_int``, if specified, will be called with the string
+    of every JSON int to be decoded. By default this is equivalent to
+    int(num_str). This can be used to use another datatype or parser
+    for JSON integers (e.g. float).
+
+    ``parse_constant``, if specified, will be called with one of the
+    following strings: -Infinity, Infinity, NaN, null, true, false.
+    This can be used to raise an exception if invalid JSON numbers
+    are encountered.
+
+    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
+    kwarg.
+    """
+    if (cls is None and encoding is None and object_hook is None and
+            parse_int is None and parse_float is None and
+            parse_constant is None and not kw):
+        return _default_decoder.decode(s)
+    if cls is None:
+        cls = JSONDecoder
+    if object_hook is not None:
+        kw['object_hook'] = object_hook
+    if parse_float is not None:
+        kw['parse_float'] = parse_float
+    if parse_int is not None:
+        kw['parse_int'] = parse_int
+    if parse_constant is not None:
+        kw['parse_constant'] = parse_constant
+    return cls(encoding=encoding, **kw).decode(s)
+
+
+#
+# Compatibility cruft from other libraries
+#
+
+
+def decode(s):
+    """
+    demjson, python-cjson API compatibility hook. Use loads(s) instead.
+    """
+    import warnings
+    warnings.warn("simplejson.loads(s) should be used instead of decode(s)",
+        DeprecationWarning)
+    return loads(s)
+
+
+def encode(obj):
+    """
+    demjson, python-cjson compatibility hook. Use dumps(s) instead.
+    """
+    import warnings
+    warnings.warn("simplejson.dumps(s) should be used instead of encode(s)",
+        DeprecationWarning)
+    return dumps(obj)
+
+
+def read(s):
+    """
+    jsonlib, JsonUtils, python-json, json-py API compatibility hook.
+    Use loads(s) instead.
+    """
+    import warnings
+    warnings.warn("simplejson.loads(s) should be used instead of read(s)",
+        DeprecationWarning)
+    return loads(s)
+
+
+def write(obj):
+    """
+    jsonlib, JsonUtils, python-json, json-py API compatibility hook.
+    Use dumps(s) instead.
+    """
+    import warnings
+    warnings.warn("simplejson.dumps(s) should be used instead of write(s)",
+        DeprecationWarning)
+    return dumps(obj)
+
+
+if __name__ == '__main__':
+    import simplejson.tool
+    simplejson.tool.main()
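
The ``default`` and ``object_hook`` keywords documented above are the hooks for round-tripping types that JSON has no literal for; a small standalone sketch for dates (the helper names here are invented for illustration):

    import datetime
    from dabo.lib import simplejson

    def encode_default(obj):
        # Called only for objects simplejson cannot serialize natively.
        if isinstance(obj, datetime.date):
            return {"__date__": obj.isoformat()}
        raise TypeError("%r is not JSON serializable" % (obj,))

    def decode_hook(dct):
        # Undo the marker dict written by encode_default.
        if "__date__" in dct:
            y, m, d = map(int, dct["__date__"].split("-"))
            return datetime.date(y, m, d)
        return dct

    text = simplejson.dumps({"shipped": datetime.date(2009, 5, 26)},
        default=encode_default)
    print text
    print simplejson.loads(text, object_hook=decode_hook)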

Added: trunk/dabo/lib/simplejson/_speedups.py
===================================================================
--- trunk/dabo/lib/simplejson/_speedups.py                              (rev 0)
+++ trunk/dabo/lib/simplejson/_speedups.py      2009-05-27 04:18:09 UTC (rev 5234)
@@ -0,0 +1,7 @@
+def __bootstrap__():
+   global __bootstrap__, __loader__, __file__
+   import sys, pkg_resources, imp
+   __file__ = pkg_resources.resource_filename(__name__,'_speedups.so')
+   del __bootstrap__, __loader__
+   imp.load_dynamic(__name__,__file__)
+__bootstrap__()
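
Note for testers: _speedups.so is a compiled extension built for one specific platform and Python build. When it cannot be loaded, the ImportError is swallowed and the pure-Python code paths take over; nothing breaks, it just runs slower. A quick sketch to see which path is active on your machine:

    # scanstring and py_scanstring both live in the bundled decoder module;
    # they are the same object only when no C speedups could be loaded.
    from dabo.lib.simplejson import decoder
    print "C speedups active:", decoder.scanstring is not decoder.py_scanstring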

Added: trunk/dabo/lib/simplejson/_speedups.so
===================================================================
(Binary files differ)


Property changes on: trunk/dabo/lib/simplejson/_speedups.so
___________________________________________________________________
Name: svn:executable
   + *
Name: svn:mime-type
   + application/octet-stream

Added: trunk/dabo/lib/simplejson/decoder.py
===================================================================
--- trunk/dabo/lib/simplejson/decoder.py                                (rev 0)
+++ trunk/dabo/lib/simplejson/decoder.py        2009-05-27 04:18:09 UTC (rev 5234)
@@ -0,0 +1,343 @@
+"""
+Implementation of JSONDecoder
+"""
+import re
+import sys
+
+from simplejson.scanner import Scanner, pattern
+try:
+    from simplejson._speedups import scanstring as c_scanstring
+except ImportError:
+    pass
+
+FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
+
+def _floatconstants():
+    import struct
+    import sys
+    _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
+    if sys.byteorder != 'big':
+        _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
+    nan, inf = struct.unpack('dd', _BYTES)
+    return nan, inf, -inf
+
+NaN, PosInf, NegInf = _floatconstants()
+
+
+def linecol(doc, pos):
+    lineno = doc.count('\n', 0, pos) + 1
+    if lineno == 1:
+        colno = pos
+    else:
+        colno = pos - doc.rindex('\n', 0, pos)
+    return lineno, colno
+
+
+def errmsg(msg, doc, pos, end=None):
+    lineno, colno = linecol(doc, pos)
+    if end is None:
+        return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
+    endlineno, endcolno = linecol(doc, end)
+    return '%s: line %d column %d - line %d column %d (char %d - %d)' % (
+        msg, lineno, colno, endlineno, endcolno, pos, end)
+
+
+_CONSTANTS = {
+    '-Infinity': NegInf,
+    'Infinity': PosInf,
+    'NaN': NaN,
+    'true': True,
+    'false': False,
+    'null': None,
+}
+
+def JSONConstant(match, context, c=_CONSTANTS):
+    s = match.group(0)
+    fn = getattr(context, 'parse_constant', None)
+    if fn is None:
+        rval = c[s]
+    else:
+        rval = fn(s)
+    return rval, None
+pattern('(-?Infinity|NaN|true|false|null)')(JSONConstant)
+
+
+def JSONNumber(match, context):
+    match = JSONNumber.regex.match(match.string, *match.span())
+    integer, frac, exp = match.groups()
+    if frac or exp:
+        fn = getattr(context, 'parse_float', None) or float
+        res = fn(integer + (frac or '') + (exp or ''))
+    else:
+        fn = getattr(context, 'parse_int', None) or int
+        res = fn(integer)
+    return res, None
+pattern(r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?')(JSONNumber)
+
+
+STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
+BACKSLASH = {
+    '"': u'"', '\\': u'\\', '/': u'/',
+    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
+}
+
+DEFAULT_ENCODING = "utf-8"
+
+def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
+    if encoding is None:
+        encoding = DEFAULT_ENCODING
+    chunks = []
+    _append = chunks.append
+    begin = end - 1
+    while 1:
+        chunk = _m(s, end)
+        if chunk is None:
+            raise ValueError(
+                errmsg("Unterminated string starting at", s, begin))
+        end = chunk.end()
+        content, terminator = chunk.groups()
+        if content:
+            if not isinstance(content, unicode):
+                content = unicode(content, encoding)
+            _append(content)
+        if terminator == '"':
+            break
+        elif terminator != '\\':
+            if strict:
+                raise ValueError(errmsg("Invalid control character %r at", s, end))
+            else:
+                _append(terminator)
+                continue
+        try:
+            esc = s[end]
+        except IndexError:
+            raise ValueError(
+                errmsg("Unterminated string starting at", s, begin))
+        if esc != 'u':
+            try:
+                m = _b[esc]
+            except KeyError:
+                raise ValueError(
+                    errmsg("Invalid \\escape: %r" % (esc,), s, end))
+            end += 1
+        else:
+            esc = s[end + 1:end + 5]
+            next_end = end + 5
+            msg = "Invalid \\uXXXX escape"
+            try:
+                if len(esc) != 4:
+                    raise ValueError
+                uni = int(esc, 16)
+                if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
+                    msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
+                    if not s[end + 5:end + 7] == '\\u':
+                        raise ValueError
+                    esc2 = s[end + 7:end + 11]
+                    if len(esc2) != 4:
+                        raise ValueError
+                    uni2 = int(esc2, 16)
+                    uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
+                    next_end += 6
+                m = unichr(uni)
+            except ValueError:
+                raise ValueError(errmsg(msg, s, end))
+            end = next_end
+        _append(m)
+    return u''.join(chunks), end
+
+
+# Use speedup
+try:
+    scanstring = c_scanstring
+except NameError:
+    scanstring = py_scanstring
+
+def JSONString(match, context):
+    encoding = getattr(context, 'encoding', None)
+    strict = getattr(context, 'strict', True)
+    return scanstring(match.string, match.end(), encoding, strict)
+pattern(r'"')(JSONString)
+
+
+WHITESPACE = re.compile(r'\s*', FLAGS)
+
+def JSONObject(match, context, _w=WHITESPACE.match):
+    pairs = {}
+    s = match.string
+    end = _w(s, match.end()).end()
+    nextchar = s[end:end + 1]
+    # Trivial empty object
+    if nextchar == '}':
+        return pairs, end + 1
+    if nextchar != '"':
+        raise ValueError(errmsg("Expecting property name", s, end))
+    end += 1
+    encoding = getattr(context, 'encoding', None)
+    strict = getattr(context, 'strict', True)
+    iterscan = JSONScanner.iterscan
+    while True:
+        key, end = scanstring(s, end, encoding, strict)
+        end = _w(s, end).end()
+        if s[end:end + 1] != ':':
+            raise ValueError(errmsg("Expecting : delimiter", s, end))
+        end = _w(s, end + 1).end()
+        try:
+            value, end = iterscan(s, idx=end, context=context).next()
+        except StopIteration:
+            raise ValueError(errmsg("Expecting object", s, end))
+        pairs[key] = value
+        end = _w(s, end).end()
+        nextchar = s[end:end + 1]
+        end += 1
+        if nextchar == '}':
+            break
+        if nextchar != ',':
+            raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
+        end = _w(s, end).end()
+        nextchar = s[end:end + 1]
+        end += 1
+        if nextchar != '"':
+            raise ValueError(errmsg("Expecting property name", s, end - 1))
+    object_hook = getattr(context, 'object_hook', None)
+    if object_hook is not None:
+        pairs = object_hook(pairs)
+    return pairs, end
+pattern(r'{')(JSONObject)
+
+
+def JSONArray(match, context, _w=WHITESPACE.match):
+    values = []
+    s = match.string
+    end = _w(s, match.end()).end()
+    # Look-ahead for trivial empty array
+    nextchar = s[end:end + 1]
+    if nextchar == ']':
+        return values, end + 1
+    iterscan = JSONScanner.iterscan
+    while True:
+        try:
+            value, end = iterscan(s, idx=end, context=context).next()
+        except StopIteration:
+            raise ValueError(errmsg("Expecting object", s, end))
+        values.append(value)
+        end = _w(s, end).end()
+        nextchar = s[end:end + 1]
+        end += 1
+        if nextchar == ']':
+            break
+        if nextchar != ',':
+            raise ValueError(errmsg("Expecting , delimiter", s, end))
+        end = _w(s, end).end()
+    return values, end
+pattern(r'\[')(JSONArray)
+
+
+ANYTHING = [
+    JSONObject,
+    JSONArray,
+    JSONString,
+    JSONConstant,
+    JSONNumber,
+]
+
+JSONScanner = Scanner(ANYTHING)
+
+
+class JSONDecoder(object):
+    """
+    Simple JSON <http://json.org> decoder
+
+    Performs the following translations in decoding by default:
+    
+    +---------------+-------------------+
+    | JSON          | Python            |
+    +===============+===================+
+    | object        | dict              |
+    +---------------+-------------------+
+    | array         | list              |
+    +---------------+-------------------+
+    | string        | unicode           |
+    +---------------+-------------------+
+    | number (int)  | int, long         |
+    +---------------+-------------------+
+    | number (real) | float             |
+    +---------------+-------------------+
+    | true          | True              |
+    +---------------+-------------------+
+    | false         | False             |
+    +---------------+-------------------+
+    | null          | None              |
+    +---------------+-------------------+
+
+    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
+    their corresponding ``float`` values, which is outside the JSON spec.
+    """
+
+    _scanner = Scanner(ANYTHING)
+    __all__ = ['__init__', 'decode', 'raw_decode']
+
+    def __init__(self, encoding=None, object_hook=None, parse_float=None,
+            parse_int=None, parse_constant=None, strict=True):
+        """
+        ``encoding`` determines the encoding used to interpret any ``str``
+        objects decoded by this instance (utf-8 by default).  It has no
+        effect when decoding ``unicode`` objects.
+        
+        Note that currently only encodings that are a superset of ASCII work,
+        strings of other encodings should be passed in as ``unicode``.
+
+        ``object_hook``, if specified, will be called with the result
+        of every JSON object decoded and its return value will be used in
+        place of the given ``dict``.  This can be used to provide custom
+        deserializations (e.g. to support JSON-RPC class hinting).
+
+        ``parse_float``, if specified, will be called with the string
+        of every JSON float to be decoded. By default this is equivalent to
+        float(num_str). This can be used to use another datatype or parser
+        for JSON floats (e.g. decimal.Decimal).
+
+        ``parse_int``, if specified, will be called with the string
+        of every JSON int to be decoded. By default this is equivalent to
+        int(num_str). This can be used to use another datatype or parser
+        for JSON integers (e.g. float).
+
+        ``parse_constant``, if specified, will be called with one of the
+        following strings: -Infinity, Infinity, NaN, null, true, false.
+        This can be used to raise an exception if invalid JSON numbers
+        are encountered.
+        """
+        self.encoding = encoding
+        self.object_hook = object_hook
+        self.parse_float = parse_float
+        self.parse_int = parse_int
+        self.parse_constant = parse_constant
+        self.strict = strict
+
+    def decode(self, s, _w=WHITESPACE.match):
+        """
+        Return the Python representation of ``s`` (a ``str`` or ``unicode``
+        instance containing a JSON document)
+        """
+        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
+        end = _w(s, end).end()
+        if end != len(s):
+            raise ValueError(errmsg("Extra data", s, end, len(s)))
+        return obj
+
+    def raw_decode(self, s, **kw):
+        """
+        Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
+        with a JSON document) and return a 2-tuple of the Python
+        representation and the index in ``s`` where the document ended.
+
+        This can be used to decode a JSON document from a string that may
+        have extraneous data at the end.
+        """
+        kw.setdefault('context', self)
+        try:
+            obj, end = self._scanner.iterscan(s, **kw).next()
+        except StopIteration:
+            raise ValueError("No JSON object could be decoded")
+        return obj, end
+
+__all__ = ['JSONDecoder']
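
raw_decode(), described above, is the call to reach for when a JSON document is embedded in a larger string; a brief sketch:

    from dabo.lib import simplejson

    buf = '{"status": "ok"} trailing text that is not JSON'
    obj, end = simplejson.JSONDecoder().raw_decode(buf)
    print obj        # the decoded dict
    print buf[end:]  # the leftover, non-JSON tail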

Added: trunk/dabo/lib/simplejson/encoder.py
===================================================================
--- trunk/dabo/lib/simplejson/encoder.py                                (rev 0)
+++ trunk/dabo/lib/simplejson/encoder.py        2009-05-27 04:18:09 UTC (rev 5234)
@@ -0,0 +1,385 @@
+"""
+Implementation of JSONEncoder
+"""
+import re
+
+try:
+    from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
+except ImportError:
+    pass
+
+ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
+ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
+HAS_UTF8 = re.compile(r'[\x80-\xff]')
+ESCAPE_DCT = {
+    '\\': '\\\\',
+    '"': '\\"',
+    '\b': '\\b',
+    '\f': '\\f',
+    '\n': '\\n',
+    '\r': '\\r',
+    '\t': '\\t',
+}
+for i in range(0x20):
+    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
+
+# Assume this produces an infinity on all machines (probably not guaranteed)
+INFINITY = float('1e66666')
+FLOAT_REPR = repr
+
+def floatstr(o, allow_nan=True):
+    # Check for specials.  Note that this type of test is processor- and/or
+    # platform-specific, so do tests which don't depend on the internals.
+
+    if o != o:
+        text = 'NaN'
+    elif o == INFINITY:
+        text = 'Infinity'
+    elif o == -INFINITY:
+        text = '-Infinity'
+    else:
+        return FLOAT_REPR(o)
+
+    if not allow_nan:
+        raise ValueError("Out of range float values are not JSON compliant: %r"
+            % (o,))
+
+    return text
+
+
+def encode_basestring(s):
+    """
+    Return a JSON representation of a Python string
+    """
+    def replace(match):
+        return ESCAPE_DCT[match.group(0)]
+    return '"' + ESCAPE.sub(replace, s) + '"'
+
+
+def py_encode_basestring_ascii(s):
+    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
+        s = s.decode('utf-8')
+    def replace(match):
+        s = match.group(0)
+        try:
+            return ESCAPE_DCT[s]
+        except KeyError:
+            n = ord(s)
+            if n < 0x10000:
+                return '\\u%04x' % (n,)
+            else:
+                # surrogate pair
+                n -= 0x10000
+                s1 = 0xd800 | ((n >> 10) & 0x3ff)
+                s2 = 0xdc00 | (n & 0x3ff)
+                return '\\u%04x\\u%04x' % (s1, s2)
+    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
+
+
+try:
+    encode_basestring_ascii = c_encode_basestring_ascii
+except NameError:
+    encode_basestring_ascii = py_encode_basestring_ascii
+
+
+class JSONEncoder(object):
+    """
+    Extensible JSON <http://json.org> encoder for Python data structures.
+
+    Supports the following objects and types by default:
+    
+    +-------------------+---------------+
+    | Python            | JSON          |
+    +===================+===============+
+    | dict              | object        |
+    +-------------------+---------------+
+    | list, tuple       | array         |
+    +-------------------+---------------+
+    | str, unicode      | string        |
+    +-------------------+---------------+
+    | int, long, float  | number        |
+    +-------------------+---------------+
+    | True              | true          |
+    +-------------------+---------------+
+    | False             | false         |
+    +-------------------+---------------+
+    | None              | null          |
+    +-------------------+---------------+
+
+    To extend this to recognize other objects, subclass and implement a
+    ``.default()`` method that returns a serializable object for ``o`` if
+    possible; otherwise it should call the superclass implementation
+    (to raise ``TypeError``).
+    """
+    __all__ = ['__init__', 'default', 'encode', 'iterencode']
+    item_separator = ', '
+    key_separator = ': '
+    def __init__(self, skipkeys=False, ensure_ascii=True,
+            check_circular=True, allow_nan=True, sort_keys=False,
+            indent=None, separators=None, encoding='utf-8', default=None):
+        """
+        Constructor for JSONEncoder, with sensible defaults.
+
+        If skipkeys is False, then it is a TypeError to attempt
+        encoding of keys that are not str, int, long, float or None.  If
+        skipkeys is True, such items are simply skipped.
+
+        If ensure_ascii is True, the output is guaranteed to be str
+        objects with all incoming unicode characters escaped.  If
+        ensure_ascii is false, the output will be unicode object.
+
+        If check_circular is True, then lists, dicts, and custom encoded
+        objects will be checked for circular references during encoding to
+        prevent an infinite recursion (which would cause an OverflowError).
+        Otherwise, no such check takes place.
+
+        If allow_nan is True, then NaN, Infinity, and -Infinity will be
+        encoded as such.  This behavior is not JSON specification compliant,
+        but is consistent with most JavaScript based encoders and decoders.
+        Otherwise, it will be a ValueError to encode such floats.
+
+        If sort_keys is True, then the output of dictionaries will be
+        sorted by key; this is useful for regression tests to ensure
+        that JSON serializations can be compared on a day-to-day basis.
+
+        If indent is a non-negative integer, then JSON array
+        elements and object members will be pretty-printed with that
+        indent level.  An indent level of 0 will only insert newlines.
+        None is the most compact representation.
+
+        If specified, separators should be a (item_separator, key_separator)
+        tuple.  The default is (', ', ': ').  To get the most compact JSON
+        representation you should specify (',', ':') to eliminate whitespace.
+
+        If specified, default is a function that gets called for objects
+        that can't otherwise be serialized.  It should return a JSON encodable
+        version of the object or raise a ``TypeError``.
+
+        If encoding is not None, then all input strings will be
+        transformed into unicode using that encoding prior to JSON-encoding.
+        The default is UTF-8.
+        """
+
+        self.skipkeys = skipkeys
+        self.ensure_ascii = ensure_ascii
+        self.check_circular = check_circular
+        self.allow_nan = allow_nan
+        self.sort_keys = sort_keys
+        self.indent = indent
+        self.current_indent_level = 0
+        if separators is not None:
+            self.item_separator, self.key_separator = separators
+        if default is not None:
+            self.default = default
+        self.encoding = encoding
+
+    def _newline_indent(self):
+        return '\n' + (' ' * (self.indent * self.current_indent_level))
+
+    def _iterencode_list(self, lst, markers=None):
+        if not lst:
+            yield '[]'
+            return
+        if markers is not None:
+            markerid = id(lst)
+            if markerid in markers:
+                raise ValueError("Circular reference detected")
+            markers[markerid] = lst
+        yield '['
+        if self.indent is not None:
+            self.current_indent_level += 1
+            newline_indent = self._newline_indent()
+            separator = self.item_separator + newline_indent
+            yield newline_indent
+        else:
+            newline_indent = None
+            separator = self.item_separator
+        first = True
+        for value in lst:
+            if first:
+                first = False
+            else:
+                yield separator
+            for chunk in self._iterencode(value, markers):
+                yield chunk
+        if newline_indent is not None:
+            self.current_indent_level -= 1
+            yield self._newline_indent()
+        yield ']'
+        if markers is not None:
+            del markers[markerid]
+
+    def _iterencode_dict(self, dct, markers=None):
+        if not dct:
+            yield '{}'
+            return
+        if markers is no

 (33687 bytes were truncated as the message was too long for the email; max 40000 bytes.)
