219 changed files with 26714 additions and 21513 deletions
File diff suppressed because it is too large
@ -1,5 +0,0 @@ |
|||
#!/usr/bin/env python |
|||
from migrate.versioning.shell import main |
|||
|
|||
if __name__ == '__main__': |
|||
main(%(defaults)s) |
@ -1,11 +1,13 @@ |
|||
from sqlalchemy import * |
|||
from migrate import * |
|||
|
|||
|
|||
def upgrade(migrate_engine): |
|||
# Upgrade operations go here. Don't create your own engine; bind migrate_engine |
|||
# to your metadata |
|||
# Upgrade operations go here. Don't create your own engine; bind |
|||
# migrate_engine to your metadata |
|||
pass |
|||
|
|||
|
|||
def downgrade(migrate_engine): |
|||
# Operations to reverse the above upgrade go here. |
|||
pass |
|||
|
@ -1,11 +1,13 @@ |
|||
from sqlalchemy import * |
|||
from migrate import * |
|||
|
|||
|
|||
def upgrade(migrate_engine): |
|||
# Upgrade operations go here. Don't create your own engine; bind migrate_engine |
|||
# to your metadata |
|||
# Upgrade operations go here. Don't create your own engine; bind |
|||
# migrate_engine to your metadata |
|||
pass |
|||
|
|||
|
|||
def downgrade(migrate_engine): |
|||
# Operations to reverse the above upgrade go here. |
|||
pass |
|||
|
File diff suppressed because it is too large
@ -0,0 +1,102 @@ |
|||
# -*- coding: utf-8 -*- |
|||
|
|||
""" |
|||
pythoncompat |
|||
""" |
|||
|
|||
|
|||
import sys |
|||
|
|||
# ------- |
|||
# Pythons |
|||
# ------- |
|||
|
|||
# Syntax sugar. |
|||
_ver = sys.version_info |
|||
|
|||
#: Python 2.x? |
|||
is_py2 = (_ver[0] == 2) |
|||
|
|||
#: Python 3.x? |
|||
is_py3 = (_ver[0] == 3) |
|||
|
|||
#: Python 3.0.x |
|||
is_py30 = (is_py3 and _ver[1] == 0) |
|||
|
|||
#: Python 3.1.x |
|||
is_py31 = (is_py3 and _ver[1] == 1) |
|||
|
|||
#: Python 3.2.x |
|||
is_py32 = (is_py3 and _ver[1] == 2) |
|||
|
|||
#: Python 3.3.x |
|||
is_py33 = (is_py3 and _ver[1] == 3) |
|||
|
|||
#: Python 3.4.x |
|||
is_py34 = (is_py3 and _ver[1] == 4) |
|||
|
|||
#: Python 2.7.x |
|||
is_py27 = (is_py2 and _ver[1] == 7) |
|||
|
|||
#: Python 2.6.x |
|||
is_py26 = (is_py2 and _ver[1] == 6) |
|||
|
|||
#: Python 2.5.x |
|||
is_py25 = (is_py2 and _ver[1] == 5) |
|||
|
|||
#: Python 2.4.x |
|||
is_py24 = (is_py2 and _ver[1] == 4) # I'm assuming this is not by choice. |
|||
|
|||
|
|||
# --------- |
|||
# Platforms |
|||
# --------- |
|||
|
|||
|
|||
# Syntax sugar. |
|||
_ver = sys.version.lower() |
|||
|
|||
is_pypy = ('pypy' in _ver) |
|||
is_jython = ('jython' in _ver) |
|||
is_ironpython = ('iron' in _ver) |
|||
|
|||
# Assume CPython, if nothing else. |
|||
is_cpython = not any((is_pypy, is_jython, is_ironpython)) |
|||
|
|||
# Windows-based system. |
|||
is_windows = 'win32' in str(sys.platform).lower() |
|||
|
|||
# Standard Linux 2+ system. |
|||
is_linux = ('linux' in str(sys.platform).lower()) |
|||
is_osx = ('darwin' in str(sys.platform).lower()) |
|||
is_hpux = ('hpux' in str(sys.platform).lower()) # Complete guess. |
|||
is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess. |
|||
|
|||
|
|||
# --------- |
|||
# Specifics |
|||
# --------- |
|||
|
|||
|
|||
if is_py2: |
|||
from urllib import quote, unquote, urlencode |
|||
from urlparse import urlparse, urlunparse, urljoin, urlsplit |
|||
from urllib2 import parse_http_list |
|||
import cookielib |
|||
from .packages.oreos.monkeys import SimpleCookie |
|||
from StringIO import StringIO |
|||
|
|||
str = unicode |
|||
bytes = str |
|||
|
|||
|
|||
elif is_py3: |
|||
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote |
|||
from urllib.request import parse_http_list |
|||
from http import cookiejar as cookielib |
|||
from http.cookies import SimpleCookie |
|||
from io import StringIO |
|||
|
|||
str = str |
|||
bytes = bytes |
|||
|
@ -1,35 +1,54 @@ |
|||
# urllib3/exceptions.py |
|||
# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt) |
|||
# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt) |
|||
# |
|||
# This module is part of urllib3 and is released under |
|||
# the MIT License: http://www.opensource.org/licenses/mit-license.php |
|||
|
|||
## Exceptions |
|||
## Base Exceptions |
|||
|
|||
class HTTPError(Exception): |
|||
"Base exception used by this module." |
|||
pass |
|||
|
|||
|
|||
class SSLError(Exception): |
|||
class PoolError(HTTPError): |
|||
"Base exception for errors caused within a pool." |
|||
def __init__(self, pool, message): |
|||
self.pool = pool |
|||
HTTPError.__init__(self, "%s: %s" % (pool, message)) |
|||
|
|||
|
|||
class SSLError(HTTPError): |
|||
"Raised when SSL certificate fails in an HTTPS connection." |
|||
pass |
|||
|
|||
|
|||
class MaxRetryError(HTTPError): |
|||
"Raised when the maximum number of retries is exceeded." |
|||
pass |
|||
## Leaf Exceptions |
|||
|
|||
class MaxRetryError(PoolError): |
|||
"Raised when the maximum number of retries is exceeded." |
|||
def __init__(self, pool, url): |
|||
PoolError.__init__(self, pool, |
|||
"Max retries exceeded with url: %s" % url) |
|||
|
|||
class TimeoutError(HTTPError): |
|||
"Raised when a socket timeout occurs." |
|||
pass |
|||
self.url = url |
|||
|
|||
|
|||
class HostChangedError(HTTPError): |
|||
class HostChangedError(PoolError): |
|||
"Raised when an existing pool gets a request for a foreign host." |
|||
def __init__(self, pool, url, retries=3): |
|||
PoolError.__init__(self, pool, |
|||
"Tried to open a foreign host with url: %s" % url) |
|||
|
|||
self.url = url |
|||
self.retries = retries |
|||
|
|||
|
|||
class TimeoutError(PoolError): |
|||
"Raised when a socket timeout occurs." |
|||
pass |
|||
|
|||
class EmptyPoolError(HTTPError): |
|||
|
|||
class EmptyPoolError(PoolError): |
|||
"Raised when a pool runs out of connections and no more are allowed." |
|||
pass |
|||
|
@ -0,0 +1,47 @@ |
|||
"""The function mimetools.choose_boundary() from Python 2.7, which seems to |
|||
have disappeared in Python 3 (although email.generator._make_boundary() might |
|||
work as a replacement?). |
|||
|
|||
Tweaked to use lock from threading rather than thread. |
|||
""" |
|||
import os |
|||
from threading import Lock |
|||
_counter_lock = Lock() |
|||
|
|||
_counter = 0 |
|||
def _get_next_counter(): |
|||
global _counter |
|||
with _counter_lock: |
|||
_counter += 1 |
|||
return _counter |
|||
|
|||
_prefix = None |
|||
|
|||
def choose_boundary(): |
|||
"""Return a string usable as a multipart boundary. |
|||
|
|||
The string chosen is unique within a single program run, and |
|||
incorporates the user id (if available), process id (if available), |
|||
and current time. So it's very unlikely the returned string appears |
|||
in message text, but there's no guarantee. |
|||
|
|||
The boundary contains dots so you have to quote it in the header.""" |
|||
|
|||
global _prefix |
|||
import time |
|||
if _prefix is None: |
|||
import socket |
|||
try: |
|||
hostid = socket.gethostbyname(socket.gethostname()) |
|||
except socket.gaierror: |
|||
hostid = '127.0.0.1' |
|||
try: |
|||
uid = repr(os.getuid()) |
|||
except AttributeError: |
|||
uid = '1' |
|||
try: |
|||
pid = repr(os.getpid()) |
|||
except AttributeError: |
|||
pid = '1' |
|||
_prefix = hostid + '.' + uid + '.' + pid |
|||
return "%s.%.3f.%d" % (_prefix, time.time(), _get_next_counter()) |
@ -0,0 +1,372 @@ |
|||
"""Utilities for writing code that runs on Python 2 and 3""" |
|||
|
|||
#Copyright (c) 2010-2011 Benjamin Peterson |
|||
|
|||
#Permission is hereby granted, free of charge, to any person obtaining a copy of |
|||
#this software and associated documentation files (the "Software"), to deal in |
|||
#the Software without restriction, including without limitation the rights to |
|||
#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of |
|||
#the Software, and to permit persons to whom the Software is furnished to do so, |
|||
#subject to the following conditions: |
|||
|
|||
#The above copyright notice and this permission notice shall be included in all |
|||
#copies or substantial portions of the Software. |
|||
|
|||
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|||
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS |
|||
#FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR |
|||
#COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER |
|||
#IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN |
|||
#CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
|||
|
|||
import operator |
|||
import sys |
|||
import types |
|||
|
|||
__author__ = "Benjamin Peterson <benjamin@python.org>" |
|||
__version__ = "1.1.0" |
|||
|
|||
|
|||
# True if we are running on Python 3. |
|||
PY3 = sys.version_info[0] == 3 |
|||
|
|||
if PY3: |
|||
string_types = str, |
|||
integer_types = int, |
|||
class_types = type, |
|||
text_type = str |
|||
binary_type = bytes |
|||
|
|||
MAXSIZE = sys.maxsize |
|||
else: |
|||
string_types = basestring, |
|||
integer_types = (int, long) |
|||
class_types = (type, types.ClassType) |
|||
text_type = unicode |
|||
binary_type = str |
|||
|
|||
# It's possible to have sizeof(long) != sizeof(Py_ssize_t). |
|||
class X(object): |
|||
def __len__(self): |
|||
return 1 << 31 |
|||
try: |
|||
len(X()) |
|||
except OverflowError: |
|||
# 32-bit |
|||
MAXSIZE = int((1 << 31) - 1) |
|||
else: |
|||
# 64-bit |
|||
MAXSIZE = int((1 << 63) - 1) |
|||
del X |
|||
|
|||
|
|||
def _add_doc(func, doc): |
|||
"""Add documentation to a function.""" |
|||
func.__doc__ = doc |
|||
|
|||
|
|||
def _import_module(name): |
|||
"""Import module, returning the module after the last dot.""" |
|||
__import__(name) |
|||
return sys.modules[name] |
|||
|
|||
|
|||
class _LazyDescr(object): |
|||
|
|||
def __init__(self, name): |
|||
self.name = name |
|||
|
|||
def __get__(self, obj, tp): |
|||
result = self._resolve() |
|||
setattr(obj, self.name, result) |
|||
# This is a bit ugly, but it avoids running this again. |
|||
delattr(tp, self.name) |
|||
return result |
|||
|
|||
|
|||
class MovedModule(_LazyDescr): |
|||
|
|||
def __init__(self, name, old, new=None): |
|||
super(MovedModule, self).__init__(name) |
|||
if PY3: |
|||
if new is None: |
|||
new = name |
|||
self.mod = new |
|||
else: |
|||
self.mod = old |
|||
|
|||
def _resolve(self): |
|||
return _import_module(self.mod) |
|||
|
|||
|
|||
class MovedAttribute(_LazyDescr): |
|||
|
|||
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): |
|||
super(MovedAttribute, self).__init__(name) |
|||
if PY3: |
|||
if new_mod is None: |
|||
new_mod = name |
|||
self.mod = new_mod |
|||
if new_attr is None: |
|||
if old_attr is None: |
|||
new_attr = name |
|||
else: |
|||
new_attr = old_attr |
|||
self.attr = new_attr |
|||
else: |
|||
self.mod = old_mod |
|||
if old_attr is None: |
|||
old_attr = name |
|||
self.attr = old_attr |
|||
|
|||
def _resolve(self): |
|||
module = _import_module(self.mod) |
|||
return getattr(module, self.attr) |
|||
|
|||
|
|||
|
|||
class _MovedItems(types.ModuleType): |
|||
"""Lazy loading of moved objects""" |
|||
|
|||
|
|||
_moved_attributes = [ |
|||
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), |
|||
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), |
|||
MovedAttribute("map", "itertools", "builtins", "imap", "map"), |
|||
MovedAttribute("reload_module", "__builtin__", "imp", "reload"), |
|||
MovedAttribute("reduce", "__builtin__", "functools"), |
|||
MovedAttribute("StringIO", "StringIO", "io"), |
|||
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), |
|||
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), |
|||
|
|||
MovedModule("builtins", "__builtin__"), |
|||
MovedModule("configparser", "ConfigParser"), |
|||
MovedModule("copyreg", "copy_reg"), |
|||
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), |
|||
MovedModule("http_cookies", "Cookie", "http.cookies"), |
|||
MovedModule("html_entities", "htmlentitydefs", "html.entities"), |
|||
MovedModule("html_parser", "HTMLParser", "html.parser"), |
|||
MovedModule("http_client", "httplib", "http.client"), |
|||
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), |
|||
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), |
|||
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), |
|||
MovedModule("cPickle", "cPickle", "pickle"), |
|||
MovedModule("queue", "Queue"), |
|||
MovedModule("reprlib", "repr"), |
|||
MovedModule("socketserver", "SocketServer"), |
|||
MovedModule("tkinter", "Tkinter"), |
|||
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), |
|||
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), |
|||
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), |
|||
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), |
|||
MovedModule("tkinter_tix", "Tix", "tkinter.tix"), |
|||
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), |
|||
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), |
|||
MovedModule("tkinter_colorchooser", "tkColorChooser", |
|||
"tkinter.colorchooser"), |
|||
MovedModule("tkinter_commondialog", "tkCommonDialog", |
|||
"tkinter.commondialog"), |
|||
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), |
|||
MovedModule("tkinter_font", "tkFont", "tkinter.font"), |
|||
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), |
|||
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", |
|||
"tkinter.simpledialog"), |
|||
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), |
|||
MovedModule("winreg", "_winreg"), |
|||
] |
|||
for attr in _moved_attributes: |
|||
setattr(_MovedItems, attr.name, attr) |
|||
del attr |
|||
|
|||
moves = sys.modules["six.moves"] = _MovedItems("moves") |
|||
|
|||
|
|||
def add_move(move): |
|||
"""Add an item to six.moves.""" |
|||
setattr(_MovedItems, move.name, move) |
|||
|
|||
|
|||
def remove_move(name): |
|||
"""Remove item from six.moves.""" |
|||
try: |
|||
delattr(_MovedItems, name) |
|||
except AttributeError: |
|||
try: |
|||
del moves.__dict__[name] |
|||
except KeyError: |
|||
raise AttributeError("no such move, %r" % (name,)) |
|||
|
|||
|
|||
if PY3: |
|||
_meth_func = "__func__" |
|||
_meth_self = "__self__" |
|||
|
|||
_func_code = "__code__" |
|||
_func_defaults = "__defaults__" |
|||
|
|||
_iterkeys = "keys" |
|||
_itervalues = "values" |
|||
_iteritems = "items" |
|||
else: |
|||
_meth_func = "im_func" |
|||
_meth_self = "im_self" |
|||
|
|||
_func_code = "func_code" |
|||
_func_defaults = "func_defaults" |
|||
|
|||
_iterkeys = "iterkeys" |
|||
_itervalues = "itervalues" |
|||
_iteritems = "iteritems" |
|||
|
|||
|
|||
if PY3: |
|||
def get_unbound_function(unbound): |
|||
return unbound |
|||
|
|||
|
|||
advance_iterator = next |
|||
|
|||
def callable(obj): |
|||
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) |
|||
else: |
|||
def get_unbound_function(unbound): |
|||
return unbound.im_func |
|||
|
|||
|
|||
def advance_iterator(it): |
|||
return it.next() |
|||
|
|||
callable = callable |
|||
_add_doc(get_unbound_function, |
|||
"""Get the function out of a possibly unbound function""") |
|||
|
|||
|
|||
get_method_function = operator.attrgetter(_meth_func) |
|||
get_method_self = operator.attrgetter(_meth_self) |
|||
get_function_code = operator.attrgetter(_func_code) |
|||
get_function_defaults = operator.attrgetter(_func_defaults) |
|||
|
|||
|
|||
def iterkeys(d): |
|||
"""Return an iterator over the keys of a dictionary.""" |
|||
return getattr(d, _iterkeys)() |
|||
|
|||
def itervalues(d): |
|||
"""Return an iterator over the values of a dictionary.""" |
|||
return getattr(d, _itervalues)() |
|||
|
|||
def iteritems(d): |
|||
"""Return an iterator over the (key, value) pairs of a dictionary.""" |
|||
return getattr(d, _iteritems)() |
|||
|
|||
|
|||
if PY3: |
|||
def b(s): |
|||
return s.encode("latin-1") |
|||
def u(s): |
|||
return s |
|||
if sys.version_info[1] <= 1: |
|||
def int2byte(i): |
|||
return bytes((i,)) |
|||
else: |
|||
# This is about 2x faster than the implementation above on 3.2+ |
|||
int2byte = operator.methodcaller("to_bytes", 1, "big") |
|||
import io |
|||
StringIO = io.StringIO |
|||
BytesIO = io.BytesIO |
|||
else: |
|||
def b(s): |
|||
return s |
|||
def u(s): |
|||
return unicode(s, "unicode_escape") |
|||
int2byte = chr |
|||
import StringIO |
|||
StringIO = BytesIO = StringIO.StringIO |
|||
_add_doc(b, """Byte literal""") |
|||
_add_doc(u, """Text literal""") |
|||
|
|||
|
|||
if PY3: |
|||
import builtins |
|||
exec_ = getattr(builtins, "exec") |
|||
|
|||
|
|||
def reraise(tp, value, tb=None): |
|||
if value.__traceback__ is not tb: |
|||
raise value.with_traceback(tb) |
|||
raise value |
|||
|
|||
|
|||
print_ = getattr(builtins, "print") |
|||
del builtins |
|||
|
|||
else: |
|||
def exec_(code, globs=None, locs=None): |
|||
"""Execute code in a namespace.""" |
|||
if globs is None: |
|||
frame = sys._getframe(1) |
|||
globs = frame.f_globals |
|||
if locs is None: |
|||
locs = frame.f_locals |
|||
del frame |
|||
elif locs is None: |
|||
locs = globs |
|||
exec("""exec code in globs, locs""") |
|||
|
|||
|
|||
exec_("""def reraise(tp, value, tb=None): |
|||
raise tp, value, tb |
|||
""") |
|||
|
|||
|
|||
def print_(*args, **kwargs): |
|||
"""The new-style print function.""" |
|||
fp = kwargs.pop("file", sys.stdout) |
|||
if fp is None: |
|||
return |
|||
def write(data): |
|||
if not isinstance(data, basestring): |
|||
data = str(data) |
|||
fp.write(data) |
|||
want_unicode = False |
|||
sep = kwargs.pop("sep", None) |
|||
if sep is not None: |
|||
if isinstance(sep, unicode): |
|||
want_unicode = True |
|||
elif not isinstance(sep, str): |
|||
raise TypeError("sep must be None or a string") |
|||
end = kwargs.pop("end", None) |
|||
if end is not None: |
|||
if isinstance(end, unicode): |
|||
want_unicode = True |
|||
elif not isinstance(end, str): |
|||
raise TypeError("end must be None or a string") |
|||
if kwargs: |
|||
raise TypeError("invalid keyword arguments to print()") |
|||
if not want_unicode: |
|||
for arg in args: |
|||
if isinstance(arg, unicode): |
|||
want_unicode = True |
|||
break |
|||
if want_unicode: |
|||
newline = unicode("\n") |
|||
space = unicode(" ") |
|||
else: |
|||
newline = "\n" |
|||
space = " " |
|||
if sep is None: |
|||
sep = space |
|||
if end is None: |
|||
end = newline |
|||
for i, arg in enumerate(args): |
|||
if i: |
|||
write(sep) |
|||
write(arg) |
|||
write(end) |
|||
|
|||
_add_doc(reraise, """Reraise an exception.""") |
|||
|
|||
|
|||
def with_metaclass(meta, base=object): |
|||
"""Create a base class with a metaclass.""" |
|||
return meta("NewBase", (base,), {}) |
@ -1,438 +0,0 @@ |
|||
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of |
|||
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data |
|||
interchange format. |
|||
|
|||
:mod:`simplejson` exposes an API familiar to users of the standard library |
|||
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained |
|||
version of the :mod:`json` library contained in Python 2.6, but maintains |
|||
compatibility with Python 2.4 and Python 2.5 and (currently) has |
|||
significant performance advantages, even without using the optional C |
|||
extension for speedups. |
|||
|
|||
Encoding basic Python object hierarchies:: |
|||
|
|||
>>> import simplejson as json |
|||
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}]) |
|||
'["foo", {"bar": ["baz", null, 1.0, 2]}]' |
|||
>>> print json.dumps("\"foo\bar") |
|||
"\"foo\bar" |
|||
>>> print json.dumps(u'\u1234') |
|||
"\u1234" |
|||
>>> print json.dumps('\\') |
|||
"\\" |
|||
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True) |
|||
{"a": 0, "b": 0, "c": 0} |
|||
>>> from StringIO import StringIO |
|||
>>> io = StringIO() |
|||
>>> json.dump(['streaming API'], io) |
|||
>>> io.getvalue() |
|||
'["streaming API"]' |
|||
|
|||
Compact encoding:: |
|||
|
|||
>>> import simplejson as json |
|||
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':')) |
|||
'[1,2,3,{"4":5,"6":7}]' |
|||
|
|||
Pretty printing:: |
|||
|
|||
>>> import simplejson as json |
|||
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' ') |
|||
>>> print '\n'.join([l.rstrip() for l in s.splitlines()]) |
|||
{ |
|||
"4": 5, |
|||
"6": 7 |
|||
} |
|||
|
|||
Decoding JSON:: |
|||
|
|||
>>> import simplejson as json |
|||
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}] |
|||
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj |
|||
True |
|||
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar' |
|||
True |
|||
>>> from StringIO import StringIO |
|||
>>> io = StringIO('["streaming API"]') |
|||
>>> json.load(io)[0] == 'streaming API' |
|||
True |
|||
|
|||
Specializing JSON object decoding:: |
|||
|
|||
>>> import simplejson as json |
|||
>>> def as_complex(dct): |
|||
... if '__complex__' in dct: |
|||
... return complex(dct['real'], dct['imag']) |
|||
... return dct |
|||
... |
|||
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}', |
|||
... object_hook=as_complex) |
|||
(1+2j) |
|||
>>> from decimal import Decimal |
|||
>>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1') |
|||
True |
|||
|
|||
Specializing JSON object encoding:: |
|||
|
|||
>>> import simplejson as json |
|||
>>> def encode_complex(obj): |
|||
... if isinstance(obj, complex): |
|||
... return [obj.real, obj.imag] |
|||
... raise TypeError(repr(o) + " is not JSON serializable") |
|||
... |
|||
>>> json.dumps(2 + 1j, default=encode_complex) |
|||
'[2.0, 1.0]' |
|||
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j) |
|||
'[2.0, 1.0]' |
|||
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j)) |
|||
'[2.0, 1.0]' |
|||
|
|||
|
|||
Using simplejson.tool from the shell to validate and pretty-print:: |
|||
|
|||
$ echo '{"json":"obj"}' | python -m simplejson.tool |
|||
{ |
|||
"json": "obj" |
|||
} |
|||
$ echo '{ 1.2:3.4}' | python -m simplejson.tool |
|||
Expecting property name: line 1 column 2 (char 2) |
|||
""" |
|||
__version__ = '2.1.3' |
|||
__all__ = [ |
|||
'dump', 'dumps', 'load', 'loads', |
|||
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder', |
|||
'OrderedDict', |
|||
] |
|||
|
|||
__author__ = 'Bob Ippolito <bob@redivi.com>' |
|||
|
|||
from decimal import Decimal |
|||
|
|||
from decoder import JSONDecoder, JSONDecodeError |
|||
from encoder import JSONEncoder |
|||
def _import_OrderedDict(): |
|||
import collections |
|||
try: |
|||
return collections.OrderedDict |
|||
except AttributeError: |
|||
import ordered_dict |
|||
return ordered_dict.OrderedDict |
|||
OrderedDict = _import_OrderedDict() |
|||
|
|||
def _import_c_make_encoder(): |
|||
try: |
|||
from simplejson._speedups import make_encoder |
|||
return make_encoder |
|||
except ImportError: |
|||
return None |
|||
|
|||
_default_encoder = JSONEncoder( |
|||
skipkeys=False, |
|||
ensure_ascii=True, |
|||
check_circular=True, |
|||
allow_nan=True, |
|||
indent=None, |
|||
separators=None, |
|||
encoding='utf-8', |
|||
default=None, |
|||
use_decimal=False, |
|||
) |
|||
|
|||
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True, |
|||
allow_nan=True, cls=None, indent=None, separators=None, |
|||
encoding='utf-8', default=None, use_decimal=False, **kw): |
|||
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a |
|||
``.write()``-supporting file-like object). |
|||
|
|||
If ``skipkeys`` is true then ``dict`` keys that are not basic types |
|||
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) |
|||
will be skipped instead of raising a ``TypeError``. |
|||
|
|||
If ``ensure_ascii`` is false, then the some chunks written to ``fp`` |
|||
may be ``unicode`` instances, subject to normal Python ``str`` to |
|||
``unicode`` coercion rules. Unless ``fp.write()`` explicitly |
|||
understands ``unicode`` (as in ``codecs.getwriter()``) this is likely |
|||
to cause an error. |
|||
|
|||
If ``check_circular`` is false, then the circular reference check |
|||
for container types will be skipped and a circular reference will |
|||
result in an ``OverflowError`` (or worse). |
|||
|
|||
If ``allow_nan`` is false, then it will be a ``ValueError`` to |
|||
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) |
|||
in strict compliance of the JSON specification, instead of using the |
|||
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). |
|||
|
|||
If *indent* is a string, then JSON array elements and object members |
|||
will be pretty-printed with a newline followed by that string repeated |
|||
for each level of nesting. ``None`` (the default) selects the most compact |
|||
representation without any newlines. For backwards compatibility with |
|||
versions of simplejson earlier than 2.1.0, an integer is also accepted |
|||
and is converted to a string with that many spaces. |
|||
|
|||
If ``separators`` is an ``(item_separator, dict_separator)`` tuple |
|||
then it will be used instead of the default ``(', ', ': ')`` separators. |
|||
``(',', ':')`` is the most compact JSON representation. |
|||
|
|||
``encoding`` is the character encoding for str instances, default is UTF-8. |
|||
|
|||
``default(obj)`` is a function that should return a serializable version |
|||
of obj or raise TypeError. The default simply raises TypeError. |
|||
|
|||
If *use_decimal* is true (default: ``False``) then decimal.Decimal |
|||
will be natively serialized to JSON with full precision. |
|||
|
|||
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the |
|||
``.default()`` method to serialize additional types), specify it with |
|||
the ``cls`` kwarg. |
|||
|
|||
""" |
|||
# cached encoder |
|||
if (not skipkeys and ensure_ascii and |
|||
check_circular and allow_nan and |
|||
cls is None and indent is None and separators is None and |
|||
encoding == 'utf-8' and default is None and not use_decimal |
|||
and not kw): |
|||
iterable = _default_encoder.iterencode(obj) |
|||
else: |
|||
if cls is None: |
|||
cls = JSONEncoder |
|||
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii, |
|||
check_circular=check_circular, allow_nan=allow_nan, indent=indent, |
|||
separators=separators, encoding=encoding, |
|||
default=default, use_decimal=use_decimal, **kw).iterencode(obj) |
|||
# could accelerate with writelines in some versions of Python, at |
|||
# a debuggability cost |
|||
for chunk in iterable: |
|||
fp.write(chunk) |
|||
|
|||
|
|||
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, |
|||
allow_nan=True, cls=None, indent=None, separators=None, |
|||
encoding='utf-8', default=None, use_decimal=False, **kw): |
|||
"""Serialize ``obj`` to a JSON formatted ``str``. |
|||
|
|||
If ``skipkeys`` is false then ``dict`` keys that are not basic types |
|||
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) |
|||
will be skipped instead of raising a ``TypeError``. |
|||
|
|||
If ``ensure_ascii`` is false, then the return value will be a |
|||
``unicode`` instance subject to normal Python ``str`` to ``unicode`` |
|||
coercion rules instead of being escaped to an ASCII ``str``. |
|||
|
|||
If ``check_circular`` is false, then the circular reference check |
|||
for container types will be skipped and a circular reference will |
|||
result in an ``OverflowError`` (or worse). |
|||
|
|||
If ``allow_nan`` is false, then it will be a ``ValueError`` to |
|||
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in |
|||
strict compliance of the JSON specification, instead of using the |
|||
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``). |
|||
|
|||
If ``indent`` is a string, then JSON array elements and object members |
|||
will be pretty-printed with a newline followed by that string repeated |
|||
for each level of nesting. ``None`` (the default) selects the most compact |
|||
representation without any newlines. For backwards compatibility with |
|||
versions of simplejson earlier than 2.1.0, an integer is also accepted |
|||
and is converted to a string with that many spaces. |
|||
|
|||
If ``separators`` is an ``(item_separator, dict_separator)`` tuple |
|||
then it will be used instead of the default ``(', ', ': ')`` separators. |
|||
``(',', ':')`` is the most compact JSON representation. |
|||
|
|||
``encoding`` is the character encoding for str instances, default is UTF-8. |
|||
|
|||
``default(obj)`` is a function that should return a serializable version |
|||
of obj or raise TypeError. The default simply raises TypeError. |
|||
|
|||
If *use_decimal* is true (default: ``False``) then decimal.Decimal |
|||
will be natively serialized to JSON with full precision. |
|||
|
|||
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the |
|||
``.default()`` method to serialize additional types), specify it with |
|||
the ``cls`` kwarg. |
|||
|
|||
""" |
|||
# cached encoder |
|||
if (not skipkeys and ensure_ascii and |
|||
check_circular and allow_nan and |
|||
cls is None and indent is None and separators is None and |
|||
encoding == 'utf-8' and default is None and not use_decimal |
|||
and not kw): |
|||
return _default_encoder.encode(obj) |
|||
if cls is None: |
|||
cls = JSONEncoder |
|||
return cls( |
|||
skipkeys=skipkeys, ensure_ascii=ensure_ascii, |
|||
check_circular=check_circular, allow_nan=allow_nan, indent=indent, |
|||
separators=separators, encoding=encoding, default=default, |
|||
use_decimal=use_decimal, **kw).encode(obj) |
|||
|
|||
|
|||
_default_decoder = JSONDecoder(encoding=None, object_hook=None, |
|||
object_pairs_hook=None) |
|||
|
|||
|
|||
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None, |
|||
parse_int=None, parse_constant=None, object_pairs_hook=None, |
|||
use_decimal=False, **kw): |
|||
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing |
|||
a JSON document) to a Python object. |
|||
|
|||
*encoding* determines the encoding used to interpret any |
|||
:class:`str` objects decoded by this instance (``'utf-8'`` by |
|||
default). It has no effect when decoding :class:`unicode` objects. |
|||
|
|||
Note that currently only encodings that are a superset of ASCII work, |
|||
strings of other encodings should be passed in as :class:`unicode`. |
|||
|
|||
*object_hook*, if specified, will be called with the result of every |
|||
JSON object decoded and its return value will be used in place of the |
|||
given :class:`dict`. This can be used to provide custom |
|||
deserializations (e.g. to support JSON-RPC class hinting). |
|||
|
|||
*object_pairs_hook* is an optional function that will be called with |
|||
the result of any object literal decode with an ordered list of pairs. |
|||
The return value of *object_pairs_hook* will be used instead of the |
|||
:class:`dict`. This feature can be used to implement custom decoders |
|||
that rely on the order that the key and value pairs are decoded (for |
|||
example, :func:`collections.OrderedDict` will remember the order of |
|||
insertion). If *object_hook* is also defined, the *object_pairs_hook* |
|||
takes priority. |
|||
|
|||
*parse_float*, if specified, will be called with the string of every |
|||
JSON float to be decoded. By default, this is equivalent to |
|||
``float(num_str)``. This can be used to use another datatype or parser |
|||
for JSON floats (e.g. :class:`decimal.Decimal`). |
|||
|
|||
*parse_int*, if specified, will be called with the string of every |
|||
JSON int to be decoded. By default, this is equivalent to |
|||
``int(num_str)``. This can be used to use another datatype or parser |
|||
for JSON integers (e.g. :class:`float`). |
|||
|
|||
*parse_constant*, if specified, will be called with one of the |
|||
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This |
|||
can be used to raise an exception if invalid JSON numbers are |
|||
encountered. |
|||
|
|||
If *use_decimal* is true (default: ``False``) then it implies |
|||
parse_float=decimal.Decimal for parity with ``dump``. |
|||
|
|||
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls`` |
|||
kwarg. |
|||
|
|||
""" |
|||
return loads(fp.read(), |
|||
encoding=encoding, cls=cls, object_hook=object_hook, |
|||
parse_float=parse_float, parse_int=parse_int, |
|||
parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, |
|||
use_decimal=use_decimal, **kw) |
|||
|
|||
|
|||
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None,
        use_decimal=False, **kw):
    """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
    document) to a Python object.

    *encoding* determines the encoding used to interpret any
    :class:`str` objects decoded by this instance (``'utf-8'`` by
    default). It has no effect when decoding :class:`unicode` objects.
    Note that currently only encodings that are a superset of ASCII work;
    strings of other encodings should be passed in as :class:`unicode`.

    *object_hook*, if specified, is called with the result of every JSON
    object decoded, and its return value is used in place of the given
    :class:`dict` (e.g. to support JSON-RPC class hinting).

    *object_pairs_hook*, if specified, is called with an ordered list of
    (key, value) pairs for every object literal decoded, and its return
    value is used instead of the :class:`dict` (e.g. for
    :func:`collections.OrderedDict`). It takes priority over *object_hook*.

    *parse_float*, *parse_int* and *parse_constant*, if specified, are
    called with the source string of every JSON float, JSON int, and the
    constants ``'-Infinity'``/``'Infinity'``/``'NaN'`` respectively, and
    their return values are used in place of the defaults
    (``float(num_str)``, ``int(num_str)``, the corresponding floats).

    If *use_decimal* is true (default: ``False``) then it implies
    ``parse_float=decimal.Decimal`` for parity with ``dump``.

    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
    kwarg.

    """
    # Fast path: no customization at all means the shared default decoder
    # can be reused without constructing a new one.
    customized = (cls is not None or encoding is not None
        or object_hook is not None or object_pairs_hook is not None
        or parse_float is not None or parse_int is not None
        or parse_constant is not None or use_decimal or kw)
    if not customized:
        return _default_decoder.decode(s)
    if cls is None:
        cls = JSONDecoder
    # Forward only the hooks the caller actually supplied, so the
    # decoder's own defaults apply to the rest.
    for name, hook in (('object_hook', object_hook),
                       ('object_pairs_hook', object_pairs_hook),
                       ('parse_float', parse_float),
                       ('parse_int', parse_int),
                       ('parse_constant', parse_constant)):
        if hook is not None:
            kw[name] = hook
    if use_decimal:
        if parse_float is not None:
            raise TypeError("use_decimal=True implies parse_float=Decimal")
        kw['parse_float'] = Decimal
    return cls(encoding=encoding, **kw).decode(s)
|||
|
|||
|
|||
def _toggle_speedups(enabled):
    """Switch the C speedups on or off across the decoder, encoder and
    scanner modules, then rebuild the module-level default decoder and
    encoder so they pick up the newly selected implementations.
    """
    import simplejson.decoder as dec
    import simplejson.encoder as enc
    import simplejson.scanner as scan
    c_make_encoder = _import_c_make_encoder()
    # Select the implementations first, then install them in one place.
    if enabled:
        # Prefer each C implementation, falling back to pure Python when
        # the extension module did not provide it.
        scanstring_impl = dec.c_scanstring or dec.py_scanstring
        make_encoder_impl = c_make_encoder
        encode_ascii_impl = (enc.c_encode_basestring_ascii or
                             enc.py_encode_basestring_ascii)
        make_scanner_impl = scan.c_make_scanner or scan.py_make_scanner
    else:
        scanstring_impl = dec.py_scanstring
        make_encoder_impl = None
        encode_ascii_impl = enc.py_encode_basestring_ascii
        make_scanner_impl = scan.py_make_scanner
    dec.scanstring = scanstring_impl
    enc.c_make_encoder = make_encoder_impl
    enc.encode_basestring_ascii = encode_ascii_impl
    scan.make_scanner = make_scanner_impl
    # The decoder module re-exports make_scanner; keep it in sync.
    dec.make_scanner = make_scanner_impl
    # Recreate the default codec objects so they bind the new functions.
    global _default_decoder
    _default_decoder = JSONDecoder(
        encoding=None,
        object_hook=None,
        object_pairs_hook=None,
    )
    global _default_encoder
    _default_encoder = JSONEncoder(
        skipkeys=False,
        ensure_ascii=True,
        check_circular=True,
        allow_nan=True,
        indent=None,
        separators=None,
        encoding='utf-8',
        default=None,
    )
File diff suppressed because it is too large
@ -1,421 +0,0 @@ |
|||
"""Implementation of JSONDecoder |
|||
""" |
|||
import re |
|||
import sys |
|||
import struct |
|||
|
|||
from simplejson.scanner import make_scanner |
|||
def _import_c_scanstring(): |
|||
try: |
|||
from simplejson._speedups import scanstring |
|||
return scanstring |
|||
except ImportError: |
|||
return None |
|||
c_scanstring = _import_c_scanstring() |
|||
|
|||
# Public API of this module.
__all__ = ['JSONDecoder']

# Regex flags shared by every pattern in this module.
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
|||
|
|||
def _floatconstants(): |
|||
_BYTES = '7FF80000000000007FF0000000000000'.decode('hex') |
|||
# The struct module in Python 2.4 would get frexp() out of range here |
|||
# when an endian is specified in the format string. Fixed in Python 2.5+ |
|||
if sys.byteorder != 'big': |
|||
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1] |
|||
nan, inf = struct.unpack('dd', _BYTES) |
|||
return nan, inf, -inf |
|||
|
|||
NaN, PosInf, NegInf = _floatconstants() |
|||
|
|||
|
|||
class JSONDecodeError(ValueError):
    """Subclass of ValueError with the following additional properties:

    msg: The unformatted error message
    doc: The JSON document being parsed
    pos: The start index of doc where parsing failed
    end: The end index of doc where parsing failed (may be None)
    lineno: The line corresponding to pos
    colno: The column corresponding to pos
    endlineno: The line corresponding to end (may be None)
    endcolno: The column corresponding to end (may be None)

    """
    def __init__(self, msg, doc, pos, end=None):
        # The formatted message becomes the ValueError text; the raw
        # pieces are kept as attributes for programmatic access.
        ValueError.__init__(self, errmsg(msg, doc, pos, end=end))
        self.msg = msg
        self.doc = doc
        self.pos = pos
        self.end = end
        self.lineno, self.colno = linecol(doc, pos)
        if end is None:
            self.endlineno = self.endcolno = None
        else:
            self.endlineno, self.endcolno = linecol(doc, end)
|||
|
|||
|
|||
def linecol(doc, pos):
    """Map *pos* (an index into *doc*) to a (line, column) pair.

    Lines are 1-based.  On the first line the column is simply *pos*;
    on later lines it is the offset from the preceding newline.
    """
    lineno = doc.count('\n', 0, pos) + 1
    if lineno == 1:
        return lineno, pos
    return lineno, pos - doc.rindex('\n', 0, pos)
|||
|
|||
|
|||
def errmsg(msg, doc, pos, end=None):
    """Format a parse-error message with line/column context.

    Note that this function is also called from the _speedups extension.
    """
    lineno, colno = linecol(doc, pos)
    if end is None:
        return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
    endlineno, endcolno = linecol(doc, end)
    return ('%s: line %d column %d - line %d column %d (char %d - %d)'
            % (msg, lineno, colno, endlineno, endcolno, pos, end))
|||
|
|||
|
|||
# JSON literal tokens that map to float values with no Python literal.
_CONSTANTS = {
    '-Infinity': NegInf,
    'Infinity': PosInf,
    'NaN': NaN,
}

# Matches a (possibly empty) run of plain characters followed by the
# character that ends it: a closing quote, a backslash, or a raw control
# character.
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
# Single-character escape codes (the character after a backslash) and the
# characters they decode to.
BACKSLASH = {
    '"': u'"', '\\': u'\\', '/': u'/',
    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}

# Encoding assumed for str input when the caller does not specify one.
DEFAULT_ENCODING = "utf-8"
|||
|
|||
def py_scanstring(s, end, encoding=None, strict=True,
        _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.

    Returns a tuple of the decoded string and the index of the character in s
    after the end quote."""
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append
    # Index of the opening quote, kept for error reporting.
    begin = end - 1
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise JSONDecodeError(
                "Unterminated string starting at", s, begin)
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters
        if content:
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            # A raw control character: an error in strict mode, kept
            # verbatim otherwise.
            if strict:
                msg = "Invalid control character %r at" % (terminator,)
                raise JSONDecodeError(msg, s, end)
            else:
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            raise JSONDecodeError(
                "Unterminated string starting at", s, begin)
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                msg = "Invalid \\escape: " + repr(esc)
                raise JSONDecodeError(msg, s, end)
            end += 1
        else:
            # Unicode escape sequence: \uXXXX (four hex digits).
            esc = s[end + 1:end + 5]
            next_end = end + 5
            if len(esc) != 4:
                msg = "Invalid \\uXXXX escape"
                raise JSONDecodeError(msg, s, end)
            uni = int(esc, 16)
            # Check for surrogate pair on UCS-4 systems: combine a high
            # surrogate with the following \uXXXX low surrogate into one
            # code point above U+FFFF.
            if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                if not s[end + 5:end + 7] == '\\u':
                    raise JSONDecodeError(msg, s, end)
                esc2 = s[end + 7:end + 11]
                if len(esc2) != 4:
                    raise JSONDecodeError(msg, s, end)
                uni2 = int(esc2, 16)
                uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                next_end += 6
            char = unichr(uni)
            end = next_end
        # Append the unescaped character
        _append(char)
    return u''.join(chunks), end
|||
|
|||
|
|||
# Use the C speedup if available, else the pure-Python implementation.
scanstring = c_scanstring or py_scanstring

# Insignificant whitespace per the JSON spec (space, tab, LF, CR), as a
# regex for skipping runs and as a string for single-character tests.
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
|||
|
|||
def JSONObject((s, end), encoding, strict, scan_once, object_hook,
        object_pairs_hook, memo=None,
        _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    # Parse a JSON object from s, where end indexes the character just
    # after the opening '{'.  Returns (object, index-after-'}').
    # Backwards compatibility
    if memo is None:
        memo = {}
    # memo interns repeated key strings so equal keys share one object.
    memo_get = memo.setdefault
    pairs = []
    # Use a slice to prevent IndexError from being raised, the following
    # check will raise a more specific ValueError if the string is empty
    nextchar = s[end:end + 1]
    # Normally we expect nextchar == '"'
    if nextchar != '"':
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end:end + 1]
        # Trivial empty object
        if nextchar == '}':
            if object_pairs_hook is not None:
                result = object_pairs_hook(pairs)
                return result, end + 1
            pairs = {}
            if object_hook is not None:
                pairs = object_hook(pairs)
            return pairs, end + 1
        elif nextchar != '"':
            raise JSONDecodeError("Expecting property name", s, end)
    end += 1
    while True:
        key, end = scanstring(s, end, encoding, strict)
        key = memo_get(key, key)

        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end:end + 1] != ':':
            end = _w(s, end).end()
            if s[end:end + 1] != ':':
                raise JSONDecodeError("Expecting : delimiter", s, end)

        end += 1

        # Skip whitespace after ':' cheaply: single chars inline, longer
        # runs via the regex.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass

        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise JSONDecodeError("Expecting object", s, end)
        pairs.append((key, value))

        # Find the character that follows the value: '}' ends the object,
        # ',' introduces the next member.
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end = _w(s, end + 1).end()
                nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1

        if nextchar == '}':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting , delimiter", s, end - 1)

        # Skip whitespace after ',' and position on the next key's quote.
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end += 1
                nextchar = s[end]
                if nextchar in _ws:
                    end = _w(s, end + 1).end()
                    nextchar = s[end]
        except IndexError:
            nextchar = ''

        end += 1
        if nextchar != '"':
            raise JSONDecodeError("Expecting property name", s, end - 1)

    # Hand the accumulated pairs to the configured hooks;
    # object_pairs_hook takes priority over object_hook.
    if object_pairs_hook is not None:
        result = object_pairs_hook(pairs)
        return result, end
    pairs = dict(pairs)
    if object_hook is not None:
        pairs = object_hook(pairs)
    return pairs, end
|||
|
|||
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    # Parse a JSON array from s, where end indexes the character just
    # after the opening '['.  Returns (list, index-after-']').
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    _append = values.append
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise JSONDecodeError("Expecting object", s, end)
        _append(value)
        # After each element: ']' ends the array, ',' continues it.
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting , delimiter", s, end)

        # Skip whitespace after ',' cheaply: single chars inline, longer
        # runs via the regex.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass

    return values, end
|||
|
|||
class JSONDecoder(object):
    """Simple JSON <http://json.org> decoder

    Performs the following translations in decoding by default:

    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | unicode           |
    +---------------+-------------------+
    | number (int)  | int, long         |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+

    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.

    """

    def __init__(self, encoding=None, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True,
            object_pairs_hook=None):
        """
        *encoding* determines the encoding used to interpret any
        :class:`str` objects decoded by this instance (``'utf-8'`` by
        default). It has no effect when decoding :class:`unicode` objects.

        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as :class:`unicode`.

        *object_hook*, if specified, will be called with the result of every
        JSON object decoded and its return value will be used in place of the
        given :class:`dict`. This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).

        *object_pairs_hook* is an optional function that will be called with
        the result of any object literal decode with an ordered list of pairs.
        The return value of *object_pairs_hook* will be used instead of the
        :class:`dict`. This feature can be used to implement custom decoders
        that rely on the order that the key and value pairs are decoded (for
        example, :func:`collections.OrderedDict` will remember the order of
        insertion). If *object_hook* is also defined, the *object_pairs_hook*
        takes priority.

        *parse_float*, if specified, will be called with the string of every
        JSON float to be decoded. By default, this is equivalent to
        ``float(num_str)``. This can be used to use another datatype or parser
        for JSON floats (e.g. :class:`decimal.Decimal`).

        *parse_int*, if specified, will be called with the string of every
        JSON int to be decoded. By default, this is equivalent to
        ``int(num_str)``. This can be used to use another datatype or parser
        for JSON integers (e.g. :class:`float`).

        *parse_constant*, if specified, will be called with one of the
        following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
        can be used to raise an exception if invalid JSON numbers are
        encountered.

        *strict* controls the parser's behavior when it encounters an
        invalid control character in a string. The default setting of
        ``True`` means that unescaped control characters are parse errors, if
        ``False`` then control characters will be allowed in strings.

        """
        self.encoding = encoding
        self.object_hook = object_hook
        self.object_pairs_hook = object_pairs_hook
        # Fall back to the builtin converters when no hook is supplied.
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        # Shared key cache passed to JSONObject so that equal key strings
        # are stored once per decoder.
        self.memo = {}
        # Build the scanner last, after all the parse_* attributes above
        # are configured on this instance.
        self.scan_once = make_scanner(self)

    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document)

        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        end = _w(s, end).end()
        # Unlike raw_decode, reject any non-whitespace trailing data.
        if end != len(s):
            raise JSONDecodeError("Extra data", s, end, len(s))
        return obj

    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` or ``unicode``
        beginning with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.

        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.

        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration:
            # The scanner signals "nothing parseable here" by StopIteration.
            raise JSONDecodeError("No JSON object could be decoded", s, idx)
        return obj, end
@ -1,501 +0,0 @@ |
|||
"""Implementation of JSONEncoder |
|||
""" |
|||
import re |
|||
from decimal import Decimal |
|||
|
|||
def _import_speedups(): |
|||
try: |
|||
from simplejson import _speedups |
|||
return _speedups.encode_basestring_ascii, _speedups.make_encoder |
|||
except ImportError: |
|||
return None, None |
|||
c_encode_basestring_ascii, c_make_encoder = _import_speedups() |
|||
|
|||
from simplejson.decoder import PosInf |
|||
|
|||
# Characters that must be escaped in a JSON string: control characters,
# backslash and double quote.
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
# For ASCII-only output: also escape everything outside printable ASCII.
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
# Detects bytes outside the ASCII range (UTF-8 multi-byte sequences).
HAS_UTF8 = re.compile(r'[\x80-\xff]')
# Two-character short escapes; the loop below fills in \uXXXX forms for
# the remaining control characters.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
for i in range(0x20):
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))

# repr() yields a string that round-trips the float value.
FLOAT_REPR = repr
|||
|
|||
def encode_basestring(s):
    """Return a JSON representation of a Python string (quoted, with the
    characters matched by ESCAPE replaced by their escape sequences)."""
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        # Promote UTF-8 byte strings to unicode before escaping.
        s = s.decode('utf-8')
    escaped = ESCAPE.sub(lambda match: ESCAPE_DCT[match.group(0)], s)
    return u'"' + escaped + u'"'
|||
|
|||
|
|||
def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON representation of a Python string:
    quoted, with every non-printable-ASCII character escaped."""
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        # Promote UTF-8 byte strings to unicode before escaping.
        s = s.decode('utf-8')
    def replace(match):
        ch = match.group(0)
        if ch in ESCAPE_DCT:
            return ESCAPE_DCT[ch]
        n = ord(ch)
        if n < 0x10000:
            return '\\u%04x' % (n,)
        # Code points above the BMP are emitted as a UTF-16 surrogate pair.
        n -= 0x10000
        hi = 0xd800 | ((n >> 10) & 0x3ff)
        lo = 0xdc00 | (n & 0x3ff)
        return '\\u%04x\\u%04x' % (hi, lo)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'


encode_basestring_ascii = (
    c_encode_basestring_ascii or py_encode_basestring_ascii)
|||
|
|||
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict              | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).

    """
    # Default separators; overridden per-instance when *separators* is
    # passed to the constructor.
    item_separator = ', '
    key_separator = ': '
    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None,
            use_decimal=False):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None. If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is true, the output is guaranteed to be str
        objects with all incoming unicode characters escaped. If
        ensure_ascii is false, the output will be unicode object.

        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is true, then NaN, Infinity, and -Infinity will be
        encoded as such. This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a string, then JSON array elements and object members
        will be pretty-printed with a newline followed by that string repeated
        for each level of nesting. ``None`` (the default) selects the most compact
        representation without any newlines. For backwards compatibility with
        versions of simplejson earlier than 2.1.0, an integer is also accepted
        and is converted to a string with that many spaces.

        If specified, separators should be a (item_separator, key_separator)
        tuple. The default is (', ', ': '). To get the most compact JSON
        representation you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized. It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.

        If use_decimal is true (not the default), ``decimal.Decimal`` will
        be supported directly by the encoder. For the inverse, decode JSON
        with ``parse_float=decimal.Decimal``.

        """

        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.use_decimal = use_decimal
        # Backwards compatibility: an integer indent means that many spaces.
        if isinstance(indent, (int, long)):
            indent = ' ' * indent
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        # Only shadow the default() method when a hook is given.
        if default is not None:
            self.default = default
        self.encoding = encoding

    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)

        """
        raise TypeError(repr(o) + " is not JSON serializable")

    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> from simplejson import JSONEncoder
        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'

        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                if (_encoding is not None
                        and not (_encoding == 'utf-8')):
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed. The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        if self.ensure_ascii:
            return ''.join(chunks)
        else:
            return u''.join(chunks)

    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)

        """
        # markers tracks container ids currently being encoded so circular
        # references can be detected (None disables the check).
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the encoder so byte strings in other encodings are
            # promoted to unicode first.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, str):
                    o = o.decode(_encoding)
                return _orig_encoder(o)

        def floatstr(o, allow_nan=self.allow_nan,
                _repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
            # Check for specials. Note that this type of test is processor
            # and/or platform-specific, so do tests which don't depend on
            # the internals.

            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)

            if not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " +
                    repr(o))

            return text


        # key_memo is shared with the C encoder so repeated keys are
        # escaped only once; cleared afterwards to avoid holding strings.
        key_memo = {}
        if (_one_shot and c_make_encoder is not None
                and self.indent is None):
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan, key_memo, self.use_decimal)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot, self.use_decimal)
        try:
            return _iterencode(o, 0)
        finally:
            key_memo.clear()
|||
|
|||
|
|||
class JSONEncoderForHTML(JSONEncoder):
    """An encoder that produces JSON safe to embed in HTML.

    To embed JSON content in, say, a script tag on a web page, the
    characters &, < and > should be escaped. They cannot be escaped
    with the usual entities (e.g. &amp;) because they are not expanded
    within <script> tags.
    """

    def encode(self, o):
        # Bypass JSONEncoder.encode's string fast paths, which would
        # skip the escaping done by our iterencode override.
        chunks = self.iterencode(o, True)
        if self.ensure_ascii:
            return ''.join(chunks)
        return u''.join(chunks)

    def iterencode(self, o, _one_shot=False):
        # Post-process every chunk the base encoder yields, replacing the
        # HTML-significant characters with their \uXXXX escapes.
        for chunk in super(JSONEncoderForHTML, self).iterencode(o, _one_shot):
            yield (chunk.replace('&', '\\u0026')
                        .replace('<', '\\u003c')
                        .replace('>', '\\u003e'))
|||
|
|||
|
|||
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, |
|||
_key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot, |
|||
_use_decimal, |
|||
## HACK: hand-optimized bytecode; turn globals into locals |
|||
False=False, |
|||
True=True, |
|||
ValueError=ValueError, |
|||
basestring=basestring, |
|||
Decimal=Decimal, |
|||
dict=dict, |
|||
float=float, |
|||
id=id, |
|||
int=int, |
|||
isinstance=isinstance, |
|||
list=list, |
|||
long=long, |
|||
str=str, |
|||
tuple=tuple, |
|||
): |
|||
|
|||
def _iterencode_list(lst, _current_indent_level): |
|||
if not lst: |
|||
yield '[]' |
|||
return |
|||
if markers is not None: |
|||
markerid = id(lst) |
|||
if markerid in markers: |
|||
raise ValueError("Circular reference detected") |
|||
markers[markerid] = lst |
|||
buf = '[' |
|||
if _indent is not None: |
|||
_current_indent_level += 1 |
|||
newline_indent = '\n' + (_indent * _current_indent_level) |
|||
separator = _item_separator + newline_indent |
|||
buf += newline_indent |
|||
else: |
|||
newline_indent = None |
|||
separator = _item_separator |
|||
first = True |
|||
for value in lst: |
|||
if first: |
|||
first = False |
|||
else: |
|||
buf = separator |
|||
if isinstance(value, basestring): |
|||
yield buf + _encoder(value) |
|||
elif value is None: |
|||
yield buf + 'null' |
|||
elif value is True: |
|||
yield buf + 'true' |
|||
elif value is False: |
|||
yield buf + 'false' |
|||
elif isinstance(value, (int, long)): |
|||
yield buf + str(value) |
|||
elif isinstance(value, float): |
|||
yield buf + _floatstr(value) |
|||
elif _use_decimal and isinstance(value, Decimal): |
|||
yield buf + str(value) |
|||
else: |
|||
yield buf |
|||
if isinstance(value, (list, tuple)): |
|||
chunks = _iterencode_list(value, _current_indent_level) |
|||
elif isinstance(value, dict): |
|||
chunks = _iterencode_dict(value, _current_indent_level) |
|||
else: |
|||
chunks = _iterencode(value, _current_indent_level) |
|||
for chunk in chunks: |
|||
yield chunk |
|||
if newline_indent is not None: |
|||
_current_indent_level -= 1 |
|||
yield '\n' + (_indent * _current_indent_level) |
|||
yield ']' |
|||
if markers is not None: |
|||
del markers[markerid] |
|||
|
|||
def _iterencode_dict(dct, _current_indent_level):
    # Yield string chunks encoding the dict *dct* as a JSON object.
    # Closure over _make_iterencode state: markers, _indent, _sort_keys,
    # _skipkeys, _encoder, _key_separator, _item_separator, etc.
    if not dct:
        yield '{}'
        return
    if markers is not None:
        # Track container identity to detect self-referential structures.
        markerid = id(dct)
        if markerid in markers:
            raise ValueError("Circular reference detected")
        markers[markerid] = dct
    yield '{'
    if _indent is not None:
        _current_indent_level += 1
        newline_indent = '\n' + (_indent * _current_indent_level)
        item_separator = _item_separator + newline_indent
        yield newline_indent
    else:
        newline_indent = None
        item_separator = _item_separator
    first = True
    if _sort_keys:
        # Materialize and sort by key for deterministic output (Python 2
        # list.sort on the items list).
        items = dct.items()
        items.sort(key=lambda kv: kv[0])
    else:
        items = dct.iteritems()
    for key, value in items:
        if isinstance(key, basestring):
            pass
        # JavaScript is weakly typed for these, so it makes sense to
        # also allow them. Many encoders seem to do something like this.
        elif isinstance(key, float):
            key = _floatstr(key)
        elif key is True:
            key = 'true'
        elif key is False:
            key = 'false'
        elif key is None:
            key = 'null'
        elif isinstance(key, (int, long)):
            key = str(key)
        elif _skipkeys:
            # Caller asked for non-string keys to be silently dropped.
            continue
        else:
            raise TypeError("key " + repr(key) + " is not a string")
        if first:
            first = False
        else:
            yield item_separator
        yield _encoder(key)
        yield _key_separator
        if isinstance(value, basestring):
            yield _encoder(value)
        elif value is None:
            yield 'null'
        elif value is True:
            yield 'true'
        elif value is False:
            yield 'false'
        elif isinstance(value, (int, long)):
            yield str(value)
        elif isinstance(value, float):
            yield _floatstr(value)
        elif _use_decimal and isinstance(value, Decimal):
            yield str(value)
        else:
            # Containers / arbitrary objects delegate to sub-encoders.
            if isinstance(value, (list, tuple)):
                chunks = _iterencode_list(value, _current_indent_level)
            elif isinstance(value, dict):
                chunks = _iterencode_dict(value, _current_indent_level)
            else:
                chunks = _iterencode(value, _current_indent_level)
            for chunk in chunks:
                yield chunk
    if newline_indent is not None:
        # Close the indentation level opened above before the '}'.
        _current_indent_level -= 1
        yield '\n' + (_indent * _current_indent_level)
    yield '}'
    if markers is not None:
        del markers[markerid]
|||
|
|||
def _iterencode(o, _current_indent_level):
    # Top-level dispatch: yield JSON chunks for an arbitrary object *o*.
    # Unknown types are converted via _default() and re-dispatched, with
    # circular-reference protection around the conversion.
    if isinstance(o, basestring):
        yield _encoder(o)
    elif o is None:
        yield 'null'
    elif o is True:
        yield 'true'
    elif o is False:
        yield 'false'
    elif isinstance(o, (int, long)):
        yield str(o)
    elif isinstance(o, float):
        yield _floatstr(o)
    elif isinstance(o, (list, tuple)):
        for chunk in _iterencode_list(o, _current_indent_level):
            yield chunk
    elif isinstance(o, dict):
        for chunk in _iterencode_dict(o, _current_indent_level):
            yield chunk
    elif _use_decimal and isinstance(o, Decimal):
        yield str(o)
    else:
        if markers is not None:
            # Guard against _default() producing a cycle back to *o*.
            markerid = id(o)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = o
        o = _default(o)
        for chunk in _iterencode(o, _current_indent_level):
            yield chunk
        if markers is not None:
            del markers[markerid]
|||
|
|||
return _iterencode |
@ -1,119 +0,0 @@ |
|||
"""Drop-in replacement for collections.OrderedDict by Raymond Hettinger |
|||
|
|||
http://code.activestate.com/recipes/576693/ |
|||
|
|||
""" |
|||
from UserDict import DictMixin |
|||
|
|||
# Modified from original to support Python 2.4, see |
|||
# http://code.google.com/p/simplejson/issues/detail?id=53 |
|||
# Python 2.4 compatibility shim: supply all() when the builtin is absent.
try:
    all
except NameError:
    def all(seq):
        """Return True iff every element of *seq* is truthy."""
        for item in seq:
            if not item:
                return False
        return True
|||
|
|||
class OrderedDict(dict, DictMixin):
    """Dict subclass that remembers insertion order.

    Ordering is kept in a circular doubly linked list: ``self.__map`` maps
    each key to its ``[key, prev, next]`` node, and ``self.__end`` is the
    sentinel node.  Bulk-dict methods are derived from DictMixin.
    """

    def __init__(self, *args, **kwds):
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            self.__end
        except AttributeError:
            # First construction: build the sentinel and the key map.
            self.clear()
        self.update(*args, **kwds)

    def clear(self):
        self.__end = end = []
        end += [None, end, end]  # sentinel node for doubly linked list
        self.__map = {}  # key --> [key, prev, next]
        dict.clear(self)

    def __setitem__(self, key, value):
        if key not in self:
            # New key: link a node just before the sentinel (list tail).
            end = self.__end
            curr = end[1]
            curr[2] = end[1] = self.__map[key] = [key, curr, end]
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        dict.__delitem__(self, key)
        # Unlink the node from the ordering list.
        key, prev, next = self.__map.pop(key)
        prev[2] = next
        next[1] = prev

    def __iter__(self):
        # Walk the linked list forward from the sentinel.
        end = self.__end
        curr = end[2]
        while curr is not end:
            yield curr[0]
            curr = curr[2]

    def __reversed__(self):
        # Walk the linked list backward from the sentinel.
        end = self.__end
        curr = end[1]
        while curr is not end:
            yield curr[0]
            curr = curr[1]

    def popitem(self, last=True):
        # Remove and return a (key, value) pair: last-inserted when *last*
        # is true, first-inserted otherwise.
        if not self:
            raise KeyError('dictionary is empty')
        # Modified from original to support Python 2.4, see
        # http://code.google.com/p/simplejson/issues/detail?id=53
        if last:
            key = reversed(self).next()
        else:
            key = iter(self).next()
        value = self.pop(key)
        return key, value

    def __reduce__(self):
        # Pickle support: temporarily strip the linked-list internals so
        # only user attributes travel in the instance dict.
        items = [[k, self[k]] for k in self]
        tmp = self.__map, self.__end
        del self.__map, self.__end
        inst_dict = vars(self).copy()
        self.__map, self.__end = tmp
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def keys(self):
        return list(self)

    # Bulk-dict API derived from the primitives above via DictMixin.
    setdefault = DictMixin.setdefault
    update = DictMixin.update
    pop = DictMixin.pop
    values = DictMixin.values
    items = DictMixin.items
    iterkeys = DictMixin.iterkeys
    itervalues = DictMixin.itervalues
    iteritems = DictMixin.iteritems

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, self.items())

    def copy(self):
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        # Alternate constructor: every key maps to *value*.
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        # Order-sensitive comparison against another OrderedDict; plain
        # dict comparison otherwise.
        if isinstance(other, OrderedDict):
            return len(self) == len(other) and \
                all(p == q for p, q in zip(self.items(), other.items()))
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other
@ -1,77 +0,0 @@ |
|||
"""JSON token scanner |
|||
""" |
|||
import re |
|||
def _import_c_make_scanner(): |
|||
try: |
|||
from simplejson._speedups import make_scanner |
|||
return make_scanner |
|||
except ImportError: |
|||
return None |
|||
c_make_scanner = _import_c_make_scanner() |
|||
|
|||
__all__ = ['make_scanner'] |
|||
|
|||
NUMBER_RE = re.compile( |
|||
r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?', |
|||
(re.VERBOSE | re.MULTILINE | re.DOTALL)) |
|||
|
|||
def py_make_scanner(context): |
|||
parse_object = context.parse_object |
|||
parse_array = context.parse_array |
|||
parse_string = context.parse_string |
|||
match_number = NUMBER_RE.match |
|||
encoding = context.encoding |
|||
strict = context.strict |
|||
parse_float = context.parse_float |
|||
parse_int = context.parse_int |
|||
parse_constant = context.parse_constant |
|||
object_hook = context.object_hook |
|||
object_pairs_hook = context.object_pairs_hook |
|||
memo = context.memo |
|||
|
|||
def _scan_once(string, idx): |
|||
try: |
|||
nextchar = string[idx] |
|||
except IndexError: |
|||
raise StopIteration |
|||
|
|||
if nextchar == '"': |
|||
return parse_string(string, idx + 1, encoding, strict) |
|||
elif nextchar == '{': |
|||
return parse_object((string, idx + 1), encoding, strict, |
|||
_scan_once, object_hook, object_pairs_hook, memo) |
|||
elif nextchar == '[': |
|||
return parse_array((string, idx + 1), _scan_once) |
|||
elif nextchar == 'n' and string[idx:idx + 4] == 'null': |
|||
return None, idx + 4 |
|||
elif nextchar == 't' and string[idx:idx + 4] == 'true': |
|||
return True, idx + 4 |
|||
elif nextchar == 'f' and string[idx:idx + 5] == 'false': |
|||
return False, idx + 5 |
|||
|
|||
m = match_number(string, idx) |
|||
if m is not None: |
|||
integer, frac, exp = m.groups() |
|||
if frac or exp: |
|||
res = parse_float(integer + (frac or '') + (exp or '')) |
|||
else: |
|||
res = parse_int(integer) |
|||
return res, m.end() |
|||
elif nextchar == 'N' and string[idx:idx + 3] == 'NaN': |
|||
return parse_constant('NaN'), idx + 3 |
|||
elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity': |
|||
return parse_constant('Infinity'), idx + 8 |
|||
elif nextchar == '-' and string[idx:idx + 9] == '-Infinity': |
|||
return parse_constant('-Infinity'), idx + 9 |
|||
else: |
|||
raise StopIteration |
|||
|
|||
def scan_once(string, idx): |
|||
try: |
|||
return _scan_once(string, idx) |
|||
finally: |
|||
memo.clear() |
|||
|
|||
return scan_once |
|||
|
|||
# Prefer the C-accelerated scanner when the extension imported; fall back
# to the pure-python implementation otherwise.
make_scanner = c_make_scanner or py_make_scanner
@ -1,39 +0,0 @@ |
|||
r"""Command-line tool to validate and pretty-print JSON |
|||
|
|||
Usage:: |
|||
|
|||
$ echo '{"json":"obj"}' | python -m simplejson.tool |
|||
{ |
|||
"json": "obj" |
|||
} |
|||
$ echo '{ 1.2:3.4}' | python -m simplejson.tool |
|||
Expecting property name: line 1 column 2 (char 2) |
|||
|
|||
""" |
|||
import sys |
|||
import simplejson as json |
|||
|
|||
def main():
    """Validate JSON read from a file (or stdin) and pretty-print it.

    Command line: ``tool.py [infile [outfile]]``.  Exits via SystemExit
    with the parse error message when the input is not valid JSON, or with
    a usage string when given too many arguments.
    """
    if len(sys.argv) == 1:
        infile = sys.stdin
        outfile = sys.stdout
    elif len(sys.argv) == 2:
        infile = open(sys.argv[1], 'rb')
        outfile = sys.stdout
    elif len(sys.argv) == 3:
        infile = open(sys.argv[1], 'rb')
        outfile = open(sys.argv[2], 'wb')
    else:
        raise SystemExit(sys.argv[0] + " [infile [outfile]]")
    try:
        # OrderedDict preserves key order; Decimal keeps numeric precision.
        obj = json.load(infile,
                        object_pairs_hook=json.OrderedDict,
                        use_decimal=True)
    except ValueError as e:
        # 'except ValueError as e' works on Python 2.6+ and is required on
        # Python 3; the previous 'except ValueError, e' form is a Python 3
        # SyntaxError.
        raise SystemExit(e)
    json.dump(obj, outfile, sort_keys=True, indent='    ', use_decimal=True)
    outfile.write('\n')


if __name__ == '__main__':
    main()
@ -0,0 +1,150 @@ |
|||
"""Define behaviors common to MySQLdb dialects. |
|||
|
|||
Currently includes MySQL and Drizzle. |
|||
|
|||
""" |
|||
|
|||
from sqlalchemy.connectors import Connector |
|||
from sqlalchemy.engine import base as engine_base, default |
|||
from sqlalchemy.sql import operators as sql_operators |
|||
from sqlalchemy import exc, log, schema, sql, types as sqltypes, util |
|||
from sqlalchemy import processors |
|||
import re |
|||
|
|||
# the subclassing of Connector by all classes |
|||
# here is not strictly necessary |
|||
|
|||
class MySQLDBExecutionContext(Connector):
    """Execution-context behavior shared by MySQLdb-style drivers."""

    @property
    def rowcount(self):
        # do_executemany() stashes an aggregate count in _rowcount; when it
        # is absent, report the live cursor attribute instead.
        try:
            return self._rowcount
        except AttributeError:
            return self.cursor.rowcount
|||
|
|||
class MySQLDBCompiler(Connector):
    """Statement-compiler tweaks for drivers using the 'format' paramstyle."""

    def visit_mod(self, binary, **kw):
        # Double the '%' so it survives the DBAPI's paramstyle escaping.
        lhs = self.process(binary.left)
        rhs = self.process(binary.right)
        return "%s %%%% %s" % (lhs, rhs)

    def post_process_text(self, text):
        # Escape literal percent signs in textual SQL fragments.
        return text.replace('%', '%%')
|||
|
|||
class MySQLDBIdentifierPreparer(Connector):
    """Identifier quoting that also escapes '%' for the format paramstyle."""

    def _escape_identifier(self, value):
        escaped = value.replace(self.escape_quote, self.escape_to_quote)
        return escaped.replace("%", "%%")
|||
|
|||
class MySQLDBConnector(Connector):
    """Connector behavior shared by MySQLdb and MySQLdb-compatible DBAPIs."""

    driver = 'mysqldb'
    # MySQLdb historically mishandles unicode SQL text.
    supports_unicode_statements = False
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True

    supports_native_decimal = True

    default_paramstyle = 'format'

    @classmethod
    def dbapi(cls):
        """Import and return the DBAPI module."""
        # is overridden when pymysql is used
        return __import__('MySQLdb')

    def do_executemany(self, cursor, statement, parameters, context=None):
        # Preserve the aggregate executemany() row count on the execution
        # context so its rowcount property can report it.
        rowcount = cursor.executemany(statement, parameters)
        if context is not None:
            context._rowcount = rowcount

    def create_connect_args(self, url):
        """Translate a SQLAlchemy URL into (args, kwargs) for connect()."""
        opts = url.translate_connect_args(database='db', username='user',
                                          password='passwd')
        opts.update(url.query)

        # Query-string values arrive as strings; coerce to expected types.
        util.coerce_kw_type(opts, 'compress', bool)
        util.coerce_kw_type(opts, 'connect_timeout', int)
        util.coerce_kw_type(opts, 'client_flag', int)
        util.coerce_kw_type(opts, 'local_infile', int)
        # Note: using either of the below will cause all strings to be returned
        # as Unicode, both in raw SQL operations and with column types like
        # String and MSString.
        util.coerce_kw_type(opts, 'use_unicode', bool)
        util.coerce_kw_type(opts, 'charset', str)

        # Rich values 'cursorclass' and 'conv' are not supported via
        # query string.

        # Collect ssl_* query options into the nested 'ssl' dict the DBAPI
        # expects, stripping the 'ssl_' prefix.
        ssl = {}
        for key in ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher']:
            if key in opts:
                ssl[key[4:]] = opts[key]
                util.coerce_kw_type(ssl, key[4:], str)
                del opts[key]
        if ssl:
            opts['ssl'] = ssl

        # FOUND_ROWS must be set in CLIENT_FLAGS to enable
        # supports_sane_rowcount.
        client_flag = opts.get('client_flag', 0)
        if self.dbapi is not None:
            try:
                CLIENT_FLAGS = __import__(
                    self.dbapi.__name__ + '.constants.CLIENT'
                ).constants.CLIENT
                client_flag |= CLIENT_FLAGS.FOUND_ROWS
            except (AttributeError, ImportError):
                # Driver without the constants module: row counts may be
                # approximate.
                self.supports_sane_rowcount = False
        opts['client_flag'] = client_flag
        return [[], opts]

    def _get_server_version_info(self, connection):
        # Split the server version string on '.' and '-' into a tuple of
        # ints, keeping non-numeric fragments (e.g. '-log') verbatim.
        dbapi_con = connection.connection
        version = []
        r = re.compile('[.\-]')
        for n in r.split(dbapi_con.get_server_info()):
            try:
                version.append(int(n))
            except ValueError:
                version.append(n)
        return tuple(version)

    def _extract_error_code(self, exception):
        # MySQLdb exposes the server error code as args[0].
        return exception.args[0]

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""

        # Note: MySQL-python 1.2.1c7 seems to ignore changes made
        # on a connection via set_character_set()
        if self.server_version_info < (4, 1, 0):
            try:
                return connection.connection.character_set_name()
            except AttributeError:
                # < 1.2.1 final MySQL-python drivers have no charset support.
                # a query is needed.
                pass

        # Prefer 'character_set_results' for the current connection over the
        # value in the driver. SET NAMES or individual variable SETs will
        # change the charset without updating the driver's view of the world.
        #
        # If it's decided that issuing that sort of SQL leaves you SOL, then
        # this can prefer the driver value.
        rs = connection.execute("SHOW VARIABLES LIKE 'character_set%%'")
        opts = dict([(row[0], row[1]) for row in self._compat_fetchall(rs)])

        if 'character_set_results' in opts:
            return opts['character_set_results']
        try:
            return connection.connection.character_set_name()
        except AttributeError:
            # Still no charset on < 1.2.1 final...
            if 'character_set' in opts:
                return opts['character_set']
            else:
                util.warn(
                    "Could not detect the connection character set with this "
                    "combination of MySQL server and MySQL-python. "
                    "MySQL-python >= 1.2.2 is recommended. Assuming latin1.")
                return 'latin1'
|||
|
@ -0,0 +1,18 @@ |
|||
from sqlalchemy.dialects.drizzle import base, mysqldb

# default dialect
base.dialect = mysqldb.dialect

from sqlalchemy.dialects.drizzle.base import \
    BIGINT, BINARY, BLOB, BOOLEAN, CHAR, DATE, DATETIME, \
    DECIMAL, DOUBLE, ENUM, \
    FLOAT, INTEGER, \
    NUMERIC, REAL, TEXT, TIME, TIMESTAMP, \
    VARBINARY, VARCHAR, dialect

# Fix: 'SET' was listed here but is neither imported above nor defined by
# the Drizzle dialect, which made
# ``from sqlalchemy.dialects.drizzle import *`` raise AttributeError.
__all__ = (
    'BIGINT', 'BINARY', 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME',
    'DECIMAL', 'DOUBLE', 'ENUM', 'FLOAT', 'INTEGER',
    'NUMERIC', 'REAL', 'TEXT', 'TIME', 'TIMESTAMP',
    'VARBINARY', 'VARCHAR', 'dialect',
)
@ -0,0 +1,582 @@ |
|||
# drizzle/base.py |
|||
# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors <see AUTHORS file> |
|||
# Copyright (C) 2010-2011 Monty Taylor <mordred@inaugust.com> |
|||
# |
|||
# This module is part of SQLAlchemy and is released under |
|||
# the MIT License: http://www.opensource.org/licenses/mit-license.php |
|||
|
|||
"""Support for the Drizzle database. |
|||
|
|||
Supported Versions and Features |
|||
------------------------------- |
|||
|
|||
SQLAlchemy supports the Drizzle database starting with 2010.08. |
|||
with capabilities increasing with more modern servers. |
|||
|
|||
Most available DBAPI drivers are supported; see below. |
|||
|
|||
===================================== =============== |
|||
Feature Minimum Version |
|||
===================================== =============== |
|||
sqlalchemy.orm 2010.08 |
|||
Table Reflection 2010.08 |
|||
DDL Generation 2010.08 |
|||
utf8/Full Unicode Connections 2010.08 |
|||
Transactions 2010.08 |
|||
Two-Phase Transactions 2010.08 |
|||
Nested Transactions 2010.08 |
|||
===================================== =============== |
|||
|
|||
See the official Drizzle documentation for detailed information about features |
|||
supported in any given server release. |
|||
|
|||
Connecting |
|||
---------- |
|||
|
|||
See the API documentation on individual drivers for details on connecting. |
|||
|
|||
Connection Timeouts |
|||
------------------- |
|||
|
|||
Drizzle features an automatic connection close behavior, for connections that |
|||
have been idle for eight hours or more. To circumvent this issue, use
|||
the ``pool_recycle`` option which controls the maximum age of any connection:: |
|||
|
|||
engine = create_engine('drizzle+mysqldb://...', pool_recycle=3600) |
|||
|
|||
Storage Engines |
|||
--------------- |
|||
|
|||
Drizzle defaults to the ``InnoDB`` storage engine, which is transactional. |
|||
|
|||
Storage engines can be elected when creating tables in SQLAlchemy by supplying |
|||
a ``drizzle_engine='whatever'`` to the ``Table`` constructor. Any Drizzle table |
|||
creation option can be specified in this syntax:: |
|||
|
|||
Table('mytable', metadata, |
|||
Column('data', String(32)), |
|||
drizzle_engine='InnoDB', |
|||
) |
|||
|
|||
Keys |
|||
---- |
|||
|
|||
Not all Drizzle storage engines support foreign keys. For ``BlitzDB`` and |
|||
similar engines, the information loaded by table reflection will not include |
|||
foreign keys. For these tables, you may supply a |
|||
:class:`~sqlalchemy.ForeignKeyConstraint` at reflection time:: |
|||
|
|||
Table('mytable', metadata, |
|||
ForeignKeyConstraint(['other_id'], ['othertable.other_id']), |
|||
autoload=True |
|||
) |
|||
|
|||
When creating tables, SQLAlchemy will automatically set ``AUTO_INCREMENT`` on |
|||
an integer primary key column:: |
|||
|
|||
>>> t = Table('mytable', metadata, |
|||
... Column('id', Integer, primary_key=True)
|||
... ) |
|||
>>> t.create() |
|||
CREATE TABLE mytable ( |
|||
id INTEGER NOT NULL AUTO_INCREMENT, |
|||
PRIMARY KEY (id) |
|||
) |
|||
|
|||
You can disable this behavior by supplying ``autoincrement=False`` to the |
|||
:class:`~sqlalchemy.Column`. This flag can also be used to enable |
|||
auto-increment on a secondary column in a multi-column key for some storage |
|||
engines:: |
|||
|
|||
Table('mytable', metadata, |
|||
Column('gid', Integer, primary_key=True, autoincrement=False), |
|||
Column('id', Integer, primary_key=True) |
|||
) |
|||
|
|||
Drizzle SQL Extensions |
|||
---------------------- |
|||
|
|||
Many of the Drizzle SQL extensions are handled through SQLAlchemy's generic |
|||
function and operator support:: |
|||
|
|||
table.select(table.c.password==func.md5('plaintext')) |
|||
table.select(table.c.username.op('regexp')('^[a-d]')) |
|||
|
|||
And of course any valid Drizzle statement can be executed as a string as well. |
|||
|
|||
Some limited direct support for Drizzle extensions to SQL is currently |
|||
available. |
|||
|
|||
* SELECT pragma:: |
|||
|
|||
select(..., prefixes=['HIGH_PRIORITY', 'SQL_SMALL_RESULT']) |
|||
|
|||
* UPDATE with LIMIT:: |
|||
|
|||
update(..., drizzle_limit=10) |
|||
|
|||
""" |
|||
|
|||
import datetime, inspect, re, sys |
|||
|
|||
from sqlalchemy import schema as sa_schema |
|||
from sqlalchemy import exc, log, sql, util |
|||
from sqlalchemy.sql import operators as sql_operators |
|||
from sqlalchemy.sql import functions as sql_functions |
|||
from sqlalchemy.sql import compiler |
|||
from array import array as _array |
|||
|
|||
from sqlalchemy.engine import reflection |
|||
from sqlalchemy.engine import base as engine_base, default |
|||
from sqlalchemy import types as sqltypes |
|||
from sqlalchemy.dialects.mysql import base as mysql_dialect |
|||
|
|||
from sqlalchemy.types import DATE, DATETIME, BOOLEAN, TIME, \ |
|||
BLOB, BINARY, VARBINARY |
|||
|
|||
class _NumericType(object): |
|||
"""Base for Drizzle numeric types.""" |
|||
|
|||
def __init__(self, **kw): |
|||
super(_NumericType, self).__init__(**kw) |
|||
|
|||
class _FloatType(_NumericType, sqltypes.Float):
    """Shared base for Drizzle floating-point types (FLOAT/REAL/DOUBLE)."""

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        # REAL and DOUBLE accept precision/scale only as a pair.
        if isinstance(self, (REAL, DOUBLE)) and \
                ((precision is None) != (scale is None)):
            raise exc.ArgumentError(
                "You must specify both precision and scale or omit "
                "both altogether.")

        super(_FloatType, self).__init__(
            precision=precision, asdecimal=asdecimal, **kw)
        self.scale = scale
|||
|
|||
class _StringType(mysql_dialect._StringType):
    """Base for Drizzle string types."""

    def __init__(self, collation=None, binary=False, **kw):
        # Drizzle has no "national" character variants (visit_NCHAR /
        # visit_NVARCHAR raise below), so the flag is forced off.
        kw['national'] = False
        super(_StringType, self).__init__(
            collation=collation, binary=binary, **kw)
|||
|
|||
|
|||
class NUMERIC(_NumericType, sqltypes.NUMERIC):
    """Drizzle NUMERIC type."""

    __visit_name__ = 'NUMERIC'

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a NUMERIC.

        :param precision: total digits; when both *precision* and *scale*
          are None, values are stored to the limits allowed by the server.
        :param scale: digits to the right of the decimal point.
        """
        super(NUMERIC, self).__init__(
            precision=precision, scale=scale, asdecimal=asdecimal, **kw)
|||
|
|||
|
|||
class DECIMAL(_NumericType, sqltypes.DECIMAL):
    """Drizzle DECIMAL type."""

    __visit_name__ = 'DECIMAL'

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a DECIMAL.

        :param precision: total digits; when both *precision* and *scale*
          are None, values are stored to the limits allowed by the server.
        :param scale: digits to the right of the decimal point.
        """
        super(DECIMAL, self).__init__(
            precision=precision, scale=scale, asdecimal=asdecimal, **kw)
|||
|
|||
|
|||
class DOUBLE(_FloatType):
    """Drizzle DOUBLE type."""

    __visit_name__ = 'DOUBLE'

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a DOUBLE.

        :param precision: total digits; when both *precision* and *scale*
          are None, values are stored to the limits allowed by the server.
        :param scale: digits to the right of the decimal point.
        """
        super(DOUBLE, self).__init__(
            precision=precision, scale=scale, asdecimal=asdecimal, **kw)
|||
|
|||
class REAL(_FloatType, sqltypes.REAL):
    """Drizzle REAL type."""

    __visit_name__ = 'REAL'

    def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
        """Construct a REAL.

        :param precision: total digits; when both *precision* and *scale*
          are None, values are stored to the limits allowed by the server.
        :param scale: digits to the right of the decimal point.
        """
        super(REAL, self).__init__(
            precision=precision, scale=scale, asdecimal=asdecimal, **kw)
|||
|
|||
class FLOAT(_FloatType, sqltypes.FLOAT):
    """Drizzle FLOAT type."""

    __visit_name__ = 'FLOAT'

    def __init__(self, precision=None, scale=None, asdecimal=False, **kw):
        """Construct a FLOAT.

        :param precision: total digits; when both *precision* and *scale*
          are None, values are stored to the limits allowed by the server.
        :param scale: digits to the right of the decimal point.
        """
        super(FLOAT, self).__init__(
            precision=precision, scale=scale, asdecimal=asdecimal, **kw)

    def bind_processor(self, dialect):
        # No bind-parameter conversion is applied for FLOAT values.
        return None
|||
|
|||
class INTEGER(sqltypes.INTEGER):
    """Drizzle INTEGER type."""

    __visit_name__ = 'INTEGER'

    def __init__(self, **kw):
        """Construct an INTEGER."""
        super(INTEGER, self).__init__(**kw)
|||
|
|||
class BIGINT(sqltypes.BIGINT):
    """Drizzle BIGINTEGER type."""

    __visit_name__ = 'BIGINT'

    def __init__(self, **kw):
        """Construct a BIGINTEGER."""
        super(BIGINT, self).__init__(**kw)
|||
|
|||
|
|||
class _DrizzleTime(mysql_dialect._MSTime):
    """Drizzle TIME type."""
    # No overrides: behavior is inherited from the MySQL TIME implementation.
|||
|
|||
class TIMESTAMP(sqltypes.TIMESTAMP):
    """Drizzle TIMESTAMP type."""
    # Only pins __visit_name__; everything else inherits sqltypes.TIMESTAMP.
    __visit_name__ = 'TIMESTAMP'
|||
|
|||
class TEXT(_StringType, sqltypes.TEXT):
    """Drizzle TEXT type, for text up to 2^16 characters."""

    __visit_name__ = 'TEXT'

    def __init__(self, length=None, **kw):
        """Construct a TEXT.

        :param length: optional; when given, the server may substitute the
          smallest TEXT type sufficient to hold *length* characters.
        :param collation: optional column-level collation; takes precedence
          over the *binary* short-hand.
        :param binary: defaults to False; pick the binary collation of the
          column's character set (affects collation of character data only,
          not what is stored).
        """
        super(TEXT, self).__init__(length=length, **kw)
|||
|
|||
class VARCHAR(_StringType, sqltypes.VARCHAR):
    """Drizzle VARCHAR type, for variable-length character data."""

    __visit_name__ = 'VARCHAR'

    def __init__(self, length=None, **kwargs):
        """Construct a VARCHAR.

        :param collation: optional column-level collation; takes precedence
          over the *binary* short-hand.
        :param binary: defaults to False; pick the binary collation of the
          column's character set (affects collation of character data only,
          not what is stored).
        """
        super(VARCHAR, self).__init__(length=length, **kwargs)
|||
|
|||
class CHAR(_StringType, sqltypes.CHAR):
    """Drizzle CHAR type, for fixed-length character data."""

    __visit_name__ = 'CHAR'

    def __init__(self, length=None, **kwargs):
        """Construct a CHAR.

        :param length: maximum data length, in characters.
        :param binary: optional; use the default binary collation of the
          national character set (use a BINARY type for binary data).
        :param collation: optional; request a particular collation, which
          must be compatible with the national character set.
        """
        super(CHAR, self).__init__(length=length, **kwargs)
|||
|
|||
class ENUM(mysql_dialect.ENUM):
    """Drizzle ENUM type."""

    def __init__(self, *enums, **kw):
        """Construct an ENUM.

        Example::

          Column('myenum', ENUM("foo", "bar", "baz"))

        :param enums: the range of valid values for this ENUM; quoted when
          generating schema according to the *quoting* flag.
        :param strict: defaults to False; validate that a given value is in
          range on insert/update.  Drizzle itself does not raise for an
          out-of-range value -- an alternate value is stored instead (see
          the Drizzle ENUM documentation).
        :param collation: optional column-level collation; takes precedence
          over the *binary* short-hand.
        :param binary: defaults to False; pick the binary collation of the
          column's character set (affects collation of character data only,
          not what is stored).
        :param quoting: defaults to 'auto' -- if every enum value carries
          the same surrounding quote character, 'quoted' mode is assumed,
          otherwise 'unquoted'.

          'quoted': values are already quoted and are emitted directly in
          the schema -- this usage is deprecated.

          'unquoted': values are escaped and single-quoted when generating
          the schema.

          Earlier versions always required pre-quoted values; future
          versions will always quote for you.  This is a transitional
          option.
        """
        super(ENUM, self).__init__(*enums, **kw)
|||
|
|||
class _DrizzleBoolean(sqltypes.Boolean):
    """Boolean that maps to the DBAPI's NUMERIC type code."""

    def get_dbapi_type(self, dbapi):
        return dbapi.NUMERIC
|||
|
|||
# Map generic SQLAlchemy types to their Drizzle-specific implementations.
colspecs = {
    sqltypes.Numeric: NUMERIC,
    sqltypes.Float: FLOAT,
    sqltypes.Time: _DrizzleTime,
    sqltypes.Enum: ENUM,
    sqltypes.Boolean: _DrizzleBoolean,
}

# All the types we have in Drizzle
ischema_names = {
    'BIGINT': BIGINT,
    'BINARY': BINARY,
    'BLOB': BLOB,
    'BOOLEAN': BOOLEAN,
    'CHAR': CHAR,
    'DATE': DATE,
    'DATETIME': DATETIME,
    'DECIMAL': DECIMAL,
    'DOUBLE': DOUBLE,
    'ENUM': ENUM,
    'FLOAT': FLOAT,
    'INT': INTEGER,
    'INTEGER': INTEGER,
    'NUMERIC': NUMERIC,
    'TEXT': TEXT,
    'TIME': TIME,
    'TIMESTAMP': TIMESTAMP,
    'VARBINARY': VARBINARY,
    'VARCHAR': VARCHAR,
}
|||
|
|||
class DrizzleCompiler(mysql_dialect.MySQLCompiler):
    """SQL compiler; differs from MySQL only in CAST type rendering."""

    def visit_typeclause(self, typeclause):
        # Every integer flavor casts as plain INTEGER in Drizzle.
        impl = typeclause.type.dialect_impl(self.dialect)
        if isinstance(impl, sqltypes.Integer):
            return 'INTEGER'
        return super(DrizzleCompiler, self).visit_typeclause(typeclause)

    def visit_cast(self, cast, **kwargs):
        rendered_type = self.process(cast.typeclause)
        if rendered_type is None:
            # Uncastable target type: fall back to the bare expression.
            return self.process(cast.clause)
        return 'CAST(%s AS %s)' % (self.process(cast.clause), rendered_type)
|||
|
|||
|
|||
class DrizzleDDLCompiler(mysql_dialect.MySQLDDLCompiler):
    # No Drizzle-specific DDL overrides; MySQL behavior is used as-is.
    pass
|||
|
|||
class DrizzleTypeCompiler(mysql_dialect.MySQLTypeCompiler):
    """Type DDL rendering for Drizzle."""

    def _extend_numeric(self, type_, spec):
        # Drizzle numeric types take no extra modifiers; emit spec as-is.
        return spec

    def _extend_string(self, type_, defaults, spec):
        """Extend a string-type declaration with standard SQL
        COLLATE annotations and Drizzle specific extensions.

        """
        def attr(name):
            return getattr(type_, name, defaults.get(name))

        collation = None
        if attr('collation'):
            collation = 'COLLATE %s' % type_.collation
        elif attr('binary'):
            collation = 'BINARY'

        pieces = [piece for piece in (spec, collation) if piece is not None]
        return ' '.join(pieces)

    def visit_NCHAR(self, type):
        raise NotImplementedError("Drizzle does not support NCHAR")

    def visit_NVARCHAR(self, type):
        raise NotImplementedError("Drizzle does not support NVARCHAR")

    def visit_FLOAT(self, type_):
        if type_.scale is None or type_.precision is None:
            return "FLOAT"
        return "FLOAT(%s, %s)" % (type_.precision, type_.scale)

    def visit_BOOLEAN(self, type_):
        return "BOOLEAN"

    def visit_BLOB(self, type_):
        return "BLOB"
|||
|
|||
|
|||
class DrizzleExecutionContext(mysql_dialect.MySQLExecutionContext):
    """Execution context; inherits the MySQL behavior unchanged."""
|||
|
|||
class DrizzleIdentifierPreparer(mysql_dialect.MySQLIdentifierPreparer):
    """Identifier preparer; inherits the MySQL quoting rules unchanged."""
|||
|
|||
class DrizzleDialect(mysql_dialect.MySQLDialect):
    """Details of the Drizzle dialect. Not used directly in application code."""

    name = 'drizzle'

    # Capability flags that differ from the MySQL defaults.
    _supports_cast = True
    supports_sequences = False
    supports_native_boolean = True
    supports_views = False

    default_paramstyle = 'format'
    colspecs = colspecs

    # Dialect-specific compiler and preparer machinery.
    statement_compiler = DrizzleCompiler
    ddl_compiler = DrizzleDDLCompiler
    type_compiler = DrizzleTypeCompiler
    ischema_names = ischema_names
    preparer = DrizzleIdentifierPreparer

    def on_connect(self):
        """Return a connect handler that disables driver-level
        autocommit.

        Works around Drizzle Bug#707842, where this flag is not set
        properly.
        """
        def connect(conn):
            conn.autocommit(False)
        return connect

    def do_commit(self, connection):
        """Execute a COMMIT."""
        connection.commit()

    def do_rollback(self, connection):
        """Execute a ROLLBACK."""
        connection.rollback()

    @reflection.cache
    def get_table_names(self, connection, schema=None, **kw):
        """Return a Unicode SHOW TABLES from a given schema."""
        current_schema = schema if schema is not None \
            else self.default_schema_name

        charset = 'utf8'
        result = connection.execute(
            "SHOW TABLES FROM %s" %
            self.identifier_preparer.quote_identifier(current_schema))
        return [row[0]
                for row in self._compat_fetchall(result, charset=charset)]

    @reflection.cache
    def get_view_names(self, connection, schema=None, **kw):
        """Views are not supported by this dialect."""
        raise NotImplementedError

    def _detect_casing(self, connection):
        """Sniff out identifier case sensitivity.

        Cached per-connection. This value can not change without a server
        restart.

        """
        # Fixed value: no server query is performed.
        return 0

    def _detect_collations(self, connection):
        """Pull the active COLLATIONS list from the server.

        Cached per-connection.
        """
        charset = self._connection_charset
        result = connection.execute(
            'SELECT CHARACTER_SET_NAME, COLLATION_NAME from '
            'data_dictionary.COLLATIONS')
        # Later rows for the same character set overwrite earlier ones,
        # matching the original accumulation order.
        return dict(
            (row[0], row[1])
            for row in self._compat_fetchall(result, charset))

    def _detect_ansiquotes(self, connection):
        """Detect and adjust for the ANSI_QUOTES sql mode."""
        # Both settings are fixed for this dialect; no detection query
        # is issued.
        self._server_ansiquotes = False
        self._backslash_escapes = False
|||
|
|||
# Register the dialect class with SQLAlchemy's logging system.
log.class_logger(DrizzleDialect)
|||
|
# ---- new module: Drizzle support via the mysqldb DB-API driver ----
|||
"""Support for the Drizzle database via the Drizzle-python adapter. |
|||
|
|||
Drizzle-Python is available at: |
|||
|
|||
http://sourceforge.net/projects/mysql-python |
|||
|
|||
At least version 1.2.1 or 1.2.2 should be used. |
|||
|
|||
Connecting |
|||
----------- |
|||
|
|||
Connect string format:: |
|||
|
|||
drizzle+mysqldb://<user>:<password>@<host>[:<port>]/<dbname> |
|||
|
|||
Unicode |
|||
------- |
|||
|
|||
Drizzle accommodates Python ``unicode`` objects directly and |
|||
uses the ``utf8`` encoding in all cases. |
|||
|
|||
Known Issues |
|||
------------- |
|||
|
|||
Drizzle-python at least as of version 1.2.2 has a serious memory leak related |
|||
to unicode conversion, a feature which is disabled via ``use_unicode=0``. |
|||
The recommended connection form with SQLAlchemy is:: |
|||
|
|||
    engine = create_engine('drizzle+mysqldb://scott:tiger@localhost/test?charset=utf8&use_unicode=0', pool_recycle=3600)
|||
|
|||
|
|||
""" |
|||
|
|||
from sqlalchemy.dialects.drizzle.base import (DrizzleDialect, |
|||
DrizzleExecutionContext, |
|||
DrizzleCompiler, DrizzleIdentifierPreparer) |
|||
from sqlalchemy.connectors.mysqldb import ( |
|||
MySQLDBExecutionContext, |
|||
MySQLDBCompiler, |
|||
MySQLDBIdentifierPreparer, |
|||
MySQLDBConnector |
|||
) |
|||
|
|||
class DrizzleExecutionContext_mysqldb(MySQLDBExecutionContext,
                                      DrizzleExecutionContext):
    """Execution context combining the mysqldb connector with Drizzle."""
|||
|
|||
|
|||
class DrizzleCompiler_mysqldb(MySQLDBCompiler, DrizzleCompiler):
    """Statement compiler combining the mysqldb connector with Drizzle."""
|||
|
|||
|
|||
class DrizzleIdentifierPreparer_mysqldb(MySQLDBIdentifierPreparer,
                                        DrizzleIdentifierPreparer):
    """Identifier preparer combining the mysqldb connector with Drizzle."""
|||
|
|||
class DrizzleDialect_mysqldb(MySQLDBConnector, DrizzleDialect):
    """Drizzle dialect speaking through the mysqldb DB-API driver."""

    execution_ctx_cls = DrizzleExecutionContext_mysqldb
    statement_compiler = DrizzleCompiler_mysqldb
    preparer = DrizzleIdentifierPreparer_mysqldb

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""
        # Fixed value: no server round trip is made.
        return 'utf8'
|||
|
|||
|
|||
# Module-level name looked up by SQLAlchemy's dialect loader.
dialect = DrizzleDialect_mysqldb
Some files were not shown because too many files changed in this diff
Loading…
Reference in new issue