From f53364eb6c75bcf46de3c491dab1e075cf0899c7 Mon Sep 17 00:00:00 2001 From: Ruud Date: Fri, 22 Nov 2013 16:08:54 +0100 Subject: [PATCH] Update Tornado --- libs/backports/__init__.py | 3 + libs/backports/ssl_match_hostname/README.txt | 42 ++++++++ libs/backports/ssl_match_hostname/__init__.py | 60 +++++++++++ libs/tornado/auth.py | 68 +++++++++++- libs/tornado/autoreload.py | 15 ++- libs/tornado/curl_httpclient.py | 11 ++ libs/tornado/gen.py | 23 +++- libs/tornado/httpclient.py | 30 ++++-- libs/tornado/httpserver.py | 26 ++++- libs/tornado/httputil.py | 6 +- libs/tornado/ioloop.py | 3 +- libs/tornado/iostream.py | 2 +- libs/tornado/log.py | 2 +- libs/tornado/netutil.py | 78 ++------------ libs/tornado/platform/asyncio.py | 134 ++++++++++++++++++++++++ libs/tornado/process.py | 3 +- libs/tornado/simple_httpclient.py | 67 +++++++++--- libs/tornado/speedups.c | 49 +++++++++ libs/tornado/tcpserver.py | 3 +- libs/tornado/web.py | 145 ++++++++++++++++++++------ libs/tornado/websocket.py | 88 ++++++++++++---- libs/tornado/wsgi.py | 9 +- 22 files changed, 703 insertions(+), 164 deletions(-) create mode 100644 libs/backports/__init__.py create mode 100644 libs/backports/ssl_match_hostname/README.txt create mode 100644 libs/backports/ssl_match_hostname/__init__.py create mode 100644 libs/tornado/platform/asyncio.py create mode 100644 libs/tornado/speedups.c diff --git a/libs/backports/__init__.py b/libs/backports/__init__.py new file mode 100644 index 0000000..612d328 --- /dev/null +++ b/libs/backports/__init__.py @@ -0,0 +1,3 @@ +# This is a Python "namespace package" http://www.python.org/dev/peps/pep-0382/ +from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) diff --git a/libs/backports/ssl_match_hostname/README.txt b/libs/backports/ssl_match_hostname/README.txt new file mode 100644 index 0000000..f024fd7 --- /dev/null +++ b/libs/backports/ssl_match_hostname/README.txt @@ -0,0 +1,42 @@ + +The ssl.match_hostname() function from Python 3.2 +================================================= + +The Secure Sockets layer is only actually *secure* +if you check the hostname in the certificate returned +by the server to which you are connecting, +and verify that it matches to hostname +that you are trying to reach. + +But the matching logic, defined in `RFC2818`_, +can be a bit tricky to implement on your own. +So the ``ssl`` package in the Standard Library of Python 3.2 +now includes a ``match_hostname()`` function +for performing this check instead of requiring every application +to implement the check separately. + +This backport brings ``match_hostname()`` to users +of earlier versions of Python. +Simply make this distribution a dependency of your package, +and then use it like this:: + + from backports.ssl_match_hostname import match_hostname, CertificateError + ... + sslsock = ssl.wrap_socket(sock, ssl_version=ssl.PROTOCOL_SSLv3, + cert_reqs=ssl.CERT_REQUIRED, ca_certs=...) + try: + match_hostname(sslsock.getpeercert(), hostname) + except CertificateError, ce: + ... + +Note that the ``ssl`` module is only included in the Standard Library +for Python 2.6 and later; +users of Python 2.5 or earlier versions +will also need to install the ``ssl`` distribution +from the Python Package Index to use code like that shown above. + +Brandon Craig Rhodes is merely the packager of this distribution; +the actual code inside comes verbatim from Python 3.2. + +.. 
_RFC2818: http://tools.ietf.org/html/rfc2818.html + diff --git a/libs/backports/ssl_match_hostname/__init__.py b/libs/backports/ssl_match_hostname/__init__.py new file mode 100644 index 0000000..5707649 --- /dev/null +++ b/libs/backports/ssl_match_hostname/__init__.py @@ -0,0 +1,60 @@ +"""The match_hostname() function from Python 3.2, essential when using SSL.""" + +import re + +__version__ = '3.2a3' + +class CertificateError(ValueError): + pass + +def _dnsname_to_pat(dn): + pats = [] + for frag in dn.split(r'.'): + if frag == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append('[^.]+') + else: + # Otherwise, '*' matches any dotless fragment. + frag = re.escape(frag) + pats.append(frag.replace(r'\*', '[^.]*')) + return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + +def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 rules + are mostly followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate") + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if _dnsname_to_pat(value).match(hostname): + return + dnsnames.append(value) + if not san: + # The subject is only checked when subjectAltName is empty + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. + if key == 'commonName': + if _dnsname_to_pat(value).match(hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" + % (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" + % (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") diff --git a/libs/tornado/auth.py b/libs/tornado/auth.py index 0cbfa7c..a2cef35 100755 --- a/libs/tornado/auth.py +++ b/libs/tornado/auth.py @@ -549,7 +549,7 @@ class OAuth2Mixin(object): @return_future def authorize_redirect(self, redirect_uri=None, client_id=None, client_secret=None, extra_params=None, - callback=None): + callback=None, scope=None, response_type="code"): """Redirects the user to obtain OAuth authorization for this service. Some providers require that you register a redirect URL with @@ -566,10 +566,13 @@ class OAuth2Mixin(object): """ args = { "redirect_uri": redirect_uri, - "client_id": client_id + "client_id": client_id, + "response_type": response_type } if extra_params: args.update(extra_params) + if scope: + args['scope'] = ' '.join(scope) self.redirect( url_concat(self._OAUTH_AUTHORIZE_URL, args)) callback() @@ -945,6 +948,67 @@ class GoogleMixin(OpenIdMixin, OAuthMixin): return OpenIdMixin.get_authenticated_user(self) +class GoogleOAuth2Mixin(OAuth2Mixin): + """Google authentication using OAuth2.""" + _OAUTH_AUTHORIZE_URL = "https://accounts.google.com/o/oauth2/auth" + _OAUTH_ACCESS_TOKEN_URL = "https://accounts.google.com/o/oauth2/token" + _OAUTH_NO_CALLBACKS = False + _OAUTH_SETTINGS_KEY = 'google_oauth' + + @_auth_return_future + def get_authenticated_user(self, redirect_uri, code, callback): + """Handles the login for the Google user, returning a user object. 
+ + Example usage:: + + class GoogleOAuth2LoginHandler(LoginHandler, tornado.auth.GoogleOAuth2Mixin): + @tornado.web.asynchronous + @tornado.gen.coroutine + def get(self): + if self.get_argument("code", False): + user = yield self.get_authenticated_user( + redirect_uri='http://your.site.com/auth/google', + code=self.get_argument("code")) + # Save the user with e.g. set_secure_cookie + else: + yield self.authorize_redirect( + redirect_uri='http://your.site.com/auth/google', + client_id=self.settings["google_consumer_key"], + scope=['openid', 'email'], + response_type='code', + extra_params={"approval_prompt": "auto"}) + """ + http = self.get_auth_http_client() + body = urllib_parse.urlencode({ + "redirect_uri": redirect_uri, + "code": code, + "client_id": self.settings[self._OAUTH_SETTINGS_KEY]['key'], + "client_secret": self.settings[self._OAUTH_SETTINGS_KEY]['secret'], + "grant_type": "authorization_code", + }) + + http.fetch(self._OAUTH_ACCESS_TOKEN_URL, + self.async_callback(self._on_access_token, callback), + method="POST", headers={'Content-Type': 'application/x-www-form-urlencoded'}, body=body) + + def _on_access_token(self, future, response): + """Callback function for the exchange to the access token.""" + if response.error: + future.set_exception(AuthError('Google auth error: %s' % str(response))) + return + + args = escape.json_decode(response.body) + future.set_result(args) + + def get_auth_http_client(self): + """Returns the `.AsyncHTTPClient` instance to be used for auth requests. + + May be overridden by subclasses to use an HTTP client other than + the default. + """ + return httpclient.AsyncHTTPClient() + + class FacebookMixin(object): """Facebook Connect authentication. diff --git a/libs/tornado/autoreload.py b/libs/tornado/autoreload.py index 0575429..79cccb4 100755 --- a/libs/tornado/autoreload.py +++ b/libs/tornado/autoreload.py @@ -16,11 +16,15 @@ """xAutomatically restart the server when a source file is modified. -Most applications should not access this module directly. Instead, pass the -keyword argument ``debug=True`` to the `tornado.web.Application` constructor. -This will enable autoreload mode as well as checking for changes to templates -and static resources. Note that restarting is a destructive operation -and any requests in progress will be aborted when the process restarts. +Most applications should not access this module directly. Instead, +pass the keyword argument ``autoreload=True`` to the +`tornado.web.Application` constructor (or ``debug=True``, which +enables this setting and several others). This will enable autoreload +mode as well as checking for changes to templates and static +resources. Note that restarting is a destructive operation and any +requests in progress will be aborted when the process restarts. (If +you want to disable autoreload while using other debug-mode features, +pass both ``debug=True`` and ``autoreload=False``). This module can also be used as a command-line wrapper around scripts such as unit test runners. See the `main` method for details. @@ -38,6 +42,7 @@ Reloading loses any Python interpreter command-line arguments (e.g. ``-u``) because it re-executes Python using ``sys.executable`` and ``sys.argv``. Additionally, modifying these variables will cause reloading to behave incorrectly. 
+ """ from __future__ import absolute_import, division, print_function, with_statement diff --git a/libs/tornado/curl_httpclient.py b/libs/tornado/curl_httpclient.py index e090056..cb97710 100755 --- a/libs/tornado/curl_httpclient.py +++ b/libs/tornado/curl_httpclient.py @@ -360,6 +360,7 @@ def _curl_setup_request(curl, request, buffer, headers): curl.setopt(pycurl.PROXYUSERPWD, credentials) else: curl.setopt(pycurl.PROXY, '') + curl.unsetopt(pycurl.PROXYUSERPWD) if request.validate_cert: curl.setopt(pycurl.SSL_VERIFYPEER, 1) curl.setopt(pycurl.SSL_VERIFYHOST, 2) @@ -382,6 +383,8 @@ def _curl_setup_request(curl, request, buffer, headers): # that we can't reach, so allow ipv6 unless the user asks to disable. # (but see version check in _process_queue above) curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4) + else: + curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER) # Set the request method through curl's irritating interface which makes # up names for almost every single method @@ -404,6 +407,11 @@ def _curl_setup_request(curl, request, buffer, headers): # Handle curl's cryptic options for every individual HTTP method if request.method in ("POST", "PUT"): + if request.body is None: + raise AssertionError( + 'Body must not be empty for "%s" request' + % request.method) + request_buffer = BytesIO(utf8(request.body)) curl.setopt(pycurl.READFUNCTION, request_buffer.read) if request.method == "POST": @@ -414,6 +422,9 @@ def _curl_setup_request(curl, request, buffer, headers): curl.setopt(pycurl.POSTFIELDSIZE, len(request.body)) else: curl.setopt(pycurl.INFILESIZE, len(request.body)) + elif request.method == "GET": + if request.body is not None: + raise AssertionError('Body must be empty for GET request') if request.auth_username is not None: userpwd = "%s:%s" % (request.auth_username, request.auth_password or '') diff --git a/libs/tornado/gen.py b/libs/tornado/gen.py index 92b7458..217ebdf 100755 --- a/libs/tornado/gen.py +++ b/libs/tornado/gen.py @@ -38,8 +38,8 @@ since it is both shorter and provides better exception handling):: def get(self): yield gen.Task(AsyncHTTPClient().fetch, "http://example.com") -You can also yield a list of ``Futures`` and/or ``Tasks``, which will be -started at the same time and run in parallel; a list of results will +You can also yield a list or dict of ``Futures`` and/or ``Tasks``, which will be +started at the same time and run in parallel; a list or dict of results will be returned when they are all finished:: @gen.coroutine @@ -47,6 +47,13 @@ be returned when they are all finished:: http_client = AsyncHTTPClient() response1, response2 = yield [http_client.fetch(url1), http_client.fetch(url2)] + response_dict = yield dict(response3=http_client.fetch(url3), + response4=http_client.fetch(url4)) + response3 = response_dict['response3'] + response4 = response_dict['response4'] + +.. versionchanged:: 3.2 + Dict support added. For more complicated interfaces, `Task` can be split into two parts: `Callback` and `Wait`:: @@ -404,6 +411,10 @@ class Multi(YieldPoint): a list of ``YieldPoints``. 
""" def __init__(self, children): + self.keys = None + if isinstance(children, dict): + self.keys = list(children.keys()) + children = children.values() self.children = [] for i in children: if isinstance(i, Future): @@ -423,7 +434,11 @@ class Multi(YieldPoint): return not self.unfinished_children def get_result(self): - return [i.get_result() for i in self.children] + result = (i.get_result() for i in self.children) + if self.keys is not None: + return dict(zip(self.keys, result)) + else: + return list(result) class _NullYieldPoint(YieldPoint): @@ -523,7 +538,7 @@ class Runner(object): self.finished = True self.yield_point = _null_yield_point raise - if isinstance(yielded, list): + if isinstance(yielded, (list, dict)): yielded = Multi(yielded) elif isinstance(yielded, Future): yielded = YieldFuture(yielded) diff --git a/libs/tornado/httpclient.py b/libs/tornado/httpclient.py index 6767589..b58a834 100755 --- a/libs/tornado/httpclient.py +++ b/libs/tornado/httpclient.py @@ -282,7 +282,8 @@ class HTTPRequest(object): :arg int max_redirects: Limit for ``follow_redirects`` :arg string user_agent: String to send as ``User-Agent`` header :arg bool use_gzip: Request gzip encoding from the server - :arg string network_interface: Network interface to use for request + :arg string network_interface: Network interface to use for request. + ``curl_httpclient`` only; see note below. :arg callable streaming_callback: If set, ``streaming_callback`` will be run with each chunk of data as it is received, and ``HTTPResponse.body`` and ``HTTPResponse.buffer`` will be empty in @@ -310,14 +311,26 @@ class HTTPRequest(object): :arg bool validate_cert: For HTTPS requests, validate the server's certificate? :arg string ca_certs: filename of CA certificates in PEM format, - or None to use defaults. Note that in ``curl_httpclient``, if - any request uses a custom ``ca_certs`` file, they all must (they - don't have to all use the same ``ca_certs``, but it's not possible - to mix requests with ``ca_certs`` and requests that use the defaults. + or None to use defaults. See note below when used with + ``curl_httpclient``. :arg bool allow_ipv6: Use IPv6 when available? Default is false in ``simple_httpclient`` and true in ``curl_httpclient`` - :arg string client_key: Filename for client SSL key, if any - :arg string client_cert: Filename for client SSL certificate, if any + :arg string client_key: Filename for client SSL key, if any. See + note below when used with ``curl_httpclient``. + :arg string client_cert: Filename for client SSL certificate, if any. + See note below when used with ``curl_httpclient``. + + .. note:: + + When using ``curl_httpclient`` certain options may be + inherited by subsequent fetches because ``pycurl`` does + not allow them to be cleanly reset. This applies to the + ``ca_certs``, ``client_key``, ``client_cert``, and + ``network_interface`` arguments. If you use these + options, you should pass them on every request (you don't + have to always use the same values, but it's not possible + to mix requests that specify these options with ones that + use the defaults). .. versionadded:: 3.1 The ``auth_mode`` argument. 
@@ -372,6 +385,9 @@ class HTTPResponse(object): * headers: `tornado.httputil.HTTPHeaders` object + * effective_url: final location of the resource after following any + redirects + * buffer: ``cStringIO`` object for response body * body: response body as string (created on demand from ``self.buffer``) diff --git a/libs/tornado/httpserver.py b/libs/tornado/httpserver.py index d005545..34e7b76 100755 --- a/libs/tornado/httpserver.py +++ b/libs/tornado/httpserver.py @@ -29,6 +29,7 @@ from __future__ import absolute_import, division, print_function, with_statement import socket import ssl import time +import copy from tornado.escape import native_str, parse_qs_bytes from tornado import httputil @@ -326,8 +327,8 @@ class HTTPConnection(object): self.request_callback(self._request) except _BadRequestException as e: - gen_log.info("Malformed HTTP request from %s: %s", - self.address[0], e) + gen_log.info("Malformed HTTP request from %r: %s", + self.address, e) self.close() return @@ -336,7 +337,10 @@ class HTTPConnection(object): if self._request.method in ("POST", "PATCH", "PUT"): httputil.parse_body_arguments( self._request.headers.get("Content-Type", ""), data, - self._request.arguments, self._request.files) + self._request.body_arguments, self._request.files) + + for k, v in self._request.body_arguments.items(): + self._request.arguments.setdefault(k, []).extend(v) self.request_callback(self._request) @@ -403,6 +407,20 @@ class HTTPRequest(object): `.RequestHandler.get_argument`, which returns argument values as unicode strings. + .. attribute:: query_arguments + + Same format as ``arguments``, but contains only arguments extracted + from the query string. + + .. versionadded:: 3.2 + + .. attribute:: body_arguments + + Same format as ``arguments``, but contains only arguments extracted + from the request body. + + .. versionadded:: 3.2 + .. attribute:: files File uploads are available in the files property, which maps file @@ -457,6 +475,8 @@ class HTTPRequest(object): self.path, sep, self.query = uri.partition('?') self.arguments = parse_qs_bytes(self.query, keep_blank_values=True) + self.query_arguments = copy.deepcopy(self.arguments) + self.body_arguments = {} def supports_http_1_1(self): """Returns True if this request supports HTTP/1.1 semantics""" diff --git a/libs/tornado/httputil.py b/libs/tornado/httputil.py index 3e7337d..2575bc5 100755 --- a/libs/tornado/httputil.py +++ b/libs/tornado/httputil.py @@ -320,7 +320,11 @@ def parse_body_arguments(content_type, body, arguments, files): with the parsed contents. 
""" if content_type.startswith("application/x-www-form-urlencoded"): - uri_arguments = parse_qs_bytes(native_str(body), keep_blank_values=True) + try: + uri_arguments = parse_qs_bytes(native_str(body), keep_blank_values=True) + except Exception as e: + gen_log.warning('Invalid x-www-form-urlencoded body: %s', e) + uri_arguments = {} for name, values in uri_arguments.items(): if values: arguments.setdefault(name, []).extend(values) diff --git a/libs/tornado/ioloop.py b/libs/tornado/ioloop.py index 91ee2c5..a36ab7a 100755 --- a/libs/tornado/ioloop.py +++ b/libs/tornado/ioloop.py @@ -676,8 +676,7 @@ class PollIOLoop(IOLoop): while self._events: fd, events = self._events.popitem() try: - if self._handlers.has_key(fd): - self._handlers[fd](fd, events) + self._handlers[fd](fd, events) except (OSError, IOError) as e: if e.args[0] == errno.EPIPE: # Happens when the client closes the connection diff --git a/libs/tornado/iostream.py b/libs/tornado/iostream.py index 6bdc639..08430ce 100755 --- a/libs/tornado/iostream.py +++ b/libs/tornado/iostream.py @@ -774,7 +774,7 @@ class IOStream(BaseIOStream): # Sometimes setsockopt will fail if the socket is closed # at the wrong time. This can happen with HTTPServer # resetting the value to false between requests. - if e.errno != errno.EINVAL: + if e.errno not in (errno.EINVAL, errno.ECONNRESET): raise diff --git a/libs/tornado/log.py b/libs/tornado/log.py index fa11f37..648db5c 100755 --- a/libs/tornado/log.py +++ b/libs/tornado/log.py @@ -51,7 +51,7 @@ gen_log = logging.getLogger("tornado.general") def _stderr_supports_color(): color = False - if curses and sys.stderr.isatty(): + if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty(): try: curses.setupterm() if curses.tigetnum("colors") > 0: diff --git a/libs/tornado/netutil.py b/libs/tornado/netutil.py index 9dc8506..21db475 100755 --- a/libs/tornado/netutil.py +++ b/libs/tornado/netutil.py @@ -20,7 +20,6 @@ from __future__ import absolute_import, division, print_function, with_statement import errno import os -import re import socket import ssl import stat @@ -30,6 +29,13 @@ from tornado.ioloop import IOLoop from tornado.platform.auto import set_close_exec from tornado.util import Configurable +if hasattr(ssl, 'match_hostname') and hasattr(ssl, 'CertificateError'): # python 3.2+ + ssl_match_hostname = ssl.match_hostname + SSLCertificateError = ssl.CertificateError +else: + import backports.ssl_match_hostname + ssl_match_hostname = backports.ssl_match_hostname.match_hostname + SSLCertificateError = backports.ssl_match_hostname.CertificateError def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags=None): """Creates listening sockets bound to the given port and address. @@ -391,73 +397,3 @@ def ssl_wrap_socket(socket, ssl_options, server_hostname=None, **kwargs): return context.wrap_socket(socket, **kwargs) else: return ssl.wrap_socket(socket, **dict(context, **kwargs)) - -if hasattr(ssl, 'match_hostname') and hasattr(ssl, 'CertificateError'): # python 3.2+ - ssl_match_hostname = ssl.match_hostname - SSLCertificateError = ssl.CertificateError -else: - # match_hostname was added to the standard library ssl module in python 3.2. 
- # The following code was backported for older releases and copied from - # https://bitbucket.org/brandon/backports.ssl_match_hostname - class SSLCertificateError(ValueError): - pass - - def _dnsname_to_pat(dn, max_wildcards=1): - pats = [] - for frag in dn.split(r'.'): - if frag.count('*') > max_wildcards: - # Issue #17980: avoid denials of service by refusing more - # than one wildcard per fragment. A survery of established - # policy among SSL implementations showed it to be a - # reasonable choice. - raise SSLCertificateError( - "too many wildcards in certificate DNS name: " + repr(dn)) - if frag == '*': - # When '*' is a fragment by itself, it matches a non-empty dotless - # fragment. - pats.append('[^.]+') - else: - # Otherwise, '*' matches any dotless fragment. - frag = re.escape(frag) - pats.append(frag.replace(r'\*', '[^.]*')) - return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) - - def ssl_match_hostname(cert, hostname): - """Verify that *cert* (in decoded format as returned by - SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 rules - are mostly followed, but IP addresses are not accepted for *hostname*. - - CertificateError is raised on failure. On success, the function - returns nothing. - """ - if not cert: - raise ValueError("empty or no certificate") - dnsnames = [] - san = cert.get('subjectAltName', ()) - for key, value in san: - if key == 'DNS': - if _dnsname_to_pat(value).match(hostname): - return - dnsnames.append(value) - if not dnsnames: - # The subject is only checked when there is no dNSName entry - # in subjectAltName - for sub in cert.get('subject', ()): - for key, value in sub: - # XXX according to RFC 2818, the most specific Common Name - # must be used. - if key == 'commonName': - if _dnsname_to_pat(value).match(hostname): - return - dnsnames.append(value) - if len(dnsnames) > 1: - raise SSLCertificateError("hostname %r " - "doesn't match either of %s" - % (hostname, ', '.join(map(repr, dnsnames)))) - elif len(dnsnames) == 1: - raise SSLCertificateError("hostname %r " - "doesn't match %r" - % (hostname, dnsnames[0])) - else: - raise SSLCertificateError("no appropriate commonName or " - "subjectAltName fields were found") diff --git a/libs/tornado/platform/asyncio.py b/libs/tornado/platform/asyncio.py new file mode 100644 index 0000000..a8f5bad --- /dev/null +++ b/libs/tornado/platform/asyncio.py @@ -0,0 +1,134 @@ +"""Bridges between the `asyncio` module and Tornado IOLoop. + +This is a work in progress and interfaces are subject to change. 
+ +To test: +python3.4 -m tornado.test.runtests --ioloop=tornado.platform.asyncio.AsyncIOLoop +python3.4 -m tornado.test.runtests --ioloop=tornado.platform.asyncio.AsyncIOMainLoop +(the tests log a few warnings with AsyncIOMainLoop because they leave some +unfinished callbacks on the event loop that fail when it resumes) +""" +import asyncio +import datetime +import functools +import os + +from tornado.ioloop import IOLoop +from tornado import stack_context + +class BaseAsyncIOLoop(IOLoop): + def initialize(self, asyncio_loop, close_loop=False): + self.asyncio_loop = asyncio_loop + self.close_loop = close_loop + self.asyncio_loop.call_soon(self.make_current) + # Maps fd to handler function (as in IOLoop.add_handler) + self.handlers = {} + # Set of fds listening for reads/writes + self.readers = set() + self.writers = set() + self.closing = False + + def close(self, all_fds=False): + self.closing = True + for fd in list(self.handlers): + self.remove_handler(fd) + if all_fds: + os.close(fd) + if self.close_loop: + self.asyncio_loop.close() + + def add_handler(self, fd, handler, events): + if fd in self.handlers: + raise ValueError("fd %d added twice" % fd) + self.handlers[fd] = stack_context.wrap(handler) + if events & IOLoop.READ: + self.asyncio_loop.add_reader( + fd, self._handle_events, fd, IOLoop.READ) + self.readers.add(fd) + if events & IOLoop.WRITE: + self.asyncio_loop.add_writer( + fd, self._handle_events, fd, IOLoop.WRITE) + self.writers.add(fd) + + def update_handler(self, fd, events): + if events & IOLoop.READ: + if fd not in self.readers: + self.asyncio_loop.add_reader( + fd, self._handle_events, fd, IOLoop.READ) + self.readers.add(fd) + else: + if fd in self.readers: + self.asyncio_loop.remove_reader(fd) + self.readers.remove(fd) + if events & IOLoop.WRITE: + if fd not in self.writers: + self.asyncio_loop.add_writer( + fd, self._handle_events, fd, IOLoop.WRITE) + self.writers.add(fd) + else: + if fd in self.writers: + self.asyncio_loop.remove_writer(fd) + self.writers.remove(fd) + + def remove_handler(self, fd): + if fd not in self.handlers: + return + if fd in self.readers: + self.asyncio_loop.remove_reader(fd) + self.readers.remove(fd) + if fd in self.writers: + self.asyncio_loop.remove_writer(fd) + self.writers.remove(fd) + del self.handlers[fd] + + def _handle_events(self, fd, events): + self.handlers[fd](fd, events) + + def start(self): + self.asyncio_loop.run_forever() + + def stop(self): + self.asyncio_loop.stop() + + def _run_callback(self, callback, *args, **kwargs): + try: + callback(*args, **kwargs) + except Exception: + self.handle_callback_exception(callback) + + def add_timeout(self, deadline, callback): + if isinstance(deadline, (int, float)): + delay = max(deadline - self.time(), 0) + elif isinstance(deadline, datetime.timedelta): + delay = deadline.total_seconds() + else: + raise TypeError("Unsupported deadline %r", deadline) + return self.asyncio_loop.call_later(delay, self._run_callback, + stack_context.wrap(callback)) + + def remove_timeout(self, timeout): + timeout.cancel() + + def add_callback(self, callback, *args, **kwargs): + if self.closing: + raise RuntimeError("IOLoop is closing") + if kwargs: + self.asyncio_loop.call_soon_threadsafe(functools.partial( + self._run_callback, stack_context.wrap(callback), + *args, **kwargs)) + else: + self.asyncio_loop.call_soon_threadsafe( + self._run_callback, stack_context.wrap(callback), *args) + + add_callback_from_signal = add_callback + + +class AsyncIOMainLoop(BaseAsyncIOLoop): + def initialize(self): + 
super(AsyncIOMainLoop, self).initialize(asyncio.get_event_loop(), + close_loop=False) + +class AsyncIOLoop(BaseAsyncIOLoop): + def initialize(self): + super(AsyncIOLoop, self).initialize(asyncio.new_event_loop(), + close_loop=True) diff --git a/libs/tornado/process.py b/libs/tornado/process.py index ffd2d29..942c5c3 100755 --- a/libs/tornado/process.py +++ b/libs/tornado/process.py @@ -92,7 +92,8 @@ def fork_processes(num_processes, max_restarts=100): between any server code. Note that multiple processes are not compatible with the autoreload - module (or the debug=True option to `tornado.web.Application`). + module (or the ``autoreload=True`` option to `tornado.web.Application` + which defaults to True when ``debug=True``). When using multiple processes, no IOLoops can be created or referenced until after the call to ``fork_processes``. diff --git a/libs/tornado/simple_httpclient.py b/libs/tornado/simple_httpclient.py index d8dbb27..2558ada 100755 --- a/libs/tornado/simple_httpclient.py +++ b/libs/tornado/simple_httpclient.py @@ -72,6 +72,7 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient): self.max_clients = max_clients self.queue = collections.deque() self.active = {} + self.waiting = {} self.max_buffer_size = max_buffer_size if resolver: self.resolver = resolver @@ -89,7 +90,16 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient): self.resolver.close() def fetch_impl(self, request, callback): - self.queue.append((request, callback)) + key = object() + self.queue.append((key, request, callback)) + if not len(self.active) < self.max_clients: + timeout_handle = self.io_loop.add_timeout( + self.io_loop.time() + min(request.connect_timeout, + request.request_timeout), + functools.partial(self._on_timeout, key)) + else: + timeout_handle = None + self.waiting[key] = (request, callback, timeout_handle) self._process_queue() if self.queue: gen_log.debug("max_clients limit reached, request queued. " @@ -99,8 +109,10 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient): def _process_queue(self): with stack_context.NullContext(): while self.queue and len(self.active) < self.max_clients: - request, callback = self.queue.popleft() - key = object() + key, request, callback = self.queue.popleft() + if key not in self.waiting: + continue + self._remove_timeout(key) self.active[key] = (request, callback) release_callback = functools.partial(self._release_fetch, key) self._handle_request(request, release_callback, callback) @@ -113,6 +125,22 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient): del self.active[key] self._process_queue() + def _remove_timeout(self, key): + if key in self.waiting: + request, callback, timeout_handle = self.waiting[key] + if timeout_handle is not None: + self.io_loop.remove_timeout(timeout_handle) + del self.waiting[key] + + def _on_timeout(self, key): + request, callback, timeout_handle = self.waiting[key] + self.queue.remove((key, request, callback)) + timeout_response = HTTPResponse( + request, 599, error=HTTPError(599, "Timeout"), + request_time=self.io_loop.time() - request.start_time) + self.io_loop.add_callback(callback, timeout_response) + del self.waiting[key] + class _HTTPConnection(object): _SUPPORTED_METHODS = set(["GET", "HEAD", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"]) @@ -162,15 +190,18 @@ class _HTTPConnection(object): # so restrict to ipv4 by default. 
af = socket.AF_INET + timeout = min(self.request.connect_timeout, self.request.request_timeout) + if timeout: + self._timeout = self.io_loop.add_timeout( + self.start_time + timeout, + stack_context.wrap(self._on_timeout)) self.resolver.resolve(host, port, af, callback=self._on_resolve) def _on_resolve(self, addrinfo): + if self.final_callback is None: + # final_callback is cleared if we've hit our timeout + return self.stream = self._create_stream(addrinfo) - timeout = min(self.request.connect_timeout, self.request.request_timeout) - if timeout: - self._timeout = self.io_loop.add_timeout( - self.start_time + timeout, - stack_context.wrap(self._on_timeout)) self.stream.set_close_callback(self._on_close) # ipv6 addresses are broken (in self.parsed.hostname) until # 2.7, here is correctly parsed value calculated in __init__ @@ -199,10 +230,10 @@ class _HTTPConnection(object): # the SSL_OP_NO_SSLv2, but that wasn't exposed to python # until 3.2. Python 2.7 adds the ciphers argument, which # can also be used to disable SSLv2. As a last resort - # on python 2.6, we set ssl_version to SSLv3. This is + # on python 2.6, we set ssl_version to TLSv1. This is # more narrow than we'd like since it also breaks - # compatibility with servers configured for TLSv1 only, - # but nearly all servers support SSLv3: + # compatibility with servers configured for SSLv3 only, + # but nearly all servers support both SSLv3 and TLSv1: # http://blog.ivanristic.com/2011/09/ssl-survey-protocol-support.html if sys.version_info >= (2, 7): ssl_options["ciphers"] = "DEFAULT:!SSLv2" @@ -210,7 +241,7 @@ class _HTTPConnection(object): # This is really only necessary for pre-1.0 versions # of openssl, but python 2.6 doesn't expose version # information. - ssl_options["ssl_version"] = ssl.PROTOCOL_SSLv3 + ssl_options["ssl_version"] = ssl.PROTOCOL_TLSv1 return SSLIOStream(socket.socket(af), io_loop=self.io_loop, @@ -233,6 +264,8 @@ class _HTTPConnection(object): def _on_connect(self): self._remove_timeout() + if self.final_callback is None: + return if self.request.request_timeout: self._timeout = self.io_loop.add_timeout( self.start_time + self.request.request_timeout, @@ -269,9 +302,15 @@ class _HTTPConnection(object): self.request.headers["User-Agent"] = self.request.user_agent if not self.request.allow_nonstandard_methods: if self.request.method in ("POST", "PATCH", "PUT"): - assert self.request.body is not None + if self.request.body is None: + raise AssertionError( + 'Body must not be empty for "%s" request' + % self.request.method) else: - assert self.request.body is None + if self.request.body is not None: + raise AssertionError( + 'Body must be empty for "%s" request' + % self.request.method) if self.request.body is not None: self.request.headers["Content-Length"] = str(len( self.request.body)) diff --git a/libs/tornado/speedups.c b/libs/tornado/speedups.c new file mode 100644 index 0000000..8a316c5 --- /dev/null +++ b/libs/tornado/speedups.c @@ -0,0 +1,49 @@ +#include + +static PyObject* websocket_mask(PyObject* self, PyObject* args) { + const char* mask; + int mask_len; + const char* data; + int data_len; + int i; + + if (!PyArg_ParseTuple(args, "s#s#", &mask, &mask_len, &data, &data_len)) { + return NULL; + } + + PyObject* result = PyBytes_FromStringAndSize(NULL, data_len); + if (!result) { + return NULL; + } + char* buf = PyBytes_AsString(result); + for (i = 0; i < data_len; i++) { + buf[i] = data[i] ^ mask[i % 4]; + } + + return result; +} + +static PyMethodDef methods[] = { + {"websocket_mask", websocket_mask, 
METH_VARARGS, ""}, + {NULL, NULL, 0, NULL} +}; + +#if PY_MAJOR_VERSION >= 3 +static struct PyModuleDef speedupsmodule = { + PyModuleDef_HEAD_INIT, + "speedups", + NULL, + -1, + methods +}; + +PyMODINIT_FUNC +PyInit_speedups() { + return PyModule_Create(&speedupsmodule); +} +#else // Python 2.x +PyMODINIT_FUNC +initspeedups() { + Py_InitModule("tornado.speedups", methods); +} +#endif diff --git a/libs/tornado/tcpserver.py b/libs/tornado/tcpserver.py index 8473a21..c077373 100755 --- a/libs/tornado/tcpserver.py +++ b/libs/tornado/tcpserver.py @@ -180,7 +180,8 @@ class TCPServer(object): between any server code. Note that multiple processes are not compatible with the autoreload - module (or the ``debug=True`` option to `tornado.web.Application`). + module (or the ``autoreload=True`` option to `tornado.web.Application` + which defaults to True when ``debug=True``). When using multiple processes, no IOLoops can be created or referenced until after the call to ``TCPServer.start(n)``. """ diff --git a/libs/tornado/web.py b/libs/tornado/web.py index 5f8d609..b6d7e97 100755 --- a/libs/tornado/web.py +++ b/libs/tornado/web.py @@ -250,7 +250,7 @@ class RequestHandler(object): not self.request.connection.no_keep_alive): conn_header = self.request.headers.get("Connection") if conn_header and (conn_header.lower() == "keep-alive"): - self.set_header("Connection", "Keep-Alive") + self._headers["Connection"] = "Keep-Alive" self._write_buffer = [] self._status_code = 200 self._reason = httputil.responses[200] @@ -348,12 +348,7 @@ class RequestHandler(object): The returned value is always unicode. """ - args = self.get_arguments(name, strip=strip) - if not args: - if default is self._ARG_DEFAULT: - raise MissingArgumentError(name) - return default - return args[-1] + return self._get_argument(name, default, self.request.arguments, strip) def get_arguments(self, name, strip=True): """Returns a list of the arguments with the given name. @@ -362,9 +357,73 @@ class RequestHandler(object): The returned values are always unicode. """ + return self._get_arguments(name, self.request.arguments, strip) + + def get_body_argument(self, name, default=_ARG_DEFAULT, strip=True): + """Returns the value of the argument with the given name + from the request body. + + If default is not provided, the argument is considered to be + required, and we raise a `MissingArgumentError` if it is missing. + + If the argument appears in the url more than once, we return the + last value. + + The returned value is always unicode. + + .. versionadded:: 3.2 + """ + return self._get_argument(name, default, self.request.body_arguments, strip) + + def get_body_arguments(self, name, strip=True): + """Returns a list of the body arguments with the given name. + + If the argument is not present, returns an empty list. + + The returned values are always unicode. + + .. versionadded:: 3.2 + """ + return self._get_arguments(name, self.request.body_arguments, strip) + + def get_query_argument(self, name, default=_ARG_DEFAULT, strip=True): + """Returns the value of the argument with the given name + from the request query string. + + If default is not provided, the argument is considered to be + required, and we raise a `MissingArgumentError` if it is missing. + + If the argument appears in the url more than once, we return the + last value. + + The returned value is always unicode. + + .. 
versionadded:: 3.2 + """ + return self._get_argument(name, default, self.request.query_arguments, strip) + + def get_query_arguments(self, name, strip=True): + """Returns a list of the query arguments with the given name. + + If the argument is not present, returns an empty list. + + The returned values are always unicode. + + .. versionadded:: 3.2 + """ + return self._get_arguments(name, self.request.query_arguments, strip) + + def _get_argument(self, name, default, source, strip=True): + args = self._get_arguments(name, source, strip=strip) + if not args: + if default is self._ARG_DEFAULT: + raise MissingArgumentError(name) + return default + return args[-1] + def _get_arguments(self, name, source, strip=True): values = [] - for v in self.request.arguments.get(name, []): + for v in source.get(name, []): v = self.decode_argument(v, name=name) if isinstance(v, unicode_type): # Get rid of any weird control chars (unless decoding gave @@ -838,7 +897,7 @@ class RequestHandler(object): else: self.finish(self.get_error_html(status_code, **kwargs)) return - if self.settings.get("debug") and "exc_info" in kwargs: + if self.settings.get("serve_traceback") and "exc_info" in kwargs: # in debug mode, try to send a traceback self.set_header('Content-Type', 'text/plain') for line in traceback.format_exception(*kwargs["exc_info"]): @@ -1318,6 +1377,12 @@ def asynchronous(method): if not self._finished: self.finish() IOLoop.current().add_future(result, future_complete) + # Once we have done this, hide the Future from our + # caller (i.e. RequestHandler._when_complete), which + # would otherwise set up its own callback and + # exception handler (resulting in exceptions being + # logged twice). + return None return result return wrapper @@ -1383,10 +1448,16 @@ class Application(object): or (regexp, request_class) tuples. When we receive requests, we iterate over the list in order and instantiate an instance of the first request class whose regexp matches the request path. + The request class can be specified as either a class object or a + (fully-qualified) name. + + Each tuple can contain additional elements, which correspond to the + arguments to the `URLSpec` constructor. (Prior to Tornado 3.2, this + only tuples of two or three elements were allowed). - Each tuple can contain an optional third element, which should be - a dictionary if it is present. That dictionary is passed as - keyword arguments to the contructor of the handler. This pattern + A dictionary may be passed as the third element of the tuple, + which will be used as keyword arguments to the handler's + constructor and `~RequestHandler.initialize` method. This pattern is used for the `StaticFileHandler` in this example (note that a `StaticFileHandler` can be installed automatically with the static_path setting described below):: @@ -1409,6 +1480,7 @@ class Application(object): and ``/robots.txt`` from the same directory. A custom subclass of `StaticFileHandler` can be specified with the ``static_handler_class`` setting. 
+ """ def __init__(self, handlers=None, default_host="", transforms=None, wsgi=False, **settings): @@ -1447,8 +1519,14 @@ class Application(object): if handlers: self.add_handlers(".*$", handlers) + if self.settings.get('debug'): + self.settings.setdefault('autoreload', True) + self.settings.setdefault('compiled_template_cache', False) + self.settings.setdefault('static_hash_cache', False) + self.settings.setdefault('serve_traceback', True) + # Automatically reload modified modules - if self.settings.get("debug") and not wsgi: + if self.settings.get('autoreload') and not wsgi: from tornado import autoreload autoreload.start() @@ -1493,20 +1571,8 @@ class Application(object): for spec in host_handlers: if isinstance(spec, (tuple, list)): - assert len(spec) in (2, 3) - pattern = spec[0] - handler = spec[1] - - if isinstance(handler, str): - # import the Module and instantiate the class - # Must be a fully qualified name (module.ClassName) - handler = import_object(handler) - - if len(spec) == 3: - kwargs = spec[2] - else: - kwargs = {} - spec = URLSpec(pattern, handler, kwargs) + assert len(spec) in (2, 3, 4) + spec = URLSpec(*spec) handlers.append(spec) if spec.name: if spec.name in self.named_handlers: @@ -1597,14 +1663,23 @@ class Application(object): args = [unquote(s) for s in match.groups()] break if not handler: - handler = ErrorHandler(self, request, status_code=404) + if self.settings.get('default_handler_class'): + handler_class = self.settings['default_handler_class'] + handler_args = self.settings.get( + 'default_handler_args', {}) + else: + handler_class = ErrorHandler + handler_args = dict(status_code=404) + handler = handler_class(self, request, **handler_args) - # In debug mode, re-compile templates and reload static files on every + # If template cache is disabled (usually in the debug mode), + # re-compile templates and reload static files on every # request so you don't need to restart to see changes - if self.settings.get("debug"): + if not self.settings.get("compiled_template_cache", True): with RequestHandler._template_loader_lock: for loader in RequestHandler._template_loaders.values(): loader.reset() + if not self.settings.get('static_hash_cache', True): StaticFileHandler.reset() handler._execute(transforms, *args, **kwargs) @@ -2454,7 +2529,7 @@ class _UIModuleNamespace(object): class URLSpec(object): """Specifies mappings between URLs and handlers.""" - def __init__(self, pattern, handler_class, kwargs=None, name=None): + def __init__(self, pattern, handler, kwargs=None, name=None): """Parameters: * ``pattern``: Regular expression to be matched. 
Any groups @@ -2475,7 +2550,13 @@ class URLSpec(object): assert len(self.regex.groupindex) in (0, self.regex.groups), \ ("groups in url regexes must either be all named or all " "positional: %r" % self.regex.pattern) - self.handler_class = handler_class + + if isinstance(handler, str): + # import the Module and instantiate the class + # Must be a fully qualified name (module.ClassName) + handler = import_object(handler) + + self.handler_class = handler self.kwargs = kwargs or {} self.name = name self._path, self._group_count = self._find_groups() diff --git a/libs/tornado/websocket.py b/libs/tornado/websocket.py index 676d21b..8c2f5a6 100755 --- a/libs/tornado/websocket.py +++ b/libs/tornado/websocket.py @@ -33,7 +33,7 @@ import tornado.web from tornado.concurrent import TracebackFuture from tornado.escape import utf8, native_str -from tornado import httpclient +from tornado import httpclient, httputil from tornado.ioloop import IOLoop from tornado.iostream import StreamClosedError from tornado.log import gen_log, app_log @@ -52,6 +52,10 @@ class WebSocketError(Exception): class WebSocketClosedError(WebSocketError): + """Raised by operations on a closed connection. + + .. versionadded:: 3.2 + """ pass @@ -163,6 +167,12 @@ class WebSocketHandler(tornado.web.RequestHandler): encoded as json). If the ``binary`` argument is false, the message will be sent as utf8; in binary mode any byte string is allowed. + + If the connection is already closed, raises `WebSocketClosedError`. + + .. versionchanged:: 3.2 + `WebSocketClosedError` was added (previously a closed connection + would raise an `AttributeError`) """ if self.ws_connection is None: raise WebSocketClosedError() @@ -586,7 +596,7 @@ class WebSocketProtocol13(WebSocketProtocol): frame += struct.pack("!BQ", 127 | mask_bit, l) if self.mask_outgoing: mask = os.urandom(4) - data = mask + self._apply_mask(mask, data) + data = mask + _websocket_mask(mask, data) frame += data self.stream.write(frame) @@ -671,21 +681,8 @@ class WebSocketProtocol13(WebSocketProtocol): except StreamClosedError: self._abort() - def _apply_mask(self, mask, data): - mask = array.array("B", mask) - unmasked = array.array("B", data) - for i in xrange(len(data)): - unmasked[i] = unmasked[i] ^ mask[i % 4] - if hasattr(unmasked, 'tobytes'): - # tostring was deprecated in py32. It hasn't been removed, - # but since we turn on deprecation warnings in our tests - # we need to use the right one. - return unmasked.tobytes() - else: - return unmasked.tostring() - def _on_masked_frame_data(self, data): - self._on_frame_data(self._apply_mask(self._frame_mask, data)) + self._on_frame_data(_websocket_mask(self._frame_mask, data)) def _on_frame_data(self, data): if self._frame_opcode_is_control: @@ -771,7 +768,11 @@ class WebSocketProtocol13(WebSocketProtocol): class WebSocketClientConnection(simple_httpclient._HTTPConnection): - """WebSocket client connection.""" + """WebSocket client connection. + + This class should not be instantiated directly; use the + `websocket_connect` function instead. + """ def __init__(self, io_loop, request): self.connect_future = TracebackFuture() self.read_future = None @@ -793,9 +794,19 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection): io_loop, None, request, lambda: None, self._on_http_response, 104857600, self.resolver) + def close(self): + """Closes the websocket connection. + + .. 
versionadded:: 3.2 + """ + if self.protocol is not None: + self.protocol.close() + self.protocol = None + def _on_close(self): self.on_message(None) self.resolver.close() + super(WebSocketClientConnection, self)._on_close() def _on_http_response(self, response): if not self.connect_future.done(): @@ -859,13 +870,54 @@ def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None): Takes a url and returns a Future whose result is a `WebSocketClientConnection`. + + .. versionchanged:: 3.2 + Also accepts ``HTTPRequest`` objects in place of urls. """ if io_loop is None: io_loop = IOLoop.current() - request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout) + if isinstance(url, httpclient.HTTPRequest): + assert connect_timeout is None + request = url + # Copy and convert the headers dict/object (see comments in + # AsyncHTTPClient.fetch) + request.headers = httputil.HTTPHeaders(request.headers) + else: + request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout) request = httpclient._RequestProxy( request, httpclient.HTTPRequest._DEFAULTS) conn = WebSocketClientConnection(io_loop, request) if callback is not None: io_loop.add_future(conn.connect_future, callback) return conn.connect_future + +def _websocket_mask_python(mask, data): + """Websocket masking function. + + `mask` is a `bytes` object of length 4; `data` is a `bytes` object of any length. + Returns a `bytes` object of the same length as `data` with the mask applied + as specified in section 5.3 of RFC 6455. + + This pure-python implementation may be replaced by an optimized version when available. + """ + mask = array.array("B", mask) + unmasked = array.array("B", data) + for i in xrange(len(data)): + unmasked[i] = unmasked[i] ^ mask[i % 4] + if hasattr(unmasked, 'tobytes'): + # tostring was deprecated in py32. It hasn't been removed, + # but since we turn on deprecation warnings in our tests + # we need to use the right one. + return unmasked.tobytes() + else: + return unmasked.tostring() + +if os.environ.get('TORNADO_NO_EXTENSION'): + # This environment variable exists to make it easier to do performance comparisons; + # it's not guaranteed to remain supported in the future. + _websocket_mask = _websocket_mask_python +else: + try: + from tornado.speedups import websocket_mask as _websocket_mask + except ImportError: + _websocket_mask = _websocket_mask_python diff --git a/libs/tornado/wsgi.py b/libs/tornado/wsgi.py index 5e25a56..8e5dded 100755 --- a/libs/tornado/wsgi.py +++ b/libs/tornado/wsgi.py @@ -33,6 +33,7 @@ from __future__ import absolute_import, division, print_function, with_statement import sys import time +import copy import tornado from tornado import escape @@ -142,11 +143,14 @@ class HTTPRequest(object): self.path += urllib_parse.quote(from_wsgi_str(environ.get("PATH_INFO", ""))) self.uri = self.path self.arguments = {} + self.query_arguments = {} + self.body_arguments = {} self.query = environ.get("QUERY_STRING", "") if self.query: self.uri += "?" 
+ self.query self.arguments = parse_qs_bytes(native_str(self.query), keep_blank_values=True) + self.query_arguments = copy.deepcopy(self.arguments) self.version = "HTTP/1.1" self.headers = httputil.HTTPHeaders() if environ.get("CONTENT_TYPE"): @@ -171,7 +175,10 @@ class HTTPRequest(object): # Parse request body self.files = {} httputil.parse_body_arguments(self.headers.get("Content-Type", ""), - self.body, self.arguments, self.files) + self.body, self.body_arguments, self.files) + + for k, v in self.body_arguments.items(): + self.arguments.setdefault(k, []).extend(v) self._start_time = time.time() self._finish_time = None
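
The ``query_arguments`` / ``body_arguments`` split introduced in httpserver.py and wsgi.py is exposed on ``RequestHandler`` through the new ``get_query_argument(s)`` / ``get_body_argument(s)`` accessors in web.py. A minimal sketch of how the three views differ; the handler, route and field names are invented for illustration::

    import tornado.ioloop
    import tornado.web

    class SearchHandler(tornado.web.RequestHandler):
        def post(self):
            # e.g. POST /search?page=2 with body "q=tornado"
            page = self.get_query_argument("page", default="1")  # query string only
            term = self.get_body_argument("q")                   # request body only
            both = self.get_argument("q")                        # merged view, as before
            self.write({"page": page, "q": term, "consistent": both == term})

    if __name__ == "__main__":
        application = tornado.web.Application([(r"/search", SearchHandler)])
        application.listen(8888)
        tornado.ioloop.IOLoop.instance().start()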
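
Several of the ``tornado.web.Application`` changes compose naturally: handler classes may now be given as fully qualified names, URL specs may carry up to four ``URLSpec`` arguments, unmatched requests may be routed to a ``default_handler_class``, and the individual debug-mode settings may be overridden independently of ``debug``. A sketch assuming a hypothetical ``myapp.handlers`` module::

    import tornado.web

    class NotFoundHandler(tornado.web.RequestHandler):
        def prepare(self):
            raise tornado.web.HTTPError(404)

    application = tornado.web.Application(
        [
            # Handler given as a fully qualified name; resolved via import_object.
            (r"/", "myapp.handlers.MainHandler"),
            # Four-element spec: pattern, handler, kwargs, name.
            (r"/user/(\d+)", "myapp.handlers.UserHandler", None, "user"),
        ],
        default_handler_class=NotFoundHandler,  # replaces the built-in 404 ErrorHandler
        debug=True,        # defaults serve_traceback on, disables template/static caches
        autoreload=False,  # ...but keep autoreload off, as described in autoreload.py
    )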
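
On the client side, ``websocket_connect`` now accepts a prepared ``HTTPRequest`` in place of a URL (useful for custom headers) and ``WebSocketClientConnection`` gained an explicit ``close()``. A sketch; the endpoint, the header value and the echoing server are assumptions::

    from tornado import gen, httpclient, ioloop, websocket

    @gen.coroutine
    def talk():
        request = httpclient.HTTPRequest(
            "ws://localhost:8888/chat",                       # placeholder endpoint
            headers={"Authorization": "Bearer placeholder"})  # placeholder header
        conn = yield websocket.websocket_connect(request)
        conn.write_message("hello")
        reply = yield conn.read_message()  # assumes the server sends something back
        conn.close()                       # new in this version
        raise gen.Return(reply)

    if __name__ == "__main__":
        print(ioloop.IOLoop.instance().run_sync(talk))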
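
The new ``tornado.platform.asyncio`` module only documents how to run the test suite against it, so the wiring below is an assumption rather than documented usage: on Python 3.4, install ``AsyncIOMainLoop`` as the singleton IOLoop (``install()`` is inherited from the ``IOLoop`` base class) and drive the shared asyncio event loop directly::

    import asyncio

    import tornado.web
    from tornado.platform.asyncio import AsyncIOMainLoop

    class MainHandler(tornado.web.RequestHandler):
        def get(self):
            self.write("served from the asyncio event loop")

    AsyncIOMainLoop().install()   # Tornado callbacks now run on asyncio's loop
    application = tornado.web.Application([(r"/", MainHandler)])
    application.listen(8888)
    asyncio.get_event_loop().run_forever()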
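
The note added to the ``HTTPRequest`` docstring says that with ``curl_httpclient`` the ``ca_certs``, ``client_key``, ``client_cert`` and ``network_interface`` options should be passed on every request. A sketch of that pattern, with placeholder certificate paths::

    from tornado import httpclient

    httpclient.AsyncHTTPClient.configure(
        "tornado.curl_httpclient.CurlAsyncHTTPClient")
    client = httpclient.AsyncHTTPClient()

    def fetch_secure(url, callback):
        # Pass the TLS options explicitly each time so pycurl never falls back
        # to values carried over from an earlier request.
        request = httpclient.HTTPRequest(
            url,
            ca_certs="certs/ca-bundle.pem",  # placeholder path
            client_cert="certs/client.pem",  # placeholder path
            client_key="certs/client.key",   # placeholder path
        )
        client.fetch(request, callback)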