
Update lib: Tornado

pull/3944/head
Ruud, 11 years ago
parent commit: e0dffe20a4
  1. libs/tornado/__init__.py (4 lines changed)
  2. libs/tornado/auth.py (8 lines changed)
  3. libs/tornado/concurrent.py (6 lines changed)
  4. libs/tornado/curl_httpclient.py (27 lines changed)
  5. libs/tornado/gen.py (69 lines changed)
  6. libs/tornado/http1connection.py (74 lines changed)
  7. libs/tornado/httpclient.py (86 lines changed)
  8. libs/tornado/httpserver.py (11 lines changed)
  9. libs/tornado/httputil.py (28 lines changed)
  10. libs/tornado/ioloop.py (144 lines changed)
  11. libs/tornado/iostream.py (55 lines changed)
  12. libs/tornado/log.py (2 lines changed)
  13. libs/tornado/netutil.py (3 lines changed)
  14. libs/tornado/platform/asyncio.py (34 lines changed)
  15. libs/tornado/platform/twisted.py (31 lines changed)
  16. libs/tornado/simple_httpclient.py (33 lines changed)
  17. libs/tornado/testing.py (6 lines changed)
  18. libs/tornado/util.py (5 lines changed)
  19. libs/tornado/web.py (94 lines changed)
  20. libs/tornado/websocket.py (351 lines changed)
  21. libs/tornado/wsgi.py (8 lines changed)

libs/tornado/__init__.py (4 lines changed)

@@ -25,5 +25,5 @@ from __future__ import absolute_import, division, print_function, with_statement
 # is zero for an official release, positive for a development branch,
 # or negative for a release candidate or beta (after the base version
 # number has been incremented)
-version = "3.3.dev1"
-version_info = (3, 3, 0, -100)
+version = "4.0.1"
+version_info = (4, 0, 1, -100)
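This takes the bundled copy from the 3.3 development series to the released 4.0.1. Downstream code that must behave differently depending on which bundled Tornado is present can branch on ``version_info`` instead of parsing the version string; a minimal sketch (the printed messages are placeholders)::

    import tornado

    # (4, 0, 1, -100) >= (4, 0) is True for the copy added in this commit.
    if tornado.version_info >= (4, 0):
        print("Tornado 4.x APIs such as IOLoop.call_later are available")
    else:
        print("Older bundled Tornado:", tornado.version)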

libs/tornado/auth.py (8 lines changed)

@@ -51,7 +51,7 @@ Example usage for Google OpenID::
             response_type='code',
             extra_params={'approval_prompt': 'auto'})
-.. versionchanged:: 3.3
+.. versionchanged:: 4.0
    All of the callback interfaces in this module are now guaranteed
    to run their callback with an argument of ``None`` on error.
    Previously some functions would do this while others would simply
@@ -883,7 +883,8 @@ class FriendFeedMixin(OAuthMixin):
 class GoogleMixin(OpenIdMixin, OAuthMixin):
     """Google Open ID / OAuth authentication.
-    *Deprecated:* New applications should use `GoogleOAuth2Mixin`
+    .. deprecated:: 4.0
+       New applications should use `GoogleOAuth2Mixin`
     below instead of this class. As of May 19, 2014, Google has stopped
     supporting registration-free authentication.
@@ -1053,7 +1054,8 @@ class GoogleOAuth2Mixin(OAuth2Mixin):
 class FacebookMixin(object):
     """Facebook Connect authentication.
-    *Deprecated:* New applications should use `FacebookGraphMixin`
+    .. deprecated:: 1.1
+       New applications should use `FacebookGraphMixin`
     below instead of this class. This class does not support the
     Future-based interface seen on other classes in this module.
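The change replaces the informal *Deprecated:* notes with proper ``.. deprecated::`` directives. For reference, a login handler built on the recommended `GoogleOAuth2Mixin` looks roughly like the upstream example that the docstring excerpt above (``response_type``/``extra_params``) comes from; the redirect URI and the ``google_oauth`` settings key below are placeholders, not values from this commit::

    from tornado import auth, gen, web

    class GoogleOAuth2LoginHandler(web.RequestHandler, auth.GoogleOAuth2Mixin):
        @gen.coroutine
        def get(self):
            if self.get_argument('code', False):
                user = yield self.get_authenticated_user(
                    redirect_uri='http://your.site.com/auth/google',
                    code=self.get_argument('code'))
                # Save the user record (e.g. in a secure cookie) here.
            else:
                yield self.authorize_redirect(
                    redirect_uri='http://your.site.com/auth/google',
                    client_id=self.settings['google_oauth']['key'],
                    scope=['profile', 'email'],
                    response_type='code',
                    extra_params={'approval_prompt': 'auto'})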

libs/tornado/concurrent.py (6 lines changed)

@@ -60,7 +60,7 @@ class Future(object):
     This functionality was previously available in a separate class
     ``TracebackFuture``, which is now a deprecated alias for this class.
-    .. versionchanged:: 3.3
+    .. versionchanged:: 4.0
        `tornado.concurrent.Future` is always a thread-unsafe ``Future``
        with support for the ``exc_info`` methods. Previously it would
        be an alias for the thread-safe `concurrent.futures.Future`
@@ -152,7 +152,7 @@ class Future(object):
     def exc_info(self):
         """Returns a tuple in the same format as `sys.exc_info` or None.
-        .. versionadded:: 3.3
+        .. versionadded:: 4.0
         """
         return self._exc_info
@@ -161,7 +161,7 @@ class Future(object):
         Preserves tracebacks on Python 2.
-        .. versionadded:: 3.3
+        .. versionadded:: 4.0
         """
         self._exc_info = exc_info
         self.set_exception(exc_info[1])

libs/tornado/curl_httpclient.py (27 lines changed)

@@ -51,18 +51,6 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
         self._fds = {}
         self._timeout = None
-        try:
-            self._socket_action = self._multi.socket_action
-        except AttributeError:
-            # socket_action is found in pycurl since 7.18.2 (it's been
-            # in libcurl longer than that but wasn't accessible to
-            # python).
-            gen_log.warning("socket_action method missing from pycurl; "
-                            "falling back to socket_all. Upgrading "
-                            "libcurl and pycurl will improve performance")
-            self._socket_action = \
-                lambda fd, action: self._multi.socket_all()
         # libcurl has bugs that sometimes cause it to not report all
         # relevant file descriptors and timeouts to TIMERFUNCTION/
         # SOCKETFUNCTION.  Mitigate the effects of such bugs by
@@ -87,7 +75,6 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
         for curl in self._curls:
             curl.close()
         self._multi.close()
-        self._closed = True
         super(CurlAsyncHTTPClient, self).close()
     def fetch_impl(self, request, callback):
@@ -143,7 +130,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
             action |= pycurl.CSELECT_OUT
         while True:
             try:
-                ret, num_handles = self._socket_action(fd, action)
+                ret, num_handles = self._multi.socket_action(fd, action)
             except pycurl.error as e:
                 ret = e.args[0]
             if ret != pycurl.E_CALL_MULTI_PERFORM:
@@ -156,7 +143,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
         self._timeout = None
         while True:
             try:
-                ret, num_handles = self._socket_action(
+                ret, num_handles = self._multi.socket_action(
                     pycurl.SOCKET_TIMEOUT, 0)
             except pycurl.error as e:
                 ret = e.args[0]
@@ -224,11 +211,6 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
                 "callback": callback,
                 "curl_start_time": time.time(),
             }
-            # Disable IPv6 to mitigate the effects of this bug
-            # on curl versions <= 7.21.0
-            # http://sourceforge.net/tracker/?func=detail&aid=3017819&group_id=976&atid=100976
-            if pycurl.version_info()[2] <= 0x71500:  # 7.21.0
-                curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
             _curl_setup_request(curl, request, curl.info["buffer"],
                                 curl.info["headers"])
             self._multi.add_handle(curl)
@@ -350,7 +332,7 @@ def _curl_setup_request(curl, request, buffer, headers):
         curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
     if request.network_interface:
         curl.setopt(pycurl.INTERFACE, request.network_interface)
-    if request.use_gzip:
+    if request.decompress_response:
         curl.setopt(pycurl.ENCODING, "gzip,deflate")
     else:
         curl.setopt(pycurl.ENCODING, "none")
@@ -384,7 +366,6 @@ def _curl_setup_request(curl, request, buffer, headers):
     if request.allow_ipv6 is False:
         # Curl behaves reasonably when DNS resolution gives an ipv6 address
         # that we can't reach, so allow ipv6 unless the user asks to disable.
-        # (but see version check in _process_queue above)
         curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
     else:
         curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)
@@ -474,7 +455,7 @@ def _curl_header_callback(headers, header_line):
         try:
             (__, __, reason) = httputil.parse_response_start_line(header_line)
             header_line = "X-Http-Reason: %s" % reason
-        except httputil.HTTPInputException:
+        except httputil.HTTPInputError:
             return
         if not header_line:
             return
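With the pycurl fallback removed, ``socket_action`` is simply required, which lines up with the new minimum pycurl version documented in ``httpclient.py`` below. Selecting the curl-based client is unchanged and is done once at startup::

    from tornado.httpclient import AsyncHTTPClient

    AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")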

libs/tornado/gen.py (69 lines changed)

@@ -29,16 +29,7 @@ could be written with ``gen`` as::
 Most asynchronous functions in Tornado return a `.Future`;
 yielding this object returns its `~.Future.result`.
-For functions that do not return ``Futures``, `Task` works with any
-function that takes a ``callback`` keyword argument (most Tornado functions
-can be used in either style, although the ``Future`` style is preferred
-since it is both shorter and provides better exception handling)::
-    @gen.coroutine
-    def get(self):
-        yield gen.Task(AsyncHTTPClient().fetch, "http://example.com")
-You can also yield a list or dict of ``Futures`` and/or ``Tasks``, which will be
+You can also yield a list or dict of ``Futures``, which will be
 started at the same time and run in parallel; a list or dict of results will
 be returned when they are all finished::
@@ -54,30 +45,6 @@ be returned when they are all finished::
 .. versionchanged:: 3.2
    Dict support added.
-For more complicated interfaces, `Task` can be split into two parts:
-`Callback` and `Wait`::
-    class GenAsyncHandler2(RequestHandler):
-        @gen.coroutine
-        def get(self):
-            http_client = AsyncHTTPClient()
-            http_client.fetch("http://example.com",
-                              callback=(yield gen.Callback("key")))
-            response = yield gen.Wait("key")
-            do_something_with_response(response)
-            self.render("template.html")
-The ``key`` argument to `Callback` and `Wait` allows for multiple
-asynchronous operations to be started at different times and proceed
-in parallel: yield several callbacks with different keys, then wait
-for them once all the async operations have started.
-The result of a `Wait` or `Task` yield expression depends on how the callback
-was run. If it was called with no arguments, the result is ``None``. If
-it was called with one argument, the result is that argument. If it was
-called with more than one argument or any keyword arguments, the result
-is an `Arguments` object, which is a named tuple ``(args, kwargs)``.
 """
 from __future__ import absolute_import, division, print_function, with_statement
@@ -252,8 +219,8 @@ class Return(Exception):
 class YieldPoint(object):
     """Base class for objects that may be yielded from the generator.
-    Applications do not normally need to use this class, but it may be
-    subclassed to provide additional yielding behavior.
+    .. deprecated:: 4.0
+       Use `Futures <.Future>` instead.
     """
     def start(self, runner):
         """Called by the runner after the generator has yielded.
@@ -289,6 +256,9 @@ class Callback(YieldPoint):
     The callback may be called with zero or one arguments; if an argument
     is given it will be returned by `Wait`.
+    .. deprecated:: 4.0
+       Use `Futures <.Future>` instead.
     """
     def __init__(self, key):
         self.key = key
@@ -305,7 +275,11 @@ class Callback(YieldPoint):
 class Wait(YieldPoint):
-    """Returns the argument passed to the result of a previous `Callback`."""
+    """Returns the argument passed to the result of a previous `Callback`.
+    .. deprecated:: 4.0
+       Use `Futures <.Future>` instead.
+    """
     def __init__(self, key):
         self.key = key
@@ -326,6 +300,9 @@ class WaitAll(YieldPoint):
     a list of results in the same order.
     `WaitAll` is equivalent to yielding a list of `Wait` objects.
+    .. deprecated:: 4.0
+       Use `Futures <.Future>` instead.
     """
     def __init__(self, keys):
         self.keys = keys
@@ -341,21 +318,13 @@ class WaitAll(YieldPoint):
 def Task(func, *args, **kwargs):
-    """Runs a single asynchronous operation.
+    """Adapts a callback-based asynchronous function for use in coroutines.
     Takes a function (and optional additional arguments) and runs it with
     those arguments plus a ``callback`` keyword argument.  The argument passed
     to the callback is returned as the result of the yield expression.
-    A `Task` is equivalent to a `Callback`/`Wait` pair (with a unique
-    key generated automatically)::
-        result = yield gen.Task(func, args)
-        func(args, callback=(yield gen.Callback(key)))
-        result = yield gen.Wait(key)
-    .. versionchanged:: 3.3
+    .. versionchanged:: 4.0
       ``gen.Task`` is now a function that returns a `.Future`, instead of
       a subclass of `YieldPoint`.  It still behaves the same way when
       yielded.
@@ -464,7 +433,7 @@ def multi_future(children):
     This function is faster than the `Multi` `YieldPoint` because it does not
     require the creation of a stack context.
-    .. versionadded:: 3.3
+    .. versionadded:: 4.0
     """
     if isinstance(children, dict):
         keys = list(children.keys())
@@ -520,7 +489,7 @@ def with_timeout(timeout, future, io_loop=None):
     Currently only supports Futures, not other `YieldPoint` classes.
-    .. versionadded:: 3.3
+    .. versionadded:: 4.0
     """
     # TODO: allow yield points in addition to futures?
     # Tricky to do with stack_context semantics.
@@ -564,7 +533,7 @@ coroutines that are likely to yield Futures that are ready instantly.
 Usage: ``yield gen.moment``
-.. versionadded:: 3.3
+.. versionadded:: 4.0
 """
 moment.set_result(None)
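With the ``Callback``/``Wait`` documentation gone, the module docstring now steers everything toward ``Future``-based coroutines. A short sketch of the two patterns the updated text still describes, parallel lists of futures and ``with_timeout`` (the URLs are placeholders)::

    from datetime import timedelta
    from tornado import gen
    from tornado.httpclient import AsyncHTTPClient

    @gen.coroutine
    def fetch_both():
        client = AsyncHTTPClient()
        # Yielding a list of Futures starts the fetches in parallel and
        # resumes once both are finished.
        first, second = yield [client.fetch("http://example.com/a"),
                               client.fetch("http://example.com/b")]
        raise gen.Return((first.code, second.code))

    @gen.coroutine
    def fetch_with_deadline():
        client = AsyncHTTPClient()
        # with_timeout (new in 4.0) wraps a single Future with a deadline.
        response = yield gen.with_timeout(timedelta(seconds=5),
                                          client.fetch("http://example.com"))
        raise gen.Return(response.code)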

libs/tornado/http1connection.py (74 lines changed)

@@ -16,11 +16,13 @@
 """Client and server implementations of HTTP/1.x.
-.. versionadded:: 3.3
+.. versionadded:: 4.0
 """
 from __future__ import absolute_import, division, print_function, with_statement
+import re
 from tornado.concurrent import Future
 from tornado.escape import native_str, utf8
 from tornado import gen
@@ -56,7 +58,7 @@ class HTTP1ConnectionParameters(object):
     """
     def __init__(self, no_keep_alive=False, chunk_size=None,
                  max_header_size=None, header_timeout=None, max_body_size=None,
-                 body_timeout=None, use_gzip=False):
+                 body_timeout=None, decompress=False):
         """
         :arg bool no_keep_alive: If true, always close the connection after
             one request.
@@ -65,7 +67,8 @@ class HTTP1ConnectionParameters(object):
         :arg float header_timeout: how long to wait for all headers (seconds)
         :arg int max_body_size: maximum amount of data for body
         :arg float body_timeout: how long to wait while reading body (seconds)
-        :arg bool use_gzip: if true, decode incoming ``Content-Encoding: gzip``
+        :arg bool decompress: if true, decode incoming
+            ``Content-Encoding: gzip``
         """
         self.no_keep_alive = no_keep_alive
         self.chunk_size = chunk_size or 65536
@@ -73,7 +76,7 @@ class HTTP1ConnectionParameters(object):
         self.header_timeout = header_timeout
         self.max_body_size = max_body_size
         self.body_timeout = body_timeout
-        self.use_gzip = use_gzip
+        self.decompress = decompress
 class HTTP1Connection(httputil.HTTPConnection):
@@ -141,7 +144,7 @@ class HTTP1Connection(httputil.HTTPConnection):
         Returns a `.Future` that resolves to None after the full response has
         been read.
         """
-        if self.params.use_gzip:
+        if self.params.decompress:
             delegate = _GzipMessageDelegate(delegate, self.params.chunk_size)
         return self._read_message(delegate)
@@ -190,8 +193,17 @@ class HTTP1Connection(httputil.HTTPConnection):
                     skip_body = True
                 code = start_line.code
                 if code == 304:
+                    # 304 responses may include the content-length header
+                    # but do not actually have a body.
+                    # http://tools.ietf.org/html/rfc7230#section-3.3
                     skip_body = True
                 if code >= 100 and code < 200:
+                    # 1xx responses should never indicate the presence of
+                    # a body.
+                    if ('Content-Length' in headers or
+                            'Transfer-Encoding' in headers):
+                        raise httputil.HTTPInputError(
+                            "Response code %d cannot have body" % code)
                     # TODO: client delegates will get headers_received twice
                     # in the case of a 100-continue.  Document or change?
                     yield self._read_message(delegate)
@@ -200,7 +212,8 @@ class HTTP1Connection(httputil.HTTPConnection):
                     not self._write_finished):
                 self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
             if not skip_body:
-                body_future = self._read_body(headers, delegate)
+                body_future = self._read_body(
+                    start_line.code if self.is_client else 0, headers, delegate)
                 if body_future is not None:
                     if self._body_timeout is None:
                         yield body_future
@@ -231,7 +244,7 @@ class HTTP1Connection(httputil.HTTPConnection):
                 self.close()
                 if self.stream is None:
                     raise gen.Return(False)
-        except httputil.HTTPInputException as e:
+        except httputil.HTTPInputError as e:
             gen_log.info("Malformed HTTP message from %s: %s",
                          self.context, e)
             self.close()
@@ -258,7 +271,7 @@ class HTTP1Connection(httputil.HTTPConnection):
     def set_close_callback(self, callback):
         """Sets a callback that will be run when the connection is closed.
-        .. deprecated:: 3.3
+        .. deprecated:: 4.0
            Use `.HTTPMessageDelegate.on_connection_close` instead.
         """
         self._close_callback = stack_context.wrap(callback)
@@ -377,7 +390,7 @@ class HTTP1Connection(httputil.HTTPConnection):
             if self._expected_content_remaining < 0:
                 # Close the stream now to stop further framing errors.
                 self.stream.close()
-                raise httputil.HTTPOutputException(
+                raise httputil.HTTPOutputError(
                     "Tried to write more data than Content-Length")
         if self._chunking_output and chunk:
             # Don't write out empty chunks because that means END-OF-STREAM
@@ -412,7 +425,7 @@ class HTTP1Connection(httputil.HTTPConnection):
                 self._expected_content_remaining != 0 and
                 not self.stream.closed()):
             self.stream.close()
-            raise httputil.HTTPOutputException(
+            raise httputil.HTTPOutputError(
                 "Tried to write %d bytes less than Content-Length" %
                 self._expected_content_remaining)
         if self._chunking_output:
@@ -477,16 +490,40 @@ class HTTP1Connection(httputil.HTTPConnection):
             headers = httputil.HTTPHeaders.parse(data[eol:])
         except ValueError:
             # probably form split() if there was no ':' in the line
-            raise httputil.HTTPInputException("Malformed HTTP headers: %r" %
+            raise httputil.HTTPInputError("Malformed HTTP headers: %r" %
                                           data[eol:100])
         return start_line, headers
-    def _read_body(self, headers, delegate):
-        content_length = headers.get("Content-Length")
-        if content_length:
-            content_length = int(content_length)
+    def _read_body(self, code, headers, delegate):
+        if "Content-Length" in headers:
+            if "," in headers["Content-Length"]:
+                # Proxies sometimes cause Content-Length headers to get
+                # duplicated.  If all the values are identical then we can
+                # use them but if they differ it's an error.
+                pieces = re.split(r',\s*', headers["Content-Length"])
+                if any(i != pieces[0] for i in pieces):
+                    raise httputil.HTTPInputError(
+                        "Multiple unequal Content-Lengths: %r" %
+                        headers["Content-Length"])
+                headers["Content-Length"] = pieces[0]
+            content_length = int(headers["Content-Length"])
             if content_length > self._max_body_size:
-                raise httputil.HTTPInputException("Content-Length too long")
+                raise httputil.HTTPInputError("Content-Length too long")
+        else:
+            content_length = None
+        if code == 204:
+            # This response code is not allowed to have a non-empty body,
+            # and has an implicit length of zero instead of read-until-close.
+            # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3
+            if ("Transfer-Encoding" in headers or
+                    content_length not in (None, 0)):
+                raise httputil.HTTPInputError(
+                    "Response with code %d should not have body" % code)
+            content_length = 0
+        if content_length is not None:
             return self._read_fixed_body(content_length, delegate)
         if headers.get("Transfer-Encoding") == "chunked":
             return self._read_chunked_body(delegate)
@@ -515,7 +552,7 @@ class HTTP1Connection(httputil.HTTPConnection):
                 return
             total_size += chunk_len
             if total_size > self._max_body_size:
-                raise httputil.HTTPInputException("chunked body too large")
+                raise httputil.HTTPInputError("chunked body too large")
             bytes_to_read = chunk_len
             while bytes_to_read:
                 chunk = yield self.stream.read_bytes(
@@ -581,6 +618,9 @@ class _GzipMessageDelegate(httputil.HTTPMessageDelegate):
                 self._delegate.data_received(tail)
         return self._delegate.finish()
+    def on_connection_close(self):
+        return self._delegate.on_connection_close()
 class HTTP1ServerConnection(object):
     """An HTTP/1.x server."""

libs/tornado/httpclient.py (86 lines changed)

@@ -22,14 +22,20 @@ to switch to ``curl_httpclient`` for reasons such as the following:
 * ``curl_httpclient`` was the default prior to Tornado 2.0.
-Note that if you are using ``curl_httpclient``, it is highly recommended that
-you use a recent version of ``libcurl`` and ``pycurl``. Currently the minimum
-supported version is 7.18.2, and the recommended version is 7.21.1 or newer.
-It is highly recommended that your ``libcurl`` installation is built with
-asynchronous DNS resolver (threaded or c-ares), otherwise you may encounter
-various problems with request timeouts (for more information, see
+Note that if you are using ``curl_httpclient``, it is highly
+recommended that you use a recent version of ``libcurl`` and
+``pycurl``. Currently the minimum supported version of libcurl is
+7.21.1, and the minimum version of pycurl is 7.18.2. It is highly
+recommended that your ``libcurl`` installation is built with
+asynchronous DNS resolver (threaded or c-ares), otherwise you may
+encounter various problems with request timeouts (for more
+information, see
 http://curl.haxx.se/libcurl/c/curl_easy_setopt.html#CURLOPTCONNECTTIMEOUTMS
 and comments in curl_httpclient.py).
+To select ``curl_httpclient``, call `AsyncHTTPClient.configure` at startup::
+    AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
 """
 from __future__ import absolute_import, division, print_function, with_statement
@@ -110,10 +116,21 @@ class AsyncHTTPClient(Configurable):
     actually creates an instance of an implementation-specific
     subclass, and instances are reused as a kind of pseudo-singleton
     (one per `.IOLoop`).  The keyword argument ``force_instance=True``
-    can be used to suppress this singleton behavior.  Constructor
-    arguments other than ``io_loop`` and ``force_instance`` are
-    deprecated.  The implementation subclass as well as arguments to
-    its constructor can be set with the static method `configure()`
+    can be used to suppress this singleton behavior.  Unless
+    ``force_instance=True`` is used, no arguments other than
+    ``io_loop`` should be passed to the `AsyncHTTPClient` constructor.
+    The implementation subclass as well as arguments to its
+    constructor can be set with the static method `configure()`
+    All `AsyncHTTPClient` implementations support a ``defaults``
+    keyword argument, which can be used to set default values for
+    `HTTPRequest` attributes.  For example::
+        AsyncHTTPClient.configure(
+            None, defaults=dict(user_agent="MyUserAgent"))
+        # or with force_instance:
+        client = AsyncHTTPClient(force_instance=True,
+                                 defaults=dict(user_agent="MyUserAgent"))
     """
     @classmethod
     def configurable_base(cls):
@@ -133,12 +150,21 @@ class AsyncHTTPClient(Configurable):
     def __new__(cls, io_loop=None, force_instance=False, **kwargs):
         io_loop = io_loop or IOLoop.current()
-        if io_loop in cls._async_clients() and not force_instance:
-            return cls._async_clients()[io_loop]
+        if force_instance:
+            instance_cache = None
+        else:
+            instance_cache = cls._async_clients()
+        if instance_cache is not None and io_loop in instance_cache:
+            return instance_cache[io_loop]
         instance = super(AsyncHTTPClient, cls).__new__(cls, io_loop=io_loop,
                                                        **kwargs)
-        if not force_instance:
-            cls._async_clients()[io_loop] = instance
+        # Make sure the instance knows which cache to remove itself from.
+        # It can't simply call _async_clients() because we may be in
+        # __new__(AsyncHTTPClient) but instance.__class__ may be
+        # SimpleAsyncHTTPClient.
+        instance._instance_cache = instance_cache
+        if instance_cache is not None:
+            instance_cache[instance.io_loop] = instance
         return instance
     def initialize(self, io_loop, defaults=None):
@@ -146,6 +172,7 @@ class AsyncHTTPClient(Configurable):
         self.defaults = dict(HTTPRequest._DEFAULTS)
         if defaults is not None:
             self.defaults.update(defaults)
+        self._closed = False
     def close(self):
         """Destroys this HTTP client, freeing any file descriptors used.
@@ -160,8 +187,13 @@ class AsyncHTTPClient(Configurable):
         ``close()``.
         """
-        if self._async_clients().get(self.io_loop) is self:
-            del self._async_clients()[self.io_loop]
+        if self._closed:
+            return
+        self._closed = True
+        if self._instance_cache is not None:
+            if self._instance_cache.get(self.io_loop) is not self:
+                raise RuntimeError("inconsistent AsyncHTTPClient cache")
+            del self._instance_cache[self.io_loop]
     def fetch(self, request, callback=None, **kwargs):
         """Executes a request, asynchronously returning an `HTTPResponse`.
@@ -179,6 +211,8 @@ class AsyncHTTPClient(Configurable):
         Instead, you must check the response's ``error`` attribute or
         call its `~HTTPResponse.rethrow` method.
         """
+        if self._closed:
+            raise RuntimeError("fetch() called on closed AsyncHTTPClient")
         if not isinstance(request, HTTPRequest):
             request = HTTPRequest(url=request, **kwargs)
         # We may modify this (to add Host, Accept-Encoding, etc),
@@ -248,7 +282,7 @@ class HTTPRequest(object):
         request_timeout=20.0,
         follow_redirects=True,
         max_redirects=5,
-        use_gzip=True,
+        decompress_response=True,
         proxy_password='',
         allow_nonstandard_methods=False,
         validate_cert=True)
@@ -265,7 +299,7 @@ class HTTPRequest(object):
                  validate_cert=None, ca_certs=None,
                  allow_ipv6=None,
                  client_key=None, client_cert=None, body_producer=None,
-                 expect_100_continue=False):
+                 expect_100_continue=False, decompress_response=None):
         r"""All parameters except ``url`` are optional.
         :arg string url: URL to fetch
@@ -284,7 +318,7 @@ class HTTPRequest(object):
            ``curl_httpclient``.  When using ``body_producer`` it is recommended
            to pass a ``Content-Length`` in the headers as otherwise chunked
            encoding will be used, and many servers do not support chunked
-           encoding on requests.  New in Tornado 3.3
+           encoding on requests.  New in Tornado 4.0
         :arg string auth_username: Username for HTTP authentication
         :arg string auth_password: Password for HTTP authentication
         :arg string auth_mode: Authentication mode; default is "basic".
@@ -299,7 +333,11 @@ class HTTPRequest(object):
            or return the 3xx response?
         :arg int max_redirects: Limit for ``follow_redirects``
         :arg string user_agent: String to send as ``User-Agent`` header
-        :arg bool use_gzip: Request gzip encoding from the server
+        :arg bool decompress_response: Request a compressed response from
+           the server and decompress it after downloading.  Default is True.
+           New in Tornado 4.0.
+        :arg bool use_gzip: Deprecated alias for ``decompress_response``
+           since Tornado 4.0.
         :arg string network_interface: Network interface to use for request.
            ``curl_httpclient`` only; see note below.
         :arg callable streaming_callback: If set, ``streaming_callback`` will
@@ -342,7 +380,6 @@ class HTTPRequest(object):
           before sending the request body.  Only supported with
           simple_httpclient.
-
        .. note::
           When using ``curl_httpclient`` certain options may be
@@ -358,7 +395,7 @@ class HTTPRequest(object):
        .. versionadded:: 3.1
           The ``auth_mode`` argument.
-        .. versionadded:: 3.3
+        .. versionadded:: 4.0
           The ``body_producer`` and ``expect_100_continue`` arguments.
        """
        # Note that some of these attributes go through property setters
@@ -383,7 +420,10 @@ class HTTPRequest(object):
        self.follow_redirects = follow_redirects
        self.max_redirects = max_redirects
        self.user_agent = user_agent
-        self.use_gzip = use_gzip
+        if decompress_response is not None:
+            self.decompress_response = decompress_response
+        else:
+            self.decompress_response = use_gzip
        self.network_interface = network_interface
        self.streaming_callback = streaming_callback
        self.header_callback = header_callback
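In practice the two visible changes for callers are the ``defaults`` keyword documented above and the ``use_gzip`` to ``decompress_response`` rename. A minimal sketch (the user agent string and URL are placeholders)::

    from tornado.httpclient import AsyncHTTPClient, HTTPRequest
    from tornado.ioloop import IOLoop

    # Process-wide defaults applied to every HTTPRequest.
    AsyncHTTPClient.configure(None, defaults=dict(user_agent="MyUserAgent"))

    def handle(response):
        print(response.code)
        IOLoop.current().stop()

    client = AsyncHTTPClient()
    # use_gzip still works as a deprecated alias for decompress_response.
    client.fetch(HTTPRequest("http://example.com", decompress_response=True),
                 callback=handle)
    IOLoop.current().start()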

libs/tornado/httpserver.py (11 lines changed)

@@ -20,7 +20,7 @@ Typical applications have little direct interaction with the `HTTPServer`
 class except to start a server at the beginning of the process
 (and even that is often done indirectly via `tornado.web.Application.listen`).
-.. versionchanged:: 3.3
+.. versionchanged:: 4.0
    The ``HTTPRequest`` class that used to live in this module has been moved
    to `tornado.httputil.HTTPServerRequest`.  The old name remains as an alias.
@@ -128,14 +128,15 @@ class HTTPServer(TCPServer, httputil.HTTPServerConnectionDelegate):
       servers if you want to create your listening sockets in some
       way other than `tornado.netutil.bind_sockets`.
-    .. versionchanged:: 3.3
-       Added ``gzip``, ``chunk_size``, ``max_header_size``,
+    .. versionchanged:: 4.0
+       Added ``decompress_request``, ``chunk_size``, ``max_header_size``,
       ``idle_connection_timeout``, ``body_timeout``, ``max_body_size``
       arguments.  Added support for `.HTTPServerConnectionDelegate`
       instances as ``request_callback``.
    """
    def __init__(self, request_callback, no_keep_alive=False, io_loop=None,
-                 xheaders=False, ssl_options=None, protocol=None, gzip=False,
+                 xheaders=False, ssl_options=None, protocol=None,
+                 decompress_request=False,
                 chunk_size=None, max_header_size=None,
                 idle_connection_timeout=None, body_timeout=None,
                 max_body_size=None, max_buffer_size=None):
@@ -144,7 +145,7 @@ class HTTPServer(TCPServer, httputil.HTTPServerConnectionDelegate):
        self.xheaders = xheaders
        self.protocol = protocol
        self.conn_params = HTTP1ConnectionParameters(
-            use_gzip=gzip,
+            decompress=decompress_request,
            chunk_size=chunk_size,
            max_header_size=max_header_size,
            header_timeout=idle_connection_timeout or 3600,
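The old ``gzip`` flag becomes ``decompress_request`` and is forwarded to `HTTP1ConnectionParameters`. A minimal server sketch assuming a trivial handler (everything below except the renamed argument is a placeholder)::

    from tornado.httpserver import HTTPServer
    from tornado.ioloop import IOLoop
    from tornado.web import Application, RequestHandler

    class EchoHandler(RequestHandler):
        def post(self):
            # With decompress_request=True, gzipped request bodies arrive
            # here already decoded.
            self.write(self.request.body)

    server = HTTPServer(Application([(r"/", EchoHandler)]),
                        decompress_request=True)  # formerly gzip=True
    server.listen(8888)
    IOLoop.current().start()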

libs/tornado/httputil.py (28 lines changed)

@@ -319,7 +319,7 @@ class HTTPServerRequest(object):
     are typically kept open in HTTP/1.1, multiple requests can be handled
     sequentially on a single connection.
-    .. versionchanged:: 3.3
+    .. versionchanged:: 4.0
       Moved from ``tornado.httpserver.HTTPRequest``.
    """
    def __init__(self, method=None, uri=None, version="HTTP/1.0", headers=None,
@@ -352,7 +352,7 @@ class HTTPServerRequest(object):
    def supports_http_1_1(self):
        """Returns True if this request supports HTTP/1.1 semantics.
-        .. deprecated:: 3.3
+        .. deprecated:: 4.0
           Applications are less likely to need this information with the
           introduction of `.HTTPConnection`.  If you still need it, access
           the ``version`` attribute directly.
@@ -375,7 +375,7 @@ class HTTPServerRequest(object):
    def write(self, chunk, callback=None):
        """Writes the given chunk to the response stream.
-        .. deprecated:: 3.3
+        .. deprecated:: 4.0
           Use ``request.connection`` and the `.HTTPConnection` methods
           to write the response.
        """
@@ -385,7 +385,7 @@ class HTTPServerRequest(object):
    def finish(self):
        """Finishes this HTTP request on the open connection.
-        .. deprecated:: 3.3
+        .. deprecated:: 4.0
           Use ``request.connection`` and the `.HTTPConnection` methods
           to write the response.
        """
@@ -445,19 +445,19 @@ class HTTPServerRequest(object):
            self.__class__.__name__, args, dict(self.headers))
-class HTTPInputException(Exception):
+class HTTPInputError(Exception):
    """Exception class for malformed HTTP requests or responses
    from remote sources.
-    .. versionadded:: 3.3
+    .. versionadded:: 4.0
    """
    pass
-class HTTPOutputException(Exception):
+class HTTPOutputError(Exception):
    """Exception class for errors in HTTP output.
-    .. versionadded:: 3.3
+    .. versionadded:: 4.0
    """
    pass
@@ -465,7 +465,7 @@ class HTTPOutputException(Exception):
 class HTTPServerConnectionDelegate(object):
    """Implement this interface to handle requests from `.HTTPServer`.
-    .. versionadded:: 3.3
+    .. versionadded:: 4.0
    """
    def start_request(self, server_conn, request_conn):
        """This method is called by the server when a new request has started.
@@ -491,7 +491,7 @@ class HTTPServerConnectionDelegate(object):
 class HTTPMessageDelegate(object):
    """Implement this interface to handle an HTTP request or response.
-    .. versionadded:: 3.3
+    .. versionadded:: 4.0
    """
    def headers_received(self, start_line, headers):
        """Called when the HTTP headers have been received and parsed.
@@ -531,7 +531,7 @@ class HTTPMessageDelegate(object):
 class HTTPConnection(object):
    """Applications use this interface to write their responses.
-    .. versionadded:: 3.3
+    .. versionadded:: 4.0
    """
    def write_headers(self, start_line, headers, chunk=None, callback=None):
        """Write an HTTP header block.
@@ -774,9 +774,9 @@ def parse_request_start_line(line):
    try:
        method, path, version = line.split(" ")
    except ValueError:
-        raise HTTPInputException("Malformed HTTP request line")
+        raise HTTPInputError("Malformed HTTP request line")
    if not version.startswith("HTTP/"):
-        raise HTTPInputException(
+        raise HTTPInputError(
            "Malformed HTTP version in HTTP Request-Line: %r" % version)
    return RequestStartLine(method, path, version)
@@ -796,7 +796,7 @@ def parse_response_start_line(line):
    line = native_str(line)
    match = re.match("(HTTP/1.[01]) ([0-9]+) ([^\r]*)", line)
    if not match:
-        raise HTTPInputException("Error parsing response start line")
+        raise HTTPInputError("Error parsing response start line")
    return ResponseStartLine(match.group(1), int(match.group(2)),
                             match.group(3))
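Code that caught the old exception names only needs the rename; the behaviour is otherwise unchanged. For example, feeding a request line without an HTTP version still fails the same way::

    from tornado import httputil

    try:
        httputil.parse_request_start_line("GET /index.html")  # no HTTP version
    except httputil.HTTPInputError as e:  # was HTTPInputException
        print("rejected:", e)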

libs/tornado/ioloop.py (144 lines changed)

@@ -45,8 +45,7 @@ import traceback
 from tornado.concurrent import TracebackFuture, is_future
 from tornado.log import app_log, gen_log
 from tornado import stack_context
-from tornado.util import Configurable
-from tornado.util import errno_from_exception
+from tornado.util import Configurable, errno_from_exception, timedelta_to_seconds
 try:
     import signal
@@ -162,7 +161,7 @@ class IOLoop(Configurable):
     def clear_instance():
        """Clear the global `IOLoop` instance.
-        .. versionadded:: 3.3
+        .. versionadded:: 4.0
        """
        if hasattr(IOLoop, "_instance"):
            del IOLoop._instance
@@ -267,7 +266,7 @@ class IOLoop(Configurable):
        When an event occurs, ``handler(fd, events)`` will be run.
-        .. versionchanged:: 3.3
+        .. versionchanged:: 4.0
           Added the ability to pass file-like objects in addition to
           raw file descriptors.
        """
@@ -276,7 +275,7 @@ class IOLoop(Configurable):
    def update_handler(self, fd, events):
        """Changes the events we listen for ``fd``.
-        .. versionchanged:: 3.3
+        .. versionchanged:: 4.0
           Added the ability to pass file-like objects in addition to
           raw file descriptors.
        """
@@ -285,7 +284,7 @@ class IOLoop(Configurable):
    def remove_handler(self, fd):
        """Stop listening for events on ``fd``.
-        .. versionchanged:: 3.3
+        .. versionchanged:: 4.0
           Added the ability to pass file-like objects in addition to
           raw file descriptors.
        """
@@ -433,7 +432,7 @@ class IOLoop(Configurable):
        """
        return time.time()
-    def add_timeout(self, deadline, callback):
+    def add_timeout(self, deadline, callback, *args, **kwargs):
        """Runs the ``callback`` at the time ``deadline`` from the I/O loop.
        Returns an opaque handle that may be passed to
@@ -442,13 +441,59 @@ class IOLoop(Configurable):
        ``deadline`` may be a number denoting a time (on the same
        scale as `IOLoop.time`, normally `time.time`), or a
        `datetime.timedelta` object for a deadline relative to the
-        current time.
+        current time.  Since Tornado 4.0, `call_later` is a more
+        convenient alternative for the relative case since it does not
+        require a timedelta object.
        Note that it is not safe to call `add_timeout` from other threads.
        Instead, you must use `add_callback` to transfer control to the
        `IOLoop`'s thread, and then call `add_timeout` from there.
+        Subclasses of IOLoop must implement either `add_timeout` or
+        `call_at`; the default implementations of each will call
+        the other.  `call_at` is usually easier to implement, but
+        subclasses that wish to maintain compatibility with Tornado
+        versions prior to 4.0 must use `add_timeout` instead.
+        .. versionchanged:: 4.0
+           Now passes through ``*args`` and ``**kwargs`` to the callback.
        """
-        raise NotImplementedError()
+        if isinstance(deadline, numbers.Real):
+            return self.call_at(deadline, callback, *args, **kwargs)
+        elif isinstance(deadline, datetime.timedelta):
+            return self.call_at(self.time() + timedelta_to_seconds(deadline),
+                                callback, *args, **kwargs)
+        else:
+            raise TypeError("Unsupported deadline %r" % deadline)
+
+    def call_later(self, delay, callback, *args, **kwargs):
+        """Runs the ``callback`` after ``delay`` seconds have passed.
+        Returns an opaque handle that may be passed to `remove_timeout`
+        to cancel.  Note that unlike the `asyncio` method of the same
+        name, the returned object does not have a ``cancel()`` method.
+        See `add_timeout` for comments on thread-safety and subclassing.
+        .. versionadded:: 4.0
+        """
+        return self.call_at(self.time() + delay, callback, *args, **kwargs)
+
+    def call_at(self, when, callback, *args, **kwargs):
+        """Runs the ``callback`` at the absolute time designated by ``when``.
+        ``when`` must be a number using the same reference point as
+        `IOLoop.time`.
+        Returns an opaque handle that may be passed to `remove_timeout`
+        to cancel.  Note that unlike the `asyncio` method of the same
+        name, the returned object does not have a ``cancel()`` method.
+        See `add_timeout` for comments on thread-safety and subclassing.
+        .. versionadded:: 4.0
+        """
+        return self.add_timeout(when, callback, *args, **kwargs)
    def remove_timeout(self, timeout):
        """Cancels a pending timeout.
@@ -486,6 +531,19 @@ class IOLoop(Configurable):
        """
        raise NotImplementedError()
+    def spawn_callback(self, callback, *args, **kwargs):
+        """Calls the given callback on the next IOLoop iteration.
+        Unlike all other callback-related methods on IOLoop,
+        ``spawn_callback`` does not associate the callback with its caller's
+        ``stack_context``, so it is suitable for fire-and-forget callbacks
+        that should not interfere with the caller.
+        .. versionadded:: 4.0
+        """
+        with stack_context.NullContext():
+            self.add_callback(callback, *args, **kwargs)
    def add_future(self, future, callback):
        """Schedules a callback on the ``IOLoop`` when the given
        `.Future` is finished.
@@ -504,7 +562,13 @@ class IOLoop(Configurable):
        For use in subclasses.
        """
        try:
-            callback()
+            ret = callback()
+            if ret is not None and is_future(ret):
+                # Functions that return Futures typically swallow all
+                # exceptions and store them in the Future.  If a Future
+                # makes it out to the IOLoop, ensure its exception (if any)
+                # gets logged too.
+                self.add_future(ret, lambda f: f.result())
        except Exception:
            self.handle_callback_exception(callback)
@@ -534,7 +598,7 @@ class IOLoop(Configurable):
        This method is provided for use by `IOLoop` subclasses and should
        not generally be used by application code.
-        .. versionadded:: 3.3
+        .. versionadded:: 4.0
        """
        try:
            return fd.fileno(), fd
@@ -551,7 +615,7 @@ class IOLoop(Configurable):
        implementations of ``IOLoop.close(all_fds=True)`` and should
        not generally be used by application code.
-        .. versionadded:: 3.3
+        .. versionadded:: 4.0
        """
        try:
            try:
@@ -680,19 +744,16 @@ class PollIOLoop(IOLoop):
        try:
            while True:
-                poll_timeout = _POLL_TIMEOUT
                # Prevent IO event starvation by delaying new callbacks
                # to the next iteration of the event loop.
                with self._callback_lock:
                    callbacks = self._callbacks
                    self._callbacks = []
-                for callback in callbacks:
-                    self._run_callback(callback)
-                # Closures may be holding on to a lot of memory, so allow
-                # them to be freed before we go into our poll wait.
-                callbacks = callback = None
+                # Add any timeouts that have come due to the callback list.
+                # Do not run anything until we have determined which ones
+                # are ready, so timeouts that call add_timeout cannot
+                # schedule anything in this iteration.
                if self._timeouts:
                    now = self.time()
                    while self._timeouts:
@@ -702,11 +763,9 @@ class PollIOLoop(IOLoop):
                            self._cancellations -= 1
                        elif self._timeouts[0].deadline <= now:
                            timeout = heapq.heappop(self._timeouts)
-                            self._run_callback(timeout.callback)
+                            callbacks.append(timeout.callback)
                            del timeout
                        else:
-                            seconds = self._timeouts[0].deadline - now
-                            poll_timeout = min(seconds, poll_timeout)
                            break
                    if (self._cancellations > 512
                            and self._cancellations > (len(self._timeouts) >> 1)):
@@ -717,10 +776,25 @@ class PollIOLoop(IOLoop):
                                          if x.callback is not None]
                        heapq.heapify(self._timeouts)
+                for callback in callbacks:
+                    self._run_callback(callback)
+                # Closures may be holding on to a lot of memory, so allow
+                # them to be freed before we go into our poll wait.
+                callbacks = callback = None
                if self._callbacks:
                    # If any callbacks or timeouts called add_callback,
                    # we don't want to wait in poll() before we run them.
                    poll_timeout = 0.0
+                elif self._timeouts:
+                    # If there are any timeouts, schedule the first one.
+                    # Use self.time() instead of 'now' to account for time
+                    # spent running callbacks.
+                    poll_timeout = self._timeouts[0].deadline - self.time()
+                    poll_timeout = max(0, min(poll_timeout, _POLL_TIMEOUT))
+                else:
+                    # No timeouts and no callbacks, so use the default.
+                    poll_timeout = _POLL_TIMEOUT
                if not self._running:
                    break
@@ -784,8 +858,11 @@ class PollIOLoop(IOLoop):
    def time(self):
        return self.time_func()
-    def add_timeout(self, deadline, callback):
-        timeout = _Timeout(deadline, stack_context.wrap(callback), self)
+    def call_at(self, deadline, callback, *args, **kwargs):
+        timeout = _Timeout(
+            deadline,
+            functools.partial(stack_context.wrap(callback), *args, **kwargs),
+            self)
        heapq.heappush(self._timeouts, timeout)
        return timeout
@@ -840,24 +917,12 @@ class _Timeout(object):
    __slots__ = ['deadline', 'callback', 'tiebreaker']
    def __init__(self, deadline, callback, io_loop):
-        if isinstance(deadline, numbers.Real):
-            self.deadline = deadline
-        elif isinstance(deadline, datetime.timedelta):
-            now = io_loop.time()
-            try:
-                self.deadline = now + deadline.total_seconds()
-            except AttributeError:  # py2.6
-                self.deadline = now + _Timeout.timedelta_to_seconds(deadline)
-        else:
+        if not isinstance(deadline, numbers.Real):
            raise TypeError("Unsupported deadline %r" % deadline)
+        self.deadline = deadline
        self.callback = callback
        self.tiebreaker = next(io_loop._timeout_counter)
-    @staticmethod
-    def timedelta_to_seconds(td):
-        """Equivalent to td.total_seconds() (introduced in python 2.7)."""
-        return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
    # Comparison methods to sort by deadline, with object id as a tiebreaker
    # to guarantee a consistent ordering.  The heapq module uses __le__
    # in python2.5, and __lt__ in 2.6+ (sort() and most other comparisons
@@ -904,9 +969,10 @@ class PeriodicCallback(object):
        if not self._running:
            return
        try:
-            self.callback()
+            return self.callback()
        except Exception:
            self.io_loop.handle_callback_exception(self.callback)
+        finally:
            self._schedule_next()
    def _schedule_next(self):
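The scheduling API grows ``call_later``, ``call_at`` and ``spawn_callback``, and ``add_timeout`` now forwards extra arguments to the callback. A small sketch of the new entry points (the printed strings and delays are arbitrary)::

    from tornado.ioloop import IOLoop

    loop = IOLoop.current()

    def say(what):
        print(what)

    loop.call_later(1.5, say, "relative delay, no timedelta needed")
    loop.call_at(loop.time() + 3, say, "absolute deadline")
    loop.add_timeout(loop.time() + 2, say, "add_timeout now forwards args")
    # Fire-and-forget, outside the caller's stack_context.
    loop.spawn_callback(say, "spawned")

    loop.call_later(4, loop.stop)
    loop.start()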

libs/tornado/iostream.py (55 lines changed)

@@ -57,11 +57,24 @@
 # some they differ.
 _ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN)
+if hasattr(errno, "WSAEWOULDBLOCK"):
+    _ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,)
 # These errnos indicate that a connection has been abruptly terminated.
 # They should be caught and handled less noisily than other errors.
-_ERRNO_CONNRESET = (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE)
+_ERRNO_CONNRESET = (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE,
+                    errno.ETIMEDOUT)
+if hasattr(errno, "WSAECONNRESET"):
+    _ERRNO_CONNRESET += (errno.WSAECONNRESET, errno.WSAECONNABORTED, errno.WSAETIMEDOUT)
+# More non-portable errnos:
+_ERRNO_INPROGRESS = (errno.EINPROGRESS,)
+if hasattr(errno, "WSAEINPROGRESS"):
+    _ERRNO_INPROGRESS += (errno.WSAEINPROGRESS,)
+#######################################################
 class StreamClosedError(IOError):
    """Exception raised by `IOStream` methods when the stream is closed.
@@ -116,7 +129,7 @@ class BaseIOStream(object):
    :arg max_write_buffer_size: Amount of outgoing data to buffer;
        defaults to unlimited.
-    .. versionchanged:: 3.3
+    .. versionchanged:: 4.0
       Add the ``max_write_buffer_size`` parameter.  Changed default
       ``read_chunk_size`` to 64KB.
    """
@@ -203,7 +216,7 @@ class BaseIOStream(object):
        if more than ``max_bytes`` bytes have been read and the regex is
        not satisfied.
-        .. versionchanged:: 3.3
+        .. versionchanged:: 4.0
           Added the ``max_bytes`` argument.  The ``callback`` argument is
           now optional and a `.Future` will be returned if it is omitted.
        """
@@ -230,7 +243,7 @@ class BaseIOStream(object):
        if more than ``max_bytes`` bytes have been read and the delimiter
        is not found.
-        .. versionchanged:: 3.3
+        .. versionchanged:: 4.0
           Added the ``max_bytes`` argument.  The ``callback`` argument is
           now optional and a `.Future` will be returned if it is omitted.
        """
@@ -259,7 +272,7 @@ class BaseIOStream(object):
        If ``partial`` is true, the callback is run as soon as we have
        any bytes to return (but never more than ``num_bytes``)
-        .. versionchanged:: 3.3
+        .. versionchanged:: 4.0
           Added the ``partial`` argument.  The callback argument is now
           optional and a `.Future` will be returned if it is omitted.
        """
@@ -280,7 +293,7 @@ class BaseIOStream(object):
        If a callback is given, it will be run with the data as an argument;
        if not, this method returns a `.Future`.
-        .. versionchanged:: 3.3
+        .. versionchanged:: 4.0
           The callback argument is now optional and a `.Future` will
           be returned if it is omitted.
        """
@@ -308,7 +321,7 @@ class BaseIOStream(object):
        completed.  If `write` is called again before that `.Future` has
        resolved, the previous future will be orphaned and will never resolve.
-        .. versionchanged:: 3.3
+        .. versionchanged:: 4.0
           Now returns a `.Future` if no callback is given.
        """
        assert isinstance(data, bytes_type)
@@ -492,7 +505,7 @@ class BaseIOStream(object):
        def wrapper():
            self._pending_callbacks -= 1
            try:
-                callback(*args)
+                return callback(*args)
            except Exception:
                app_log.error("Uncaught exception, closing connection.",
                              exc_info=True)
@@ -504,6 +517,7 @@ class BaseIOStream(object):
            # Re-raise the exception so that IOLoop.handle_callback_exception
# can see it and log the error # can see it and log the error
raise raise
finally:
self._maybe_add_error_listener() self._maybe_add_error_listener()
# We schedule callbacks to be run on the next IOLoop iteration # We schedule callbacks to be run on the next IOLoop iteration
# rather than running them directly for several reasons: # rather than running them directly for several reasons:
@ -949,11 +963,19 @@ class IOStream(BaseIOStream):
May only be called if the socket passed to the constructor was May only be called if the socket passed to the constructor was
not previously connected. The address parameter is in the not previously connected. The address parameter is in the
same format as for `socket.connect <socket.socket.connect>`,
i.e. a ``(host, port)`` tuple.  If ``callback`` is specified,
it will be called with no arguments when the connection is
completed; if not this method returns a `.Future` (whose result
after a successful connection will be the stream itself).
same format as for `socket.connect <socket.socket.connect>` for
the type of socket passed to the IOStream constructor,
e.g. an ``(ip, port)`` tuple.  Hostnames are accepted here,
but will be resolved synchronously and block the IOLoop.
If you have a hostname instead of an IP address, the `.TCPClient`
class is recommended instead of calling this method directly.
`.TCPClient` will do asynchronous DNS resolution and handle
both IPv4 and IPv6.
If ``callback`` is specified, it will be called with no
arguments when the connection is completed; if not this method
returns a `.Future` (whose result after a successful
connection will be the stream itself).
If specified, the ``server_hostname`` parameter will be used If specified, the ``server_hostname`` parameter will be used
in SSL connections for certificate validation (if requested in in SSL connections for certificate validation (if requested in
@ -966,8 +988,9 @@ class IOStream(BaseIOStream):
is ready. Calling `IOStream` read methods before the socket is is ready. Calling `IOStream` read methods before the socket is
connected works on some platforms but is non-portable. connected works on some platforms but is non-portable.
.. versionchanged:: 3.3 .. versionchanged:: 4.0
If no callback is given, returns a `.Future`. If no callback is given, returns a `.Future`.
""" """
self._connecting = True self._connecting = True
try: try:
@ -980,7 +1003,7 @@ class IOStream(BaseIOStream):
# returned immediately when attempting to connect to # returned immediately when attempting to connect to
# localhost, so handle them the same way as an error # localhost, so handle them the same way as an error
# reported later in _handle_connect. # reported later in _handle_connect.
if (errno_from_exception(e) != errno.EINPROGRESS and if (errno_from_exception(e) not in _ERRNO_INPROGRESS and
errno_from_exception(e) not in _ERRNO_WOULDBLOCK): errno_from_exception(e) not in _ERRNO_WOULDBLOCK):
gen_log.warning("Connect error on fd %s: %s", gen_log.warning("Connect error on fd %s: %s",
self.socket.fileno(), e) self.socket.fileno(), e)
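A hedged sketch of the TCPClient path recommended in the docstring above (the host, port and request bytes are placeholders, not taken from this change):

from tornado import gen
from tornado.ioloop import IOLoop
from tornado.tcpclient import TCPClient

@gen.coroutine
def fetch_banner():
    # TCPClient resolves the hostname asynchronously and yields a connected
    # IOStream, instead of blocking the IOLoop on DNS as IOStream.connect does.
    stream = yield TCPClient().connect("example.com", 80)
    yield stream.write(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
    reply = yield stream.read_until_close()
    print(reply.decode("latin1"))

IOLoop.current().run_sync(fetch_banner)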
@ -1021,7 +1044,7 @@ class IOStream(BaseIOStream):
If a close callback is defined on this stream, it will be If a close callback is defined on this stream, it will be
transferred to the new stream. transferred to the new stream.
.. versionadded:: 3.3 .. versionadded:: 4.0
""" """
if (self._read_callback or self._read_future or if (self._read_callback or self._read_future or
self._write_callback or self._write_future or self._write_callback or self._write_future or

2
libs/tornado/log.py

@ -179,7 +179,7 @@ class LogFormatter(logging.Formatter):
def enable_pretty_logging(options=None, logger=None): def enable_pretty_logging(options=None, logger=None):
"""Turns on formatted logging output as configured. """Turns on formatted logging output as configured.
This is called automaticaly by `tornado.options.parse_command_line` This is called automatically by `tornado.options.parse_command_line`
and `tornado.options.parse_config_file`. and `tornado.options.parse_config_file`.
""" """
if options is None: if options is None:

3
libs/tornado/netutil.py

@ -57,6 +57,9 @@ u('foo').encode('idna')
# some they differ. # some they differ.
_ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN) _ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN)
if hasattr(errno, "WSAEWOULDBLOCK"):
_ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,)
def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags=None): def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags=None):
"""Creates listening sockets bound to the given port and address. """Creates listening sockets bound to the given port and address.

34
libs/tornado/platform/asyncio.py

@ -13,9 +13,9 @@ from __future__ import absolute_import, division, print_function, with_statement
import datetime import datetime
import functools import functools
# _Timeout is used for its timedelta_to_seconds method for py26 compatibility. from tornado.ioloop import IOLoop
from tornado.ioloop import IOLoop, _Timeout
from tornado import stack_context from tornado import stack_context
from tornado.util import timedelta_to_seconds
try: try:
# Import the real asyncio module for py33+ first. Older versions of the # Import the real asyncio module for py33+ first. Older versions of the
@ -109,21 +109,13 @@ class BaseAsyncIOLoop(IOLoop):
def stop(self): def stop(self):
self.asyncio_loop.stop() self.asyncio_loop.stop()
def _run_callback(self, callback, *args, **kwargs): def call_at(self, when, callback, *args, **kwargs):
try: # asyncio.call_at supports *args but not **kwargs, so bind them here.
callback(*args, **kwargs) # We do not synchronize self.time and asyncio_loop.time, so
except Exception: # convert from absolute to relative.
self.handle_callback_exception(callback) return self.asyncio_loop.call_later(
max(0, when - self.time()), self._run_callback,
def add_timeout(self, deadline, callback): functools.partial(stack_context.wrap(callback), *args, **kwargs))
if isinstance(deadline, (int, float)):
delay = max(deadline - self.time(), 0)
elif isinstance(deadline, datetime.timedelta):
delay = _Timeout.timedelta_to_seconds(deadline)
else:
raise TypeError("Unsupported deadline %r", deadline)
return self.asyncio_loop.call_later(delay, self._run_callback,
stack_context.wrap(callback))
def remove_timeout(self, timeout): def remove_timeout(self, timeout):
timeout.cancel() timeout.cancel()
@ -131,13 +123,9 @@ class BaseAsyncIOLoop(IOLoop):
def add_callback(self, callback, *args, **kwargs): def add_callback(self, callback, *args, **kwargs):
if self.closing: if self.closing:
raise RuntimeError("IOLoop is closing") raise RuntimeError("IOLoop is closing")
if kwargs:
self.asyncio_loop.call_soon_threadsafe(functools.partial(
self._run_callback, stack_context.wrap(callback),
*args, **kwargs))
else:
self.asyncio_loop.call_soon_threadsafe( self.asyncio_loop.call_soon_threadsafe(
self._run_callback, stack_context.wrap(callback), *args) self._run_callback,
functools.partial(stack_context.wrap(callback), *args, **kwargs))
add_callback_from_signal = add_callback add_callback_from_signal = add_callback
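A possible way to exercise the asyncio bridge shown above (assumes Python 3.3+ with asyncio available; the callback is illustrative):

import asyncio
from tornado.ioloop import IOLoop
from tornado.platform.asyncio import AsyncIOMainLoop

AsyncIOMainLoop().install()   # back the Tornado IOLoop with the asyncio loop

def hello():
    print("scheduled through the Tornado API, executed by asyncio")
    asyncio.get_event_loop().stop()

IOLoop.current().add_callback(hello)
asyncio.get_event_loop().run_forever()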

31
libs/tornado/platform/twisted.py

@ -68,6 +68,7 @@ from __future__ import absolute_import, division, print_function, with_statement
import datetime import datetime
import functools import functools
import numbers
import socket import socket
import twisted.internet.abstract import twisted.internet.abstract
@ -90,11 +91,7 @@ from tornado.log import app_log
from tornado.netutil import Resolver from tornado.netutil import Resolver
from tornado.stack_context import NullContext, wrap from tornado.stack_context import NullContext, wrap
from tornado.ioloop import IOLoop from tornado.ioloop import IOLoop
from tornado.util import timedelta_to_seconds
try:
long # py2
except NameError:
long = int # py3
@implementer(IDelayedCall) @implementer(IDelayedCall)
@ -475,28 +472,28 @@ class TwistedIOLoop(tornado.ioloop.IOLoop):
def stop(self): def stop(self):
self.reactor.crash() self.reactor.crash()
def _run_callback(self, callback, *args, **kwargs): def add_timeout(self, deadline, callback, *args, **kwargs):
try: # This method could be simplified (since tornado 4.0) by
callback(*args, **kwargs) # overriding call_at instead of add_timeout, but we leave it
except Exception: # for now as a test of backwards-compatibility.
self.handle_callback_exception(callback) if isinstance(deadline, numbers.Real):
def add_timeout(self, deadline, callback):
if isinstance(deadline, (int, long, float)):
delay = max(deadline - self.time(), 0) delay = max(deadline - self.time(), 0)
elif isinstance(deadline, datetime.timedelta): elif isinstance(deadline, datetime.timedelta):
delay = tornado.ioloop._Timeout.timedelta_to_seconds(deadline) delay = timedelta_to_seconds(deadline)
else: else:
raise TypeError("Unsupported deadline %r") raise TypeError("Unsupported deadline %r")
return self.reactor.callLater(delay, self._run_callback, wrap(callback)) return self.reactor.callLater(
delay, self._run_callback,
functools.partial(wrap(callback), *args, **kwargs))
def remove_timeout(self, timeout): def remove_timeout(self, timeout):
if timeout.active(): if timeout.active():
timeout.cancel() timeout.cancel()
def add_callback(self, callback, *args, **kwargs): def add_callback(self, callback, *args, **kwargs):
self.reactor.callFromThread(self._run_callback, self.reactor.callFromThread(
wrap(callback), *args, **kwargs) self._run_callback,
functools.partial(wrap(callback), *args, **kwargs))
def add_callback_from_signal(self, callback, *args, **kwargs): def add_callback_from_signal(self, callback, *args, **kwargs):
self.add_callback(callback, *args, **kwargs) self.add_callback(callback, *args, **kwargs)
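Similarly, a rough sketch of running a Tornado timeout on the Twisted reactor through TwistedIOLoop (assumes Twisted is installed; the delay is arbitrary):

from tornado.ioloop import IOLoop
from tornado.platform.twisted import TwistedIOLoop

TwistedIOLoop().install()     # Tornado calls are dispatched to the reactor

def done():
    print("timeout fired via reactor.callLater")
    IOLoop.current().stop()

io_loop = IOLoop.current()
io_loop.add_timeout(io_loop.time() + 1, done)
io_loop.start()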

33
libs/tornado/simple_httpclient.py

@ -277,7 +277,7 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
stream.close() stream.close()
return return
self.stream = stream self.stream = stream
self.stream.set_close_callback(self._on_close) self.stream.set_close_callback(self.on_connection_close)
self._remove_timeout() self._remove_timeout()
if self.final_callback is None: if self.final_callback is None:
return return
@ -338,7 +338,7 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
if (self.request.method == "POST" and if (self.request.method == "POST" and
"Content-Type" not in self.request.headers): "Content-Type" not in self.request.headers):
self.request.headers["Content-Type"] = "application/x-www-form-urlencoded" self.request.headers["Content-Type"] = "application/x-www-form-urlencoded"
if self.request.use_gzip: if self.request.decompress_response:
self.request.headers["Accept-Encoding"] = "gzip" self.request.headers["Accept-Encoding"] = "gzip"
req_path = ((self.parsed.path or '/') + req_path = ((self.parsed.path or '/') +
(('?' + self.parsed.query) if self.parsed.query else '')) (('?' + self.parsed.query) if self.parsed.query else ''))
@ -348,7 +348,7 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
HTTP1ConnectionParameters( HTTP1ConnectionParameters(
no_keep_alive=True, no_keep_alive=True,
max_header_size=self.max_header_size, max_header_size=self.max_header_size,
use_gzip=self.request.use_gzip), decompress=self.request.decompress_response),
self._sockaddr) self._sockaddr)
start_line = httputil.RequestStartLine(self.request.method, start_line = httputil.RequestStartLine(self.request.method,
req_path, 'HTTP/1.1') req_path, 'HTTP/1.1')
@ -418,12 +418,15 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
# pass it along, unless it's just the stream being closed. # pass it along, unless it's just the stream being closed.
return isinstance(value, StreamClosedError) return isinstance(value, StreamClosedError)
def _on_close(self): def on_connection_close(self):
if self.final_callback is not None: if self.final_callback is not None:
message = "Connection closed" message = "Connection closed"
if self.stream.error: if self.stream.error:
raise self.stream.error raise self.stream.error
try:
raise HTTPError(599, message) raise HTTPError(599, message)
except HTTPError:
self._handle_exception(*sys.exc_info())
def headers_received(self, first_line, headers): def headers_received(self, first_line, headers):
if self.request.expect_100_continue and first_line.code == 100: if self.request.expect_100_continue and first_line.code == 100:
@ -433,20 +436,6 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
self.code = first_line.code self.code = first_line.code
self.reason = first_line.reason self.reason = first_line.reason
if "Content-Length" in self.headers:
if "," in self.headers["Content-Length"]:
# Proxies sometimes cause Content-Length headers to get
# duplicated. If all the values are identical then we can
# use them but if they differ it's an error.
pieces = re.split(r',\s*', self.headers["Content-Length"])
if any(i != pieces[0] for i in pieces):
raise ValueError("Multiple unequal Content-Lengths: %r" %
self.headers["Content-Length"])
self.headers["Content-Length"] = pieces[0]
content_length = int(self.headers["Content-Length"])
else:
content_length = None
if self.request.header_callback is not None: if self.request.header_callback is not None:
# Reassemble the start line. # Reassemble the start line.
self.request.header_callback('%s %s %s\r\n' % first_line) self.request.header_callback('%s %s %s\r\n' % first_line)
@ -454,14 +443,6 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
self.request.header_callback("%s: %s\r\n" % (k, v)) self.request.header_callback("%s: %s\r\n" % (k, v))
self.request.header_callback('\r\n') self.request.header_callback('\r\n')
if 100 <= self.code < 200 or self.code == 204:
# These response codes never have bodies
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3
if ("Transfer-Encoding" in self.headers or
content_length not in (None, 0)):
raise ValueError("Response with code %d should not have body" %
self.code)
def finish(self): def finish(self):
data = b''.join(self.chunks) data = b''.join(self.chunks)
self._remove_timeout() self._remove_timeout()
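To show the renamed option from the client side, a minimal sketch (the URL is a placeholder, not taken from this change):

from tornado import httpclient

# `use_gzip` is now spelled `decompress_response`: the client advertises
# Accept-Encoding: gzip and transparently decompresses the body.
request = httpclient.HTTPRequest("http://example.com/",
                                 decompress_response=True)
client = httpclient.HTTPClient()
response = client.fetch(request)
print(len(response.body), response.headers.get("Content-Encoding"))
client.close()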

6
libs/tornado/testing.py

@ -70,8 +70,8 @@ def get_unused_port():
only that a series of get_unused_port calls in a single process return only that a series of get_unused_port calls in a single process return
distinct ports. distinct ports.
**Deprecated**.  Use bind_unused_port instead, which is guaranteed
to find an unused port.
.. deprecated::
Use bind_unused_port instead, which is guaranteed to find an unused port.
""" """
global _next_port global _next_port
port = _next_port port = _next_port
@ -459,7 +459,7 @@ def gen_test(func=None, timeout=None):
The ``timeout`` argument and ``ASYNC_TEST_TIMEOUT`` environment The ``timeout`` argument and ``ASYNC_TEST_TIMEOUT`` environment
variable. variable.
.. versionchanged:: 3.3 .. versionchanged:: 4.0
The wrapper now passes along ``*args, **kwargs`` so it can be used The wrapper now passes along ``*args, **kwargs`` so it can be used
on functions with arguments. on functions with arguments.
""" """

5
libs/tornado/util.py

@ -311,6 +311,11 @@ class ArgReplacer(object):
return old_value, args, kwargs return old_value, args, kwargs
def timedelta_to_seconds(td):
"""Equivalent to td.total_seconds() (introduced in python 2.7)."""
return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / float(10 ** 6)
def _websocket_mask_python(mask, data): def _websocket_mask_python(mask, data):
"""Websocket masking function. """Websocket masking function.

94
libs/tornado/web.py

@ -35,8 +35,7 @@ Here is a simple "Hello, world" example app::
application.listen(8888) application.listen(8888)
tornado.ioloop.IOLoop.instance().start() tornado.ioloop.IOLoop.instance().start()
See the :doc:`Tornado overview <overview>` for more details and a good getting
started guide.
See the :doc:`guide` for additional information.
Thread-safety notes Thread-safety notes
------------------- -------------------
@ -48,6 +47,7 @@ not thread-safe. In particular, methods such as
you use multiple threads it is important to use `.IOLoop.add_callback` you use multiple threads it is important to use `.IOLoop.add_callback`
to transfer control back to the main thread before finishing the to transfer control back to the main thread before finishing the
request. request.
""" """
from __future__ import absolute_import, division, print_function, with_statement from __future__ import absolute_import, division, print_function, with_statement
@ -820,7 +820,7 @@ class RequestHandler(object):
if another flush occurs before the previous flush's callback if another flush occurs before the previous flush's callback
has been run, the previous callback will be discarded. has been run, the previous callback will be discarded.
.. versionchanged:: 3.3 .. versionchanged:: 4.0
Now returns a `.Future` if no callback is given. Now returns a `.Future` if no callback is given.
""" """
chunk = b"".join(self._write_buffer) chunk = b"".join(self._write_buffer)
@ -943,26 +943,7 @@ class RequestHandler(object):
``kwargs["exc_info"]``. Note that this exception may not be ``kwargs["exc_info"]``. Note that this exception may not be
the "current" exception for purposes of methods like the "current" exception for purposes of methods like
``sys.exc_info()`` or ``traceback.format_exc``. ``sys.exc_info()`` or ``traceback.format_exc``.
For historical reasons, if a method ``get_error_html`` exists,
it will be used instead of the default ``write_error`` implementation.
``get_error_html`` returned a string instead of producing output
normally, and had different semantics for exception handling.
Users of ``get_error_html`` are encouraged to convert their code
to override ``write_error`` instead.
""" """
if hasattr(self, 'get_error_html'):
if 'exc_info' in kwargs:
exc_info = kwargs.pop('exc_info')
kwargs['exception'] = exc_info[1]
try:
# Put the traceback into sys.exc_info()
raise_exc_info(exc_info)
except Exception:
self.finish(self.get_error_html(status_code, **kwargs))
else:
self.finish(self.get_error_html(status_code, **kwargs))
return
if self.settings.get("serve_traceback") and "exc_info" in kwargs: if self.settings.get("serve_traceback") and "exc_info" in kwargs:
# in debug mode, try to send a traceback # in debug mode, try to send a traceback
self.set_header('Content-Type', 'text/plain') self.set_header('Content-Type', 'text/plain')
@ -1147,14 +1128,15 @@ class RequestHandler(object):
else: else:
# Treat unknown versions as not present instead of failing. # Treat unknown versions as not present instead of failing.
return None, None, None return None, None, None
elif len(cookie) == 32: else:
version = 1 version = 1
try:
token = binascii.a2b_hex(utf8(cookie)) token = binascii.a2b_hex(utf8(cookie))
except (binascii.Error, TypeError):
token = utf8(cookie)
# We don't have a usable timestamp in older versions. # We don't have a usable timestamp in older versions.
timestamp = int(time.time()) timestamp = int(time.time())
return (version, token, timestamp) return (version, token, timestamp)
else:
return None, None, None
def check_xsrf_cookie(self): def check_xsrf_cookie(self):
"""Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument. """Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument.
@ -1242,27 +1224,6 @@ class RequestHandler(object):
return base + get_url(self.settings, path, **kwargs) return base + get_url(self.settings, path, **kwargs)
def async_callback(self, callback, *args, **kwargs):
"""Obsolete - catches exceptions from the wrapped function.
This function is unnecessary since Tornado 1.1.
"""
if callback is None:
return None
if args or kwargs:
callback = functools.partial(callback, *args, **kwargs)
def wrapper(*args, **kwargs):
try:
return callback(*args, **kwargs)
except Exception as e:
if self._headers_written:
app_log.error("Exception after headers written",
exc_info=True)
else:
self._handle_request_exception(e)
return wrapper
def require_setting(self, name, feature="this feature"): def require_setting(self, name, feature="this feature"):
"""Raises an exception if the given app setting is not defined.""" """Raises an exception if the given app setting is not defined."""
if not self.application.settings.get(name): if not self.application.settings.get(name):
@ -1405,6 +1366,11 @@ class RequestHandler(object):
" (" + self.request.remote_ip + ")" " (" + self.request.remote_ip + ")"
def _handle_request_exception(self, e): def _handle_request_exception(self, e):
if isinstance(e, Finish):
# Not an error; just finish the request without logging.
if not self._finished:
self.finish()
return
self.log_exception(*sys.exc_info()) self.log_exception(*sys.exc_info())
if self._finished: if self._finished:
# Extra errors after the request has been finished should # Extra errors after the request has been finished should
@ -1662,7 +1628,7 @@ class Application(httputil.HTTPServerConnectionDelegate):
**settings): **settings):
if transforms is None: if transforms is None:
self.transforms = [] self.transforms = []
if settings.get("gzip"): if settings.get("compress_response") or settings.get("gzip"):
self.transforms.append(GZipContentEncoding) self.transforms.append(GZipContentEncoding)
else: else:
self.transforms = transforms self.transforms = transforms
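As a sketch of the renamed setting (the handler, route and port are placeholders):

import tornado.ioloop
import tornado.web

class MainHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("Hello, world! " * 200)

# `compress_response` supersedes the old `gzip` setting; either spelling
# enables the GZipContentEncoding output transform for eligible responses.
application = tornado.web.Application([(r"/", MainHandler)],
                                      compress_response=True)
application.listen(8888)
tornado.ioloop.IOLoop.current().start()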
@ -1959,6 +1925,9 @@ class HTTPError(Exception):
`RequestHandler.send_error` since it automatically ends the `RequestHandler.send_error` since it automatically ends the
current function. current function.
To customize the response sent with an `HTTPError`, override
`RequestHandler.write_error`.
:arg int status_code: HTTP status code. Must be listed in :arg int status_code: HTTP status code. Must be listed in
`httplib.responses <http.client.responses>` unless the ``reason`` `httplib.responses <http.client.responses>` unless the ``reason``
keyword argument is given. keyword argument is given.
@ -1987,6 +1956,25 @@ class HTTPError(Exception):
return message return message
class Finish(Exception):
"""An exception that ends the request without producing an error response.
When `Finish` is raised in a `RequestHandler`, the request will end
(calling `RequestHandler.finish` if it hasn't already been called),
but the outgoing response will not be modified and the error-handling
methods (including `RequestHandler.write_error`) will not be called.
This can be a more convenient way to implement custom error pages
than overriding ``write_error`` (especially in library code)::
if self.current_user is None:
self.set_status(401)
self.set_header('WWW-Authenticate', 'Basic realm="something"')
raise Finish()
"""
pass
class MissingArgumentError(HTTPError): class MissingArgumentError(HTTPError):
"""Exception raised by `RequestHandler.get_argument`. """Exception raised by `RequestHandler.get_argument`.
@ -2367,7 +2355,7 @@ class StaticFileHandler(RequestHandler):
.. versionadded:: 3.1 .. versionadded:: 3.1
.. versionchanged:: 3.3 .. versionchanged:: 4.0
This method is now always called, instead of only when This method is now always called, instead of only when
partial results are requested. partial results are requested.
""" """
@ -2514,9 +2502,9 @@ class FallbackHandler(RequestHandler):
class OutputTransform(object): class OutputTransform(object):
"""A transform modifies the result of an HTTP request (e.g., GZip encoding) """A transform modifies the result of an HTTP request (e.g., GZip encoding)
A new transform instance is created for every request. See the
GZipContentEncoding example below if you want to implement a
new Transform.
Applications are not expected to create their own OutputTransforms
or interact with them directly; the framework chooses which transforms
(if any) to apply.
""" """
def __init__(self, request): def __init__(self, request):
pass pass
@ -2533,7 +2521,7 @@ class GZipContentEncoding(OutputTransform):
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11 See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
.. versionchanged:: 3.3 .. versionchanged:: 4.0
Now compresses all mime types beginning with ``text/``, instead Now compresses all mime types beginning with ``text/``, instead
of just a whitelist. (the whitelist is still used for certain of just a whitelist. (the whitelist is still used for certain
non-text mime types). non-text mime types).
@ -2767,7 +2755,7 @@ class URLSpec(object):
in the regex will be passed in to the handler's get/post/etc in the regex will be passed in to the handler's get/post/etc
methods as arguments. methods as arguments.
* ``handler_class``: `RequestHandler` subclass to be invoked. * ``handler``: `RequestHandler` subclass to be invoked.
* ``kwargs`` (optional): A dictionary of additional arguments * ``kwargs`` (optional): A dictionary of additional arguments
to be passed to the handler's constructor. to be passed to the handler's constructor.

351
libs/tornado/websocket.py

@ -3,18 +3,17 @@
`WebSockets <http://dev.w3.org/html5/websockets/>`_ allow for bidirectional `WebSockets <http://dev.w3.org/html5/websockets/>`_ allow for bidirectional
communication between the browser and server. communication between the browser and server.
.. warning::
The WebSocket protocol was recently finalized as `RFC 6455
<http://tools.ietf.org/html/rfc6455>`_ and is not yet supported in
all browsers.  Refer to http://caniuse.com/websockets for details
on compatibility.  In addition, during development the protocol
went through several incompatible versions, and some browsers only
support older versions.  By default this module only supports the
latest version of the protocol, but optional support for an older
version (known as "draft 76" or "hixie-76") can be enabled by
overriding `WebSocketHandler.allow_draft76` (see that method's
documentation for caveats).
WebSockets are supported in the current versions of all major browsers,
although older versions that do not support WebSockets are still in use
(refer to http://caniuse.com/websockets for details).
This module implements the final version of the WebSocket protocol as
defined in `RFC 6455 <http://tools.ietf.org/html/rfc6455>`_.  Certain
browser versions (notably Safari 5.x) implemented an earlier draft of
the protocol (known as "draft 76") and are not compatible with this module.
.. versionchanged:: 4.0
Removed support for the draft 76 protocol version.
""" """
from __future__ import absolute_import, division, print_function, with_statement from __future__ import absolute_import, division, print_function, with_statement
@ -22,11 +21,9 @@ from __future__ import absolute_import, division, print_function, with_statement
import base64 import base64
import collections import collections
import functools
import hashlib import hashlib
import os import os
import struct import struct
import time
import tornado.escape import tornado.escape
import tornado.web import tornado.web
@ -38,7 +35,7 @@ from tornado.iostream import StreamClosedError
from tornado.log import gen_log, app_log from tornado.log import gen_log, app_log
from tornado import simple_httpclient from tornado import simple_httpclient
from tornado.tcpclient import TCPClient from tornado.tcpclient import TCPClient
from tornado.util import bytes_type, unicode_type, _websocket_mask from tornado.util import bytes_type, _websocket_mask
try: try:
from urllib.parse import urlparse # py2 from urllib.parse import urlparse # py2
@ -108,6 +105,21 @@ class WebSocketHandler(tornado.web.RequestHandler):
}; };
This script pops up an alert box that says "You said: Hello, world". This script pops up an alert box that says "You said: Hello, world".
Web browsers allow any site to open a websocket connection to any other,
instead of using the same-origin policy that governs other network
access from javascript. This can be surprising and is a potential
security hole, so since Tornado 4.0 `WebSocketHandler` requires
applications that wish to receive cross-origin websockets to opt in
by overriding the `~WebSocketHandler.check_origin` method (see that
method's docs for details). Failure to do so is the most likely
cause of 403 errors when making a websocket connection.
When using a secure websocket connection (``wss://``) with a self-signed
certificate, the connection from a browser may fail because it wants
to show the "accept this certificate" dialog but has nowhere to show it.
You must first visit a regular HTML page using the same certificate
to accept it before the websocket connection will succeed.
""" """
def __init__(self, application, request, **kwargs): def __init__(self, application, request, **kwargs):
tornado.web.RequestHandler.__init__(self, application, request, tornado.web.RequestHandler.__init__(self, application, request,
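A minimal echo-server sketch reflecting the new cross-origin requirement described above (the route and port are placeholders):

import tornado.ioloop
import tornado.web
import tornado.websocket

class EchoWebSocket(tornado.websocket.WebSocketHandler):
    def check_origin(self, origin):
        # Since 4.0, cross-origin connections get a 403 unless this
        # method approves the origin.
        return True   # pre-4.0 behaviour; restrict this in production

    def on_message(self, message):
        self.write_message(u"You said: " + message)

application = tornado.web.Application([(r"/websocket", EchoWebSocket)])
application.listen(8888)
tornado.ioloop.IOLoop.current().start()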
@ -115,22 +127,17 @@ class WebSocketHandler(tornado.web.RequestHandler):
self.ws_connection = None self.ws_connection = None
self.close_code = None self.close_code = None
self.close_reason = None self.close_reason = None
self.stream = None
@tornado.web.asynchronous @tornado.web.asynchronous
def get(self, *args, **kwargs): def get(self, *args, **kwargs):
self.open_args = args self.open_args = args
self.open_kwargs = kwargs self.open_kwargs = kwargs
self.stream = self.request.connection.detach()
self.stream.set_close_callback(self.on_connection_close)
# Upgrade header should be present and should be equal to WebSocket # Upgrade header should be present and should be equal to WebSocket
if self.request.headers.get("Upgrade", "").lower() != 'websocket': if self.request.headers.get("Upgrade", "").lower() != 'websocket':
self.stream.write(tornado.escape.utf8( self.set_status(400)
"HTTP/1.1 400 Bad Request\r\n\r\n" self.finish("Can \"Upgrade\" only to \"WebSocket\".")
"Can \"Upgrade\" only to \"WebSocket\"."
))
self.stream.close()
return return
# Connection header should be upgrade. Some proxy servers/load balancers # Connection header should be upgrade. Some proxy servers/load balancers
@ -138,11 +145,8 @@ class WebSocketHandler(tornado.web.RequestHandler):
headers = self.request.headers headers = self.request.headers
connection = map(lambda s: s.strip().lower(), headers.get("Connection", "").split(",")) connection = map(lambda s: s.strip().lower(), headers.get("Connection", "").split(","))
if 'upgrade' not in connection: if 'upgrade' not in connection:
self.stream.write(tornado.escape.utf8( self.set_status(400)
"HTTP/1.1 400 Bad Request\r\n\r\n" self.finish("\"Connection\" must be \"Upgrade\".")
"\"Connection\" must be \"Upgrade\"."
))
self.stream.close()
return return
# Handle WebSocket Origin naming convention differences # Handle WebSocket Origin naming convention differences
@ -159,19 +163,16 @@ class WebSocketHandler(tornado.web.RequestHandler):
# according to check_origin. When the origin is None, we assume it # according to check_origin. When the origin is None, we assume it
# did not come from a browser and that it can be passed on. # did not come from a browser and that it can be passed on.
if origin is not None and not self.check_origin(origin): if origin is not None and not self.check_origin(origin):
self.stream.write(tornado.escape.utf8( self.set_status(403)
"HTTP/1.1 403 Cross Origin Websockets Disabled\r\n\r\n" self.finish("Cross origin websockets not allowed")
))
self.stream.close()
return return
self.stream = self.request.connection.detach()
self.stream.set_close_callback(self.on_connection_close)
if self.request.headers.get("Sec-WebSocket-Version") in ("7", "8", "13"): if self.request.headers.get("Sec-WebSocket-Version") in ("7", "8", "13"):
self.ws_connection = WebSocketProtocol13(self) self.ws_connection = WebSocketProtocol13(self)
self.ws_connection.accept_connection() self.ws_connection.accept_connection()
elif (self.allow_draft76() and
"Sec-WebSocket-Version" not in self.request.headers):
self.ws_connection = WebSocketProtocol76(self)
self.ws_connection.accept_connection()
else: else:
self.stream.write(tornado.escape.utf8( self.stream.write(tornado.escape.utf8(
"HTTP/1.1 426 Upgrade Required\r\n" "HTTP/1.1 426 Upgrade Required\r\n"
@ -245,7 +246,7 @@ class WebSocketHandler(tornado.web.RequestHandler):
phrase was supplied, these values will be available as the attributes phrase was supplied, these values will be available as the attributes
``self.close_code`` and ``self.close_reason``. ``self.close_code`` and ``self.close_reason``.
.. versionchanged:: 3.3 .. versionchanged:: 4.0
Added ``close_code`` and ``close_reason`` attributes. Added ``close_code`` and ``close_reason`` attributes.
""" """
@ -263,10 +264,7 @@ class WebSocketHandler(tornado.web.RequestHandler):
closing. These values are made available to the client, but are closing. These values are made available to the client, but are
not otherwise interpreted by the websocket protocol. not otherwise interpreted by the websocket protocol.
The ``code`` and ``reason`` arguments are ignored in the "draft76"
protocol version.
.. versionchanged:: 3.3 .. versionchanged:: 4.0
Added the ``code`` and ``reason`` arguments. Added the ``code`` and ``reason`` arguments.
""" """
@ -292,7 +290,20 @@ class WebSocketHandler(tornado.web.RequestHandler):
browsers, since WebSockets are allowed to bypass the usual same-origin browsers, since WebSockets are allowed to bypass the usual same-origin
policies and don't use CORS headers. policies and don't use CORS headers.
.. versionadded:: 3.3
To accept all cross-origin traffic (which was the default prior to
Tornado 4.0), simply override this method to always return true::
def check_origin(self, origin):
return True
To allow connections from any subdomain of your site, you might
do something like::
def check_origin(self, origin):
parsed_origin = urllib.parse.urlparse(origin)
return parsed_origin.netloc.endswith(".mydomain.com")
.. versionadded:: 4.0
""" """
parsed_origin = urlparse(origin) parsed_origin = urlparse(origin)
origin = parsed_origin.netloc origin = parsed_origin.netloc
@ -303,21 +314,6 @@ class WebSocketHandler(tornado.web.RequestHandler):
# Check to see that origin matches host directly, including ports # Check to see that origin matches host directly, including ports
return origin == host return origin == host
def allow_draft76(self):
"""Override to enable support for the older "draft76" protocol.
The draft76 version of the websocket protocol is disabled by
default due to security concerns, but it can be enabled by
overriding this method to return True.
Connections using the draft76 protocol do not support the
``binary=True`` flag to `write_message`.
Support for the draft76 protocol is deprecated and will be
removed in a future version of Tornado.
"""
return False
def set_nodelay(self, value): def set_nodelay(self, value):
"""Set the no-delay flag for this stream. """Set the no-delay flag for this stream.
@ -334,29 +330,6 @@ class WebSocketHandler(tornado.web.RequestHandler):
""" """
self.stream.set_nodelay(value) self.stream.set_nodelay(value)
def get_websocket_scheme(self):
"""Return the url scheme used for this request, either "ws" or "wss".
This is normally decided by HTTPServer, but applications
may wish to override this if they are using an SSL proxy
that does not provide the X-Scheme header as understood
by HTTPServer.
Note that this is only used by the draft76 protocol.
"""
return "wss" if self.request.protocol == "https" else "ws"
def async_callback(self, callback, *args, **kwargs):
"""Obsolete - catches exceptions from the wrapped function.
This function is normally unnecessary thanks to
`tornado.stack_context`.
"""
return self.ws_connection.async_callback(callback, *args, **kwargs)
def _not_supported(self, *args, **kwargs):
raise Exception("Method not supported for Web Sockets")
def on_connection_close(self): def on_connection_close(self):
if self.ws_connection: if self.ws_connection:
self.ws_connection.on_connection_close() self.ws_connection.on_connection_close()
@ -364,9 +337,17 @@ class WebSocketHandler(tornado.web.RequestHandler):
self.on_close() self.on_close()
def _wrap_method(method):
def _disallow_for_websocket(self, *args, **kwargs):
if self.stream is None:
method(self, *args, **kwargs)
else:
raise RuntimeError("Method not supported for Web Sockets")
return _disallow_for_websocket
for method in ["write", "redirect", "set_header", "send_error", "set_cookie", for method in ["write", "redirect", "set_header", "send_error", "set_cookie",
"set_status", "flush", "finish"]: "set_status", "flush", "finish"]:
setattr(WebSocketHandler, method, WebSocketHandler._not_supported) setattr(WebSocketHandler, method,
_wrap_method(getattr(WebSocketHandler, method)))
class WebSocketProtocol(object): class WebSocketProtocol(object):
@ -379,23 +360,17 @@ class WebSocketProtocol(object):
self.client_terminated = False self.client_terminated = False
self.server_terminated = False self.server_terminated = False
def async_callback(self, callback, *args, **kwargs): def _run_callback(self, callback, *args, **kwargs):
"""Wrap callbacks with this if they are used on asynchronous requests. """Runs the given callback with exception handling.
Catches exceptions properly and closes this WebSocket if an exception On error, aborts the websocket connection and returns False.
is uncaught.
""" """
if args or kwargs:
callback = functools.partial(callback, *args, **kwargs)
def wrapper(*args, **kwargs):
try: try:
return callback(*args, **kwargs) callback(*args, **kwargs)
except Exception: except Exception:
app_log.error("Uncaught exception in %s", app_log.error("Uncaught exception in %s",
self.request.path, exc_info=True) self.request.path, exc_info=True)
self._abort() self._abort()
return wrapper
def on_connection_close(self): def on_connection_close(self):
self._abort() self._abort()
@ -408,174 +383,6 @@ class WebSocketProtocol(object):
self.close() # let the subclass cleanup self.close() # let the subclass cleanup
class WebSocketProtocol76(WebSocketProtocol):
"""Implementation of the WebSockets protocol, version hixie-76.
This class provides basic functionality to process WebSockets requests as
specified in
http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76
"""
def __init__(self, handler):
WebSocketProtocol.__init__(self, handler)
self.challenge = None
self._waiting = None
def accept_connection(self):
try:
self._handle_websocket_headers()
except ValueError:
gen_log.debug("Malformed WebSocket request received")
self._abort()
return
scheme = self.handler.get_websocket_scheme()
# draft76 only allows a single subprotocol
subprotocol_header = ''
subprotocol = self.request.headers.get("Sec-WebSocket-Protocol", None)
if subprotocol:
selected = self.handler.select_subprotocol([subprotocol])
if selected:
assert selected == subprotocol
subprotocol_header = "Sec-WebSocket-Protocol: %s\r\n" % selected
# Write the initial headers before attempting to read the challenge.
# This is necessary when using proxies (such as HAProxy), which
# need to see the Upgrade headers before passing through the
# non-HTTP traffic that follows.
self.stream.write(tornado.escape.utf8(
"HTTP/1.1 101 WebSocket Protocol Handshake\r\n"
"Upgrade: WebSocket\r\n"
"Connection: Upgrade\r\n"
"Server: TornadoServer/%(version)s\r\n"
"Sec-WebSocket-Origin: %(origin)s\r\n"
"Sec-WebSocket-Location: %(scheme)s://%(host)s%(uri)s\r\n"
"%(subprotocol)s"
"\r\n" % (dict(
version=tornado.version,
origin=self.request.headers["Origin"],
scheme=scheme,
host=self.request.host,
uri=self.request.uri,
subprotocol=subprotocol_header))))
self.stream.read_bytes(8, self._handle_challenge)
def challenge_response(self, challenge):
"""Generates the challenge response that's needed in the handshake
The challenge parameter should be the raw bytes as sent from the
client.
"""
key_1 = self.request.headers.get("Sec-Websocket-Key1")
key_2 = self.request.headers.get("Sec-Websocket-Key2")
try:
part_1 = self._calculate_part(key_1)
part_2 = self._calculate_part(key_2)
except ValueError:
raise ValueError("Invalid Keys/Challenge")
return self._generate_challenge_response(part_1, part_2, challenge)
def _handle_challenge(self, challenge):
try:
challenge_response = self.challenge_response(challenge)
except ValueError:
gen_log.debug("Malformed key data in WebSocket request")
self._abort()
return
self._write_response(challenge_response)
def _write_response(self, challenge):
self.stream.write(challenge)
self.async_callback(self.handler.open)(*self.handler.open_args, **self.handler.open_kwargs)
self._receive_message()
def _handle_websocket_headers(self):
"""Verifies all invariant- and required headers
If a header is missing or have an incorrect value ValueError will be
raised
"""
fields = ("Origin", "Host", "Sec-Websocket-Key1",
"Sec-Websocket-Key2")
if not all(map(lambda f: self.request.headers.get(f), fields)):
raise ValueError("Missing/Invalid WebSocket headers")
def _calculate_part(self, key):
"""Processes the key headers and calculates their key value.
Raises ValueError when feed invalid key."""
# pyflakes complains about variable reuse if both of these lines use 'c'
number = int(''.join(c for c in key if c.isdigit()))
spaces = len([c2 for c2 in key if c2.isspace()])
try:
key_number = number // spaces
except (ValueError, ZeroDivisionError):
raise ValueError
return struct.pack(">I", key_number)
def _generate_challenge_response(self, part_1, part_2, part_3):
m = hashlib.md5()
m.update(part_1)
m.update(part_2)
m.update(part_3)
return m.digest()
def _receive_message(self):
self.stream.read_bytes(1, self._on_frame_type)
def _on_frame_type(self, byte):
frame_type = ord(byte)
if frame_type == 0x00:
self.stream.read_until(b"\xff", self._on_end_delimiter)
elif frame_type == 0xff:
self.stream.read_bytes(1, self._on_length_indicator)
else:
self._abort()
def _on_end_delimiter(self, frame):
if not self.client_terminated:
self.async_callback(self.handler.on_message)(
frame[:-1].decode("utf-8", "replace"))
if not self.client_terminated:
self._receive_message()
def _on_length_indicator(self, byte):
if ord(byte) != 0x00:
self._abort()
return
self.client_terminated = True
self.close()
def write_message(self, message, binary=False):
"""Sends the given message to the client of this Web Socket."""
if binary:
raise ValueError(
"Binary messages not supported by this version of websockets")
if isinstance(message, unicode_type):
message = message.encode("utf-8")
assert isinstance(message, bytes_type)
self.stream.write(b"\x00" + message + b"\xff")
def write_ping(self, data):
"""Send ping frame."""
raise ValueError("Ping messages not supported by this version of websockets")
def close(self, code=None, reason=None):
"""Closes the WebSocket connection."""
if not self.server_terminated:
if not self.stream.closed():
self.stream.write("\xff\x00")
self.server_terminated = True
if self.client_terminated:
if self._waiting is not None:
self.stream.io_loop.remove_timeout(self._waiting)
self._waiting = None
self.stream.close()
elif self._waiting is None:
self._waiting = self.stream.io_loop.add_timeout(
time.time() + 5, self._abort)
class WebSocketProtocol13(WebSocketProtocol): class WebSocketProtocol13(WebSocketProtocol):
"""Implementation of the WebSocket protocol from RFC 6455. """Implementation of the WebSocket protocol from RFC 6455.
@ -645,7 +452,8 @@ class WebSocketProtocol13(WebSocketProtocol):
"%s" "%s"
"\r\n" % (self._challenge_response(), subprotocol_header))) "\r\n" % (self._challenge_response(), subprotocol_header)))
self.async_callback(self.handler.open)(*self.handler.open_args, **self.handler.open_kwargs) self._run_callback(self.handler.open, *self.handler.open_args,
**self.handler.open_kwargs)
self._receive_frame() self._receive_frame()
def _write_frame(self, fin, opcode, data): def _write_frame(self, fin, opcode, data):
@ -803,10 +611,10 @@ class WebSocketProtocol13(WebSocketProtocol):
except UnicodeDecodeError: except UnicodeDecodeError:
self._abort() self._abort()
return return
self.async_callback(self.handler.on_message)(decoded) self._run_callback(self.handler.on_message, decoded)
elif opcode == 0x2: elif opcode == 0x2:
# Binary data # Binary data
self.async_callback(self.handler.on_message)(data) self._run_callback(self.handler.on_message, data)
elif opcode == 0x8: elif opcode == 0x8:
# Close # Close
self.client_terminated = True self.client_terminated = True
@ -820,7 +628,7 @@ class WebSocketProtocol13(WebSocketProtocol):
self._write_frame(True, 0xA, data) self._write_frame(True, 0xA, data)
elif opcode == 0xA: elif opcode == 0xA:
# Pong # Pong
self.async_callback(self.handler.on_pong)(data) self._run_callback(self.handler.on_pong, data)
else: else:
self._abort() self._abort()
@ -885,7 +693,7 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
.. versionadded:: 3.2 .. versionadded:: 3.2
.. versionchanged:: 3.3 .. versionchanged:: 4.0
Added the ``code`` and ``reason`` arguments. Added the ``code`` and ``reason`` arguments.
""" """
@ -893,10 +701,12 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
self.protocol.close(code, reason) self.protocol.close(code, reason)
self.protocol = None self.protocol = None
def _on_close(self): def on_connection_close(self):
if not self.connect_future.done():
self.connect_future.set_exception(StreamClosedError())
self.on_message(None) self.on_message(None)
self.resolver.close() self.tcp_client.close()
super(WebSocketClientConnection, self)._on_close() super(WebSocketClientConnection, self).on_connection_close()
def _on_http_response(self, response): def _on_http_response(self, response):
if not self.connect_future.done(): if not self.connect_future.done():
@ -925,7 +735,12 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
self._timeout = None self._timeout = None
self.stream = self.connection.detach() self.stream = self.connection.detach()
self.stream.set_close_callback(self._on_close) self.stream.set_close_callback(self.on_connection_close)
# Once we've taken over the connection, clear the final callback
# we set on the http request. This deactivates the error handling
# in simple_httpclient that would otherwise interfere with our
# ability to see exceptions.
self.final_callback = None
self.connect_future.set_result(self) self.connect_future.set_result(self)
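For the client side, a rough sketch using websocket_connect against an echo handler like the one above (the URL and close code are illustrative):

from tornado import gen
from tornado.ioloop import IOLoop
from tornado.websocket import websocket_connect

@gen.coroutine
def run_client():
    conn = yield websocket_connect("ws://localhost:8888/websocket")
    conn.write_message("Hello, world")
    reply = yield conn.read_message()   # resolves to None if the server closes
    print(reply)
    conn.close(code=1000, reason="done")

IOLoop.current().run_sync(run_client)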

8
libs/tornado/wsgi.py

@ -77,7 +77,7 @@ else:
class WSGIApplication(web.Application): class WSGIApplication(web.Application):
"""A WSGI equivalent of `tornado.web.Application`. """A WSGI equivalent of `tornado.web.Application`.
.. deprecated: 3.3:: .. deprecated:: 4.0
Use a regular `.Application` and wrap it in `WSGIAdapter` instead. Use a regular `.Application` and wrap it in `WSGIAdapter` instead.
""" """
@ -126,7 +126,7 @@ class _WSGIConnection(httputil.HTTPConnection):
if self._expected_content_remaining is not None: if self._expected_content_remaining is not None:
self._expected_content_remaining -= len(chunk) self._expected_content_remaining -= len(chunk)
if self._expected_content_remaining < 0: if self._expected_content_remaining < 0:
self._error = httputil.HTTPOutputException( self._error = httputil.HTTPOutputError(
"Tried to write more data than Content-Length") "Tried to write more data than Content-Length")
raise self._error raise self._error
self._write_buffer.append(chunk) self._write_buffer.append(chunk)
@ -137,7 +137,7 @@ class _WSGIConnection(httputil.HTTPConnection):
def finish(self): def finish(self):
if (self._expected_content_remaining is not None and if (self._expected_content_remaining is not None and
self._expected_content_remaining != 0): self._expected_content_remaining != 0):
self._error = httputil.HTTPOutputException( self._error = httputil.HTTPOutputError(
"Tried to write %d bytes less than Content-Length" % "Tried to write %d bytes less than Content-Length" %
self._expected_content_remaining) self._expected_content_remaining)
raise self._error raise self._error
@ -183,7 +183,7 @@ class WSGIAdapter(object):
that it is not possible to use `.AsyncHTTPClient`, or the that it is not possible to use `.AsyncHTTPClient`, or the
`tornado.auth` or `tornado.websocket` modules. `tornado.auth` or `tornado.websocket` modules.
.. versionadded:: 3.3 .. versionadded:: 4.0
""" """
def __init__(self, application): def __init__(self, application):
if isinstance(application, WSGIApplication): if isinstance(application, WSGIApplication):
