Browse Source

Update Tornado_py3 Web Server 6.0.4 (b4e39e5) → 6.1.0 (2047e7a).

tags/release_0.25.1
JackDandy 5 years ago
parent
commit
7bd888c40a
  1. 1
      CHANGES.md
  2. 4
      lib/tornado_py3/__init__.py
  3. 2
      lib/tornado_py3/_locale_data.py
  4. 73
      lib/tornado_py3/auth.py
  5. 7
      lib/tornado_py3/autoreload.py
  6. 3
      lib/tornado_py3/concurrent.py
  7. 41
      lib/tornado_py3/curl_httpclient.py
  8. 10
      lib/tornado_py3/escape.py
  9. 47
      lib/tornado_py3/gen.py
  10. 34
      lib/tornado_py3/http1connection.py
  11. 107
      lib/tornado_py3/httpclient.py
  12. 24
      lib/tornado_py3/httpserver.py
  13. 101
      lib/tornado_py3/httputil.py
  14. 16
      lib/tornado_py3/ioloop.py
  15. 129
      lib/tornado_py3/iostream.py
  16. 45
      lib/tornado_py3/locale.py
  17. 29
      lib/tornado_py3/locks.py
  18. 7
      lib/tornado_py3/log.py
  19. 77
      lib/tornado_py3/netutil.py
  20. 61
      lib/tornado_py3/options.py
  21. 315
      lib/tornado_py3/platform/asyncio.py
  22. 32
      lib/tornado_py3/platform/auto.py
  23. 4
      lib/tornado_py3/platform/auto.pyi
  24. 26
      lib/tornado_py3/platform/interface.py
  25. 29
      lib/tornado_py3/platform/posix.py
  26. 35
      lib/tornado_py3/platform/twisted.py
  27. 22
      lib/tornado_py3/platform/windows.py
  28. 66
      lib/tornado_py3/process.py
  29. 14
      lib/tornado_py3/queues.py
  30. 28
      lib/tornado_py3/routing.py
  31. 273
      lib/tornado_py3/simple_httpclient.py
  32. 7
      lib/tornado_py3/speedups.c
  33. 26
      lib/tornado_py3/tcpclient.py
  34. 14
      lib/tornado_py3/tcpserver.py
  35. 45
      lib/tornado_py3/template.py
  36. 50
      lib/tornado_py3/testing.py
  37. 12
      lib/tornado_py3/util.py
  38. 165
      lib/tornado_py3/web.py
  39. 77
      lib/tornado_py3/websocket.py

1
CHANGES.md

@@ -59,6 +59,7 @@
* Update Requests library 2.24.0 (1b41763) to 2.24.0 (2f70990)
* Update soupsieve_py3 2.0.0.final (e66c311) to 2.0.2.dev (05086ef)
* Update soupsieve_py2 backport
* Update Tornado_py3 Web Server 6.0.4 (b4e39e5) to 6.1.0 (2047e7a)
* Update tmdbsimple 2.2.6 (310d933) to 2.6.6 (679e343)
* Update urllib3 1.25.9 (a5a45dc) to 1.25.11 (5eb604f)
* Change add remove duplicates in newznab provider list based on name and url

4
lib/tornado_py3/__init__.py

@@ -22,5 +22,5 @@
# is zero for an official release, positive for a development branch,
# or negative for a release candidate or beta (after the base version
# number has been incremented)
version = "6.0.4"
version_info = (6, 0, 4, 0)
version = "6.1"
version_info = (6, 1, 0, 0)

2
lib/tornado_py3/_locale_data.py

@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
#
# Copyright 2012 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may

73
lib/tornado_py3/auth.py

@@ -44,7 +44,7 @@ Example usage for Google OAuth:
code=self.get_argument('code'))
# Save the user with e.g. set_secure_cookie
else:
await self.authorize_redirect(
self.authorize_redirect(
redirect_uri='http://your.site.com/auth/google',
client_id=self.settings['google_oauth']['key'],
scope=['profile', 'email'],
@@ -87,7 +87,7 @@ class OpenIdMixin(object):
def authenticate_redirect(
self,
callback_uri: str = None,
callback_uri: Optional[str] = None,
ax_attrs: List[str] = ["name", "email", "language", "username"],
) -> None:
"""Redirects to the authentication URL for this service.
@@ -114,7 +114,7 @@ class OpenIdMixin(object):
handler.redirect(endpoint + "?" + urllib.parse.urlencode(args))
async def get_authenticated_user(
self, http_client: httpclient.AsyncHTTPClient = None
self, http_client: Optional[httpclient.AsyncHTTPClient] = None
) -> Dict[str, Any]:
"""Fetches the authenticated user data upon redirect.
@@ -146,7 +146,10 @@ class OpenIdMixin(object):
return self._on_authentication_verified(resp)
def _openid_args(
self, callback_uri: str, ax_attrs: Iterable[str] = [], oauth_scope: str = None
self,
callback_uri: str,
ax_attrs: Iterable[str] = [],
oauth_scope: Optional[str] = None,
) -> Dict[str, str]:
handler = cast(RequestHandler, self)
url = urllib.parse.urljoin(handler.request.full_url(), callback_uri)
@@ -201,7 +204,7 @@ class OpenIdMixin(object):
) -> Dict[str, Any]:
handler = cast(RequestHandler, self)
if b"is_valid:true" not in response.body:
raise AuthError("Invalid OpenID response: %s" % response.body)
raise AuthError("Invalid OpenID response: %r" % response.body)
# Make sure we got back at least an email from attribute exchange
ax_ns = None
@@ -286,9 +289,9 @@ class OAuthMixin(object):
async def authorize_redirect(
self,
callback_uri: str = None,
extra_params: Dict[str, Any] = None,
http_client: httpclient.AsyncHTTPClient = None,
callback_uri: Optional[str] = None,
extra_params: Optional[Dict[str, Any]] = None,
http_client: Optional[httpclient.AsyncHTTPClient] = None,
) -> None:
"""Redirects the user to obtain OAuth authorization for this service.
@@ -334,7 +337,7 @@ class OAuthMixin(object):
self._on_request_token(url, callback_uri, response)
async def get_authenticated_user(
self, http_client: httpclient.AsyncHTTPClient = None
self, http_client: Optional[httpclient.AsyncHTTPClient] = None
) -> Dict[str, Any]:
"""Gets the OAuth authorized user and access token.
@@ -380,7 +383,9 @@ class OAuthMixin(object):
return user
def _oauth_request_token_url(
self, callback_uri: str = None, extra_params: Dict[str, Any] = None
self,
callback_uri: Optional[str] = None,
extra_params: Optional[Dict[str, Any]] = None,
) -> str:
handler = cast(RequestHandler, self)
consumer_token = self._oauth_consumer_token()
@@ -547,11 +552,11 @@ class OAuth2Mixin(object):
def authorize_redirect(
self,
redirect_uri: str = None,
client_id: str = None,
client_secret: str = None,
extra_params: Dict[str, Any] = None,
scope: str = None,
redirect_uri: Optional[str] = None,
client_id: Optional[str] = None,
client_secret: Optional[str] = None,
extra_params: Optional[Dict[str, Any]] = None,
scope: Optional[List[str]] = None,
response_type: str = "code",
) -> None:
"""Redirects the user to obtain OAuth authorization for this service.
@@ -582,11 +587,11 @@ class OAuth2Mixin(object):
def _oauth_request_token_url(
self,
redirect_uri: str = None,
client_id: str = None,
client_secret: str = None,
code: str = None,
extra_params: Dict[str, Any] = None,
redirect_uri: Optional[str] = None,
client_id: Optional[str] = None,
client_secret: Optional[str] = None,
code: Optional[str] = None,
extra_params: Optional[Dict[str, Any]] = None,
) -> str:
url = self._OAUTH_ACCESS_TOKEN_URL # type: ignore
args = {} # type: Dict[str, str]
@@ -605,8 +610,8 @@ class OAuth2Mixin(object):
async def oauth2_request(
self,
url: str,
access_token: str = None,
post_args: Dict[str, Any] = None,
access_token: Optional[str] = None,
post_args: Optional[Dict[str, Any]] = None,
**args: Any
) -> Any:
"""Fetches the given URL auth an OAuth2 access token.
@@ -629,7 +634,7 @@ class OAuth2Mixin(object):
if not new_entry:
# Call failed; perhaps missing permission?
await self.authorize_redirect()
self.authorize_redirect()
return
self.finish("Posted a message!")
@@ -709,7 +714,7 @@ class TwitterMixin(OAuthMixin):
_OAUTH_NO_CALLBACKS = False
_TWITTER_BASE_URL = "https://api.twitter.com/1.1"
async def authenticate_redirect(self, callback_uri: str = None) -> None:
async def authenticate_redirect(self, callback_uri: Optional[str] = None) -> None:
"""Just like `~OAuthMixin.authorize_redirect`, but
auto-redirects if authorized.
@@ -735,7 +740,7 @@ class TwitterMixin(OAuthMixin):
self,
path: str,
access_token: Dict[str, Any],
post_args: Dict[str, Any] = None,
post_args: Optional[Dict[str, Any]] = None,
**args: Any
) -> Any:
"""Fetches the given API path, e.g., ``statuses/user_timeline/btaylor``
@@ -767,7 +772,7 @@ class TwitterMixin(OAuthMixin):
access_token=self.current_user["access_token"])
if not new_entry:
# Call failed; perhaps missing permission?
yield self.authorize_redirect()
await self.authorize_redirect()
return
self.finish("Posted a message!")
@@ -881,7 +886,7 @@ class GoogleOAuth2Mixin(OAuth2Mixin):
# Save the user and access token with
# e.g. set_secure_cookie.
else:
await self.authorize_redirect(
self.authorize_redirect(
redirect_uri='http://your.site.com/auth/google',
client_id=self.settings['google_oauth']['key'],
scope=['profile', 'email'],
@@ -930,7 +935,7 @@ class FacebookGraphMixin(OAuth2Mixin):
client_id: str,
client_secret: str,
code: str,
extra_fields: Dict[str, Any] = None,
extra_fields: Optional[Dict[str, Any]] = None,
) -> Optional[Dict[str, Any]]:
"""Handles the login for the Facebook user, returning a user object.
@@ -949,7 +954,7 @@ class FacebookGraphMixin(OAuth2Mixin):
code=self.get_argument("code"))
# Save the user with e.g. set_secure_cookie
else:
await self.authorize_redirect(
self.authorize_redirect(
redirect_uri='/auth/facebookgraph/',
client_id=self.settings["facebook_api_key"],
extra_params={"scope": "read_stream,offline_access"})
@@ -1034,8 +1039,8 @@ class FacebookGraphMixin(OAuth2Mixin):
async def facebook_request(
self,
path: str,
access_token: str = None,
post_args: Dict[str, Any] = None,
access_token: Optional[str] = None,
post_args: Optional[Dict[str, Any]] = None,
**args: Any
) -> Any:
"""Fetches the given relative API path, e.g., "/btaylor/picture"
@@ -1067,7 +1072,7 @@ class FacebookGraphMixin(OAuth2Mixin):
if not new_entry:
# Call failed; perhaps missing permission?
yield self.authorize_redirect()
self.authorize_redirect()
return
self.finish("Posted a message!")
@@ -1099,7 +1104,7 @@ def _oauth_signature(
method: str,
url: str,
parameters: Dict[str, Any] = {},
token: Dict[str, Any] = None,
token: Optional[Dict[str, Any]] = None,
) -> bytes:
"""Calculates the HMAC-SHA1 OAuth signature for the given request.
@@ -1132,7 +1137,7 @@ def _oauth10a_signature(
method: str,
url: str,
parameters: Dict[str, Any] = {},
token: Dict[str, Any] = None,
token: Optional[Dict[str, Any]] = None,
) -> bytes:
"""Calculates the HMAC-SHA1 OAuth 1.0a signature for the given request.

7
lib/tornado_py3/autoreload.py

@@ -158,8 +157,7 @@ def add_reload_hook(fn: Callable[[], None]) -> None:
Note that for open file and socket handles it is generally
preferable to set the ``FD_CLOEXEC`` flag (using `fcntl` or
``tornado.platform.auto.set_close_exec``) instead
of using a reload hook to close them.
`os.set_inheritable`) instead of using a reload hook to close them.
"""
_reload_hooks.append(fn)
@@ -253,8 +252,8 @@ def _reload() -> None:
# Unfortunately the errno returned in this case does not
# appear to be consistent, so we can't easily check for
# this error specifically.
os.spawnv( # type: ignore
os.P_NOWAIT, sys.executable, [sys.executable] + argv
os.spawnv(
os.P_NOWAIT, sys.executable, [sys.executable] + argv # type: ignore
)
# At this point the IOLoop has been closed and finally
# blocks will experience errors if we allow the stack to

3
lib/tornado_py3/concurrent.py

@@ -74,8 +74,7 @@ dummy_executor = DummyExecutor()
def run_on_executor(*args: Any, **kwargs: Any) -> Callable:
"""Decorator to run a synchronous method asynchronously on an executor.
The decorated method may be called with a ``callback`` keyword
argument and returns a future.
Returns a future.
The executor to be used is determined by the ``executor``
attributes of ``self``. To use a different attribute name, pass a

41
lib/tornado_py3/curl_httpclient.py

@@ -18,7 +18,7 @@
import collections
import functools
import logging
import pycurl # type: ignore
import pycurl
import threading
import time
from io import BytesIO
@@ -36,21 +36,22 @@ from tornado_py3.httpclient import (
)
from tornado_py3.log import app_log
from typing import Dict, Any, Callable, Union
from typing import Dict, Any, Callable, Union, Tuple, Optional
import typing
if typing.TYPE_CHECKING:
from typing import Deque, Tuple, Optional # noqa: F401
from typing import Deque # noqa: F401
curl_log = logging.getLogger("tornado.curl_httpclient")
class CurlAsyncHTTPClient(AsyncHTTPClient):
def initialize( # type: ignore
self, max_clients: int = 10, defaults: Dict[str, Any] = None
self, max_clients: int = 10, defaults: Optional[Dict[str, Any]] = None
) -> None:
super(CurlAsyncHTTPClient, self).initialize(defaults=defaults)
self._multi = pycurl.CurlMulti()
super().initialize(defaults=defaults)
# Typeshed is incomplete for CurlMulti, so just use Any for now.
self._multi = pycurl.CurlMulti() # type: Any
self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)
self._curls = [self._curl_create() for i in range(max_clients)]
@@ -86,7 +87,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
for curl in self._curls:
curl.close()
self._multi.close()
super(CurlAsyncHTTPClient, self).close()
super().close()
# Set below properties to None to reduce the reference count of current
# instance, because those properties hold some methods of current
@@ -219,7 +220,8 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
started += 1
curl = self._free_list.pop()
(request, callback, queue_start_time) = self._requests.popleft()
curl.info = {
# TODO: Don't smuggle extra data on an attribute of the Curl object.
curl.info = { # type: ignore
"headers": httputil.HTTPHeaders(),
"buffer": BytesIO(),
"request": request,
@@ -230,7 +232,10 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
}
try:
self._curl_setup_request(
curl, request, curl.info["buffer"], curl.info["headers"]
curl,
request,
curl.info["buffer"], # type: ignore
curl.info["headers"], # type: ignore
)
except Exception as e:
# If there was an error in setup, pass it on
@@ -250,10 +255,13 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
break
def _finish(
self, curl: pycurl.Curl, curl_error: int = None, curl_message: str = None
self,
curl: pycurl.Curl,
curl_error: Optional[int] = None,
curl_message: Optional[str] = None,
) -> None:
info = curl.info
curl.info = None
info = curl.info # type: ignore
curl.info = None # type: ignore
self._multi.remove_handle(curl)
self._free_list.append(curl)
buffer = info["buffer"]
@@ -378,7 +386,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
if request.decompress_response:
curl.setopt(pycurl.ENCODING, "gzip,deflate")
else:
curl.setopt(pycurl.ENCODING, "none")
curl.setopt(pycurl.ENCODING, None)
if request.proxy_host and request.proxy_port:
curl.setopt(pycurl.PROXY, request.proxy_host)
curl.setopt(pycurl.PROXYPORT, request.proxy_port)
@@ -398,7 +406,10 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
"Unsupported proxy_auth_mode %s" % request.proxy_auth_mode
)
else:
curl.setopt(pycurl.PROXY, "")
try:
curl.unsetopt(pycurl.PROXY)
except TypeError: # not supported, disable proxy
curl.setopt(pycurl.PROXY, "")
curl.unsetopt(pycurl.PROXYUSERPWD)
if request.validate_cert:
curl.setopt(pycurl.SSL_VERIFYPEER, 1)
@@ -469,7 +480,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
request_buffer = BytesIO(utf8(request.body or ""))
def ioctl(cmd: int) -> None:
if cmd == curl.IOCMD_RESTARTREAD:
if cmd == curl.IOCMD_RESTARTREAD: # type: ignore
request_buffer.seek(0)
curl.setopt(pycurl.READFUNCTION, request_buffer.read)

10
lib/tornado_py3/escape.py

@@ -69,7 +69,7 @@ def json_encode(value: Any) -> str:
# JSON permits but does not require forward slashes to be escaped.
# This is useful when json data is emitted in a <script> tag
# in HTML, as it prevents </script> tags from prematurely terminating
# the javascript. Some json libraries do this escaping by default,
# the JavaScript. Some json libraries do this escaping by default,
# although python's standard library does not, so we do it here.
# http://stackoverflow.com/questions/1580647/json-why-are-forward-slashes-escaped
return json.dumps(value).replace("</", "<\\/")
@@ -145,10 +145,10 @@ def url_unescape( # noqa: F811
def parse_qs_bytes(
qs: str, keep_blank_values: bool = False, strict_parsing: bool = False
qs: Union[str, bytes], keep_blank_values: bool = False, strict_parsing: bool = False
) -> Dict[str, List[bytes]]:
"""Parses a query string like urlparse.parse_qs, but returns the
values as byte strings.
"""Parses a query string like urlparse.parse_qs,
but takes bytes and returns the values as byte strings.
Keys still become type str (interpreted as latin1 in python3!)
because it's too painful to keep them as byte strings in
@@ -156,6 +156,8 @@ def parse_qs_bytes(
"""
# This is gross, but python3 doesn't give us another way.
# Latin1 is the universal donor of character encodings.
if isinstance(qs, bytes):
qs = qs.decode("latin1")
result = urllib.parse.parse_qs(
qs, keep_blank_values, strict_parsing, encoding="latin1", errors="strict"
)

47
lib/tornado_py3/gen.py

@@ -90,8 +90,13 @@ from tornado_py3.ioloop import IOLoop
from tornado_py3.log import app_log
from tornado_py3.util import TimeoutError
try:
import contextvars
except ImportError:
contextvars = None # type: ignore
import typing
from typing import Union, Any, Callable, List, Type, Tuple, Awaitable, Dict
from typing import Union, Any, Callable, List, Type, Tuple, Awaitable, Dict, overload
if typing.TYPE_CHECKING:
from typing import Sequence, Deque, Optional, Set, Iterable # noqa: F401
@@ -153,9 +158,25 @@ def _create_future() -> Future:
return future
def _fake_ctx_run(f: Callable[..., _T], *args: Any, **kw: Any) -> _T:
return f(*args, **kw)
@overload
def coroutine(
func: Callable[..., "Generator[Any, Any, _T]"]
) -> Callable[..., "Future[_T]"]:
...
@overload
def coroutine(func: Callable[..., _T]) -> Callable[..., "Future[_T]"]:
...
def coroutine(
func: Union[Callable[..., "Generator[Any, Any, _T]"], Callable[..., _T]]
) -> Callable[..., "Future[_T]"]:
"""Decorator for asynchronous generators.
For compatibility with older versions of Python, coroutines may
@@ -187,8 +208,12 @@ def coroutine(
# This function is type-annotated with a comment to work around
# https://bitbucket.org/pypy/pypy/issues/2868/segfault-with-args-type-annotation-in
future = _create_future()
if contextvars is not None:
ctx_run = contextvars.copy_context().run # type: Callable
else:
ctx_run = _fake_ctx_run
try:
result = func(*args, **kwargs)
result = ctx_run(func, *args, **kwargs)
except (Return, StopIteration) as e:
result = _value_from_stopiteration(e)
except Exception:
@@ -206,7 +231,7 @@ def coroutine(
# use "optional" coroutines in critical path code without
# performance penalty for the synchronous case.
try:
yielded = next(result)
yielded = ctx_run(next, result)
except (StopIteration, Return) as e:
future_set_result_unless_cancelled(
future, _value_from_stopiteration(e)
@@ -221,8 +246,8 @@ def coroutine(
# We do this by exploiting the public API
# add_done_callback() instead of putting a private
# attribute on the Future.
# (Github issues #1769, #2229).
runner = Runner(result, future, yielded)
# (GitHub issues #1769, #2229).
runner = Runner(ctx_run, result, future, yielded)
future.add_done_callback(lambda _: runner)
yielded = None
try:
@@ -276,7 +301,7 @@ class Return(Exception):
"""
def __init__(self, value: Any = None) -> None:
super(Return, self).__init__()
super().__init__()
self.value = value
# Cython recognizes subclasses of StopIteration with a .args tuple.
self.args = (value,)
@@ -699,10 +724,12 @@ class Runner(object):
def __init__(
self,
ctx_run: Callable,
gen: "Generator[_Yieldable, Any, _T]",
result_future: "Future[_T]",
first_yielded: _Yieldable,
) -> None:
self.ctx_run = ctx_run
self.gen = gen
self.result_future = result_future
self.future = _null_future # type: Union[None, Future]
@@ -711,7 +738,7 @@ class Runner(object):
self.io_loop = IOLoop.current()
if self.handle_yield(first_yielded):
gen = result_future = first_yielded = None # type: ignore
self.run()
self.ctx_run(self.run)
def run(self) -> None:
"""Starts or resumes the generator, running until it reaches a
@@ -775,7 +802,7 @@ class Runner(object):
future_set_exc_info(self.future, sys.exc_info())
if self.future is moment:
self.io_loop.add_callback(self.run)
self.io_loop.add_callback(self.ctx_run, self.run)
return False
elif self.future is None:
raise Exception("no pending future")
@@ -784,7 +811,7 @@ class Runner(object):
def inner(f: Any) -> None:
# Break a reference cycle to speed GC.
f = None # noqa: F841
self.run()
self.ctx_run(self.run)
self.io_loop.add_future(self.future, inner)
return False
@@ -796,7 +823,7 @@ class Runner(object):
if not self.running and not self.finished:
self.future = Future()
future_set_exc_info(self.future, (typ, value, tb))
self.run()
self.ctx_run(self.run)
return True
else:
return False

34
lib/tornado_py3/http1connection.py

@@ -75,11 +75,11 @@ class HTTP1ConnectionParameters(object):
def __init__(
self,
no_keep_alive: bool = False,
chunk_size: int = None,
max_header_size: int = None,
header_timeout: float = None,
max_body_size: int = None,
body_timeout: float = None,
chunk_size: Optional[int] = None,
max_header_size: Optional[int] = None,
header_timeout: Optional[float] = None,
max_body_size: Optional[int] = None,
body_timeout: Optional[float] = None,
decompress: bool = False,
) -> None:
"""
@@ -113,8 +113,8 @@ class HTTP1Connection(httputil.HTTPConnection):
self,
stream: iostream.IOStream,
is_client: bool,
params: HTTP1ConnectionParameters = None,
context: object = None,
params: Optional[HTTP1ConnectionParameters] = None,
context: Optional[object] = None,
) -> None:
"""
:arg stream: an `.IOStream`
@@ -235,7 +235,7 @@ class HTTP1Connection(httputil.HTTPConnection):
# but do not actually have a body.
# http://tools.ietf.org/html/rfc7230#section-3.3
skip_body = True
if code >= 100 and code < 200:
if 100 <= code < 200:
# 1xx responses should never indicate the presence of
# a body.
if "Content-Length" in headers or "Transfer-Encoding" in headers:
@@ -317,7 +317,7 @@ class HTTP1Connection(httputil.HTTPConnection):
Note that this callback is slightly different from
`.HTTPMessageDelegate.on_connection_close`: The
`.HTTPMessageDelegate` method is called when the connection is
closed while recieving a message. This callback is used when
closed while receiving a message. This callback is used when
there is not an active delegate (for example, on the server
side this callback is used if the client closes the connection
after sending its request but before receiving all the
@@ -377,7 +377,7 @@ class HTTP1Connection(httputil.HTTPConnection):
self,
start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine],
headers: httputil.HTTPHeaders,
chunk: bytes = None,
chunk: Optional[bytes] = None,
) -> "Future[None]":
"""Implements `.HTTPConnection.write_headers`."""
lines = []
@@ -406,6 +406,8 @@ class HTTP1Connection(httputil.HTTPConnection):
# self._request_start_line.version or
# start_line.version?
self._request_start_line.version == "HTTP/1.1"
# Omit payload header field for HEAD request.
and self._request_start_line.method != "HEAD"
# 1xx, 204 and 304 responses have no body (not even a zero-length
# body), and so should not have either Content-Length or
# Transfer-Encoding headers.
@@ -446,7 +448,7 @@ class HTTP1Connection(httputil.HTTPConnection):
header_lines = (
native_str(n) + ": " + native_str(v) for n, v in headers.get_all()
)
lines.extend(l.encode("latin1") for l in header_lines)
lines.extend(line.encode("latin1") for line in header_lines)
for line in lines:
if b"\n" in line:
raise ValueError("Newline in header: " + repr(line))
@@ -735,6 +737,10 @@ class _GzipMessageDelegate(httputil.HTTPMessageDelegate):
if ret is not None:
await ret
compressed_data = self._decompressor.unconsumed_tail
if compressed_data and not decompressed:
raise httputil.HTTPInputError(
"encountered unconsumed gzip data without making progress"
)
else:
ret = self._delegate.data_received(chunk)
if ret is not None:
@@ -751,7 +757,7 @@ class _GzipMessageDelegate(httputil.HTTPMessageDelegate):
# chunk at this point we'd need to change the
# interface to make finish() a coroutine.
raise ValueError(
"decompressor.flush returned data; possile truncated input"
"decompressor.flush returned data; possible truncated input"
)
return self._delegate.finish()
@@ -765,8 +771,8 @@ class HTTP1ServerConnection(object):
def __init__(
self,
stream: iostream.IOStream,
params: HTTP1ConnectionParameters = None,
context: object = None,
params: Optional[HTTP1ConnectionParameters] = None,
context: Optional[object] = None,
) -> None:
"""
:arg stream: an `.IOStream`

107
lib/tornado_py3/httpclient.py

@@ -53,7 +53,7 @@ from tornado_py3 import gen, httputil
from tornado_py3.ioloop import IOLoop
from tornado_py3.util import Configurable
from typing import Type, Any, Union, Dict, Callable, Optional, cast, Awaitable
from typing import Type, Any, Union, Dict, Callable, Optional, cast
class HTTPClient(object):
@@ -87,7 +87,9 @@ class HTTPClient(object):
"""
def __init__(
self, async_client_class: Type["AsyncHTTPClient"] = None, **kwargs: Any
self,
async_client_class: "Optional[Type[AsyncHTTPClient]]" = None,
**kwargs: Any
) -> None:
# Initialize self._closed at the beginning of the constructor
# so that an exception raised here doesn't lead to confusing
@@ -211,7 +213,7 @@ class AsyncHTTPClient(Configurable):
instance_cache[instance.io_loop] = instance
return instance
def initialize(self, defaults: Dict[str, Any] = None) -> None:
def initialize(self, defaults: Optional[Dict[str, Any]] = None) -> None:
self.io_loop = IOLoop.current()
self.defaults = dict(HTTPRequest._DEFAULTS)
if defaults is not None:
@@ -249,7 +251,7 @@ class AsyncHTTPClient(Configurable):
request: Union[str, "HTTPRequest"],
raise_error: bool = True,
**kwargs: Any
) -> Awaitable["HTTPResponse"]:
) -> "Future[HTTPResponse]":
"""Executes a request, asynchronously returning an `HTTPResponse`.
The request may be either a string URL or an `HTTPRequest` object.
@@ -357,37 +359,39 @@ class HTTPRequest(object):
self,
url: str,
method: str = "GET",
headers: Union[Dict[str, str], httputil.HTTPHeaders] = None,
body: Union[bytes, str] = None,
auth_username: str = None,
auth_password: str = None,
auth_mode: str = None,
connect_timeout: float = None,
request_timeout: float = None,
if_modified_since: Union[float, datetime.datetime] = None,
follow_redirects: bool = None,
max_redirects: int = None,
user_agent: str = None,
use_gzip: bool = None,
network_interface: str = None,
streaming_callback: Callable[[bytes], None] = None,
header_callback: Callable[[str], None] = None,
prepare_curl_callback: Callable[[Any], None] = None,
proxy_host: str = None,
proxy_port: int = None,
proxy_username: str = None,
proxy_password: str = None,
proxy_auth_mode: str = None,
allow_nonstandard_methods: bool = None,
validate_cert: bool = None,
ca_certs: str = None,
allow_ipv6: bool = None,
client_key: str = None,
client_cert: str = None,
body_producer: Callable[[Callable[[bytes], None]], "Future[None]"] = None,
headers: Optional[Union[Dict[str, str], httputil.HTTPHeaders]] = None,
body: Optional[Union[bytes, str]] = None,
auth_username: Optional[str] = None,
auth_password: Optional[str] = None,
auth_mode: Optional[str] = None,
connect_timeout: Optional[float] = None,
request_timeout: Optional[float] = None,
if_modified_since: Optional[Union[float, datetime.datetime]] = None,
follow_redirects: Optional[bool] = None,
max_redirects: Optional[int] = None,
user_agent: Optional[str] = None,
use_gzip: Optional[bool] = None,
network_interface: Optional[str] = None,
streaming_callback: Optional[Callable[[bytes], None]] = None,
header_callback: Optional[Callable[[str], None]] = None,
prepare_curl_callback: Optional[Callable[[Any], None]] = None,
proxy_host: Optional[str] = None,
proxy_port: Optional[int] = None,
proxy_username: Optional[str] = None,
proxy_password: Optional[str] = None,
proxy_auth_mode: Optional[str] = None,
allow_nonstandard_methods: Optional[bool] = None,
validate_cert: Optional[bool] = None,
ca_certs: Optional[str] = None,
allow_ipv6: Optional[bool] = None,
client_key: Optional[str] = None,
client_cert: Optional[str] = None,
body_producer: Optional[
Callable[[Callable[[bytes], None]], "Future[None]"]
] = None,
expect_100_continue: bool = False,
decompress_response: bool = None,
ssl_options: Union[Dict[str, Any], ssl.SSLContext] = None,
decompress_response: Optional[bool] = None,
ssl_options: Optional[Union[Dict[str, Any], ssl.SSLContext]] = None,
) -> None:
r"""All parameters except ``url`` are optional.
@@ -397,7 +401,9 @@ class HTTPRequest(object):
:type headers: `~tornado.httputil.HTTPHeaders` or `dict`
:arg body: HTTP request body as a string (byte or unicode; if unicode
the utf-8 encoding will be used)
:arg body_producer: Callable used for lazy/asynchronous request bodies.
:type body: `str` or `bytes`
:arg collections.abc.Callable body_producer: Callable used for
lazy/asynchronous request bodies.
It is called with one argument, a ``write`` function, and should
return a `.Future`. It should call the write function with new
data as it becomes available. The write function returns a
@@ -415,9 +421,9 @@ class HTTPRequest(object):
supports "basic" and "digest"; ``simple_httpclient`` only supports
"basic"
:arg float connect_timeout: Timeout for initial connection in seconds,
default 20 seconds
default 20 seconds (0 means no timeout)
:arg float request_timeout: Timeout for entire request in seconds,
default 20 seconds
default 20 seconds (0 means no timeout)
:arg if_modified_since: Timestamp for ``If-Modified-Since`` header
:type if_modified_since: `datetime` or `float`
:arg bool follow_redirects: Should redirects be followed automatically
@@ -502,7 +508,7 @@ class HTTPRequest(object):
"""
# Note that some of these attributes go through property setters
# defined below.
self.headers = headers
self.headers = headers # type: ignore
if if_modified_since:
self.headers["If-Modified-Since"] = httputil.format_timestamp(
if_modified_since
@@ -514,7 +520,7 @@ class HTTPRequest(object):
self.proxy_auth_mode = proxy_auth_mode
self.url = url
self.method = method
self.body = body
self.body = body # type: ignore
self.body_producer = body_producer
self.auth_username = auth_username
self.auth_password = auth_password
@@ -624,14 +630,14 @@ class HTTPResponse(object):
self,
request: HTTPRequest,
code: int,
headers: httputil.HTTPHeaders = None,
buffer: BytesIO = None,
effective_url: str = None,
error: BaseException = None,
request_time: float = None,
time_info: Dict[str, float] = None,
reason: str = None,
start_time: float = None,
headers: Optional[httputil.HTTPHeaders] = None,
buffer: Optional[BytesIO] = None,
effective_url: Optional[str] = None,
error: Optional[BaseException] = None,
request_time: Optional[float] = None,
time_info: Optional[Dict[str, float]] = None,
reason: Optional[str] = None,
start_time: Optional[float] = None,
) -> None:
if isinstance(request, _RequestProxy):
self.request = request.request
@@ -703,12 +709,15 @@ class HTTPClientError(Exception):
"""
def __init__(
self, code: int, message: str = None, response: HTTPResponse = None
self,
code: int,
message: Optional[str] = None,
response: Optional[HTTPResponse] = None,
) -> None:
self.code = code
self.message = message or httputil.responses.get(code, "Unknown")
self.response = response
super(HTTPClientError, self).__init__(code, message, response)
super().__init__(code, message, response)
def __str__(self) -> str:
return "HTTP %d: %s" % (self.code, self.message)

24
lib/tornado_py3/httpserver.py

@@ -157,16 +157,16 @@ class HTTPServer(TCPServer, Configurable, httputil.HTTPServerConnectionDelegate)
],
no_keep_alive: bool = False,
xheaders: bool = False,
ssl_options: Union[Dict[str, Any], ssl.SSLContext] = None,
protocol: str = None,
ssl_options: Optional[Union[Dict[str, Any], ssl.SSLContext]] = None,
protocol: Optional[str] = None,
decompress_request: bool = False,
chunk_size: int = None,
max_header_size: int = None,
idle_connection_timeout: float = None,
body_timeout: float = None,
max_body_size: int = None,
max_buffer_size: int = None,
trusted_downstream: List[str] = None,
chunk_size: Optional[int] = None,
max_header_size: Optional[int] = None,
idle_connection_timeout: Optional[float] = None,
body_timeout: Optional[float] = None,
max_body_size: Optional[int] = None,
max_buffer_size: Optional[int] = None,
trusted_downstream: Optional[List[str]] = None,
) -> None:
# This method's signature is not extracted with autodoc
# because we want its arguments to appear on the class
@ -212,7 +212,7 @@ class HTTPServer(TCPServer, Configurable, httputil.HTTPServerConnectionDelegate)
This method does not currently close open websocket connections.
Note that this method is a coroutine and must be caled with ``await``.
Note that this method is a coroutine and must be called with ``await``.
"""
while self._connections:
@ -289,7 +289,7 @@ class _HTTPRequestContext(object):
stream: iostream.IOStream,
address: Tuple,
protocol: Optional[str],
trusted_downstream: List[str] = None,
trusted_downstream: Optional[List[str]] = None,
) -> None:
self.address = address
# Save the socket's address family now so we know how to
@ -346,7 +346,7 @@ class _HTTPRequestContext(object):
)
if proto_header:
# use only the last proto entry if there is more than one
# TODO: support trusting mutiple layers of proxied protocol
# TODO: support trusting multiple layers of proxied protocol
proto_header = proto_header.split(",")[-1].strip()
if proto_header in ("http", "https"):
self.protocol = proto_header

101
lib/tornado_py3/httputil.py

@ -24,6 +24,7 @@ import collections
import copy
import datetime
import email.utils
from functools import lru_cache
from http.client import responses
import http.cookies
import re
@ -62,42 +63,14 @@ if typing.TYPE_CHECKING:
import unittest # noqa: F401
# RFC 7230 section 3.5: a recipient MAY recognize a single LF as a line
# terminator and ignore any preceding CR.
_CRLF_RE = re.compile(r"\r?\n")
@lru_cache(1000)
def _normalize_header(name: str) -> str:
"""Map a header name to Http-Header-Case.
class _NormalizedHeaderCache(dict):
"""Dynamic cached mapping of header names to Http-Header-Case.
Implemented as a dict subclass so that cache hits are as fast as a
normal dict lookup, without the overhead of a python function
call.
>>> normalized_headers = _NormalizedHeaderCache(10)
>>> normalized_headers["coNtent-TYPE"]
>>> _normalize_header("coNtent-TYPE")
'Content-Type'
"""
def __init__(self, size: int) -> None:
super(_NormalizedHeaderCache, self).__init__()
self.size = size
self.queue = collections.deque() # type: Deque[str]
def __missing__(self, key: str) -> str:
normalized = "-".join([w.capitalize() for w in key.split("-")])
self[key] = normalized
self.queue.append(key)
if len(self.queue) > self.size:
# Limit the size of the cache. LRU would be better, but this
# simpler approach should be fine. In Python 2.7+ we could
# use OrderedDict (or in 3.2+, @functools.lru_cache).
old_key = self.queue.popleft()
del self[old_key]
return normalized
_normalized_headers = _NormalizedHeaderCache(1000)
return "-".join([w.capitalize() for w in name.split("-")])
class HTTPHeaders(collections.abc.MutableMapping):
@ -148,7 +121,7 @@ class HTTPHeaders(collections.abc.MutableMapping):
def __init__(self, *args: typing.Any, **kwargs: str) -> None: # noqa: F811
self._dict = {} # type: typing.Dict[str, str]
self._as_list = {} # type: typing.Dict[str, typing.List[str]]
self._last_key = None
self._last_key = None # type: Optional[str]
if len(args) == 1 and len(kwargs) == 0 and isinstance(args[0], HTTPHeaders):
# Copy constructor
for k, v in args[0].get_all():
@ -161,7 +134,7 @@ class HTTPHeaders(collections.abc.MutableMapping):
def add(self, name: str, value: str) -> None:
"""Adds a new value for the given key."""
norm_name = _normalized_headers[name]
norm_name = _normalize_header(name)
self._last_key = norm_name
if norm_name in self:
self._dict[norm_name] = (
@ -173,7 +146,7 @@ class HTTPHeaders(collections.abc.MutableMapping):
def get_list(self, name: str) -> List[str]:
"""Returns all values for the given header as a list."""
norm_name = _normalized_headers[name]
norm_name = _normalize_header(name)
return self._as_list.get(norm_name, [])
def get_all(self) -> Iterable[Tuple[str, str]]:
@ -223,7 +196,11 @@ class HTTPHeaders(collections.abc.MutableMapping):
"""
h = cls()
for line in _CRLF_RE.split(headers):
# RFC 7230 section 3.5: a recipient MAY recognize a single LF as a line
# terminator and ignore any preceding CR.
for line in headers.split("\n"):
if line.endswith("\r"):
line = line[:-1]
if line:
h.parse_line(line)
return h
@ -231,15 +208,15 @@ class HTTPHeaders(collections.abc.MutableMapping):
# MutableMapping abstract method implementations.
def __setitem__(self, name: str, value: str) -> None:
norm_name = _normalized_headers[name]
norm_name = _normalize_header(name)
self._dict[norm_name] = value
self._as_list[norm_name] = [value]
def __getitem__(self, name: str) -> str:
return self._dict[_normalized_headers[name]]
return self._dict[_normalize_header(name)]
def __delitem__(self, name: str) -> None:
norm_name = _normalized_headers[name]
norm_name = _normalize_header(name)
del self._dict[norm_name]
del self._as_list[norm_name]
@ -368,16 +345,16 @@ class HTTPServerRequest(object):
def __init__(
self,
method: str = None,
uri: str = None,
method: Optional[str] = None,
uri: Optional[str] = None,
version: str = "HTTP/1.0",
headers: HTTPHeaders = None,
body: bytes = None,
host: str = None,
files: Dict[str, List["HTTPFile"]] = None,
connection: "HTTPConnection" = None,
start_line: "RequestStartLine" = None,
server_connection: object = None,
headers: Optional[HTTPHeaders] = None,
body: Optional[bytes] = None,
host: Optional[str] = None,
files: Optional[Dict[str, List["HTTPFile"]]] = None,
connection: Optional["HTTPConnection"] = None,
start_line: Optional["RequestStartLine"] = None,
server_connection: Optional[object] = None,
) -> None:
if start_line is not None:
method, uri, version = start_line
@ -410,7 +387,9 @@ class HTTPServerRequest(object):
def cookies(self) -> Dict[str, http.cookies.Morsel]:
"""A dictionary of ``http.cookies.Morsel`` objects."""
if not hasattr(self, "_cookies"):
self._cookies = http.cookies.SimpleCookie()
self._cookies = (
http.cookies.SimpleCookie()
) # type: http.cookies.SimpleCookie
if "Cookie" in self.headers:
try:
parsed = parse_cookie(self.headers["Cookie"])
@ -591,7 +570,7 @@ class HTTPConnection(object):
self,
start_line: Union["RequestStartLine", "ResponseStartLine"],
headers: HTTPHeaders,
chunk: bytes = None,
chunk: Optional[bytes] = None,
) -> "Future[None]":
"""Write an HTTP header block.
@ -767,7 +746,7 @@ def parse_body_arguments(
body: bytes,
arguments: Dict[str, List[bytes]],
files: Dict[str, List[HTTPFile]],
headers: HTTPHeaders = None,
headers: Optional[HTTPHeaders] = None,
) -> None:
"""Parses a form request body.
@ -784,7 +763,8 @@ def parse_body_arguments(
)
return
try:
uri_arguments = parse_qs_bytes(native_str(body), keep_blank_values=True)
# real charset decoding will happen in RequestHandler.decode_argument()
uri_arguments = parse_qs_bytes(body, keep_blank_values=True)
except Exception as e:
gen_log.warning("Invalid x-www-form-urlencoded body: %s", e)
uri_arguments = {}
@ -896,6 +876,9 @@ RequestStartLine = collections.namedtuple(
)
_http_version_re = re.compile(r"^HTTP/1\.[0-9]$")
def parse_request_start_line(line: str) -> RequestStartLine:
"""Returns a (method, path, version) tuple for an HTTP 1.x request line.
@ -910,7 +893,7 @@ def parse_request_start_line(line: str) -> RequestStartLine:
# https://tools.ietf.org/html/rfc7230#section-3.1.1
# invalid request-line SHOULD respond with a 400 (Bad Request)
raise HTTPInputError("Malformed HTTP request line")
if not re.match(r"^HTTP/1\.[0-9]$", version):
if not _http_version_re.match(version):
raise HTTPInputError(
"Malformed HTTP version in HTTP Request-Line: %r" % version
)
@ -922,6 +905,9 @@ ResponseStartLine = collections.namedtuple(
)
_http_response_line_re = re.compile(r"(HTTP/1.[0-9]) ([0-9]+) ([^\r]*)")
def parse_response_start_line(line: str) -> ResponseStartLine:
"""Returns a (version, code, reason) tuple for an HTTP 1.x response line.
@ -931,7 +917,7 @@ def parse_response_start_line(line: str) -> ResponseStartLine:
ResponseStartLine(version='HTTP/1.1', code=200, reason='OK')
"""
line = native_str(line)
match = re.match("(HTTP/1.[0-9]) ([0-9]+) ([^\r]*)", line)
match = _http_response_line_re.match(line)
if not match:
raise HTTPInputError("Error parsing response start line")
return ResponseStartLine(match.group(1), int(match.group(2)), match.group(3))
@ -1036,6 +1022,9 @@ def doctests():
return doctest.DocTestSuite()
_netloc_re = re.compile(r"^(.+):(\d+)$")
def split_host_and_port(netloc: str) -> Tuple[str, Optional[int]]:
"""Returns ``(host, port)`` tuple from ``netloc``.
@ -1043,7 +1032,7 @@ def split_host_and_port(netloc: str) -> Tuple[str, Optional[int]]:
.. versionadded:: 4.1
"""
match = re.match(r"^(.+):(\d+)$", netloc)
match = _netloc_re.match(netloc)
if match:
host = match.group(1)
port = int(match.group(2)) # type: Optional[int]

16
lib/tornado_py3/ioloop.py

@ -101,9 +101,7 @@ class IOLoop(Configurable):
while True:
try:
connection, address = sock.accept()
except socket.error as e:
if e.args[0] not in (errno.EWOULDBLOCK, errno.EAGAIN):
raise
except BlockingIOError:
return
connection.setblocking(0)
io_loop = tornado.ioloop.IOLoop.current()
@ -235,13 +233,13 @@ class IOLoop(Configurable):
def current() -> "IOLoop":
pass
@typing.overload # noqa: F811
@typing.overload
@staticmethod
def current(instance: bool = True) -> Optional["IOLoop"]:
def current(instance: bool = True) -> Optional["IOLoop"]: # noqa: F811
pass
@staticmethod # noqa: F811
def current(instance: bool = True) -> Optional["IOLoop"]:
@staticmethod
def current(instance: bool = True) -> Optional["IOLoop"]: # noqa: F811
"""Returns the current thread's `IOLoop`.
If an `IOLoop` is currently running or has been marked as
@ -329,7 +327,7 @@ class IOLoop(Configurable):
return AsyncIOLoop
def initialize(self, make_current: bool = None) -> None:
def initialize(self, make_current: Optional[bool] = None) -> None:
if make_current is None:
if IOLoop.current(instance=False) is None:
self.make_current()
@ -457,7 +455,7 @@ class IOLoop(Configurable):
"""
raise NotImplementedError()
def run_sync(self, func: Callable, timeout: float = None) -> Any:
def run_sync(self, func: Callable, timeout: Optional[float] = None) -> Any:
"""Starts the `IOLoop`, runs the given function, and stops the loop.
The function must return either an awaitable object or

129
lib/tornado_py3/iostream.py

@ -59,19 +59,6 @@ if typing.TYPE_CHECKING:
_IOStreamType = TypeVar("_IOStreamType", bound="IOStream")
try:
from tornado_py3.platform.posix import _set_nonblocking
except ImportError:
_set_nonblocking = None # type: ignore
# These errnos indicate that a non-blocking operation must be retried
# at a later time. On most platforms they're the same value, but on
# some they differ.
_ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN)
if hasattr(errno, "WSAEWOULDBLOCK"):
_ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) # type: ignore
# These errnos indicate that a connection has been abruptly terminated.
# They should be caught and handled less noisily than other errors.
_ERRNO_CONNRESET = (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE, errno.ETIMEDOUT)
@ -91,12 +78,6 @@ if sys.platform == "darwin":
# instead of an unexpected error.
_ERRNO_CONNRESET += (errno.EPROTOTYPE,) # type: ignore
# More non-portable errnos:
_ERRNO_INPROGRESS = (errno.EINPROGRESS,)
if hasattr(errno, "WSAEINPROGRESS"):
_ERRNO_INPROGRESS += (errno.WSAEINPROGRESS,) # type: ignore
_WINDOWS = sys.platform.startswith("win")
@ -114,8 +95,8 @@ class StreamClosedError(IOError):
Added the ``real_error`` attribute.
"""
def __init__(self, real_error: BaseException = None) -> None:
super(StreamClosedError, self).__init__("Stream is closed")
def __init__(self, real_error: Optional[BaseException] = None) -> None:
super().__init__("Stream is closed")
self.real_error = real_error
@ -246,9 +227,9 @@ class BaseIOStream(object):
def __init__(
self,
max_buffer_size: int = None,
read_chunk_size: int = None,
max_write_buffer_size: int = None,
max_buffer_size: Optional[int] = None,
read_chunk_size: Optional[int] = None,
max_write_buffer_size: Optional[int] = None,
) -> None:
"""`BaseIOStream` constructor.
@ -346,7 +327,9 @@ class BaseIOStream(object):
"""
return None
def read_until_regex(self, regex: bytes, max_bytes: int = None) -> Awaitable[bytes]:
def read_until_regex(
self, regex: bytes, max_bytes: Optional[int] = None
) -> Awaitable[bytes]:
"""Asynchronously read until we have matched the given regex.
The result includes the data that matches the regex and anything
@ -383,7 +366,9 @@ class BaseIOStream(object):
raise
return future
def read_until(self, delimiter: bytes, max_bytes: int = None) -> Awaitable[bytes]:
def read_until(
self, delimiter: bytes, max_bytes: Optional[int] = None
) -> Awaitable[bytes]:
"""Asynchronously read until we have found the given delimiter.
The result includes all the data read including the delimiter.
@ -883,8 +868,6 @@ class BaseIOStream(object):
buf = bytearray(self.read_chunk_size)
bytes_read = self.read_from_fd(buf)
except (socket.error, IOError, OSError) as e:
if errno_from_exception(e) == errno.EINTR:
continue
# ssl.SSLError is a subclass of socket.error
if self._is_connreset(e):
# Treat ECONNRESET as a connection close rather than
@ -992,17 +975,16 @@ class BaseIOStream(object):
break
self._write_buffer.advance(num_bytes)
self._total_write_done_index += num_bytes
except BlockingIOError:
break
except (socket.error, IOError, OSError) as e:
if e.args[0] in _ERRNO_WOULDBLOCK:
break
else:
if not self._is_connreset(e):
# Broken pipe errors are usually caused by connection
# reset, and its better to not log EPIPE errors to
# minimize log spam
gen_log.warning("Write error on %s: %s", self.fileno(), e)
self.close(exc_info=e)
return
if not self._is_connreset(e):
# Broken pipe errors are usually caused by connection
# reset, and its better to not log EPIPE errors to
# minimize log spam
gen_log.warning("Write error on %s: %s", self.fileno(), e)
self.close(exc_info=e)
return
while self._write_futures:
index, future = self._write_futures[0]
@ -1142,7 +1124,7 @@ class IOStream(BaseIOStream):
def __init__(self, socket: socket.socket, *args: Any, **kwargs: Any) -> None:
self.socket = socket
self.socket.setblocking(False)
super(IOStream, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
def fileno(self) -> Union[int, ioloop._Selectable]:
return self.socket
@ -1158,11 +1140,8 @@ class IOStream(BaseIOStream):
def read_from_fd(self, buf: Union[bytearray, memoryview]) -> Optional[int]:
try:
return self.socket.recv_into(buf, len(buf))
except socket.error as e:
if e.args[0] in _ERRNO_WOULDBLOCK:
return None
else:
raise
except BlockingIOError:
return None
finally:
del buf
@ -1175,7 +1154,7 @@ class IOStream(BaseIOStream):
del data
def connect(
self: _IOStreamType, address: tuple, server_hostname: str = None
self: _IOStreamType, address: Any, server_hostname: Optional[str] = None
) -> "Future[_IOStreamType]":
"""Connects the socket to a remote address without blocking.
@ -1226,32 +1205,27 @@ class IOStream(BaseIOStream):
self._connect_future = typing.cast("Future[IOStream]", future)
try:
self.socket.connect(address)
except socket.error as e:
except BlockingIOError:
# In non-blocking mode we expect connect() to raise an
# exception with EINPROGRESS or EWOULDBLOCK.
#
pass
except socket.error as e:
# On freebsd, other errors such as ECONNREFUSED may be
# returned immediately when attempting to connect to
# localhost, so handle them the same way as an error
# reported later in _handle_connect.
if (
errno_from_exception(e) not in _ERRNO_INPROGRESS
and errno_from_exception(e) not in _ERRNO_WOULDBLOCK
):
if future is None:
gen_log.warning(
"Connect error on fd %s: %s", self.socket.fileno(), e
)
self.close(exc_info=e)
return future
if future is None:
gen_log.warning("Connect error on fd %s: %s", self.socket.fileno(), e)
self.close(exc_info=e)
return future
self._add_io_state(self.io_loop.WRITE)
return future
def start_tls(
self,
server_side: bool,
ssl_options: Union[Dict[str, Any], ssl.SSLContext] = None,
server_hostname: str = None,
ssl_options: Optional[Union[Dict[str, Any], ssl.SSLContext]] = None,
server_hostname: Optional[str] = None,
) -> Awaitable["SSLIOStream"]:
"""Convert this `IOStream` to an `SSLIOStream`.
@ -1388,7 +1362,7 @@ class SSLIOStream(IOStream):
for `ssl.wrap_socket`
"""
self._ssl_options = kwargs.pop("ssl_options", _client_ssl_defaults)
super(SSLIOStream, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self._ssl_accepting = True
self._handshake_reading = False
self._handshake_writing = False
@ -1406,10 +1380,10 @@ class SSLIOStream(IOStream):
self._add_io_state(self.io_loop.WRITE)
def reading(self) -> bool:
return self._handshake_reading or super(SSLIOStream, self).reading()
return self._handshake_reading or super().reading()
def writing(self) -> bool:
return self._handshake_writing or super(SSLIOStream, self).writing()
return self._handshake_writing or super().writing()
def _do_ssl_handshake(self) -> None:
# Based on code from test_ssl.py in the python stdlib
@ -1436,6 +1410,12 @@ class SSLIOStream(IOStream):
)
return self.close(exc_info=err)
raise
except ssl.CertificateError as err:
# CertificateError can happen during handshake (hostname
# verification) and should be passed to user. Starting
# in Python 3.7, this error is a subclass of SSLError
# and will be handled by the previous block instead.
return self.close(exc_info=err)
except socket.error as err:
# Some port scans (e.g. nmap in -sT mode) have been known
# to cause do_handshake to raise EBADF and ENOTCONN, so make
@ -1499,16 +1479,16 @@ class SSLIOStream(IOStream):
if self._ssl_accepting:
self._do_ssl_handshake()
return
super(SSLIOStream, self)._handle_read()
super()._handle_read()
def _handle_write(self) -> None:
if self._ssl_accepting:
self._do_ssl_handshake()
return
super(SSLIOStream, self)._handle_write()
super()._handle_write()
def connect(
self, address: Tuple, server_hostname: str = None
self, address: Tuple, server_hostname: Optional[str] = None
) -> "Future[SSLIOStream]":
self._server_hostname = server_hostname
# Ignore the result of connect(). If it fails,
@ -1522,13 +1502,13 @@ class SSLIOStream(IOStream):
# (There's a test for it, but I think in practice users
# either wait for the connect before performing a write or
# they don't care about the connect Future at all)
fut = super(SSLIOStream, self).connect(address)
fut = super().connect(address)
fut.add_done_callback(lambda f: f.exception())
return self.wait_for_handshake()
def _handle_connect(self) -> None:
# Call the superclass method to check for errors.
super(SSLIOStream, self)._handle_connect()
super()._handle_connect()
if self.closed():
return
# When the connection is complete, wrap the socket for SSL
@ -1619,18 +1599,15 @@ class SSLIOStream(IOStream):
return None
else:
raise
except socket.error as e:
if e.args[0] in _ERRNO_WOULDBLOCK:
return None
else:
raise
except BlockingIOError:
return None
finally:
del buf
def _is_connreset(self, e: BaseException) -> bool:
if isinstance(e, ssl.SSLError) and e.args[0] == ssl.SSL_ERROR_EOF:
return True
return super(SSLIOStream, self)._is_connreset(e)
return super()._is_connreset(e)
class PipeIOStream(BaseIOStream):
@ -1640,13 +1617,15 @@ class PipeIOStream(BaseIOStream):
by `os.pipe`) rather than an open file object. Pipes are generally
one-way, so a `PipeIOStream` can be used for reading or writing but not
both.
``PipeIOStream`` is only available on Unix-based platforms.
"""
def __init__(self, fd: int, *args: Any, **kwargs: Any) -> None:
self.fd = fd
self._fio = io.FileIO(self.fd, "r+")
_set_nonblocking(fd)
super(PipeIOStream, self).__init__(*args, **kwargs)
os.set_blocking(fd, False)
super().__init__(*args, **kwargs)
def fileno(self) -> int:
return self.fd

45
lib/tornado_py3/locale.py

@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
@ -51,7 +49,7 @@ from tornado_py3.log import gen_log
from tornado_py3._locale_data import LOCALE_NAMES
from typing import Iterable, Any, Union, Dict
from typing import Iterable, Any, Union, Dict, Optional
_default_locale = "en_US"
_translations = {} # type: Dict[str, Any]
@ -88,7 +86,7 @@ def set_default_locale(code: str) -> None:
_supported_locales = frozenset(list(_translations.keys()) + [_default_locale])
def load_translations(directory: str, encoding: str = None) -> None:
def load_translations(directory: str, encoding: Optional[str] = None) -> None:
"""Loads translations from CSV files in a directory.
Translations are strings with optional Python-style named placeholders
@ -196,8 +194,6 @@ def load_gettext_translations(directory: str, domain: str) -> None:
msgfmt mydomain.po -o {directory}/pt_BR/LC_MESSAGES/mydomain.mo
"""
import gettext
global _translations
global _supported_locales
global _use_gettext
@ -306,7 +302,10 @@ class Locale(object):
]
def translate(
self, message: str, plural_message: str = None, count: int = None
self,
message: str,
plural_message: Optional[str] = None,
count: Optional[int] = None,
) -> str:
"""Returns the translation for the given message for this locale.
@ -318,7 +317,11 @@ class Locale(object):
raise NotImplementedError()
def pgettext(
self, context: str, message: str, plural_message: str = None, count: int = None
self,
context: str,
message: str,
plural_message: Optional[str] = None,
count: Optional[int] = None,
) -> str:
raise NotImplementedError()
@ -478,10 +481,13 @@ class CSVLocale(Locale):
def __init__(self, code: str, translations: Dict[str, Dict[str, str]]) -> None:
self.translations = translations
super(CSVLocale, self).__init__(code)
super().__init__(code)
def translate(
self, message: str, plural_message: str = None, count: int = None
self,
message: str,
plural_message: Optional[str] = None,
count: Optional[int] = None,
) -> str:
if plural_message is not None:
assert count is not None
@ -495,7 +501,11 @@ class CSVLocale(Locale):
return message_dict.get(message, message)
def pgettext(
self, context: str, message: str, plural_message: str = None, count: int = None
self,
context: str,
message: str,
plural_message: Optional[str] = None,
count: Optional[int] = None,
) -> str:
if self.translations:
gen_log.warning("pgettext is not supported by CSVLocale")
@ -510,10 +520,13 @@ class GettextLocale(Locale):
self.gettext = translations.gettext
# self.gettext must exist before __init__ is called, since it
# calls into self.translate
super(GettextLocale, self).__init__(code)
super().__init__(code)
def translate(
self, message: str, plural_message: str = None, count: int = None
self,
message: str,
plural_message: Optional[str] = None,
count: Optional[int] = None,
) -> str:
if plural_message is not None:
assert count is not None
@ -522,7 +535,11 @@ class GettextLocale(Locale):
return self.gettext(message)
def pgettext(
self, context: str, message: str, plural_message: str = None, count: int = None
self,
context: str,
message: str,
plural_message: Optional[str] = None,
count: Optional[int] = None,
) -> str:
"""Allows to set context for translation, accepts plural forms.

29
lib/tornado_py3/locks.py

@ -13,7 +13,6 @@
# under the License.
import collections
from concurrent.futures import CancelledError
import datetime
import types
@ -112,7 +111,7 @@ class Condition(_TimeoutGarbageCollector):
"""
def __init__(self) -> None:
super(Condition, self).__init__()
super().__init__()
self.io_loop = ioloop.IOLoop.current()
def __repr__(self) -> str:
@ -121,7 +120,9 @@ class Condition(_TimeoutGarbageCollector):
result += " waiters[%s]" % len(self._waiters)
return result + ">"
def wait(self, timeout: Union[float, datetime.timedelta] = None) -> Awaitable[bool]:
def wait(
self, timeout: Optional[Union[float, datetime.timedelta]] = None
) -> Awaitable[bool]:
"""Wait for `.notify`.
Returns a `.Future` that resolves ``True`` if the condition is notified,
@ -231,7 +232,9 @@ class Event(object):
"""
self._value = False
def wait(self, timeout: Union[float, datetime.timedelta] = None) -> Awaitable[None]:
def wait(
self, timeout: Optional[Union[float, datetime.timedelta]] = None
) -> Awaitable[None]:
"""Block until the internal flag is true.
Returns an awaitable, which raises `tornado.util.TimeoutError` after a
@ -246,9 +249,7 @@ class Event(object):
if timeout is None:
return fut
else:
timeout_fut = gen.with_timeout(
timeout, fut, quiet_exceptions=(CancelledError,)
)
timeout_fut = gen.with_timeout(timeout, fut)
# This is a slightly clumsy workaround for the fact that
# gen.with_timeout doesn't cancel its futures. Cancelling
# fut will remove it from the waiters list.
@ -379,14 +380,14 @@ class Semaphore(_TimeoutGarbageCollector):
"""
def __init__(self, value: int = 1) -> None:
super(Semaphore, self).__init__()
super().__init__()
if value < 0:
raise ValueError("semaphore initial value must be >= 0")
self._value = value
def __repr__(self) -> str:
res = super(Semaphore, self).__repr__()
res = super().__repr__()
extra = (
"locked" if self._value == 0 else "unlocked,value:{0}".format(self._value)
)
@ -412,7 +413,7 @@ class Semaphore(_TimeoutGarbageCollector):
break
def acquire(
self, timeout: Union[float, datetime.timedelta] = None
self, timeout: Optional[Union[float, datetime.timedelta]] = None
) -> Awaitable[_ReleasingContextManager]:
"""Decrement the counter. Returns an awaitable.
@ -472,14 +473,14 @@ class BoundedSemaphore(Semaphore):
"""
def __init__(self, value: int = 1) -> None:
super(BoundedSemaphore, self).__init__(value=value)
super().__init__(value=value)
self._initial_value = value
def release(self) -> None:
"""Increment the counter and wake one waiter."""
if self._value >= self._initial_value:
raise ValueError("Semaphore released too many times")
super(BoundedSemaphore, self).release()
super().release()
class Lock(object):
@ -494,7 +495,7 @@ class Lock(object):
A Lock can be used as an async context manager with the ``async
with`` statement:
>>> from tornado_py3 import locks
>>> from tornado import locks
>>> lock = locks.Lock()
>>>
>>> async def f():
@ -526,7 +527,7 @@ class Lock(object):
return "<%s _block=%s>" % (self.__class__.__name__, self._block)
def acquire(
self, timeout: Union[float, datetime.timedelta] = None
self, timeout: Optional[Union[float, datetime.timedelta]] = None
) -> Awaitable[_ReleasingContextManager]:
"""Attempt to lock. Returns an awaitable.

7
lib/tornado_py3/log.py

@ -44,7 +44,7 @@ try:
except ImportError:
curses = None # type: ignore
from typing import Dict, Any, cast
from typing import Dict, Any, cast, Optional
# Logger objects for internal tornado use
access_log = logging.getLogger("tornado.access")
@ -110,6 +110,7 @@ class LogFormatter(logging.Formatter):
logging.INFO: 2, # Green
logging.WARNING: 3, # Yellow
logging.ERROR: 1, # Red
logging.CRITICAL: 5, # Magenta
}
def __init__(
@ -207,7 +208,9 @@ class LogFormatter(logging.Formatter):
return formatted.replace("\n", "\n ")
def enable_pretty_logging(options: Any = None, logger: logging.Logger = None) -> None:
def enable_pretty_logging(
options: Any = None, logger: Optional[logging.Logger] = None
) -> None:
"""Turns on formatted logging output as configured.
This is called automatically by `tornado.options.parse_command_line`

77
lib/tornado_py3/netutil.py

@ -25,14 +25,9 @@ import stat
from tornado_py3.concurrent import dummy_executor, run_on_executor
from tornado_py3.ioloop import IOLoop
from tornado_py3.platform.auto import set_close_exec
from tornado_py3.util import Configurable, errno_from_exception
import typing
from typing import List, Callable, Any, Type, Dict, Union, Tuple, Awaitable
if typing.TYPE_CHECKING:
from asyncio import Future # noqa: F401
from typing import List, Callable, Any, Type, Dict, Union, Tuple, Awaitable, Optional
# Note that the naming of ssl.Purpose is confusing; the purpose
# of a context is to authentiate the opposite side of the connection.
@ -53,24 +48,16 @@ u"foo".encode("idna")
# For undiagnosed reasons, 'latin1' codec may also need to be preloaded.
u"foo".encode("latin1")
# These errnos indicate that a non-blocking operation must be retried
# at a later time. On most platforms they're the same value, but on
# some they differ.
_ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN)
if hasattr(errno, "WSAEWOULDBLOCK"):
_ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,) # type: ignore
# Default backlog used when calling sock.listen()
_DEFAULT_BACKLOG = 128
def bind_sockets(
port: int,
address: str = None,
address: Optional[str] = None,
family: socket.AddressFamily = socket.AF_UNSPEC,
backlog: int = _DEFAULT_BACKLOG,
flags: int = None,
flags: Optional[int] = None,
reuse_port: bool = False,
) -> List[socket.socket]:
"""Creates listening sockets bound to the given port and address.
@ -142,7 +129,6 @@ def bind_sockets(
if errno_from_exception(e) == errno.EAFNOSUPPORT:
continue
raise
set_close_exec(sock.fileno())
if os.name != "nt":
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
@ -171,7 +157,29 @@ def bind_sockets(
sockaddr = tuple([host, bound_port] + list(sockaddr[2:]))
sock.setblocking(False)
sock.bind(sockaddr)
try:
sock.bind(sockaddr)
except OSError as e:
if (
errno_from_exception(e) == errno.EADDRNOTAVAIL
and address == "localhost"
and sockaddr[0] == "::1"
):
# On some systems (most notably docker with default
# configurations), ipv6 is partially disabled:
# socket.has_ipv6 is true, we can create AF_INET6
# sockets, and getaddrinfo("localhost", ...,
# AF_PASSIVE) resolves to ::1, but we get an error
# when binding.
#
# Swallow the error, but only for this specific case.
# If EADDRNOTAVAIL occurs in other situations, it
# might be a real problem like a typo in a
# configuration.
sock.close()
continue
else:
raise
bound_port = sock.getsockname()[1]
sock.listen(backlog)
sockets.append(sock)
@ -193,7 +201,6 @@ if hasattr(socket, "AF_UNIX"):
`bind_sockets`)
"""
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
set_close_exec(sock.fileno())
try:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
except socket.error as e:
@ -203,9 +210,8 @@ if hasattr(socket, "AF_UNIX"):
sock.setblocking(False)
try:
st = os.stat(file)
except OSError as err:
if errno_from_exception(err) != errno.ENOENT:
raise
except FileNotFoundError:
pass
else:
if stat.S_ISSOCK(st.st_mode):
os.remove(file)
@ -258,18 +264,15 @@ def add_accept_handler(
return
try:
connection, address = sock.accept()
except socket.error as e:
# _ERRNO_WOULDBLOCK indicate we have accepted every
except BlockingIOError:
# EWOULDBLOCK indicates we have accepted every
# connection that is available.
if errno_from_exception(e) in _ERRNO_WOULDBLOCK:
return
return
except ConnectionAbortedError:
# ECONNABORTED indicates that there was a connection
# but it was closed while still in the accept queue.
# (observed on FreeBSD).
if errno_from_exception(e) == errno.ECONNABORTED:
continue
raise
set_close_exec(connection.fileno())
continue
callback(connection, address)
def remove_handler() -> None:
@ -380,7 +383,7 @@ def _resolve_addr(
results = []
for fam, socktype, proto, canonname, address in addrinfo:
results.append((fam, address))
return results
return results # type: ignore
class DefaultExecutorResolver(Resolver):
@ -417,7 +420,9 @@ class ExecutorResolver(Resolver):
"""
def initialize(
self, executor: concurrent.futures.Executor = None, close_executor: bool = True
self,
executor: Optional[concurrent.futures.Executor] = None,
close_executor: bool = True,
) -> None:
self.io_loop = IOLoop.current()
if executor is not None:
@ -451,7 +456,7 @@ class BlockingResolver(ExecutorResolver):
"""
def initialize(self) -> None: # type: ignore
super(BlockingResolver, self).initialize()
super().initialize()
class ThreadedResolver(ExecutorResolver):
@ -480,9 +485,7 @@ class ThreadedResolver(ExecutorResolver):
def initialize(self, num_threads: int = 10) -> None: # type: ignore
threadpool = ThreadedResolver._create_threadpool(num_threads)
super(ThreadedResolver, self).initialize(
executor=threadpool, close_executor=False
)
super().initialize(executor=threadpool, close_executor=False)
@classmethod
def _create_threadpool(
@ -591,7 +594,7 @@ def ssl_options_to_context(
def ssl_wrap_socket(
socket: socket.socket,
ssl_options: Union[Dict[str, Any], ssl.SSLContext],
server_hostname: str = None,
server_hostname: Optional[str] = None,
**kwargs: Any
) -> ssl.SSLSocket:
"""Returns an ``ssl.SSLSocket`` wrapping the given socket.

61
lib/tornado_py3/options.py

@ -104,11 +104,18 @@ from tornado_py3.escape import _unicode, native_str
from tornado_py3.log import define_logging_options
from tornado_py3.util import basestring_type, exec_in
import typing
from typing import Any, Iterator, Iterable, Tuple, Set, Dict, Callable, List, TextIO
if typing.TYPE_CHECKING:
from typing import Optional # noqa: F401
from typing import (
Any,
Iterator,
Iterable,
Tuple,
Set,
Dict,
Callable,
List,
TextIO,
Optional,
)
class Error(Exception):
@ -211,12 +218,12 @@ class OptionParser(object):
self,
name: str,
default: Any = None,
type: type = None,
help: str = None,
metavar: str = None,
type: Optional[type] = None,
help: Optional[str] = None,
metavar: Optional[str] = None,
multiple: bool = False,
group: str = None,
callback: Callable[[Any], None] = None,
group: Optional[str] = None,
callback: Optional[Callable[[Any], None]] = None,
) -> None:
"""Defines a new command line option.
@ -295,7 +302,7 @@ class OptionParser(object):
self._options[normalized] = option
def parse_command_line(
self, args: List[str] = None, final: bool = True
self, args: Optional[List[str]] = None, final: bool = True
) -> List[str]:
"""Parses all options given on the command line (defaults to
`sys.argv`).
@ -417,7 +424,7 @@ class OptionParser(object):
if final:
self.run_parse_callbacks()
def print_help(self, file: TextIO = None) -> None:
def print_help(self, file: Optional[TextIO] = None) -> None:
"""Prints all the command line options to stderr (or another file)."""
if file is None:
file = sys.stderr
@ -484,7 +491,7 @@ class _Mockable(object):
As of ``mock`` version 1.0.1, when an object uses ``__getattr__``
hooks instead of ``__dict__``, ``patch.__exit__`` tries to delete
the attribute it set instead of setting a new one (assuming that
the object does not catpure ``__setattr__``, so the patch
the object does not capture ``__setattr__``, so the patch
created a new attribute in ``__dict__``).
_Mockable's getattr and setattr pass through to the underlying
@ -518,13 +525,13 @@ class _Option(object):
self,
name: str,
default: Any = None,
type: type = None,
help: str = None,
metavar: str = None,
type: Optional[type] = None,
help: Optional[str] = None,
metavar: Optional[str] = None,
multiple: bool = False,
file_name: str = None,
group_name: str = None,
callback: Callable[[Any], None] = None,
file_name: Optional[str] = None,
group_name: Optional[str] = None,
callback: Optional[Callable[[Any], None]] = None,
) -> None:
if default is None and multiple:
default = []
@ -667,12 +674,12 @@ All defined options are available as attributes on this object.
def define(
name: str,
default: Any = None,
type: type = None,
help: str = None,
metavar: str = None,
type: Optional[type] = None,
help: Optional[str] = None,
metavar: Optional[str] = None,
multiple: bool = False,
group: str = None,
callback: Callable[[Any], None] = None,
group: Optional[str] = None,
callback: Optional[Callable[[Any], None]] = None,
) -> None:
"""Defines an option in the global namespace.
@ -690,7 +697,9 @@ def define(
)
def parse_command_line(args: List[str] = None, final: bool = True) -> List[str]:
def parse_command_line(
args: Optional[List[str]] = None, final: bool = True
) -> List[str]:
"""Parses global options from the command line.
See `OptionParser.parse_command_line`.
@ -706,7 +715,7 @@ def parse_config_file(path: str, final: bool = True) -> None:
return options.parse_config_file(path, final=final)
def print_help(file: TextIO = None) -> None:
def print_help(file: Optional[TextIO] = None) -> None:
"""Prints all the command line options to stderr (or another file).
See `OptionParser.print_help`.

315
lib/tornado_py3/platform/asyncio.py

@ -14,35 +14,83 @@ the same event loop.
.. note::
Tornado requires the `~asyncio.AbstractEventLoop.add_reader` family of
methods, so it is not compatible with the `~asyncio.ProactorEventLoop` on
Windows. Use the `~asyncio.SelectorEventLoop` instead.
Tornado is designed to use a selector-based event loop. On Windows,
where a proactor-based event loop has been the default since Python 3.8,
a selector event loop is emulated by running ``select`` on a separate thread.
Configuring ``asyncio`` to use a selector event loop may improve performance
of Tornado (but may reduce performance of other ``asyncio``-based libraries
in the same process).
"""
import asyncio
import atexit
import concurrent.futures
import errno
import functools
import select
import socket
import sys
from threading import get_ident
import threading
import typing
from tornado_py3.gen import convert_yielded
from tornado_py3.ioloop import IOLoop, _Selectable
import asyncio
import typing
from typing import Any, TypeVar, Awaitable, Callable, Union, Optional
from typing import Any, TypeVar, Awaitable, Callable, Union, Optional, List, Tuple, Dict
if typing.TYPE_CHECKING:
from typing import Set, Dict, Tuple # noqa: F401
from typing import Set # noqa: F401
from typing_extensions import Protocol
class _HasFileno(Protocol):
def fileno(self) -> int:
pass
_FileDescriptorLike = Union[int, _HasFileno]
_T = TypeVar("_T")
# Collection of selector thread event loops to shut down on exit.
_selector_loops = set() # type: Set[AddThreadSelectorEventLoop]
def _atexit_callback() -> None:
    """Shut down every selector thread registered in ``_selector_loops``.

    Registered with ``atexit`` below so that the daemon selector threads
    started by ``AddThreadSelectorEventLoop`` are signaled and joined
    cleanly at interpreter shutdown.
    """
    for loop in _selector_loops:
        with loop._select_cond:
            # Tell the selector thread to exit its loop...
            loop._closing_selector = True
            loop._select_cond.notify()
        try:
            # ...and wake it if it is currently blocked in select().
            loop._waker_w.send(b"a")
        except BlockingIOError:
            pass
        # If we don't join our (daemon) thread here, we may get a deadlock
        # during interpreter shutdown. I don't really understand why. This
        # deadlock happens every time in CI (both travis and appveyor) but
        # I've never been able to reproduce locally.
        loop._thread.join()
    _selector_loops.clear()


atexit.register(_atexit_callback)
class BaseAsyncIOLoop(IOLoop):
def initialize( # type: ignore
self, asyncio_loop: asyncio.AbstractEventLoop, **kwargs: Any
) -> None:
# asyncio_loop is always the real underlying IOLoop. This is used in
# ioloop.py to maintain the asyncio-to-ioloop mappings.
self.asyncio_loop = asyncio_loop
# selector_loop is an event loop that implements the add_reader family of
# methods. Usually the same as asyncio_loop but differs on platforms such
# as windows where the default event loop does not implement these methods.
self.selector_loop = asyncio_loop
if hasattr(asyncio, "ProactorEventLoop") and isinstance(
asyncio_loop, asyncio.ProactorEventLoop # type: ignore
):
# Ignore this line for mypy because the abstract method checker
# doesn't understand dynamic proxies.
self.selector_loop = AddThreadSelectorEventLoop(asyncio_loop) # type: ignore
# Maps fd to (fileobj, handler function) pair (as in IOLoop.add_handler)
self.handlers = {} # type: Dict[int, Tuple[Union[int, _Selectable], Callable]]
# Set of fds listening for reads/writes
@ -67,10 +115,10 @@ class BaseAsyncIOLoop(IOLoop):
self._thread_identity = 0
super(BaseAsyncIOLoop, self).initialize(**kwargs)
super().initialize(**kwargs)
def assign_thread_identity() -> None:
self._thread_identity = get_ident()
self._thread_identity = threading.get_ident()
self.add_callback(assign_thread_identity)
@ -87,6 +135,8 @@ class BaseAsyncIOLoop(IOLoop):
# assume it was closed from the asyncio side, and do this
# cleanup for us, leading to a KeyError.
del IOLoop._ioloop_for_asyncio[self.asyncio_loop]
if self.selector_loop is not self.asyncio_loop:
self.selector_loop.close()
self.asyncio_loop.close()
def add_handler(
@ -97,29 +147,29 @@ class BaseAsyncIOLoop(IOLoop):
raise ValueError("fd %s added twice" % fd)
self.handlers[fd] = (fileobj, handler)
if events & IOLoop.READ:
self.asyncio_loop.add_reader(fd, self._handle_events, fd, IOLoop.READ)
self.selector_loop.add_reader(fd, self._handle_events, fd, IOLoop.READ)
self.readers.add(fd)
if events & IOLoop.WRITE:
self.asyncio_loop.add_writer(fd, self._handle_events, fd, IOLoop.WRITE)
self.selector_loop.add_writer(fd, self._handle_events, fd, IOLoop.WRITE)
self.writers.add(fd)
def update_handler(self, fd: Union[int, _Selectable], events: int) -> None:
fd, fileobj = self.split_fd(fd)
if events & IOLoop.READ:
if fd not in self.readers:
self.asyncio_loop.add_reader(fd, self._handle_events, fd, IOLoop.READ)
self.selector_loop.add_reader(fd, self._handle_events, fd, IOLoop.READ)
self.readers.add(fd)
else:
if fd in self.readers:
self.asyncio_loop.remove_reader(fd)
self.selector_loop.remove_reader(fd)
self.readers.remove(fd)
if events & IOLoop.WRITE:
if fd not in self.writers:
self.asyncio_loop.add_writer(fd, self._handle_events, fd, IOLoop.WRITE)
self.selector_loop.add_writer(fd, self._handle_events, fd, IOLoop.WRITE)
self.writers.add(fd)
else:
if fd in self.writers:
self.asyncio_loop.remove_writer(fd)
self.selector_loop.remove_writer(fd)
self.writers.remove(fd)
def remove_handler(self, fd: Union[int, _Selectable]) -> None:
@ -127,10 +177,10 @@ class BaseAsyncIOLoop(IOLoop):
if fd not in self.handlers:
return
if fd in self.readers:
self.asyncio_loop.remove_reader(fd)
self.selector_loop.remove_reader(fd)
self.readers.remove(fd)
if fd in self.writers:
self.asyncio_loop.remove_writer(fd)
self.selector_loop.remove_writer(fd)
self.writers.remove(fd)
del self.handlers[fd]
@ -169,7 +219,7 @@ class BaseAsyncIOLoop(IOLoop):
timeout.cancel() # type: ignore
def add_callback(self, callback: Callable, *args: Any, **kwargs: Any) -> None:
if get_ident() == self._thread_identity:
if threading.get_ident() == self._thread_identity:
call_soon = self.asyncio_loop.call_soon
else:
call_soon = self.asyncio_loop.call_soon_threadsafe
@ -182,6 +232,11 @@ class BaseAsyncIOLoop(IOLoop):
# add_callback that completes without error will
# eventually execute).
pass
except AttributeError:
# ProactorEventLoop may raise this instead of RuntimeError
# if call_soon_threadsafe races with a call to close().
# Swallow it too for consistency.
pass
def add_callback_from_signal(
self, callback: Callable, *args: Any, **kwargs: Any
@ -221,7 +276,7 @@ class AsyncIOMainLoop(BaseAsyncIOLoop):
"""
def initialize(self, **kwargs: Any) -> None: # type: ignore
super(AsyncIOMainLoop, self).initialize(asyncio.get_event_loop(), **kwargs)
super().initialize(asyncio.get_event_loop(), **kwargs)
def make_current(self) -> None:
# AsyncIOMainLoop already refers to the current asyncio loop so
@ -253,7 +308,7 @@ class AsyncIOLoop(BaseAsyncIOLoop):
self.is_current = False
loop = asyncio.new_event_loop()
try:
super(AsyncIOLoop, self).initialize(loop, **kwargs)
super().initialize(loop, **kwargs)
except Exception:
# If initialize() does not succeed (taking ownership of the loop),
# we have to close it.
@ -263,7 +318,7 @@ class AsyncIOLoop(BaseAsyncIOLoop):
def close(self, all_fds: bool = False) -> None:
if self.is_current:
self.clear_current()
super(AsyncIOLoop, self).close(all_fds=all_fds)
super().close(all_fds=all_fds)
def make_current(self) -> None:
if not self.is_current:
@ -338,9 +393,219 @@ class AnyThreadEventLoopPolicy(_BasePolicy): # type: ignore
try:
return super().get_event_loop()
except (RuntimeError, AssertionError):
# This was an AssertionError in python 3.4.2 (which ships with debian jessie)
# This was an AssertionError in Python 3.4.2 (which ships with Debian Jessie)
# and changed to a RuntimeError in 3.4.3.
# "There is no current event loop in thread %r"
loop = self.new_event_loop()
self.set_event_loop(loop)
return loop
class AddThreadSelectorEventLoop(asyncio.AbstractEventLoop):
    """Wrap an event loop to add implementations of the ``add_reader`` method family.

    Instances of this class start a second thread to run a selector.
    This thread is completely hidden from the user; all callbacks are
    run on the wrapped event loop's thread.

    This class is used automatically by Tornado; applications should not need
    to refer to it directly.

    It is safe to wrap any event loop with this class, although it only makes sense
    for event loops that do not implement the ``add_reader`` family of methods
    themselves (i.e. ``WindowsProactorEventLoop``)

    Closing the ``AddThreadSelectorEventLoop`` also closes the wrapped event loop.
    """

    # This class is a __getattribute__-based proxy. All attributes other than those
    # in this set are proxied through to the underlying loop.
    MY_ATTRIBUTES = {
        "_consume_waker",
        "_select_cond",
        "_select_args",
        "_closing_selector",
        "_thread",
        "_handle_event",
        "_readers",
        "_real_loop",
        "_start_select",
        "_run_select",
        "_handle_select",
        "_wake_selector",
        "_waker_r",
        "_waker_w",
        "_writers",
        "add_reader",
        "add_writer",
        "close",
        "remove_reader",
        "remove_writer",
    }

    def __getattribute__(self, name: str) -> Any:
        # Serve our own attributes/methods directly; delegate everything
        # else to the wrapped event loop.
        if name in AddThreadSelectorEventLoop.MY_ATTRIBUTES:
            return super().__getattribute__(name)
        return getattr(self._real_loop, name)

    def __init__(self, real_loop: asyncio.AbstractEventLoop) -> None:
        self._real_loop = real_loop

        # Create a thread to run the select system call. We manage this thread
        # manually so we can trigger a clean shutdown from an atexit hook. Note
        # that due to the order of operations at shutdown, only daemon threads
        # can be shut down in this way (non-daemon threads would require the
        # introduction of a new hook: https://bugs.python.org/issue41962)
        self._select_cond = threading.Condition()
        # (readers, writers) to select on, handed from the event loop thread
        # to the selector thread under _select_cond; None means "not armed".
        self._select_args = (
            None
        )  # type: Optional[Tuple[List[_FileDescriptorLike], List[_FileDescriptorLike]]]
        self._closing_selector = False
        self._thread = threading.Thread(
            name="Tornado selector", daemon=True, target=self._run_select,
        )
        self._thread.start()
        # Start the select loop once the loop is started.
        self._real_loop.call_soon(self._start_select)

        # fd -> callback maps, mutated only on the event loop thread.
        self._readers = {}  # type: Dict[_FileDescriptorLike, Callable]
        self._writers = {}  # type: Dict[_FileDescriptorLike, Callable]

        # Writing to _waker_w will wake up the selector thread, which
        # watches for _waker_r to be readable.
        self._waker_r, self._waker_w = socket.socketpair()
        self._waker_r.setblocking(False)
        self._waker_w.setblocking(False)
        _selector_loops.add(self)
        self.add_reader(self._waker_r, self._consume_waker)

    def __del__(self) -> None:
        # If the top-level application code uses asyncio interfaces to
        # start and stop the event loop, no objects created in Tornado
        # can get a clean shutdown notification. If we're just left to
        # be GC'd, we must explicitly close our sockets to avoid
        # logging warnings.
        _selector_loops.discard(self)
        self._waker_r.close()
        self._waker_w.close()

    def close(self) -> None:
        """Stop the selector thread, release our sockets, and close the wrapped loop."""
        with self._select_cond:
            self._closing_selector = True
            self._select_cond.notify()
        self._wake_selector()
        self._thread.join()
        _selector_loops.discard(self)
        self._waker_r.close()
        self._waker_w.close()
        self._real_loop.close()

    def _wake_selector(self) -> None:
        # Nudge the selector thread out of a blocking select() call.
        try:
            self._waker_w.send(b"a")
        except BlockingIOError:
            pass

    def _consume_waker(self) -> None:
        # Drain wake-up bytes so _waker_r does not stay permanently readable.
        try:
            self._waker_r.recv(1024)
        except BlockingIOError:
            pass

    def _start_select(self) -> None:
        # Capture reader and writer sets here in the event loop
        # thread to avoid any problems with concurrent
        # modification while the select loop uses them.
        with self._select_cond:
            assert self._select_args is None
            self._select_args = (list(self._readers.keys()), list(self._writers.keys()))
            self._select_cond.notify()

    def _run_select(self) -> None:
        # Body of the selector thread: wait for fd sets from the event loop
        # thread, run select(), and post the ready sets back to the loop.
        while True:
            with self._select_cond:
                while self._select_args is None and not self._closing_selector:
                    self._select_cond.wait()
                if self._closing_selector:
                    return
                assert self._select_args is not None
                to_read, to_write = self._select_args
                self._select_args = None

            # We use the simpler interface of the select module instead of
            # the more stateful interface in the selectors module because
            # this class is only intended for use on windows, where
            # select.select is the only option. The selector interface
            # does not have well-documented thread-safety semantics that
            # we can rely on so ensuring proper synchronization would be
            # tricky.
            try:
                # On windows, selecting on a socket for write will not
                # return the socket when there is an error (but selecting
                # for reads works). Also select for errors when selecting
                # for writes, and merge the results.
                #
                # This pattern is also used in
                # https://github.com/python/cpython/blob/v3.8.0/Lib/selectors.py#L312-L317
                rs, ws, xs = select.select(to_read, to_write, to_write)
                ws = ws + xs
            except OSError as e:
                # After remove_reader or remove_writer is called, the file
                # descriptor may subsequently be closed on the event loop
                # thread. It's possible that this select thread hasn't
                # gotten into the select system call by the time that
                # happens in which case (at least on macOS), select may
                # raise a "bad file descriptor" error. If we get that
                # error, check and see if we're also being woken up by
                # polling the waker alone. If we are, just return to the
                # event loop and we'll get the updated set of file
                # descriptors on the next iteration. Otherwise, raise the
                # original error.
                if e.errno == getattr(errno, "WSAENOTSOCK", errno.EBADF):
                    rs, _, _ = select.select([self._waker_r.fileno()], [], [], 0)
                    if rs:
                        ws = []
                    else:
                        raise
                else:
                    raise
            self._real_loop.call_soon_threadsafe(self._handle_select, rs, ws)

    def _handle_select(
        self, rs: List["_FileDescriptorLike"], ws: List["_FileDescriptorLike"]
    ) -> None:
        # Runs on the event loop thread: dispatch ready fds, then re-arm
        # the selector thread with the (possibly updated) fd sets.
        for r in rs:
            self._handle_event(r, self._readers)
        for w in ws:
            self._handle_event(w, self._writers)
        self._start_select()

    def _handle_event(
        self, fd: "_FileDescriptorLike", cb_map: Dict["_FileDescriptorLike", Callable],
    ) -> None:
        # The callback may have been removed between select() firing and
        # this running on the event loop thread; ignore unknown fds.
        try:
            callback = cb_map[fd]
        except KeyError:
            return
        callback()

    def add_reader(
        self, fd: "_FileDescriptorLike", callback: Callable[..., None], *args: Any
    ) -> None:
        """Register *callback* to run when *fd* is readable."""
        self._readers[fd] = functools.partial(callback, *args)
        self._wake_selector()

    def add_writer(
        self, fd: "_FileDescriptorLike", callback: Callable[..., None], *args: Any
    ) -> None:
        """Register *callback* to run when *fd* is writable."""
        self._writers[fd] = functools.partial(callback, *args)
        self._wake_selector()

    def remove_reader(self, fd: "_FileDescriptorLike") -> None:
        """Stop watching *fd* for readability. Raises KeyError if not registered."""
        del self._readers[fd]
        self._wake_selector()

    def remove_writer(self, fd: "_FileDescriptorLike") -> None:
        """Stop watching *fd* for writability. Raises KeyError if not registered."""
        del self._writers[fd]
        self._wake_selector()

32
lib/tornado_py3/platform/auto.py

@ -1,32 +0,0 @@
#
# Copyright 2011 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of platform-specific functionality.
For each function or class described in `tornado.platform.interface`,
the appropriate platform-specific implementation exists in this module.
Most code that needs access to this functionality should do e.g.::
from tornado_py3.platform.auto import set_close_exec
"""
import os
if os.name == "nt":
from tornado_py3.platform.windows import set_close_exec
else:
from tornado_py3.platform.posix import set_close_exec
__all__ = ["set_close_exec"]

4
lib/tornado_py3/platform/auto.pyi

@ -1,4 +0,0 @@
# auto.py is full of patterns mypy doesn't like, so for type checking
# purposes we replace it with interface.py.
from .interface import *

26
lib/tornado_py3/platform/interface.py

@ -1,26 +0,0 @@
#
# Copyright 2011 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Interfaces for platform-specific functionality.
This module exists primarily for documentation purposes and as base classes
for other tornado.platform modules. Most code should import the appropriate
implementation from `tornado.platform.auto`.
"""
def set_close_exec(fd: int) -> None:
    """Set the close-on-exec bit (``FD_CLOEXEC``) for a file descriptor.

    This is an interface stub; the platform-specific modules supply the
    real implementation.
    """
    raise NotImplementedError()

29
lib/tornado_py3/platform/posix.py

@ -1,29 +0,0 @@
#
# Copyright 2011 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Posix implementations of platform-specific functionality."""
import fcntl
import os
def set_close_exec(fd: int) -> None:
    """Mark *fd* close-on-exec by OR-ing ``FD_CLOEXEC`` into its fd flags."""
    old_flags = fcntl.fcntl(fd, fcntl.F_GETFD)
    fcntl.fcntl(fd, fcntl.F_SETFD, old_flags | fcntl.FD_CLOEXEC)
def _set_nonblocking(fd: int) -> None:
    """Put *fd* into non-blocking mode by OR-ing ``O_NONBLOCK`` into its status flags."""
    status_flags = fcntl.fcntl(fd, fcntl.F_GETFL)
    fcntl.fcntl(fd, fcntl.F_SETFL, status_flags | os.O_NONBLOCK)

35
lib/tornado_py3/platform/twisted.py

@ -1,6 +1,3 @@
# Author: Ovidiu Predescu
# Date: July 2011
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
@ -12,13 +9,7 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Bridges between the Twisted reactor and Tornado IOLoop.
This module lets you run applications and libraries written for
Twisted in a Tornado application. It can be used in two modes,
depending on which library's underlying event loop you want to use.
This module has been tested with Twisted versions 11.0.0 and newer.
"""Bridges between the Twisted package and Tornado.
"""
import socket
@ -113,6 +104,30 @@ class TwistedResolver(Resolver):
return result
def install() -> None:
    """Install ``AsyncioSelectorReactor`` as the default Twisted reactor.

    .. deprecated:: 5.1

       This function is provided for backwards compatibility; code
       that does not require compatibility with older versions of
       Tornado should use
       ``twisted.internet.asyncioreactor.install()`` directly.

    .. versionchanged:: 6.0.3

       In Tornado 5.x and before, this function installed a reactor
       based on the Tornado ``IOLoop``. When that reactor
       implementation was removed in Tornado 6.0.0, this function was
       removed as well. It was restored in Tornado 6.0.3 using the
       ``asyncio`` reactor instead.

    """
    # Import locally so twisted stays an optional dependency; alias the
    # installer so it does not shadow this function's own name.
    from twisted.internet.asyncioreactor import install as _install_asyncio_reactor

    _install_asyncio_reactor()
if hasattr(gen.convert_yielded, "register"):
@gen.convert_yielded.register(Deferred) # type: ignore

22
lib/tornado_py3/platform/windows.py

@ -1,22 +0,0 @@
# NOTE: win32 support is currently experimental, and not recommended
# for production use.
import ctypes
import ctypes.wintypes

# Win32 API binding used to clear the inheritance flag on a handle.
# See: http://msdn.microsoft.com/en-us/library/ms724935(VS.85).aspx
SetHandleInformation = ctypes.windll.kernel32.SetHandleInformation  # type: ignore
SetHandleInformation.argtypes = (
    ctypes.wintypes.HANDLE,
    ctypes.wintypes.DWORD,
    ctypes.wintypes.DWORD,
)
SetHandleInformation.restype = ctypes.wintypes.BOOL

# Handle-flag mask for inheritability (the win32 analog of FD_CLOEXEC).
HANDLE_FLAG_INHERIT = 0x00000001


def set_close_exec(fd: int) -> None:
    """Make the handle *fd* non-inheritable (win32 equivalent of close-on-exec).

    Raises ``ctypes.WinError`` if the underlying win32 call fails.
    """
    success = SetHandleInformation(fd, HANDLE_FLAG_INHERIT, 0)
    if not success:
        raise ctypes.WinError()  # type: ignore

66
lib/tornado_py3/process.py

@ -17,7 +17,6 @@
the server into multiple processes and managing subprocesses.
"""
import errno
import os
import multiprocessing
import signal
@ -35,11 +34,9 @@ from tornado_py3.concurrent import (
from tornado_py3 import ioloop
from tornado_py3.iostream import PipeIOStream
from tornado_py3.log import gen_log
from tornado_py3.platform.auto import set_close_exec
from tornado_py3.util import errno_from_exception
import typing
from typing import Tuple, Optional, Any, Callable
from typing import Optional, Any, Callable
if typing.TYPE_CHECKING:
from typing import List # noqa: F401
@ -57,7 +54,7 @@ def cpu_count() -> int:
except NotImplementedError:
pass
try:
return os.sysconf("SC_NPROCESSORS_CONF")
return os.sysconf("SC_NPROCESSORS_CONF") # type: ignore
except (AttributeError, ValueError):
pass
gen_log.error("Could not detect number of processors; assuming 1")
@ -79,17 +76,12 @@ def _reseed_random() -> None:
random.seed(seed)
def _pipe_cloexec() -> Tuple[int, int]:
    """Create a pipe whose ends are both marked close-on-exec.

    Returns the ``(read_fd, write_fd)`` pair from ``os.pipe``.
    """
    r, w = os.pipe()
    set_close_exec(r)
    set_close_exec(w)
    return r, w
_task_id = None
def fork_processes(num_processes: Optional[int], max_restarts: int = None) -> int:
def fork_processes(
num_processes: Optional[int], max_restarts: Optional[int] = None
) -> int:
"""Starts multiple worker processes.
If ``num_processes`` is None or <= 0, we detect the number of cores
@ -110,12 +102,17 @@ def fork_processes(num_processes: Optional[int], max_restarts: int = None) -> in
number between 0 and ``num_processes``. Processes that exit
abnormally (due to a signal or non-zero exit status) are restarted
with the same id (up to ``max_restarts`` times). In the parent
process, ``fork_processes`` returns None if all child processes
have exited normally, but will otherwise only exit by throwing an
exception.
process, ``fork_processes`` calls ``sys.exit(0)`` after all child
processes have exited normally.
max_restarts defaults to 100.
Availability: Unix
"""
if sys.platform == "win32":
# The exact form of this condition matters to mypy; it understands
# if but not assert in this context.
raise Exception("fork not available on windows")
if max_restarts is None:
max_restarts = 100
@ -144,12 +141,7 @@ def fork_processes(num_processes: Optional[int], max_restarts: int = None) -> in
return id
num_restarts = 0
while children:
try:
pid, status = os.wait()
except OSError as e:
if errno_from_exception(e) == errno.EINTR:
continue
raise
pid, status = os.wait()
if pid not in children:
continue
id = children.pop(pid)
@ -227,19 +219,19 @@ class Subprocess(object):
pipe_fds = [] # type: List[int]
to_close = [] # type: List[int]
if kwargs.get("stdin") is Subprocess.STREAM:
in_r, in_w = _pipe_cloexec()
in_r, in_w = os.pipe()
kwargs["stdin"] = in_r
pipe_fds.extend((in_r, in_w))
to_close.append(in_r)
self.stdin = PipeIOStream(in_w)
if kwargs.get("stdout") is Subprocess.STREAM:
out_r, out_w = _pipe_cloexec()
out_r, out_w = os.pipe()
kwargs["stdout"] = out_w
pipe_fds.extend((out_r, out_w))
to_close.append(out_w)
self.stdout = PipeIOStream(out_r)
if kwargs.get("stderr") is Subprocess.STREAM:
err_r, err_w = _pipe_cloexec()
err_r, err_w = os.pipe()
kwargs["stderr"] = err_w
pipe_fds.extend((err_r, err_w))
to_close.append(err_w)
@ -273,6 +265,8 @@ class Subprocess(object):
In many cases a close callback on the stdout or stderr streams
can be used as an alternative to an exit callback if the
signal handler is causing a problem.
Availability: Unix
"""
self._exit_callback = callback
Subprocess.initialize()
@ -294,6 +288,8 @@ class Subprocess(object):
to suppress this behavior and return the exit status without raising.
.. versionadded:: 4.2
Availability: Unix
"""
future = Future() # type: Future[int]
@ -321,6 +317,8 @@ class Subprocess(object):
.. versionchanged:: 5.0
The ``io_loop`` argument (deprecated since version 4.1) has been
removed.
Availability: Unix
"""
if cls._initialized:
return
@ -347,10 +345,9 @@ class Subprocess(object):
@classmethod
def _try_cleanup_process(cls, pid: int) -> None:
try:
ret_pid, status = os.waitpid(pid, os.WNOHANG)
except OSError as e:
if errno_from_exception(e) == errno.ECHILD:
return
ret_pid, status = os.waitpid(pid, os.WNOHANG) # type: ignore
except ChildProcessError:
return
if ret_pid == 0:
return
assert ret_pid == pid
@ -358,11 +355,14 @@ class Subprocess(object):
subproc.io_loop.add_callback_from_signal(subproc._set_returncode, status)
def _set_returncode(self, status: int) -> None:
if os.WIFSIGNALED(status):
self.returncode = -os.WTERMSIG(status)
if sys.platform == "win32":
self.returncode = -1
else:
assert os.WIFEXITED(status)
self.returncode = os.WEXITSTATUS(status)
if os.WIFSIGNALED(status):
self.returncode = -os.WTERMSIG(status)
else:
assert os.WIFEXITED(status)
self.returncode = os.WEXITSTATUS(status)
# We've taken over wait() duty from the subprocess.Popen
# object. If we don't inform it of the process's return code,
# it will log a warning at destruction in python 3.6+.

14
lib/tornado_py3/queues.py

@ -33,11 +33,11 @@ from tornado_py3 import gen, ioloop
from tornado_py3.concurrent import Future, future_set_result_unless_cancelled
from tornado_py3.locks import Event
from typing import Union, TypeVar, Generic, Awaitable
from typing import Union, TypeVar, Generic, Awaitable, Optional
import typing
if typing.TYPE_CHECKING:
from typing import Deque, Tuple, List, Any # noqa: F401
from typing import Deque, Tuple, Any # noqa: F401
_T = TypeVar("_T")
@ -184,7 +184,7 @@ class Queue(Generic[_T]):
return self.qsize() >= self.maxsize
def put(
self, item: _T, timeout: Union[float, datetime.timedelta] = None
self, item: _T, timeout: Optional[Union[float, datetime.timedelta]] = None
) -> "Future[None]":
"""Put an item into the queue, perhaps waiting until there is room.
@ -222,7 +222,9 @@ class Queue(Generic[_T]):
else:
self.__put_internal(item)
def get(self, timeout: Union[float, datetime.timedelta] = None) -> Awaitable[_T]:
def get(
self, timeout: Optional[Union[float, datetime.timedelta]] = None
) -> Awaitable[_T]:
"""Remove and return an item from the queue.
Returns an awaitable which resolves once an item is available, or raises
@ -287,7 +289,9 @@ class Queue(Generic[_T]):
if self._unfinished_tasks == 0:
self._finished.set()
def join(self, timeout: Union[float, datetime.timedelta] = None) -> Awaitable[None]:
def join(
self, timeout: Optional[Union[float, datetime.timedelta]] = None
) -> Awaitable[None]:
"""Block until all items in the queue are processed.
Returns an awaitable, which raises `tornado.util.TimeoutError` after a

28
lib/tornado_py3/routing.py

@ -142,7 +142,7 @@ Of course a nested `RuleRouter` or a `~.web.Application` is allowed:
router = RuleRouter([
Rule(HostMatches("example.com"), RuleRouter([
Rule(PathMatches("/app1/.*"), Application([(r"/app1/handler", Handler)]))),
Rule(PathMatches("/app1/.*"), Application([(r"/app1/handler", Handler)])),
]))
])
@ -300,7 +300,7 @@ _RuleList = List[
class RuleRouter(Router):
"""Rule-based router implementation."""
def __init__(self, rules: _RuleList = None) -> None:
def __init__(self, rules: Optional[_RuleList] = None) -> None:
"""Constructs a router from an ordered list of rules::
RuleRouter([
@ -409,12 +409,12 @@ class ReversibleRuleRouter(ReversibleRouter, RuleRouter):
in a rule's matcher (see `Matcher.reverse`).
"""
def __init__(self, rules: _RuleList = None) -> None:
def __init__(self, rules: Optional[_RuleList] = None) -> None:
self.named_rules = {} # type: Dict[str, Any]
super(ReversibleRuleRouter, self).__init__(rules)
super().__init__(rules)
def process_rule(self, rule: "Rule") -> "Rule":
rule = super(ReversibleRuleRouter, self).process_rule(rule)
rule = super().process_rule(rule)
if rule.name:
if rule.name in self.named_rules:
@ -445,8 +445,8 @@ class Rule(object):
self,
matcher: "Matcher",
target: Any,
target_kwargs: Dict[str, Any] = None,
name: str = None,
target_kwargs: Optional[Dict[str, Any]] = None,
name: Optional[str] = None,
) -> None:
"""Constructs a Rule instance.
@ -627,7 +627,13 @@ class PathMatches(Matcher):
if ")" in fragment:
paren_loc = fragment.index(")")
if paren_loc >= 0:
pieces.append("%s" + fragment[paren_loc + 1 :])
try:
unescaped_fragment = re_unescape(fragment[paren_loc + 1 :])
except ValueError:
# If we can't unescape part of it, we can't
# reverse this url.
return (None, None)
pieces.append("%s" + unescaped_fragment)
else:
try:
unescaped_fragment = re_unescape(fragment)
@ -652,8 +658,8 @@ class URLSpec(Rule):
self,
pattern: Union[str, Pattern],
handler: Any,
kwargs: Dict[str, Any] = None,
name: str = None,
kwargs: Optional[Dict[str, Any]] = None,
name: Optional[str] = None,
) -> None:
"""Parameters:
@ -673,7 +679,7 @@ class URLSpec(Rule):
"""
matcher = PathMatches(pattern)
super(URLSpec, self).__init__(matcher, handler, kwargs, name)
super().__init__(matcher, handler, kwargs, name)
self.regex = matcher.regex
self.handler_class = self.target

273
lib/tornado_py3/simple_httpclient.py

@ -1,5 +1,5 @@
from tornado_py3.escape import _unicode
from tornado_py3 import gen
from tornado_py3 import gen, version
from tornado_py3.httpclient import (
HTTPResponse,
HTTPError,
@ -51,7 +51,7 @@ class HTTPTimeoutError(HTTPError):
"""
def __init__(self, message: str) -> None:
super(HTTPTimeoutError, self).__init__(599, message=message)
super().__init__(599, message=message)
def __str__(self) -> str:
return self.message or "Timeout"
@ -70,7 +70,7 @@ class HTTPStreamClosedError(HTTPError):
"""
def __init__(self, message: str) -> None:
super(HTTPStreamClosedError, self).__init__(599, message=message)
super().__init__(599, message=message)
def __str__(self) -> str:
return self.message or "Stream closed"
@ -89,12 +89,12 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient):
def initialize( # type: ignore
self,
max_clients: int = 10,
hostname_mapping: Dict[str, str] = None,
hostname_mapping: Optional[Dict[str, str]] = None,
max_buffer_size: int = 104857600,
resolver: Resolver = None,
defaults: Dict[str, Any] = None,
max_header_size: int = None,
max_body_size: int = None,
resolver: Optional[Resolver] = None,
defaults: Optional[Dict[str, Any]] = None,
max_header_size: Optional[int] = None,
max_body_size: Optional[int] = None,
) -> None:
"""Creates a AsyncHTTPClient.
@ -128,7 +128,7 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient):
.. versionchanged:: 4.2
Added the ``max_body_size`` argument.
"""
super(SimpleAsyncHTTPClient, self).initialize(defaults=defaults)
super().initialize(defaults=defaults)
self.max_clients = max_clients
self.queue = (
collections.deque()
@ -157,7 +157,7 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient):
self.tcp_client = TCPClient(resolver=self.resolver)
def close(self) -> None:
super(SimpleAsyncHTTPClient, self).close()
super().close()
if self.own_resolver:
self.resolver.close()
self.tcp_client.close()
@ -167,16 +167,20 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient):
) -> None:
key = object()
self.queue.append((key, request, callback))
if not len(self.active) < self.max_clients:
assert request.connect_timeout is not None
assert request.request_timeout is not None
timeout_handle = self.io_loop.add_timeout(
self.io_loop.time()
+ min(request.connect_timeout, request.request_timeout),
functools.partial(self._on_timeout, key, "in request queue"),
)
else:
timeout_handle = None
assert request.connect_timeout is not None
assert request.request_timeout is not None
timeout_handle = None
if len(self.active) >= self.max_clients:
timeout = (
min(request.connect_timeout, request.request_timeout)
or request.connect_timeout
or request.request_timeout
) # min but skip zero
if timeout:
timeout_handle = self.io_loop.add_timeout(
self.io_loop.time() + timeout,
functools.partial(self._on_timeout, key, "in request queue"),
)
self.waiting[key] = (request, callback, timeout_handle)
self._process_queue()
if self.queue:
@ -226,7 +230,7 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient):
self.io_loop.remove_timeout(timeout_handle)
del self.waiting[key]
def _on_timeout(self, key: object, info: str = None) -> None:
def _on_timeout(self, key: object, info: Optional[str] = None) -> None:
"""Timeout callback of request.
Construct a timeout HTTPResponse when a timeout occurs.
@ -321,121 +325,123 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
% (self.request.network_interface,)
)
timeout = min(self.request.connect_timeout, self.request.request_timeout)
timeout = (
min(self.request.connect_timeout, self.request.request_timeout)
or self.request.connect_timeout
or self.request.request_timeout
) # min but skip zero
if timeout:
self._timeout = self.io_loop.add_timeout(
self.start_time + timeout,
functools.partial(self._on_timeout, "while connecting"),
)
stream = await self.tcp_client.connect(
host,
port,
af=af,
ssl_options=ssl_options,
max_buffer_size=self.max_buffer_size,
source_ip=source_ip,
)
stream = await self.tcp_client.connect(
host,
port,
af=af,
ssl_options=ssl_options,
max_buffer_size=self.max_buffer_size,
source_ip=source_ip,
)
if self.final_callback is None:
# final_callback is cleared if we've hit our timeout.
stream.close()
return
self.stream = stream
self.stream.set_close_callback(self.on_connection_close)
self._remove_timeout()
if self.final_callback is None:
return
if self.request.request_timeout:
self._timeout = self.io_loop.add_timeout(
self.start_time + self.request.request_timeout,
functools.partial(self._on_timeout, "during request"),
)
if (
self.request.method not in self._SUPPORTED_METHODS
and not self.request.allow_nonstandard_methods
):
raise KeyError("unknown method %s" % self.request.method)
for key in (
"proxy_host",
"proxy_port",
"proxy_username",
"proxy_password",
"proxy_auth_mode",
):
if getattr(self.request, key, None):
raise NotImplementedError("%s not supported" % key)
if "Connection" not in self.request.headers:
self.request.headers["Connection"] = "close"
if "Host" not in self.request.headers:
if "@" in self.parsed.netloc:
self.request.headers["Host"] = self.parsed.netloc.rpartition(
"@"
)[-1]
else:
self.request.headers["Host"] = self.parsed.netloc
username, password = None, None
if self.parsed.username is not None:
username, password = self.parsed.username, self.parsed.password
elif self.request.auth_username is not None:
username = self.request.auth_username
password = self.request.auth_password or ""
if username is not None:
assert password is not None
if self.request.auth_mode not in (None, "basic"):
raise ValueError(
"unsupported auth_mode %s", self.request.auth_mode
)
self.request.headers["Authorization"] = "Basic " + _unicode(
base64.b64encode(
httputil.encode_username_password(username, password)
)
)
if self.request.user_agent:
self.request.headers["User-Agent"] = self.request.user_agent
if not self.request.allow_nonstandard_methods:
# Some HTTP methods nearly always have bodies while others
# almost never do. Fail in this case unless the user has
# opted out of sanity checks with allow_nonstandard_methods.
body_expected = self.request.method in ("POST", "PATCH", "PUT")
body_present = (
self.request.body is not None
or self.request.body_producer is not None
if self.final_callback is None:
# final_callback is cleared if we've hit our timeout.
stream.close()
return
self.stream = stream
self.stream.set_close_callback(self.on_connection_close)
self._remove_timeout()
if self.final_callback is None:
return
if self.request.request_timeout:
self._timeout = self.io_loop.add_timeout(
self.start_time + self.request.request_timeout,
functools.partial(self._on_timeout, "during request"),
)
if (
self.request.method not in self._SUPPORTED_METHODS
and not self.request.allow_nonstandard_methods
):
raise KeyError("unknown method %s" % self.request.method)
for key in (
"proxy_host",
"proxy_port",
"proxy_username",
"proxy_password",
"proxy_auth_mode",
):
if getattr(self.request, key, None):
raise NotImplementedError("%s not supported" % key)
if "Connection" not in self.request.headers:
self.request.headers["Connection"] = "close"
if "Host" not in self.request.headers:
if "@" in self.parsed.netloc:
self.request.headers["Host"] = self.parsed.netloc.rpartition("@")[
-1
]
else:
self.request.headers["Host"] = self.parsed.netloc
username, password = None, None
if self.parsed.username is not None:
username, password = self.parsed.username, self.parsed.password
elif self.request.auth_username is not None:
username = self.request.auth_username
password = self.request.auth_password or ""
if username is not None:
assert password is not None
if self.request.auth_mode not in (None, "basic"):
raise ValueError("unsupported auth_mode %s", self.request.auth_mode)
self.request.headers["Authorization"] = "Basic " + _unicode(
base64.b64encode(
httputil.encode_username_password(username, password)
)
if (body_expected and not body_present) or (
body_present and not body_expected
):
raise ValueError(
"Body must %sbe None for method %s (unless "
"allow_nonstandard_methods is true)"
% ("not " if body_expected else "", self.request.method)
)
if self.request.expect_100_continue:
self.request.headers["Expect"] = "100-continue"
if self.request.body is not None:
# When body_producer is used the caller is responsible for
# setting Content-Length (or else chunked encoding will be used).
self.request.headers["Content-Length"] = str(len(self.request.body))
if (
self.request.method == "POST"
and "Content-Type" not in self.request.headers
):
self.request.headers[
"Content-Type"
] = "application/x-www-form-urlencoded"
if self.request.decompress_response:
self.request.headers["Accept-Encoding"] = "gzip"
req_path = (self.parsed.path or "/") + (
("?" + self.parsed.query) if self.parsed.query else ""
)
self.connection = self._create_connection(stream)
start_line = httputil.RequestStartLine(
self.request.method, req_path, ""
if self.request.user_agent:
self.request.headers["User-Agent"] = self.request.user_agent
elif self.request.headers.get("User-Agent") is None:
self.request.headers["User-Agent"] = "Tornado/{}".format(version)
if not self.request.allow_nonstandard_methods:
# Some HTTP methods nearly always have bodies while others
# almost never do. Fail in this case unless the user has
# opted out of sanity checks with allow_nonstandard_methods.
body_expected = self.request.method in ("POST", "PATCH", "PUT")
body_present = (
self.request.body is not None
or self.request.body_producer is not None
)
self.connection.write_headers(start_line, self.request.headers)
if self.request.expect_100_continue:
await self.connection.read_response(self)
else:
await self._write_body(True)
if (body_expected and not body_present) or (
body_present and not body_expected
):
raise ValueError(
"Body must %sbe None for method %s (unless "
"allow_nonstandard_methods is true)"
% ("not " if body_expected else "", self.request.method)
)
if self.request.expect_100_continue:
self.request.headers["Expect"] = "100-continue"
if self.request.body is not None:
# When body_producer is used the caller is responsible for
# setting Content-Length (or else chunked encoding will be used).
self.request.headers["Content-Length"] = str(len(self.request.body))
if (
self.request.method == "POST"
and "Content-Type" not in self.request.headers
):
self.request.headers[
"Content-Type"
] = "application/x-www-form-urlencoded"
if self.request.decompress_response:
self.request.headers["Accept-Encoding"] = "gzip"
req_path = (self.parsed.path or "/") + (
("?" + self.parsed.query) if self.parsed.query else ""
)
self.connection = self._create_connection(stream)
start_line = httputil.RequestStartLine(self.request.method, req_path, "")
self.connection.write_headers(start_line, self.request.headers)
if self.request.expect_100_continue:
await self.connection.read_response(self)
else:
await self._write_body(True)
except Exception:
if not self._handle_exception(*sys.exc_info()):
raise
@ -471,7 +477,7 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
return ssl_ctx
return None
def _on_timeout(self, info: str = None) -> None:
def _on_timeout(self, info: Optional[str] = None) -> None:
"""Timeout callback of _HTTPConnection instance.
Raise a `HTTPTimeoutError` when a timeout occurs.
@ -632,11 +638,12 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
# redirect, the request method should be preserved.
# However, browsers implemented this by changing the
# method to GET, and the behavior stuck. 303 redirects
# always specified this POST-to-GET behavior (arguably 303
# redirects should change *all* requests to GET, but
# libcurl only does this for POST so we follow their
# example).
if self.code in (301, 302, 303) and self.request.method == "POST":
# always specified this POST-to-GET behavior, arguably
# for *all* methods, but libcurl < 7.70 only does this
# for POST, while libcurl >= 7.70 does it for other methods.
if (self.code == 303 and self.request.method != "HEAD") or (
self.code in (301, 302) and self.request.method == "POST"
):
new_request.method = "GET"
new_request.body = None
for h in [

7
lib/tornado_py3/speedups.c

@ -56,7 +56,6 @@ static PyMethodDef methods[] = {
{NULL, NULL, 0, NULL}
};
#if PY_MAJOR_VERSION >= 3
static struct PyModuleDef speedupsmodule = {
PyModuleDef_HEAD_INIT,
"speedups",
@ -69,9 +68,3 @@ PyMODINIT_FUNC
PyInit_speedups(void) {
return PyModule_Create(&speedupsmodule);
}
#else // Python 2.x
PyMODINIT_FUNC
initspeedups(void) {
Py_InitModule("tornado.speedups", methods);
}
#endif

26
lib/tornado_py3/tcpclient.py

@ -27,14 +27,9 @@ from tornado_py3.ioloop import IOLoop
from tornado_py3.iostream import IOStream
from tornado_py3 import gen
from tornado_py3.netutil import Resolver
from tornado_py3.platform.auto import set_close_exec
from tornado_py3.gen import TimeoutError
import typing
from typing import Any, Union, Dict, Tuple, List, Callable, Iterator
if typing.TYPE_CHECKING:
from typing import Optional, Set # noqa: F401
from typing import Any, Union, Dict, Tuple, List, Callable, Iterator, Optional, Set
_INITIAL_CONNECT_TIMEOUT = 0.3
@ -105,7 +100,7 @@ class _Connector(object):
def start(
self,
timeout: float = _INITIAL_CONNECT_TIMEOUT,
connect_timeout: Union[float, datetime.timedelta] = None,
connect_timeout: Optional[Union[float, datetime.timedelta]] = None,
) -> "Future[Tuple[socket.AddressFamily, Any, IOStream]]":
self.try_connect(iter(self.primary_addrs))
self.set_timeout(timeout)
@ -207,7 +202,7 @@ class TCPClient(object):
The ``io_loop`` argument (deprecated since version 4.1) has been removed.
"""
def __init__(self, resolver: Resolver = None) -> None:
def __init__(self, resolver: Optional[Resolver] = None) -> None:
if resolver is not None:
self.resolver = resolver
self._own_resolver = False
@ -224,11 +219,11 @@ class TCPClient(object):
host: str,
port: int,
af: socket.AddressFamily = socket.AF_UNSPEC,
ssl_options: Union[Dict[str, Any], ssl.SSLContext] = None,
max_buffer_size: int = None,
source_ip: str = None,
source_port: int = None,
timeout: Union[float, datetime.timedelta] = None,
ssl_options: Optional[Union[Dict[str, Any], ssl.SSLContext]] = None,
max_buffer_size: Optional[int] = None,
source_ip: Optional[str] = None,
source_port: Optional[int] = None,
timeout: Optional[Union[float, datetime.timedelta]] = None,
) -> IOStream:
"""Connect to the given host and port.
@ -300,8 +295,8 @@ class TCPClient(object):
max_buffer_size: int,
af: socket.AddressFamily,
addr: Tuple,
source_ip: str = None,
source_port: int = None,
source_ip: Optional[str] = None,
source_port: Optional[int] = None,
) -> Tuple[IOStream, "Future[IOStream]"]:
# Always connect in plaintext; we'll convert to ssl if necessary
# after one connection has completed.
@ -315,7 +310,6 @@ class TCPClient(object):
# - 127.0.0.1 for IPv4
# - ::1 for IPv6
socket_obj = socket.socket(af)
set_close_exec(socket_obj.fileno())
if source_port_bind or source_ip_bind:
# If the user requires binding also to a specific IP/port.
try:

14
lib/tornado_py3/tcpserver.py

@ -107,9 +107,9 @@ class TCPServer(object):
def __init__(
self,
ssl_options: Union[Dict[str, Any], ssl.SSLContext] = None,
max_buffer_size: int = None,
read_chunk_size: int = None,
ssl_options: Optional[Union[Dict[str, Any], ssl.SSLContext]] = None,
max_buffer_size: Optional[int] = None,
read_chunk_size: Optional[int] = None,
) -> None:
self.ssl_options = ssl_options
self._sockets = {} # type: Dict[int, socket.socket]
@ -173,7 +173,7 @@ class TCPServer(object):
def bind(
self,
port: int,
address: str = None,
address: Optional[str] = None,
family: socket.AddressFamily = socket.AF_UNSPEC,
backlog: int = 128,
reuse_port: bool = False,
@ -209,7 +209,9 @@ class TCPServer(object):
else:
self._pending_sockets.extend(sockets)
def start(self, num_processes: Optional[int] = 1, max_restarts: int = None) -> None:
def start(
self, num_processes: Optional[int] = 1, max_restarts: Optional[int] = None
) -> None:
"""Starts this server in the `.IOLoop`.
By default, we run the server in this process and do not fork any
@ -229,6 +231,8 @@ class TCPServer(object):
When using multiple processes, no IOLoops can be created or
referenced until after the call to ``TCPServer.start(n)``.
Values of ``num_processes`` other than 1 are not supported on Windows.
The ``max_restarts`` argument is passed to `.fork_processes`.
.. versionchanged:: 6.0

45
lib/tornado_py3/template.py

@ -98,8 +98,9 @@ template directives use ``{% %}``.
To comment out a section so that it is omitted from the output, surround it
with ``{# ... #}``.
These tags may be escaped as ``{{!``, ``{%!``, and ``{#!``
if you need to include a literal ``{{``, ``{%``, or ``{#`` in the output.
To include a literal ``{{``, ``{%``, or ``{#`` in the output, escape them as
``{{!``, ``{%!``, and ``{#!``, respectively.
``{% apply *function* %}...{% end %}``
@ -262,10 +263,10 @@ class Template(object):
self,
template_string: Union[str, bytes],
name: str = "<string>",
loader: "BaseLoader" = None,
loader: Optional["BaseLoader"] = None,
compress_whitespace: Union[bool, _UnsetMarker] = _UNSET,
autoescape: Union[str, _UnsetMarker] = _UNSET,
whitespace: str = None,
autoescape: Optional[Union[str, _UnsetMarker]] = _UNSET,
whitespace: Optional[str] = None,
) -> None:
"""Construct a Template.
@ -399,8 +400,8 @@ class BaseLoader(object):
def __init__(
self,
autoescape: str = _DEFAULT_AUTOESCAPE,
namespace: Dict[str, Any] = None,
whitespace: str = None,
namespace: Optional[Dict[str, Any]] = None,
whitespace: Optional[str] = None,
) -> None:
"""Construct a template loader.
@ -433,11 +434,11 @@ class BaseLoader(object):
with self.lock:
self.templates = {}
def resolve_path(self, name: str, parent_path: str = None) -> str:
def resolve_path(self, name: str, parent_path: Optional[str] = None) -> str:
"""Converts a possibly-relative path to absolute (used internally)."""
raise NotImplementedError()
def load(self, name: str, parent_path: str = None) -> Template:
def load(self, name: str, parent_path: Optional[str] = None) -> Template:
"""Loads a template."""
name = self.resolve_path(name, parent_path=parent_path)
with self.lock:
@ -454,10 +455,10 @@ class Loader(BaseLoader):
"""
def __init__(self, root_directory: str, **kwargs: Any) -> None:
super(Loader, self).__init__(**kwargs)
super().__init__(**kwargs)
self.root = os.path.abspath(root_directory)
def resolve_path(self, name: str, parent_path: str = None) -> str:
def resolve_path(self, name: str, parent_path: Optional[str] = None) -> str:
if (
parent_path
and not parent_path.startswith("<")
@ -482,10 +483,10 @@ class DictLoader(BaseLoader):
"""A template loader that loads from a dictionary."""
def __init__(self, dict: Dict[str, str], **kwargs: Any) -> None:
super(DictLoader, self).__init__(**kwargs)
super().__init__(**kwargs)
self.dict = dict
def resolve_path(self, name: str, parent_path: str = None) -> str:
def resolve_path(self, name: str, parent_path: Optional[str] = None) -> str:
if (
parent_path
and not parent_path.startswith("<")
@ -676,7 +677,7 @@ class _Expression(_Node):
class _Module(_Expression):
def __init__(self, expression: str, line: int) -> None:
super(_Module, self).__init__("_tt_modules." + expression, line, raw=True)
super().__init__("_tt_modules." + expression, line, raw=True)
class _Text(_Node):
@ -707,7 +708,9 @@ class ParseError(Exception):
Added ``filename`` and ``lineno`` attributes.
"""
def __init__(self, message: str, filename: str = None, lineno: int = 0) -> None:
def __init__(
self, message: str, filename: Optional[str] = None, lineno: int = 0
) -> None:
self.message = message
# The names "filename" and "lineno" are chosen for consistency
# with python SyntaxError.
@ -762,7 +765,9 @@ class _CodeWriter(object):
return IncludeTemplate()
def write_line(self, line: str, line_number: int, indent: int = None) -> None:
def write_line(
self, line: str, line_number: int, indent: Optional[int] = None
) -> None:
if indent is None:
indent = self._indent
line_comment = " # %s:%d" % (self.current_template.name, line_number)
@ -782,7 +787,7 @@ class _TemplateReader(object):
self.line = 1
self.pos = 0
def find(self, needle: str, start: int = 0, end: int = None) -> int:
def find(self, needle: str, start: int = 0, end: Optional[int] = None) -> int:
assert start >= 0, start
pos = self.pos
start += pos
@ -796,7 +801,7 @@ class _TemplateReader(object):
index -= pos
return index
def consume(self, count: int = None) -> str:
def consume(self, count: Optional[int] = None) -> str:
if count is None:
count = len(self.text) - self.pos
newpos = self.pos + count
@ -843,8 +848,8 @@ def _format_code(code: str) -> str:
def _parse(
reader: _TemplateReader,
template: Template,
in_block: str = None,
in_loop: str = None,
in_block: Optional[str] = None,
in_loop: Optional[str] = None,
) -> _ChunkList:
body = _ChunkList([])
while True:

50
lib/tornado_py3/testing.py

@ -161,7 +161,7 @@ class AsyncTestCase(unittest.TestCase):
"""
def __init__(self, methodName: str = "runTest") -> None:
super(AsyncTestCase, self).__init__(methodName)
super().__init__(methodName)
self.__stopped = False
self.__running = False
self.__failure = None # type: Optional[_ExcInfoTuple]
@ -178,7 +178,7 @@ class AsyncTestCase(unittest.TestCase):
self._test_generator = None # type: Optional[Union[Generator, Coroutine]]
def setUp(self) -> None:
super(AsyncTestCase, self).setUp()
super().setUp()
self.io_loop = self.get_new_ioloop()
self.io_loop.make_current()
@ -222,7 +222,7 @@ class AsyncTestCase(unittest.TestCase):
# in the same process with autoreload (because curl does not
# set FD_CLOEXEC on its file descriptors)
self.io_loop.close(all_fds=True)
super(AsyncTestCase, self).tearDown()
super().tearDown()
# In case an exception escaped or the StackContext caught an exception
# when there wasn't a wait() to re-raise it, do so here.
# This is our last chance to raise an exception in a way that the
@ -260,8 +260,10 @@ class AsyncTestCase(unittest.TestCase):
self.__failure = None
raise_exc_info(failure)
def run(self, result: unittest.TestResult = None) -> unittest.TestCase:
ret = super(AsyncTestCase, self).run(result)
def run(
self, result: Optional[unittest.TestResult] = None
) -> Optional[unittest.TestResult]:
ret = super().run(result)
# As a last resort, if an exception escaped super.run() and wasn't
# re-raised in tearDown, raise it here. This will cause the
# unittest run to fail messily, but that's better than silently
@ -288,8 +290,10 @@ class AsyncTestCase(unittest.TestCase):
self.__stopped = True
def wait(
self, condition: Callable[..., bool] = None, timeout: float = None
) -> None:
self,
condition: Optional[Callable[..., bool]] = None,
timeout: Optional[float] = None,
) -> Any:
"""Runs the `.IOLoop` until stop is called or timeout has passed.
In the event of a timeout, an exception will be thrown. The
@ -375,7 +379,7 @@ class AsyncHTTPTestCase(AsyncTestCase):
"""
def setUp(self) -> None:
super(AsyncHTTPTestCase, self).setUp()
super().setUp()
sock, port = bind_unused_port()
self.__port = port
@ -469,7 +473,7 @@ class AsyncHTTPTestCase(AsyncTestCase):
self.http_client.close()
del self.http_server
del self._app
super(AsyncHTTPTestCase, self).tearDown()
super().tearDown()
class AsyncHTTPSTestCase(AsyncHTTPTestCase):
@ -508,7 +512,7 @@ class AsyncHTTPSTestCase(AsyncHTTPTestCase):
@typing.overload
def gen_test(
*, timeout: float = None
*, timeout: Optional[float] = None
) -> Callable[[Callable[..., Union[Generator, "Coroutine"]]], Callable[..., None]]:
pass
@ -519,7 +523,8 @@ def gen_test(func: Callable[..., Union[Generator, "Coroutine"]]) -> Callable[...
def gen_test( # noqa: F811
func: Callable[..., Union[Generator, "Coroutine"]] = None, timeout: float = None
func: Optional[Callable[..., Union[Generator, "Coroutine"]]] = None,
timeout: Optional[float] = None,
) -> Union[
Callable[..., None],
Callable[[Callable[..., Union[Generator, "Coroutine"]]], Callable[..., None]],
@ -652,6 +657,7 @@ class ExpectLog(logging.Filter):
logger: Union[logging.Logger, basestring_type],
regex: str,
required: bool = True,
level: Optional[int] = None,
) -> None:
"""Constructs an ExpectLog context manager.
@ -661,6 +667,15 @@ class ExpectLog(logging.Filter):
the specified logger that match this regex will be suppressed.
:param required: If true, an exception will be raised if the end of
the ``with`` statement is reached without matching any log entries.
:param level: A constant from the ``logging`` module indicating the
expected log level. If this parameter is provided, only log messages
at this level will be considered to match. Additionally, the
supplied ``logger`` will have its level adjusted if necessary
(for the duration of the ``ExpectLog`` to enable the expected
message.
.. versionchanged:: 6.1
Added the ``level`` parameter.
"""
if isinstance(logger, basestring_type):
logger = logging.getLogger(logger)
@ -669,17 +684,28 @@ class ExpectLog(logging.Filter):
self.required = required
self.matched = False
self.logged_stack = False
self.level = level
self.orig_level = None # type: Optional[int]
def filter(self, record: logging.LogRecord) -> bool:
if record.exc_info:
self.logged_stack = True
message = record.getMessage()
if self.regex.match(message):
if self.level is not None and record.levelno != self.level:
app_log.warning(
"Got expected log message %r at unexpected level (%s vs %s)"
% (message, logging.getLevelName(self.level), record.levelname)
)
return True
self.matched = True
return False
return True
def __enter__(self) -> "ExpectLog":
if self.level is not None and self.level < self.logger.getEffectiveLevel():
self.orig_level = self.logger.level
self.logger.setLevel(self.level)
self.logger.addFilter(self)
return self
@ -689,6 +715,8 @@ class ExpectLog(logging.Filter):
value: Optional[BaseException],
tb: Optional[TracebackType],
) -> None:
if self.orig_level is not None:
self.logger.setLevel(self.orig_level)
self.logger.removeFilter(self)
if not typ and self.required and not self.matched:
raise Exception("did not get expected log message")

12
lib/tornado_py3/util.py

@ -134,12 +134,12 @@ def import_object(name: str) -> Any:
``import_object('x')`` is equivalent to ``import x``.
``import_object('x.y.z')`` is equivalent to ``from x.y import z``.
>>> import tornado_py3.escape
>>> import_object('tornado.escape') is tornado_py3.escape
>>> import tornado.escape
>>> import_object('tornado.escape') is tornado.escape
True
>>> import_object('tornado.escape.utf8') is tornado_py3.escape.utf8
>>> import_object('tornado.escape.utf8') is tornado.escape.utf8
True
>>> import_object('tornado') is tornado_py3
>>> import_object('tornado') is tornado
True
>>> import_object('tornado.missing_module')
Traceback (most recent call last):
@ -157,7 +157,9 @@ def import_object(name: str) -> Any:
raise ImportError("No module named %s" % parts[-1])
def exec_in(code: Any, glob: Dict[str, Any], loc: Mapping[str, Any] = None) -> None:
def exec_in(
code: Any, glob: Dict[str, Any], loc: Optional[Optional[Mapping[str, Any]]] = None
) -> None:
if isinstance(code, str):
# exec(string) inherits the caller's future imports; compile
# the string first to prevent that.

165
lib/tornado_py3/web.py

@ -206,7 +206,7 @@ class RequestHandler(object):
request: httputil.HTTPServerRequest,
**kwargs: Any
) -> None:
super(RequestHandler, self).__init__()
super().__init__()
self.application = application
self.request = request
@ -340,7 +340,7 @@ class RequestHandler(object):
"""
pass
def set_status(self, status_code: int, reason: str = None) -> None:
def set_status(self, status_code: int, reason: Optional[str] = None) -> None:
"""Sets the status code for our response.
:arg int status_code: Response status code.
@ -424,14 +424,14 @@ class RequestHandler(object):
def get_argument(self, name: str, default: str, strip: bool = True) -> str:
pass
@overload # noqa: F811
def get_argument(
@overload
def get_argument( # noqa: F811
self, name: str, default: _ArgDefaultMarker = _ARG_DEFAULT, strip: bool = True
) -> str:
pass
@overload # noqa: F811
def get_argument(
@overload
def get_argument( # noqa: F811
self, name: str, default: None, strip: bool = True
) -> Optional[str]:
pass
@ -554,7 +554,7 @@ class RequestHandler(object):
values.append(s)
return values
def decode_argument(self, value: bytes, name: str = None) -> str:
def decode_argument(self, value: bytes, name: Optional[str] = None) -> str:
"""Decodes an argument from the request.
The argument has been percent-decoded and is now a byte string.
@ -580,7 +580,7 @@ class RequestHandler(object):
`self.request.cookies <.httputil.HTTPServerRequest.cookies>`."""
return self.request.cookies
def get_cookie(self, name: str, default: str = None) -> Optional[str]:
def get_cookie(self, name: str, default: Optional[str] = None) -> Optional[str]:
"""Returns the value of the request cookie with the given name.
If the named cookie is not present, returns ``default``.
@ -597,10 +597,10 @@ class RequestHandler(object):
self,
name: str,
value: Union[str, bytes],
domain: str = None,
expires: Union[float, Tuple, datetime.datetime] = None,
domain: Optional[str] = None,
expires: Optional[Union[float, Tuple, datetime.datetime]] = None,
path: str = "/",
expires_days: int = None,
expires_days: Optional[float] = None,
**kwargs: Any
) -> None:
"""Sets an outgoing cookie name/value with the given options.
@ -624,7 +624,9 @@ class RequestHandler(object):
# Don't let us accidentally inject bad stuff
raise ValueError("Invalid cookie %r: %r" % (name, value))
if not hasattr(self, "_new_cookie"):
self._new_cookie = http.cookies.SimpleCookie()
self._new_cookie = (
http.cookies.SimpleCookie()
) # type: http.cookies.SimpleCookie
if name in self._new_cookie:
del self._new_cookie[name]
self._new_cookie[name] = value
@ -648,7 +650,9 @@ class RequestHandler(object):
morsel[k] = v
def clear_cookie(self, name: str, path: str = "/", domain: str = None) -> None:
def clear_cookie(
self, name: str, path: str = "/", domain: Optional[str] = None
) -> None:
"""Deletes the cookie with the given name.
Due to limitations of the cookie protocol, you must pass the same
@ -662,7 +666,7 @@ class RequestHandler(object):
expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
self.set_cookie(name, value="", path=path, expires=expires, domain=domain)
def clear_all_cookies(self, path: str = "/", domain: str = None) -> None:
def clear_all_cookies(self, path: str = "/", domain: Optional[str] = None) -> None:
"""Deletes all the cookies the user sent with this request.
See `clear_cookie` for more information on the path and domain
@ -682,8 +686,8 @@ class RequestHandler(object):
self,
name: str,
value: Union[str, bytes],
expires_days: int = 30,
version: int = None,
expires_days: Optional[float] = 30,
version: Optional[int] = None,
**kwargs: Any
) -> None:
"""Signs and timestamps a cookie so it cannot be forged.
@ -697,6 +701,7 @@ class RequestHandler(object):
Note that the ``expires_days`` parameter sets the lifetime of the
cookie in the browser, but is independent of the ``max_age_days``
parameter to `get_secure_cookie`.
A value of None limits the lifetime to the current browser session.
Secure cookies may contain arbitrary byte values, not just unicode
strings (unlike regular cookies)
@ -717,7 +722,7 @@ class RequestHandler(object):
)
def create_signed_value(
self, name: str, value: Union[str, bytes], version: int = None
self, name: str, value: Union[str, bytes], version: Optional[int] = None
) -> bytes:
"""Signs and timestamps a string so it cannot be forged.
@ -745,9 +750,9 @@ class RequestHandler(object):
def get_secure_cookie(
self,
name: str,
value: str = None,
max_age_days: int = 31,
min_version: int = None,
value: Optional[str] = None,
max_age_days: float = 31,
min_version: Optional[int] = None,
) -> Optional[bytes]:
"""Returns the given signed cookie if it validates, or None.
@ -775,7 +780,7 @@ class RequestHandler(object):
)
def get_secure_cookie_key_version(
self, name: str, value: str = None
self, name: str, value: Optional[str] = None
) -> Optional[int]:
"""Returns the signing key version of the secure cookie.
@ -788,7 +793,9 @@ class RequestHandler(object):
return None
return get_signature_key_version(value)
def redirect(self, url: str, permanent: bool = False, status: int = None) -> None:
def redirect(
self, url: str, permanent: bool = False, status: Optional[int] = None
) -> None:
"""Sends a redirect to the given (optionally relative) URL.
If the ``status`` argument is specified, that value is used as the
@ -1051,12 +1058,6 @@ class RequestHandler(object):
def flush(self, include_footers: bool = False) -> "Future[None]":
"""Flushes the current output buffer to the network.
The ``callback`` argument, if given, can be used for flow control:
it will be run when all flushed data has been written to the socket.
Note that only one flush callback can be outstanding at a time;
if another flush occurs before the previous flush's callback
has been run, the previous callback will be discarded.
.. versionchanged:: 4.0
Now returns a `.Future` if no callback is given.
@ -1104,7 +1105,7 @@ class RequestHandler(object):
future.set_result(None)
return future
def finish(self, chunk: Union[str, bytes, dict] = None) -> "Future[None]":
def finish(self, chunk: Optional[Union[str, bytes, dict]] = None) -> "Future[None]":
"""Finishes this response, ending the HTTP request.
Passing a ``chunk`` to ``finish()`` is equivalent to passing that
@ -1137,13 +1138,11 @@ class RequestHandler(object):
if self.check_etag_header():
self._write_buffer = []
self.set_status(304)
if self._status_code in (204, 304) or (
self._status_code >= 100 and self._status_code < 200
):
if self._status_code in (204, 304) or (100 <= self._status_code < 200):
assert not self._write_buffer, (
"Cannot send body with %s" % self._status_code
)
self._clear_headers_for_304()
self._clear_representation_headers()
elif "Content-Length" not in self._headers:
content_length = sum(len(part) for part in self._write_buffer)
self.set_header("Content-Length", content_length)
@ -1301,7 +1300,7 @@ class RequestHandler(object):
locales.append((parts[0], score))
if locales:
locales.sort(key=lambda pair: pair[1], reverse=True)
codes = [l[0] for l in locales]
codes = [loc[0] for loc in locales]
return locale.get(*codes)
return locale.get(default)
@ -1544,7 +1543,9 @@ class RequestHandler(object):
+ '"/>'
)
def static_url(self, path: str, include_host: bool = None, **kwargs: Any) -> str:
def static_url(
self, path: str, include_host: Optional[bool] = None, **kwargs: Any
) -> str:
"""Returns a static URL for the given relative static file path.
This method requires you set the ``static_path`` setting in your
@ -1785,11 +1786,11 @@ class RequestHandler(object):
args = [value.status_code, self._request_summary()] + list(value.args)
gen_log.warning(format, *args)
else:
app_log.error( # type: ignore
app_log.error(
"Uncaught exception %s\n%r",
self._request_summary(),
self.request,
exc_info=(typ, value, tb),
exc_info=(typ, value, tb), # type: ignore
)
def _ui_module(self, name: str, module: Type["UIModule"]) -> Callable[..., str]:
@ -1806,21 +1807,13 @@ class RequestHandler(object):
def _ui_method(self, method: Callable[..., str]) -> Callable[..., str]:
return lambda *args, **kwargs: method(self, *args, **kwargs)
def _clear_headers_for_304(self) -> None:
# 304 responses should not contain entity headers (defined in
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
def _clear_representation_headers(self) -> None:
# 304 responses should not contain representation metadata
# headers (defined in
# https://tools.ietf.org/html/rfc7231#section-3.1)
# not explicitly allowed by
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
headers = [
"Allow",
"Content-Encoding",
"Content-Language",
"Content-Length",
"Content-MD5",
"Content-Range",
"Content-Type",
"Last-Modified",
]
# https://tools.ietf.org/html/rfc7232#section-4.1
headers = ["Content-Encoding", "Content-Language", "Content-Type"]
for h in headers:
self.clear_header(h)
@ -1925,17 +1918,19 @@ class _ApplicationRouter(ReversibleRuleRouter):
`_ApplicationRouter` instance.
"""
def __init__(
    self, application: "Application", rules: Optional[_RuleList] = None
) -> None:
    """Create a router bound to a single `Application` instance.

    Stripped-diff residue fix: both the old one-line signature /
    ``super(_ApplicationRouter, self)`` call and the new ones were present;
    only the 6.1.0 form is kept.
    """
    assert isinstance(application, Application)
    self.application = application
    super().__init__(rules)
def process_rule(self, rule: Rule) -> Rule:
    """Expand nested rule lists into nested `_ApplicationRouter` instances.

    Stripped-diff residue fix: duplicate ``super(...)`` call and duplicate
    constructor lines from the diff were collapsed to the 6.1.0 form.
    """
    rule = super().process_rule(rule)
    # A list/tuple target is itself a rule list; wrap it in a child router
    # bound to the same application.
    if isinstance(rule.target, (list, tuple)):
        rule.target = _ApplicationRouter(
            self.application, rule.target  # type: ignore
        )
    return rule
@ -1948,9 +1943,7 @@ class _ApplicationRouter(ReversibleRuleRouter):
request, target, **target_params
)
return super(_ApplicationRouter, self).get_target_delegate(
target, request, **target_params
)
return super().get_target_delegate(target, request, **target_params)
class Application(ReversibleRouter):
@ -2039,9 +2032,9 @@ class Application(ReversibleRouter):
def __init__(
self,
handlers: _RuleList = None,
default_host: str = None,
transforms: List[Type["OutputTransform"]] = None,
handlers: Optional[_RuleList] = None,
default_host: Optional[str] = None,
transforms: Optional[List[Type["OutputTransform"]]] = None,
**settings: Any
) -> None:
if transforms is None:
@ -2192,9 +2185,9 @@ class Application(ReversibleRouter):
self,
request: httputil.HTTPServerRequest,
target_class: Type[RequestHandler],
target_kwargs: Dict[str, Any] = None,
path_args: List[bytes] = None,
path_kwargs: Dict[str, bytes] = None,
target_kwargs: Optional[Dict[str, Any]] = None,
path_args: Optional[List[bytes]] = None,
path_kwargs: Optional[Dict[str, bytes]] = None,
) -> "_HandlerDelegate":
"""Returns `~.httputil.HTTPMessageDelegate` that can serve a request
for application and `RequestHandler` subclass.
@ -2361,7 +2354,11 @@ class HTTPError(Exception):
"""
def __init__(
self, status_code: int = 500, log_message: str = None, *args: Any, **kwargs: Any
self,
status_code: int = 500,
log_message: Optional[str] = None,
*args: Any,
**kwargs: Any
) -> None:
self.status_code = status_code
self.log_message = log_message
@ -2419,9 +2416,7 @@ class MissingArgumentError(HTTPError):
"""
def __init__(self, arg_name: str) -> None:
    """Raise HTTP 400 (Bad Request) for a required-but-absent argument.

    Stripped-diff residue fix: both the Python-2-style
    ``super(MissingArgumentError, self)`` call and the bare ``super()``
    call were present; only the 6.1.0 form is kept.
    """
    super().__init__(400, "Missing argument %s" % arg_name)
    # Preserved so callers can report which argument was missing.
    self.arg_name = arg_name
@ -2478,8 +2473,8 @@ class RedirectHandler(RequestHandler):
self._url = url
self._permanent = permanent
def get(self, *args: Any) -> None:
to_url = self._url.format(*args)
def get(self, *args: Any, **kwargs: Any) -> None:
to_url = self._url.format(*args, **kwargs)
if self.request.query_arguments:
# TODO: figure out typing for the next line.
to_url = httputil.url_concat(
@ -2561,7 +2556,7 @@ class StaticFileHandler(RequestHandler):
_static_hashes = {} # type: Dict[str, Optional[str]]
_lock = threading.Lock() # protects _static_hashes
def initialize(self, path: str, default_filename: Optional[str] = None) -> None:
    """Record the filesystem root and optional directory-index filename.

    Stripped-diff residue fix: the pre-change signature (``str = None``)
    duplicated the post-change one; only the 6.1.0 form is kept.
    """
    self.root = path
    # None means directory requests get no implicit index file.
    self.default_filename = default_filename
@ -2787,7 +2782,7 @@ class StaticFileHandler(RequestHandler):
@classmethod
def get_content(
cls, abspath: str, start: int = None, end: int = None
cls, abspath: str, start: Optional[int] = None, end: Optional[int] = None
) -> Generator[bytes, None, None]:
"""Retrieve the content of the requested resource which is located
at the given absolute path.
@ -2829,12 +2824,12 @@ class StaticFileHandler(RequestHandler):
"""Returns a version string for the resource at the given path.
This class method may be overridden by subclasses. The
default implementation is a hash of the file's contents.
default implementation is a SHA-512 hash of the file's contents.
.. versionadded:: 3.1
"""
data = cls.get_content(abspath)
hasher = hashlib.md5()
hasher = hashlib.sha512()
if isinstance(data, bytes):
hasher.update(data)
else:
@ -3265,7 +3260,7 @@ class TemplateModule(UIModule):
Template()) instead of inheriting the outer template's namespace.
Templates rendered through this module also get access to UIModule's
automatic javascript/css features. Simply call set_resources
automatic JavaScript/CSS features. Simply call set_resources
inside the template and give it keyword arguments corresponding to
the methods on UIModule: {{ set_resources(js_files=static_url("my.js")) }}
Note that these resources are output once per template file, not once
@ -3274,7 +3269,7 @@ class TemplateModule(UIModule):
"""
def __init__(self, handler: RequestHandler) -> None:
    """Initialize the module and its per-render resource registries.

    Stripped-diff residue fix: duplicate old/new ``super()`` calls were
    collapsed to the 6.1.0 form.
    """
    super().__init__(handler)
    # keep resources in both a list and a dict to preserve order
    self._resource_list = []  # type: List[Dict[str, Any]]
    self._resource_dict = {}  # type: Dict[str, Dict[str, Any]]
@ -3351,9 +3346,9 @@ def create_signed_value(
secret: _CookieSecretTypes,
name: str,
value: Union[str, bytes],
version: int = None,
clock: Callable[[], float] = None,
key_version: int = None,
version: Optional[int] = None,
clock: Optional[Callable[[], float]] = None,
key_version: Optional[int] = None,
) -> bytes:
if version is None:
version = DEFAULT_SIGNED_VALUE_VERSION
@ -3441,9 +3436,9 @@ def decode_signed_value(
secret: _CookieSecretTypes,
name: str,
value: Union[None, str, bytes],
max_age_days: int = 31,
clock: Callable[[], float] = None,
min_version: int = None,
max_age_days: float = 31,
clock: Optional[Callable[[], float]] = None,
min_version: Optional[int] = None,
) -> Optional[bytes]:
if clock is None:
clock = time.time
@ -3472,7 +3467,7 @@ def _decode_signed_value_v1(
secret: Union[str, bytes],
name: str,
value: bytes,
max_age_days: int,
max_age_days: float,
clock: Callable[[], float],
) -> Optional[bytes]:
parts = utf8(value).split(b"|")
@ -3527,7 +3522,7 @@ def _decode_signed_value_v2(
secret: _CookieSecretTypes,
name: str,
value: bytes,
max_age_days: int,
max_age_days: float,
clock: Callable[[], float],
) -> Optional[bytes]:
try:

77
lib/tornado_py3/websocket.py

@ -77,7 +77,7 @@ if TYPE_CHECKING:
# the server side and WebSocketClientConnection on the client
# side.
def on_ws_connection_close(
self, close_code: int = None, close_reason: str = None
self, close_code: Optional[int] = None, close_reason: Optional[str] = None
) -> None:
pass
@ -122,10 +122,10 @@ class _DecompressTooLargeError(Exception):
class _WebSocketParams(object):
def __init__(
self,
ping_interval: float = None,
ping_timeout: float = None,
ping_interval: Optional[float] = None,
ping_timeout: Optional[float] = None,
max_message_size: int = _default_max_message_size,
compression_options: Dict[str, Any] = None,
compression_options: Optional[Dict[str, Any]] = None,
) -> None:
self.ping_interval = ping_interval
self.ping_timeout = ping_timeout
@ -190,7 +190,7 @@ class WebSocketHandler(tornado_py3.web.RequestHandler):
Web browsers allow any site to open a websocket connection to any other,
instead of using the same-origin policy that governs other network
access from javascript. This can be surprising and is a potential
access from JavaScript. This can be surprising and is a potential
security hole, so since Tornado 4.0 `WebSocketHandler` requires
applications that wish to receive cross-origin websockets to opt in
by overriding the `~WebSocketHandler.check_origin` method (see that
@ -221,7 +221,7 @@ class WebSocketHandler(tornado_py3.web.RequestHandler):
request: httputil.HTTPServerRequest,
**kwargs: Any
) -> None:
super(WebSocketHandler, self).__init__(application, request, **kwargs)
super().__init__(application, request, **kwargs)
self.ws_connection = None # type: Optional[WebSocketProtocol]
self.close_code = None # type: Optional[int]
self.close_reason = None # type: Optional[str]
@ -280,8 +280,6 @@ class WebSocketHandler(tornado_py3.web.RequestHandler):
self.set_status(426, "Upgrade Required")
self.set_header("Sec-WebSocket-Version", "7, 8, 13")
stream = None
@property
def ping_interval(self) -> Optional[float]:
"""The interval for websocket keep-alive pings.
@ -468,7 +466,7 @@ class WebSocketHandler(tornado_py3.web.RequestHandler):
"""
pass
def close(self, code: int = None, reason: str = None) -> None:
def close(self, code: Optional[int] = None, reason: Optional[str] = None) -> None:
"""Closes this Web Socket.
Once the close handshake is successful the socket will be closed.
@ -571,7 +569,7 @@ class WebSocketHandler(tornado_py3.web.RequestHandler):
self._break_cycles()
def on_ws_connection_close(
self, close_code: int = None, close_reason: str = None
self, close_code: Optional[int] = None, close_reason: Optional[str] = None
) -> None:
self.close_code = close_code
self.close_reason = close_reason
@ -584,11 +582,11 @@ class WebSocketHandler(tornado_py3.web.RequestHandler):
# connection (if it was established in the first place,
# indicated by status code 101).
if self.get_status() != 101 or self._on_close_called:
super(WebSocketHandler, self)._break_cycles()
super()._break_cycles()
def send_error(self, *args: Any, **kwargs: Any) -> None:
if self.stream is None:
super(WebSocketHandler, self).send_error(*args, **kwargs)
super().send_error(*args, **kwargs)
else:
# If we get an uncaught exception during the handshake,
# we have no choice but to abruptly close the connection.
@ -670,7 +668,7 @@ class WebSocketProtocol(abc.ABC):
self.close() # let the subclass cleanup
@abc.abstractmethod
def close(self, code: Optional[int] = None, reason: Optional[str] = None) -> None:
    """Close the connection; concrete protocols implement the handshake.

    Stripped-diff residue fix: the pre-change signature (``int = None``)
    duplicated the post-change one; only the 6.1.0 form is kept.
    """
    raise NotImplementedError()
@abc.abstractmethod
@ -724,7 +722,7 @@ class _PerMessageDeflateCompressor(object):
self,
persistent: bool,
max_wbits: Optional[int],
compression_options: Dict[str, Any] = None,
compression_options: Optional[Dict[str, Any]] = None,
) -> None:
if max_wbits is None:
max_wbits = zlib.MAX_WBITS
@ -773,7 +771,7 @@ class _PerMessageDeflateDecompressor(object):
persistent: bool,
max_wbits: Optional[int],
max_message_size: int,
compression_options: Dict[str, Any] = None,
compression_options: Optional[Dict[str, Any]] = None,
) -> None:
self._max_message_size = max_message_size
if max_wbits is None:
@ -996,7 +994,7 @@ class WebSocketProtocol13(WebSocketProtocol):
self,
side: str,
agreed_parameters: Dict[str, Any],
compression_options: Dict[str, Any] = None,
compression_options: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
"""Converts a websocket agreed_parameters set to keyword arguments
for our compressor objects.
@ -1016,7 +1014,7 @@ class WebSocketProtocol13(WebSocketProtocol):
self,
side: str,
agreed_parameters: Dict[str, Any],
compression_options: Dict[str, Any] = None,
compression_options: Optional[Dict[str, Any]] = None,
) -> None:
# TODO: handle invalid parameters gracefully
allowed_keys = set(
@ -1259,7 +1257,7 @@ class WebSocketProtocol13(WebSocketProtocol):
self._abort()
return None
def close(self, code: int = None, reason: str = None) -> None:
def close(self, code: Optional[int] = None, reason: Optional[str] = None) -> None:
"""Closes the WebSocket connection."""
if not self.server_terminated:
if not self.stream.closed():
@ -1287,6 +1285,9 @@ class WebSocketProtocol13(WebSocketProtocol):
self._waiting = self.stream.io_loop.add_timeout(
self.stream.io_loop.time() + 5, self._abort
)
if self.ping_callback:
self.ping_callback.stop()
self.ping_callback = None
def is_closing(self) -> bool:
"""Return ``True`` if this connection is closing.
@ -1365,10 +1366,10 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
def __init__(
self,
request: httpclient.HTTPRequest,
on_message_callback: Callable[[Union[None, str, bytes]], None] = None,
compression_options: Dict[str, Any] = None,
ping_interval: float = None,
ping_timeout: float = None,
on_message_callback: Optional[Callable[[Union[None, str, bytes]], None]] = None,
compression_options: Optional[Dict[str, Any]] = None,
ping_interval: Optional[float] = None,
ping_timeout: Optional[float] = None,
max_message_size: int = _default_max_message_size,
subprotocols: Optional[List[str]] = [],
) -> None:
@ -1408,8 +1409,11 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
"Sec-WebSocket-Extensions"
] = "permessage-deflate; client_max_window_bits"
# Websocket connection is currently unable to follow redirects
request.follow_redirects = False
self.tcp_client = TCPClient()
super(WebSocketClientConnection, self).__init__(
super().__init__(
None,
request,
lambda: None,
@ -1420,7 +1424,7 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
104857600,
)
def close(self, code: int = None, reason: str = None) -> None:
def close(self, code: Optional[int] = None, reason: Optional[str] = None) -> None:
"""Closes the websocket connection.
``code`` and ``reason`` are documented under
@ -1441,10 +1445,10 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
self.connect_future.set_exception(StreamClosedError())
self._on_message(None)
self.tcp_client.close()
super(WebSocketClientConnection, self).on_connection_close()
super().on_connection_close()
def on_ws_connection_close(
self, close_code: int = None, close_reason: str = None
self, close_code: Optional[int] = None, close_reason: Optional[str] = None
) -> None:
self.close_code = close_code
self.close_reason = close_reason
@ -1466,9 +1470,7 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
) -> None:
assert isinstance(start_line, httputil.ResponseStartLine)
if start_line.code != 101:
await super(WebSocketClientConnection, self).headers_received(
start_line, headers
)
await super().headers_received(start_line, headers)
return
if self._timeout is not None:
@ -1506,7 +1508,8 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
return self.protocol.write_message(message, binary=binary)
def read_message(
self, callback: Callable[["Future[Union[None, str, bytes]]"], None] = None
self,
callback: Optional[Callable[["Future[Union[None, str, bytes]]"], None]] = None,
) -> Awaitable[Union[None, str, bytes]]:
"""Reads a message from the WebSocket server.
@ -1585,14 +1588,14 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
def websocket_connect(
url: Union[str, httpclient.HTTPRequest],
callback: Callable[["Future[WebSocketClientConnection]"], None] = None,
connect_timeout: float = None,
on_message_callback: Callable[[Union[None, str, bytes]], None] = None,
compression_options: Dict[str, Any] = None,
ping_interval: float = None,
ping_timeout: float = None,
callback: Optional[Callable[["Future[WebSocketClientConnection]"], None]] = None,
connect_timeout: Optional[float] = None,
on_message_callback: Optional[Callable[[Union[None, str, bytes]], None]] = None,
compression_options: Optional[Dict[str, Any]] = None,
ping_interval: Optional[float] = None,
ping_timeout: Optional[float] = None,
max_message_size: int = _default_max_message_size,
subprotocols: List[str] = None,
subprotocols: Optional[List[str]] = None,
) -> "Awaitable[WebSocketClientConnection]":
"""Client-side websocket support.

Loading…
Cancel
Save