diff --git a/couchpotato/core/database.py b/couchpotato/core/database.py
index 944dca6..66c4267 100644
--- a/couchpotato/core/database.py
+++ b/couchpotato/core/database.py
@@ -621,6 +621,8 @@ class Database(object):
except OperationalError:
log.error('Migrating from faulty database, probably a (too) old version: %s', traceback.format_exc())
+
+ rename_old = True
except:
log.error('Migration failed: %s', traceback.format_exc())
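A minimal sketch of the recovery flag this hunk adds: when migrating the old database raises an OperationalError, the failure is flagged so the faulty file can be parked aside instead of aborting outright. `migrate` and the paths here are illustrative stand-ins, not CouchPotato's actual migration code.

```python
import os
from sqlite3 import OperationalError

def migrate(old_db_path, migrate_step):
    rename_old = False
    try:
        migrate_step()
    except OperationalError as e:
        # Faulty (probably very old) database: flag it for renaming.
        print('Migrating from faulty database, probably a (too) old version: %s' % e)
        rename_old = True
    except Exception as e:
        print('Migration failed: %s' % e)
    # A pre-existing rename_old branch then parks the broken file (sketched):
    if rename_old and os.path.isfile(old_db_path):
        os.rename(old_db_path, old_db_path + '.old')
```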
diff --git a/couchpotato/core/downloaders/deluge.py b/couchpotato/core/downloaders/deluge.py
index 0c49233..1230cd6 100644
--- a/couchpotato/core/downloaders/deluge.py
+++ b/couchpotato/core/downloaders/deluge.py
@@ -27,6 +27,11 @@ class Deluge(DownloaderBase):
def connect(self, reconnect = False):
# Load host from config and split out port.
host = cleanHost(self.conf('host'), protocol = False).split(':')
+
+ # Force host assignment
+ if len(host) == 1:
+ host.append(80)
+
if not isInt(host[1]):
log.error('Config properties are not filled in correctly, port is missing.')
return False
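A short sketch of the parsing this hunk hardens: `cleanHost(..., protocol = False)` can return a bare hostname, so `host[1]` used to raise IndexError before the port check ever ran. `parse_host` and the default port are illustrative stand-ins, not CouchPotato's API.

```python
def parse_host(config_value, default_port=80):
    host = config_value.split(':')
    # A bare hostname leaves a single-element list; append a default
    # port so the later isInt(host[1]) check has something to inspect.
    if len(host) == 1:
        host.append(default_port)
    return host[0], int(host[1])

print(parse_host('deluge.local:58846'))  # ('deluge.local', 58846)
print(parse_host('deluge.local'))        # ('deluge.local', 80)
```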
diff --git a/couchpotato/core/logger.py b/couchpotato/core/logger.py
index a1b5e7d..ce99d68 100644
--- a/couchpotato/core/logger.py
+++ b/couchpotato/core/logger.py
@@ -62,7 +62,7 @@ class CPLog(object):
if isinstance(replace_tuple, tuple):
msg = msg % tuple([ss(x) if not isinstance(x, (int, float)) else x for x in list(replace_tuple)])
elif isinstance(replace_tuple, dict):
- msg = msg % dict((k, ss(v)) for k, v in replace_tuple.iteritems())
+ msg = msg % dict((k, ss(v) if not isinstance(v, (int, float)) else v) for k, v in replace_tuple.iteritems())
else:
msg = msg % ss(replace_tuple)
except Exception as e:
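The dict branch previously ran every value through `ss()`, turning numbers into strings and breaking numeric format specifiers. A minimal reproduction, with `str()` standing in for CouchPotato's `ss()` helper:

```python
msg = 'found %(count)d results'
params = {'count': 3}

# Old behaviour: %d receives the string '3' and raises TypeError.
try:
    print(msg % dict((k, str(v)) for k, v in params.items()))
except TypeError as e:
    print('old code fails: %s' % e)

# Fixed behaviour mirrors the tuple branch: ints and floats pass through.
print(msg % dict((k, str(v) if not isinstance(v, (int, float)) else v)
                 for k, v in params.items()))
```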
diff --git a/couchpotato/core/media/_base/providers/torrent/bithdtv.py b/couchpotato/core/media/_base/providers/torrent/bithdtv.py
index 57bc221..f686f9a 100644
--- a/couchpotato/core/media/_base/providers/torrent/bithdtv.py
+++ b/couchpotato/core/media/_base/providers/torrent/bithdtv.py
@@ -13,11 +13,11 @@ log = CPLog(__name__)
class Base(TorrentProvider):
urls = {
- 'test': 'http://www.bit-hdtv.com/',
- 'login': 'http://www.bit-hdtv.com/takelogin.php',
- 'login_check': 'http://www.bit-hdtv.com/messages.php',
- 'detail': 'http://www.bit-hdtv.com/details.php?id=%s',
- 'search': 'http://www.bit-hdtv.com/torrents.php?',
+ 'test': 'https://www.bit-hdtv.com/',
+ 'login': 'https://www.bit-hdtv.com/takelogin.php',
+ 'login_check': 'https://www.bit-hdtv.com/messages.php',
+ 'detail': 'https://www.bit-hdtv.com/details.php?id=%s',
+ 'search': 'https://www.bit-hdtv.com/torrents.php?',
}
# Searches for movies only - BiT-HDTV's subcategory and resolution search filters appear to be broken
@@ -93,7 +93,7 @@ config = [{
'tab': 'searcher',
'list': 'torrent_providers',
'name': 'BiT-HDTV',
- 'description': 'BiT-HDTV',
+ 'description': 'BiT-HDTV',
'wizard': True,
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAABnRSTlMAAAAAAABupgeRAAABMklEQVR4AZ3Qu0ojcQCF8W9MJcQbJNgEEQUbQVIqWgnaWfkIvoCgggixEAmIhRtY2GV3w7KwU61B0EYIxmiw0YCik84ipaCuc0nmP5dcjIUgOjqDvxf4OAdf9mnMLcUJyPyGSCP+YRdC+Kp8iagJKhuS+InYRhTGgDbeV2uEMand4ZRxizjXHQEimxhraAnUr73BNqQxMiNeV2SwcjTLEVtb4Zl10mXutvOWm2otw5Sxz6TGTbdd6ncuYvVLXAXrvM+ruyBpy1S3JLGDfUQ1O6jn5vTsrJXvqSt4UNfj6vxTRPxBHER5QeSirhLGk/5rWN+ffB1XZuxjnDy1q87m7TS+xOGA+Iv4gfkbaw+nOMXHDHnITGEk0VfRFnn4Po4vNYm6RGukmggR0L08+l+e4HMeASo/i6AJUjLgAAAAAElFTkSuQmCC',
'options': [
diff --git a/couchpotato/core/media/_base/providers/torrent/torrentleech.py b/couchpotato/core/media/_base/providers/torrent/torrentleech.py
deleted file mode 100644
index 83eb5f1..0000000
--- a/couchpotato/core/media/_base/providers/torrent/torrentleech.py
+++ /dev/null
@@ -1,126 +0,0 @@
-import traceback
-
-from bs4 import BeautifulSoup
-from couchpotato.core.helpers.variable import tryInt
-from couchpotato.core.logger import CPLog
-from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
-import six
-
-
-log = CPLog(__name__)
-
-
-class Base(TorrentProvider):
-
- urls = {
- 'test': 'https://www.torrentleech.org/',
- 'login': 'https://www.torrentleech.org/user/account/login/',
- 'login_check': 'https://torrentleech.org/user/messages',
- 'detail': 'https://www.torrentleech.org/torrent/%s',
- 'search': 'https://www.torrentleech.org/torrents/browse/index/query/%s/categories/%d',
- 'download': 'https://www.torrentleech.org%s',
- }
-
- http_time_between_calls = 1 # Seconds
- cat_backup_id = None
-
- def _searchOnTitle(self, title, media, quality, results):
-
- url = self.urls['search'] % self.buildUrl(title, media, quality)
-
- data = self.getHTMLData(url)
-
- if data:
- html = BeautifulSoup(data)
-
- try:
- result_table = html.find('table', attrs = {'id': 'torrenttable'})
- if not result_table:
- return
-
- entries = result_table.find_all('tr')
-
- for result in entries[1:]:
-
- link = result.find('td', attrs = {'class': 'name'}).find('a')
- url = result.find('td', attrs = {'class': 'quickdownload'}).find('a')
- details = result.find('td', attrs = {'class': 'name'}).find('a')
-
- results.append({
- 'id': link['href'].replace('/torrent/', ''),
- 'name': six.text_type(link.string),
- 'url': self.urls['download'] % url['href'],
- 'detail_url': self.urls['download'] % details['href'],
- 'size': self.parseSize(result.find_all('td')[4].string),
- 'seeders': tryInt(result.find('td', attrs = {'class': 'seeders'}).string),
- 'leechers': tryInt(result.find('td', attrs = {'class': 'leechers'}).string),
- })
-
- except:
- log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
-
- def getLoginParams(self):
- return {
- 'username': self.conf('username'),
- 'password': self.conf('password'),
- 'remember_me': 'on',
- 'login': 'submit',
- }
-
- def loginSuccess(self, output):
- return '/user/account/logout' in output.lower() or 'welcome back' in output.lower()
-
- loginCheckSuccess = loginSuccess
-
-
-config = [{
- 'name': 'torrentleech',
- 'groups': [
- {
- 'tab': 'searcher',
- 'list': 'torrent_providers',
- 'name': 'TorrentLeech',
- 'description': 'TorrentLeech',
- 'wizard': True,
- 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAACHUlEQVR4AZVSO48SYRSdGTCBEMKzILLAWiybkKAGMZRUUJEoDZX7B9zsbuQPYEEjNLTQkYgJDwsoSaxspEBsCITXjjNAIKi8AkzceXgmbHQ1NJ5iMufmO9/9zrmXlCSJ+B8o75J8Pp/NZj0eTzweBy0Wi4PBYD6f12o1r9ebTCZx+22HcrnMsuxms7m6urTZ7LPZDMVYLBZ8ZV3yo8aq9Pq0wzCMTqe77dDv9y8uLyAWBH6xWOyL0K/56fcb+rrPgPZ6PZfLRe1fsl6vCUmGKIqoqNXqdDr9Dbjps9znUV0uTqdTjuPkDoVCIfcuJ4gizjMMm8u9vW+1nr04czqdK56c37CbKY9j2+1WEARZ0Gq1RFHAz2q1qlQqXxoN69HRcDjUarW8ZD6QUigUOnY8uKYH8N1sNkul9yiGw+F6vS4Rxn8EsodEIqHRaOSnq9T7ajQazWQycEIR1AEBYDabSZJyHDucJyegwWBQr9ebTCaKvHd4cCQANUU9evwQ1Ofz4YvUKUI43GE8HouSiFiNRhOowWBIpVLyHITJkuW3PwgAEf3pgIwxF5r+OplMEsk3CPT5szCMnY7EwUdhwUh/CXiej0Qi3idPz89fdrpdbsfBzH7S3Q9K5pP4c0sAKpVKoVAQGO1ut+t0OoFAQHkH2Da/3/+but3uarWK0ZMQoNdyucRutdttmqZxMTzY7XaYxsrgtUjEZrNhkSwWyy/0NCatZumrNQAAAABJRU5ErkJggg==',
- 'options': [
- {
- 'name': 'enabled',
- 'type': 'enabler',
- 'default': False,
- },
- {
- 'name': 'username',
- 'default': '',
- },
- {
- 'name': 'password',
- 'default': '',
- 'type': 'password',
- },
- {
- 'name': 'seed_ratio',
- 'label': 'Seed ratio',
- 'type': 'float',
- 'default': 1,
- 'description': 'Will not be (re)moved until this seed ratio is met.',
- },
- {
- 'name': 'seed_time',
- 'label': 'Seed time',
- 'type': 'int',
- 'default': 40,
- 'description': 'Will not be (re)moved until this seed time (in hours) is met.',
- },
- {
- 'name': 'extra_score',
- 'advanced': True,
- 'label': 'Extra Score',
- 'type': 'int',
- 'default': 20,
- 'description': 'Starting score for each release found via this provider.',
- }
- ],
- },
- ],
-}]
diff --git a/couchpotato/core/media/movie/charts/static/charts.css b/couchpotato/core/media/movie/charts/static/charts.css
index 261169f..0084ed9 100644
--- a/couchpotato/core/media/movie/charts/static/charts.css
+++ b/couchpotato/core/media/movie/charts/static/charts.css
@@ -264,3 +264,11 @@
height: 40px;
}
+@media all and (max-width: 480px) {
+ .toggle_menu h2 {
+ font-size: 16px;
+ text-align: center;
+ height: 30px;
+ }
+}
+
diff --git a/couchpotato/core/media/movie/providers/info/fanarttv.py b/couchpotato/core/media/movie/providers/info/fanarttv.py
index 49d944e..74cd942 100644
--- a/couchpotato/core/media/movie/providers/info/fanarttv.py
+++ b/couchpotato/core/media/movie/providers/info/fanarttv.py
@@ -4,6 +4,7 @@ from couchpotato import tryInt
from couchpotato.core.event import addEvent
from couchpotato.core.logger import CPLog
from couchpotato.core.media.movie.providers.base import MovieProvider
+from requests import HTTPError
log = CPLog(__name__)
@@ -32,12 +33,14 @@ class FanartTV(MovieProvider):
try:
url = self.urls['api'] % identifier
- fanart_data = self.getJsonData(url)
+ fanart_data = self.getJsonData(url, show_error = False)
if fanart_data:
log.debug('Found images for %s', fanart_data.get('name'))
images = self._parseMovie(fanart_data)
-
+ except HTTPError as e:
+ log.debug('Failed getting extra art for %s: %s',
+ (identifier, e))
except:
log.error('Failed getting extra art for %s: %s',
(identifier, traceback.format_exc()))
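A sketch of the demotion this hunk performs: fanart.tv answers with an HTTP error when a movie simply has no artwork, which is routine, so only genuinely unexpected failures stay at error level. `fetch_json` is a hypothetical stand-in for `getJsonData(url, show_error = False)`.

```python
import logging
from requests import HTTPError

def get_extra_art(identifier, url, fetch_json):
    try:
        return fetch_json(url)
    except HTTPError as e:
        # Routine "no artwork for this movie" answer: debug, not error.
        logging.debug('Failed getting extra art for %s: %s', identifier, e)
    except Exception:
        logging.exception('Failed getting extra art for %s', identifier)
```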
diff --git a/couchpotato/core/media/movie/providers/info/themoviedb.py b/couchpotato/core/media/movie/providers/info/themoviedb.py
index d1dcd78..713d505 100644
--- a/couchpotato/core/media/movie/providers/info/themoviedb.py
+++ b/couchpotato/core/media/movie/providers/info/themoviedb.py
@@ -59,7 +59,8 @@ class TheMovieDb(MovieProvider):
for movie in raw:
parsed_movie = self.parseMovie(movie, extended = False)
- results.append(parsed_movie)
+ if parsed_movie:
+ results.append(parsed_movie)
nr += 1
if nr == limit:
@@ -83,7 +84,7 @@ class TheMovieDb(MovieProvider):
'id': identifier
}, extended = extended)
- return result
+ return result or {}
def parseMovie(self, movie, extended = True):
@@ -91,6 +92,8 @@ class TheMovieDb(MovieProvider):
movie = self.request('movie/%s' % movie.get('id'), {
'append_to_response': 'alternative_titles' + (',images,casts' if extended else '')
})
+ if not movie:
+ return
# Images
poster = self.getImage(movie, type = 'poster', size = 'w154')
@@ -192,8 +195,12 @@ class TheMovieDb(MovieProvider):
params = dict((k, v) for k, v in params.items() if v)
params = tryUrlencode(params)
- url = 'http://api.themoviedb.org/3/%s?api_key=%s%s' % (call, self.conf('api_key'), '&%s' % params if params else '')
- data = self.getJsonData(url)
+ try:
+ url = 'http://api.themoviedb.org/3/%s?api_key=%s%s' % (call, self.conf('api_key'), '&%s' % params if params else '')
+ data = self.getJsonData(url, show_error = False)
+ except:
+ log.debug('Movie not found: %s, %s', (call, params))
+ data = None
if data and return_key and return_key in data:
data = data.get(return_key)
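A sketch of the None-guarding added above: every TMDB call can now come back empty (request errors are swallowed to debug level), so downstream code defends against falsy results instead of crashing. `tmdb_request` is a hypothetical stand-in for `self.request()`.

```python
def get_media_info(tmdb_request, identifier):
    result = tmdb_request('movie/%s' % identifier)
    return result or {}  # never hand None to callers

def parse_movie(tmdb_request, movie_id):
    movie = tmdb_request('movie/%s' % movie_id)
    if not movie:
        return None  # filtered out by the `if parsed_movie` check above
    return {'id': movie.get('id'), 'title': movie.get('title')}
```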
diff --git a/couchpotato/core/media/movie/providers/torrent/iptorrents.py b/couchpotato/core/media/movie/providers/torrent/iptorrents.py
index 1c75feb..84a52f5 100644
--- a/couchpotato/core/media/movie/providers/torrent/iptorrents.py
+++ b/couchpotato/core/media/movie/providers/torrent/iptorrents.py
@@ -13,7 +13,7 @@ class IPTorrents(MovieProvider, Base):
([87], ['3d']),
([48], ['720p', '1080p', 'bd50']),
([72], ['cam', 'ts', 'tc', 'r5', 'scr']),
- ([7,48], ['dvdrip', 'brrip']),
+ ([7, 48, 20], ['dvdrip', 'brrip']),
([6], ['dvdr']),
]
diff --git a/couchpotato/core/media/movie/providers/torrent/torrentleech.py b/couchpotato/core/media/movie/providers/torrent/torrentleech.py
deleted file mode 100644
index d72f425..0000000
--- a/couchpotato/core/media/movie/providers/torrent/torrentleech.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from couchpotato.core.helpers.encoding import tryUrlencode
-from couchpotato.core.logger import CPLog
-from couchpotato.core.media._base.providers.torrent.torrentleech import Base
-from couchpotato.core.media.movie.providers.base import MovieProvider
-
-log = CPLog(__name__)
-
-autoload = 'TorrentLeech'
-
-
-class TorrentLeech(MovieProvider, Base):
-
- cat_ids = [
- ([13], ['720p', '1080p', 'bd50']),
- ([8], ['cam']),
- ([9], ['ts', 'tc']),
- ([10], ['r5', 'scr']),
- ([11], ['dvdrip']),
- ([14], ['brrip']),
- ([12], ['dvdr']),
- ]
-
- def buildUrl(self, title, media, quality):
- return (
- tryUrlencode(title.replace(':', '')),
- self.getCatId(quality)[0]
- )
diff --git a/couchpotato/core/media/movie/searcher.py b/couchpotato/core/media/movie/searcher.py
index 3c26386..11aa975 100755
--- a/couchpotato/core/media/movie/searcher.py
+++ b/couchpotato/core/media/movie/searcher.py
@@ -203,13 +203,6 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
quality['custom'] = quality_custom
results = fireEvent('searcher.search', search_protocols, movie, quality, single = True) or []
- results_count = len(results)
- total_result_count += results_count
- if results_count == 0:
- log.debug('Nothing found for %s in %s', (default_title, quality['label']))
-
- # Keep track of releases found outside ETA window
- outside_eta_results += results_count if could_not_be_released else 0
# Check if movie isn't deleted while searching
if not fireEvent('media.get', movie.get('_id'), single = True):
@@ -217,11 +210,17 @@ class MovieSearcher(SearcherBase, MovieTypeBase):
# Add them to this movie releases list
found_releases += fireEvent('release.create_from_search', results, movie, quality, single = True)
+ results_count = len(found_releases)
+ total_result_count += results_count
+ if results_count == 0:
+ log.debug('Nothing found for %s in %s', (default_title, quality['label']))
+
+ # Keep track of releases found outside ETA window
+ outside_eta_results += results_count if could_not_be_released else 0
# Don't trigger download, but notify user of available releases
- if could_not_be_released:
- if results_count > 0:
- log.debug('Found %s releases for "%s", but ETA isn\'t correct yet.', (results_count, default_title))
+ if could_not_be_released and results_count > 0:
+ log.debug('Found %s releases for "%s", but ETA isn\'t correct yet.', (results_count, default_title))
# Try find a valid result and download it
if (force_download or not could_not_be_released or always_search) and fireEvent('release.try_download_result', results, movie, quality_custom, single = True):
diff --git a/couchpotato/core/plugins/release/main.py b/couchpotato/core/plugins/release/main.py
index 375547c..a241c34 100644
--- a/couchpotato/core/plugins/release/main.py
+++ b/couchpotato/core/plugins/release/main.py
@@ -441,7 +441,6 @@ class Release(Plugin):
for rel in search_results:
rel_identifier = md5(rel['url'])
- found_releases.append(rel_identifier)
release = {
'_t': 'release',
@@ -482,6 +481,9 @@ class Release(Plugin):
# Update release in search_results
rel['status'] = rls.get('status')
+ if rel['status'] == 'available':
+ found_releases.append(rel_identifier)
+
return found_releases
except:
log.error('Failed: %s', traceback.format_exc())
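A sketch of the filter this hunk applies: a release identifier is now reported back only when its final status is 'available', so ignored or already-downloaded releases no longer count as found. The md5-of-URL identifier mirrors the real code; the sample data is illustrative.

```python
from hashlib import md5

def create_from_search(search_results):
    found_releases = []
    for rel in search_results:
        rel_identifier = md5(rel['url'].encode('utf-8')).hexdigest()
        # ... release record is created or updated here ...
        if rel['status'] == 'available':
            found_releases.append(rel_identifier)
    return found_releases

print(len(create_from_search([
    {'url': 'http://example.com/a.torrent', 'status': 'available'},
    {'url': 'http://example.com/b.torrent', 'status': 'ignored'},
])))  # 1
```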
diff --git a/couchpotato/core/plugins/renamer.py b/couchpotato/core/plugins/renamer.py
index 94e1002..d6381a3 100755
--- a/couchpotato/core/plugins/renamer.py
+++ b/couchpotato/core/plugins/renamer.py
@@ -220,6 +220,10 @@ class Renamer(Plugin):
nfo_name = self.conf('nfo_name')
separator = self.conf('separator')
+ if len(file_name) == 0:
+ log.error('Please fill in the filename option under renamer settings. Forcing it on <original>.<ext> to keep the same name as the source file.')
+ file_name = '<original>.<ext>'
+
cd_keys = ['<cd>','<cd_nr>', '<original>']
if not any(x in folder_name for x in cd_keys) and not any(x in file_name for x in cd_keys):
log.error('Missing `cd` or `cd_nr` in the renamer. This will cause multi-file releases to be renamed to the same file. '
@@ -791,7 +795,7 @@ Remove it if you want it to be renamed (again, or at least let it try again)
dest = sp(dest)
try:
- if os.path.exists(dest):
+ if os.path.exists(dest) and os.path.isfile(dest):
raise Exception('Destination "%s" already exists' % dest)
move_type = self.conf('file_action')
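A short sketch of the relaxed collision check: `os.path.exists()` is also true for directories, so a destination path that happened to be an existing directory used to abort the move; `os.path.isfile()` restricts the error to real file collisions.

```python
import os

def ensure_dest_free(dest):
    # Only an existing *file* at the destination is a collision.
    if os.path.exists(dest) and os.path.isfile(dest):
        raise Exception('Destination "%s" already exists' % dest)
```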
diff --git a/libs/tornado/__init__.py b/libs/tornado/__init__.py
index eefe0f2..0e39f84 100755
--- a/libs/tornado/__init__.py
+++ b/libs/tornado/__init__.py
@@ -25,5 +25,5 @@ from __future__ import absolute_import, division, print_function, with_statement
# is zero for an official release, positive for a development branch,
# or negative for a release candidate or beta (after the base version
# number has been incremented)
-version = "4.0.1"
-version_info = (4, 0, 1, -100)
+version = "4.1.dev1"
+version_info = (4, 1, 0, -100)
diff --git a/libs/tornado/auth.py b/libs/tornado/auth.py
index 7bd3fa1..ac2fd0d 100755
--- a/libs/tornado/auth.py
+++ b/libs/tornado/auth.py
@@ -76,7 +76,7 @@ from tornado import escape
from tornado.httputil import url_concat
from tornado.log import gen_log
from tornado.stack_context import ExceptionStackContext
-from tornado.util import bytes_type, u, unicode_type, ArgReplacer
+from tornado.util import u, unicode_type, ArgReplacer
try:
import urlparse # py2
@@ -333,7 +333,7 @@ class OAuthMixin(object):
The ``callback_uri`` may be omitted if you have previously
registered a callback URI with the third-party service. For
- some sevices (including Friendfeed), you must use a
+ some services (including Friendfeed), you must use a
previously-registered callback URI and cannot specify a
callback via this method.
@@ -1112,7 +1112,7 @@ class FacebookMixin(object):
args["cancel_url"] = urlparse.urljoin(
self.request.full_url(), cancel_uri)
if extended_permissions:
- if isinstance(extended_permissions, (unicode_type, bytes_type)):
+ if isinstance(extended_permissions, (unicode_type, bytes)):
extended_permissions = [extended_permissions]
args["req_perms"] = ",".join(extended_permissions)
self.redirect("http://www.facebook.com/login.php?" +
diff --git a/libs/tornado/concurrent.py b/libs/tornado/concurrent.py
index 702aa35..6bab5d2 100755
--- a/libs/tornado/concurrent.py
+++ b/libs/tornado/concurrent.py
@@ -29,6 +29,7 @@ import sys
from tornado.stack_context import ExceptionStackContext, wrap
from tornado.util import raise_exc_info, ArgReplacer
+from tornado.log import app_log
try:
from concurrent import futures
@@ -173,8 +174,11 @@ class Future(object):
def _set_done(self):
self._done = True
for cb in self._callbacks:
- # TODO: error handling
- cb(self)
+ try:
+ cb(self)
+ except Exception:
+ app_log.exception('exception calling callback %r for %r',
+ cb, self)
self._callbacks = None
TracebackFuture = Future
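A sketch of the isolation added in `_set_done`: one raising done-callback no longer prevents the remaining callbacks from seeing the resolved future.

```python
import logging

def run_done_callbacks(future, callbacks):
    for cb in callbacks:
        try:
            cb(future)
        except Exception:
            logging.exception('exception calling callback %r for %r',
                              cb, future)

ran = []
run_done_callbacks('fake-future', [
    lambda f: 1 / 0,          # raises: logged, not propagated
    lambda f: ran.append(f),  # still runs
])
print(ran)  # ['fake-future']
```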
diff --git a/libs/tornado/curl_httpclient.py b/libs/tornado/curl_httpclient.py
index 3da59a4..68047cc 100755
--- a/libs/tornado/curl_httpclient.py
+++ b/libs/tornado/curl_httpclient.py
@@ -19,10 +19,12 @@
from __future__ import absolute_import, division, print_function, with_statement
import collections
+import functools
import logging
import pycurl
import threading
import time
+from io import BytesIO
from tornado import httputil
from tornado import ioloop
@@ -31,12 +33,6 @@ from tornado import stack_context
from tornado.escape import utf8, native_str
from tornado.httpclient import HTTPResponse, HTTPError, AsyncHTTPClient, main
-from tornado.util import bytes_type
-
-try:
- from io import BytesIO # py3
-except ImportError:
- from cStringIO import StringIO as BytesIO # py2
class CurlAsyncHTTPClient(AsyncHTTPClient):
@@ -45,7 +41,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
self._multi = pycurl.CurlMulti()
self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)
- self._curls = [_curl_create() for i in range(max_clients)]
+ self._curls = [self._curl_create() for i in range(max_clients)]
self._free_list = self._curls[:]
self._requests = collections.deque()
self._fds = {}
@@ -211,8 +207,8 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
"callback": callback,
"curl_start_time": time.time(),
}
- _curl_setup_request(curl, request, curl.info["buffer"],
- curl.info["headers"])
+ self._curl_setup_request(curl, request, curl.info["buffer"],
+ curl.info["headers"])
self._multi.add_handle(curl)
if not started:
@@ -259,218 +255,212 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
def handle_callback_exception(self, callback):
self.io_loop.handle_callback_exception(callback)
+ def _curl_create(self):
+ curl = pycurl.Curl()
+ if gen_log.isEnabledFor(logging.DEBUG):
+ curl.setopt(pycurl.VERBOSE, 1)
+ curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug)
+ return curl
+
+ def _curl_setup_request(self, curl, request, buffer, headers):
+ curl.setopt(pycurl.URL, native_str(request.url))
+
+ # libcurl's magic "Expect: 100-continue" behavior causes delays
+ # with servers that don't support it (which include, among others,
+ # Google's OpenID endpoint). Additionally, this behavior has
+ # a bug in conjunction with the curl_multi_socket_action API
+ # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976),
+ # which increases the delays. It's more trouble than it's worth,
+ # so just turn off the feature (yes, setting Expect: to an empty
+ # value is the official way to disable this)
+ if "Expect" not in request.headers:
+ request.headers["Expect"] = ""
+
+ # libcurl adds Pragma: no-cache by default; disable that too
+ if "Pragma" not in request.headers:
+ request.headers["Pragma"] = ""
-class CurlError(HTTPError):
- def __init__(self, errno, message):
- HTTPError.__init__(self, 599, message)
- self.errno = errno
-
-
-def _curl_create():
- curl = pycurl.Curl()
- if gen_log.isEnabledFor(logging.DEBUG):
- curl.setopt(pycurl.VERBOSE, 1)
- curl.setopt(pycurl.DEBUGFUNCTION, _curl_debug)
- return curl
-
-
-def _curl_setup_request(curl, request, buffer, headers):
- curl.setopt(pycurl.URL, native_str(request.url))
-
- # libcurl's magic "Expect: 100-continue" behavior causes delays
- # with servers that don't support it (which include, among others,
- # Google's OpenID endpoint). Additionally, this behavior has
- # a bug in conjunction with the curl_multi_socket_action API
- # (https://sourceforge.net/tracker/?func=detail&atid=100976&aid=3039744&group_id=976),
- # which increases the delays. It's more trouble than it's worth,
- # so just turn off the feature (yes, setting Expect: to an empty
- # value is the official way to disable this)
- if "Expect" not in request.headers:
- request.headers["Expect"] = ""
-
- # libcurl adds Pragma: no-cache by default; disable that too
- if "Pragma" not in request.headers:
- request.headers["Pragma"] = ""
-
- # Request headers may be either a regular dict or HTTPHeaders object
- if isinstance(request.headers, httputil.HTTPHeaders):
- curl.setopt(pycurl.HTTPHEADER,
- [native_str("%s: %s" % i) for i in request.headers.get_all()])
- else:
curl.setopt(pycurl.HTTPHEADER,
- [native_str("%s: %s" % i) for i in request.headers.items()])
+ ["%s: %s" % (native_str(k), native_str(v))
+ for k, v in request.headers.get_all()])
- if request.header_callback:
curl.setopt(pycurl.HEADERFUNCTION,
- lambda line: request.header_callback(native_str(line)))
- else:
- curl.setopt(pycurl.HEADERFUNCTION,
- lambda line: _curl_header_callback(headers,
- native_str(line)))
- if request.streaming_callback:
- write_function = request.streaming_callback
- else:
- write_function = buffer.write
- if bytes_type is str: # py2
- curl.setopt(pycurl.WRITEFUNCTION, write_function)
- else: # py3
- # Upstream pycurl doesn't support py3, but ubuntu 12.10 includes
- # a fork/port. That version has a bug in which it passes unicode
- # strings instead of bytes to the WRITEFUNCTION. This means that
- # if you use a WRITEFUNCTION (which tornado always does), you cannot
- # download arbitrary binary data. This needs to be fixed in the
- # ported pycurl package, but in the meantime this lambda will
- # make it work for downloading (utf8) text.
- curl.setopt(pycurl.WRITEFUNCTION, lambda s: write_function(utf8(s)))
- curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
- curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
- curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
- curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
- if request.user_agent:
- curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
- else:
- curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
- if request.network_interface:
- curl.setopt(pycurl.INTERFACE, request.network_interface)
- if request.decompress_response:
- curl.setopt(pycurl.ENCODING, "gzip,deflate")
- else:
- curl.setopt(pycurl.ENCODING, "none")
- if request.proxy_host and request.proxy_port:
- curl.setopt(pycurl.PROXY, request.proxy_host)
- curl.setopt(pycurl.PROXYPORT, request.proxy_port)
- if request.proxy_username:
- credentials = '%s:%s' % (request.proxy_username,
- request.proxy_password)
- curl.setopt(pycurl.PROXYUSERPWD, credentials)
- else:
- curl.setopt(pycurl.PROXY, '')
- curl.unsetopt(pycurl.PROXYUSERPWD)
- if request.validate_cert:
- curl.setopt(pycurl.SSL_VERIFYPEER, 1)
- curl.setopt(pycurl.SSL_VERIFYHOST, 2)
- else:
- curl.setopt(pycurl.SSL_VERIFYPEER, 0)
- curl.setopt(pycurl.SSL_VERIFYHOST, 0)
- if request.ca_certs is not None:
- curl.setopt(pycurl.CAINFO, request.ca_certs)
- else:
- # There is no way to restore pycurl.CAINFO to its default value
- # (Using unsetopt makes it reject all certificates).
- # I don't see any way to read the default value from python so it
- # can be restored later. We'll have to just leave CAINFO untouched
- # if no ca_certs file was specified, and require that if any
- # request uses a custom ca_certs file, they all must.
- pass
-
- if request.allow_ipv6 is False:
- # Curl behaves reasonably when DNS resolution gives an ipv6 address
- # that we can't reach, so allow ipv6 unless the user asks to disable.
- curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
- else:
- curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)
-
- # Set the request method through curl's irritating interface which makes
- # up names for almost every single method
- curl_options = {
- "GET": pycurl.HTTPGET,
- "POST": pycurl.POST,
- "PUT": pycurl.UPLOAD,
- "HEAD": pycurl.NOBODY,
- }
- custom_methods = set(["DELETE", "OPTIONS", "PATCH"])
- for o in curl_options.values():
- curl.setopt(o, False)
- if request.method in curl_options:
- curl.unsetopt(pycurl.CUSTOMREQUEST)
- curl.setopt(curl_options[request.method], True)
- elif request.allow_nonstandard_methods or request.method in custom_methods:
- curl.setopt(pycurl.CUSTOMREQUEST, request.method)
- else:
- raise KeyError('unknown method ' + request.method)
-
- # Handle curl's cryptic options for every individual HTTP method
- if request.method in ("POST", "PUT"):
- if request.body is None:
- raise AssertionError(
- 'Body must not be empty for "%s" request'
- % request.method)
-
- request_buffer = BytesIO(utf8(request.body))
- curl.setopt(pycurl.READFUNCTION, request_buffer.read)
- if request.method == "POST":
+ functools.partial(self._curl_header_callback,
+ headers, request.header_callback))
+ if request.streaming_callback:
+ write_function = lambda chunk: self.io_loop.add_callback(
+ request.streaming_callback, chunk)
+ else:
+ write_function = buffer.write
+ if bytes is str: # py2
+ curl.setopt(pycurl.WRITEFUNCTION, write_function)
+ else: # py3
+ # Upstream pycurl doesn't support py3, but ubuntu 12.10 includes
+ # a fork/port. That version has a bug in which it passes unicode
+ # strings instead of bytes to the WRITEFUNCTION. This means that
+ # if you use a WRITEFUNCTION (which tornado always does), you cannot
+ # download arbitrary binary data. This needs to be fixed in the
+ # ported pycurl package, but in the meantime this lambda will
+ # make it work for downloading (utf8) text.
+ curl.setopt(pycurl.WRITEFUNCTION, lambda s: write_function(utf8(s)))
+ curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
+ curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
+ curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
+ curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
+ if request.user_agent:
+ curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
+ else:
+ curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
+ if request.network_interface:
+ curl.setopt(pycurl.INTERFACE, request.network_interface)
+ if request.decompress_response:
+ curl.setopt(pycurl.ENCODING, "gzip,deflate")
+ else:
+ curl.setopt(pycurl.ENCODING, "none")
+ if request.proxy_host and request.proxy_port:
+ curl.setopt(pycurl.PROXY, request.proxy_host)
+ curl.setopt(pycurl.PROXYPORT, request.proxy_port)
+ if request.proxy_username:
+ credentials = '%s:%s' % (request.proxy_username,
+ request.proxy_password)
+ curl.setopt(pycurl.PROXYUSERPWD, credentials)
+ else:
+ curl.setopt(pycurl.PROXY, '')
+ curl.unsetopt(pycurl.PROXYUSERPWD)
+ if request.validate_cert:
+ curl.setopt(pycurl.SSL_VERIFYPEER, 1)
+ curl.setopt(pycurl.SSL_VERIFYHOST, 2)
+ else:
+ curl.setopt(pycurl.SSL_VERIFYPEER, 0)
+ curl.setopt(pycurl.SSL_VERIFYHOST, 0)
+ if request.ca_certs is not None:
+ curl.setopt(pycurl.CAINFO, request.ca_certs)
+ else:
+ # There is no way to restore pycurl.CAINFO to its default value
+ # (Using unsetopt makes it reject all certificates).
+ # I don't see any way to read the default value from python so it
+ # can be restored later. We'll have to just leave CAINFO untouched
+ # if no ca_certs file was specified, and require that if any
+ # request uses a custom ca_certs file, they all must.
+ pass
+
+ if request.allow_ipv6 is False:
+ # Curl behaves reasonably when DNS resolution gives an ipv6 address
+ # that we can't reach, so allow ipv6 unless the user asks to disable.
+ curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_V4)
+ else:
+ curl.setopt(pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)
+
+ # Set the request method through curl's irritating interface which makes
+ # up names for almost every single method
+ curl_options = {
+ "GET": pycurl.HTTPGET,
+ "POST": pycurl.POST,
+ "PUT": pycurl.UPLOAD,
+ "HEAD": pycurl.NOBODY,
+ }
+ custom_methods = set(["DELETE", "OPTIONS", "PATCH"])
+ for o in curl_options.values():
+ curl.setopt(o, False)
+ if request.method in curl_options:
+ curl.unsetopt(pycurl.CUSTOMREQUEST)
+ curl.setopt(curl_options[request.method], True)
+ elif request.allow_nonstandard_methods or request.method in custom_methods:
+ curl.setopt(pycurl.CUSTOMREQUEST, request.method)
+ else:
+ raise KeyError('unknown method ' + request.method)
+
+ # Handle curl's cryptic options for every individual HTTP method
+ if request.method == "GET":
+ if request.body is not None:
+ raise ValueError('Body must be None for GET request')
+ elif request.method in ("POST", "PUT") or request.body:
+ if request.body is None:
+ raise ValueError(
+ 'Body must not be None for "%s" request'
+ % request.method)
+
+ request_buffer = BytesIO(utf8(request.body))
def ioctl(cmd):
if cmd == curl.IOCMD_RESTARTREAD:
request_buffer.seek(0)
+ curl.setopt(pycurl.READFUNCTION, request_buffer.read)
curl.setopt(pycurl.IOCTLFUNCTION, ioctl)
- curl.setopt(pycurl.POSTFIELDSIZE, len(request.body))
- else:
- curl.setopt(pycurl.INFILESIZE, len(request.body))
- elif request.method == "GET":
- if request.body is not None:
- raise AssertionError('Body must be empty for GET request')
-
- if request.auth_username is not None:
- userpwd = "%s:%s" % (request.auth_username, request.auth_password or '')
-
- if request.auth_mode is None or request.auth_mode == "basic":
- curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
- elif request.auth_mode == "digest":
- curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
+ if request.method == "POST":
+ curl.setopt(pycurl.POSTFIELDSIZE, len(request.body))
+ else:
+ curl.setopt(pycurl.UPLOAD, True)
+ curl.setopt(pycurl.INFILESIZE, len(request.body))
+
+ if request.auth_username is not None:
+ userpwd = "%s:%s" % (request.auth_username, request.auth_password or '')
+
+ if request.auth_mode is None or request.auth_mode == "basic":
+ curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
+ elif request.auth_mode == "digest":
+ curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
+ else:
+ raise ValueError("Unsupported auth_mode %s" % request.auth_mode)
+
+ curl.setopt(pycurl.USERPWD, native_str(userpwd))
+ gen_log.debug("%s %s (username: %r)", request.method, request.url,
+ request.auth_username)
else:
- raise ValueError("Unsupported auth_mode %s" % request.auth_mode)
-
- curl.setopt(pycurl.USERPWD, native_str(userpwd))
- gen_log.debug("%s %s (username: %r)", request.method, request.url,
- request.auth_username)
- else:
- curl.unsetopt(pycurl.USERPWD)
- gen_log.debug("%s %s", request.method, request.url)
-
- if request.client_cert is not None:
- curl.setopt(pycurl.SSLCERT, request.client_cert)
-
- if request.client_key is not None:
- curl.setopt(pycurl.SSLKEY, request.client_key)
-
- if threading.activeCount() > 1:
- # libcurl/pycurl is not thread-safe by default. When multiple threads
- # are used, signals should be disabled. This has the side effect
- # of disabling DNS timeouts in some environments (when libcurl is
- # not linked against ares), so we don't do it when there is only one
- # thread. Applications that use many short-lived threads may need
- # to set NOSIGNAL manually in a prepare_curl_callback since
- # there may not be any other threads running at the time we call
- # threading.activeCount.
- curl.setopt(pycurl.NOSIGNAL, 1)
- if request.prepare_curl_callback is not None:
- request.prepare_curl_callback(curl)
-
-
-def _curl_header_callback(headers, header_line):
- # header_line as returned by curl includes the end-of-line characters.
- header_line = header_line.strip()
- if header_line.startswith("HTTP/"):
- headers.clear()
- try:
- (__, __, reason) = httputil.parse_response_start_line(header_line)
- header_line = "X-Http-Reason: %s" % reason
- except httputil.HTTPInputError:
+ curl.unsetopt(pycurl.USERPWD)
+ gen_log.debug("%s %s", request.method, request.url)
+
+ if request.client_cert is not None:
+ curl.setopt(pycurl.SSLCERT, request.client_cert)
+
+ if request.client_key is not None:
+ curl.setopt(pycurl.SSLKEY, request.client_key)
+
+ if threading.activeCount() > 1:
+ # libcurl/pycurl is not thread-safe by default. When multiple threads
+ # are used, signals should be disabled. This has the side effect
+ # of disabling DNS timeouts in some environments (when libcurl is
+ # not linked against ares), so we don't do it when there is only one
+ # thread. Applications that use many short-lived threads may need
+ # to set NOSIGNAL manually in a prepare_curl_callback since
+ # there may not be any other threads running at the time we call
+ # threading.activeCount.
+ curl.setopt(pycurl.NOSIGNAL, 1)
+ if request.prepare_curl_callback is not None:
+ request.prepare_curl_callback(curl)
+
+ def _curl_header_callback(self, headers, header_callback, header_line):
+ header_line = native_str(header_line)
+ if header_callback is not None:
+ self.io_loop.add_callback(header_callback, header_line)
+ # header_line as returned by curl includes the end-of-line characters.
+ header_line = header_line.strip()
+ if header_line.startswith("HTTP/"):
+ headers.clear()
+ try:
+ (__, __, reason) = httputil.parse_response_start_line(header_line)
+ header_line = "X-Http-Reason: %s" % reason
+ except httputil.HTTPInputError:
+ return
+ if not header_line:
return
- if not header_line:
- return
- headers.parse_line(header_line)
-
-
-def _curl_debug(debug_type, debug_msg):
- debug_types = ('I', '<', '>', '<', '>')
- if debug_type == 0:
- gen_log.debug('%s', debug_msg.strip())
- elif debug_type in (1, 2):
- for line in debug_msg.splitlines():
- gen_log.debug('%s %s', debug_types[debug_type], line)
- elif debug_type == 4:
- gen_log.debug('%s %r', debug_types[debug_type], debug_msg)
+ headers.parse_line(header_line)
+
+ def _curl_debug(self, debug_type, debug_msg):
+ debug_types = ('I', '<', '>', '<', '>')
+ if debug_type == 0:
+ gen_log.debug('%s', debug_msg.strip())
+ elif debug_type in (1, 2):
+ for line in debug_msg.splitlines():
+ gen_log.debug('%s %s', debug_types[debug_type], line)
+ elif debug_type == 4:
+ gen_log.debug('%s %r', debug_types[debug_type], debug_msg)
+
+
+class CurlError(HTTPError):
+ def __init__(self, errno, message):
+ HTTPError.__init__(self, 599, message)
+ self.errno = errno
+
if __name__ == "__main__":
AsyncHTTPClient.configure(CurlAsyncHTTPClient)
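A small sketch of the `functools.partial` rewiring above: one bound callback replaces the two header-handling lambdas, forwarding each raw line to the user's `header_callback` while still feeding the internal header store. The parsing here is heavily simplified; the real logic is in `_curl_header_callback` above.

```python
import functools

def curl_header_callback(headers, header_callback, header_line):
    if header_callback is not None:
        header_callback(header_line)
    header_line = header_line.strip()
    if header_line and not header_line.startswith('HTTP/'):
        headers.append(header_line)

headers = []
bound = functools.partial(curl_header_callback, headers, None)
bound('HTTP/1.1 200 OK\r\n')
bound('Content-Type: text/html\r\n')
print(headers)  # ['Content-Type: text/html']
```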
diff --git a/libs/tornado/escape.py b/libs/tornado/escape.py
index 48fa673..24be226 100755
--- a/libs/tornado/escape.py
+++ b/libs/tornado/escape.py
@@ -25,7 +25,7 @@ from __future__ import absolute_import, division, print_function, with_statement
import re
import sys
-from tornado.util import bytes_type, unicode_type, basestring_type, u
+from tornado.util import unicode_type, basestring_type, u
try:
from urllib.parse import parse_qs as _parse_qs # py3
@@ -187,7 +187,7 @@ else:
return encoded
-_UTF8_TYPES = (bytes_type, type(None))
+_UTF8_TYPES = (bytes, type(None))
def utf8(value):
@@ -215,7 +215,7 @@ def to_unicode(value):
"""
if isinstance(value, _TO_UNICODE_TYPES):
return value
- if not isinstance(value, bytes_type):
+ if not isinstance(value, bytes):
raise TypeError(
"Expected bytes, unicode, or None; got %r" % type(value)
)
@@ -246,7 +246,7 @@ def to_basestring(value):
"""
if isinstance(value, _BASESTRING_TYPES):
return value
- if not isinstance(value, bytes_type):
+ if not isinstance(value, bytes):
raise TypeError(
"Expected bytes, unicode, or None; got %r" % type(value)
)
@@ -264,7 +264,7 @@ def recursive_unicode(obj):
return list(recursive_unicode(i) for i in obj)
elif isinstance(obj, tuple):
return tuple(recursive_unicode(i) for i in obj)
- elif isinstance(obj, bytes_type):
+ elif isinstance(obj, bytes):
return to_unicode(obj)
else:
return obj
diff --git a/libs/tornado/gen.py b/libs/tornado/gen.py
index 06f2715..2fc9b0c 100755
--- a/libs/tornado/gen.py
+++ b/libs/tornado/gen.py
@@ -109,7 +109,10 @@ def engine(func):
raise ReturnValueIgnoredError(
"@gen.engine functions cannot return values: %r" %
(future.result(),))
- future.add_done_callback(final_callback)
+ # The engine interface doesn't give us any way to return
+ # errors but to raise them into the stack context.
+ # Save the stack context here to use when the Future has resolved.
+ future.add_done_callback(stack_context.wrap(final_callback))
return wrapper
@@ -136,6 +139,17 @@ def coroutine(func, replace_callback=True):
From the caller's perspective, ``@gen.coroutine`` is similar to
the combination of ``@return_future`` and ``@gen.engine``.
+
+ .. warning::
+
+ When exceptions occur inside a coroutine, the exception
+ information will be stored in the `.Future` object. You must
+ examine the result of the `.Future` object, or the exception
+ may go unnoticed by your code. This means yielding the function
+ if called from another coroutine, using something like
+ `.IOLoop.run_sync` for top-level calls, or passing the `.Future`
+ to `.IOLoop.add_future`.
+
"""
return _make_coroutine_wrapper(func, replace_callback=True)
@@ -185,7 +199,18 @@ def _make_coroutine_wrapper(func, replace_callback):
future.set_exc_info(sys.exc_info())
else:
Runner(result, future, yielded)
- return future
+ try:
+ return future
+ finally:
+ # Subtle memory optimization: if next() raised an exception,
+ # the future's exc_info contains a traceback which
+ # includes this stack frame. This creates a cycle,
+ # which will be collected at the next full GC but has
+ # been shown to greatly increase memory usage of
+ # benchmarks (relative to the refcount-based scheme
+ # used in the absence of cycles). We can avoid the
+ # cycle by clearing the local variable after we return it.
+ future = None
future.set_result(result)
return future
return wrapper
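The try/finally above is worth spelling out: `finally` runs after the return value has been captured, so clearing the local drops this frame's reference to the future without affecting what the caller receives. A standalone demonstration of that ordering:

```python
def make_future():
    future = {'exc_info': 'traceback that would reference this frame'}
    try:
        return future
    finally:
        future = None  # breaks the frame -> traceback -> frame cycle

print(make_future() is not None)  # True: the caller still gets the future
```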
diff --git a/libs/tornado/http1connection.py b/libs/tornado/http1connection.py
index 1ac24f5..90895cc 100644
--- a/libs/tornado/http1connection.py
+++ b/libs/tornado/http1connection.py
@@ -306,6 +306,8 @@ class HTTP1Connection(httputil.HTTPConnection):
self._clear_callbacks()
stream = self.stream
self.stream = None
+ if not self._finish_future.done():
+ self._finish_future.set_result(None)
return stream
def set_body_timeout(self, timeout):
@@ -467,6 +469,7 @@ class HTTP1Connection(httputil.HTTPConnection):
if start_line.version == "HTTP/1.1":
return connection_header != "close"
elif ("Content-Length" in headers
+ or headers.get("Transfer-Encoding", "").lower() == "chunked"
or start_line.method in ("HEAD", "GET")):
return connection_header == "keep-alive"
return False
@@ -483,7 +486,11 @@ class HTTP1Connection(httputil.HTTPConnection):
self._finish_future.set_result(None)
def _parse_headers(self, data):
- data = native_str(data.decode('latin1'))
+ # The lstrip removes newlines that some implementations sometimes
+ # insert between messages of a reused connection. Per RFC 7230,
+ # we SHOULD ignore at least one empty line before the request.
+ # http://tools.ietf.org/html/rfc7230#section-3.5
+ data = native_str(data.decode('latin1')).lstrip("\r\n")
eol = data.find("\r\n")
start_line = data[:eol]
try:
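A condensed model of the keep-alive rule this hunk extends: without HTTP/1.1 semantics a connection can only be reused when the body's end is delimited, so chunked Transfer-Encoding now qualifies alongside Content-Length. Simplified from `_can_keep_alive`; the dict stands in for an HTTPHeaders object.

```python
def can_keep_alive(version, headers, connection_header, method):
    if version == 'HTTP/1.1':
        return connection_header != 'close'
    if ('Content-Length' in headers
            or headers.get('Transfer-Encoding', '').lower() == 'chunked'
            or method in ('HEAD', 'GET')):
        return connection_header == 'keep-alive'
    return False

print(can_keep_alive('HTTP/1.0', {'Transfer-Encoding': 'chunked'},
                     'keep-alive', 'POST'))  # True only with this fix
```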
diff --git a/libs/tornado/httpclient.py b/libs/tornado/httpclient.py
index c8ecf47..df42951 100755
--- a/libs/tornado/httpclient.py
+++ b/libs/tornado/httpclient.py
@@ -63,7 +63,12 @@ class HTTPClient(object):
response = http_client.fetch("http://www.google.com/")
print response.body
except httpclient.HTTPError as e:
- print "Error:", e
+ # HTTPError is raised for non-200 responses; the response
+ # can be found in e.response.
+ print("Error: " + str(e))
+ except Exception as e:
+ # Other errors are possible, such as IOError.
+ print("Error: " + str(e))
http_client.close()
"""
def __init__(self, async_client_class=None, **kwargs):
diff --git a/libs/tornado/httpserver.py b/libs/tornado/httpserver.py
index 03b5fc7..d4c990c 100755
--- a/libs/tornado/httpserver.py
+++ b/libs/tornado/httpserver.py
@@ -50,12 +50,13 @@ class HTTPServer(TCPServer, httputil.HTTPServerConnectionDelegate):
import tornado.httpserver
import tornado.ioloop
+ from tornado import httputil
def handle_request(request):
message = "You requested %s\n" % request.uri
request.connection.write_headers(
httputil.ResponseStartLine('HTTP/1.1', 200, 'OK'),
- {"Content-Length": str(len(message))})
+ httputil.HTTPHeaders({"Content-Length": str(len(message))}))
request.connection.write(message)
request.connection.finish()
diff --git a/libs/tornado/httputil.py b/libs/tornado/httputil.py
index a674897..f5c9c04 100755
--- a/libs/tornado/httputil.py
+++ b/libs/tornado/httputil.py
@@ -33,7 +33,7 @@ import time
from tornado.escape import native_str, parse_qs_bytes, utf8
from tornado.log import gen_log
-from tornado.util import ObjectDict, bytes_type
+from tornado.util import ObjectDict
try:
import Cookie # py2
@@ -335,7 +335,7 @@ class HTTPServerRequest(object):
# set remote IP and protocol
context = getattr(connection, 'context', None)
- self.remote_ip = getattr(context, 'remote_ip')
+ self.remote_ip = getattr(context, 'remote_ip', None)
self.protocol = getattr(context, 'protocol', "http")
self.host = host or self.headers.get("Host") or "127.0.0.1"
@@ -379,7 +379,7 @@ class HTTPServerRequest(object):
Use ``request.connection`` and the `.HTTPConnection` methods
to write the response.
"""
- assert isinstance(chunk, bytes_type)
+ assert isinstance(chunk, bytes)
self.connection.write(chunk, callback=callback)
def finish(self):
@@ -562,11 +562,18 @@ class HTTPConnection(object):
def url_concat(url, args):
- """Concatenate url and argument dictionary regardless of whether
+ """Concatenate url and arguments regardless of whether
url has existing query parameters.
+ ``args`` may be either a dictionary or a list of key-value pairs
+ (the latter allows for multiple values with the same key).
+
+ >>> url_concat("http://example.com/foo", dict(c="d"))
+ 'http://example.com/foo?c=d'
>>> url_concat("http://example.com/foo?a=b", dict(c="d"))
'http://example.com/foo?a=b&c=d'
+ >>> url_concat("http://example.com/foo?a=b", [("c", "d"), ("c", "d2")])
+ 'http://example.com/foo?a=b&c=d&c=d2'
"""
if not args:
return url
@@ -803,6 +810,8 @@ def parse_response_start_line(line):
# _parseparam and _parse_header are copied and modified from python2.7's cgi.py
# The original 2.7 version of this code did not correctly support some
# combinations of semicolons and double quotes.
+# It has also been modified to support valueless parameters as seen in
+# websocket extension negotiations.
def _parseparam(s):
@@ -836,9 +845,31 @@ def _parse_header(line):
value = value[1:-1]
value = value.replace('\\\\', '\\').replace('\\"', '"')
pdict[name] = value
+ else:
+ pdict[p] = None
return key, pdict
+def _encode_header(key, pdict):
+ """Inverse of _parse_header.
+
+ >>> _encode_header('permessage-deflate',
+ ... {'client_max_window_bits': 15, 'client_no_context_takeover': None})
+ 'permessage-deflate; client_max_window_bits=15; client_no_context_takeover'
+ """
+ if not pdict:
+ return key
+ out = [key]
+ # Sort the parameters just to make it easy to test.
+ for k, v in sorted(pdict.items()):
+ if v is None:
+ out.append(k)
+ else:
+ # TODO: quote if necessary.
+ out.append('%s=%s' % (k, v))
+ return '; '.join(out)
+
+
def doctests():
import doctest
return doctest.DocTestSuite()
diff --git a/libs/tornado/ioloop.py b/libs/tornado/ioloop.py
index e15252d..0319386 100755
--- a/libs/tornado/ioloop.py
+++ b/libs/tornado/ioloop.py
@@ -197,7 +197,7 @@ class IOLoop(Configurable):
An `IOLoop` automatically becomes current for its thread
when it is started, but it is sometimes useful to call
- `make_current` explictly before starting the `IOLoop`,
+ `make_current` explicitly before starting the `IOLoop`,
so that code run at startup time can find the right
instance.
"""
@@ -724,7 +724,7 @@ class PollIOLoop(IOLoop):
#
# If someone has already set a wakeup fd, we don't want to
# disturb it. This is an issue for twisted, which does its
- # SIGCHILD processing in response to its own wakeup fd being
+ # SIGCHLD processing in response to its own wakeup fd being
# written to. As long as the wakeup fd is registered on the IOLoop,
# the loop will still wake up and everything should work.
old_wakeup_fd = None
@@ -754,17 +754,18 @@ class PollIOLoop(IOLoop):
# Do not run anything until we have determined which ones
# are ready, so timeouts that call add_timeout cannot
# schedule anything in this iteration.
+ due_timeouts = []
if self._timeouts:
now = self.time()
while self._timeouts:
if self._timeouts[0].callback is None:
- # the timeout was cancelled
+ # The timeout was cancelled. Note that the
+ # cancellation check is repeated below for timeouts
+ # that are cancelled by another timeout or callback.
heapq.heappop(self._timeouts)
self._cancellations -= 1
elif self._timeouts[0].deadline <= now:
- timeout = heapq.heappop(self._timeouts)
- callbacks.append(timeout.callback)
- del timeout
+ due_timeouts.append(heapq.heappop(self._timeouts))
else:
break
if (self._cancellations > 512
@@ -778,9 +779,12 @@ class PollIOLoop(IOLoop):
for callback in callbacks:
self._run_callback(callback)
+ for timeout in due_timeouts:
+ if timeout.callback is not None:
+ self._run_callback(timeout.callback)
# Closures may be holding on to a lot of memory, so allow
# them to be freed before we go into our poll wait.
- callbacks = callback = None
+ callbacks = callback = due_timeouts = timeout = None
if self._callbacks:
# If any callbacks or timeouts called add_callback,
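A simplified model of the two-phase dispatch introduced above: due timeouts are collected first, then run with a second cancellation check, so a timeout cancelled by an earlier callback in the same iteration is skipped instead of firing anyway. Dicts stand in for tornado's Timeout objects, and a counter keeps heap comparisons away from them; both are illustrative.

```python
import heapq
import itertools

_tiebreak = itertools.count()

def add_timeout(timeouts, deadline, callback):
    entry = {'callback': callback}
    heapq.heappush(timeouts, (deadline, next(_tiebreak), entry))
    return entry  # cancel later by setting entry['callback'] = None

def run_due_timeouts(timeouts, now):
    due_timeouts = []                        # phase 1: collect
    while timeouts and timeouts[0][0] <= now:
        due_timeouts.append(heapq.heappop(timeouts))
    for _, _, entry in due_timeouts:         # phase 2: run
        if entry['callback'] is not None:    # re-check cancellation
            entry['callback']()

timeouts = []
b = add_timeout(timeouts, 1, lambda: print('b ran'))
add_timeout(timeouts, 0, lambda: b.update(callback=None))
run_due_timeouts(timeouts, now=2)  # 'b ran' never appears
```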
diff --git a/libs/tornado/iostream.py b/libs/tornado/iostream.py
index 99c681d..eced6d6 100755
--- a/libs/tornado/iostream.py
+++ b/libs/tornado/iostream.py
@@ -39,7 +39,7 @@ from tornado import ioloop
from tornado.log import gen_log, app_log
from tornado.netutil import ssl_wrap_socket, ssl_match_hostname, SSLCertificateError
from tornado import stack_context
-from tornado.util import bytes_type, errno_from_exception
+from tornado.util import errno_from_exception
try:
from tornado.platform.posix import _set_nonblocking
@@ -324,7 +324,7 @@ class BaseIOStream(object):
.. versionchanged:: 4.0
Now returns a `.Future` if no callback is given.
"""
- assert isinstance(data, bytes_type)
+ assert isinstance(data, bytes)
self._check_closed()
# We use bool(_write_buffer) as a proxy for write_buffer_size>0,
# so never put empty strings in the buffer.
@@ -554,7 +554,7 @@ class BaseIOStream(object):
# Pretend to have a pending callback so that an EOF in
# _read_to_buffer doesn't trigger an immediate close
# callback. At the end of this method we'll either
- # estabilsh a real pending callback via
+ # establish a real pending callback via
# _read_from_buffer or run the close callback.
#
# We need two try statements here so that
@@ -993,6 +993,11 @@ class IOStream(BaseIOStream):
"""
self._connecting = True
+ if callback is not None:
+ self._connect_callback = stack_context.wrap(callback)
+ future = None
+ else:
+ future = self._connect_future = TracebackFuture()
try:
self.socket.connect(address)
except socket.error as e:
@@ -1008,12 +1013,7 @@ class IOStream(BaseIOStream):
gen_log.warning("Connect error on fd %s: %s",
self.socket.fileno(), e)
self.close(exc_info=True)
- return
- if callback is not None:
- self._connect_callback = stack_context.wrap(callback)
- future = None
- else:
- future = self._connect_future = TracebackFuture()
+ return future
self._add_io_state(self.io_loop.WRITE)
return future
@@ -1185,8 +1185,14 @@ class SSLIOStream(IOStream):
return self.close(exc_info=True)
raise
except socket.error as err:
- if err.args[0] in _ERRNO_CONNRESET:
+ # Some port scans (e.g. nmap in -sT mode) have been known
+ # to cause do_handshake to raise EBADF, so make that error
+ # quiet as well.
+ # https://groups.google.com/forum/?fromgroups#!topic/python-tornado/ApucKJat1_0
+ if (err.args[0] in _ERRNO_CONNRESET or
+ err.args[0] == errno.EBADF):
return self.close(exc_info=True)
+ raise
except AttributeError:
# On Linux, if the connection was reset before the call to
# wrap_socket, do_handshake will fail with an
diff --git a/libs/tornado/netutil.py b/libs/tornado/netutil.py
index 336c806..f147c97 100755
--- a/libs/tornado/netutil.py
+++ b/libs/tornado/netutil.py
@@ -35,6 +35,11 @@ except ImportError:
# ssl is not available on Google App Engine
ssl = None
+try:
+ xrange # py2
+except NameError:
+ xrange = range # py3
+
if hasattr(ssl, 'match_hostname') and hasattr(ssl, 'CertificateError'): # python 3.2+
ssl_match_hostname = ssl.match_hostname
SSLCertificateError = ssl.CertificateError
@@ -60,8 +65,11 @@ _ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN)
if hasattr(errno, "WSAEWOULDBLOCK"):
_ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,)
+# Default backlog used when calling sock.listen()
+_DEFAULT_BACKLOG = 128
-def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags=None):
+def bind_sockets(port, address=None, family=socket.AF_UNSPEC,
+ backlog=_DEFAULT_BACKLOG, flags=None):
"""Creates listening sockets bound to the given port and address.
Returns a list of socket objects (multiple sockets are returned if
@@ -141,7 +149,7 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags
return sockets
if hasattr(socket, 'AF_UNIX'):
- def bind_unix_socket(file, mode=0o600, backlog=128):
+ def bind_unix_socket(file, mode=0o600, backlog=_DEFAULT_BACKLOG):
"""Creates a listening unix socket.
If a socket with the given name already exists, it will be deleted.
@@ -184,7 +192,18 @@ def add_accept_handler(sock, callback, io_loop=None):
io_loop = IOLoop.current()
def accept_handler(fd, events):
- while True:
+ # More connections may come in while we're handling callbacks;
+ # to prevent starvation of other tasks we must limit the number
+ # of connections we accept at a time. Ideally we would accept
+ # up to the number of connections that were waiting when we
+ # entered this method, but this information is not available
+ # (and rearranging this method to call accept() as many times
+ # as possible before running any callbacks would have adverse
+ # effects on load balancing in multiprocess configurations).
+ # Instead, we use the (default) listen backlog as a rough
+ # heuristic for the number of connections we can reasonably
+ # accept at once.
+ for i in xrange(_DEFAULT_BACKLOG):
try:
connection, address = sock.accept()
except socket.error as e:
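A sketch of the bounded accept loop: instead of draining the socket until it would block (which can starve other IOLoop handlers under sustained load), at most one backlog's worth of connections is accepted per wakeup. `accept` here is a stand-in for `sock.accept`.

```python
import errno
import socket

def accept_handler(accept, callback, backlog=128):
    # Formerly `while True`; the bound keeps one busy listener from
    # monopolizing the loop.
    for i in range(backlog):
        try:
            connection, address = accept()
        except socket.error as e:
            if e.args[0] in (errno.EWOULDBLOCK, errno.EAGAIN):
                return  # queue drained for now
            raise
        callback(connection, address)
```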
diff --git a/libs/tornado/options.py b/libs/tornado/options.py
index fa9c269..5e23e29 100755
--- a/libs/tornado/options.py
+++ b/libs/tornado/options.py
@@ -79,7 +79,7 @@ import sys
import os
import textwrap
-from tornado.escape import _unicode
+from tornado.escape import _unicode, native_str
from tornado.log import define_logging_options
from tornado import stack_context
from tornado.util import basestring_type, exec_in
@@ -271,10 +271,14 @@ class OptionParser(object):
If ``final`` is ``False``, parse callbacks will not be run.
This is useful for applications that wish to combine configurations
from multiple sources.
+
+ .. versionchanged:: 4.1
+ Config files are now always interpreted as utf-8 instead of
+ the system default encoding.
"""
config = {}
- with open(path) as f:
- exec_in(f.read(), config, config)
+ with open(path, 'rb') as f:
+ exec_in(native_str(f.read()), config, config)
for name in config:
if name in self._options:
self._options[name].set(config[name])
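A sketch of the encoding fix: the config file is read as bytes and decoded as utf-8 explicitly, rather than trusting whatever platform default a plain `open(path).read()` would apply.

```python
def parse_config_file(path):
    config = {}
    with open(path, 'rb') as f:
        exec(f.read().decode('utf-8'), config, config)
    return config
```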
diff --git a/libs/tornado/platform/asyncio.py b/libs/tornado/platform/asyncio.py
index b40f014..dd6722a 100644
--- a/libs/tornado/platform/asyncio.py
+++ b/libs/tornado/platform/asyncio.py
@@ -10,12 +10,10 @@ unfinished callbacks on the event loop that fail when it resumes)
"""
from __future__ import absolute_import, division, print_function, with_statement
-import datetime
import functools
from tornado.ioloop import IOLoop
from tornado import stack_context
-from tornado.util import timedelta_to_seconds
try:
# Import the real asyncio module for py33+ first. Older versions of the
diff --git a/libs/tornado/platform/twisted.py b/libs/tornado/platform/twisted.py
index b271dfc..27d991c 100755
--- a/libs/tornado/platform/twisted.py
+++ b/libs/tornado/platform/twisted.py
@@ -141,7 +141,7 @@ class TornadoDelayedCall(object):
class TornadoReactor(PosixReactorBase):
"""Twisted reactor built on the Tornado IOLoop.
- Since it is intented to be used in applications where the top-level
+ Since it is intended to be used in applications where the top-level
event loop is ``io_loop.start()`` rather than ``reactor.run()``,
it is implemented a little differently than other Twisted reactors.
We override `mainLoop` instead of `doIteration` and must implement
diff --git a/libs/tornado/process.py b/libs/tornado/process.py
index 0f38b85..cea3dbd 100755
--- a/libs/tornado/process.py
+++ b/libs/tornado/process.py
@@ -39,7 +39,7 @@ from tornado.util import errno_from_exception
try:
import multiprocessing
except ImportError:
- # Multiprocessing is not availble on Google App Engine.
+ # Multiprocessing is not available on Google App Engine.
multiprocessing = None
try:
@@ -240,7 +240,7 @@ class Subprocess(object):
The callback takes one argument, the return code of the process.
- This method uses a ``SIGCHILD`` handler, which is a global setting
+ This method uses a ``SIGCHLD`` handler, which is a global setting
and may conflict if you have other libraries trying to handle the
same signal. If you are using more than one ``IOLoop`` it may
be necessary to call `Subprocess.initialize` first to designate
@@ -257,7 +257,7 @@ class Subprocess(object):
@classmethod
def initialize(cls, io_loop=None):
- """Initializes the ``SIGCHILD`` handler.
+ """Initializes the ``SIGCHLD`` handler.
The signal handler is run on an `.IOLoop` to avoid locking issues.
Note that the `.IOLoop` used for signal handling need not be the
@@ -275,7 +275,7 @@ class Subprocess(object):
@classmethod
def uninitialize(cls):
- """Removes the ``SIGCHILD`` handler."""
+ """Removes the ``SIGCHLD`` handler."""
if not cls._initialized:
return
signal.signal(signal.SIGCHLD, cls._old_sigchld)
diff --git a/libs/tornado/simple_httpclient.py b/libs/tornado/simple_httpclient.py
index 516dc20..e60c434 100755
--- a/libs/tornado/simple_httpclient.py
+++ b/libs/tornado/simple_httpclient.py
@@ -19,11 +19,8 @@ import functools
import re
import socket
import sys
+from io import BytesIO
-try:
- from io import BytesIO # python 3
-except ImportError:
- from cStringIO import StringIO as BytesIO # python 2
try:
import urlparse # py2
@@ -222,6 +219,7 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
stack_context.wrap(self._on_timeout))
self.tcp_client.connect(host, port, af=af,
ssl_options=ssl_options,
+ max_buffer_size=self.max_buffer_size,
callback=self._on_connect)
def _get_ssl_options(self, scheme):
@@ -316,18 +314,18 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
if self.request.user_agent:
self.request.headers["User-Agent"] = self.request.user_agent
if not self.request.allow_nonstandard_methods:
- if self.request.method in ("POST", "PATCH", "PUT"):
- if (self.request.body is None and
- self.request.body_producer is None):
- raise AssertionError(
- 'Body must not be empty for "%s" request'
- % self.request.method)
- else:
- if (self.request.body is not None or
- self.request.body_producer is not None):
- raise AssertionError(
- 'Body must be empty for "%s" request'
- % self.request.method)
+ # Some HTTP methods nearly always have bodies while others
+ # almost never do. Fail in this case unless the user has
+ # opted out of sanity checks with allow_nonstandard_methods.
+ body_expected = self.request.method in ("POST", "PATCH", "PUT")
+ body_present = (self.request.body is not None or
+ self.request.body_producer is not None)
+ if ((body_expected and not body_present) or
+ (body_present and not body_expected)):
+ raise ValueError(
+ 'Body must %sbe None for method %s (unless '
+ 'allow_nonstandard_methods is true)' %
+ ('not ' if body_expected else '', self.request.method))
if self.request.expect_100_continue:
self.request.headers["Expect"] = "100-continue"
if self.request.body is not None:
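What the stricter check above means for callers, as a sketch (URLs are placeholders; the validation fires when the request is fetched):

from tornado.httpclient import HTTPRequest

# POST/PATCH/PUT requests are expected to carry a body or body_producer.
ok = HTTPRequest('http://example.com/submit', method='POST',
                 body='payload=1')

# A deliberately body-less POST must opt out of the sanity check;
# otherwise the fetch now fails with ValueError, not AssertionError.
bodyless = HTTPRequest('http://example.com/ping', method='POST',
                       allow_nonstandard_methods=True)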
diff --git a/libs/tornado/stack_context.py b/libs/tornado/stack_context.py
index 2e845ab..2c0d9ee 100755
--- a/libs/tornado/stack_context.py
+++ b/libs/tornado/stack_context.py
@@ -41,13 +41,13 @@ Example usage::
sys.exit(1)
with StackContext(die_on_error):
- # Any exception thrown here *or in callback and its desendents*
+ # Any exception thrown here *or in callback and its descendants*
# will cause the process to exit instead of spinning endlessly
# in the ioloop.
http_client.fetch(url, callback)
ioloop.start()
-Most applications shouln't have to work with `StackContext` directly.
+Most applications shouldn't have to work with `StackContext` directly.
Here are a few rules of thumb for when it's necessary:
* If you're writing an asynchronous library that doesn't rely on a
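A self-contained sketch of the pattern the docstring describes, using ExceptionStackContext to catch an error raised from inside a callback:

from tornado.ioloop import IOLoop
from tornado.stack_context import ExceptionStackContext

def handle_exception(typ, value, tb):
    print('caught from a callback:', value)
    IOLoop.instance().stop()
    return True  # mark the exception as handled

def boom():
    raise RuntimeError('raised inside an IOLoop callback')

with ExceptionStackContext(handle_exception):
    IOLoop.instance().add_callback(boom)
IOLoop.instance().start()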
diff --git a/libs/tornado/tcpclient.py b/libs/tornado/tcpclient.py
index d49eb5c..0abbea2 100644
--- a/libs/tornado/tcpclient.py
+++ b/libs/tornado/tcpclient.py
@@ -163,7 +163,7 @@ class TCPClient(object):
functools.partial(self._create_stream, max_buffer_size))
af, addr, stream = yield connector.start()
# TODO: For better performance we could cache the (af, addr)
- # information here and re-use it on sbusequent connections to
+ # information here and re-use it on subsequent connections to
# the same host. (http://tools.ietf.org/html/rfc6555#section-4.2)
if ssl_options is not None:
stream = yield stream.start_tls(False, ssl_options=ssl_options,
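For reference, a minimal TCPClient sketch around the code above (placeholder host; needs network access). connect() resolves the address and, when ssl_options is given, finishes by wrapping the stream in TLS:

from tornado import gen
from tornado.ioloop import IOLoop
from tornado.tcpclient import TCPClient

@gen.coroutine
def open_and_close():
    stream = yield TCPClient().connect('example.com', 80)
    stream.close()

IOLoop.instance().run_sync(open_and_close)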
diff --git a/libs/tornado/template.py b/libs/tornado/template.py
index 4dcec5d..3882ed0 100755
--- a/libs/tornado/template.py
+++ b/libs/tornado/template.py
@@ -199,7 +199,7 @@ import threading
from tornado import escape
from tornado.log import app_log
-from tornado.util import bytes_type, ObjectDict, exec_in, unicode_type
+from tornado.util import ObjectDict, exec_in, unicode_type
try:
from cStringIO import StringIO # py2
@@ -261,7 +261,7 @@ class Template(object):
"linkify": escape.linkify,
"datetime": datetime,
"_tt_utf8": escape.utf8, # for internal use
- "_tt_string_types": (unicode_type, bytes_type),
+ "_tt_string_types": (unicode_type, bytes),
# __name__ and __loader__ allow the traceback mechanism to find
# the generated source code.
"__name__": self.name.replace('.', '_'),
diff --git a/libs/tornado/testing.py b/libs/tornado/testing.py
index b4bfb27..4d85abe 100755
--- a/libs/tornado/testing.py
+++ b/libs/tornado/testing.py
@@ -28,7 +28,7 @@ except ImportError:
IOLoop = None
netutil = None
SimpleAsyncHTTPClient = None
-from tornado.log import gen_log
+from tornado.log import gen_log, app_log
from tornado.stack_context import ExceptionStackContext
from tornado.util import raise_exc_info, basestring_type
import functools
@@ -114,8 +114,8 @@ class _TestMethodWrapper(object):
def __init__(self, orig_method):
self.orig_method = orig_method
- def __call__(self):
- result = self.orig_method()
+ def __call__(self, *args, **kwargs):
+ result = self.orig_method(*args, **kwargs)
if isinstance(result, types.GeneratorType):
raise TypeError("Generator test methods should be decorated with "
"tornado.testing.gen_test")
@@ -237,7 +237,11 @@ class AsyncTestCase(unittest.TestCase):
return IOLoop()
def _handle_exception(self, typ, value, tb):
- self.__failure = (typ, value, tb)
+ if self.__failure is None:
+ self.__failure = (typ, value, tb)
+ else:
+ app_log.error("multiple unhandled exceptions in test",
+ exc_info=(typ, value, tb))
self.stop()
return True
@@ -395,7 +399,8 @@ class AsyncHTTPTestCase(AsyncTestCase):
def tearDown(self):
self.http_server.stop()
- self.io_loop.run_sync(self.http_server.close_all_connections)
+ self.io_loop.run_sync(self.http_server.close_all_connections,
+ timeout=get_async_test_timeout())
if (not IOLoop.initialized() or
self.http_client.io_loop is not IOLoop.instance()):
self.http_client.close()
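A sketch of what the wrapped test methods enforce: generator tests must be decorated with gen_test (gen.sleep is assumed available in this Tornado version):

from tornado import gen
from tornado.testing import AsyncTestCase, gen_test

class SleepTest(AsyncTestCase):
    @gen_test
    def test_sleep(self):
        # Without @gen_test, _TestMethodWrapper would reject this
        # generator method with the TypeError shown above.
        yield gen.sleep(0.01)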
diff --git a/libs/tornado/util.py b/libs/tornado/util.py
index b6e06c6..34c4b07 100755
--- a/libs/tornado/util.py
+++ b/libs/tornado/util.py
@@ -115,16 +115,17 @@ def import_object(name):
if type('') is not type(b''):
def u(s):
return s
- bytes_type = bytes
unicode_type = str
basestring_type = str
else:
def u(s):
return s.decode('unicode_escape')
- bytes_type = str
unicode_type = unicode
basestring_type = basestring
+# Deprecated alias that was used before we dropped py25 support.
+# Left here in case anyone outside Tornado is using it.
+bytes_type = bytes
if sys.version_info > (3,):
exec("""
@@ -154,7 +155,7 @@ def errno_from_exception(e):
"""Provides the errno from an Exception object.
There are cases that the errno attribute was not set so we pull
- the errno out of the args but if someone instatiates an Exception
+ the errno out of the args but if someone instantiates an Exception
without any args you will get a tuple error. So this function
abstracts all that behavior to give you a safe way to get the
errno.
@@ -202,7 +203,7 @@ class Configurable(object):
impl = cls
args.update(kwargs)
instance = super(Configurable, cls).__new__(impl)
- # initialize vs __init__ chosen for compatiblity with AsyncHTTPClient
+ # initialize vs __init__ chosen for compatibility with AsyncHTTPClient
# singleton magic. If we get rid of that we can switch to __init__
# here too.
instance.initialize(**args)
@@ -237,7 +238,7 @@ class Configurable(object):
some parameters.
"""
base = cls.configurable_base()
- if isinstance(impl, (unicode_type, bytes_type)):
+ if isinstance(impl, (unicode_type, bytes)):
impl = import_object(impl)
if impl is not None and not issubclass(impl, cls):
raise ValueError("Invalid subclass of %s" % cls)
diff --git a/libs/tornado/web.py b/libs/tornado/web.py
index 25ac56e..a038265 100755
--- a/libs/tornado/web.py
+++ b/libs/tornado/web.py
@@ -72,6 +72,7 @@ import time
import tornado
import traceback
import types
+from io import BytesIO
from tornado.concurrent import Future, is_future
from tornado import escape
@@ -83,12 +84,8 @@ from tornado.log import access_log, app_log, gen_log
from tornado import stack_context
from tornado import template
from tornado.escape import utf8, _unicode
-from tornado.util import bytes_type, import_object, ObjectDict, raise_exc_info, unicode_type, _websocket_mask
+from tornado.util import import_object, ObjectDict, raise_exc_info, unicode_type, _websocket_mask
-try:
- from io import BytesIO # python 3
-except ImportError:
- from cStringIO import StringIO as BytesIO # python 2
try:
import Cookie # py2
@@ -344,7 +341,7 @@ class RequestHandler(object):
_INVALID_HEADER_CHAR_RE = re.compile(br"[\x00-\x1f]")
def _convert_header_value(self, value):
- if isinstance(value, bytes_type):
+ if isinstance(value, bytes):
pass
elif isinstance(value, unicode_type):
value = value.encode('utf-8')
@@ -652,7 +649,7 @@ class RequestHandler(object):
raise RuntimeError("Cannot write() after finish(). May be caused "
"by using async operations without the "
"@asynchronous decorator.")
- if not isinstance(chunk, (bytes_type, unicode_type, dict)):
+ if not isinstance(chunk, (bytes, unicode_type, dict)):
raise TypeError("write() only accepts bytes, unicode, and dict objects")
if isinstance(chunk, dict):
chunk = escape.json_encode(chunk)
@@ -677,7 +674,7 @@ class RequestHandler(object):
js_embed.append(utf8(embed_part))
file_part = module.javascript_files()
if file_part:
- if isinstance(file_part, (unicode_type, bytes_type)):
+ if isinstance(file_part, (unicode_type, bytes)):
js_files.append(file_part)
else:
js_files.extend(file_part)
@@ -686,7 +683,7 @@ class RequestHandler(object):
css_embed.append(utf8(embed_part))
file_part = module.css_files()
if file_part:
- if isinstance(file_part, (unicode_type, bytes_type)):
+ if isinstance(file_part, (unicode_type, bytes)):
css_files.append(file_part)
else:
css_files.extend(file_part)
@@ -919,7 +916,7 @@ class RequestHandler(object):
return
self.clear()
- reason = None
+ reason = kwargs.get('reason')
if 'exc_info' in kwargs:
exception = kwargs['exc_info'][1]
if isinstance(exception, HTTPError) and exception.reason:
@@ -959,12 +956,15 @@ class RequestHandler(object):
@property
def locale(self):
- """The local for the current session.
+ """The locale for the current session.
Determined by either `get_user_locale`, which you can override to
set the locale based on, e.g., a user preference stored in a
database, or `get_browser_locale`, which uses the ``Accept-Language``
header.
+
+ .. versionchanged:: 4.1
+ Added a property setter.
"""
if not hasattr(self, "_locale"):
self._locale = self.get_user_locale()
@@ -973,6 +973,10 @@ class RequestHandler(object):
assert self._locale
return self._locale
+ @locale.setter
+ def locale(self, value):
+ self._locale = value
+
def get_user_locale(self):
"""Override to determine the locale from the authenticated user.
@@ -2165,11 +2169,14 @@ class StaticFileHandler(RequestHandler):
if include_body:
content = self.get_content(self.absolute_path, start, end)
- if isinstance(content, bytes_type):
+ if isinstance(content, bytes):
content = [content]
for chunk in content:
- self.write(chunk)
- yield self.flush()
+ try:
+ self.write(chunk)
+ yield self.flush()
+ except iostream.StreamClosedError:
+ return
else:
assert self.request.method == "HEAD"
@@ -2336,7 +2343,7 @@ class StaticFileHandler(RequestHandler):
"""
data = cls.get_content(abspath)
hasher = hashlib.md5()
- if isinstance(data, bytes_type):
+ if isinstance(data, bytes):
hasher.update(data)
else:
for chunk in data:
@@ -2548,7 +2555,6 @@ class GZipContentEncoding(OutputTransform):
ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
self._gzipping = self._compressible_type(ctype) and \
(not finishing or len(chunk) >= self.MIN_LENGTH) and \
- (finishing or "Content-Length" not in headers) and \
("Content-Encoding" not in headers)
if self._gzipping:
headers["Content-Encoding"] = "gzip"
@@ -2556,7 +2562,14 @@ class GZipContentEncoding(OutputTransform):
self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value)
chunk = self.transform_chunk(chunk, finishing)
if "Content-Length" in headers:
- headers["Content-Length"] = str(len(chunk))
+ # The original content length is no longer correct.
+ # If this is the last (and only) chunk, we can set the new
+ # content-length; otherwise we remove it and fall back to
+ # chunked encoding.
+ if finishing:
+ headers["Content-Length"] = str(len(chunk))
+ else:
+ del headers["Content-Length"]
return status_code, headers, chunk
def transform_chunk(self, chunk, finishing):
@@ -2705,7 +2718,7 @@ class TemplateModule(UIModule):
def javascript_files(self):
result = []
for f in self._get_resources("javascript_files"):
- if isinstance(f, (unicode_type, bytes_type)):
+ if isinstance(f, (unicode_type, bytes)):
result.append(f)
else:
result.extend(f)
@@ -2717,7 +2730,7 @@ class TemplateModule(UIModule):
def css_files(self):
result = []
for f in self._get_resources("css_files"):
- if isinstance(f, (unicode_type, bytes_type)):
+ if isinstance(f, (unicode_type, bytes)):
result.append(f)
else:
result.extend(f)
@@ -2822,7 +2835,7 @@ class URLSpec(object):
return self._path
converted_args = []
for a in args:
- if not isinstance(a, (unicode_type, bytes_type)):
+ if not isinstance(a, (unicode_type, bytes)):
a = str(a)
converted_args.append(escape.url_escape(utf8(a), plus=False))
return self._path % tuple(converted_args)
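A sketch of the new locale setter (handler and locale code are placeholders); assigning the property replaces overriding get_user_locale():

import tornado.locale
import tornado.web

class Handler(tornado.web.RequestHandler):
    def prepare(self):
        # Added in 4.1: the locale can be assigned directly.
        self.locale = tornado.locale.get('en_US')

    def get(self):
        self.write(self.locale.translate('Hello'))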
diff --git a/libs/tornado/websocket.py b/libs/tornado/websocket.py
index ed520d5..d960b0e 100755
--- a/libs/tornado/websocket.py
+++ b/libs/tornado/websocket.py
@@ -26,6 +26,7 @@ import os
import struct
import tornado.escape
import tornado.web
+import zlib
from tornado.concurrent import TracebackFuture
from tornado.escape import utf8, native_str, to_unicode
@@ -35,7 +36,7 @@ from tornado.iostream import StreamClosedError
from tornado.log import gen_log, app_log
from tornado import simple_httpclient
from tornado.tcpclient import TCPClient
-from tornado.util import bytes_type, _websocket_mask
+from tornado.util import _websocket_mask
try:
from urllib.parse import urlparse # py3
@@ -171,13 +172,15 @@ class WebSocketHandler(tornado.web.RequestHandler):
self.stream.set_close_callback(self.on_connection_close)
if self.request.headers.get("Sec-WebSocket-Version") in ("7", "8", "13"):
- self.ws_connection = WebSocketProtocol13(self)
+ self.ws_connection = WebSocketProtocol13(
+ self, compression_options=self.get_compression_options())
self.ws_connection.accept_connection()
else:
- self.stream.write(tornado.escape.utf8(
- "HTTP/1.1 426 Upgrade Required\r\n"
- "Sec-WebSocket-Version: 8\r\n\r\n"))
- self.stream.close()
+ if not self.stream.closed():
+ self.stream.write(tornado.escape.utf8(
+ "HTTP/1.1 426 Upgrade Required\r\n"
+ "Sec-WebSocket-Version: 8\r\n\r\n"))
+ self.stream.close()
def write_message(self, message, binary=False):
@@ -213,6 +216,19 @@ class WebSocketHandler(tornado.web.RequestHandler):
"""
return None
+ def get_compression_options(self):
+ """Override to return compression options for the connection.
+
+ If this method returns None (the default), compression will
+ be disabled. If it returns a dict (even an empty one), it
+ will be enabled. The contents of the dict may be used to
+ control the memory and CPU usage of the compression,
+ but no such options are currently implemented.
+
+ .. versionadded:: 4.1
+ """
+ return None
+
def open(self):
"""Invoked when a new WebSocket is opened.
@@ -336,6 +352,15 @@ class WebSocketHandler(tornado.web.RequestHandler):
self.ws_connection = None
self.on_close()
+ def send_error(self, *args, **kwargs):
+ if self.stream is None:
+ super(WebSocketHandler, self).send_error(*args, **kwargs)
+ else:
+ # If we get an uncaught exception during the handshake,
+ # we have no choice but to abruptly close the connection.
+ # TODO: for uncaught exceptions after the handshake,
+ # we can close the connection more gracefully.
+ self.stream.close()
def _wrap_method(method):
def _disallow_for_websocket(self, *args, **kwargs):
@@ -344,7 +369,7 @@ def _wrap_method(method):
else:
raise RuntimeError("Method not supported for Web Sockets")
return _disallow_for_websocket
-for method in ["write", "redirect", "set_header", "send_error", "set_cookie",
+for method in ["write", "redirect", "set_header", "set_cookie",
"set_status", "flush", "finish"]:
setattr(WebSocketHandler, method,
_wrap_method(getattr(WebSocketHandler, method)))
@@ -383,13 +408,68 @@ class WebSocketProtocol(object):
self.close() # let the subclass cleanup
+class _PerMessageDeflateCompressor(object):
+ def __init__(self, persistent, max_wbits):
+ if max_wbits is None:
+ max_wbits = zlib.MAX_WBITS
+ # There is no symbolic constant for the minimum wbits value.
+ if not (8 <= max_wbits <= zlib.MAX_WBITS):
+ raise ValueError("Invalid max_wbits value %r; allowed range 8-%d",
+ max_wbits, zlib.MAX_WBITS)
+ self._max_wbits = max_wbits
+ if persistent:
+ self._compressor = self._create_compressor()
+ else:
+ self._compressor = None
+
+ def _create_compressor(self):
+ return zlib.compressobj(-1, zlib.DEFLATED, -self._max_wbits)
+
+ def compress(self, data):
+ compressor = self._compressor or self._create_compressor()
+ data = (compressor.compress(data) +
+ compressor.flush(zlib.Z_SYNC_FLUSH))
+ assert data.endswith(b'\x00\x00\xff\xff')
+ return data[:-4]
+
+
+class _PerMessageDeflateDecompressor(object):
+ def __init__(self, persistent, max_wbits):
+ if max_wbits is None:
+ max_wbits = zlib.MAX_WBITS
+ if not (8 <= max_wbits <= zlib.MAX_WBITS):
+ raise ValueError("Invalid max_wbits value %r; allowed range 8-%d",
+ max_wbits, zlib.MAX_WBITS)
+ self._max_wbits = max_wbits
+ if persistent:
+ self._decompressor = self._create_decompressor()
+ else:
+ self._decompressor = None
+
+ def _create_decompressor(self):
+ return zlib.decompressobj(-self._max_wbits)
+
+ def decompress(self, data):
+ decompressor = self._decompressor or self._create_decompressor()
+ return decompressor.decompress(data + b'\x00\x00\xff\xff')
+
+
class WebSocketProtocol13(WebSocketProtocol):
"""Implementation of the WebSocket protocol from RFC 6455.
This class supports versions 7 and 8 of the protocol in addition to the
final version 13.
"""
- def __init__(self, handler, mask_outgoing=False):
+ # Bit masks for the first byte of a frame.
+ FIN = 0x80
+ RSV1 = 0x40
+ RSV2 = 0x20
+ RSV3 = 0x10
+ RSV_MASK = RSV1 | RSV2 | RSV3
+ OPCODE_MASK = 0x0f
+
+ def __init__(self, handler, mask_outgoing=False,
+ compression_options=None):
WebSocketProtocol.__init__(self, handler)
self.mask_outgoing = mask_outgoing
self._final_frame = False
@@ -400,6 +480,19 @@ class WebSocketProtocol13(WebSocketProtocol):
self._fragmented_message_buffer = None
self._fragmented_message_opcode = None
self._waiting = None
+ self._compression_options = compression_options
+ self._decompressor = None
+ self._compressor = None
+ self._frame_compressed = None
+ # The total uncompressed size of all messages received or sent.
+ # Unicode messages are encoded to utf8.
+ # Only for testing; subject to change.
+ self._message_bytes_in = 0
+ self._message_bytes_out = 0
+ # The total size of all packets received or sent. Includes
+ # the effect of compression, frame overhead, and control frames.
+ self._wire_bytes_in = 0
+ self._wire_bytes_out = 0
def accept_connection(self):
try:
@@ -444,24 +537,99 @@ class WebSocketProtocol13(WebSocketProtocol):
assert selected in subprotocols
subprotocol_header = "Sec-WebSocket-Protocol: %s\r\n" % selected
+ extension_header = ''
+ extensions = self._parse_extensions_header(self.request.headers)
+ for ext in extensions:
+ if (ext[0] == 'permessage-deflate' and
+ self._compression_options is not None):
+ # TODO: negotiate parameters if compression_options
+ # specifies limits.
+ self._create_compressors('server', ext[1])
+ if ('client_max_window_bits' in ext[1] and
+ ext[1]['client_max_window_bits'] is None):
+ # Don't echo an offered client_max_window_bits
+ # parameter with no value.
+ del ext[1]['client_max_window_bits']
+ extension_header = ('Sec-WebSocket-Extensions: %s\r\n' %
+ httputil._encode_header(
+ 'permessage-deflate', ext[1]))
+ break
+
+ if self.stream.closed():
+ self._abort()
+ return
self.stream.write(tornado.escape.utf8(
"HTTP/1.1 101 Switching Protocols\r\n"
"Upgrade: websocket\r\n"
"Connection: Upgrade\r\n"
"Sec-WebSocket-Accept: %s\r\n"
- "%s"
- "\r\n" % (self._challenge_response(), subprotocol_header)))
+ "%s%s"
+ "\r\n" % (self._challenge_response(),
+ subprotocol_header, extension_header)))
self._run_callback(self.handler.open, *self.handler.open_args,
**self.handler.open_kwargs)
self._receive_frame()
- def _write_frame(self, fin, opcode, data):
+ def _parse_extensions_header(self, headers):
+ extensions = headers.get("Sec-WebSocket-Extensions", '')
+ if extensions:
+ return [httputil._parse_header(e.strip())
+ for e in extensions.split(',')]
+ return []
+
+ def _process_server_headers(self, key, headers):
+ """Process the headers sent by the server to this client connection.
+
+ 'key' is the websocket handshake challenge/response key.
+ """
+ assert headers['Upgrade'].lower() == 'websocket'
+ assert headers['Connection'].lower() == 'upgrade'
+ accept = self.compute_accept_value(key)
+ assert headers['Sec-Websocket-Accept'] == accept
+
+ extensions = self._parse_extensions_header(headers)
+ for ext in extensions:
+ if (ext[0] == 'permessage-deflate' and
+ self._compression_options is not None):
+ self._create_compressors('client', ext[1])
+ else:
+ raise ValueError("unsupported extension %r", ext)
+
+ def _get_compressor_options(self, side, agreed_parameters):
+ """Converts a websocket agreed_parameters set to keyword arguments
+ for our compressor objects.
+ """
+ options = dict(
+ persistent=(side + '_no_context_takeover') not in agreed_parameters)
+ wbits_header = agreed_parameters.get(side + '_max_window_bits', None)
+ if wbits_header is None:
+ options['max_wbits'] = zlib.MAX_WBITS
+ else:
+ options['max_wbits'] = int(wbits_header)
+ return options
+
+ def _create_compressors(self, side, agreed_parameters):
+ # TODO: handle invalid parameters gracefully
+ allowed_keys = set(['server_no_context_takeover',
+ 'client_no_context_takeover',
+ 'server_max_window_bits',
+ 'client_max_window_bits'])
+ for key in agreed_parameters:
+ if key not in allowed_keys:
+ raise ValueError("unsupported compression parameter %r" % key)
+ other_side = 'client' if (side == 'server') else 'server'
+ self._compressor = _PerMessageDeflateCompressor(
+ **self._get_compressor_options(side, agreed_parameters))
+ self._decompressor = _PerMessageDeflateDecompressor(
+ **self._get_compressor_options(other_side, agreed_parameters))
+
+ def _write_frame(self, fin, opcode, data, flags=0):
if fin:
- finbit = 0x80
+ finbit = self.FIN
else:
finbit = 0
- frame = struct.pack("B", finbit | opcode)
+ frame = struct.pack("B", finbit | opcode | flags)
l = len(data)
if self.mask_outgoing:
mask_bit = 0x80
@@ -477,7 +645,11 @@ class WebSocketProtocol13(WebSocketProtocol):
mask = os.urandom(4)
data = mask + _websocket_mask(mask, data)
frame += data
- self.stream.write(frame)
+ self._wire_bytes_out += len(frame)
+ try:
+ self.stream.write(frame)
+ except StreamClosedError:
+ self._abort()
def write_message(self, message, binary=False):
"""Sends the given message to the client of this Web Socket."""
@@ -486,15 +658,17 @@ class WebSocketProtocol13(WebSocketProtocol):
else:
opcode = 0x1
message = tornado.escape.utf8(message)
- assert isinstance(message, bytes_type)
- try:
- self._write_frame(True, opcode, message)
- except StreamClosedError:
- self._abort()
+ assert isinstance(message, bytes)
+ self._message_bytes_out += len(message)
+ flags = 0
+ if self._compressor:
+ message = self._compressor.compress(message)
+ flags |= self.RSV1
+ self._write_frame(True, opcode, message, flags=flags)
def write_ping(self, data):
"""Send ping frame."""
- assert isinstance(data, bytes_type)
+ assert isinstance(data, bytes)
self._write_frame(True, 0x9, data)
def _receive_frame(self):
@@ -504,11 +678,15 @@ class WebSocketProtocol13(WebSocketProtocol):
self._abort()
def _on_frame_start(self, data):
+ self._wire_bytes_in += len(data)
header, payloadlen = struct.unpack("BB", data)
- self._final_frame = header & 0x80
- reserved_bits = header & 0x70
- self._frame_opcode = header & 0xf
+ self._final_frame = header & self.FIN
+ reserved_bits = header & self.RSV_MASK
+ self._frame_opcode = header & self.OPCODE_MASK
self._frame_opcode_is_control = self._frame_opcode & 0x8
+ if self._decompressor is not None:
+ self._frame_compressed = bool(reserved_bits & self.RSV1)
+ reserved_bits &= ~self.RSV1
if reserved_bits:
# client is using as-yet-undefined extensions; abort
self._abort()
@@ -534,6 +712,7 @@ class WebSocketProtocol13(WebSocketProtocol):
self._abort()
def _on_frame_length_16(self, data):
+ self._wire_bytes_in += len(data)
self._frame_length = struct.unpack("!H", data)[0]
try:
if self._masked_frame:
@@ -544,6 +723,7 @@ class WebSocketProtocol13(WebSocketProtocol):
self._abort()
def _on_frame_length_64(self, data):
+ self._wire_bytes_in += len(data)
self._frame_length = struct.unpack("!Q", data)[0]
try:
if self._masked_frame:
@@ -554,6 +734,7 @@ class WebSocketProtocol13(WebSocketProtocol):
self._abort()
def _on_masking_key(self, data):
+ self._wire_bytes_in += len(data)
self._frame_mask = data
try:
self.stream.read_bytes(self._frame_length, self._on_masked_frame_data)
@@ -561,9 +742,11 @@ class WebSocketProtocol13(WebSocketProtocol):
self._abort()
def _on_masked_frame_data(self, data):
+ # Don't touch _wire_bytes_in; we'll do it in _on_frame_data.
self._on_frame_data(_websocket_mask(self._frame_mask, data))
def _on_frame_data(self, data):
+ self._wire_bytes_in += len(data)
if self._frame_opcode_is_control:
# control frames may be interleaved with a series of fragmented
# data frames, so control frames must not interact with
@@ -604,8 +787,12 @@ class WebSocketProtocol13(WebSocketProtocol):
if self.client_terminated:
return
+ if self._frame_compressed:
+ data = self._decompressor.decompress(data)
+
if opcode == 0x1:
# UTF-8 data
+ self._message_bytes_in += len(data)
try:
decoded = data.decode("utf-8")
except UnicodeDecodeError:
@@ -614,6 +801,7 @@ class WebSocketProtocol13(WebSocketProtocol):
self._run_callback(self.handler.on_message, decoded)
elif opcode == 0x2:
# Binary data
+ self._message_bytes_in += len(data)
self._run_callback(self.handler.on_message, data)
elif opcode == 0x8:
# Close
@@ -664,7 +852,8 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
This class should not be instantiated directly; use the
`websocket_connect` function instead.
"""
- def __init__(self, io_loop, request):
+ def __init__(self, io_loop, request, compression_options=None):
+ self.compression_options = compression_options
self.connect_future = TracebackFuture()
self.read_future = None
self.read_queue = collections.deque()
@@ -679,6 +868,14 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
'Sec-WebSocket-Key': self.key,
'Sec-WebSocket-Version': '13',
})
+ if self.compression_options is not None:
+ # Always offer to let the server set our max_wbits (and even though
+ # we don't offer it, we will accept a client_no_context_takeover
+ # from the server).
+ # TODO: set server parameters for deflate extension
+ # if requested in self.compression_options.
+ request.headers['Sec-WebSocket-Extensions'] = (
+ 'permessage-deflate; client_max_window_bits')
self.tcp_client = TCPClient(io_loop=io_loop)
super(WebSocketClientConnection, self).__init__(
@@ -722,12 +919,10 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
start_line, headers)
self.headers = headers
- assert self.headers['Upgrade'].lower() == 'websocket'
- assert self.headers['Connection'].lower() == 'upgrade'
- accept = WebSocketProtocol13.compute_accept_value(self.key)
- assert self.headers['Sec-Websocket-Accept'] == accept
-
- self.protocol = WebSocketProtocol13(self, mask_outgoing=True)
+ self.protocol = WebSocketProtocol13(
+ self, mask_outgoing=True,
+ compression_options=self.compression_options)
+ self.protocol._process_server_headers(self.key, self.headers)
self.protocol._receive_frame()
if self._timeout is not None:
@@ -777,14 +972,21 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
pass
-def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None):
+def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None,
+ compression_options=None):
"""Client-side websocket support.
Takes a url and returns a Future whose result is a
`WebSocketClientConnection`.
+ ``compression_options`` is interpreted in the same way as the
+ return value of `.WebSocketHandler.get_compression_options`.
+
.. versionchanged:: 3.2
Also accepts ``HTTPRequest`` objects in place of urls.
+
+ .. versionchanged:: 4.1
+ Added ``compression_options``.
"""
if io_loop is None:
io_loop = IOLoop.current()
@@ -798,7 +1000,7 @@ def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None):
request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout)
request = httpclient._RequestProxy(
request, httpclient.HTTPRequest._DEFAULTS)
- conn = WebSocketClientConnection(io_loop, request)
+ conn = WebSocketClientConnection(io_loop, request, compression_options)
if callback is not None:
io_loop.add_future(conn.connect_future, callback)
return conn.connect_future
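Tying the compression changes together, a sketch (URL and handler name are placeholders). Returning a dict from get_compression_options() enables permessage-deflate on the server; passing compression_options to websocket_connect() offers it from the client:

from tornado import websocket

class EchoHandler(websocket.WebSocketHandler):
    def get_compression_options(self):
        # Any dict, even an empty one, enables permessage-deflate.
        return {}

    def on_message(self, message):
        self.write_message(message)

# Client side: offer the extension during the handshake.
future = websocket.websocket_connect('ws://localhost:8888/echo',
                                     compression_options={})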
diff --git a/libs/tornado/wsgi.py b/libs/tornado/wsgi.py
index 6e115e1..f3aa665 100755
--- a/libs/tornado/wsgi.py
+++ b/libs/tornado/wsgi.py
@@ -32,6 +32,7 @@ provides WSGI support in two ways:
from __future__ import absolute_import, division, print_function, with_statement
import sys
+from io import BytesIO
import tornado
from tornado.concurrent import Future
@@ -40,12 +41,8 @@ from tornado import httputil
from tornado.log import access_log
from tornado import web
from tornado.escape import native_str
-from tornado.util import bytes_type, unicode_type
+from tornado.util import unicode_type
-try:
- from io import BytesIO # python 3
-except ImportError:
- from cStringIO import StringIO as BytesIO # python 2
try:
import urllib.parse as urllib_parse # py3
@@ -58,7 +55,7 @@ except ImportError:
# here to minimize the temptation to use them in non-wsgi contexts.
if str is unicode_type:
def to_wsgi_str(s):
- assert isinstance(s, bytes_type)
+ assert isinstance(s, bytes)
return s.decode('latin1')
def from_wsgi_str(s):
@@ -66,7 +63,7 @@ if str is unicode_type:
return s.encode('latin1')
else:
def to_wsgi_str(s):
- assert isinstance(s, bytes_type)
+ assert isinstance(s, bytes)
return s
def from_wsgi_str(s):