Browse Source

Subliminal update

pull/66/head
Ruud 14 years ago
parent
commit
d08d519ff2
  1. 8
      libs/requests/__init__.py
  2. 39
      libs/requests/api.py
  3. 5
      libs/requests/async.py
  4. 2
      libs/requests/defaults.py
  5. 3
      libs/requests/exceptions.py
  6. 163
      libs/requests/models.py
  7. 8
      libs/requests/packages/urllib3/connectionpool.py
  8. 4
      libs/requests/packages/urllib3/packages/__init__.py
  9. 61
      libs/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
  10. 13
      libs/requests/sessions.py
  11. 27
      libs/requests/utils.py
  12. 12
      libs/subliminal/__init__.py
  13. 10
      libs/subliminal/core.py
  14. 4
      libs/subliminal/plugins.py
  15. 2
      libs/subliminal/subtitles.py

8
libs/requests/__init__.py

@ -9,17 +9,17 @@
requests
~~~~~~~~
:copyright: (c) 2011 by Kenneth Reitz.
:copyright: (c) 2012 by Kenneth Reitz.
:license: ISC, see LICENSE for more details.
"""
__title__ = 'requests'
__version__ = '0.8.6'
__build__ = 0x000806
__version__ = '0.9.1'
__build__ = 0x000901
__author__ = 'Kenneth Reitz'
__license__ = 'ISC'
__copyright__ = 'Copyright 2011 Kenneth Reitz'
__copyright__ = 'Copyright 2012 Kenneth Reitz'

39
libs/requests/api.py

@ -6,28 +6,14 @@ requests.api
This module implements the Requests API.
:copyright: (c) 2011 by Kenneth Reitz.
:copyright: (c) 2012 by Kenneth Reitz.
:license: ISC, see LICENSE for more details.
"""
from . import sessions
def request(method, url,
params=None,
data=None,
headers=None,
cookies=None,
files=None,
auth=None,
timeout=None,
allow_redirects=False,
proxies=None,
hooks=None,
return_response=True,
prefetch=False,
session=None,
config=None):
def request(method, url, **kwargs):
"""Constructs and sends a :class:`Request <Request>`.
Returns :class:`Response <Response>` object.
@ -45,26 +31,11 @@ def request(method, url,
:param return_response: (optional) If False, an un-sent Request object will be returned.
:param session: (optional) A :class:`Session` object to be used for the request.
:param config: (optional) A configuration dictionary.
:param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
"""
s = session or sessions.session()
return s.request(
method=method,
url=url,
params=params,
data=data,
headers=headers,
cookies=cookies,
files=files,
auth=auth,
timeout=timeout,
allow_redirects=allow_redirects,
proxies=proxies,
hooks=hooks,
return_response=return_response,
config=config,
prefetch=prefetch
)
s = kwargs.get('session') or sessions.session()
return s.request(method=method, url=url, **kwargs)

5
libs/requests/async.py

@ -36,6 +36,11 @@ def patched(f):
kwargs['return_response'] = False
kwargs['prefetch'] = True
config = kwargs.get('config', {})
config.update(safe_mode=True)
kwargs['config'] = config
return f(*args, **kwargs)
return wrapped

2
libs/requests/defaults.py

@ -15,6 +15,7 @@ Configurations:
:decode_unicode: Decode unicode responses automatically?
:keep_alive: Reuse HTTP Connections?
:max_retries: The number of times a request should be retried in the event of a connection failure.
:danger_mode: If true, Requests will raise errors immediately.
:safe_mode: If true, Requests will catch all errors.
:pool_maxsize: The maximum size of an HTTP connection pool.
:pool_connections: The number of active HTTP connection pools to use.
@ -38,5 +39,6 @@ defaults['decode_unicode'] = True
defaults['pool_connections'] = 10
defaults['pool_maxsize'] = 10
defaults['max_retries'] = 0
defaults['danger_mode'] = False
defaults['safe_mode'] = False
defaults['keep_alive'] = True

3
libs/requests/exceptions.py

@ -18,6 +18,9 @@ class HTTPError(RequestException):
class ConnectionError(RequestException):
"""A Connection error occurred."""
class SSLError(ConnectionError):
"""An SSL error occurred."""
class Timeout(RequestException):
"""The request timed out."""

163
libs/requests/models.py

@ -7,8 +7,8 @@ requests.models
This module contains the primary objects that power Requests.
"""
import os
import urllib
import zlib
from urlparse import urlparse, urlunparse, urljoin, urlsplit
from datetime import datetime
@ -18,16 +18,18 @@ from .structures import CaseInsensitiveDict
from .status_codes import codes
from .packages import oreos
from .auth import HTTPBasicAuth, HTTPProxyAuth
from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .packages.urllib3 import connectionpool, poolmanager
from .packages.urllib3.filepost import encode_multipart_formdata
from .exceptions import (
Timeout, URLRequired, TooManyRedirects, HTTPError, ConnectionError)
ConnectionError, HTTPError, RequestException, Timeout, TooManyRedirects,
URLRequired, SSLError)
from .utils import (
get_encoding_from_headers, stream_decode_response_unicode,
decode_gzip, stream_decode_gzip, guess_filename, requote_path)
stream_decompress, guess_filename, requote_path)
REDIRECT_STATI = (codes.moved, codes.found, codes.other, codes.temporary_moved)
@ -54,7 +56,8 @@ class Request(object):
proxies=None,
hooks=None,
config=None,
_poolmanager=None):
_poolmanager=None,
verify=None):
#: Float describes the timeout of the request.
# (Use socket.setdefaulttimeout() as fallback)
@ -116,11 +119,15 @@ class Request(object):
#: Session.
self.session = None
#: SSL Verification.
self.verify = verify
if headers:
headers = CaseInsensitiveDict(self.headers)
else:
headers = CaseInsensitiveDict()
# Add configured base headers.
for (k, v) in self.config.get('base_headers', {}).items():
if k not in headers:
headers[k] = v
@ -171,6 +178,9 @@ class Request(object):
# Save cookies in Response.
response.cookies = cookies
# No exceptions were harmed in the making of this request.
response.error = getattr(resp, 'error', None)
# Save original response for later.
response.raw = resp
@ -237,6 +247,7 @@ class Request(object):
timeout=self.timeout,
_poolmanager=self._poolmanager,
proxies = self.proxies,
verify = self.verify
)
request.send()
@ -420,6 +431,30 @@ class Request(object):
else:
conn = connectionpool.connection_from_url(url)
if url.startswith('https') and self.verify:
cert_loc = None
# Allow self-specified cert location.
if self.verify is not True:
cert_loc = self.verify
# Look for configuration.
if not cert_loc:
cert_loc = os.environ.get('REQUESTS_CA_BUNDLE')
# Curl compatibility.
if not cert_loc:
cert_loc = os.environ.get('CURL_CA_BUNDLE')
# Use the awesome certifi list.
if not cert_loc:
cert_loc = __import__('certifi').where()
conn.cert_reqs = 'CERT_REQUIRED'
conn.ca_certs = cert_loc
if not self.sent or anyway:
if self.cookies:
@ -439,32 +474,43 @@ class Request(object):
self.headers['Cookie'] = cookie_header
try:
# Send the request.
r = conn.urlopen(
method=self.method,
url=self.path_url,
body=body,
headers=self.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.config.get('max_retries', 0),
timeout=self.timeout,
)
self.sent = True
except MaxRetryError, e:
if not self.config.get('safe_mode', False):
# The inner try .. except re-raises certain exceptions as
# internal exception types; the outer suppresses exceptions
# when safe mode is set.
try:
# Send the request.
r = conn.urlopen(
method=self.method,
url=self.path_url,
body=body,
headers=self.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=True,
retries=self.config.get('max_retries', 0),
timeout=self.timeout,
)
self.sent = True
except MaxRetryError, e:
raise ConnectionError(e)
else:
r = None
except (_SSLError, _HTTPError), e:
if not self.config.get('safe_mode', False):
except (_SSLError, _HTTPError), e:
if self.verify and isinstance(e, _SSLError):
raise SSLError(e)
raise Timeout('Request timed out.')
except RequestException, e:
if self.config.get('safe_mode', False):
# In safe mode, catch the exception and attach it to
# a blank urllib3.HTTPResponse object.
r = HTTPResponse()
r.error = e
else:
raise
self._build_response(r)
# Response manipulation hook.
@ -478,6 +524,9 @@ class Request(object):
if prefetch:
# Save the response.
self.response.content
if self.config.get('danger_mode'):
self.response.raise_for_status()
return self.sent
@ -567,7 +616,9 @@ class Response(object):
gen = generate()
if 'gzip' in self.headers.get('content-encoding', ''):
gen = stream_decode_gzip(gen)
gen = stream_decompress(gen, mode='gzip')
elif 'deflate' in self.headers.get('content-encoding', ''):
gen = stream_decompress(gen, mode='deflate')
if decode_unicode is None:
decode_unicode = self.config.get('decode_unicode')
@ -578,50 +629,25 @@ class Response(object):
return gen
def iter_lines(self, newlines=None, decode_unicode=None):
def iter_lines(self, chunk_size=10 * 1024, decode_unicode=None):
"""Iterates over the response data, one line at a time. This
avoids reading the content at once into memory for large
responses.
:param newlines: a collection of bytes to separate lines with.
"""
if newlines is None:
newlines = ('\r', '\n', '\r\n')
pending = None
for chunk in self.iter_content(chunk_size, decode_unicode=decode_unicode):
if pending is not None:
chunk = pending + chunk
lines = chunk.splitlines(True)
for line in lines[:-1]:
yield line.rstrip()
# Save the last part of the chunk for next iteration, to keep full line together
pending = lines[-1]
if self._content_consumed:
raise RuntimeError(
'The content for this response was already consumed'
)
def generate():
chunk = []
while 1:
c = self.raw.read(1)
if not c:
break
if c in newlines:
yield ''.join(chunk)
chunk = []
else:
chunk.append(c)
self._content_consumed = True
gen = generate()
if 'gzip' in self.headers.get('content-encoding', ''):
gen = stream_decode_gzip(gen)
if decode_unicode is None:
decode_unicode = self.config.get('decode_unicode')
if decode_unicode:
gen = stream_decode_response_unicode(gen, self)
return gen
# Yield the last line
if pending is not None:
yield pending.rstrip()
@property
@ -643,13 +669,6 @@ class Response(object):
content = self._content
# Decode GZip'd content.
if 'gzip' in self.headers.get('content-encoding', ''):
try:
content = decode_gzip(self._content)
except zlib.error:
pass
# Decode unicode content.
if self.config.get('decode_unicode'):

8
libs/requests/packages/urllib3/connectionpool.py

@ -13,6 +13,7 @@ from Queue import Queue, Empty, Full
from select import select
from socket import error as SocketError, timeout as SocketTimeout
from .packages.ssl_match_hostname import match_hostname, CertificateError
try:
import ssl
@ -70,7 +71,8 @@ class VerifiedHTTPSConnection(HTTPSConnection):
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
cert_reqs=self.cert_reqs,
ca_certs=self.ca_certs)
if self.ca_certs:
match_hostname(self.sock.getpeercert(), self.host)
## Pool objects
@ -364,6 +366,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# SSL certificate error
raise SSLError(e)
except (CertificateError), e:
# Name mismatch
raise SSLError(e)
except (HTTPException, SocketError), e:
# Connection broken, discard. It will be replaced next _get_conn().
conn = None

4
libs/requests/packages/urllib3/packages/__init__.py

@ -0,0 +1,4 @@
from __future__ import absolute_import
from . import ssl_match_hostname

61
libs/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py

@ -0,0 +1,61 @@
"""The match_hostname() function from Python 3.2, essential when using SSL."""
import re
__version__ = '3.2.2'
class CertificateError(ValueError):
pass
def _dnsname_to_pat(dn):
pats = []
for frag in dn.split(r'.'):
if frag == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
else:
# Otherwise, '*' matches any dotless fragment.
frag = re.escape(frag)
pats.append(frag.replace(r'\*', '[^.]*'))
return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 rules
are mostly followed, but IP addresses are not accepted for *hostname*.
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate")
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if _dnsname_to_pat(value).match(hostname):
return
dnsnames.append(value)
if not dnsnames:
# The subject is only checked when there is no dNSName entry
# in subjectAltName
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_to_pat(value).match(hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise CertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise CertificateError("no appropriate commonName or "
"subjectAltName fields were found")

13
libs/requests/sessions.py

@ -63,7 +63,8 @@ class Session(object):
proxies=None,
hooks=None,
params=None,
config=None):
config=None,
verify=True):
self.headers = headers or {}
self.cookies = cookies or {}
@ -73,6 +74,7 @@ class Session(object):
self.hooks = hooks or {}
self.params = params or {}
self.config = config or {}
self.verify = verify
for (k, v) in defaults.items():
self.config.setdefault(k, v)
@ -111,7 +113,8 @@ class Session(object):
hooks=None,
return_response=True,
config=None,
prefetch=False):
prefetch=False,
verify=None):
"""Constructs and sends a :class:`Request <Request>`.
Returns :class:`Response <Response>` object.
@ -130,6 +133,7 @@ class Session(object):
:param return_response: (optional) If False, an un-sent Request object will be returned.
:param config: (optional) A configuration dictionary.
:param prefetch: (optional) if ``True``, the response content will be immediately downloaded.
:param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
"""
method = str(method).upper()
@ -141,6 +145,10 @@ class Session(object):
headers = {} if headers is None else headers
params = {} if params is None else params
hooks = {} if hooks is None else hooks
if verify is None:
verify = self.verify
# use session's hooks as defaults
for key, cb in self.hooks.iteritems():
hooks.setdefault(key, cb)
@ -164,6 +172,7 @@ class Session(object):
allow_redirects=allow_redirects,
proxies=proxies,
config=config,
verify=verify,
_poolmanager=self.poolmanager
)

27
libs/requests/utils.py

@ -354,20 +354,37 @@ def decode_gzip(content):
return zlib.decompress(content, 16 + zlib.MAX_WBITS)
def stream_decode_gzip(iterator):
"""Stream decodes a gzip-encoded iterator"""
def stream_decompress(iterator, mode='gzip'):
"""
Stream decodes an iterator over compressed data
:param iterator: An iterator over compressed data
:param mode: 'gzip' or 'deflate'
:return: An iterator over decompressed data
"""
if mode not in ['gzip', 'deflate']:
raise ValueError('stream_decompress mode must be gzip or deflate')
zlib_mode = 16 + zlib.MAX_WBITS if mode == 'gzip' else -zlib.MAX_WBITS
dec = zlib.decompressobj(zlib_mode)
try:
dec = zlib.decompressobj(16 + zlib.MAX_WBITS)
for chunk in iterator:
rv = dec.decompress(chunk)
if rv:
yield rv
except zlib.error:
# If there was an error decompressing, just return the raw chunk
yield chunk
# Continue to return the rest of the raw data
for chunk in iterator:
yield chunk
else:
# Make sure everything has been returned from the decompression object
buf = dec.decompress('')
rv = buf + dec.flush()
if rv:
yield rv
except zlib.error:
pass
def requote_path(path):

12
libs/subliminal/__init__.py

@ -18,14 +18,6 @@
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__all__ = ['Subliminal']
from infos import *
from languages import *
from utils import *
from exceptions import *
from videos import *
from tasks import *
from subtitles import *
from core import *
from plugins import *
from core import Subliminal

10
libs/subliminal/core.py

@ -27,7 +27,7 @@ from exceptions import InvalidLanguageError, PluginError, BadStateError, \
WrongTaskError, DownloadFailedError
from itertools import groupby
from languages import list_languages
from subliminal.utils import NullHandler
from utils import NullHandler
from tasks import Task, DownloadTask, ListTask, StopTask
import Queue
import guessit
@ -216,16 +216,16 @@ class Subliminal(object):
key = ''
for sort_item in order:
if sort_item == LANGUAGE_INDEX:
key += '{:03d}'.format(len(self._languages) - self._languages.index(subtitle.language) - 1)
key += '{0:03d}'.format(len(self._languages) - self._languages.index(subtitle.language) - 1)
elif sort_item == PLUGIN_INDEX:
key += '{:02d}'.format(len(self._plugins) - self._plugins.index(subtitle.plugin) - 1)
key += '{0:02d}'.format(len(self._plugins) - self._plugins.index(subtitle.plugin) - 1)
elif sort_item == PLUGIN_CONFIDENCE:
key += '{:04d}'.format(int(subtitle.confidence * 1000))
key += '{0:04d}'.format(int(subtitle.confidence * 1000))
elif sort_item == MATCHING_CONFIDENCE:
confidence = 0
if subtitle.release:
confidence = matching_confidence(video, subtitle)
key += '{:04d}'.format(int(confidence * 1000))
key += '{0:04d}'.format(int(confidence * 1000))
return int(key)
def groupByVideo(self, list_result):

4
libs/subliminal/plugins.py

@ -22,8 +22,8 @@ __all__ = ['PluginBase', 'OpenSubtitles', 'BierDopje', 'TheSubDB', 'SubsWiki', '
from exceptions import DownloadFailedError, MissingLanguageError, PluginError
from subliminal.utils import get_keywords, PluginConfig, split_keyword
from subliminal.videos import Episode, Movie, UnknownVideo
from utils import get_keywords, PluginConfig, split_keyword
from videos import Episode, Movie, UnknownVideo
from subtitles import ResultSubtitle, get_subtitle_path
import BeautifulSoup
import abc

2
libs/subliminal/subtitles.py

@ -21,7 +21,7 @@
__all__ = ['Subtitle', 'EmbeddedSubtitle', 'ExternalSubtitle', 'ResultSubtitle', 'get_subtitle_path']
from subliminal.languages import list_languages, convert_language
from languages import list_languages, convert_language
import abc
import os.path

Loading…
Cancel
Save