Browse Source

Fix subtitle providers that don't use auth.

Fix rTorrent exception handling.
Change IMDB id parsing to use one central function.
tags/release_0.21.14^2
JackDandy 5 years ago
parent
commit
63a6dc6a4a
  1. 11
      CHANGES.md
  2. 2
      lib/rtorrent/__init__.py
  3. 6
      lib/rtorrent/rpc/__init__.py
  4. 2
      lib/subliminal/core.py
  5. 8
      lib/subliminal/services/addic7ed.py
  6. 12
      lib/subliminal/services/tvsubtitles.py
  7. 2
      sickbeard/__init__.py
  8. 17
      sickbeard/helpers.py
  9. 4
      sickbeard/tv.py
  10. 7
      sickbeard/webserve.py

11
CHANGES.md

@@ -1,4 +1,13 @@
### 0.21.13 (2020-02-08 20:55:00 UTC)
### 0.21.14 (2020-02-22 17:55:00 UTC)
* Fix manual search status change on display show
* Fix encoding issue in Boxcar2, Pushbullet, and Pushover notifiers
* Fix ParseResult logging during Process Media
* Fix subtitle providers that don't use auth
* Fix rTorrent exception handling
### 0.21.13 (2020-02-08 20:55:00 UTC)
* Fix Windows Kodi episode library update

2
lib/rtorrent/__init__.py

@@ -439,7 +439,7 @@ class RTorrent(object):
try:
func = next(filter_iter(lambda m: self.method_exists(m), (method.rpc_call,) + method.aliases))
getattr(self.get_connection(), func)(info_hash)
except(xmlrpclib.Fault, BaseException):
except (BaseException, Exception):
result = False
return result

6
lib/rtorrent/rpc/__init__.py

@@ -97,7 +97,7 @@ class Method(object):
self.varname = get_varname(next(filter_iter(lambda f: rt_obj.method_exists(f),
(self.rpc_call,) + tuple(getattr(self, 'aliases', '')))))
return True
except IndexError:
except (BaseException, Exception):
pass
return False
@@ -163,7 +163,7 @@ class Multicall(object):
try:
results = tuple(next(filter_iter(lambda x: isinstance(x, list), xmc().results)))
except IndexError:
except (BaseException, Exception):
return [[]]
results_processed = []
@@ -220,7 +220,7 @@ def find_method(rpc_call):
lambda n: n.lower(), [m.rpc_call] + list(getattr(m, 'aliases', []))),
rtorrent.methods + rtorrent.torrent.methods +
rtorrent.file.methods + rtorrent.tracker.methods + rtorrent.peer.methods))
except IndexError:
except (BaseException, Exception):
return -1

2
lib/subliminal/core.py

@@ -204,7 +204,7 @@ def get_service(services, service_name, config=None, os_auth=None):
"""
if service_name not in services:
mod = __import__('services.' + service_name, globals=globals(), locals=locals(), fromlist=['Service'], level=1)
services[service_name] = mod.Service(os_auth=os_auth)
services[service_name] = mod.Service(**(dict(os_auth=os_auth), {})[not hasattr(mod.Service, 'username')])
services[service_name].init()
services[service_name].config = config
return services[service_name]

8
lib/subliminal/services/addic7ed.py

@@ -44,13 +44,13 @@ class Addic7ed(ServiceBase):
u'Català': Language('cat')}
videos = [Episode]
require_video = False
required_features = ['permissive']
required_features = ['lxml'] # ['permissive']
@cachedmethod
def get_series_id(self, name):
"""Get the show page and cache every show found in it"""
r = self.session.get('%s/shows.php' % self.server_url)
soup = BeautifulSoup(r.content, self.required_features)
soup = BeautifulSoup(r.content, self.required_features[0])
for html_series in soup.select('h3 > a'):
series_name = html_series.text.lower()
match = re.search('show/([0-9]+)', html_series['href'])
@@ -73,7 +73,7 @@ class Addic7ed(ServiceBase):
logger.debug(u'Could not find series id for %s' % series)
return []
r = self.session.get('%s/show/%d&season=%d' % (self.server_url, series_id, season))
soup = BeautifulSoup(r.content, self.required_features)
soup = BeautifulSoup(r.content, self.required_features[0])
subtitles = []
for row in soup('tr', {'class': 'epeven completed'}):
cells = row('td')
@@ -105,7 +105,7 @@ class Addic7ed(ServiceBase):
logger.info(u'Downloading %s in %s' % (subtitle.link, subtitle.path))
try:
r = self.session.get(subtitle.link, headers={'Referer': subtitle.link, 'User-Agent': self.user_agent})
soup = BeautifulSoup(r.content, self.required_features)
soup = BeautifulSoup(r.content, self.required_features[0])
if soup.title is not None and u'Addic7ed.com' in soup.title.text.strip():
raise DownloadFailedError('Download limit exceeded')
with open(subtitle.path, 'wb') as f:

12
lib/subliminal/services/tvsubtitles.py

@@ -49,12 +49,12 @@ class TvSubtitles(ServiceBase):
'cn': Language('chi'), 'br': Language('pob')}
videos = [Episode]
require_video = False
required_features = ['permissive']
required_features = ['lxml'] # ['permissive']
@cachedmethod
def get_likely_series_id(self, name):
r = self.session.post('%s/search.php' % self.server_url, data={'q': name})
soup = BeautifulSoup(r.content, self.required_features)
soup = BeautifulSoup(r.content, self.required_features[0])
maindiv = soup.find('div', 'left')
results = []
for elem in maindiv.find_all('li'):
@@ -74,7 +74,7 @@ class TvSubtitles(ServiceBase):
# download the page of the season, contains ids for all episodes
episode_id = None
r = self.session.get('%s/tvshow-%d-%d.html' % (self.server_url, series_id, season))
soup = BeautifulSoup(r.content, self.required_features)
soup = BeautifulSoup(r.content, self.required_features[0])
table = soup.find('table', id='table5')
for row in table.find_all('tr'):
cells = row.find_all('td')
@@ -97,7 +97,7 @@ class TvSubtitles(ServiceBase):
def get_sub_ids(self, episode_id):
subids = []
r = self.session.get('%s/episode-%d.html' % (self.server_url, episode_id))
epsoup = BeautifulSoup(r.content, self.required_features)
epsoup = BeautifulSoup(r.content, self.required_features[0])
for subdiv in epsoup.find_all('a'):
if 'href' not in subdiv.attrs or not subdiv['href'].startswith('/subtitle'):
continue
@@ -121,7 +121,7 @@ class TvSubtitles(ServiceBase):
sid = self.get_likely_series_id(series.lower())
try:
ep_id = self.get_episode_id(sid, season, episode)
except KeyError:
except (KeyError, TypeError):
logger.debug(u'Could not find episode id for %s season %d episode %d' % (series, season, episode))
return []
subids = self.get_sub_ids(ep_id)
@@ -142,4 +142,4 @@ class TvSubtitles(ServiceBase):
return subtitle
Service = TvSubtitles
Service = TvSubtitles

2
sickbeard/__init__.py

@@ -1242,7 +1242,7 @@ def init_stage_1(console_logging):
HISTORY_LAYOUT = check_setting_str(CFG, 'GUI', 'history_layout', 'detailed')
BROWSELIST_HIDDEN = map_list(
lambda y: TVidProdid.glue in y and y or '%s%s%s' % (
(TVINFO_TVDB, TVINFO_IMDB)[bool(re.search(r'(?i)tt\d{7}', y))], TVidProdid.glue, y),
(TVINFO_TVDB, TVINFO_IMDB)[bool(helpers.parse_imdb_id(y))], TVidProdid.glue, y),
[x.strip() for x in check_setting_str(CFG, 'GUI', 'browselist_hidden', '').split('|~|') if x.strip()])
# initialize NZB and TORRENT providers

17
sickbeard/helpers.py

@@ -70,6 +70,7 @@ if False:
from typing import Any, AnyStr, Dict, NoReturn, Iterable, Iterator, List, Optional, Tuple, Union
RE_XML_ENCODING = re.compile(r'^(<\?xml[^>]+)\s+(encoding\s*=\s*[\"\'][^\"\']*[\"\'])(\s*\?>|)', re.U)
RE_IMDB_ID = re.compile(r'(?i)(tt\d{4,})')
def indent_xml(elem, level=0):
@@ -2021,3 +2022,19 @@ def cmdline_runner(cmd, shell=False):
out = out.strip()
return out, err, p.returncode
def parse_imdb_id(string):
    # type: (AnyStr) -> Optional[AnyStr]
    """Parse an IMDB id out of an arbitrary string.

    An IMDB id is ``tt`` followed by four or more digits (pattern
    ``RE_IMDB_ID``), e.g. ``tt0123456``.

    :param string: text that may contain an IMDB id (may be None)
    :return: the first IMDB id found, or None when no id is present
        or ``string`` is not a string
    """
    try:
        # search() returns None on no match; a non-string argument
        # (e.g. None from a failed lookup upstream) raises TypeError
        match = RE_IMDB_ID.search(string)
    except TypeError:
        return None
    return match.group(1) if match else None

4
sickbeard/tv.py

@@ -1338,7 +1338,7 @@ class TVShow(TVShowBase):
try:
response = requests.head(page_url, allow_redirects=True)
if response.history and any([h for h in response.history if 301 == h.status_code]):
return re.search(r'(tt\d{7})', response.url, flags=re.I).group(1)
return helpers.parse_imdb_id(response.url)
except (BaseException, Exception):
pass
@@ -1370,7 +1370,7 @@ class TVShow(TVShowBase):
imdb_id = redirect_check
imdb_info['imdb_id'] = self.imdbid
i = imdbpie.Imdb(exclude_episodes=True, cachedir=ek.ek(os.path.join, sickbeard.CACHE_DIR, 'imdb-pie'))
if not re.search(r'tt\d{7}', imdb_id, flags=re.I):
if not helpers.parse_imdb_id(imdb_id):
logger.log('Not a valid imdbid: %s for show: %s' % (imdb_id, self.name), logger.WARNING)
return
imdb_ratings = i.get_title_ratings(imdb_id=imdb_id)

7
sickbeard/webserve.py

@@ -3350,7 +3350,7 @@ class AddShows(Home):
search_id, tvdb_prodid, trakt_prodid, tmdb_prodid, trakt_id = '', None, None, None, TVINFO_TRAKT
try:
search_id = re.search(r'(?m)((?:tt\d{4,})|^\d{4,}$)', search_term).group(1)
search_id = helpers.parse_imdb_id(search_term) or re.search(r'(?m)(^\d{4,}$)', search_term).group(1)
tvinfo_config = sickbeard.TVInfoAPI(trakt_id).api_params.copy()
tvinfo_config['language'] = lang
@@ -3947,7 +3947,6 @@ class AddShows(Home):
def parse_imdb_html(self, html, filtered, kwargs):
img_size = re.compile(r'(?im)(V1[^XY]+([XY]))(\d+)([^\d]+)(\d+)([^\d]+)(\d+)([^\d]+)(\d+)([^\d]+)(\d+)(.*?)$')
imdb_id = re.compile(r'(?i).*(tt\d+).*')
with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
show_list = soup.select('.lister-list')
@@ -3958,7 +3957,7 @@ class AddShows(Home):
try:
title = row.select('.lister-item-header a[href*=title]')[0]
url_path = title['href'].strip('/')
ids = dict(imdb=imdb_id.sub(r'\1', url_path))
ids = dict(imdb=helpers.parse_imdb_id(url_path))
year, ended = 2 * [None]
first_aired = row.select('.lister-item-header .lister-item-year')
if len(first_aired):
@@ -4126,7 +4125,7 @@ class AddShows(Home):
def info_imdb(self, ids, show_name):
return self.new_show('|'.join(['', '', '', re.search(r'(?i)tt\d+$', ids) and ids or show_name]),
return self.new_show('|'.join(['', '', '', helpers.parse_imdb_id(ids) and ids or show_name]),
use_show_name=True)
def trakt_anticipated(self):

Loading…
Cancel
Save