
Fixed bug in TVRage API that was not properly handling malformed airdates for shows.

Fixed bug in metadata function that was throwing AttributeError exceptions when checking for banner images for shows from TVRage.

Converted some list comprehensions to generators to improve speed and lower the memory usage of SB.
tags/release_0.1.0
echel0n committed 11 years ago
commit 6a7906eeb1
  1. lib/tvdb_api/tvdb_api.py (43 changed lines)
  2. lib/tvrage_api/tvrage_api.py (45 changed lines)
  3. sickbeard/metadata/generic.py (4 changed lines)
  4. sickbeard/tv.py (4 changed lines)
  5. sickbeard/webserve.py (9 changed lines)
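
For context on the third point of the commit message: a generator expression produces items lazily instead of materializing the whole list up front, which can reduce peak memory when the result is only iterated once. A minimal standalone illustration (not code from this commit), runnable on Python 2.6+ or 3:

    import sys

    rows = range(100000)

    as_list = [x * 2 for x in rows]   # builds all 100000 results immediately
    as_gen = (x * 2 for x in rows)    # builds results one at a time on demand

    print(sys.getsizeof(as_list))     # hundreds of kilobytes for the list
    print(sys.getsizeof(as_gen))      # a small, constant-size generator object

    total = sum(as_gen)               # consumed lazily, never held in full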

lib/tvdb_api/tvdb_api.py (43 changed lines)

@@ -47,10 +47,6 @@ from tvdb_exceptions import (tvdb_error, tvdb_userabort, tvdb_shownotfound,
# Cached Session Handler
from lib.httpcache import CachingHTTPAdapter
s = requests.Session()
s.mount('http://', CachingHTTPAdapter())
def log():
    return logging.getLogger("tvdb_api")
@@ -518,12 +514,36 @@ class Tvdb:
        return os.path.join(tempfile.gettempdir(), "tvdb_api-%s" % (uid))
    def retry(ExceptionToCheck, tries=4, delay=3, backoff=2):
        def deco_retry(f):
            def f_retry(*args, **kwargs):
                mtries, mdelay = tries, delay
                while mtries > 0:
                    try:
                        return f(*args, **kwargs)
                    except ExceptionToCheck, e:
                        print "%s, Retrying in %d seconds..." % (str(e), mdelay)
                        time.sleep(mdelay)
                        mtries -= 1
                        mdelay *= backoff
                        lastException = e
                raise lastException
            return f_retry # true decorator
        return deco_retry
    @retry(tvdb_error, tries=4)
    def _loadUrl(self, url, params=None, language=None):
        try:
            log().debug("Retrieving URL %s" % url)
            # get response from TVDB
            if self.config['cache_enabled']:
                s = requests.Session()
                s.mount('http://', CachingHTTPAdapter())
                resp = s.get(url, params=params)
            else:
                resp = requests.get(url, params=params)
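
To show how the retry decorator added in this hunk behaves at call time, here is a self-contained usage sketch. It restates the decorator in except-as syntax so it runs on both Python 2.6+ and 3, and flaky_fetch is a hypothetical stand-in for a network call such as _loadUrl, not SickBeard code:

    import time

    def retry(ExceptionToCheck, tries=4, delay=3, backoff=2):
        # same shape as the decorator above: retry on the given exception
        # type, doubling the wait after each failed attempt
        def deco_retry(f):
            def f_retry(*args, **kwargs):
                mtries, mdelay = tries, delay
                while mtries > 0:
                    try:
                        return f(*args, **kwargs)
                    except ExceptionToCheck as e:
                        time.sleep(mdelay)
                        mtries -= 1
                        mdelay *= backoff
                        lastException = e
                raise lastException
            return f_retry
        return deco_retry

    @retry(IOError, tries=4, delay=1)
    def flaky_fetch():
        # hypothetical stand-in for _loadUrl: fails twice, then succeeds
        flaky_fetch.calls += 1
        if flaky_fetch.calls < 3:
            raise IOError("temporary failure")
        return "payload"

    flaky_fetch.calls = 0
    print(flaky_fetch())  # waits 1s, then 2s, then prints "payload"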
@@ -629,7 +649,7 @@ class Tvdb:
        log().debug("Searching for show %s" % series)
        self.config['params_getSeries']['seriesname'] = series
        seriesEt = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries'])
        allSeries = [dict((s.tag.lower(), s.text) for s in x.getchildren()) for x in seriesEt]
        allSeries = list(dict((s.tag.lower(), s.text) for s in x.getchildren()) for x in seriesEt)
        return allSeries
@@ -869,20 +889,17 @@ class Tvdb:
        already been grabbed), or grabs all episodes and returns
        the correct SID.
        """
        sid = set()
        if name in self.corrections:
            log().debug('Correcting %s to %s' % (name, self.corrections[name]))
            sid = self.corrections[name]
            return self.corrections[name]
        else:
            log().debug('Getting show %s' % (name))
            selected_series = self._getSeries(name)
            if isinstance(selected_series, dict):
                selected_series = [selected_series]
            [sid.add(int(x['id'])) for x in selected_series if
                self._getShowData(int(x['id']), self.config['language'], seriesSearch=True)]
            [self.corrections.update({x['seriesname']: int(x['id'])}) for x in selected_series]
            return sid
            sids = list(int(x['id']) for x in selected_series if self._getShowData(int(x['id']), self.config['language'], seriesSearch=True))
            self.corrections.update(dict((x['seriesname'], int(x['id'])) for x in selected_series))
            return sids
    def __getitem__(self, key):
        """Handles tvdb_instance['seriesname'] calls.
@@ -896,7 +913,7 @@ class Tvdb:
        key = key.lower() # make key lower case
        sids = self._nameToSid(key)
        return [self.shows[sid] for sid in sids]
        return list(self.shows[sid] for sid in sids)
    def __repr__(self):
        return str(self.shows)
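
The _nameToSid hunk above drops list comprehensions that were run purely for their side effects (sid.add, corrections.update) in favour of building the values directly. A minimal sketch of the same refactor on toy data (the ids and names are made up, and sids/corrections here are plain locals rather than Tvdb attributes):

    selected_series = [
        {'id': '101', 'seriesname': 'Show A'},
        {'id': '202', 'seriesname': 'Show B'},
    ]

    # old style: throwaway lists built only for their side effects
    sids = set()
    corrections = {}
    [sids.add(int(x['id'])) for x in selected_series]
    [corrections.update({x['seriesname']: int(x['id'])}) for x in selected_series]

    # new style: build the containers directly, nothing is discarded
    sids = list(int(x['id']) for x in selected_series)
    corrections = dict((x['seriesname'], int(x['id'])) for x in selected_series)

    print(sids)         # [101, 202]
    print(corrections)  # {'Show A': 101, 'Show B': 202} (order may vary)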

lib/tvrage_api/tvrage_api.py (45 changed lines)

@@ -29,7 +29,6 @@ try:
except ImportError:
    import xml.etree.ElementTree as ElementTree
from collections import defaultdict
from lib.dateutil.parser import parse
from lib import requests
@@ -39,8 +38,6 @@ from tvrage_exceptions import (tvrage_error, tvrage_userabort, tvrage_shownotfou
# Cached Session Handler
from lib.httpcache import CachingHTTPAdapter
s = requests.Session()
s.mount('http://', CachingHTTPAdapter())
def log():
    return logging.getLogger("tvrage_api")
@@ -343,12 +340,35 @@ class TVRage:
        return os.path.join(tempfile.gettempdir(), "tvrage_api-%s" % (uid))
    def retry(ExceptionToCheck, tries=4, delay=3, backoff=2):
        def deco_retry(f):
            def f_retry(*args, **kwargs):
                mtries, mdelay = tries, delay
                while mtries > 0:
                    try:
                        return f(*args, **kwargs)
                    except ExceptionToCheck, e:
                        print "%s, Retrying in %d seconds..." % (str(e), mdelay)
                        time.sleep(mdelay)
                        mtries -= 1
                        mdelay *= backoff
                        lastException = e
                raise lastException
            return f_retry # true decorator
        return deco_retry
    @retry(tvrage_error, tries=4)
    def _loadUrl(self, url, params=None):
        try:
            log().debug("Retrieving URL %s" % url)
            # get response from TVRage
            if self.config['cache_enabled']:
                s = requests.Session()
                s.mount('http://', CachingHTTPAdapter())
                resp = s.get(url, params=params)
            else:
                resp = requests.get(url, params=params)
@@ -396,7 +416,8 @@ class TVRage:
            if elm.tag in 'firstaired':
                try:
                    if elm.text is "0000-00-00": elm.text = str(dt.date.fromordinal(1))
                    if elm.text in "0000-00-00":
                        elm.text = str(dt.date.fromordinal(1))
                    elm.text = re.sub("([-]0{2}){1,}", "", elm.text)
                    fixDate = parse(elm.text, fuzzy=True).date()
                    elm.text = fixDate.strftime("%Y-%m-%d")
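
The new firstaired handling above maps TVRage's all-zero placeholder to the year-1 date and strips any trailing "-00" components before the remainder is handed to dateutil's fuzzy parser. A standalone illustration of just that normalisation step (strip_zero_parts is a hypothetical helper, and the final parsed date still depends on dateutil's defaults):

    import re
    import datetime as dt

    def strip_zero_parts(text):
        # mirror the patched checks: all-zero dates become the ordinal-1 date,
        # placeholder "-00" month/day components are dropped
        if text in "0000-00-00":
            return str(dt.date.fromordinal(1))
        return re.sub("([-]0{2}){1,}", "", text)

    print(strip_zero_parts("2013-00-00"))  # "2013"
    print(strip_zero_parts("2013-05-00"))  # "2013-05"
    print(strip_zero_parts("2013-05-17"))  # "2013-05-17"
    print(strip_zero_parts("0000-00-00"))  # "0001-01-01"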
@@ -487,7 +508,7 @@ class TVRage:
        log().debug("Searching for show %s" % series)
        self.config['params_getSeries']['show'] = series
        seriesEt = self._getetsrc(self.config['url_getSeries'], self.config['params_getSeries'])
        allSeries = [dict((s.tag.lower(),s.text) for s in x.getchildren()) for x in seriesEt]
        allSeries = list(dict((s.tag.lower(),s.text) for s in x.getchildren()) for x in seriesEt)
        return allSeries
@@ -589,19 +610,17 @@ class TVRage:
        already been grabbed), or grabs all episodes and returns
        the correct SID.
        """
        sid = set()
        if name in self.corrections:
            log().debug('Correcting %s to %s' % (name, self.corrections[name]) )
            sid = self.corrections[name]
            return self.corrections[name]
        else:
            log().debug('Getting show %s' % (name))
            selected_series = self._getSeries( name )
            selected_series = self._getSeries(name)
            if isinstance(selected_series, dict):
                selected_series = [selected_series]
            [sid.add(int(x['id'])) for x in selected_series if self._getShowData(int(x['id']), seriesSearch=True)]
            [self.corrections.update({x['seriesname']:int(x['id'])}) for x in selected_series]
            return sid
            sids = list(int(x['id']) for x in selected_series if self._getShowData(int(x['id']), seriesSearch=True))
            self.corrections.update(dict((x['seriesname'], int(x['id'])) for x in selected_series))
            return sids
    def __getitem__(self, key):
        """Handles tvrage_instance['seriesname'] calls.
@@ -615,7 +634,7 @@ class TVRage:
        key = key.lower() # make key lower case
        sids = self._nameToSid(key)
        return [self.shows[sid] for sid in sids]
        return list(self.shows[sid] for sid in sids)
    def __repr__(self):
        return str(self.shows)

sickbeard/metadata/generic.py (4 changed lines)

@@ -875,6 +875,10 @@ class GenericMetadata():
            return result
        # if we have no season banners then just finish
        if getattr(indexer_show_obj, '_banners', None) is None:
            return result
        # if we have no season banners then just finish
        if 'season' not in indexer_show_obj['_banners'] or 'seasonwide' not in indexer_show_obj['_banners']['season']:
            return result
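
The added check uses getattr with a default so that indexer show objects that never received banner data (common for TVRage results) make the method return early instead of raising AttributeError. A minimal sketch of the guard with a hypothetical show object, not the real indexer classes:

    class FakeShow(object):
        """Hypothetical indexer show object with no banner data attached."""
        pass

    show = FakeShow()

    # direct access would raise AttributeError:
    #     show._banners
    # the guarded lookup returns a default instead:
    banners = getattr(show, '_banners', None)

    if banners is None:
        print("no banners available, skipping season banner lookup")
    else:
        print("found banners: %s" % (banners,))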

sickbeard/tv.py (4 changed lines)

@@ -1424,13 +1424,13 @@ class TVEpisode(object):
        self.description = getattr(myEp, 'overview', "")
        firstaired = getattr(myEp, 'firstaired', None)
        if firstaired is None or firstaired is "0000-00-00":
        if firstaired is None or firstaired in "0000-00-00":
            firstaired = str(datetime.date.fromordinal(1))
        rawAirdate = [int(x) for x in firstaired.split("-")]
        try:
            self.airdate = datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2])
        except ValueError:
        except (ValueError, IndexError):
            logger.log(u"Malformed air date retrieved from " + sickbeard.indexerApi(
                self.indexer).name + " (" + self.show.name + " - " + str(season) + "x" + str(episode) + ")",
                logger.ERROR)
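
Catching IndexError alongside ValueError matters because a partially specified airdate, such as the year-only strings produced by the TVRage normalisation above, splits into fewer than three parts. A standalone illustration (to_airdate is a hypothetical helper, not the TVEpisode code; it reuses the same ordinal-1 fallback):

    import datetime

    def to_airdate(firstaired):
        rawAirdate = [int(x) for x in firstaired.split("-")]
        try:
            return datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2])
        except (ValueError, IndexError):
            return datetime.date.fromordinal(1)

    print(to_airdate("2013-05-17"))  # 2013-05-17
    print(to_airdate("2013"))        # 0001-01-01 (IndexError: only one part)
    print(to_airdate("2013-13-40"))  # 0001-01-01 (ValueError: impossible month/day)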

sickbeard/webserve.py (9 changed lines)

@@ -29,7 +29,7 @@ import random
import locale
import logging
import itertools
import string
import operator
from Cheetah.Template import Template
import cherrypy.lib
@@ -1991,13 +1991,14 @@ class NewHomeAddShows:
                    search = [search]
                # add search results
                results += [[sickbeard.indexerApi(indexer).name, int(sickbeard.indexerApi(indexer).config['id']),
                results += list([sickbeard.indexerApi(indexer).name, int(sickbeard.indexerApi(indexer).config['id']),
                             sickbeard.indexerApi(indexer).config["show_url"], int(x['id']), x['seriesname'],
                             x['firstaired']] for x in search if x['firstaired']]
                             x['firstaired']] for x in search if x['firstaired'])
            except:
                continue
        # remove duplicates
        # remove duplicates and sort by firstaired
        results = sorted(results, reverse=True, key=operator.itemgetter(5))
        results = list(results for results, _ in itertools.groupby(results))
        lang_id = sickbeard.indexerApi().config['langabbv_to_id'][lang]
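
The last two changed lines are a common idiom for de-duplicating rows that are lists (and therefore unhashable, so a set cannot be used): sort so equal rows end up adjacent, then keep one row per itertools.groupby group. A self-contained example with made-up rows shaped like the ones webserve.py builds:

    import itertools
    import operator

    results = [
        ["TVDB", 1, "http://example/", 101, "Show A", "2014-01-01"],
        ["TVRage", 2, "http://example/", 101, "Show A", "2013-06-15"],
        ["TVDB", 1, "http://example/", 101, "Show A", "2014-01-01"],  # duplicate
    ]

    # sort newest-first on the firstaired column (index 5)...
    results = sorted(results, reverse=True, key=operator.itemgetter(5))
    # ...then drop adjacent duplicates; groupby only merges neighbouring equal rows
    results = list(result for result, _ in itertools.groupby(results))

    for row in results:
        print(row)
    # ['TVDB', 1, 'http://example/', 101, 'Show A', '2014-01-01']
    # ['TVRage', 2, 'http://example/', 101, 'Show A', '2013-06-15']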
