
Add timeframe support to show_updates (None, "day", "week", "month").

Change replace the 'episodes' embed with the new 'episodeswithspecials' embed.
Add filtering of tvmaze images by aspect ratio.
Add new height and width attributes from the tvmaze api and use them in tvmaze_api to pick preferred show images.
Add new averageRuntime field to the tvmaze lib.
Change refactor how data is fetched for the coming episodes views.
Add new parameters to the overridden _fake_specify_ep in unit tests.
Fix coming episodes webapi.
Change filter unneeded data from the coming episodes responses.
Add verified source tracking to NewIdDict.
Add show search support for indexermapper to the imdb API.
Add tmdb_api.
Add trakt show search cache.
Change split id and name searches in indexermapper.
Change indexermapper to use TVInfoAPI sources (tvmaze); see the usage sketch after this list.
Change prevent an already queued backlog search from being added twice when adding a show.
Change add a check that a show is not already in show_list when adding shows.
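A minimal sketch (not part of the diff) of how indexermapper now drives searches through TVInfoAPI, using names from the diffs below; the imdb id and show name are hypothetical:

import sickbeard
from sickbeard import classes
from sickbeard.indexers.indexer_config import TVINFO_IMDB, TVINFO_TVMAZE

# build a TV info client with the new per-search result cache enabled
tvinfo_config = sickbeard.TVInfoAPI(TVINFO_TVMAZE).api_params.copy()
tvinfo_config['cache_search'] = True
tvinfo_config['custom_ui'] = classes.AllShowInfosNoFilterListUI
t = sickbeard.TVInfoAPI(TVINFO_TVMAZE).setup(**tvinfo_config)

# id and name searches are now split: try known ids first, then the name
found = t.search_show(ids={TVINFO_IMDB: 944947})  # hypothetical id
if not found:
    found = t.search_show(name='Example Show')    # hypothetical name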
tags/release_0.25.1
Prinz23 authored 4 years ago, committed by JackDandy
commit ed8d7d393b
14 changed files:
1. lib/imdb_api/imdb_api.py (63)
2. lib/libtrakt/indexerapiinterface.py (47)
3. lib/pytvmaze/exceptions.py (5)
4. lib/pytvmaze/tvmaze.py (25)
5. lib/tmdb_api/tmdb_api.py (73)
6. lib/tvdb_api/tvdb_api.py (32)
7. lib/tvinfo_base/base.py (38)
8. lib/tvmaze_api/tvmaze_api.py (58)
9. sickbeard/indexermapper.py (465)
10. sickbeard/search_backlog.py (15)
11. sickbeard/show_queue.py (8)
12. sickbeard/webapi.py (112)
13. sickbeard/webserve.py (38)
14. tests/test_lib.py (2)

lib/imdb_api/imdb_api.py (63)

@@ -15,7 +15,8 @@ from bs4_parser import BS4Parser
from lib import imdbpie
from lib.tvinfo_base.exceptions import BaseTVinfoShownotfound
from lib.tvinfo_base import TVInfoBase, TVINFO_TRAKT, TVINFO_TMDB, TVINFO_TVDB, TVINFO_TVRAGE, TVINFO_IMDB, \
Person, PersonGenders, TVINFO_TWITTER, TVINFO_FACEBOOK, TVINFO_WIKIPEDIA, TVINFO_INSTAGRAM, Character, TVInfoShow
Person, PersonGenders, TVINFO_TWITTER, TVINFO_FACEBOOK, TVINFO_WIKIPEDIA, TVINFO_INSTAGRAM, Character, TVInfoShow, \
TVInfoIDs
from sg_helpers import get_url, try_int
from lib.dateutil.parser import parser
@@ -32,12 +33,72 @@ log.addHandler(logging.NullHandler())
class IMDbIndexer(TVInfoBase):
# supported_id_searches = [TVINFO_IMDB]
supported_person_id_searches = [TVINFO_IMDB]
supported_id_searches = [TVINFO_IMDB]
# noinspection PyUnusedLocal
# noinspection PyDefaultArgument
def __init__(self, *args, **kwargs):
super(IMDbIndexer, self).__init__(*args, **kwargs)
def search(self, series):
# type: (AnyStr) -> List
"""This searches for the series name
and returns the result list
"""
result = []
cache_name_key = 's-title-%s' % series
is_none, shows = self._get_cache_entry(cache_name_key)
if not self.config.get('cache_search') or (None is shows and not is_none):
try:
result = imdbpie.Imdb().search_for_title(series)
except (BaseException, Exception):
pass
self._set_cache_entry(cache_name_key, result, expire=self.search_cache_expire)
else:
result = shows
return result
def _search_show(self, name=None, ids=None, **kwargs):
# type: (AnyStr, Dict[integer_types, integer_types], Optional[Any]) -> List[TVInfoShow]
"""This searches IMDB for the series name,
"""
def _make_result_dict(s):
imdb_id = try_int(re.search(r'tt(\d+)', s.get('id') or s.get('imdb_id')).group(1), None)
tvs = TVInfoShow()
tvs.seriesname, tvs.id, tvs.firstaired, tvs.genre_list, tvs.overview, tvs.poster, tvs.ids = \
s['title'], imdb_id, s.get('releaseDetails', {}).get('date') or s.get('year'), s.get('genres'), \
s.get('plot', {}).get('outline', {}).get('text'), s.get('image') and s['image'].get('url'), \
TVInfoIDs(imdb=imdb_id)
return tvs
results = []
if ids:
for t, p in iteritems(ids):
if t in self.supported_id_searches:
if t == TVINFO_IMDB:
cache_id_key = 's-id-%s-%s' % (TVINFO_IMDB, p)
is_none, shows = self._get_cache_entry(cache_id_key)
if not self.config.get('cache_search') or (None is shows and not is_none):
try:
show = imdbpie.Imdb().get_title_auxiliary('tt%07d' % p)
except (BaseException, Exception):
continue
self._set_cache_entry(cache_id_key, show, expire=self.search_cache_expire)
else:
show = shows
if show:
results.extend([_make_result_dict(show)])
if name:
for n in ([name], name)[isinstance(name, list)]:
try:
shows = self.search(n)
results.extend([_make_result_dict(s) for s in shows])
except (BaseException, Exception) as e:
log.debug('Error searching for show: %s' % ex(e))
seen = set()
results = [seen.add(r.id) or r for r in results if r.id not in seen]
return results
@staticmethod
def _convert_person(person_obj, filmography=None, bio=None):
if isinstance(person_obj, dict) and 'imdb_id' in person_obj:
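The seen-set comprehension that ends _search_show here (and recurs in the other indexers below) is an order-preserving de-duplication by id; a standalone sketch of the idiom:

results = [{'id': 1}, {'id': 2}, {'id': 1}]
seen = set()
# set.add() returns None (falsy), so `seen.add(...) or r` records the id
# and still yields the item; later duplicates fail the `not in seen` test
deduped = [seen.add(r['id']) or r for r in results if r['id'] not in seen]
assert deduped == [{'id': 1}, {'id': 2}]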

lib/libtrakt/indexerapiinterface.py (47)

@@ -55,6 +55,11 @@ class TraktSearchTypes(object):
pass
map_id_search = {TVINFO_TVDB: TraktSearchTypes.tvdb_id, TVINFO_IMDB: TraktSearchTypes.imdb_id,
TVINFO_TMDB: TraktSearchTypes.tmdb_id, TVINFO_TRAKT: TraktSearchTypes.trakt_id,
TVINFO_TRAKT_SLUG: TraktSearchTypes.trakt_slug}
class TraktResultTypes(object):
show = 'show'
episode = 'episode'
@@ -102,6 +107,7 @@ class TraktIndexer(TVInfoBase):
s['id'] = s['ids']['trakt']
s['ids'] = TVInfoIDs(
trakt=s['ids']['trakt'], tvdb=s['ids']['tvdb'], tmdb=s['ids']['tmdb'],
rage=s['ids']['tvrage'],
imdb=s['ids']['imdb'] and try_int(s['ids']['imdb'].replace('tt', ''), None))
results.append(s)
except (BaseException, Exception) as e:
@@ -117,31 +123,17 @@ class TraktIndexer(TVInfoBase):
if ids:
for t, p in iteritems(ids):
if t in self.supported_id_searches:
if t == TVINFO_TVDB:
try:
show = self.search(p, search_type=TraktSearchTypes.tvdb_id)
except (BaseException, Exception):
continue
elif t == TVINFO_IMDB:
try:
show = self.search(p, search_type=TraktSearchTypes.imdb_id)
except (BaseException, Exception):
continue
elif t == TVINFO_TMDB:
try:
show = self.search(p, search_type=TraktSearchTypes.tmdb_id)
except (BaseException, Exception):
continue
elif t == TVINFO_TRAKT:
try:
show = self.search(p, search_type=TraktSearchTypes.trakt_id)
except (BaseException, Exception):
continue
elif t == TVINFO_TRAKT_SLUG:
if t in (TVINFO_TVDB, TVINFO_IMDB, TVINFO_TMDB, TVINFO_TRAKT, TVINFO_TRAKT_SLUG):
cache_id_key = 's-id-%s-%s' % (t, p)
is_none, shows = self._get_cache_entry(cache_id_key)
if not self.config.get('cache_search') or (None is shows and not is_none):
try:
show = self.search(p, search_type=TraktSearchTypes.trakt_slug)
show = self.search(p, search_type=map_id_search[t])
except (BaseException, Exception):
continue
self._set_cache_entry(cache_id_key, show, expire=self.search_cache_expire)
else:
show = shows
else:
continue
self._make_result_obj(show, results)
@@ -149,7 +141,16 @@ class TraktIndexer(TVInfoBase):
names = ([name], name)[isinstance(name, list)]
len_names = len(names)
for i, n in enumerate(names, 1):
cache_name_key = 's-name-%s' % n
is_none, shows = self._get_cache_entry(cache_name_key)
if not self.config.get('cache_search') or (None is shows and not is_none):
try:
all_series = self.search(n)
self._set_cache_entry(cache_name_key, all_series, expire=self.search_cache_expire)
except (BaseException, Exception):
all_series = []
else:
all_series = shows
if not isinstance(all_series, list):
all_series = [all_series]
@@ -167,6 +168,8 @@ class TraktIndexer(TVInfoBase):
else:
self._make_result_obj(all_series, results)
seen = set()
results = [seen.add(r['id']) or r for r in results if r['id'] not in seen]
return results
@staticmethod
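With map_id_search in place, the former per-id-type if/elif ladder collapses into one table lookup; a self-contained analogue of the pattern (stand-in constants, dummy search callable):

TVINFO_TVDB, TVINFO_IMDB, TVINFO_TMDB = 1, 10, 11  # stand-in values

class TraktSearchTypes(object):
    tvdb_id, imdb_id, tmdb_id = 'tvdb', 'imdb', 'tmdb'

map_id_search = {TVINFO_TVDB: TraktSearchTypes.tvdb_id,
                 TVINFO_IMDB: TraktSearchTypes.imdb_id,
                 TVINFO_TMDB: TraktSearchTypes.tmdb_id}

def search_by_ids(ids, search):
    # one code path handles every supported id space
    return [search(p, search_type=map_id_search[t])
            for t, p in ids.items() if t in map_id_search]

print(search_by_ids({TVINFO_IMDB: 944947},
                    lambda p, search_type: (search_type, p)))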

lib/pytvmaze/exceptions.py (5)

@@ -114,6 +114,11 @@ class InvalidEmbedValue(BaseError):
pass
class InvalidTimeFrame(BaseError):
""" used for show updates"""
pass
class NetworkNotFollowed(BaseError):
pass

lib/pytvmaze/tvmaze.py (25)

@@ -37,6 +37,7 @@ class Show(object):
else:
self.web_channel = None
self.runtime = data.get('runtime') # type: Optional[integer_types]
self.average_runtime = data.get('averageRuntime')
self.type = data.get('type') # type: Optional[AnyStr]
self.id = data.get('id') # type: integer_types
self.maze_id = self.id # type: integer_types
@@ -177,11 +178,14 @@ class Show(object):
def populate(self, data):
embedded = data.get('_embedded')
if embedded:
if embedded.get('episodes'):
episodes = embedded.get('episodeswithspecials') or embedded.get('episodes')
if episodes:
self.__episodes = []
for episode in embedded.get('episodes'):
for episode in episodes:
self.__episodes.append(Episode(episode, self))
self._set_season_numbers()
if 'episodeswithspecials' in embedded:
self._specials_loaded = True
if embedded.get('cast'):
self._cast = Cast(embedded.get('cast'))
@@ -901,7 +905,7 @@
show_web_channel: Show Web Channel (like Netflix, Amazon, etc.)
show_language: Show language
show_country: Show country
embed: embed parameter to include additional data. Currently 'episodes' and 'cast' are supported
embed: embed parameter to include additional data. Currently 'episodes', 'cast', 'episodeswithspecials' are supported
"""
errors = []
if not (maze_id or tvdb_id or tvrage_id or imdb_id or show_name):
@@ -1332,7 +1336,7 @@ def get_full_schedule():
def show_main_info(maze_id, embed=None):
url = _embed_url(endpoints.show_main_info.format(maze_id), embed,
[None, 'episodes', 'cast', 'previousepisode', 'nextepisode'], '?')
[None, 'episodes', 'cast', 'previousepisode', 'nextepisode', 'episodeswithspecials'], '?')
q = TVmaze.endpoint_standard_get(url)
if q:
return Show(q)
@@ -1464,9 +1468,16 @@ def get_show_images(maze_id, raise_error=True):
return []
def show_updates():
# type: (...) -> Updates
url = endpoints.show_updates
def show_updates(since=None):
# type: (AnyStr) -> Updates
"""
returns all or in given timeframe changed shows
:param since: None, "day", "week", "month"
"""
if since not in ('day', 'week', 'month', None):
raise InvalidTimeFrame('Only supported are: None, "day", "week", "month"')
url = '%s%s' % (endpoints.show_updates, ('', '?since=%s' % since)[None is not since])
q = TVmaze.endpoint_standard_get(url)
if q:
return Updates(q)
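A usage sketch for the new timeframe parameter (not part of the diff; import paths assumed from this repo layout):

from lib.pytvmaze import tvmaze
from lib.pytvmaze.exceptions import InvalidTimeFrame

weekly = tvmaze.show_updates(since='week')  # only shows changed in the last week
everything = tvmaze.show_updates()          # no timeframe: the full update list
try:
    tvmaze.show_updates(since='year')
except InvalidTimeFrame:
    pass  # only None, 'day', 'week', 'month' are accepted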

lib/tmdb_api/tmdb_api.py (73)

@@ -9,6 +9,7 @@ __api_version__ = '1.0.0'
import json
import logging
import datetime
import re
from six import iteritems
from sg_helpers import get_url, try_int
@@ -157,8 +158,8 @@ def get_tmdb_constants():
class TmdbIndexer(TVInfoBase):
API_KEY = tmdbsimple.API_KEY
# supported_id_searches = [TVINFO_TVDB, TVINFO_IMDB, TVINFO_TMDB, TVINFO_TRAKT]
supported_person_id_searches = [TVINFO_TMDB, TVINFO_IMDB, TVINFO_TWITTER, TVINFO_INSTAGRAM, TVINFO_FACEBOOK]
supported_id_searches = [TVINFO_TMDB, TVINFO_IMDB, TVINFO_TVDB]
# noinspection PyUnusedLocal
# noinspection PyDefaultArgument
@@ -169,6 +170,76 @@ class TmdbIndexer(TVInfoBase):
self.size_map = response.get('size_map')
self.tv_genres = response.get('genres')
def _search_show(self, name=None, ids=None, **kwargs):
# type: (AnyStr, Dict[integer_types, integer_types], Optional[Any]) -> List[TVInfoShow]
"""This searches TMDB for the series name,
"""
def _make_result_dict(s):
tvs = TVInfoShow()
tvs.seriesname, tvs.id, tvs.firstaired, tvs.genre_list, tvs.overview, tvs.poster, tvs.ids = \
s['name'], s['id'], s.get('first_air_date'), \
[self.tv_genres.get(g) for g in s.get('genre_ids') or []], \
s.get('overview'), s.get('poster_path') and '%s%s%s' % (
self.img_base_url, self.size_map[TVInfoImageType.poster][TVInfoImageSize.original],
s.get('poster_path')), \
TVInfoIDs(tvdb=s.get('external_ids') and s['external_ids'].get('tvdb_id'),
tmdb=s['id'], rage=s.get('external_ids') and s['external_ids'].get('tvrage_id'),
imdb=s.get('external_ids') and s['external_ids'].get('imdb_id') and
try_int(s['external_ids'].get('imdb_id', '').replace('tt', ''), None))
return tvs
results = []
if ids:
for t, p in iteritems(ids):
if t in self.supported_id_searches:
if t == TVINFO_TMDB:
cache_id_key = 's-id-%s-%s' % (TVINFO_TMDB, p)
is_none, shows = self._get_cache_entry(cache_id_key)
if not self.config.get('cache_search') or (None is shows and not is_none):
try:
show = tmdbsimple.TV(id=p).info(append_to_response='external_ids')
except (BaseException, Exception):
continue
self._set_cache_entry(cache_id_key, show, expire=self.search_cache_expire)
else:
show = shows
if show:
results.extend([_make_result_dict(show)])
elif t in (TVINFO_IMDB, TVINFO_TVDB):
cache_id_key = 's-id-%s-%s' % (t, p)
is_none, shows = self._get_cache_entry(cache_id_key)
if not self.config.get('cache_search') or (None is shows and not is_none):
try:
show = tmdbsimple.Find(id=(p, 'tt%07d' % p)[t == TVINFO_IMDB]).info(
external_source=id_map[t])
if show.get('tv_results') and 1 == len(show['tv_results']):
show = tmdbsimple.TV(id=show['tv_results'][0]['id']).info(
append_to_response='external_ids')
except (BaseException, Exception):
continue
self._set_cache_entry(cache_id_key, show, expire=self.search_cache_expire)
else:
show = shows
if show:
results.extend([_make_result_dict(s)
for s in show.get('tv_results') or (show.get('id') and [show]) or []])
if name:
for n in ([name], name)[isinstance(name, list)]:
cache_name_key = 's-name-%s' % n
is_none, shows = self._get_cache_entry(cache_name_key)
if not self.config.get('cache_search') or (None is shows and not is_none):
try:
shows = tmdbsimple.Search().tv(query=n)
self._set_cache_entry(cache_name_key, shows, expire=self.search_cache_expire)
results.extend([_make_result_dict(s) for s in shows.get('results') or []])
except (BaseException, Exception) as e:
log.debug('Error searching for show: %s' % ex(e))
else:
results.extend([_make_result_dict(s) for s in (shows and shows.get('results')) or []])
seen = set()
results = [seen.add(r.id) or r for r in results if r.id not in seen]
return results
def _convert_person_obj(self, person_obj):
gender = PersonGenders.tmdb_map.get(person_obj.get('gender'), PersonGenders.unknown)
try:
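The external-id branch above chains tmdbsimple's find and TV endpoints; a minimal sketch of that call order (API key and ids are placeholders):

import tmdbsimple

tmdbsimple.API_KEY = '<api key>'  # placeholder
# resolve an external id (imdb here) to a TMDB record
found = tmdbsimple.Find(id='tt0944947').info(external_source='imdb_id')
if found.get('tv_results'):
    # then fetch the full show with its external ids appended
    show = tmdbsimple.TV(id=found['tv_results'][0]['id']).info(
        append_to_response='external_ids')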

lib/tvdb_api/tvdb_api.py (32)

@@ -343,31 +343,51 @@ class Tvdb(TVInfoBase):
results = []
if ids:
if ids.get(TVINFO_TVDB):
cache_id_key = 's-id-%s-%s' % (TVINFO_TVDB, ids[TVINFO_TVDB])
is_none, shows = self._get_cache_entry(cache_id_key)
if not self.config.get('cache_search') or (None is shows and not is_none):
try:
d_m = self._get_show_data(ids.get(TVINFO_TVDB), self.config['language'], direct_data=True)
self._set_cache_entry(cache_id_key, d_m, expire=self.search_cache_expire)
except (BaseException, Exception):
d_m = None
else:
d_m = shows
if d_m:
results = map_list(map_data, [d_m['data']])
except (BaseException, Exception):
pass
if ids.get(TVINFO_TVDB_SLUG):
cache_id_key = 's-id-%s-%s' % (TVINFO_TVDB, ids[TVINFO_TVDB_SLUG])
is_none, shows = self._get_cache_entry(cache_id_key)
if not self.config.get('cache_search') or (None is shows and not is_none):
try:
d_m = self.get_series(ids.get(TVINFO_TVDB_SLUG).replace('-', ' '))
self._set_cache_entry(cache_id_key, d_m, expire=self.search_cache_expire)
except (BaseException, Exception):
d_m = None
else:
d_m = shows
if d_m:
for r in d_m:
if ids.get(TVINFO_TVDB_SLUG) == r['slug']:
results = map_list(map_data, [r])
break
except (BaseException, Exception):
pass
if name:
for n in ([name], name)[isinstance(name, list)]:
cache_name_key = 's-name-%s' % n
is_none, shows = self._get_cache_entry(cache_name_key)
if not self.config.get('cache_search') or (None is shows and not is_none):
try:
r = self.get_series(n)
self._set_cache_entry(cache_name_key, r, expire=self.search_cache_expire)
except (BaseException, Exception):
r = None
else:
r = shows
if r:
results.extend(map_list(map_data, r))
except (BaseException, Exception):
pass
seen = set()
results = [seen.add(r['id']) or r for r in results if r['id'] not in seen]
return results
def get_new_token(self):
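Each indexer above now wraps its remote lookups in the same read-through cache; distilled into one hedged helper (not actual repo code, names from TVInfoBase below):

def cached_search(tvinfo, cache_key, remote_search):
    # a stored None is a remembered negative result, flagged by is_none
    is_none, cached = tvinfo._get_cache_entry(cache_key)
    if not tvinfo.config.get('cache_search') or (None is cached and not is_none):
        try:
            result = remote_search()
        except (BaseException, Exception):
            result = None
        tvinfo._set_cache_entry(cache_key, result, expire=tvinfo.search_cache_expire)
    else:
        result = cached
    return result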

lib/tvinfo_base/base.py (38)

@@ -788,6 +788,7 @@ class TVInfoBase(object):
self._cachedir = kwargs.get('diskcache_dir') # type: AnyStr
self.diskcache = diskcache.Cache(directory=self._cachedir, disk_pickle_protocol=2) # type: diskcache.Cache
self.cache_expire = 60 * 60 * 18 # type: integer_types
self.search_cache_expire = 60 * 15 # type: integer_types
self.config = {
'apikey': '',
'debug_enabled': False,
@@ -805,6 +806,7 @@
'seasonwides_enabled': seasonwides,
'fanart_enabled': fanart,
'actors_enabled': actors,
'cache_search': kwargs.get('cache_search'),
} # type: Dict[AnyStr, Any]
def _must_load_data(self, sid, load_episodes, banners, posters, seasons, seasonwides, fanart, actors):
@@ -895,11 +897,11 @@
log.error('Error getting %s from cache: %s' % (key, ex(e)))
return False, None
def _set_cache_entry(self, key, value, tag=None):
# type: (Any, Any, AnyStr) -> None
def _set_cache_entry(self, key, value, tag=None, expire=None):
# type: (Any, Any, AnyStr, int) -> None
try:
with self.diskcache as dc:
dc.set(key, (value, 'None')[None is value], expire=self.cache_expire, tag=tag)
dc.set(key, (value, 'None')[None is value], expire=expire or self.cache_expire, tag=tag)
except (BaseException, Exception) as e:
log.error('Error setting %s to cache: %s' % (key, ex(e)))
@@ -1178,6 +1180,36 @@ class TVInfoBase(object):
"""
return []
@staticmethod
def _which_type(img_width, img_ratio):
# type: (integer_types, Union[integer_types, float]) -> Optional[int]
"""
:param img_width:
:param img_ratio:
"""
msg_success = 'Treating image as %s with extracted aspect ratio'
# most posters are around 0.68 width/height ratio (eg. 680/1000)
if 0.55 <= img_ratio <= 0.8:
log.debug(msg_success % 'poster')
return TVInfoImageType.poster
# most banners are around 5.4 width/height ratio (eg. 758/140)
if 5 <= img_ratio <= 6:
log.debug(msg_success % 'banner')
return TVInfoImageType.banner
# most fan art are around 1.7 width/height ratio (eg. 1280/720 or 1920/1080)
if 1.7 <= img_ratio <= 1.8:
if 500 < img_width:
log.debug(msg_success % 'fanart')
return TVInfoImageType.fanart
log.warning(u'Skipped image with fanart aspect ratio but less than 500 pixels wide')
else:
log.warning(u'Skipped image with useless ratio %s' % img_ratio)
def __str__(self):
return '<TVInfo(%s) (containing: %s)>' % (self.__class__.__name__, text_type(self.shows))
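Worked numbers for the ratio thresholds in _which_type above:

# poster: width/height in 0.55..0.8, e.g. 680x1000
assert 0.55 <= 680 / 1000.0 <= 0.8
# banner: ratio in 5..6, e.g. 758x140
assert 5 <= 758 / 140.0 <= 6
# fanart: ratio in 1.7..1.8 and more than 500px wide, e.g. 1920x1080
assert 1.7 <= 1920 / 1080.0 <= 1.8 and 1920 > 500
# a 320x180 image matches the fanart ratio but is skipped as too narrow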

lib/tvmaze_api/tvmaze_api.py (58)

@@ -81,7 +81,7 @@ show_map = {
# 'network_country': '',
# 'network_country_code': '',
# 'network_is_stream': '',
'runtime': 'runtime',
# 'runtime': 'runtime',
'language': 'language',
'official_site': 'official_site',
# 'imdb_id': '',
@@ -147,22 +147,36 @@ class TvMaze(TVInfoBase):
if ids:
for t, p in iteritems(ids):
if t in self.supported_id_searches:
cache_id_key = 's-id-%s-%s' % (t, ids[t])
is_none, shows = self._get_cache_entry(cache_id_key)
if t == TVINFO_TVDB:
if not self.config.get('cache_search') or (None is shows and not is_none):
try:
show = tvmaze.lookup_tvdb(p)
self._set_cache_entry(cache_id_key, show, expire=self.search_cache_expire)
except (BaseException, Exception):
continue
else:
show = shows
elif t == TVINFO_IMDB:
if not self.config.get('cache_search') or (None is shows and not is_none):
try:
show = tvmaze.lookup_imdb((p, 'tt%07d' % p)[not str(p).startswith('tt')])
self._set_cache_entry(cache_id_key, show, expire=self.search_cache_expire)
except (BaseException, Exception):
continue
else:
show = shows
elif t == TVINFO_TVMAZE:
if not self.config.get('cache_search') or (None is shows and not is_none):
try:
show = tvm_obj.get_show(maze_id=p)
self._set_cache_entry(cache_id_key, show, expire=self.search_cache_expire)
except (BaseException, Exception):
continue
else:
show = shows
else:
continue
if show:
try:
@@ -172,11 +186,18 @@
log.debug('Error creating result dict: %s' % ex(e))
if name:
for n in ([name], name)[isinstance(name, list)]:
cache_name_key = 's-name-%s' % n
is_none, shows = self._get_cache_entry(cache_name_key)
if not self.config.get('cache_search') or (None is shows and not is_none):
try:
shows = tvmaze.show_search(n)
results = [_make_result_dict(s) for s in shows]
except (BaseException, Exception) as e:
log.debug('Error searching for show: %s' % ex(e))
continue
results.extend([_make_result_dict(s) for s in shows or []])
seen = set()
results = [seen.add(r['id']) or r for r in results if r['id'] not in seen]
return results
def _set_episode(self, sid, ep_obj):
@@ -213,7 +234,7 @@ class TvMaze(TVInfoBase):
log.debug('Getting all series data for %s' % sid)
try:
self.show_not_found = False
show_data = tvm_obj.get_show(maze_id=sid, embed='cast%s' % ('', ',episodes')[get_ep_info])
show_data = tvm_obj.get_show(maze_id=sid, embed='cast%s' % ('', ',episodeswithspecials')[get_ep_info])
except tvmaze.ShowNotFound:
self.show_not_found = True
return False
@@ -225,7 +246,10 @@
for k, v in iteritems(show_obj):
if k not in ('cast', 'crew', 'images'):
show_obj[k] = getattr(show_data, show_map.get(k, k), show_obj[k])
show_obj['runtime'] = show_data.average_runtime or show_data.runtime
p_set = False
if show_data.image:
p_set = True
show_obj['poster'] = show_data.image.get('original')
show_obj['poster_thumb'] = show_data.image.get('medium')
@@ -239,19 +263,36 @@
self.shows[sid].fanart_loaded = True
for img in show_data.images:
img_type = img_type_map.get(img.type, TVInfoImageType.other)
img_width, img_height = img.resolutions['original'].get('width'), \
img.resolutions['original'].get('height')
img_ar = img_width and img_height and float(img_width) / float(img_height)
img_ar_type = self._which_type(img_width, img_ar)
if TVInfoImageType.poster == img_type and img_ar and img_ar_type != img_type and \
show_obj['poster'] == img.resolutions.get('original')['url']:
p_set = False
show_obj['poster'] = None
show_obj['poster_thumb'] = None
img_type = (TVInfoImageType.other, img_type)[
not img_ar or img_ar_type == img_type or
img_type not in (TVInfoImageType.banner, TVInfoImageType.poster, TVInfoImageType.fanart)]
img_src = {}
for res, img_url in iteritems(img.resolutions):
img_size = img_size_map.get(res)
if img_size:
img_src[img_size] = img_url.get('url')
show_obj['images'].setdefault(img_type, []).append(
TVInfoImage(image_type=img_type, sizes=img_src, img_id=img.id, main_image=img.main,
type_str=img.type))
if not b_set and 'banner' == img.type:
TVInfoImage(
image_type=img_type, sizes=img_src, img_id=img.id, main_image=img.main,
type_str=img.type, width=img_width, height=img_height, aspect_ratio=img_ar))
if not p_set and TVInfoImageType.poster == img_type:
p_set = True
show_obj['poster'] = img.resolutions.get('original')['url']
show_obj['poster_thumb'] = img.resolutions.get('original')['url']
elif not b_set and 'banner' == img.type and TVInfoImageType.banner == img_type:
b_set = True
show_obj['banner'] = img.resolutions.get('original')['url']
show_obj['banner_thumb'] = img.resolutions.get('medium')['url']
elif not f_set and 'background' == img.type:
elif not f_set and 'background' == img.type and TVInfoImageType.fanart == img_type:
f_set = True
show_obj['fanart'] = img.resolutions.get('original')['url']
@@ -367,7 +408,8 @@ class TvMaze(TVInfoBase):
if None is show_data:
try:
self.show_not_found = False
show_data = tvm_obj.get_show(maze_id=sid, embed='cast%s' % ('', ',episodes')[get_ep_info])
show_data = tvm_obj.get_show(maze_id=sid, embed='cast%s' % ('', ',episodeswithspecials')[
get_ep_info])
except tvmaze.ShowNotFound:
self.show_not_found = True
return False
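With the embed switched to 'episodeswithspecials', one request now returns specials alongside regular episodes; a call sketch (maze_id hypothetical, constructor arguments assumed optional):

from lib.pytvmaze import tvmaze

tvm_obj = tvmaze.TVmaze()
# cast plus the full episode list, specials included, in a single fetch
show_data = tvm_obj.get_show(maze_id=82, embed='cast,episodeswithspecials')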

sickbeard/indexermapper.py (465)

@@ -14,52 +14,46 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from collections import OrderedDict
from time import sleep
import datetime
import os
import re
import traceback
from . import db, logger
from .helpers import get_url, try_int
from .indexers.indexer_config import TVINFO_IMDB, TVINFO_TMDB, TVINFO_TRAKT, TVINFO_TVDB, TVINFO_TVMAZE, TVINFO_TVRAGE
from . import classes, db, logger
from .helpers import try_int
from .indexers.indexer_config import TVINFO_IMDB, TVINFO_TMDB, TVINFO_TRAKT, TVINFO_TVDB, TVINFO_TVMAZE
# noinspection PyPep8Naming
import encodingKludge as ek
import requests
import sickbeard
from exceptions_helper import ConnectionSkipException
# noinspection PyPep8Naming
from lib import tmdbsimple as TMDB
from lib.dateutil.parser import parse
from lib.imdbpie import Imdb
from libtrakt import TraktAPI
from libtrakt.exceptions import TraktAuthException, TraktException
from _23 import unidecode, urlencode
from six import iteritems, iterkeys, string_types, PY2
from _23 import unidecode
from six import iteritems, moves, string_types, PY2
# noinspection PyUnreachableCode
if False:
# noinspection PyUnresolvedReferences
from typing import AnyStr, Dict, List, Optional
from typing import Any, AnyStr, Dict, List, Optional, Tuple, Union
from six import integer_types
from sickbeard.tv import TVShow
tv_maze_retry_wait = 10
defunct_indexer = []
indexer_list = []
tmdb_ids = {TVINFO_TVDB: 'tvdb_id', TVINFO_IMDB: 'imdb_id', TVINFO_TVRAGE: 'tvrage_id'}
class NewIdDict(dict):
def __init__(self, *args, **kwargs):
tv_src = kwargs.pop('tv_src')
super(NewIdDict, self).__init__(*args, **kwargs)
self.verified = {s: (False, True)[s == tv_src] for s in indexer_list}
@staticmethod
def set_value(value, old_value=None):
if old_value is MapStatus.MISMATCH or (0 < value and old_value not in [None, value] and 0 < old_value):
def set_value(self, value, old_value=None, tv_src=None, key=None):
# type: (Any, Any, int, int) -> Any
if (None is tv_src or tv_src != key) and old_value is MapStatus.MISMATCH or (
0 < value and old_value not in [None, value] and 0 < old_value):
return MapStatus.MISMATCH
if value and tv_src and tv_src == key:
self.verified[tv_src] = True
return value
@staticmethod
@@ -77,86 +71,86 @@ class NewIdDict(dict):
def __setitem__(self, key, value):
super(NewIdDict, self).__setitem__(key, self.set_value(value, self.get(key)))
def update(self, other=None, **kwargs):
def update(self, other=None, tv_src=None, **kwargs):
# type: (Dict[int, Any], int, Any) -> None
"""
updates dict with new ids
set MapStatus.MISMATCH if values mismatch, except if it's tv_src (this will be treated as verified source id)
:param other: new data dict
:param tv_src: verified tv src id
:param kwargs:
"""
if isinstance(other, dict):
other = {o: self.set_value(v, self.get(o)) for o, v in iteritems(other)}
other = {o: self.set_value(v, self.get(o), tv_src, o) for o, v in iteritems(other)}
super(NewIdDict, self).update(other, **kwargs)
class TvmazeDict(OrderedDict):
tvmaze_ids = {TVINFO_TVDB: 'thetvdb', TVINFO_IMDB: 'imdb', TVINFO_TVRAGE: 'tvrage'}
def __init__(self, *args, **kwds):
super(TvmazeDict, self).__init__(*args, **kwds)
def get_url(self, key):
if TVINFO_TVMAZE == key:
return '%sshows/%s' % (sickbeard.TVInfoAPI(TVINFO_TVMAZE).config['base_url'], self.tvmaze_ids[key])
return '%slookup/shows?%s=%s%s' % (sickbeard.TVInfoAPI(TVINFO_TVMAZE).config['base_url'],
self.tvmaze_ids[key], ('', 'tt')[key == TVINFO_IMDB],
(self[key], '%07d' % self[key])[key == TVINFO_IMDB])
class TraktDict(OrderedDict):
trakt_ids = {TVINFO_TVDB: 'tvdb', TVINFO_IMDB: 'imdb', TVINFO_TVRAGE: 'tvrage'}
def __init__(self, *args, **kwds):
super(TraktDict, self).__init__(*args, **kwds)
def get_url(self, key):
return 'search/%s/%s%s?type=show' % (self.trakt_ids[key], ('', 'tt')[key == TVINFO_IMDB],
(self[key], '%07d' % self[key])[key == TVINFO_IMDB])
# noinspection PyUnusedLocal
def tvmaze_record_hook(r, *args, **kwargs):
r.hook_called = True
# noinspection HttpUrlsUsage
if 301 == r.status_code and isinstance(r.headers.get('Location'), string_types) \
and r.headers.get('Location').startswith('http''://api.tvmaze'):
# noinspection HttpUrlsUsage
r.headers['Location'] = r.headers['Location'].replace('http://', 'https://')
return r
def get_missing_ids(show_ids, show_obj, tv_src):
# type: (Dict[int, integer_types], TVShow, int) -> Dict[int, integer_types]
"""
def get_tvmaze_data(count=0, *args, **kwargs):
res = None
count += 1
kwargs['hooks'] = {'response': tvmaze_record_hook}
if 3 >= count:
:param show_ids:
:param show_obj:
:param tv_src:
:return:
"""
try:
tvinfo_config = sickbeard.TVInfoAPI(tv_src).api_params.copy()
tvinfo_config['cache_search'] = True
tvinfo_config['custom_ui'] = classes.AllShowInfosNoFilterListUI
t = sickbeard.TVInfoAPI(tv_src).setup(**tvinfo_config)
show_name, f_date = None, None
if any(1 for k, v in iteritems(show_ids) if v and k in t.supported_id_searches):
try:
res = get_url(*args, **kwargs)
except requests.HTTPError as e:
# rate limit
if 429 == e.response.status_code:
sleep(tv_maze_retry_wait)
return get_tvmaze_data(*args, count=count, **kwargs)
found_shows = t.search_show(ids=show_ids)
res_count = len(found_shows or [])
if 1 < res_count:
show_name, f_date = get_show_name_date(show_obj)
for show in found_shows or []:
if 1 == res_count or confirm_show(f_date, show['firstaired'], show_name,
clean_show_name(show['seriesname'])):
return combine_new_ids(show_ids, show['ids'], tv_src)
except (BaseException, Exception):
pass
found_shows = t.search_show(name=clean_show_name(show_obj.name))
if not show_name:
show_name, f_date = get_show_name_date(show_obj)
for show in found_shows or []:
if confirm_show(f_date, show['firstaired'], show_name, clean_show_name(show['seriesname'])):
if any(v for k, v in iteritems(show['ids']) if tv_src != k and v):
f_show = [show]
else:
f_show = t.search_show(ids={tv_src: show['id']})
if f_show and 1 == len(f_show):
return combine_new_ids(show_ids, f_show[0]['ids'], tv_src)
except (BaseException, Exception):
pass
return res
return {}
def get_tvmaze_ids(url_tvmaze):
def confirm_show(premiere_date, shows_premiere, expected_name, show_name):
# type: (Optional[datetime.date], Optional[Union[AnyStr, datetime.date]], AnyStr, AnyStr) -> bool
"""
:param url_tvmaze: tvmaze url
:type url_tvmaze: TvmazeDict
:return:
:rtype: Dict
confirm show possible confirmations:
1. premiere dates are less then 2 days apart
2. show name is the same and premiere year is 1 year or less apart
:param premiere_date: expected show premiere date
:param shows_premiere: compare date
:param expected_name:
:param show_name:
"""
ids = {}
for url_key in iterkeys(url_tvmaze):
if any(t is None for t in (premiere_date, shows_premiere)):
return False
if isinstance(shows_premiere, string_types):
try:
res = get_tvmaze_data(url=url_tvmaze.get_url(url_key), parse_json=True, raise_status_code=True, timeout=120)
if res and 'externals' in res:
ids[TVINFO_IMDB] = try_int(str(res['externals'].get('imdb')).replace('tt', ''))
ids[TVINFO_TVDB] = res['externals'].get('thetvdb', 0)
ids[TVINFO_TVMAZE] = res.get('id', 0)
ids[TVINFO_TVRAGE] = res['externals'].get('tvrage', 0)
break
shows_premiere = parse(shows_premiere).date()
except (BaseException, Exception):
pass
return {k: v for k, v in iteritems(ids) if v not in (None, '', 0)}
return False
start_year = (shows_premiere and shows_premiere.year) or 0
return abs(premiere_date - shows_premiere) < datetime.timedelta(days=2) or (
expected_name == show_name and abs(premiere_date.year - start_year) <= 1)
def get_premieredate(show_obj):
@@ -189,136 +183,27 @@ def clean_show_name(showname):
return re.sub(r'[(\s]*(?:19|20)\d\d[)\s]*$', '', unidecode(showname))
def get_tvmaze_by_name(showname, premiere_date):
"""
:param showname: show name
:type showname: AnyStr
:param premiere_date: premiere date
:type premiere_date: datetime.date
:return:
:rtype: Dict
"""
ids = {}
try:
url = '%ssearch/shows?%s' % (sickbeard.TVInfoAPI(TVINFO_TVMAZE).config['base_url'],
urlencode({'q': clean_show_name(showname)}))
res = get_tvmaze_data(url=url, parse_json=True, raise_status_code=True, timeout=120)
if res:
for r in res:
if 'show' in r and 'premiered' in r['show'] and 'externals' in r['show']:
premiered = parse(r['show']['premiered'], fuzzy=True)
if abs(premiere_date - premiered.date()) < datetime.timedelta(days=2):
ids[TVINFO_IMDB] = try_int(str(r['show']['externals'].get('imdb')).replace('tt', ''))
ids[TVINFO_TVDB] = r['show']['externals'].get('thetvdb', 0)
ids[TVINFO_TVMAZE] = r['show'].get('id', 0)
ids[TVINFO_TVRAGE] = r['show']['externals'].get('tvrage', 0)
break
except (BaseException, Exception):
pass
return {k: v for k, v in iteritems(ids) if v not in (None, '', 0)}
def get_trakt_ids(url_trakt):
"""
:param url_trakt: trakt url
:type url_trakt: TraktDict
:return:
:rtype: Dict
"""
ids = {}
for url_key in iterkeys(url_trakt):
try:
res = TraktAPI().trakt_request(url_trakt.get_url(url_key))
if res:
found = False
for r in res:
if 'show' == r.get('type', '') and 'show' in r and 'ids' in r['show']:
ids[TVINFO_IMDB] = try_int(str(r['show']['ids'].get('imdb')).replace('tt', ''))
ids[TVINFO_TMDB] = try_int(r['show']['ids'].get('tmdb', 0))
ids[TVINFO_TRAKT] = try_int(r['show']['ids'].get('trakt', 0))
ids[TVINFO_TVDB] = try_int(r['show']['ids'].get('tvdb', 0))
ids[TVINFO_TVRAGE] = try_int(r['show']['ids'].get('tvrage', 0))
found = True
break
if found:
break
except (ConnectionSkipException, TraktAuthException, TraktException, IndexError, KeyError):
pass
return {k: v for k, v in iteritems(ids) if v not in (None, '', 0)}
def get_imdbid_by_name(name, startyear):
"""
:param name: name
:type name: AnyStr
:param startyear: start year
:type startyear: int or AnyStr
:return:
:rtype: Dict
"""
ids = {}
try:
res = Imdb(exclude_episodes=True,
cachedir=ek.ek(os.path.join, sickbeard.CACHE_DIR, 'imdb-pie')).search_for_title(title=name)
for r in res:
if isinstance(r.get('type'), string_types) and 'tv series' == r.get('type').lower() \
and str(startyear) == str(r.get('year')):
ids[TVINFO_IMDB] = try_int(re.sub(r'[^0-9]*', '', r.get('imdb_id')))
break
except (BaseException, Exception):
pass
return {k: v for k, v in iteritems(ids) if v not in (None, '', 0)}
def get_show_name_date(show_obj):
# type: (TVShow) -> Tuple[Optional[AnyStr], Optional[datetime.date]]
return clean_show_name(show_obj.name), get_premieredate(show_obj)
def check_imdbid_by_id(name, startyear, imdb_id):
# type: (AnyStr, int, int) -> bool
"""
check if the name and start year match the imdb id
:param name: name
:param startyear: start year
:param imdb_id: imdb id
:return: match bool
"""
try:
res = Imdb(exclude_episodes=True,
cachedir=ek.ek(os.path.join, sickbeard.CACHE_DIR, 'imdb-pie')).get_title_auxiliary(
imdb_id='tt%07d' % imdb_id)
name = clean_show_name(name)
if (str(startyear) == str(res.get('year')) or abs(try_int(startyear, 10) - try_int(res.get('year'), 1)) <= 1
or 1930 > try_int(startyear, 0)) and clean_show_name(res.get('title')).lower() == name.lower():
return True
except (BaseException, Exception):
pass
return False
def combine_mapped_new_dict(mapped, new_ids):
# type: (Dict[int, Dict], Dict[int, integer_types]) -> Dict[int, integer_types]
return {n: m for d in ({k: v['id'] for k, v in iteritems(mapped) if v['id']}, new_ids) for n, m in iteritems(d)}
def check_missing_trakt_id(n_ids, show_obj, url_trakt):
def combine_new_ids(cur_ids, new_ids, src_id):
# type: (Dict[int, integer_types], Dict[int, integer_types], int) -> Dict[int, integer_types]
"""
combine cur_ids with new_ids, priority has cur_ids with exception of src_id key
:param n_ids:
:type n_ids: NewIdDict
:param show_obj: show objects
:type show_obj: sickbeard.tv.TVShow
:param url_trakt: trakt url
:type url_trakt: TraktDict
:return:
:rtype: NewIdDict
:param cur_ids:
:param new_ids:
:param src_id:
"""
if TVINFO_TRAKT not in n_ids:
new_url_trakt = TraktDict()
for k, v in iteritems(n_ids):
if k != show_obj.tvid and k in [TVINFO_TVDB, TVINFO_TVRAGE, TVINFO_IMDB] and 0 < v \
and k not in url_trakt:
new_url_trakt[k] = v
if 0 < len(new_url_trakt):
n_ids.update(get_trakt_ids(new_url_trakt))
return n_ids
return {k: v for d in (cur_ids, new_ids) for k, v in iteritems(d)
if v and (k == src_id or not cur_ids.get(k) or v == cur_ids.get(k, ''))}
def map_indexers_to_show(show_obj, update=False, force=False, recheck=False, im_sql_result=None):
@@ -348,14 +233,10 @@ def map_indexers_to_show(show_obj, update=False, force=False, recheck=False, im_
if not sql_result:
my_db = db.DBConnection()
sql_result = my_db.select(
"""
SELECT *
FROM indexer_mapping
WHERE indexer = ? AND indexer_id = ?
""", [show_obj.tvid, show_obj.prodid])
'SELECT * FROM indexer_mapping WHERE indexer = ? AND indexer_id = ?', [show_obj.tvid, show_obj.prodid])
# for each mapped entry
for cur_row in sql_result:
for cur_row in sql_result or []:
date = try_int(cur_row['date'])
mapped[int(cur_row['mindexer'])] = {'status': int(cur_row['status']),
'id': int(cur_row['mindexer_id']),
@@ -367,105 +248,42 @@ def map_indexers_to_show(show_obj, update=False, force=False, recheck=False, im_
and ((0 == v['id'] and MapStatus.NONE == v['status'])
or force or recheck or (update and 0 == v['id'] and k not in defunct_indexer))]
if mis_map:
url_tvmaze = TvmazeDict()
url_trakt = TraktDict()
if show_obj.tvid in (TVINFO_TVDB, TVINFO_TVRAGE):
url_tvmaze[show_obj.tvid] = show_obj.prodid
url_trakt[show_obj.tvid] = show_obj.prodid
elif show_obj.tvid == TVINFO_TVMAZE:
url_tvmaze[TVINFO_TVMAZE] = show_obj.tvid
src_tv_id = show_obj._tvid
new_ids = NewIdDict(tv_src=src_tv_id) # type: NewIdDict
if show_obj.imdbid and re.search(r'\d+$', show_obj.imdbid):
url_tvmaze[TVINFO_IMDB] = try_int(re.search(r'(?:tt)?(\d+)', show_obj.imdbid).group(1))
url_trakt[TVINFO_IMDB] = try_int(re.search(r'(?:tt)?(\d+)', show_obj.imdbid).group(1))
for m, v in iteritems(mapped):
if m != show_obj.tvid and m in [TVINFO_TVDB, TVINFO_TVRAGE, TVINFO_TVRAGE, TVINFO_IMDB] and \
0 < v.get('id', 0):
url_tvmaze[m] = v['id']
new_ids = NewIdDict() # type: NewIdDict
src_imdb_id = None
if isinstance(show_obj.imdbid, string_types) and re.search(r'\d+$', show_obj.imdbid):
try:
new_ids[TVINFO_IMDB] = try_int(re.search(r'(?:tt)?(\d+)', show_obj.imdbid).group(1))
src_imdb_id = new_ids[TVINFO_IMDB]
except (BaseException, Exception):
pass
if 0 < len(url_tvmaze):
new_ids.update(get_tvmaze_ids(url_tvmaze))
for m, v in iteritems(new_ids):
if m != show_obj.tvid and m in [TVINFO_TVDB, TVINFO_TVRAGE, TVINFO_TVRAGE, TVINFO_IMDB] and 0 < v:
url_trakt[m] = v
if url_trakt:
new_ids.update(get_trakt_ids(url_trakt))
if TVINFO_TVMAZE not in new_ids:
new_url_tvmaze = TvmazeDict()
for k, v in iteritems(new_ids):
if k != show_obj.tvid and k in [TVINFO_TVDB, TVINFO_TVRAGE, TVINFO_TVRAGE, TVINFO_IMDB] \
and 0 < v and k not in url_tvmaze:
new_url_tvmaze[k] = v
if 0 < len(new_url_tvmaze):
new_ids.update(get_tvmaze_ids(new_url_tvmaze))
if TVINFO_TVMAZE not in new_ids:
f_date = get_premieredate(show_obj)
if f_date and f_date != datetime.date.fromordinal(1):
tvids = {k: v for k, v in iteritems(get_tvmaze_by_name(show_obj.name, f_date)) if k == TVINFO_TVMAZE
or k not in new_ids or new_ids.get(k) in (None, 0, '', MapStatus.NOT_FOUND)}
new_ids.update(tvids)
new_ids = check_missing_trakt_id(new_ids, show_obj, url_trakt)
if TVINFO_IMDB not in new_ids:
new_ids.update(get_imdbid_by_name(show_obj.name, show_obj.startyear))
new_ids = check_missing_trakt_id(new_ids, show_obj, url_trakt)
if TVINFO_TMDB in mis_map \
and (None is new_ids.get(TVINFO_TMDB) or MapStatus.NOT_FOUND == new_ids.get(TVINFO_TMDB)) \
and (0 < mapped.get(TVINFO_TVDB, {'id': 0}).get('id', 0) or 0 < new_ids.get(TVINFO_TVDB, 0)
or 0 < mapped.get(TVINFO_IMDB, {'id': 0}).get('id', 0) or 0 < new_ids.get(TVINFO_TMDB, 0)
or 0 < mapped.get(TVINFO_TVRAGE, {'id': 0}).get('id', 0) or 0 < new_ids.get(TVINFO_TVRAGE, 0)):
try:
TMDB.API_KEY = sickbeard.TMDB_API_KEY
for d in [TVINFO_TVDB, TVINFO_IMDB, TVINFO_TVRAGE]:
c = (new_ids.get(d), mapped.get(d, {'id': 0}).get('id'))[0 < mapped.get(d, {'id': 0}).get('id', 0)]
if 0 >= c:
all_ids_srcs = [src_tv_id] + [s for s in (TVINFO_TRAKT, TVINFO_TMDB, TVINFO_TVMAZE, TVINFO_TVDB, TVINFO_IMDB)
if s != src_tv_id]
searched, confirmed = {}, False
for r in moves.range(len(all_ids_srcs)):
search_done = False
for i in all_ids_srcs:
if new_ids.verified.get(i):
continue
if None is not c and 0 < c:
if TVINFO_IMDB == d:
c = 'tt%07d' % c
tmdb_data = TMDB.Find(c).info(**{'external_source': tmdb_ids[d]})
if isinstance(tmdb_data, dict) \
and 'tv_results' in tmdb_data and 0 < len(tmdb_data['tv_results']) \
and 'id' in tmdb_data['tv_results'][0] \
and 0 < try_int(tmdb_data['tv_results'][0]['id']):
new_ids[TVINFO_TMDB] = try_int(tmdb_data['tv_results'][0]['id'])
search_ids = {k: v for k, v in iteritems(combine_mapped_new_dict(mapped, new_ids))
if k not in searched.setdefault(i, {})}
if search_ids:
search_done = True
searched[i].update(search_ids)
new_ids.update(get_missing_ids(search_ids, show_obj, tv_src=i), tv_src=i)
if new_ids.get(i) and 0 < new_ids.get(i):
searched[i].update({i: new_ids[i]})
confirmed = all(v for k, v in iteritems(new_ids.verified) if k not in defunct_indexer)
if confirmed:
break
except (BaseException, Exception):
pass
if TVINFO_TMDB not in new_ids:
try:
TMDB.API_KEY = sickbeard.TMDB_API_KEY
tmdb_data = TMDB.Search().tv(**{'query': clean_show_name(show_obj.name),
'first_air_date_year': show_obj.startyear})
for cur_row in tmdb_data.get('results'):
if clean_show_name(cur_row['name']) == clean_show_name(show_obj.name):
new_ids[TVINFO_TMDB] = try_int(cur_row['id'])
if confirmed or not search_done:
break
except (BaseException, Exception):
pass
for i in indexer_list:
if i != show_obj.tvid and i in mis_map and 0 != new_ids.get(i, 0):
if TVINFO_IMDB == i and 0 > new_ids[i] and src_imdb_id and \
check_imdbid_by_id(show_obj.name, startyear=show_obj.startyear, imdb_id=src_imdb_id):
mapped[i] = {'status': MapStatus.NONE, 'id': src_imdb_id}
if i != show_obj.tvid and ((i in mis_map and 0 != new_ids.get(i, 0)) or
(new_ids.verified.get(i) and 0 < new_ids.get(i, 0))):
if i not in new_ids:
mapped[i] = {'status': MapStatus.NOT_FOUND, 'id': 0}
continue
if new_ids.verified.get(i) and 0 < new_ids[i] and mapped.get(i, {'id': 0})['id'] != new_ids[i]:
if i not in mis_map:
mis_map.append(i)
mapped[i] = {'status': MapStatus.NONE, 'id': new_ids[i]}
continue
if 0 > new_ids[i]:
mapped[i] = {'status': new_ids[i], 'id': 0}
@@ -485,17 +303,14 @@ def map_indexers_to_show(show_obj, update=False, force=False, recheck=False, im_
if 0 != mapped[tvid]['id'] or MapStatus.NONE != mapped[tvid]['status']:
mapped[tvid]['date'] = today
sql_l.append([
"""
INSERT OR REPLACE
INTO indexer_mapping (indexer_id, indexer, mindexer_id, mindexer, date, status)
VALUES (?,?,?,?,?,?)
""", [show_obj.prodid, show_obj.tvid, mapped[tvid]['id'], tvid, date, mapped[tvid]['status']]])
'REPLACE INTO indexer_mapping (indexer_id, indexer, mindexer_id, mindexer, date, status)'
' VALUES (?,?,?,?,?,?)',
[show_obj.prodid, show_obj.tvid, mapped[tvid]['id'], tvid, date, mapped[tvid]['status']]])
else:
sql_l.append([
"""
DELETE FROM indexer_mapping
WHERE indexer = ? AND indexer_id = ? AND mindexer = ?
""", [show_obj.tvid, show_obj.prodid, tvid]])
'DELETE FROM indexer_mapping'
' WHERE indexer = ? AND indexer_id = ? AND mindexer = ?',
[show_obj.tvid, show_obj.prodid, tvid]])
if 0 < len(sql_l):
logger.log('Adding TV info mapping to DB for show: %s' % show_obj.name, logger.DEBUG)
@@ -524,16 +339,14 @@ def save_mapping(show_obj, save_map=None):
if 0 != show_obj.ids[tvid]['id'] or MapStatus.NONE != show_obj.ids[tvid]['status']:
show_obj.ids[tvid]['date'] = today
sql_l.append([
'INSERT OR REPLACE INTO indexer_mapping'
'REPLACE INTO indexer_mapping'
' (indexer_id, indexer, mindexer_id, mindexer, date, status) VALUES (?,?,?,?,?,?)',
[show_obj.prodid, show_obj.tvid, show_obj.ids[tvid]['id'],
tvid, date, show_obj.ids[tvid]['status']]])
else:
sql_l.append([
"""
DELETE FROM indexer_mapping
WHERE indexer = ? AND indexer_id = ? AND mindexer = ?
""", [show_obj.tvid, show_obj.prodid, tvid]])
'DELETE FROM indexer_mapping WHERE indexer = ? AND indexer_id = ? AND mindexer = ?',
[show_obj.tvid, show_obj.prodid, tvid]])
if 0 < len(sql_l):
logger.log('Saving TV info mapping to DB for show: %s' % show_obj.name, logger.DEBUG)
@@ -550,11 +363,7 @@ def del_mapping(tvid, prodid):
:type prodid: int or long
"""
my_db = db.DBConnection()
my_db.action(
"""
DELETE FROM indexer_mapping
WHERE indexer = ? AND indexer_id = ?
""", [tvid, prodid])
my_db.action('DELETE FROM indexer_mapping WHERE indexer = ? AND indexer_id = ?', [tvid, prodid])
def should_recheck_update_ids(show_obj):
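combine_new_ids keeps existing mappings except that the verified source's own key may be replaced; a worked example using the function defined above (ids hypothetical):

cur_ids = {1: 100, 10: 0, 12: 300}    # e.g. tvdb, imdb, trakt
new_ids = {1: 999, 10: 200, 11: 400}  # freshly searched ids
combined = combine_new_ids(cur_ids, new_ids, src_id=1)
# src key 1 is overwritten; empty key 10 is filled; key 12 is kept
assert combined == {1: 999, 10: 200, 11: 400, 12: 300}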

sickbeard/search_backlog.py (15)

@@ -116,7 +116,8 @@ class BacklogSearcher(object):
standard_backlog, # type: bool
limited_backlog, # type: bool
forced, # type: bool
torrent_only # type: bool
torrent_only, # type: bool
prevent_same=False # type: bool
):
"""
add given list of show items to search backlog queue
@@ -126,10 +127,15 @@ class BacklogSearcher(object):
:param limited_backlog: limited search
:param forced: forced search
:param torrent_only: only torrents
:param prevent_same: prevent same search
"""
for segments in items:
if len(segments):
for season, segment in iteritems(segments): # type: int, List[TVEpisode]
if prevent_same and \
sickbeard.search_queue_scheduler.action.is_in_queue(segment[0].show_obj, segment):
continue
self.currentSearchInfo = {'title': segment[0].show_obj.name + ' Season ' + str(season)}
backlog_queue_item = search_queue.BacklogQueueItem(
@@ -180,7 +186,8 @@ class BacklogSearcher(object):
def search_backlog(self,
which_shows=None, # type: Optional[List[TVShow]]
force_type=NORMAL_BACKLOG, # type: int
force=False # type: bool
force=False, # type: bool
prevent_same=False # type: bool
):
"""
start backlog for given list of shows or start next scheduled backlog
@@ -188,6 +195,7 @@ class BacklogSearcher(object):
:param which_shows: optional list of shows to backlog search
:param force_type: type of backlog
:param force: force backlog
:param prevent_same: don't start same search again
:return: nothing
:rtype: None
"""
@@ -297,7 +305,8 @@ class BacklogSearcher(object):
if w:
limited_wanted_list.append(w)
self.add_backlog_item(wanted_list, standard_backlog, limited_backlog, forced, any_torrent_enabled)
self.add_backlog_item(wanted_list, standard_backlog, limited_backlog, forced, any_torrent_enabled,
prevent_same=prevent_same)
if standard_backlog and not any_torrent_enabled and limited_wanted_list:
self.add_backlog_item(limited_wanted_list, standard_backlog, True, forced, any_torrent_enabled)

sickbeard/show_queue.py (8)

@@ -1013,7 +1013,8 @@ class QueueItemAdd(ShowQueueItem):
return
try:
new_show_obj = TVShow(self.tvid, self.prodid, self.lang)
show_exists = find_show_by_id({self.tvid: self.prodid}, no_exceptions=True)
new_show_obj = show_exists or TVShow(self.tvid, self.prodid, self.lang)
result = new_show_obj.load_from_tvinfo()
self.show_obj = new_show_obj
@@ -1083,7 +1084,8 @@ class QueueItemAdd(ShowQueueItem):
self._finishEarly()
raise
# add it to the show list
if not show_exists:
# add it to the show list if not already in it
sickbeard.showList.append(self.show_obj)
sickbeard.showDict[self.show_obj.sid_int] = self.show_obj
@@ -1182,7 +1184,7 @@ class QueueItemAdd(ShowQueueItem):
# if started with WANTED eps then run the backlog
if WANTED == self.default_status or items_wanted:
logger.log('Launching backlog for this show since episodes are WANTED')
sickbeard.backlog_search_scheduler.action.search_backlog([self.show_obj])
sickbeard.backlog_search_scheduler.action.search_backlog([self.show_obj], prevent_same=True)
ui.notifications.message('Show added/search', 'Adding and searching for episodes of' + msg)
else:
ui.notifications.message('Show added', 'Adding' + msg)

sickbeard/webapi.py (112)

@@ -124,6 +124,10 @@ class PythonObjectEncoder(json.JSONEncoder):
return {'season': obj.season, 'episode': obj.episode}
elif isinstance(obj, TVShow):
return {'name': obj.name, 'indexer': obj.tvid, 'indexer_id': obj.prodid}
elif isinstance(obj, datetime.datetime):
return SGDatetime.sbfdatetime(obj, d_preset=dateFormat, t_preset='%H:%M %z')
elif isinstance(obj, datetime.date):
return SGDatetime.sbfdate(obj, d_preset=dateFormat)
return json.JSONEncoder.default(self, obj)
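The new branches let API payloads carry date and datetime objects straight through json.dumps; a usage sketch:

import datetime
import json
# PythonObjectEncoder is the webapi encoder extended above
payload = {'airdate': datetime.date(2020, 9, 12)}
print(json.dumps(payload, cls=PythonObjectEncoder))  # date via SGDatetime.sbfdate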
@@ -965,94 +969,8 @@ class CMD_SickGearComingEpisodes(ApiCall):
def run(self):
""" get the daily schedule """
today_dt = datetime.date.today()
today = today_dt.toordinal()
yesterday_dt = today_dt - datetime.timedelta(days=1)
yesterday = yesterday_dt.toordinal()
tomorrow = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
next_week_dt = (datetime.date.today() + datetime.timedelta(days=7))
next_week = (next_week_dt + datetime.timedelta(days=1)).toordinal()
recently = (yesterday_dt - datetime.timedelta(days=sickbeard.EPISODE_VIEW_MISSED_RANGE)).toordinal()
done_show_list = []
qualList = Quality.SNATCHED + Quality.DOWNLOADED + Quality.ARCHIVED + [IGNORED]
my_db = db.DBConnection()
# noinspection SqlResolve
sql_result = my_db.select(
"SELECT airdate, airs, runtime, tv_shows.indexer AS 'tv_id', episode, name AS 'ep_name', "
"tv_episodes.status as 'status', description AS 'ep_plot', network, season, showid AS 'prod_id', "
"show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', "
"tv_shows.paused AS 'paused' FROM tv_episodes, tv_shows WHERE " +
("", "tv_shows.indexer = %s AND " % TVINFO_TVDB)[self.sickbeard_call] +
"season != 0 AND airdate >= ? AND "
"airdate <= ? AND tv_shows.indexer_id = tv_episodes.showid AND tv_shows.indexer == tv_episodes.indexer AND "
"tv_episodes.status NOT IN (" + ','.join(['?'] * len(qualList)) + ")", [yesterday, next_week] + qualList)
for cur_result in sql_result:
done_show_list.append((int(cur_result['prod_id']), int(cur_result['tv_id'])))
# noinspection SqlResolve,SqlRedundantOrderingDirection
more_sql_result = [m for m in my_db.select(
"SELECT airdate, airs, runtime, tv_shows.indexer AS 'tv_id', episode, name AS 'ep_name', "
"outer_eps.status as 'status', description AS 'ep_plot', network, season, showid AS 'prod_id', "
"show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', "
"tv_shows.paused AS 'paused' FROM tv_episodes outer_eps, tv_shows WHERE " +
("", "tv_shows.indexer = %s AND " % TVINFO_TVDB)[self.sickbeard_call] +
"season != 0 AND "
"tv_shows.indexer_id = outer_eps.showid AND tv_shows.indexer == outer_eps.indexer AND "
"airdate = (SELECT airdate FROM tv_episodes inner_eps WHERE inner_eps.season != 0 AND "
"inner_eps.showid = outer_eps.showid AND inner_eps.indexer == outer_eps.indexer AND "
"inner_eps.airdate >= ? ORDER BY inner_eps.airdate ASC LIMIT 1) AND "
"outer_eps.status NOT IN (" + ','.join(['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED)) + ")",
[next_week] + Quality.DOWNLOADED + Quality.SNATCHED) if (int(m['prod_id']), int(m['tv_id']))
not in done_show_list]
sql_result += more_sql_result
# noinspection SqlResolve
more_sql_result = my_db.select(
"SELECT airdate, airs, runtime, tv_shows.indexer AS 'tv_id', episode, name AS 'ep_name', "
"tv_episodes.status as 'status', description AS 'ep_plot', network, season, showid AS 'prod_id', "
"show_name, tv_shows.quality AS quality, tv_shows.status AS 'show_status', "
"tv_shows.paused AS 'paused' FROM tv_episodes, tv_shows WHERE " +
("", "tv_shows.indexer = %s AND " % TVINFO_TVDB)[self.sickbeard_call] +
"season != 0 AND tv_shows.indexer_id = tv_episodes.showid AND tv_shows.indexer == tv_episodes.indexer AND "
"airdate <= ? AND airdate >= ? AND "
"tv_episodes.status = ? AND tv_episodes.status NOT IN (" + ','.join(
['?'] * len(qualList)) + ")", [tomorrow, recently, WANTED] + qualList)
sql_result += more_sql_result
sql_result = list(set(sql_result))
# make a dict out of the sql results
sql_result = [dict(cur_result) for cur_result in sql_result
if Quality.splitCompositeStatus(helpers.try_int(cur_result['status']))[0] not in
[DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, ARCHIVED, IGNORED, SKIPPED]]
# multi dimension sort
sorts = {
'network': lambda a: (a['data_network'], a['local_datetime'], a['data_show_name'], a['season'], a['episode']),
'show': lambda a: (a['data_show_name'], a['local_datetime'], a['season'], a['episode']),
'date': lambda a: (a['local_datetime'], a['data_show_name'], a['season'], a['episode'])
}
def value_maybe_article(value=None):
if None is value:
return ''
return (remove_article(value.lower()), value.lower())[sickbeard.SORT_ARTICLE]
# add parsed_datetime to the dict
for index, item in enumerate(sql_result):
timezone, sql_result[index]['timezone'] = network_timezones.get_network_timezone(item['network'],
return_name=True)
p_t = network_timezones.parse_date_time(item['airdate'], item['airs'], timezone)
sql_result[index]['parsed_datetime'] = p_t
sql_result[index]['local_datetime'] = SGDatetime.sbstrftime(
SGDatetime.convert_to_setting(p_t, force_local=True), dateTimeFormat)
sql_result[index]['data_show_name'] = value_maybe_article(item['show_name'])
sql_result[index]['data_network'] = value_maybe_article(item['network'])
sql_result[index]['status_str'] = statusStrings[item['status']]
sql_result.sort(key=sorts[self.sort])
sql_result, fanart, sorts, next_week_dt, today, next_week = webserve.MainHandler.get_daily_schedule()
sql_result.sort(key=sorts[(self.sort, 'time')['date' == self.sort]])
finalEpResults = {}
@@ -1096,6 +1014,24 @@
ep['airs'] = str(ep['airs']).replace('am', ' AM').replace('pm', ' PM').replace('  ', ' ')
# start day of the week on 1 (monday)
ep['weekday'] = 1 + datetime.date.fromordinal(ep['airdate']).weekday()
ep['ep_name'] = ep['name']
ep['ep_plot'] = ep['description']
# add parsed_datetime to the dict
ep['local_datetime'] = SGDatetime.sbstrftime(
SGDatetime.convert_to_setting(ep['parsed_datetime'], force_local=True), dateTimeFormat)
ep['status_str'] = statusStrings[ep['status']]
ep['network'] = ep['episode_network'] or ep['network']
ep['timezone'] = ep['ep_timezone'] or ep['show_timezone'] or ep['timezone'] or (
ep['network'] and network_timezones.get_network_timezone(ep['network'], return_name=True)[1])
# remove all field we don't want for api response
for f in ('localtime', 'ep_airtime', 'airtime', 'timestamp', 'show_airtime', 'network_id',
'network_is_stream', 'notify_list', 'src_update_timestamp', 'subtitles_lastsearch', 'subtitles',
'subtitles_searchcount', 'name', 'description', 'hasnfo', 'hastbn', 'last_update_indexer',
'file_size', 'flatten_folders', 'is_proper', 'prune', 'episode_network', 'network_country',
'network_country_code', 'show_timezone', 'release_group', 'release_name',
'rls_global_exclude_ignore', 'rls_global_exclude_require', 'rls_ignore_words',
'rls_require_words', 'location', 'ep_timezone', 'dvdorder', 'anime', 'sports'):
del ep[f]
# Add tvdbid for backward compatibility
try:
show_obj = helpers.find_show_by_id({ep['tv_id']: ep['prod_id']})

sickbeard/webserve.py (38)

@@ -985,10 +985,12 @@ class MainHandler(WebHandler):
if int(redir):
self.redirect('/daily-schedule/')
def daily_schedule(self, layout='None'):
@staticmethod
def get_daily_schedule():
# type: (...) -> Tuple[List[Dict], Dict, Dict, datetime.date, integer_types, integer_types]
""" display the episodes """
today_dt = datetime.date.today()
# today = today_dt.toordinal()
today = today_dt.toordinal()
yesterday_dt = today_dt - datetime.timedelta(days=1)
yesterday = yesterday_dt.toordinal()
tomorrow = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
@@ -1061,12 +1063,13 @@
# add localtime to the dict
cache_obj = image_cache.ImageCache()
t = PageTemplate(web_handler=self, file='episodeView.tmpl')
t.fanart = {}
fanarts = {}
cur_prodid = None
for index, item in enumerate(sql_result):
tvid_prodid_obj = TVidProdid({item['indexer']: item['showid']})
tvid_prodid = str(tvid_prodid_obj)
sql_result[index]['tv_id'] = item['indexer']
sql_result[index]['prod_id'] = item['showid']
sql_result[index]['tvid_prodid'] = tvid_prodid
if cur_prodid != tvid_prodid:
cur_prodid = tvid_prodid
@@ -1079,6 +1082,7 @@
item['timestamp'], item['episode_network'], item['ep_airtime'], item['ep_timezone'])
# noinspection PyCallByClass,PyTypeChecker
sql_result[index]['parsed_datetime'] = val
sql_result[index]['localtime'] = SGDatetime.convert_to_setting(val)
sql_result[index]['data_show_name'] = value_maybe_article(item['show_name'])
sql_result[index]['data_network'] = value_maybe_article(item['network'])
@@ -1098,7 +1102,7 @@
else:
sql_result[index]['imdb_url'] = ''
if tvid_prodid in t.fanart:
if tvid_prodid in fanarts:
continue
for img in ek.ek(glob.glob, cache_obj.fanart_path(*tvid_prodid_obj.tuple).replace('fanart.jpg', '*')) or []:
@@ -1106,23 +1110,29 @@
if not match:
continue
fanart = [(match.group(1), sickbeard.FANART_RATINGS.get(tvid_prodid, {}).get(match.group(1), ''))]
if tvid_prodid not in t.fanart:
t.fanart[tvid_prodid] = fanart
if tvid_prodid not in fanarts:
fanarts[tvid_prodid] = fanart
else:
t.fanart[tvid_prodid] += fanart
fanarts[tvid_prodid] += fanart
for tvid_prodid in t.fanart:
fanart_rating = [(n, v) for n, v in t.fanart[tvid_prodid] if 20 == v]
for tvid_prodid in fanarts:
fanart_rating = [(n, v) for n, v in fanarts[tvid_prodid] if 20 == v]
if fanart_rating:
t.fanart[tvid_prodid] = fanart_rating
fanarts[tvid_prodid] = fanart_rating
else:
rnd = [(n, v) for (n, v) in t.fanart[tvid_prodid] if 30 != v]
rnd = [(n, v) for (n, v) in fanarts[tvid_prodid] if 30 != v]
grouped = [(n, v) for (n, v) in rnd if 10 == v]
if grouped:
t.fanart[tvid_prodid] = [grouped[random.randint(0, len(grouped) - 1)]]
fanarts[tvid_prodid] = [grouped[random.randint(0, len(grouped) - 1)]]
elif rnd:
t.fanart[tvid_prodid] = [rnd[random.randint(0, len(rnd) - 1)]]
fanarts[tvid_prodid] = [rnd[random.randint(0, len(rnd) - 1)]]
return sql_result, fanarts, sorts, next_week_dt, today, next_week
def daily_schedule(self, layout='None'):
""" display the episodes """
t = PageTemplate(web_handler=self, file='episodeView.tmpl')
sql_result, t.fanart, sorts, next_week_dt, today, next_week = self.get_daily_schedule()
# Allow local overriding of layout parameter
if layout and layout in ('banner', 'daybyday', 'list', 'poster'):
t.layout = layout

tests/test_lib.py (2)

@@ -122,7 +122,7 @@ mainDB.sickbeard.save_config = _dummy_save_config
# the real one tries to contact tvdb just stop it from getting more info on the ep
# noinspection PyUnusedLocal
def _fake_specify_ep(self, season, episode, show_sql=None):
def _fake_specify_ep(self, season, episode, show_sql=None, existing_only=False, **kwargs):
pass
