diff --git a/gui/slick/interfaces/default/displayShow.tmpl b/gui/slick/interfaces/default/displayShow.tmpl
index a276f2b..4e97e5f 100644
--- a/gui/slick/interfaces/default/displayShow.tmpl
+++ b/gui/slick/interfaces/default/displayShow.tmpl
@@ -85,7 +85,7 @@
#end if
#for $cur_show_obj in $cur_showlist
#set $show_ended = '' != $cur_show_obj.status and $cur_show_obj.status in ['ended', 'Ended', 'Canceled']
-			#set void = $displayshowlist.append('\t\t\t<option %svalue="%s"%s>%s</option>' % (('', 'class="ended" ')[$show_ended], $cur_show_obj.tvid_prodid, ('', ' selected="selected"')[$cur_show_obj == $show_obj], $cur_show_obj.name))
+			#set void = $displayshowlist.append('\t\t\t<option %svalue="%s"%s>%s</option>' % (('', 'class="ended" ')[$show_ended], $cur_show_obj.tvid_prodid, ('', ' selected="selected"')[$cur_show_obj == $show_obj], getattr($cur_show_obj, 'unique_name', $cur_show_obj.name)))
#end for
#if 1 < len($sortedShowLists)
#set void = $displayshowlist.append('\t\t\t')
diff --git a/gui/slick/interfaces/default/home.tmpl b/gui/slick/interfaces/default/home.tmpl
index 9a2c4e9..a31072b 100644
--- a/gui/slick/interfaces/default/home.tmpl
+++ b/gui/slick/interfaces/default/home.tmpl
@@ -128,7 +128,7 @@
#set $cur_total = 0
#set $download_stat_tip = ''
#set $display_status = $cur_show_obj.status
- #set $display_name = (re.sub(r'^((?:A(?!\s+to)n?)|The)\s(\w)', r'\1 \2', $cur_show_obj.name), $cur_show_obj.name)[$sg_var('SORT_ARTICLE')]
+ #set $display_name = (re.sub(r'^((?:A(?!\s+to)n?)|The)\s(\w)', r'\1 \2', getattr($cur_show_obj, 'unique_name', $cur_show_obj.name)), getattr($cur_show_obj, 'unique_name', $cur_show_obj.name))[$sg_var('SORT_ARTICLE')]
#set $poster_id += 1
#if None is not $display_status
#if re.search(r'(?i)(?:(?:new|returning)\s*series|upcoming)', $cur_show_obj.status)
@@ -324,7 +324,7 @@
#set $cur_downloaded = 0
#set $cur_total = 0
#set $download_stat_tip = ''
- #set $display_name = (re.sub(r'^((?:A(?!\s+to)n?)|The)\s(\w)', r'\1 \2', $cur_show_obj.name), $cur_show_obj.name)[$sg_var('SORT_ARTICLE')]
+ #set $display_name = (re.sub(r'^((?:A(?!\s+to)n?)|The)\s(\w)', r'\1 \2', getattr($cur_show_obj, 'unique_name', $cur_show_obj.name)), getattr($cur_show_obj, 'unique_name', $cur_show_obj.name))[$sg_var('SORT_ARTICLE')]
#set $poster_id += 1
##
#if $cur_show_obj.tvid_prodid in $show_stat
diff --git a/gui/slick/interfaces/default/inc_top.tmpl b/gui/slick/interfaces/default/inc_top.tmpl
index 4293c34..97cbeb5 100644
--- a/gui/slick/interfaces/default/inc_top.tmpl
+++ b/gui/slick/interfaces/default/inc_top.tmpl
@@ -207,7 +207,7 @@
#else
#for item in $added_last
#if $hasattr($item, 'tvid_prodid')
-
$abbr_showname($item.name)
+ $abbr_showname(getattr($item, 'unique_name',$item.name))
#end if
#end for
#end if
diff --git a/gui/slick/interfaces/default/manage.tmpl b/gui/slick/interfaces/default/manage.tmpl
index ca9a8d9..a7ff97a 100644
--- a/gui/slick/interfaces/default/manage.tmpl
+++ b/gui/slick/interfaces/default/manage.tmpl
@@ -223,7 +223,7 @@ $xsrf_form_html
#set $curRemove = ($tip, $option_state % (('', $disabled)[$curRemove_disabled], 'remove', $tip))
|
- #set $display_name = (re.sub(r'^((?:A(?!\s+to)n?)|The)\s(\w)', r'\1 \2', $cur_show_obj.name), $cur_show_obj.name)[$sickbeard.SORT_ARTICLE]
+ #set $display_name = (re.sub(r'^((?:A(?!\s+to)n?)|The)\s(\w)', r'\1 \2', getattr($cur_show_obj, 'unique_name', $cur_show_obj.name)), getattr($cur_show_obj, 'unique_name', $cur_show_obj.name))[$sickbeard.SORT_ARTICLE]
#if not $show_loc##end if#$display_name |
#if 0 <= $show_size < $max#$human($show_size)#end if# |
#if $enable_tvinfo
diff --git a/sickbeard/databases/cache_db.py b/sickbeard/databases/cache_db.py
index 938d377..ac47390 100644
--- a/sickbeard/databases/cache_db.py
+++ b/sickbeard/databases/cache_db.py
@@ -21,8 +21,8 @@ from collections import OrderedDict
from .. import db
MIN_DB_VERSION = 1
-MAX_DB_VERSION = 100002
-TEST_BASE_VERSION = 6 # the base production db version, only needed for TEST db versions (>=100000)
+MAX_DB_VERSION = 7
+TEST_BASE_VERSION = None # the base production db version, only needed for TEST db versions (>=100000)
# Add new migrations at the bottom of the list; subclass the previous migration.
@@ -164,4 +164,4 @@ class AddSaveQueues(AddGenericFailureHandling):
def execute(self):
self.do_query(self.queries['save_queues'])
- self.setDBVersion(100002, check_db_version=False)
+ self.finish()
diff --git a/sickbeard/databases/mainDB.py b/sickbeard/databases/mainDB.py
index a9cdf87..ce6eaf9 100644
--- a/sickbeard/databases/mainDB.py
+++ b/sickbeard/databases/mainDB.py
@@ -28,8 +28,8 @@ import encodingKludge as ek
from six import iteritems
MIN_DB_VERSION = 9 # oldest db version we support migrating from
-MAX_DB_VERSION = 100008
-TEST_BASE_VERSION = 20014 # the base production db version, only needed for TEST db versions (>=100000)
+MAX_DB_VERSION = 20015
+TEST_BASE_VERSION = None # the base production db version, only needed for TEST db versions (>=100000)
class MainSanityCheck(db.DBSanityCheck):
@@ -1699,9 +1699,11 @@ class AddHistoryHideColumn(db.SchemaUpgrade):
return self.setDBVersion(20014)
-# 20014 -> 100008
+# 20014 -> 20015
class ChangeShowData(db.SchemaUpgrade):
def execute(self):
+ db.backup_database('sickbeard.db', self.checkDBVersion())
+
self.upgrade_log('Adding new data columns to tv_shows')
self.addColumns('tv_shows', [('timezone', 'TEXT', ''), ('airtime', 'NUMERIC'),
('network_country', 'TEXT', ''), ('network_country_code', 'TEXT', ''),
@@ -1913,5 +1915,4 @@ class ChangeShowData(db.SchemaUpgrade):
self.connection.mass_action(cl)
self.connection.action('VACUUM')
- self.setDBVersion(100008)
- return self.checkDBVersion()
+ return self.setDBVersion(20015)
diff --git a/sickbeard/name_cache.py b/sickbeard/name_cache.py
index eb8bdf1..a6e98b2 100644
--- a/sickbeard/name_cache.py
+++ b/sickbeard/name_cache.py
@@ -93,7 +93,7 @@ def buildNameCache(show_obj=None, update_only_scene=False):
if not (v[0] == show_obj.tvid and v[1] == show_obj.prodid)])
# add standard indexer name to namecache
- nameCache[full_sanitize_scene_name(show_obj.name)] = [show_obj.tvid, show_obj.prodid, -1]
+ nameCache[full_sanitize_scene_name(show_obj.unique_name or show_obj.name)] = [show_obj.tvid, show_obj.prodid, -1]
else:
# generate list of production ids to look up in cache.db
show_ids = {}
@@ -102,7 +102,7 @@ def buildNameCache(show_obj=None, update_only_scene=False):
# add all standard show indexer names to namecache
nameCache = dict(
- [(full_sanitize_scene_name(cur_so.name), [cur_so.tvid, cur_so.prodid, -1])
+ [(full_sanitize_scene_name(cur_so.unique_name or cur_so.name), [cur_so.tvid, cur_so.prodid, -1])
for cur_so in sickbeard.showList if cur_so])
sceneNameCache = {}
diff --git a/sickbeard/name_parser/parser.py b/sickbeard/name_parser/parser.py
index 53bb93d..5373da9 100644
--- a/sickbeard/name_parser/parser.py
+++ b/sickbeard/name_parser/parser.py
@@ -564,7 +564,9 @@ class NameParser(object):
cache_result = False
cached = name_parser_cache.get(name)
- if cached:
+ show_obj_given = bool(self.show_obj)
+ if cached and ((not show_obj_given and not cached.show_obj_match)
+ or (show_obj_given and self.show_obj == cached.show_obj)):
return cached
# break it into parts if there are any (dirname, file name, extension)
@@ -576,7 +578,8 @@ class NameParser(object):
base_file_name = file_name
# set up a result to use
- final_result = ParseResult(name)
+ # set if parsed with given show_obj set
+ final_result = ParseResult(name, show_obj_match=show_obj_given)
# try parsing the file name
file_name_result = self._parse_string(base_file_name)
@@ -660,6 +663,7 @@ class ParseResult(LegacyParseResult):
score=None,
quality=None,
version=None,
+ show_obj_match=False,
**kwargs):
self.original_name = original_name # type: AnyStr
@@ -695,6 +699,8 @@ class ParseResult(LegacyParseResult):
self.version = version # type: Optional[int]
+ self.show_obj_match = show_obj_match # type: bool
+
super(ParseResult, self).__init__(**kwargs)
@property
diff --git a/sickbeard/show_queue.py b/sickbeard/show_queue.py
index 7b26d8a..f84b43a 100644
--- a/sickbeard/show_queue.py
+++ b/sickbeard/show_queue.py
@@ -142,7 +142,7 @@ class ShowQueue(generic_queue.GenericQueue):
action_id, status, uid, mark_wanted, set_pause, force_id)
VALUES (?,?,?,?,?,?,?,?,?,?)
""", [item.show_obj.tvid, item.show_obj.prodid, item.new_tvid, item.new_prodid,
- ShowQueueActions.SWITCH, 0, item.uid, int(item.mark_wanted), int(item.set_pause),
+ ShowQueueActions.SWITCH, TVSWITCH_NORMAL, item.uid, int(item.mark_wanted), int(item.set_pause),
int(item.force_id)])
else:
generic_queue.GenericQueue.save_item(self, item)
@@ -1087,6 +1087,9 @@ class QueueItemAdd(ShowQueueItem):
# add it to the show list if not already in it
sickbeard.showList.append(self.show_obj)
sickbeard.showDict[self.show_obj.sid_int] = self.show_obj
+ sickbeard.webserve.Home.make_showlist_unique_names()
+ sickbeard.MEMCACHE['history_tab'] = sickbeard.webserve.History.menu_tab(
+ sickbeard.MEMCACHE['history_tab_limit'])
try:
self.show_obj.load_episodes_from_tvinfo(tvinfo_data=(None, result)[
@@ -1375,6 +1378,7 @@ class QueueItemUpdate(ShowQueueItem):
ShowQueueItem.run(self)
last_update = datetime.date.fromordinal(self.show_obj.last_update_indexer)
+ old_name = self.show_obj.name
if not sickbeard.TVInfoAPI(self.show_obj.tvid).config['active']:
logger.log('TV info source %s is marked inactive, aborting update for show %s and continue with refresh.'
@@ -1478,6 +1482,10 @@ class QueueItemUpdate(ShowQueueItem):
if self.priority != generic_queue.QueuePriorities.NORMAL:
self.kwargs['priority'] = self.priority
+ if self.kwargs.get('switch_src', False) or old_name != self.show_obj.name:
+ sickbeard.webserve.Home.make_showlist_unique_names()
+ sickbeard.MEMCACHE['history_tab'] = sickbeard.webserve.History.menu_tab(
+ sickbeard.MEMCACHE['history_tab_limit'])
if not getattr(self, 'skip_refresh', False):
sickbeard.show_queue_scheduler.action.refreshShow(self.show_obj, self.force, self.scheduled_update,
after_update=True, force_image_cache=self.force_web,
@@ -1783,7 +1791,8 @@ class QueueItemSwitchSource(ShowQueueItem):
# we directly update and refresh the show without queue as part of the switch
self.progress = 'Updating from new source'
update_show = QueueItemUpdate(show_obj=self.show_obj, skip_refresh=True, pausestatus_after=pausestatus_after,
- switch=True, tvinfo_data=td, old_tvid=self.old_tvid, old_prodid=self.old_prodid)
+ switch=True, tvinfo_data=td, old_tvid=self.old_tvid, old_prodid=self.old_prodid,
+ switch_src=True)
update_show.run()
self.progress = 'Refreshing from disk'
refresh_show = QueueItemRefresh(show_obj=self.show_obj, force_image_cache=True,
diff --git a/sickbeard/tv.py b/sickbeard/tv.py
index bae7895..b9144e1 100644
--- a/sickbeard/tv.py
+++ b/sickbeard/tv.py
@@ -1305,12 +1305,14 @@ class Character(Referential):
class TVShow(TVShowBase):
__slots__ = (
'path',
+ 'unique_name',
)
def __init__(self, tvid, prodid, lang='', show_result=None, imdb_info_result=None):
# type: (int, int, Text, Optional[Row], Optional[Union[Row, Dict]]) -> None
super(TVShow, self).__init__(tvid, prodid, lang)
+ self.unique_name = ''
self.tvid = int(tvid)
self.prodid = int(prodid)
self.sid_int = self.create_sid(self.tvid, self.prodid)
@@ -3162,6 +3164,10 @@ class TVShow(TVShowBase):
self.remove_character_images()
name_cache.remove_from_namecache(self.tvid, self.prodid)
+ try:
+ sickbeard.name_parser.parser.name_parser_cache.flush(self)
+ except (BaseException, Exception):
+ pass
action = ('delete', 'trash')[sickbeard.TRASH_REMOVE_SHOW]
@@ -3171,6 +3177,8 @@ class TVShow(TVShowBase):
del sickbeard.showDict[self.sid_int]
except (BaseException, Exception):
pass
+ sickbeard.webserve.Home.make_showlist_unique_names()
+ sickbeard.MEMCACHE['history_tab'] = sickbeard.webserve.History.menu_tab(sickbeard.MEMCACHE['history_tab_limit'])
try:
tvid_prodid = self.tvid_prodid
@@ -3465,7 +3473,7 @@ class TVShow(TVShowBase):
try:
sickbeard.show_queue_scheduler.action.updateShow(
self, force=True, web=True, priority=QueuePriorities.VERYHIGH,
- pausestatus_after=pausestatus_after)
+ pausestatus_after=pausestatus_after, switch_src=True)
except exceptions_helper.CantUpdateException as e:
logger.log('Unable to update this show. %s' % ex(e), logger.ERROR)
diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py
index b450283..14bc6d2 100644
--- a/sickbeard/webserve.py
+++ b/sickbeard/webserve.py
@@ -48,7 +48,7 @@ import sg_helpers
from sg_helpers import scantree
import sickbeard
-from . import classes, clients, config, db, helpers, history, image_cache, logger, naming, \
+from . import classes, clients, config, db, helpers, history, image_cache, logger, name_cache, naming, \
network_timezones, notifiers, nzbget, processTV, sab, scene_exceptions, search_queue, subtitles, ui
from .anime import AniGroupList, pull_anidb_groups, short_group_names
from .browser import folders_at_path
@@ -69,7 +69,7 @@ from .show_name_helpers import abbr_showname
from .show_updater import clean_ignore_require_words
from .trakt_helpers import build_config, trakt_collection_remove_account
from .tv import TVidProdid, Person as TVPerson, Character as TVCharacter, TVSWITCH_NORMAL, tvswitch_names, \
- TVSWITCH_EP_DELETED, tvswitch_ep_names, usable_id
+     TVSWITCH_EP_DELETED, tvswitch_ep_names, usable_id
from bs4_parser import BS4Parser
from Cheetah.Template import Template
@@ -2306,8 +2306,45 @@ class Home(MainHandler):
return t.respond()
@staticmethod
- def sorted_show_lists():
+ def make_showlist_unique_names():
+ def titler(x):
+ return (remove_article(x), x)[not x or sickbeard.SORT_ARTICLE].lower()
+ sorted_show_list = sorted(sickbeard.showList, key=lambda x: titler(x.name))
+ year_check = re.compile(r' \(\d{4}\)$')
+ dups = {}
+
+ for i, val in enumerate(sorted_show_list):
+ if val.name not in dups:
+ # Store index of first occurrence and occurrence value
+ dups[val.name] = i
+ val.unique_name = val.name
+ else:
+ # remove cached parsed result
+ sickbeard.name_parser.parser.name_parser_cache.flush(val)
+ if not year_check.search(sorted_show_list[dups[val.name]].name):
+ # add year to first show
+ first_ep = sorted_show_list[dups[val.name]].first_aired_regular_episode
+ start_year = (first_ep and first_ep.airdate and first_ep.airdate.year) or \
+ sorted_show_list[dups[val.name]].startyear
+ if start_year:
+ sorted_show_list[dups[val.name]].unique_name = '%s (%s)' % (
+ sorted_show_list[dups[val.name]].name,
+ start_year)
+ dups[sorted_show_list[dups[val.name]].unique_name] = i
+ if not year_check.search(sorted_show_list[i].name):
+ # add year to duplicate
+ first_ep = sorted_show_list[i].first_aired_regular_episode
+ start_year = (first_ep and first_ep.airdate and first_ep.airdate.year) or sorted_show_list[
+ i].startyear
+ if start_year:
+ sorted_show_list[i].unique_name = '%s (%s)' % (sorted_show_list[i].name, start_year)
+ dups[sorted_show_list[i].unique_name] = i
+
+ name_cache.buildNameCache()
+
+ @staticmethod
+ def sorted_show_lists():
def titler(x):
return (remove_article(x), x)[not x or sickbeard.SORT_ARTICLE].lower()
@@ -2316,7 +2353,7 @@ class Home(MainHandler):
for tag in sickbeard.SHOW_TAGS:
results = filter_list(lambda _so: _so.tag == tag, sickbeard.showList)
if results:
- sorted_show_lists.append([tag, sorted(results, key=lambda x: titler(x.name))])
+ sorted_show_lists.append([tag, sorted(results, key=lambda x: titler(x.unique_name))])
# handle orphaned shows
if len(sickbeard.showList) != sum([len(so[1]) for so in sorted_show_lists]):
used_ids = set()
@@ -2348,12 +2385,12 @@ class Home(MainHandler):
anime.append(cur_show_obj)
else:
shows.append(cur_show_obj)
- sorted_show_lists = [['Shows', sorted(shows, key=lambda x: titler(x.name))],
- ['Anime', sorted(anime, key=lambda x: titler(x.name))]]
+ sorted_show_lists = [['Shows', sorted(shows, key=lambda x: titler(x.unique_name))],
+ ['Anime', sorted(anime, key=lambda x: titler(x.unique_name))]]
else:
sorted_show_lists = [
- ['Show List', sorted(sickbeard.showList, key=lambda x: titler(x.name))]]
+ ['Show List', sorted(sickbeard.showList, key=lambda x: titler(x.unique_name))]]
return sorted_show_lists
@@ -6885,7 +6922,7 @@ class ShowTasks(Manage):
t.defunct_indexer = defunct_sql_result
t.not_found_shows = sql_result
- failed_result = my_db.select('SELECT * FROM tv_src_switch WHERE status != 0')
+ failed_result = my_db.select('SELECT * FROM tv_src_switch WHERE status != ?', [TVSWITCH_NORMAL])
t.failed_switch = []
for f in failed_result:
try:
@@ -7022,10 +7059,12 @@ class History(MainHandler):
and record['season'] == cur_result['season']
and record['episode'] == cur_result['episode']
and record['quality'] == cur_result['quality']) for record in compact]):
-
+ show_obj = helpers.find_show_by_id({cur_result['indexer']: cur_result['showid']}, no_mapped_ids=False,
+ no_exceptions=True)
cur_res = dict(show_id=cur_result['showid'], indexer=cur_result['indexer'],
tvid_prodid=cur_result['tvid_prodid'],
- show_name=cur_result['show_name'],
+ show_name=(show_obj and getattr(show_obj, 'unique_name', show_obj.name)) or
+ cur_result['show_name'],
season=cur_result['season'], episode=cur_result['episode'],
quality=cur_result['quality'], resource=cur_result['resource'], actions=[])
diff --git a/sickgear.py b/sickgear.py
index 535555f..061a120 100755
--- a/sickgear.py
+++ b/sickgear.py
@@ -522,7 +522,12 @@ class SickGear(object):
if not sickbeard.MEMCACHE.get('update_restart'):
# Build from the DB to start with
sickbeard.classes.loading_msg.message = 'Loading shows from db'
+ sickbeard.indexermapper.indexer_list = [i for i in sickbeard.TVInfoAPI().all_sources
+ if sickbeard.TVInfoAPI(i).config.get('show_url')
+ and True is not sickbeard.TVInfoAPI(i).config.get('people_only')]
self.load_shows_from_db()
+ sickbeard.MEMCACHE['history_tab'] = sickbeard.webserve.History.menu_tab(
+ sickbeard.MEMCACHE['history_tab_limit'])
if not db.DBConnection().has_flag('ignore_require_cleaned'):
from sickbeard.show_updater import clean_ignore_require_words
sickbeard.classes.loading_msg.message = 'Cleaning ignore/require words lists'
@@ -738,9 +743,11 @@ class SickGear(object):
show_obj.helper_load_failed_db(sql_result=failed_result)
sickbeard.showList.append(show_obj)
sickbeard.showDict[show_obj.sid_int] = show_obj
+ _ = show_obj.ids
except (BaseException, Exception) as err:
logger.log('There was an error creating the show in %s: %s' % (
cur_result['location'], ex(err)), logger.ERROR)
+ sickbeard.webserve.Home.make_showlist_unique_names()
@staticmethod
def restore(src_dir, dst_dir):
diff --git a/tests/db_tests.py b/tests/db_tests.py
index 7198583..e3c8bf5 100644
--- a/tests/db_tests.py
+++ b/tests/db_tests.py
@@ -30,6 +30,13 @@ class DBBasicTests(test.SickbeardTestDBCase):
super(DBBasicTests, self).setUp()
self.db = test.db.DBConnection()
+ def tearDown(self):
+ try:
+ self.db.close()
+ except (BaseException, Exception):
+ pass
+ super(DBBasicTests, self).tearDown()
+
def is_testdb(self, version):
if isinstance(version, integer_types):
return 100000 <= version
@@ -41,6 +48,30 @@ class DBBasicTests(test.SickbeardTestDBCase):
self.assertEqual(mainDB.TEST_BASE_VERSION is not None, self.is_testdb(mainDB.MAX_DB_VERSION))
self.assertEqual(failed_db.TEST_BASE_VERSION is not None, self.is_testdb(failed_db.MAX_DB_VERSION))
+ def test_mass_action(self):
+ field_list = ['show_id', 'indexer_id', 'indexer', 'show_name', 'location', 'network', 'genre', 'classification',
+ 'runtime', 'quality', 'airs', 'status', 'flatten_folders', 'paused', 'startyear', 'air_by_date',
+ 'lang', 'subtitles', 'notify_list', 'imdb_id', 'last_update_indexer', 'dvdorder',
+ 'archive_firstmatch', 'rls_require_words', 'rls_ignore_words', 'sports', 'anime', 'scene',
+ 'overview', 'tag', 'prune', 'rls_global_exclude_ignore', 'rls_global_exclude_require', 'airtime',
+ 'network_id', 'network_is_stream', 'src_update_timestamp']
+ insert_para = [123, 321, 1, 'Test Show', '', 'ABC', 'Comedy', '14', 45, 1, 'Mondays', 1, 0, 0, 2010, 0, 'en',
+ '', '', 'tt123456', 1234567, 0, 0, None, None, 0, 0, 0, 'Some Show', None, 0, None, None, 2000,
+ 4, 0, 852645]
+ result = self.db.mass_action([
+ ['REPLACE INTO tv_shows (show_id, indexer_id, indexer, show_name, location, network, genre, classification,'
+ ' runtime, quality, airs, status, flatten_folders, paused, startyear, air_by_date, lang, subtitles,'
+ ' notify_list, imdb_id, last_update_indexer, dvdorder, archive_firstmatch, rls_require_words,'
+ ' rls_ignore_words, sports, anime, scene, overview, tag, prune, rls_global_exclude_ignore,'
+ ' rls_global_exclude_require, airtime, network_id, network_is_stream, src_update_timestamp)'
+ ' VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)',
+ insert_para],
+ ['SELECT * FROM tv_shows WHERE show_id = ? AND indexer = ?', [123, 1]]
+ ])
+ for i, f in enumerate(field_list):
+ self.assertEqual(str(result[-1][0][f]), str(insert_para[i]),
+ msg='Field %s: %s != %s' % (f, result[-1][0][f], insert_para[i]))
+
if '__main__' == __name__:
print('==================')
diff --git a/tests/name_parser_tests.py b/tests/name_parser_tests.py
index 4aff59c..0922b3c 100644
--- a/tests/name_parser_tests.py
+++ b/tests/name_parser_tests.py
@@ -403,6 +403,13 @@ extra_info_no_name_tests = [('The Show Name', [('Episode 302', 3, 2)],
'REPACK.720p.AMZN.WEBRip.DDP5.1.x264'),
]
+dupe_shows = [('The Show Name', (2, 1), 1990, [('Episode 302', 3, 2)],
+ 'The.Show.Name.S03E02.REPACK.Episode.302.720p.AMZN.WEBRip.DDP5.1.x264-GROUP'),
+ ('The Show Name', (2, 2), 1995, [('Episode 302', 3, 2)],
+ 'The.Show.Name.S03E02.REPACK.Episode.302.720p.AMZN.WEBRip.DDP5.1.x264-GROUP'),
+]
+
+dupe_shows_test = [('The.Show.Name.S03E02.REPACK.Episode.302.720p.AMZN.WEBRip.DDP5.1.x264-GROUP', (2, 1), 1990)]
class InvalidCases(unittest.TestCase):
@@ -708,19 +715,70 @@ class BasicTests(unittest.TestCase):
class TVShowTest(tv.TVShow):
# noinspection PyMissingConstructor
- def __init__(self, is_anime=False, name='', prodid=0, tvid=0):
+ def __init__(self, is_anime=False, name='', prodid=1, tvid=1, year=1990):
self._anime = is_anime
self._name = name
+ self._startyear = year
+ self.unique_name = name
self.tvid = tvid
self.prodid = prodid
self.sid_int = self.create_sid(self.tvid, self.prodid)
self.sxe_ep_obj = {}
+ def __str__(self):
+ return '%s (%s)' % (self._name, self.startyear)
+
class TVEpisodeTest(tv.TVEpisode):
# noinspection PyMissingConstructor
def __init__(self, name=''):
self._name = name
+ self._tvid = 1
+ self._indexer = 1
+ self.tvid = 1
+ self._epid = 1
+ self._indexerid = 1
+ self.epid = 1
+
+
+class DupeNameTests(test.SickbeardTestDBCase):
+
+ def tearDown(self):
+ super(DupeNameTests, self).tearDown()
+ sickbeard.showList = []
+ sickbeard.showDict = {}
+ name_cache.nameCache = {}
+
+ def test_dupe_names(self):
+ sickbeard.showList = []
+ sickbeard.showDict = {}
+ name_cache.nameCache = {}
+ for case in dupe_shows:
+ tvs = TVShowTest(False, case[0], case[1][1], case[1][0], case[2])
+ for e in case[3]:
+ tvs.sxe_ep_obj.setdefault(e[1], {}).update({e[2]: TVEpisodeTest(e[0])})
+
+ sickbeard.showList.append(tvs)
+ sickbeard.showDict[tvs.sid_int] = tvs
+ sickbeard.webserve.Home.make_showlist_unique_names()
+
+ for case in dupe_shows_test:
+ for cache_check in range(6):
+ should_get_show = cache_check in (1, 3, 4)
+ should_find = cache_check in (1, 3, 4)
+ show_obj = should_get_show and sickbeard.helpers.find_show_by_id({case[1][0]: case[1][1]})
+ if 3 == cache_check:
+ show_obj = [so for so in sickbeard.showList if so != show_obj][0]
+ np = parser.NameParser(show_obj=show_obj)
+ try:
+ result = np.parse(case[0])
+ except sickbeard.name_parser.parser.InvalidShowException:
+ if not should_find:
+ continue
+ self.assertTrue(False, msg='Failed to find show')
+ if not should_find:
+ self.assertTrue(False, msg='Found show, when it should fail')
+ self.assertEqual((show_obj.tvid, show_obj.prodid), (result.show_obj.tvid, result.show_obj.prodid))
class ExtraInfoNoNameTests(test.SickbeardTestDBCase):
diff --git a/tests/scene_helpers_tests.py b/tests/scene_helpers_tests.py
index 518fad2..0662ce9 100644
--- a/tests/scene_helpers_tests.py
+++ b/tests/scene_helpers_tests.py
@@ -69,6 +69,7 @@ class SceneExceptionTestCase(test.SickbeardTestDBCase):
for s in [TVShow(TVINFO_TVDB, 79604), TVShow(TVINFO_TVDB, 251085), TVShow(TVINFO_TVDB, 78744)]:
sickbeard.showList.append(s)
sickbeard.showDict[s.sid_int] = s
+ sickbeard.webserve.Home.make_showlist_unique_names()
scene_exceptions.retrieve_exceptions()
name_cache.buildNameCache()
diff --git a/tests/test_lib.py b/tests/test_lib.py
index 1e0c4bc..43d8a68 100644
--- a/tests/test_lib.py
+++ b/tests/test_lib.py
@@ -106,6 +106,7 @@ sickbeard.CACHE_DIR = os.path.join(TESTDIR, 'cache')
sickbeard.ZONEINFO_DIR = os.path.join(TESTDIR, 'cache', 'zoneinfo')
create_test_cache_folder()
sickbeard.GUI_NAME = 'slick'
+sickbeard.MEMCACHE = {'history_tab_limit': 10, 'history_tab': []}
# =================
@@ -223,11 +224,20 @@ def teardown_test_db():
pass
for filename in glob.glob(os.path.join(TESTDIR, TESTDBNAME) + '*'):
- os.remove(filename)
+ try:
+ os.remove(filename)
+ except (BaseException, Exception):
+ pass
for filename in glob.glob(os.path.join(TESTDIR, TESTCACHEDBNAME) + '*'):
- os.remove(filename)
+ try:
+ os.remove(filename)
+ except (BaseException, Exception):
+ pass
for filename in glob.glob(os.path.join(TESTDIR, TESTFAILEDDBNAME) + '*'):
- os.remove(filename)
+ try:
+ os.remove(filename)
+ except (BaseException, Exception):
+ pass
def setup_test_episode_file():
diff --git a/tests/webapi_tests.py b/tests/webapi_tests.py
index f00813c..06a02b3 100644
--- a/tests/webapi_tests.py
+++ b/tests/webapi_tests.py
@@ -227,6 +227,8 @@ class WebAPICase(test.SickbeardTestDBCase):
history.log_download(ep_obj, '%s.S%sE%s.group.mkv' % (
show_obj.name, ep_obj.season, ep_obj.episode), quality, 'group')
+ sickbeard.webserve.Home.make_showlist_unique_names()
+
def tearDown(self):
if None is not self.org_mass_action:
db.DBConnection.mass_action = self.org_mass_action