From c16903bd738a5522cf20c8da5d87ba8ed5678741 Mon Sep 17 00:00:00 2001 From: JackDandy Date: Mon, 22 Dec 2014 18:30:53 +0000 Subject: [PATCH] Change "Daily" search to "Recent" search. --- CHANGES.md | 1 + gui/slick/interfaces/default/config_providers.tmpl | 30 +++--- gui/slick/interfaces/default/config_search.tmpl | 14 +-- gui/slick/interfaces/default/inc_bottom.tmpl | 2 +- .../interfaces/default/manage_manageSearches.tmpl | 6 +- sickbeard/__init__.py | 102 +++++++++++--------- sickbeard/config.py | 10 +- sickbeard/dailysearcher.py | 104 --------------------- sickbeard/providers/__init__.py | 14 +-- sickbeard/providers/generic.py | 2 +- sickbeard/providers/newznab.py | 6 +- sickbeard/providers/rsstorrent.py | 6 +- sickbeard/search.py | 4 +- sickbeard/searchRecent.py | 104 +++++++++++++++++++++ sickbeard/search_queue.py | 22 ++--- sickbeard/tvcache.py | 4 +- sickbeard/webserve.py | 38 ++++---- 17 files changed, 243 insertions(+), 226 deletions(-) delete mode 100644 sickbeard/dailysearcher.py create mode 100644 sickbeard/searchRecent.py diff --git a/CHANGES.md b/CHANGES.md index c7963fe..c0ba8d4 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -18,6 +18,7 @@ * Add pull request checkout option to General Config/Advanced Settings * Add BTN api call parameter debug logging * Fix anime searches on BTN provider +* Change replace "Daily-Search" with "Recent-Search" [develop changelog] diff --git a/gui/slick/interfaces/default/config_providers.tmpl b/gui/slick/interfaces/default/config_providers.tmpl index 7eaccdf..8c4c31e 100644 --- a/gui/slick/interfaces/default/config_providers.tmpl +++ b/gui/slick/interfaces/default/config_providers.tmpl @@ -214,13 +214,13 @@ #end if - #if $hasattr($curNewznabProvider, 'enable_daily'): + #if $hasattr($curNewznabProvider, 'enable_recentsearch'):
@@ -292,13 +292,13 @@ #end if - #if $hasattr($curNzbProvider, 'enable_daily'): + #if $hasattr($curNzbProvider, 'enable_recentsearch'):
@@ -512,13 +512,13 @@ #end if - #if $hasattr($curTorrentProvider, 'enable_daily'): + #if $hasattr($curTorrentProvider, 'enable_recentsearch'):
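All three provider-settings hunks above hinge on the same gate: the page only renders the checkbox for a provider whose class defines the renamed flag. A minimal sketch of that pattern, with the class body reduced to the two flags this patch touches (the real attributes are set in sickbeard/providers/generic.py further down):

    # Sketch: render the "enable recent searches" checkbox only for
    # providers whose class carries the renamed flag.
    class GenericProvider(object):
        def __init__(self):
            self.enable_recentsearch = False  # renamed from enable_daily
            self.enable_backlog = False

    provider = GenericProvider()
    if hasattr(provider, 'enable_recentsearch'):
        print('render the enable-recent-searches checkbox')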
diff --git a/gui/slick/interfaces/default/config_search.tmpl b/gui/slick/interfaces/default/config_search.tmpl index 9a4182f..92cc1e7 100755 --- a/gui/slick/interfaces/default/config_search.tmpl +++ b/gui/slick/interfaces/default/config_search.tmpl @@ -90,10 +90,10 @@
@@ -139,11 +139,11 @@
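The two config_search.tmpl hunks above rename the search-settings form fields; the values they submit arrive in webserve.py as recentsearch_frequency and recentsearch_startup (see the saveSearch changes near the end of this patch), and the frequency is clamped in sickbeard/config.py. A condensed sketch of that clamp, with to_int simplified from the real helper and clamp_recentsearch_frequency as a hypothetical name:

    MIN_RECENTSEARCH_FREQUENCY = 10      # minutes; floor enforced on save
    DEFAULT_RECENTSEARCH_FREQUENCY = 40  # minutes

    def to_int(val, default=0):
        # simplified stand-in for sickbeard.config.to_int
        try:
            return int(val)
        except (TypeError, ValueError):
            return default

    def clamp_recentsearch_frequency(freq):
        # mirrors config.change_RECENTSEARCH_FREQUENCY: never poll more
        # often than the minimum allows
        freq = to_int(freq, default=DEFAULT_RECENTSEARCH_FREQUENCY)
        return max(freq, MIN_RECENTSEARCH_FREQUENCY)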
diff --git a/gui/slick/interfaces/default/inc_bottom.tmpl b/gui/slick/interfaces/default/inc_bottom.tmpl index 1f1b3df..9d8d38a 100644 --- a/gui/slick/interfaces/default/inc_bottom.tmpl +++ b/gui/slick/interfaces/default/inc_bottom.tmpl @@ -62,7 +62,7 @@ )[0 < ep_snatched] %>  / $ep_total episodes downloaded - | daily search: <%= str(sickbeard.dailySearchScheduler.timeLeft()).split('.')[0] %> + | recent search: <%= str(sickbeard.recentSearchScheduler.timeLeft()).split('.')[0] %> | backlog search: $sbdatetime.sbdatetime.sbfdate($sickbeard.backlogSearchScheduler.nextRun()) diff --git a/gui/slick/interfaces/default/manage_manageSearches.tmpl b/gui/slick/interfaces/default/manage_manageSearches.tmpl index a3de903..8a281b1 100644 --- a/gui/slick/interfaces/default/manage_manageSearches.tmpl +++ b/gui/slick/interfaces/default/manage_manageSearches.tmpl @@ -29,9 +29,9 @@ Currently running
#end if
-Daily Search:
+Recent Search:
 Force
-#if not $dailySearchStatus:
+#if not $recentSearchStatus:
 Not in progress
 #else:
 In Progress
@@ -53,7 +53,7 @@ In Progress

Search Queue:

Backlog: $queueLength['backlog'] pending items
-Daily: $queueLength['daily'] pending items
+Recent: $queueLength['recent'] pending items
Manual: $queueLength['manual'] pending items
Failed: $queueLength['failed'] pending items
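The sickbeard/__init__.py diff below does more than rename globals: each legacy dailysearch config key is read first with a 'deprecated' sentinel, and only when it is absent does the new recentsearch key (with its normal default) apply, so an upgraded install keeps its settings. A condensed sketch of the pattern; read_migrated_flag is a hypothetical helper, and the stand-in check_setting_int is assumed to return the supplied default when the key is missing, as in sickbeard.config:

    def check_setting_int(cfg, section, key, default):
        # simplified stand-in: the real version lives in sickbeard.config
        try:
            return int(cfg[section][key])
        except (KeyError, TypeError, ValueError):
            return default

    def read_migrated_flag(cfg, section, old_key, new_key, default=1):
        # Prefer the legacy key so an upgrade preserves the user's choice;
        # 'deprecated' is a sentinel no stored value will ever equal.
        value = check_setting_int(cfg, section, old_key, 'deprecated')
        if 'deprecated' == value:
            value = check_setting_int(cfg, section, new_key, default)
        return bool(value)

    # e.g. RECENTSEARCH_STARTUP = read_migrated_flag(
    #          CFG, 'General', 'dailysearch_startup', 'recentsearch_startup')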
diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index 8cbddb7..bad13bc 100755 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -42,7 +42,7 @@ from sickbeard import searchBacklog, showUpdater, versionChecker, properFinder, from sickbeard import helpers, db, exceptions, show_queue, search_queue, scheduler, show_name_helpers from sickbeard import logger from sickbeard import naming -from sickbeard import dailysearcher +from sickbeard import searchRecent from sickbeard import scene_numbering, scene_exceptions, name_cache from indexers.indexer_api import indexerApi from indexers.indexer_exceptions import indexer_shownotfound, indexer_exception, indexer_error, indexer_episodenotfound, \ @@ -78,7 +78,7 @@ NO_RESIZE = False # system events events = None -dailySearchScheduler = None +recentSearchScheduler = None backlogSearchScheduler = None showUpdateScheduler = None versionCheckScheduler = None @@ -196,19 +196,19 @@ CHECK_PROPERS_INTERVAL = None ALLOW_HIGH_PRIORITY = False AUTOPOSTPROCESSER_FREQUENCY = None -DAILYSEARCH_FREQUENCY = None +RECENTSEARCH_FREQUENCY = None UPDATE_FREQUENCY = None -DAILYSEARCH_STARTUP = False +RECENTSEARCH_STARTUP = False BACKLOG_FREQUENCY = None BACKLOG_STARTUP = False DEFAULT_AUTOPOSTPROCESSER_FREQUENCY = 10 -DEFAULT_DAILYSEARCH_FREQUENCY = 40 +DEFAULT_RECENTSEARCH_FREQUENCY = 40 DEFAULT_BACKLOG_FREQUENCY = 21 DEFAULT_UPDATE_FREQUENCY = 1 MIN_AUTOPOSTPROCESSER_FREQUENCY = 1 -MIN_DAILYSEARCH_FREQUENCY = 10 +MIN_RECENTSEARCH_FREQUENCY = 10 MIN_BACKLOG_FREQUENCY = 10 MIN_UPDATE_FREQUENCY = 1 @@ -459,7 +459,7 @@ TRAKT_API_KEY = 'abd806c54516240c76e4ebc9c5ccf394' __INITIALIZED__ = False def get_backlog_cycle_time(): - cycletime = DAILYSEARCH_FREQUENCY * 2 + 7 + cycletime = RECENTSEARCH_FREQUENCY * 2 + 7 return max([cycletime, 720]) def initialize(consoleLogging=True): @@ -477,7 +477,7 @@ def initialize(consoleLogging=True): PLEX_SERVER_HOST, PLEX_HOST, PLEX_USERNAME, PLEX_PASSWORD, DEFAULT_BACKLOG_FREQUENCY, MIN_BACKLOG_FREQUENCY, BACKLOG_STARTUP, SKIP_REMOVED_FILES, \ showUpdateScheduler, __INITIALIZED__, LAUNCH_BROWSER, UPDATE_SHOWS_ON_START, TRASH_REMOVE_SHOW, TRASH_ROTATE_LOGS, HOME_SEARCH_FOCUS, SORT_ARTICLE, showList, loadingShowList, \ NEWZNAB_DATA, NZBS, NZBS_UID, NZBS_HASH, INDEXER_DEFAULT, INDEXER_TIMEOUT, USENET_RETENTION, TORRENT_DIR, \ - QUALITY_DEFAULT, FLATTEN_FOLDERS_DEFAULT, SUBTITLES_DEFAULT, STATUS_DEFAULT, DAILYSEARCH_STARTUP, \ + QUALITY_DEFAULT, FLATTEN_FOLDERS_DEFAULT, SUBTITLES_DEFAULT, STATUS_DEFAULT, RECENTSEARCH_STARTUP, \ GROWL_NOTIFY_ONSNATCH, GROWL_NOTIFY_ONDOWNLOAD, GROWL_NOTIFY_ONSUBTITLEDOWNLOAD, TWITTER_NOTIFY_ONSNATCH, TWITTER_NOTIFY_ONDOWNLOAD, TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD, \ USE_GROWL, GROWL_HOST, GROWL_PASSWORD, USE_PROWL, PROWL_NOTIFY_ONSNATCH, PROWL_NOTIFY_ONDOWNLOAD, PROWL_NOTIFY_ONSUBTITLEDOWNLOAD, PROWL_API, PROWL_PRIORITY, PROG_DIR, \ USE_PYTIVO, PYTIVO_NOTIFY_ONSNATCH, PYTIVO_NOTIFY_ONDOWNLOAD, PYTIVO_NOTIFY_ONSUBTITLEDOWNLOAD, PYTIVO_UPDATE_LIBRARY, PYTIVO_HOST, PYTIVO_SHARE_NAME, PYTIVO_TIVO_NAME, \ @@ -485,12 +485,12 @@ def initialize(consoleLogging=True): USE_PUSHALOT, PUSHALOT_NOTIFY_ONSNATCH, PUSHALOT_NOTIFY_ONDOWNLOAD, PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHALOT_AUTHORIZATIONTOKEN, \ USE_PUSHBULLET, PUSHBULLET_NOTIFY_ONSNATCH, PUSHBULLET_NOTIFY_ONDOWNLOAD, PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHBULLET_API, PUSHBULLET_DEVICE, \ versionCheckScheduler, VERSION_NOTIFY, AUTO_UPDATE, NOTIFY_ON_UPDATE, PROCESS_AUTOMATICALLY, UNPACK, CPU_PRESET, \ - KEEP_PROCESSED_DIR, PROCESS_METHOD, 
TV_DOWNLOAD_DIR, MIN_DAILYSEARCH_FREQUENCY, DEFAULT_UPDATE_FREQUENCY, MIN_UPDATE_FREQUENCY, UPDATE_FREQUENCY, \ + KEEP_PROCESSED_DIR, PROCESS_METHOD, TV_DOWNLOAD_DIR, MIN_RECENTSEARCH_FREQUENCY, DEFAULT_UPDATE_FREQUENCY, MIN_UPDATE_FREQUENCY, UPDATE_FREQUENCY, \ showQueueScheduler, searchQueueScheduler, ROOT_DIRS, CACHE_DIR, ACTUAL_CACHE_DIR, TIMEZONE_DISPLAY, \ NAMING_PATTERN, NAMING_MULTI_EP, NAMING_ANIME_MULTI_EP, NAMING_FORCE_FOLDERS, NAMING_ABD_PATTERN, NAMING_CUSTOM_ABD, NAMING_SPORTS_PATTERN, NAMING_CUSTOM_SPORTS, NAMING_ANIME_PATTERN, NAMING_CUSTOM_ANIME, NAMING_STRIP_YEAR, \ RENAME_EPISODES, AIRDATE_EPISODES, properFinderScheduler, PROVIDER_ORDER, autoPostProcesserScheduler, \ WOMBLE, OMGWTFNZBS, OMGWTFNZBS_USERNAME, OMGWTFNZBS_APIKEY, providerList, newznabProviderList, torrentRssProviderList, \ - EXTRA_SCRIPTS, USE_TWITTER, TWITTER_USERNAME, TWITTER_PASSWORD, TWITTER_PREFIX, DAILYSEARCH_FREQUENCY, \ + EXTRA_SCRIPTS, USE_TWITTER, TWITTER_USERNAME, TWITTER_PASSWORD, TWITTER_PREFIX, RECENTSEARCH_FREQUENCY, \ USE_BOXCAR, BOXCAR_USERNAME, BOXCAR_PASSWORD, BOXCAR_NOTIFY_ONDOWNLOAD, BOXCAR_NOTIFY_ONSUBTITLEDOWNLOAD, BOXCAR_NOTIFY_ONSNATCH, \ USE_BOXCAR2, BOXCAR2_ACCESSTOKEN, BOXCAR2_NOTIFY_ONDOWNLOAD, BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD, BOXCAR2_NOTIFY_ONSNATCH, \ USE_PUSHOVER, PUSHOVER_USERKEY, PUSHOVER_APIKEY, PUSHOVER_NOTIFY_ONDOWNLOAD, PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD, PUSHOVER_NOTIFY_ONSNATCH, \ @@ -498,7 +498,7 @@ def initialize(consoleLogging=True): USE_SYNOLOGYNOTIFIER, SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH, SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD, SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD, \ USE_EMAIL, EMAIL_HOST, EMAIL_PORT, EMAIL_TLS, EMAIL_USER, EMAIL_PASSWORD, EMAIL_FROM, EMAIL_NOTIFY_ONSNATCH, EMAIL_NOTIFY_ONDOWNLOAD, EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD, EMAIL_LIST, \ USE_LISTVIEW, METADATA_XBMC, METADATA_XBMC_12PLUS, METADATA_MEDIABROWSER, METADATA_PS3, metadata_provider_dict, \ - NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, POSTPONE_IF_SYNC_FILES, dailySearchScheduler, NFO_RENAME, \ + NEWZBIN, NEWZBIN_USERNAME, NEWZBIN_PASSWORD, GIT_PATH, MOVE_ASSOCIATED_FILES, POSTPONE_IF_SYNC_FILES, recentSearchScheduler, NFO_RENAME, \ GUI_NAME, HOME_LAYOUT, HISTORY_LAYOUT, DISPLAY_SHOW_SPECIALS, COMING_EPS_LAYOUT, COMING_EPS_SORT, COMING_EPS_DISPLAY_PAUSED, COMING_EPS_MISSED_RANGE, FUZZY_DATING, TRIM_ZERO, DATE_PRESET, TIME_PRESET, TIME_PRESET_W_SECONDS, THEME_NAME, \ POSTER_SORTBY, POSTER_SORTDIR, \ METADATA_WDTV, METADATA_TIVO, METADATA_MEDE8ER, IGNORE_WORDS, REQUIRE_WORDS, CALENDAR_UNPROTECTED, CREATE_MISSING_SHOW_DIRS, \ @@ -674,7 +674,10 @@ def initialize(consoleLogging=True): ALLOW_HIGH_PRIORITY = bool(check_setting_int(CFG, 'General', 'allow_high_priority', 1)) - DAILYSEARCH_STARTUP = bool(check_setting_int(CFG, 'General', 'dailysearch_startup', 1)) + RECENTSEARCH_STARTUP = check_setting_int(CFG, 'General', 'dailysearch_startup', 'deprecated') + if 'deprecated' == RECENTSEARCH_STARTUP: + RECENTSEARCH_STARTUP = check_setting_int(CFG, 'General', 'recentsearch_startup', 1) + RECENTSEARCH_STARTUP = bool(RECENTSEARCH_STARTUP) BACKLOG_STARTUP = bool(check_setting_int(CFG, 'General', 'backlog_startup', 1)) SKIP_REMOVED_FILES = bool(check_setting_int(CFG, 'General', 'skip_removed_files', 0)) @@ -685,10 +688,13 @@ def initialize(consoleLogging=True): if AUTOPOSTPROCESSER_FREQUENCY < MIN_AUTOPOSTPROCESSER_FREQUENCY: AUTOPOSTPROCESSER_FREQUENCY = MIN_AUTOPOSTPROCESSER_FREQUENCY - DAILYSEARCH_FREQUENCY = check_setting_int(CFG, 'General', 'dailysearch_frequency', - 
DEFAULT_DAILYSEARCH_FREQUENCY) - if DAILYSEARCH_FREQUENCY < MIN_DAILYSEARCH_FREQUENCY: - DAILYSEARCH_FREQUENCY = MIN_DAILYSEARCH_FREQUENCY + RECENTSEARCH_FREQUENCY = check_setting_int(CFG, 'General', 'dailysearch_frequency', + 'deprecated') + if 'deprecated' == RECENTSEARCH_FREQUENCY: + RECENTSEARCH_FREQUENCY = check_setting_int(CFG, 'General', 'recentsearch_frequency', + DEFAULT_RECENTSEARCH_FREQUENCY) + if RECENTSEARCH_FREQUENCY < MIN_RECENTSEARCH_FREQUENCY: + RECENTSEARCH_FREQUENCY = MIN_RECENTSEARCH_FREQUENCY MIN_BACKLOG_FREQUENCY = get_backlog_cycle_time() BACKLOG_FREQUENCY = check_setting_int(CFG, 'General', 'backlog_frequency', DEFAULT_BACKLOG_FREQUENCY) @@ -1022,10 +1028,15 @@ def initialize(consoleLogging=True): curTorrentProvider.getID() + '_search_fallback', 0)) - if hasattr(curTorrentProvider, 'enable_daily'): - curTorrentProvider.enable_daily = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), - curTorrentProvider.getID() + '_enable_daily', - 1)) + if hasattr(curTorrentProvider, 'enable_recentsearch'): + curTorrentProvider.enable_recentsearch = check_setting_int(CFG, curTorrentProvider.getID().upper(), + curTorrentProvider.getID() + '_enable_dailysearch', + 'deprecated') + if 'deprecated' == curTorrentProvider.enable_recentsearch: + curTorrentProvider.enable_recentsearch = check_setting_int(CFG, curTorrentProvider.getID().upper(), + curTorrentProvider.getID() + '_enable_recentsearch', + 1) + curTorrentProvider.enable_recentsearch = bool(curTorrentProvider.enable_recentsearch) if hasattr(curTorrentProvider, 'enable_backlog'): curTorrentProvider.enable_backlog = bool(check_setting_int(CFG, curTorrentProvider.getID().upper(), @@ -1050,10 +1061,15 @@ def initialize(consoleLogging=True): curNzbProvider.search_fallback = bool(check_setting_int(CFG, curNzbProvider.getID().upper(), curNzbProvider.getID() + '_search_fallback', 0)) - if hasattr(curNzbProvider, 'enable_daily'): - curNzbProvider.enable_daily = bool(check_setting_int(CFG, curNzbProvider.getID().upper(), - curNzbProvider.getID() + '_enable_daily', - 1)) + if hasattr(curNzbProvider, 'enable_recentsearch'): + curNzbProvider.enable_recentsearch = check_setting_int(CFG, curNzbProvider.getID().upper(), + curNzbProvider.getID() + '_enable_dailysearch', + 'deprecated') + if 'deprecated' == curNzbProvider.enable_recentsearch: + curNzbProvider.enable_recentsearch = check_setting_int(CFG, curNzbProvider.getID().upper(), + curNzbProvider.getID() + '_enable_recentsearch', + 1) + curNzbProvider.enable_recentsearch = bool(curNzbProvider.enable_recentsearch) if hasattr(curNzbProvider, 'enable_backlog'): curNzbProvider.enable_backlog = bool(check_setting_int(CFG, curNzbProvider.getID().upper(), @@ -1124,11 +1140,11 @@ def initialize(consoleLogging=True): cycleTime=datetime.timedelta(seconds=3), threadName="SEARCHQUEUE") - update_interval = datetime.timedelta(minutes=DAILYSEARCH_FREQUENCY) - dailySearchScheduler = scheduler.Scheduler(dailysearcher.DailySearcher(), + update_interval = datetime.timedelta(minutes=RECENTSEARCH_FREQUENCY) + recentSearchScheduler = scheduler.Scheduler(searchRecent.RecentSearcher(), cycleTime=update_interval, - threadName="DAILYSEARCHER", - run_delay=update_now if DAILYSEARCH_STARTUP + threadName="RECENTSEARCHER", + run_delay=update_now if RECENTSEARCH_STARTUP else update_interval) update_interval = datetime.timedelta(minutes=BACKLOG_FREQUENCY) @@ -1181,15 +1197,15 @@ def start(): showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \ properFinderScheduler, autoPostProcesserScheduler, 
searchQueueScheduler, \ subtitlesFinderScheduler, USE_SUBTITLES, traktCheckerScheduler, \ - dailySearchScheduler, events, started + recentSearchScheduler, events, started with INIT_LOCK: if __INITIALIZED__: # start sysetm events queue events.start() - # start the daily search scheduler - dailySearchScheduler.start() + # start the recent search scheduler + recentSearchScheduler.start() # start the backlog scheduler backlogSearchScheduler.start() @@ -1230,7 +1246,7 @@ def halt(): showUpdateScheduler, versionCheckScheduler, showQueueScheduler, \ properFinderScheduler, autoPostProcesserScheduler, searchQueueScheduler, \ subtitlesFinderScheduler, traktCheckerScheduler, \ - dailySearchScheduler, events, started + recentSearchScheduler, events, started with INIT_LOCK: @@ -1245,10 +1261,10 @@ def halt(): except: pass - dailySearchScheduler.stop.set() - logger.log(u"Waiting for the DAILYSEARCH thread to exit") + recentSearchScheduler.stop.set() + logger.log(u"Waiting for the RECENTSEARCH thread to exit") try: - dailySearchScheduler.join(10) + recentSearchScheduler.join(10) except: pass @@ -1397,13 +1413,13 @@ def save_config(): new_config['General']['torrent_method'] = TORRENT_METHOD new_config['General']['usenet_retention'] = int(USENET_RETENTION) new_config['General']['autopostprocesser_frequency'] = int(AUTOPOSTPROCESSER_FREQUENCY) - new_config['General']['dailysearch_frequency'] = int(DAILYSEARCH_FREQUENCY) + new_config['General']['recentsearch_frequency'] = int(RECENTSEARCH_FREQUENCY) new_config['General']['backlog_frequency'] = int(BACKLOG_FREQUENCY) new_config['General']['update_frequency'] = int(UPDATE_FREQUENCY) new_config['General']['download_propers'] = int(DOWNLOAD_PROPERS) new_config['General']['check_propers_interval'] = CHECK_PROPERS_INTERVAL new_config['General']['allow_high_priority'] = int(ALLOW_HIGH_PRIORITY) - new_config['General']['dailysearch_startup'] = int(DAILYSEARCH_STARTUP) + new_config['General']['recentsearch_startup'] = int(RECENTSEARCH_STARTUP) new_config['General']['backlog_startup'] = int(BACKLOG_STARTUP) new_config['General']['skip_removed_files'] = int(SKIP_REMOVED_FILES) new_config['General']['quality_default'] = int(QUALITY_DEFAULT) @@ -1526,9 +1542,9 @@ def save_config(): if hasattr(curTorrentProvider, 'search_fallback'): new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_search_fallback'] = int( curTorrentProvider.search_fallback) - if hasattr(curTorrentProvider, 'enable_daily'): - new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_enable_daily'] = int( - curTorrentProvider.enable_daily) + if hasattr(curTorrentProvider, 'enable_recentsearch'): + new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_enable_recentsearch'] = int( + curTorrentProvider.enable_recentsearch) if hasattr(curTorrentProvider, 'enable_backlog'): new_config[curTorrentProvider.getID().upper()][curTorrentProvider.getID() + '_enable_backlog'] = int( curTorrentProvider.enable_backlog) @@ -1550,9 +1566,9 @@ def save_config(): if hasattr(curNzbProvider, 'search_fallback'): new_config[curNzbProvider.getID().upper()][curNzbProvider.getID() + '_search_fallback'] = int( curNzbProvider.search_fallback) - if hasattr(curNzbProvider, 'enable_daily'): - new_config[curNzbProvider.getID().upper()][curNzbProvider.getID() + '_enable_daily'] = int( - curNzbProvider.enable_daily) + if hasattr(curNzbProvider, 'enable_recentsearch'): + new_config[curNzbProvider.getID().upper()][curNzbProvider.getID() + '_enable_recentsearch'] = int( + 
curNzbProvider.enable_recentsearch) if hasattr(curNzbProvider, 'enable_backlog'): new_config[curNzbProvider.getID().upper()][curNzbProvider.getID() + '_enable_backlog'] = int( curNzbProvider.enable_backlog) diff --git a/sickbeard/config.py b/sickbeard/config.py index f630192..a87dabf 100644 --- a/sickbeard/config.py +++ b/sickbeard/config.py @@ -155,13 +155,13 @@ def change_AUTOPOSTPROCESSER_FREQUENCY(freq): sickbeard.autoPostProcesserScheduler.cycleTime = datetime.timedelta(minutes=sickbeard.AUTOPOSTPROCESSER_FREQUENCY) -def change_DAILYSEARCH_FREQUENCY(freq): - sickbeard.DAILYSEARCH_FREQUENCY = to_int(freq, default=sickbeard.DEFAULT_DAILYSEARCH_FREQUENCY) +def change_RECENTSEARCH_FREQUENCY(freq): + sickbeard.RECENTSEARCH_FREQUENCY = to_int(freq, default=sickbeard.DEFAULT_RECENTSEARCH_FREQUENCY) - if sickbeard.DAILYSEARCH_FREQUENCY < sickbeard.MIN_DAILYSEARCH_FREQUENCY: - sickbeard.DAILYSEARCH_FREQUENCY = sickbeard.MIN_DAILYSEARCH_FREQUENCY + if sickbeard.RECENTSEARCH_FREQUENCY < sickbeard.MIN_RECENTSEARCH_FREQUENCY: + sickbeard.RECENTSEARCH_FREQUENCY = sickbeard.MIN_RECENTSEARCH_FREQUENCY - sickbeard.dailySearchScheduler.cycleTime = datetime.timedelta(minutes=sickbeard.DAILYSEARCH_FREQUENCY) + sickbeard.recentSearchScheduler.cycleTime = datetime.timedelta(minutes=sickbeard.RECENTSEARCH_FREQUENCY) def change_BACKLOG_FREQUENCY(freq): sickbeard.BACKLOG_FREQUENCY = to_int(freq, default=sickbeard.DEFAULT_BACKLOG_FREQUENCY) diff --git a/sickbeard/dailysearcher.py b/sickbeard/dailysearcher.py deleted file mode 100644 index d51b542..0000000 --- a/sickbeard/dailysearcher.py +++ /dev/null @@ -1,104 +0,0 @@ -# Author: Nic Wolfe -# URL: http://code.google.com/p/sickbeard/ -# -# This file is part of SickGear. -# -# SickGear is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# SickGear is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with SickGear. If not, see . - -from __future__ import with_statement - -import datetime -import threading -import traceback - -import sickbeard -from sickbeard import logger -from sickbeard import db -from sickbeard import common -from sickbeard import helpers -from sickbeard import exceptions -from sickbeard import network_timezones -from sickbeard.exceptions import ex - - -class DailySearcher(): - def __init__(self): - self.lock = threading.Lock() - self.amActive = False - - def run(self, force=False): - - self.amActive = True - - logger.log(u"Searching for new released episodes ...") - - if not network_timezones.network_dict: - network_timezones.update_network_dict() - - if network_timezones.network_dict: - curDate = (datetime.date.today() + datetime.timedelta(days=1)).toordinal() - else: - curDate = (datetime.date.today() - datetime.timedelta(days=2)).toordinal() - - curTime = datetime.datetime.now(network_timezones.sb_timezone) - - myDB = db.DBConnection() - sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status = ? 
AND season > 0 AND airdate <= ?", - [common.UNAIRED, curDate]) - - sql_l = [] - show = None - - for sqlEp in sqlResults: - try: - if not show or int(sqlEp["showid"]) != show.indexerid: - show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"])) - - # for when there is orphaned series in the database but not loaded into our showlist - if not show: - continue - - except exceptions.MultipleShowObjectsException: - logger.log(u"ERROR: expected to find a single show matching " + str(sqlEp['showid'])) - continue - - try: - end_time = network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network) + datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60)) - # filter out any episodes that haven't aried yet - if end_time > curTime: - continue - except: - # if an error occured assume the episode hasn't aired yet - continue - - ep = show.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"])) - with ep.lock: - if ep.show.paused: - ep.status = common.SKIPPED - else: - ep.status = common.WANTED - - sql_l.append(ep.get_sql()) - else: - logger.log(u"No new released episodes found ...") - - if len(sql_l) > 0: - myDB = db.DBConnection() - myDB.mass_action(sql_l) - - # queue episode for daily search - dailysearch_queue_item = sickbeard.search_queue.DailySearchQueueItem() - sickbeard.searchQueueScheduler.action.add_item(dailysearch_queue_item) - - self.amActive = False \ No newline at end of file diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index c4e752f..d20a101 100755 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -97,7 +97,7 @@ def getNewznabProviderList(data): providerDict[curDefault.name].needs_auth = curDefault.needs_auth providerDict[curDefault.name].search_mode = curDefault.search_mode providerDict[curDefault.name].search_fallback = curDefault.search_fallback - providerDict[curDefault.name].enable_daily = curDefault.enable_daily + providerDict[curDefault.name].enable_recentsearch = curDefault.enable_recentsearch providerDict[curDefault.name].enable_backlog = curDefault.enable_backlog return filter(lambda x: x, providerList) @@ -109,13 +109,13 @@ def makeNewznabProvider(configString): search_mode = 'eponly' search_fallback = 0 - enable_daily = 0 + enable_recentsearch = 0 enable_backlog = 0 try: values = configString.split('|') if len(values) == 9: - name, url, key, catIDs, enabled, search_mode, search_fallback, enable_daily, enable_backlog = values + name, url, key, catIDs, enabled, search_mode, search_fallback, enable_recentsearch, enable_backlog = values else: name = values[0] url = values[1] @@ -129,7 +129,7 @@ def makeNewznabProvider(configString): newznab = sys.modules['sickbeard.providers.newznab'] newProvider = newznab.NewznabProvider(name, url, key=key, catIDs=catIDs, search_mode=search_mode, - search_fallback=search_fallback, enable_daily=enable_daily, + search_fallback=search_fallback, enable_recentsearch=enable_recentsearch, enable_backlog=enable_backlog) newProvider.enabled = enabled == '1' @@ -157,13 +157,13 @@ def makeTorrentRssProvider(configString): cookies = None search_mode = 'eponly' search_fallback = 0 - enable_daily = 0 + enable_recentsearch = 0 enable_backlog = 0 try: values = configString.split('|') if len(values) == 8: - name, url, cookies, enabled, search_mode, search_fallback, enable_daily, enable_backlog = values + name, url, cookies, enabled, search_mode, search_fallback, enable_recentsearch, enable_backlog = values else: name = values[0] url = values[1] @@ -178,7 +178,7 @@ def 
makeTorrentRssProvider(configString): except: return - newProvider = torrentRss.TorrentRssProvider(name, url, cookies, search_mode, search_fallback, enable_daily, + newProvider = torrentRss.TorrentRssProvider(name, url, cookies, search_mode, search_fallback, enable_recentsearch, enable_backlog) newProvider.enabled = enabled == '1' diff --git a/sickbeard/providers/generic.py b/sickbeard/providers/generic.py index 46a45d3..f2a401a 100644 --- a/sickbeard/providers/generic.py +++ b/sickbeard/providers/generic.py @@ -56,7 +56,7 @@ class GenericProvider: self.search_mode = None self.search_fallback = False - self.enable_daily = False + self.enable_recentsearch = False self.enable_backlog = False self.cache = tvcache.TVCache(self) diff --git a/sickbeard/providers/newznab.py b/sickbeard/providers/newznab.py index ebc6e82..345e2c2 100755 --- a/sickbeard/providers/newznab.py +++ b/sickbeard/providers/newznab.py @@ -43,7 +43,7 @@ from lib.bencode import bdecode class NewznabProvider(generic.NZBProvider): def __init__(self, name, url, key='', catIDs='5030,5040', search_mode='eponly', search_fallback=False, - enable_daily=False, enable_backlog=False): + enable_recentsearch=False, enable_backlog=False): generic.NZBProvider.__init__(self, name) @@ -55,7 +55,7 @@ class NewznabProvider(generic.NZBProvider): self.search_mode = search_mode self.search_fallback = search_fallback - self.enable_daily = enable_daily + self.enable_recentsearch = enable_recentsearch self.enable_backlog = enable_backlog # a 0 in the key spot indicates that no key is needed @@ -77,7 +77,7 @@ class NewznabProvider(generic.NZBProvider): def configStr(self): return self.name + '|' + self.url + '|' + self.key + '|' + self.catIDs + '|' + str( int(self.enabled)) + '|' + self.search_mode + '|' + str(int(self.search_fallback)) + '|' + str( - int(self.enable_daily)) + '|' + str(int(self.enable_backlog)) + int(self.enable_recentsearch)) + '|' + str(int(self.enable_backlog)) def imageName(self): if ek.ek(os.path.isfile, diff --git a/sickbeard/providers/rsstorrent.py b/sickbeard/providers/rsstorrent.py index 12ca62a..d65e33b 100644 --- a/sickbeard/providers/rsstorrent.py +++ b/sickbeard/providers/rsstorrent.py @@ -36,7 +36,7 @@ from lib.bencode import bdecode class TorrentRssProvider(generic.TorrentProvider): - def __init__(self, name, url, cookies='', search_mode='eponly', search_fallback=False, enable_daily=False, + def __init__(self, name, url, cookies='', search_mode='eponly', search_fallback=False, enable_recentsearch=False, enable_backlog=False): generic.TorrentProvider.__init__(self, name) self.cache = TorrentRssCache(self) @@ -48,7 +48,7 @@ class TorrentRssProvider(generic.TorrentProvider): self.search_mode = search_mode self.search_fallback = search_fallback - self.enable_daily = enable_daily + self.enable_recentsearch = enable_recentsearch self.enable_backlog = enable_backlog self.cookies = cookies @@ -59,7 +59,7 @@ class TorrentRssProvider(generic.TorrentProvider): self.enabled, self.search_mode or '', self.search_fallback, - self.enable_daily, + self.enable_recentsearch, self.enable_backlog) def imageName(self): diff --git a/sickbeard/search.py b/sickbeard/search.py index fcb0ee1..6094807 100644 --- a/sickbeard/search.py +++ b/sickbeard/search.py @@ -375,7 +375,7 @@ def searchForNeededEpisodes(): episodes.extend(wantedEpisodes(curShow, fromDate)) - providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and x.enable_daily] + providers = [x for x in sickbeard.providers.sortedProviderList() if x.isActive() and 
x.enable_recentsearch]
 
     for curProvider in providers:
         # spawn separate threads for each provider so we don't need to wait for providers with slow network operation
@@ -435,7 +435,7 @@
     if not didSearch:
         logger.log(
-            u"No NZB/Torrent providers found or enabled in the SickGear config for daily searches. Please check your settings.",
+            u"No NZB/Torrent providers found or enabled in the SickGear config for recent searches. Please check your settings.",
             logger.ERROR)
 
     return foundResults.values()
diff --git a/sickbeard/searchRecent.py b/sickbeard/searchRecent.py
new file mode 100644
index 0000000..dcaf131
--- /dev/null
+++ b/sickbeard/searchRecent.py
@@ -0,0 +1,104 @@
+# Author: Nic Wolfe
+# URL: http://code.google.com/p/sickbeard/
+#
+# This file is part of SickGear.
+#
+# SickGear is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickGear is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickGear. If not, see .
+
+from __future__ import with_statement
+
+import datetime
+import threading
+import traceback
+
+import sickbeard
+from sickbeard import logger
+from sickbeard import db
+from sickbeard import common
+from sickbeard import helpers
+from sickbeard import exceptions
+from sickbeard import network_timezones
+from sickbeard.exceptions import ex
+
+
+class RecentSearcher():
+    def __init__(self):
+        self.lock = threading.Lock()
+        self.amActive = False
+
+    def run(self, force=False):
+
+        self.amActive = True
+
+        logger.log(u"Searching for newly released episodes ...")
+
+        if not network_timezones.network_dict:
+            network_timezones.update_network_dict()
+
+        if network_timezones.network_dict:
+            curDate = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
+        else:
+            curDate = (datetime.date.today() - datetime.timedelta(days=2)).toordinal()
+
+        curTime = datetime.datetime.now(network_timezones.sb_timezone)
+
+        myDB = db.DBConnection()
+        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?",
+                                 [common.UNAIRED, curDate])
+
+        sql_l = []
+        show = None
+
+        for sqlEp in sqlResults:
+            try:
+                if not show or int(sqlEp["showid"]) != show.indexerid:
+                    show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
+
+                # for when there is an orphaned series in the database but not loaded into our showlist
+                if not show:
+                    continue
+
+            except exceptions.MultipleShowObjectsException:
+                logger.log(u"ERROR: expected to find a single show matching " + str(sqlEp['showid']))
+                continue
+
+            try:
+                end_time = network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network) + datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60))
+                # filter out any episodes that haven't aired yet
+                if end_time > curTime:
+                    continue
+            except:
+                # if an error occurred, assume the episode hasn't aired yet
+                continue
+
+            ep = show.getEpisode(int(sqlEp["season"]), int(sqlEp["episode"]))
+            with ep.lock:
+                if ep.show.paused:
+                    ep.status = common.SKIPPED
+                else:
+                    ep.status = common.WANTED
+
+                sql_l.append(ep.get_sql())
+        else:
+            logger.log(u"No newly released episodes found ...")
+
+        if len(sql_l) > 0:
+            myDB = db.DBConnection()
+            myDB.mass_action(sql_l)
+
+        # queue episode for recent search
+        recentsearch_queue_item = sickbeard.search_queue.RecentSearchQueueItem()
+        sickbeard.searchQueueScheduler.action.add_item(recentsearch_queue_item)
+
+        self.amActive = False
\ No newline at end of file
diff --git a/sickbeard/search_queue.py b/sickbeard/search_queue.py
index e87ed6e..242dc54 100644
--- a/sickbeard/search_queue.py
+++ b/sickbeard/search_queue.py
@@ -33,7 +33,7 @@ from sickbeard.search import pickBestResult
 search_queue_lock = threading.Lock()
 
 BACKLOG_SEARCH = 10
-DAILY_SEARCH = 20
+RECENT_SEARCH = 20
 FAILED_SEARCH = 30
 MANUAL_SEARCH = 40
@@ -95,17 +95,17 @@
             return True
         return False
 
-    def is_dailysearch_in_progress(self):
+    def is_recentsearch_in_progress(self):
         for cur_item in self.queue + [self.currentItem]:
-            if isinstance(cur_item, DailySearchQueueItem):
+            if isinstance(cur_item, RecentSearchQueueItem):
                 return True
         return False
 
     def queue_length(self):
-        length = {'backlog': 0, 'daily': 0, 'manual': 0, 'failed': 0}
+        length = {'backlog': 0, 'recent': 0, 'manual': 0, 'failed': 0}
         for cur_item in self.queue:
-            if isinstance(cur_item, DailySearchQueueItem):
-                length['daily'] += 1
+            if isinstance(cur_item, RecentSearchQueueItem):
+                length['recent'] += 1
             elif isinstance(cur_item, BacklogQueueItem):
                 length['backlog'] += 1
             elif isinstance(cur_item, ManualSearchQueueItem):
@@ -116,8 +116,8 @@
     def add_item(self, item):
 
-        if isinstance(item, DailySearchQueueItem):
-            # daily searches
+        if isinstance(item, RecentSearchQueueItem):
+            # recent searches
             generic_queue.GenericQueue.add_item(self, item)
         elif isinstance(item, BacklogQueueItem) and not self.is_in_queue(item.show, item.segment):
             # backlog searches
@@ -128,16 +128,16 @@
         else:
             logger.log(u"Not adding item, it's already in the queue", logger.DEBUG)
 
-class DailySearchQueueItem(generic_queue.QueueItem):
+class RecentSearchQueueItem(generic_queue.QueueItem):
    def __init__(self):
        self.success = None
-        generic_queue.QueueItem.__init__(self, 'Daily Search', DAILY_SEARCH)
+        generic_queue.QueueItem.__init__(self, 'Recent Search', RECENT_SEARCH)
 
    def run(self):
        generic_queue.QueueItem.run(self)
 
        try:
-            logger.log("Beginning daily search for new episodes")
+            logger.log("Beginning recent search
for new episodes") foundResults = search.searchForNeededEpisodes() if not len(foundResults): diff --git a/sickbeard/tvcache.py b/sickbeard/tvcache.py index 681ab31..a15c3c6 100644 --- a/sickbeard/tvcache.py +++ b/sickbeard/tvcache.py @@ -97,7 +97,7 @@ class TVCache(): myDB.action('DELETE FROM [' + self.providerID + '] WHERE 1') def _get_title_and_url(self, item): - # override this in the provider if daily search has a different data layout to backlog searches + # override this in the provider if recent search has a different data layout to backlog searches return self.provider._get_title_and_url(item) def _getRSSData(self): @@ -222,7 +222,7 @@ class TVCache(): return True def shouldClearCache(self): - # if daily search hasn't used our previous results yet then don't clear the cache + # if recent search hasn't used our previous results yet then don't clear the cache if self.lastUpdate > self.lastSearch: return False diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 619a176..2519e22 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -630,7 +630,7 @@ class ManageSearches(MainHandler): # t.backlogPI = sickbeard.backlogSearchScheduler.action.getProgressIndicator() t.backlogPaused = sickbeard.searchQueueScheduler.action.is_backlog_paused() # @UndefinedVariable t.backlogRunning = sickbeard.searchQueueScheduler.action.is_backlog_in_progress() # @UndefinedVariable - t.dailySearchStatus = sickbeard.dailySearchScheduler.action.amActive # @UndefinedVariable + t.recentSearchStatus = sickbeard.recentSearchScheduler.action.amActive # @UndefinedVariable t.findPropersStatus = sickbeard.properFinderScheduler.action.amActive # @UndefinedVariable t.queueLength = sickbeard.searchQueueScheduler.action.queue_length() @@ -658,10 +658,10 @@ class ManageSearches(MainHandler): def forceSearch(self, *args, **kwargs): # force it to run the next time it looks - result = sickbeard.dailySearchScheduler.forceRun() + result = sickbeard.recentSearchScheduler.forceRun() if result: - logger.log(u"Daily search forced") - ui.notifications.message('Daily search started') + logger.log(u"Recent search forced") + ui.notifications.message('Recent search started') redirect("/manage/manageSearches/") @@ -1628,10 +1628,10 @@ class ConfigSearch(MainHandler): def saveSearch(self, use_nzbs=None, use_torrents=None, nzb_dir=None, sab_username=None, sab_password=None, sab_apikey=None, sab_category=None, sab_host=None, nzbget_username=None, nzbget_password=None, nzbget_category=None, nzbget_priority=None, nzbget_host=None, nzbget_use_https=None, - backlog_days=None, backlog_frequency=None, dailysearch_frequency=None, + backlog_days=None, backlog_frequency=None, recentsearch_frequency=None, nzb_method=None, torrent_method=None, usenet_retention=None, download_propers=None, check_propers_interval=None, allow_high_priority=None, - backlog_startup=None, dailysearch_startup=None, + backlog_startup=None, recentsearch_startup=None, torrent_dir=None, torrent_username=None, torrent_password=None, torrent_host=None, torrent_label=None, torrent_path=None, torrent_verify_cert=None, torrent_seed_time=None, torrent_paused=None, torrent_high_bandwidth=None, ignore_words=None, require_words=None): @@ -1644,7 +1644,7 @@ class ConfigSearch(MainHandler): if not config.change_TORRENT_DIR(torrent_dir): results += ["Unable to create directory " + os.path.normpath(torrent_dir) + ", dir not changed."] - config.change_DAILYSEARCH_FREQUENCY(dailysearch_frequency) + config.change_RECENTSEARCH_FREQUENCY(recentsearch_frequency) 
config.change_BACKLOG_FREQUENCY(backlog_frequency) sickbeard.BACKLOG_DAYS = config.to_int(backlog_days, default=7) @@ -1664,7 +1664,7 @@ class ConfigSearch(MainHandler): sickbeard.ALLOW_HIGH_PRIORITY = config.checkbox_to_value(allow_high_priority) - sickbeard.DAILYSEARCH_STARTUP = config.checkbox_to_value(dailysearch_startup) + sickbeard.RECENTSEARCH_STARTUP = config.checkbox_to_value(recentsearch_startup) sickbeard.BACKLOG_STARTUP = config.checkbox_to_value(backlog_startup) sickbeard.SAB_USERNAME = sab_username @@ -2092,10 +2092,10 @@ class ConfigProviders(MainHandler): newznabProviderDict[cur_id].search_fallback = 0 try: - newznabProviderDict[cur_id].enable_daily = config.checkbox_to_value( - kwargs[cur_id + '_enable_daily']) + newznabProviderDict[cur_id].enable_recentsearch = config.checkbox_to_value( + kwargs[cur_id + '_enable_recentsearch']) except: - newznabProviderDict[cur_id].enable_daily = 0 + newznabProviderDict[cur_id].enable_recentsearch = 0 try: newznabProviderDict[cur_id].enable_backlog = config.checkbox_to_value( @@ -2258,12 +2258,12 @@ class ConfigProviders(MainHandler): except: curTorrentProvider.search_fallback = 0 # these exceptions are catching unselected checkboxes - if hasattr(curTorrentProvider, 'enable_daily'): + if hasattr(curTorrentProvider, 'enable_recentsearch'): try: - curTorrentProvider.enable_daily = config.checkbox_to_value( - kwargs[curTorrentProvider.getID() + '_enable_daily']) + curTorrentProvider.enable_recentsearch = config.checkbox_to_value( + kwargs[curTorrentProvider.getID() + '_enable_recentsearch']) except: - curTorrentProvider.enable_daily = 0 # these exceptions are actually catching unselected checkboxes + curTorrentProvider.enable_recentsearch = 0 # these exceptions are actually catching unselected checkboxes if hasattr(curTorrentProvider, 'enable_backlog'): try: @@ -2300,12 +2300,12 @@ class ConfigProviders(MainHandler): except: curNzbProvider.search_fallback = 0 # these exceptions are actually catching unselected checkboxes - if hasattr(curNzbProvider, 'enable_daily'): + if hasattr(curNzbProvider, 'enable_recentsearch'): try: - curNzbProvider.enable_daily = config.checkbox_to_value( - kwargs[curNzbProvider.getID() + '_enable_daily']) + curNzbProvider.enable_recentsearch = config.checkbox_to_value( + kwargs[curNzbProvider.getID() + '_enable_recentsearch']) except: - curNzbProvider.enable_daily = 0 # these exceptions are actually catching unselected checkboxes + curNzbProvider.enable_recentsearch = 0 # these exceptions are actually catching unselected checkboxes if hasattr(curNzbProvider, 'enable_backlog'): try: