From ab020a0654465a3d2ebf5066a088336b1b4119c5 Mon Sep 17 00:00:00 2001 From: Safihre Date: Sat, 19 Sep 2020 16:58:22 +0200 Subject: [PATCH] Rework the naming of the main SABnzbd threads --- SABnzbd.py | 2 +- sabnzbd/__init__.py | 201 ++++++++++++++++++++++++------------------------ sabnzbd/api.py | 126 +++++++++++++++--------------- sabnzbd/articlecache.py | 6 -- sabnzbd/assembler.py | 28 +++---- sabnzbd/bpsmeter.py | 23 ++---- sabnzbd/decoder.py | 22 +++--- sabnzbd/dirscanner.py | 7 -- sabnzbd/downloader.py | 63 +++++++-------- sabnzbd/interface.py | 98 +++++++++++------------ sabnzbd/newswrapper.py | 2 +- sabnzbd/nzbparser.py | 10 +-- sabnzbd/nzbqueue.py | 17 ++-- sabnzbd/nzbstuff.py | 24 +++--- sabnzbd/osxmenu.py | 22 +++--- sabnzbd/postproc.py | 24 +++--- sabnzbd/rating.py | 1 - sabnzbd/rss.py | 1 - sabnzbd/sabtray.py | 3 +- sabnzbd/sabtraylinux.py | 3 +- sabnzbd/scheduler.py | 47 ++++++----- sabnzbd/urlgrabber.py | 9 +-- 22 files changed, 335 insertions(+), 404 deletions(-) diff --git a/SABnzbd.py b/SABnzbd.py index 11c259e..89c836c 100755 --- a/SABnzbd.py +++ b/SABnzbd.py @@ -1529,7 +1529,7 @@ def main(): # Shutdown sabnzbd.shutdown_program() - if sabnzbd.downloader.Downloader.do.paused: + if sabnzbd.Downloader.paused: sabnzbd.RESTART_ARGS.append("-p") if autorestarted: sabnzbd.RESTART_ARGS.append("--autorestarted") diff --git a/sabnzbd/__init__.py b/sabnzbd/__init__.py index 5aad268..cfc5603 100644 --- a/sabnzbd/__init__.py +++ b/sabnzbd/__init__.py @@ -75,34 +75,34 @@ elif os.name == "posix": pass # Now we can import safely -from sabnzbd.nzbqueue import NzbQueue -from sabnzbd.postproc import PostProcessor -from sabnzbd.downloader import Downloader -from sabnzbd.decoder import Decoder -from sabnzbd.assembler import Assembler -from sabnzbd.rating import Rating import sabnzbd.misc as misc import sabnzbd.filesystem as filesystem import sabnzbd.powersup as powersup -from sabnzbd.dirscanner import DirScanner -from sabnzbd.urlgrabber import URLGrabber import 
sabnzbd.scheduler as scheduler import sabnzbd.rss as rss -import sabnzbd.emailer as emailer -from sabnzbd.articlecache import ArticleCache -import sabnzbd.newsunpack import sabnzbd.encoding as encoding import sabnzbd.config as config -from sabnzbd.bpsmeter import BPSMeter import sabnzbd.cfg as cfg import sabnzbd.database import sabnzbd.lang as lang -import sabnzbd.par2file as par2file import sabnzbd.nzbparser as nzbparser +import sabnzbd.nzbstuff +import sabnzbd.emailer +import sabnzbd.getipaddress import sabnzbd.api import sabnzbd.interface -import sabnzbd.nzbstuff as nzbstuff +import sabnzbd.zconfig import sabnzbd.directunpacker as directunpacker +import sabnzbd.dirscanner +import sabnzbd.urlgrabber +import sabnzbd.nzbqueue +import sabnzbd.postproc +import sabnzbd.downloader +import sabnzbd.decoder +import sabnzbd.assembler +import sabnzbd.rating +import sabnzbd.articlecache +import sabnzbd.bpsmeter from sabnzbd.decorators import synchronized from sabnzbd.constants import ( DEFAULT_PRIORITY, @@ -112,12 +112,21 @@ from sabnzbd.constants import ( QUEUE_VERSION, QUEUE_FILE_TMPL, ) -import sabnzbd.getipaddress as getipaddress - -LINUX_POWER = powersup.HAVE_DBUS +# Storage for the threads, variables are filled during initialization +ArticleCache: sabnzbd.articlecache.ArticleCache +Rating: sabnzbd.rating.Rating +Assembler: sabnzbd.assembler.Assembler +Decoder: sabnzbd.decoder.Decoder +Downloader: sabnzbd.downloader.Downloader +PostProcessor: sabnzbd.postproc.PostProcessor +NzbQueue: sabnzbd.nzbqueue.NzbQueue +URLGrabber: sabnzbd.urlgrabber.URLGrabber +DirScanner: sabnzbd.dirscanner.DirScanner +BPSMeter: sabnzbd.bpsmeter.BPSMeter + +# Regular constants START = datetime.datetime.now() - MY_NAME = None MY_FULLNAME = None RESTART_ARGS = [] @@ -135,6 +144,7 @@ QUEUECOMPLETEACTION = None # stores the name of the function to be called QUEUECOMPLETEARG = None # stores an extra arguments that need to be passed DAEMON = None +LINUX_POWER = powersup.HAVE_DBUS LOGFILE = None 
WEBLOGFILE = None @@ -169,6 +179,7 @@ DOWNLOAD_DIR_SPEED = 0 COMPLETE_DIR_SPEED = 0 INTERNET_BANDWIDTH = 0 + # Rendering of original command line arguments in Config CMDLINE = " ".join(['"%s"' % arg for arg in sys.argv]) @@ -272,11 +283,6 @@ def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0 cfg.enable_https_verification.callback(guard_https_ver) guard_https_ver() - # Set cache limit - if not cfg.cache_limit() or (cfg.cache_limit() in ("200M", "450M") and (sabnzbd.WIN32 or sabnzbd.DARWIN)): - cfg.cache_limit.set(misc.get_cache_limit()) - ArticleCache.do.new_limit(cfg.cache_limit.get_int()) - check_incomplete_vs_complete() # Set language files @@ -322,31 +328,27 @@ def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0 # Initialize threads rss.init() - paused = BPSMeter.do.read() - - NzbQueue() - - Downloader(pause_downloader or paused) - - Decoder() - - Assembler() - - PostProcessor() - - NzbQueue.do.read_queue(repair) - - DirScanner() - - Rating() - - URLGrabber() + sabnzbd.ArticleCache = sabnzbd.articlecache.ArticleCache() + sabnzbd.BPSMeter = sabnzbd.bpsmeter.BPSMeter() + sabnzbd.NzbQueue = sabnzbd.nzbqueue.NzbQueue() + sabnzbd.Downloader = sabnzbd.downloader.Downloader(pause_downloader or sabnzbd.BPSMeter.read()) + sabnzbd.Decoder = sabnzbd.decoder.Decoder() + sabnzbd.Assembler = sabnzbd.assembler.Assembler() + sabnzbd.PostProcessor = sabnzbd.postproc.PostProcessor() + sabnzbd.DirScanner = sabnzbd.dirscanner.DirScanner() + sabnzbd.Rating = sabnzbd.rating.Rating() + sabnzbd.URLGrabber = sabnzbd.urlgrabber.URLGrabber() + sabnzbd.NzbQueue.read_queue(repair) scheduler.init() - if evalSched: scheduler.analyse(pause_downloader) + # Set cache limit + if not cfg.cache_limit() or (cfg.cache_limit() in ("200M", "450M") and (sabnzbd.WIN32 or sabnzbd.DARWIN)): + cfg.cache_limit.set(misc.get_cache_limit()) + sabnzbd.ArticleCache.new_limit(cfg.cache_limit.get_int()) + logging.info("All processes started") RESTART_REQ = 
False __INITIALIZED__ = True @@ -359,26 +361,26 @@ def start(): if __INITIALIZED__: logging.debug("Starting postprocessor") - PostProcessor.do.start() + sabnzbd.PostProcessor.start() logging.debug("Starting assembler") - Assembler.do.start() + sabnzbd.Assembler.start() logging.debug("Starting downloader") - Downloader.do.start() + sabnzbd.Downloader.start() logging.debug("Starting decoders") - Decoder.do.start() + sabnzbd.Decoder.start() scheduler.start() logging.debug("Starting dirscanner") - DirScanner.do.start() + sabnzbd.DirScanner.start() - Rating.do.start() + sabnzbd.Rating.start() logging.debug("Starting urlgrabber") - URLGrabber.do.start() + sabnzbd.URLGrabber.start() @synchronized(INIT_LOCK) @@ -400,23 +402,23 @@ def halt(): rss.stop() logging.debug("Stopping URLGrabber") - URLGrabber.do.stop() + sabnzbd.URLGrabber.stop() try: - URLGrabber.do.join() + sabnzbd.URLGrabber.join() except: pass logging.debug("Stopping rating") - Rating.do.stop() + sabnzbd.Rating.stop() try: - Rating.do.join() + sabnzbd.Rating.join() except: pass logging.debug("Stopping dirscanner") - DirScanner.do.stop() + sabnzbd.DirScanner.stop() try: - DirScanner.do.join() + sabnzbd.DirScanner.join() except: pass @@ -426,20 +428,20 @@ def halt(): # Decoder handles join gracefully logging.debug("Stopping decoders") - Decoder.do.stop() - Decoder.do.join() + sabnzbd.Decoder.stop() + sabnzbd.Decoder.join() logging.debug("Stopping assembler") - Assembler.do.stop() + sabnzbd.Assembler.stop() try: - Assembler.do.join() + sabnzbd.Assembler.join() except: pass logging.debug("Stopping postprocessor") - PostProcessor.do.stop() + sabnzbd.PostProcessor.stop() try: - PostProcessor.do.join() + sabnzbd.PostProcessor.join() except: pass @@ -467,7 +469,7 @@ def trigger_restart(timeout=None): time.sleep(timeout) # Add extra arguments - if sabnzbd.downloader.Downloader.do.paused: + if sabnzbd.Downloader.paused: sabnzbd.RESTART_ARGS.append("-p") sys.argv = sabnzbd.RESTART_ARGS @@ -492,7 +494,7 @@ def 
trigger_restart(timeout=None): ############################################################################## def new_limit(): """ Callback for article cache changes """ - ArticleCache.do.new_limit(cfg.cache_limit.get_int()) + sabnzbd.ArticleCache.new_limit(cfg.cache_limit.get_int()) def guard_restart(): @@ -503,7 +505,7 @@ def guard_restart(): def guard_top_only(): """ Callback for change of top_only option """ - NzbQueue.do.set_top_only(cfg.top_only()) + sabnzbd.NzbQueue.set_top_only(cfg.top_only()) def guard_pause_on_pp(): @@ -512,12 +514,12 @@ def guard_pause_on_pp(): pass # Not safe to idle downloader, because we don't know # if post-processing is active now else: - Downloader.do.resume_from_postproc() + sabnzbd.Downloader.resume_from_postproc() def guard_quota_size(): """ Callback for change of quota_size """ - BPSMeter.do.change_quota() + sabnzbd.BPSMeter.change_quota() def guard_quota_dp(): @@ -566,33 +568,33 @@ def add_url(url, pp=None, script=None, cat=None, priority=None, nzbname=None, pa msg = "%s - %s" % (nzbname, msg) # Generate the placeholder - future_nzo = NzbQueue.do.generate_future(msg, pp, script, cat, url=url, priority=priority, nzbname=nzbname) + future_nzo = sabnzbd.NzbQueue.generate_future(msg, pp, script, cat, url=url, priority=priority, nzbname=nzbname) # Set password if not future_nzo.password: future_nzo.password = password # Get it! 
- URLGrabber.do.add(url, future_nzo) + sabnzbd.URLGrabber.add(url, future_nzo) return future_nzo.nzo_id def save_state(): """ Save all internal bookkeeping to disk """ - ArticleCache.do.flush_articles() - NzbQueue.do.save() - BPSMeter.do.save() + sabnzbd.ArticleCache.flush_articles() + sabnzbd.NzbQueue.save() + sabnzbd.BPSMeter.save() rss.save() - Rating.do.save() - DirScanner.do.save() - PostProcessor.do.save() + sabnzbd.Rating.save() + sabnzbd.DirScanner.save() + sabnzbd.PostProcessor.save() def pause_all(): """ Pause all activities than cause disk access """ global PAUSED_ALL PAUSED_ALL = True - Downloader.do.pause() + sabnzbd.Downloader.pause() logging.debug("PAUSED_ALL active") @@ -600,7 +602,7 @@ def unpause_all(): """ Resume all activities """ global PAUSED_ALL PAUSED_ALL = False - Downloader.do.resume() + sabnzbd.Downloader.resume() logging.debug("PAUSED_ALL inactive") @@ -746,7 +748,7 @@ def enable_server(server): logging.warning(T("Trying to set status of non-existing server %s"), server) return config.save_config() - Downloader.do.update_server(server, server) + sabnzbd.Downloader.update_server(server, server) def disable_server(server): @@ -757,7 +759,7 @@ def disable_server(server): logging.warning(T("Trying to set status of non-existing server %s"), server) return config.save_config() - Downloader.do.update_server(server, server) + sabnzbd.Downloader.update_server(server, server) def system_shutdown(): @@ -866,7 +868,7 @@ def run_script(script): def empty_queues(): """ Return True if queues empty or non-existent """ global __INITIALIZED__ - return (not __INITIALIZED__) or (PostProcessor.do.empty() and NzbQueue.do.is_empty()) + return (not __INITIALIZED__) or (sabnzbd.PostProcessor.empty() and sabnzbd.NzbQueue.is_empty()) def keep_awake(): @@ -875,8 +877,8 @@ def keep_awake(): if sabnzbd.cfg.keep_awake(): ES_CONTINUOUS = 0x80000000 ES_SYSTEM_REQUIRED = 0x00000001 - if (not Downloader.do.is_paused() and not NzbQueue.do.is_empty()) or ( - not 
PostProcessor.do.paused and not PostProcessor.do.empty() + if (not sabnzbd.Downloader.is_paused() and not sabnzbd.NzbQueue.is_empty()) or ( + not sabnzbd.PostProcessor.paused and not sabnzbd.PostProcessor.empty() ): if KERNEL32: # Set ES_SYSTEM_REQUIRED until the next call @@ -1028,45 +1030,45 @@ def check_all_tasks(): return True # Non-restartable threads, require program restart - if not sabnzbd.PostProcessor.do.is_alive(): + if not sabnzbd.PostProcessor.is_alive(): logging.info("Restarting because of crashed postprocessor") return False - if not Downloader.do.is_alive(): + if not sabnzbd.Downloader.is_alive(): logging.info("Restarting because of crashed downloader") return False - if not Decoder.do.is_alive(): + if not sabnzbd.Decoder.is_alive(): logging.info("Restarting because of crashed decoder") return False - if not Assembler.do.is_alive(): + if not sabnzbd.Assembler.is_alive(): logging.info("Restarting because of crashed assembler") return False # Kick the downloader, in case it missed the semaphore - Downloader.do.wakeup() + sabnzbd.Downloader.wakeup() # Make sure the right servers are active - Downloader.do.check_timers() + sabnzbd.Downloader.check_timers() # Restartable threads - if not DirScanner.do.is_alive(): + if not sabnzbd.DirScanner.is_alive(): logging.info("Restarting crashed dirscanner") - DirScanner.do.__init__() - if not URLGrabber.do.is_alive(): + sabnzbd.DirScanner.__init__() + if not sabnzbd.URLGrabber.is_alive(): logging.info("Restarting crashed urlgrabber") - URLGrabber.do.__init__() - if not Rating.do.is_alive(): + sabnzbd.URLGrabber.__init__() + if not sabnzbd.Rating.is_alive(): logging.info("Restarting crashed rating") - Rating.do.__init__() + sabnzbd.Rating.__init__() if not sabnzbd.scheduler.sched_check(): logging.info("Restarting crashed scheduler") sabnzbd.scheduler.init() - sabnzbd.downloader.Downloader.do.unblock_all() + sabnzbd.Downloader.unblock_all() # Check one-shot pause sabnzbd.scheduler.pause_check() # Check (and 
terminate) idle jobs - sabnzbd.nzbqueue.NzbQueue.do.stop_idle_jobs() + sabnzbd.NzbQueue.stop_idle_jobs() return True @@ -1112,18 +1114,13 @@ def wait_for_download_folder(): time.sleep(2.0) -# Required wrapper because nzbstuff.py cannot import downloader.py -def highest_server(me): - return sabnzbd.downloader.Downloader.do.highest_server(me) - - def test_ipv6(): """ Check if external IPv6 addresses are reachable """ if not cfg.selftest_host(): # User disabled the test, assume active IPv6 return True try: - info = getipaddress.addresslookup6(cfg.selftest_host()) + info = sabnzbd.getipaddress.addresslookup6(cfg.selftest_host()) except: logging.debug( "Test IPv6: Disabling IPv6, because it looks like it's not available. Reason: %s", sys.exc_info()[0] diff --git a/sabnzbd/api.py b/sabnzbd/api.py index 14f956b..794e21d 100644 --- a/sabnzbd/api.py +++ b/sabnzbd/api.py @@ -50,8 +50,6 @@ from sabnzbd.constants import ( ) import sabnzbd.config as config import sabnzbd.cfg as cfg -from sabnzbd.downloader import Downloader -from sabnzbd.nzbqueue import NzbQueue import sabnzbd.scheduler as scheduler from sabnzbd.skintext import SKIN_TEXT from sabnzbd.utils.pathbrowser import folders_at_path @@ -68,11 +66,7 @@ from sabnzbd.misc import ( ) from sabnzbd.filesystem import diskspace, get_ext, globber_full, clip_path, remove_all, userxbit from sabnzbd.encoding import xml_name -from sabnzbd.postproc import PostProcessor -from sabnzbd.articlecache import ArticleCache from sabnzbd.utils.servertests import test_nntp_server_dict -from sabnzbd.bpsmeter import BPSMeter -from sabnzbd.rating import Rating from sabnzbd.getipaddress import localipv4, publicipv4, ipv6, addresslookup from sabnzbd.database import build_history_info, unpack_history_info, HistoryDB import sabnzbd.notifier @@ -199,12 +193,12 @@ def _api_queue(name, output, kwargs): def _api_queue_delete(output, value, kwargs): """ API: accepts output, value """ if value.lower() == "all": - removed = 
NzbQueue.do.remove_all(kwargs.get("search")) + removed = sabnzbd.NzbQueue.remove_all(kwargs.get("search")) return report(output, keyword="", data={"status": bool(removed), "nzo_ids": removed}) elif value: items = value.split(",") delete_all_data = int_conv(kwargs.get("del_files")) - removed = NzbQueue.do.remove_multiple(items, delete_all_data=delete_all_data) + removed = sabnzbd.NzbQueue.remove_multiple(items, delete_all_data=delete_all_data) return report(output, keyword="", data={"status": bool(removed), "nzo_ids": removed}) else: return report(output, _MSG_NO_VALUE) @@ -214,7 +208,7 @@ def _api_queue_delete_nzf(output, value, kwargs): """ API: accepts value(=nzo_id), value2(=nzf_id) """ value2 = kwargs.get("value2") if value and value2: - removed = NzbQueue.do.remove_nzf(value, value2, force_delete=True) + removed = sabnzbd.NzbQueue.remove_nzf(value, value2, force_delete=True) return report(output, keyword="", data={"status": bool(removed), "nzf_ids": removed}) else: return report(output, _MSG_NO_VALUE2) @@ -225,7 +219,7 @@ def _api_queue_rename(output, value, kwargs): value2 = kwargs.get("value2") value3 = kwargs.get("value3") if value and value2: - ret = NzbQueue.do.change_name(value, value2, value3) + ret = sabnzbd.NzbQueue.change_name(value, value2, value3) return report(output, keyword="", data={"status": ret}) else: return report(output, _MSG_NO_VALUE2) @@ -239,7 +233,7 @@ def _api_queue_change_complete_action(output, value, kwargs): def _api_queue_purge(output, value, kwargs): """ API: accepts output """ - removed = NzbQueue.do.remove_all(kwargs.get("search")) + removed = sabnzbd.NzbQueue.remove_all(kwargs.get("search")) return report(output, keyword="", data={"status": bool(removed), "nzo_ids": removed}) @@ -247,7 +241,7 @@ def _api_queue_pause(output, value, kwargs): """ API: accepts output, value(=list of nzo_id) """ if value: items = value.split(",") - handled = NzbQueue.do.pause_multiple_nzo(items) + handled = 
sabnzbd.NzbQueue.pause_multiple_nzo(items) else: handled = False return report(output, keyword="", data={"status": bool(handled), "nzo_ids": handled}) @@ -257,7 +251,7 @@ def _api_queue_resume(output, value, kwargs): """ API: accepts output, value(=list of nzo_id) """ if value: items = value.split(",") - handled = NzbQueue.do.resume_multiple_nzo(items) + handled = sabnzbd.NzbQueue.resume_multiple_nzo(items) else: handled = False return report(output, keyword="", data={"status": bool(handled), "nzo_ids": handled}) @@ -272,7 +266,7 @@ def _api_queue_priority(output, value, kwargs): priority = int(value2) except: return report(output, _MSG_INT_VALUE) - pos = NzbQueue.do.set_priority(value, priority) + pos = sabnzbd.NzbQueue.set_priority(value, priority) # Returns the position in the queue, -1 is incorrect job-id return report(output, keyword="position", data=pos) except: @@ -286,7 +280,7 @@ def _api_queue_sort(output, value, kwargs): sort = kwargs.get("sort") direction = kwargs.get("dir", "") if sort: - NzbQueue.do.sort_queue(sort, direction) + sabnzbd.NzbQueue.sort_queue(sort, direction) return report(output) else: return report(output, _MSG_NO_VALUE2) @@ -304,13 +298,13 @@ def _api_queue_default(output, value, kwargs): def _api_queue_rating(output, value, kwargs): """ API: accepts output, value(=nzo_id), type, setting, detail """ - vote_map = {"up": Rating.VOTE_UP, "down": Rating.VOTE_DOWN} + vote_map = {"up": sabnzbd.Rating.VOTE_UP, "down": sabnzbd.Rating.VOTE_DOWN} flag_map = { - "spam": Rating.FLAG_SPAM, - "encrypted": Rating.FLAG_ENCRYPTED, - "expired": Rating.FLAG_EXPIRED, - "other": Rating.FLAG_OTHER, - "comment": Rating.FLAG_COMMENT, + "spam": sabnzbd.Rating.FLAG_SPAM, + "encrypted": sabnzbd.Rating.FLAG_ENCRYPTED, + "expired": sabnzbd.Rating.FLAG_EXPIRED, + "other": sabnzbd.Rating.FLAG_OTHER, + "comment": sabnzbd.Rating.FLAG_COMMENT, } content_type = kwargs.get("type") setting = kwargs.get("setting") @@ -326,7 +320,7 @@ def _api_queue_rating(output, value, 
kwargs): if content_type == "flag": flag = flag_map[setting] if cfg.rating_enable(): - Rating.do.update_user_rating(value, video, audio, vote, flag, kwargs.get("detail")) + sabnzbd.Rating.update_user_rating(value, video, audio, vote, flag, kwargs.get("detail")) return report(output) except: return report(output, _MSG_BAD_SERVER_PARMS) @@ -389,7 +383,7 @@ def _api_retry(name, output, kwargs): def _api_cancel_pp(name, output, kwargs): """ API: accepts name, output, value(=nzo_id) """ nzo_id = kwargs.get("value") - if PostProcessor.do.cancel_pp(nzo_id): + if sabnzbd.PostProcessor.cancel_pp(nzo_id): return report(output, keyword="", data={"status": True, "nzo_id": nzo_id}) else: return report(output, _MSG_NO_ITEM) @@ -438,7 +432,7 @@ def _api_switch(name, output, kwargs): value = kwargs.get("value") value2 = kwargs.get("value2") if value and value2: - pos, prio = NzbQueue.do.switch(value, value2) + pos, prio = sabnzbd.NzbQueue.switch(value, value2) # Returns the new position and new priority (if different) return report(output, keyword="result", data={"position": pos, "priority": prio}) else: @@ -454,7 +448,7 @@ def _api_change_cat(name, output, kwargs): cat = value2 if cat == "None": cat = None - result = NzbQueue.do.change_cat(nzo_id, cat) + result = sabnzbd.NzbQueue.change_cat(nzo_id, cat) return report(output, keyword="status", data=bool(result > 0)) else: return report(output, _MSG_NO_VALUE) @@ -469,7 +463,7 @@ def _api_change_script(name, output, kwargs): script = value2 if script.lower() == "none": script = None - result = NzbQueue.do.change_script(nzo_id, script) + result = sabnzbd.NzbQueue.change_script(nzo_id, script) return report(output, keyword="status", data=bool(result > 0)) else: return report(output, _MSG_NO_VALUE) @@ -481,7 +475,7 @@ def _api_change_opts(name, output, kwargs): value2 = kwargs.get("value2") result = 0 if value and value2 and value2.isdigit(): - result = NzbQueue.do.change_opts(value, int(value2)) + result = 
sabnzbd.NzbQueue.change_opts(value, int(value2)) return report(output, keyword="status", data=bool(result > 0)) @@ -534,7 +528,7 @@ def _api_history(name, output, kwargs): return report(output, _MSG_NO_VALUE) elif not name: history = {} - grand, month, week, day = BPSMeter.do.get_sums() + grand, month, week, day = sabnzbd.BPSMeter.get_sums() history["total_size"], history["month_size"], history["week_size"], history["day_size"] = ( to_units(grand), to_units(month), @@ -581,7 +575,7 @@ def _api_addurl(name, output, kwargs): def _api_pause(name, output, kwargs): """ API: accepts output """ scheduler.plan_resume(0) - Downloader.do.pause() + sabnzbd.Downloader.pause() return report(output) @@ -660,7 +654,7 @@ def _api_restart_repair(name, output, kwargs): def _api_disconnect(name, output, kwargs): """ API: accepts output """ - Downloader.do.disconnect() + sabnzbd.Downloader.disconnect() return report(output) @@ -673,7 +667,7 @@ def _api_osx_icon(name, output, kwargs): def _api_rescan(name, output, kwargs): """ API: accepts output """ - NzbQueue.do.scan_jobs(all_jobs=False, action=True) + sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=True) return report(output) @@ -692,19 +686,19 @@ def _api_eval_sort(name, output, kwargs): def _api_watched_now(name, output, kwargs): """ API: accepts output """ - sabnzbd.dirscanner.dirscan() + sabnzbd.DirScanner.scan() return report(output) def _api_resume_pp(name, output, kwargs): """ API: accepts output """ - PostProcessor.do.paused = False + sabnzbd.PostProcessor.paused = False return report(output) def _api_pause_pp(name, output, kwargs): """ API: accepts output """ - PostProcessor.do.paused = True + sabnzbd.PostProcessor.paused = True return report(output) @@ -722,7 +716,7 @@ def _api_retry_all(name, output, kwargs): def _api_reset_quota(name, output, kwargs): """ Reset quota left """ - BPSMeter.do.reset_quota(force=True) + sabnzbd.BPSMeter.reset_quota(force=True) def _api_test_email(name, output, kwargs): @@ -827,13 +821,13 @@ 
def _api_config_speedlimit(output, kwargs): value = kwargs.get("value") if not value: value = "0" - Downloader.do.limit_speed(value) + sabnzbd.Downloader.limit_speed(value) return report(output) def _api_config_get_speedlimit(output, kwargs): """ API: accepts output """ - return report(output, keyword="speedlimit", data=Downloader.do.get_limit()) + return report(output, keyword="speedlimit", data=sabnzbd.Downloader.get_limit()) def _api_config_set_colorscheme(output, kwargs): @@ -898,11 +892,11 @@ def _api_config_undefined(output, kwargs): def _api_server_stats(name, output, kwargs): """ API: accepts output """ - sum_t, sum_m, sum_w, sum_d = BPSMeter.do.get_sums() + sum_t, sum_m, sum_w, sum_d = sabnzbd.BPSMeter.get_sums() stats = {"total": sum_t, "month": sum_m, "week": sum_w, "day": sum_d, "servers": {}} for svr in config.get_servers(): - t, m, w, d, daily = BPSMeter.do.amounts(svr) + t, m, w, d, daily = sabnzbd.BPSMeter.amounts(svr) stats["servers"][svr] = {"total": t or 0, "month": m or 0, "week": w or 0, "day": d or 0, "daily": daily or {}} return report(output, keyword="", data=stats) @@ -1119,7 +1113,7 @@ def handle_server_api(output, kwargs): else: config.ConfigServer(name, kwargs) old_name = None - Downloader.do.update_server(old_name, name) + sabnzbd.Downloader.update_server(old_name, name) return name @@ -1180,7 +1174,7 @@ def build_status(skip_dashboard=False, output=None): info["logfile"] = sabnzbd.LOGFILE info["weblogfile"] = sabnzbd.WEBLOGFILE info["loglevel"] = str(cfg.log_level()) - info["folders"] = NzbQueue.do.scan_jobs(all_jobs=False, action=False) + info["folders"] = sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=False) info["configfn"] = config.get_filename() # Dashboard: Speed of System @@ -1211,7 +1205,7 @@ def build_status(skip_dashboard=False, output=None): info["dnslookup"] = None info["servers"] = [] - servers = sorted(Downloader.do.servers[:], key=lambda svr: "%02d%s" % (svr.priority, svr.displayname.lower())) + servers = 
sorted(sabnzbd.Downloader.servers[:], key=lambda svr: "%02d%s" % (svr.priority, svr.displayname.lower())) for server in servers: serverconnections = [] connected = 0 @@ -1343,7 +1337,7 @@ def build_queue(start=0, limit=0, trans=False, output=None, search=None): slot["mb_fmt"] = locale.format_string("%d", int(mb), True) slot["mbdone_fmt"] = locale.format_string("%d", int(mb - mbleft), True) - if not Downloader.do.paused and status not in (Status.PAUSED, Status.FETCHING, Status.GRABBING): + if not sabnzbd.Downloader.paused and status not in (Status.PAUSED, Status.FETCHING, Status.GRABBING): if is_propagating: slot["status"] = Status.PROP elif status == Status.CHECKING: @@ -1357,8 +1351,8 @@ def build_queue(start=0, limit=0, trans=False, output=None, search=None): slot["status"] = "%s" % status if ( - Downloader.do.paused - or Downloader.do.postproc + sabnzbd.Downloader.paused + or sabnzbd.Downloader.postproc or is_propagating or status not in (Status.DOWNLOADING, Status.FETCHING, Status.QUEUED) ) and priority != FORCE_PRIORITY: @@ -1381,7 +1375,7 @@ def build_queue(start=0, limit=0, trans=False, output=None, search=None): else: slot["avg_age"] = calc_age(average_date, bool(trans)) - rating = Rating.do.get_rating_by_nzo(nzo_id) + rating = sabnzbd.Rating.get_rating_by_nzo(nzo_id) slot["has_rating"] = rating is not None if rating: slot["rating_avg_video"] = rating.avg_video @@ -1400,9 +1394,9 @@ def build_queue(start=0, limit=0, trans=False, output=None, search=None): def fast_queue(): """ Return paused, bytes_left, bpsnow, time_left """ - bytes_left = NzbQueue.do.remaining() - paused = Downloader.do.paused - bpsnow = BPSMeter.do.bps + bytes_left = sabnzbd.NzbQueue.remaining() + paused = sabnzbd.Downloader.paused + bpsnow = sabnzbd.BPSMeter.bps time_left = calc_timeleft(bytes_left, bpsnow) return paused, bytes_left, bpsnow, time_left @@ -1410,7 +1404,7 @@ def fast_queue(): def build_file_list(nzo_id: str): """Build file lists for specified job""" jobs = [] - nzo
= NzbQueue.do.get_nzo(nzo_id) + nzo = sabnzbd.NzbQueue.get_nzo(nzo_id) if nzo: pnfo = nzo.gather_info(full=True) @@ -1487,7 +1481,7 @@ def retry_job(job, new_nzb=None, password=None): nzo_id = sabnzbd.add_url(url, pp, script, cat) else: path = history_db.get_path(job) - nzo_id = NzbQueue.do.repair_job(path, new_nzb, password) + nzo_id = sabnzbd.NzbQueue.repair_job(path, new_nzb, password) if nzo_id: # Only remove from history if we repaired something history_db.remove_history(job) @@ -1516,9 +1510,9 @@ def del_hist_job(job, del_files): """ Remove history element """ if job: - path = PostProcessor.do.get_path(job) + path = sabnzbd.PostProcessor.get_path(job) if path: - PostProcessor.do.delete(job, del_files=del_files) + sabnzbd.PostProcessor.delete(job, del_files=del_files) else: history_db = sabnzbd.get_db_connection() remove_all(history_db.get_path(job), recursive=True) @@ -1568,10 +1562,10 @@ def build_header(webdir="", output=None, trans_functions=True): except: uptime = "-" - speed_limit = Downloader.do.get_limit() + speed_limit = sabnzbd.Downloader.get_limit() if speed_limit <= 0: speed_limit = 100 - speed_limit_abs = Downloader.do.get_limit_abs() + speed_limit_abs = sabnzbd.Downloader.get_limit_abs() if speed_limit_abs <= 0: speed_limit_abs = "" @@ -1609,7 +1603,7 @@ def build_header(webdir="", output=None, trans_functions=True): header["new_release"], header["new_rel_url"] = sabnzbd.NEW_VERSION header["version"] = sabnzbd.__version__ - header["paused"] = bool(Downloader.do.paused or Downloader.do.postproc) + header["paused"] = bool(sabnzbd.Downloader.paused or sabnzbd.Downloader.postproc) header["pause_int"] = scheduler.pause_int() header["paused_all"] = sabnzbd.PAUSED_ALL @@ -1626,11 +1620,11 @@ def build_header(webdir="", output=None, trans_functions=True): header["have_warnings"] = str(sabnzbd.GUIHANDLER.count()) header["finishaction"] = sabnzbd.QUEUECOMPLETE - header["quota"] = to_units(BPSMeter.do.quota) -
header["have_quota"] = bool(BPSMeter.do.quota > 0.0) - header["left_quota"] = to_units(BPSMeter.do.left) + header["quota"] = to_units(sabnzbd.BPSMeter.quota) + header["have_quota"] = bool(sabnzbd.BPSMeter.quota > 0.0) + header["left_quota"] = to_units(sabnzbd.BPSMeter.left) - anfo = ArticleCache.do.cache_info() + anfo = sabnzbd.ArticleCache.cache_info() header["cache_art"] = str(anfo.article_sum) header["cache_size"] = to_units(anfo.cache_size, "B") header["cache_max"] = str(anfo.cache_limit) @@ -1643,8 +1637,8 @@ def build_queue_header(search=None, start=0, limit=0, output=None): header = build_header(output=output) - bytespersec = BPSMeter.do.bps - qnfo = NzbQueue.do.queue_info(search=search, start=start, limit=limit) + bytespersec = sabnzbd.BPSMeter.bps + qnfo = sabnzbd.NzbQueue.queue_info(search=search, start=start, limit=limit) bytesleft = qnfo.bytes_left bytes_total = qnfo.bytes @@ -1657,7 +1651,7 @@ def build_queue_header(search=None, start=0, limit=0, output=None): header["size"] = to_units(bytes_total, "B") header["noofslots_total"] = qnfo.q_fullsize - if Downloader.do.paused or Downloader.do.postproc: + if sabnzbd.Downloader.paused or sabnzbd.Downloader.postproc: status = Status.PAUSED elif bytespersec > 0: status = Status.DOWNLOADING @@ -1682,7 +1676,7 @@ def build_history(start=0, limit=0, search=None, failed_only=0, categories=None) limit = 1000000 # Grab any items that are active or queued in postproc - postproc_queue = PostProcessor.do.get_queue() + postproc_queue = sabnzbd.PostProcessor.get_queue() # Filter out any items that don't match the search term or category if postproc_queue: @@ -1762,7 +1756,7 @@ def build_history(start=0, limit=0, search=None, failed_only=0, categories=None) item["retry"] = True if rating_enabled: - rating = Rating.do.get_rating_by_nzo(item["nzo_id"]) + rating = sabnzbd.Rating.get_rating_by_nzo(item["nzo_id"]) item["has_rating"] = rating is not None if rating: item["rating_avg_video"] = rating.avg_video @@ -1913,7 +1907,7 
@@ def del_from_section(kwargs): del item config.save_config() if section == "servers": - Downloader.do.update_server(keyword, None) + sabnzbd.Downloader.update_server(keyword, None) return True else: return False diff --git a/sabnzbd/articlecache.py b/sabnzbd/articlecache.py index 66c8477..caa3ef5 100644 --- a/sabnzbd/articlecache.py +++ b/sabnzbd/articlecache.py @@ -52,8 +52,6 @@ class ArticleCache: if sabnzbd.DARWIN or sabnzbd.WIN64 or (struct.calcsize("P") * 8) == 64: self.__cache_upper_limit = 4 * GIGI - ArticleCache.do = self - def cache_info(self): return ANFO(len(self.__article_table), abs(self.__cache_size), self.__cache_limit_org) @@ -171,7 +169,3 @@ class ArticleCache: # Save data, but don't complain when destination folder is missing # because this flush may come after completion of the NZO. sabnzbd.save_data(data, article.get_art_id(), nzo.workpath, do_pickle=False, silent=True) - - -# Create the instance -ArticleCache() diff --git a/sabnzbd/assembler.py b/sabnzbd/assembler.py index 5d559ec..b063100 100644 --- a/sabnzbd/assembler.py +++ b/sabnzbd/assembler.py @@ -33,13 +33,10 @@ from sabnzbd.misc import get_all_passwords from sabnzbd.filesystem import set_permissions, clip_path, has_win_device, diskspace, get_filename, get_ext from sabnzbd.constants import Status, GIGI, MAX_ASSEMBLER_QUEUE import sabnzbd.cfg as cfg -from sabnzbd.articlecache import ArticleCache -from sabnzbd.postproc import PostProcessor from sabnzbd.nzbstuff import NzbObject, NzbFile import sabnzbd.downloader import sabnzbd.par2file as par2file import sabnzbd.utils.rarfile as rarfile -from sabnzbd.rating import Rating class Assembler(Thread): @@ -48,7 +45,6 @@ class Assembler(Thread): def __init__(self): Thread.__init__(self) self.queue: queue.Queue[Tuple[NzbObject, NzbFile, bool]] = queue.Queue() - Assembler.do = self def stop(self): self.process(None) @@ -76,10 +72,10 @@ class Assembler(Thread): and diskspace(force=True)["download_dir"][1] < (cfg.download_free.get_float() + 
nzf.bytes) / GIGI ): # Only warn and email once - if not sabnzbd.downloader.Downloader.do.paused: + if not sabnzbd.Downloader.paused: logging.warning(T("Too little diskspace forcing PAUSE")) # Pause downloader, but don't save, since the disk is almost full! - sabnzbd.downloader.Downloader.do.pause() + sabnzbd.Downloader.pause() sabnzbd.emailer.diskfull_mail() # Abort all direct unpackers, just to be sure sabnzbd.directunpacker.abort_all() @@ -102,7 +98,7 @@ class Assembler(Thread): # Log traceback logging.info("Traceback: ", exc_info=True) # Pause without saving - sabnzbd.downloader.Downloader.do.pause() + sabnzbd.Downloader.pause() continue except: logging.error(T("Fatal error in Assembler"), exc_info=True) @@ -137,7 +133,7 @@ class Assembler(Thread): nzo.final_name, ) nzo.fail_msg = T("Aborted, encryption detected") - sabnzbd.nzbqueue.NzbQueue.do.end_job(nzo) + sabnzbd.NzbQueue.end_job(nzo) if unwanted_file: logging.warning( @@ -153,7 +149,7 @@ class Assembler(Thread): if cfg.action_on_unwanted_extensions() == 2: logging.debug("Unwanted extension ... 
aborting") nzo.fail_msg = T("Aborted, unwanted extension detected") - sabnzbd.nzbqueue.NzbQueue.do.end_job(nzo) + sabnzbd.NzbQueue.end_job(nzo) # Add to direct unpack nzo.add_to_direct_unpacker(nzf) @@ -177,11 +173,11 @@ class Assembler(Thread): reason, ) nzo.fail_msg = T("Aborted, rating filter matched (%s)") % reason - sabnzbd.nzbqueue.NzbQueue.do.end_job(nzo) + sabnzbd.NzbQueue.end_job(nzo) else: - sabnzbd.nzbqueue.NzbQueue.do.remove(nzo.nzo_id, add_to_history=False, cleanup=False) - PostProcessor.do.process(nzo) + sabnzbd.NzbQueue.remove(nzo.nzo_id, add_to_history=False, cleanup=False) + sabnzbd.PostProcessor.process(nzo) @staticmethod def assemble(nzf, file_done): @@ -205,7 +201,7 @@ class Assembler(Thread): # Write all decoded articles if article.decoded: - data = ArticleCache.do.load_article(article) + data = sabnzbd.ArticleCache.load_article(article) # Could be empty in case nzo was deleted if data: fout.write(data) @@ -235,7 +231,7 @@ def file_has_articles(nzf): has = False for article in nzf.decodetable: sleep(0.01) - data = ArticleCache.do.load_article(article) + data = sabnzbd.ArticleCache.load_article(article) if data: has = True return has @@ -369,8 +365,8 @@ def check_encrypted_and_unwanted_files(nzo, filepath): def nzo_filtered_by_rating(nzo): - if Rating.do and cfg.rating_enable() and cfg.rating_filter_enable() and (nzo.rating_filtered < 2): - rating = Rating.do.get_rating_by_nzo(nzo.nzo_id) + if cfg.rating_enable() and cfg.rating_filter_enable() and (nzo.rating_filtered < 2): + rating = sabnzbd.Rating.get_rating_by_nzo(nzo.nzo_id) if rating is not None: nzo.rating_filtered = 1 reason = rating_filtered(rating, nzo.filename.lower(), True) diff --git a/sabnzbd/bpsmeter.py b/sabnzbd/bpsmeter.py index 73de361..e8a3fea 100644 --- a/sabnzbd/bpsmeter.py +++ b/sabnzbd/bpsmeter.py @@ -118,7 +118,6 @@ class BPSMeter: self.q_hour = 0 # Quota reset hour self.q_minute = 0 # Quota reset minute self.quota_enabled = True # Scheduled quota enable/disable - 
BPSMeter.do = self def save(self): """ Save admin to disk """ @@ -235,8 +234,8 @@ class BPSMeter: if self.have_quota and self.quota_enabled: self.left -= amount if self.left <= 0.0: - if sabnzbd.downloader.Downloader.do and not sabnzbd.downloader.Downloader.do.paused: - sabnzbd.downloader.Downloader.do.pause() + if sabnzbd.Downloader.do and not sabnzbd.Downloader.paused: + sabnzbd.Downloader.pause() logging.warning(T("Quota spent, pausing downloading")) # Speedometer @@ -355,8 +354,8 @@ class BPSMeter: logging.info("Quota was reset to %s", self.quota) if cfg.quota_resume(): logging.info("Auto-resume due to quota reset") - if sabnzbd.downloader.Downloader.do: - sabnzbd.downloader.Downloader.do.resume() + if sabnzbd.Downloader.do: + sabnzbd.Downloader.resume() self.next_reset() return False else: @@ -464,8 +463,8 @@ class BPSMeter: @staticmethod def resume(): """ Resume downloading """ - if cfg.quota_resume() and sabnzbd.downloader.Downloader.do and sabnzbd.downloader.Downloader.do.paused: - sabnzbd.downloader.Downloader.do.resume() + if cfg.quota_resume() and sabnzbd.Downloader.paused: + sabnzbd.Downloader.resume() def midnight(self): """ Midnight action: dummy update for all servers """ @@ -476,12 +475,4 @@ class BPSMeter: def quota_handler(): """ To be called from scheduler """ logging.debug("Checking quota") - BPSMeter.do.reset_quota() - - -def midnight_action(): - if BPSMeter.do: - BPSMeter.do.midnight() - - -BPSMeter() + sabnzbd.BPSMeter.reset_quota() diff --git a/sabnzbd/decoder.py b/sabnzbd/decoder.py index fd1bf41..80a2716 100644 --- a/sabnzbd/decoder.py +++ b/sabnzbd/decoder.py @@ -27,9 +27,6 @@ from typing import Tuple, List import sabnzbd from sabnzbd.constants import SABYENC_VERSION_REQUIRED -from sabnzbd.articlecache import ArticleCache -from sabnzbd.downloader import Downloader -from sabnzbd.nzbqueue import NzbQueue from sabnzbd.nzbstuff import Article import sabnzbd.cfg as cfg from sabnzbd.misc import match_str @@ -75,7 +72,6 @@ class Decoder: 
self.decoder_workers = [] for i in range(cfg.num_decoders()): self.decoder_workers.append(DecoderWorker(self.decoder_queue)) - Decoder.do = self def start(self): for decoder_worker in self.decoder_workers: @@ -103,12 +99,12 @@ class Decoder: def process(self, article, raw_data): # We use reported article-size, just like sabyenc does - ArticleCache.do.reserve_space(article.bytes) + sabnzbd.ArticleCache.reserve_space(article.bytes) self.decoder_queue.put((article, raw_data)) def queue_full(self): # Check if the queue size exceeds the limits - return self.decoder_queue.qsize() >= ArticleCache.do.decoder_cache_article_limit + return self.decoder_queue.qsize() >= sabnzbd.ArticleCache.decoder_cache_article_limit class DecoderWorker(Thread): @@ -138,7 +134,7 @@ class DecoderWorker(Thread): art_id = article.article # Free space in the decoder-queue - ArticleCache.do.free_reserved_space(article.bytes) + sabnzbd.ArticleCache.free_reserved_space(article.bytes) # Keeping track decoded_data = None @@ -157,12 +153,12 @@ class DecoderWorker(Thread): except MemoryError: logging.warning(T("Decoder failure: Out of memory")) logging.info("Decoder-Queue: %d", self.decoder_queue.qsize()) - logging.info("Cache: %d, %d, %d", *ArticleCache.do.cache_info()) + logging.info("Cache: %d, %d, %d", *sabnzbd.ArticleCache.cache_info()) logging.info("Traceback: ", exc_info=True) - Downloader.do.pause() + sabnzbd.Downloader.pause() # This article should be fetched again - NzbQueue.do.reset_try_lists(article) + sabnzbd.NzbQueue.reset_try_lists(article) continue except CrcError: @@ -195,7 +191,7 @@ class DecoderWorker(Thread): logme = T("UUencode detected, only yEnc encoding is supported [%s]") % nzo.final_name logging.error(logme) nzo.fail_msg = logme - NzbQueue.do.end_job(nzo) + sabnzbd.NzbQueue.end_job(nzo) break # Pre-check, proper article found so just register @@ -221,9 +217,9 @@ class DecoderWorker(Thread): if decoded_data: # If the data needs to be written to disk due to full cache, this will 
be slow # Causing the decoder-queue to fill up and delay the downloader - ArticleCache.do.save_article(article, decoded_data) + sabnzbd.ArticleCache.save_article(article, decoded_data) - NzbQueue.do.register_article(article, article_success) + sabnzbd.NzbQueue.register_article(article, article_success) def decode(article, raw_data): diff --git a/sabnzbd/dirscanner.py b/sabnzbd/dirscanner.py index 4eba481..54b703c 100644 --- a/sabnzbd/dirscanner.py +++ b/sabnzbd/dirscanner.py @@ -89,7 +89,6 @@ class DirScanner(threading.Thread): self.trigger = False cfg.dirscan_dir.callback(self.newdir) cfg.dirscan_speed.callback(self.newspeed) - DirScanner.do = self def newdir(self): """ We're notified of a dir change """ @@ -213,9 +212,3 @@ class DirScanner(threading.Thread): if os.path.isdir(dpath) and dd.lower() in cats: run_dir(dpath, dd.lower()) self.busy = False - - -def dirscan(): - """ Wrapper required for scheduler """ - logging.info("Scheduled or manual watched folder scan") - DirScanner.do.scan() diff --git a/sabnzbd/downloader.py b/sabnzbd/downloader.py index 1e37631..5384b1b 100644 --- a/sabnzbd/downloader.py +++ b/sabnzbd/downloader.py @@ -35,7 +35,6 @@ from sabnzbd.newswrapper import NewsWrapper, request_server_info import sabnzbd.notifier as notifier import sabnzbd.config as config import sabnzbd.cfg as cfg -from sabnzbd.bpsmeter import BPSMeter import sabnzbd.scheduler from sabnzbd.misc import from_units, nntp_to_msg, int_conv from sabnzbd.utils.happyeyeballs import happyeyeballs @@ -80,10 +79,10 @@ class Server: self.restart = False self.displayname = displayname self.host = host - self.port = port + self.port: int = port self.timeout = timeout self.threads = threads - self.priority = priority + self.priority: int = priority self.ssl = ssl self.ssl_verify = ssl_verify self.ssl_ciphers = ssl_ciphers @@ -211,8 +210,6 @@ class Downloader(Thread): for server in config.get_servers(): self.init_server(None, server) - Downloader.do = self - def init_server(self, 
oldserver, newserver): """Setup or re-setup single server When oldserver is defined and in use, delay startup. @@ -298,7 +295,7 @@ class Downloader(Thread): logging.info("Pausing") notifier.send_notification("SABnzbd", T("Paused"), "pause_resume") if self.is_paused(): - BPSMeter.do.reset() + sabnzbd.BPSMeter.reset() if cfg.autodisconnect(): self.disconnect() @@ -358,7 +355,7 @@ class Downloader(Thread): if not self.paused: return False else: - if sabnzbd.nzbqueue.NzbQueue.do.has_forced_items(): + if sabnzbd.NzbQueue.has_forced_items(): return False else: return True @@ -404,7 +401,7 @@ class Downloader(Thread): # Make sure server address resolution is refreshed server.info = None - sabnzbd.nzbqueue.NzbQueue.do.reset_all_try_lists() + sabnzbd.NzbQueue.reset_all_try_lists() def decode(self, article, raw_data): """Decode article and check the status of @@ -413,23 +410,21 @@ class Downloader(Thread): # Handle broken articles directly if not raw_data: if not article.search_new_server(): - sabnzbd.nzbqueue.NzbQueue.do.register_article(article, success=False) + sabnzbd.NzbQueue.register_article(article, success=False) return # Send to decoder-queue - sabnzbd.decoder.Decoder.do.process(article, raw_data) + sabnzbd.Decoder.process(article, raw_data) # See if we need to delay because the queues are full logged = False - while not self.shutdown and ( - sabnzbd.decoder.Decoder.do.queue_full() or sabnzbd.assembler.Assembler.do.queue_full() - ): + while not self.shutdown and (sabnzbd.Decoder.queue_full() or sabnzbd.Assembler.queue_full()): if not logged: # Only log once, to not waste any CPU-cycles logging.debug( "Delaying - Decoder queue: %s - Assembler queue: %s", - sabnzbd.decoder.Decoder.do.decoder_queue.qsize(), - sabnzbd.assembler.Assembler.do.queue.qsize(), + sabnzbd.Decoder.decoder_queue.qsize(), + sabnzbd.Assembler.queue.qsize(), ) logged = True time.sleep(0.05) @@ -444,7 +439,7 @@ class Downloader(Thread): logging.debug("SSL verification test: %s", 
sabnzbd.CERTIFICATE_VALIDATION) # Kick BPS-Meter to check quota - BPSMeter.do.update() + sabnzbd.BPSMeter.update() while 1: for server in self.servers: @@ -464,7 +459,7 @@ class Downloader(Thread): if newid: self.init_server(None, newid) self.__restart -= 1 - sabnzbd.nzbqueue.NzbQueue.do.reset_all_try_lists() + sabnzbd.NzbQueue.reset_all_try_lists() # Have to leave this loop, because we removed element break else: @@ -486,12 +481,12 @@ class Downloader(Thread): if not server.info: # Only request info if there's stuff in the queue - if not sabnzbd.nzbqueue.NzbQueue.do.is_empty(): + if not sabnzbd.NzbQueue.is_empty(): self.maybe_block_server(server) request_server_info(server) break - article = sabnzbd.nzbqueue.NzbQueue.do.get_article(server, self.servers) + article = sabnzbd.NzbQueue.get_article(server, self.servers) if not article: break @@ -563,26 +558,26 @@ class Downloader(Thread): # Need to initialize the check during first 20 seconds if self.can_be_slowed is None or self.can_be_slowed_timer: # Wait for stable speed to start testing - if not self.can_be_slowed_timer and BPSMeter.do.get_stable_speed(timespan=10): + if not self.can_be_slowed_timer and sabnzbd.BPSMeter.get_stable_speed(timespan=10): self.can_be_slowed_timer = time.time() # Check 10 seconds after enabling slowdown if self.can_be_slowed_timer and time.time() > self.can_be_slowed_timer + 10: # Now let's check if it was stable in the last 10 seconds - self.can_be_slowed = BPSMeter.do.get_stable_speed(timespan=10) + self.can_be_slowed = sabnzbd.BPSMeter.get_stable_speed(timespan=10) self.can_be_slowed_timer = 0 logging.debug("Downloader-slowdown: %r", self.can_be_slowed) else: read, write, error = ([], [], []) - BPSMeter.do.reset() + sabnzbd.BPSMeter.reset() time.sleep(1.0) DOWNLOADER_CV.acquire() while ( - (sabnzbd.nzbqueue.NzbQueue.do.is_empty() or self.is_paused() or self.postproc) + (sabnzbd.NzbQueue.is_empty() or self.is_paused() or self.postproc) and not self.shutdown and not self.__restart ): @@ 
-603,7 +598,7 @@ class Downloader(Thread): self.write_fds.pop(fileno) if not read: - BPSMeter.do.update() + sabnzbd.BPSMeter.update() continue for selected in read: @@ -620,7 +615,7 @@ class Downloader(Thread): bytes_received, done, skip = (0, False, False) if skip: - BPSMeter.do.update() + sabnzbd.BPSMeter.update() continue if bytes_received < 1: @@ -630,12 +625,12 @@ class Downloader(Thread): else: if self.bandwidth_limit: limit = self.bandwidth_limit - if bytes_received + BPSMeter.do.bps > limit: - while BPSMeter.do.bps > limit: + if bytes_received + sabnzbd.BPSMeter.bps > limit: + while sabnzbd.BPSMeter.bps > limit: time.sleep(0.05) - BPSMeter.do.update() - BPSMeter.do.update(server.id, bytes_received) - nzo.update_download_stats(BPSMeter.do.bps, server.id, bytes_received) + sabnzbd.BPSMeter.update() + sabnzbd.BPSMeter.update(server.id, bytes_received) + nzo.update_download_stats(sabnzbd.BPSMeter.bps, server.id, bytes_received) if not done and nw.status_code != 222: if not nw.connected or nw.status_code == 480: @@ -717,7 +712,7 @@ class Downloader(Thread): server.active = False if penalty and (block or server.optional): self.plan_server(server, penalty) - sabnzbd.nzbqueue.NzbQueue.do.reset_all_try_lists() + sabnzbd.NzbQueue.reset_all_try_lists() self.__reset_nw(nw, None, warn=False, send_quit=True) continue except: @@ -824,7 +819,7 @@ class Downloader(Thread): self.decode(article, None) else: # Allow all servers to iterate over each nzo/nzf again - sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(article) + sabnzbd.NzbQueue.reset_try_lists(article) if destroy: nw.terminate(quit=send_quit) @@ -947,12 +942,12 @@ class Downloader(Thread): def stop(): DOWNLOADER_CV.acquire() try: - Downloader.do.stop() + sabnzbd.Downloader.stop() finally: DOWNLOADER_CV.notify_all() DOWNLOADER_CV.release() try: - Downloader.do.join() + sabnzbd.Downloader.join() except: pass diff --git a/sabnzbd/interface.py b/sabnzbd/interface.py index 7a863ec..870747b 100644 --- a/sabnzbd/interface.py 
+++ b/sabnzbd/interface.py @@ -33,12 +33,11 @@ import functools from threading import Thread from random import randint from xml.sax.saxutils import escape +from Cheetah.Template import Template import sabnzbd import sabnzbd.rss import sabnzbd.scheduler as scheduler - -from Cheetah.Template import Template from sabnzbd.misc import ( to_units, from_units, @@ -52,24 +51,17 @@ from sabnzbd.misc import ( ) from sabnzbd.filesystem import real_path, long_path, globber, globber_full, remove_all, clip_path, same_file from sabnzbd.newswrapper import GetServerParms -from sabnzbd.bpsmeter import BPSMeter from sabnzbd.encoding import xml_name, utob import sabnzbd.config as config import sabnzbd.cfg as cfg import sabnzbd.notifier as notifier import sabnzbd.newsunpack -from sabnzbd.downloader import Downloader -from sabnzbd.nzbqueue import NzbQueue from sabnzbd.utils.servertests import test_nntp_server_dict -from sabnzbd.decoder import SABYENC_ENABLED from sabnzbd.utils.diskspeed import diskspeedmeasure from sabnzbd.utils.getperformance import getpystone from sabnzbd.utils.internetspeed import internetspeed - from sabnzbd.constants import MEBI, DEF_SKIN_COLORS, DEF_STDCONFIG, DEF_MAIN_TMPL, DEFAULT_PRIORITY, CHEETAH_DIRECTIVES - from sabnzbd.lang import list_languages - from sabnzbd.api import ( list_scripts, list_cats, @@ -408,7 +400,7 @@ class MainPage: ) ) - bytespersec_list = BPSMeter.do.get_bps_list() + bytespersec_list = sabnzbd.BPSMeter.get_bps_list() info["bytespersec_list"] = ",".join([str(bps) for bps in bytespersec_list]) template = Template( @@ -432,7 +424,7 @@ class MainPage: @secured_expose(check_api_key=True) def pause(self, **kwargs): scheduler.plan_resume(0) - Downloader.do.pause() + sabnzbd.Downloader.pause() raise Raiser(self.__root) @secured_expose(check_api_key=True) @@ -723,7 +715,7 @@ class NzoPage: nzo_id = a break - nzo = NzbQueue.do.get_nzo(nzo_id) + nzo = sabnzbd.NzbQueue.get_nzo(nzo_id) if nzo_id and nzo: info, pnfo_list, bytespersec, q_size, 
bytes_left_previous_page = build_queue_header() @@ -762,7 +754,7 @@ class NzoPage: n = 0 for pnfo in pnfo_list: if pnfo.nzo_id == nzo_id: - nzo = NzbQueue.do.get_nzo(nzo_id) + nzo = sabnzbd.NzbQueue.get_nzo(nzo_id) repair = pnfo.repair unpack = pnfo.unpack delete = pnfo.delete @@ -795,7 +787,7 @@ class NzoPage: def nzo_files(self, info, nzo_id): active = [] - nzo = NzbQueue.do.get_nzo(nzo_id) + nzo = sabnzbd.NzbQueue.get_nzo(nzo_id) if nzo: pnfo = nzo.gather_info(full=True) info["nzo_id"] = pnfo.nzo_id @@ -831,15 +823,15 @@ class NzoPage: script = kwargs.get("script", None) cat = kwargs.get("cat", None) priority = kwargs.get("priority", None) - nzo = NzbQueue.do.get_nzo(nzo_id) + nzo = sabnzbd.NzbQueue.get_nzo(nzo_id) if index is not None: - NzbQueue.do.switch(nzo_id, index) + sabnzbd.NzbQueue.switch(nzo_id, index) if name is not None: - NzbQueue.do.change_name(nzo_id, name, password) + sabnzbd.NzbQueue.change_name(nzo_id, name, password) if cat is not None and nzo.cat is not cat and not (nzo.cat == "*" and cat == "Default"): - NzbQueue.do.change_cat(nzo_id, cat, priority) + sabnzbd.NzbQueue.change_cat(nzo_id, cat, priority) # Category changed, so make sure "Default" attributes aren't set again if script == "Default": script = None @@ -849,11 +841,11 @@ class NzoPage: pp = None if script is not None and nzo.script != script: - NzbQueue.do.change_script(nzo_id, script) + sabnzbd.NzbQueue.change_script(nzo_id, script) if pp is not None and nzo.pp != pp: - NzbQueue.do.change_opts(nzo_id, pp) + sabnzbd.NzbQueue.change_opts(nzo_id, pp) if priority is not None and nzo.priority != int(priority): - NzbQueue.do.set_priority(nzo_id, priority) + sabnzbd.NzbQueue.set_priority(nzo_id, priority) raise Raiser(urllib.parse.urljoin(self.__root, "../queue/")) @@ -862,7 +854,7 @@ class NzoPage: if kwargs["action_key"] == "Delete": for key in kwargs: if kwargs[key] == "on": - NzbQueue.do.remove_nzf(nzo_id, key, force_delete=True) + sabnzbd.NzbQueue.remove_nzf(nzo_id, key, 
force_delete=True) elif kwargs["action_key"] in ("Top", "Up", "Down", "Bottom"): nzf_ids = [] @@ -871,15 +863,15 @@ class NzoPage: nzf_ids.append(key) size = int_conv(kwargs.get("action_size", 1)) if kwargs["action_key"] == "Top": - NzbQueue.do.move_top_bulk(nzo_id, nzf_ids) + sabnzbd.NzbQueue.move_top_bulk(nzo_id, nzf_ids) elif kwargs["action_key"] == "Up": - NzbQueue.do.move_up_bulk(nzo_id, nzf_ids, size) + sabnzbd.NzbQueue.move_up_bulk(nzo_id, nzf_ids, size) elif kwargs["action_key"] == "Down": - NzbQueue.do.move_down_bulk(nzo_id, nzf_ids, size) + sabnzbd.NzbQueue.move_down_bulk(nzo_id, nzf_ids, size) elif kwargs["action_key"] == "Bottom": - NzbQueue.do.move_bottom_bulk(nzo_id, nzf_ids) + sabnzbd.NzbQueue.move_bottom_bulk(nzo_id, nzf_ids) - if NzbQueue.do.get_nzo(nzo_id): + if sabnzbd.NzbQueue.get_nzo(nzo_id): url = urllib.parse.urljoin(self.__root, nzo_id) else: url = urllib.parse.urljoin(self.__root, "../queue") @@ -910,12 +902,12 @@ class QueuePage: uid = kwargs.get("uid") del_files = int_conv(kwargs.get("del_files")) if uid: - NzbQueue.do.remove(uid, add_to_history=False, delete_all_data=del_files) + sabnzbd.NzbQueue.remove(uid, add_to_history=False, delete_all_data=del_files) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) def purge(self, **kwargs): - NzbQueue.do.remove_all(kwargs.get("search")) + sabnzbd.NzbQueue.remove_all(kwargs.get("search")) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) @@ -932,7 +924,7 @@ class QueuePage: uid1 = kwargs.get("uid1") uid2 = kwargs.get("uid2") if uid1 and uid2: - NzbQueue.do.switch(uid1, uid2) + sabnzbd.NzbQueue.switch(uid1, uid2) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) @@ -940,7 +932,7 @@ class QueuePage: nzo_id = kwargs.get("nzo_id") pp = kwargs.get("pp", "") if nzo_id and pp and pp.isdigit(): - NzbQueue.do.change_opts(nzo_id, int(pp)) + sabnzbd.NzbQueue.change_opts(nzo_id, int(pp)) raise queueRaiser(self.__root, kwargs) 
@secured_expose(check_api_key=True) @@ -950,7 +942,7 @@ class QueuePage: if nzo_id and script: if script == "None": script = None - NzbQueue.do.change_script(nzo_id, script) + sabnzbd.NzbQueue.change_script(nzo_id, script) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) @@ -960,7 +952,7 @@ class QueuePage: if nzo_id and cat: if cat == "None": cat = None - NzbQueue.do.change_cat(nzo_id, cat) + sabnzbd.NzbQueue.change_cat(nzo_id, cat) raise queueRaiser(self.__root, kwargs) @@ -972,7 +964,7 @@ class QueuePage: @secured_expose(check_api_key=True) def pause(self, **kwargs): scheduler.plan_resume(0) - Downloader.do.pause() + sabnzbd.Downloader.pause() raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) @@ -984,33 +976,33 @@ class QueuePage: @secured_expose(check_api_key=True) def pause_nzo(self, **kwargs): uid = kwargs.get("uid", "") - NzbQueue.do.pause_multiple_nzo(uid.split(",")) + sabnzbd.NzbQueue.pause_multiple_nzo(uid.split(",")) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) def resume_nzo(self, **kwargs): uid = kwargs.get("uid", "") - NzbQueue.do.resume_multiple_nzo(uid.split(",")) + sabnzbd.NzbQueue.resume_multiple_nzo(uid.split(",")) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) def set_priority(self, **kwargs): - NzbQueue.do.set_priority(kwargs.get("nzo_id"), kwargs.get("priority")) + sabnzbd.NzbQueue.set_priority(kwargs.get("nzo_id"), kwargs.get("priority")) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) def sort_by_avg_age(self, **kwargs): - NzbQueue.do.sort_queue("avg_age", kwargs.get("dir")) + sabnzbd.NzbQueue.sort_queue("avg_age", kwargs.get("dir")) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) def sort_by_name(self, **kwargs): - NzbQueue.do.sort_queue("name", kwargs.get("dir")) + sabnzbd.NzbQueue.sort_queue("name", kwargs.get("dir")) raise queueRaiser(self.__root, kwargs) 
@secured_expose(check_api_key=True) def sort_by_size(self, **kwargs): - NzbQueue.do.sort_queue("size", kwargs.get("dir")) + sabnzbd.NzbQueue.sort_queue("size", kwargs.get("dir")) raise queueRaiser(self.__root, kwargs) @@ -1034,7 +1026,7 @@ class HistoryPage: history["rating_enable"] = bool(cfg.rating_enable()) postfix = T("B") # : Abbreviation for bytes, as in GB - grand, month, week, day = BPSMeter.do.get_sums() + grand, month, week, day = sabnzbd.BPSMeter.get_sums() history["total_size"], history["month_size"], history["week_size"], history["day_size"] = ( to_units(grand, postfix=postfix), to_units(month, postfix=postfix), @@ -1113,7 +1105,7 @@ class ConfigPage: conf["have_unzip"] = bool(sabnzbd.newsunpack.ZIP_COMMAND) conf["have_7zip"] = bool(sabnzbd.newsunpack.SEVEN_COMMAND) - conf["have_sabyenc"] = SABYENC_ENABLED + conf["have_sabyenc"] = sabnzbd.decoder.SABYENC_ENABLED conf["have_mt_par2"] = sabnzbd.newsunpack.PAR2_MT conf["certificate_validation"] = sabnzbd.CERTIFICATE_VALIDATION @@ -1124,7 +1116,7 @@ class ConfigPage: new[svr] = {} conf["servers"] = new - conf["folders"] = NzbQueue.do.scan_jobs(all_jobs=False, action=False) + conf["folders"] = sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=False) template = Template( file=os.path.join(sabnzbd.WEB_DIR_CONFIG, "config.tmpl"), @@ -1589,7 +1581,7 @@ class ConfigServer: ) for svr in server_names: new.append(servers[svr].get_dict(safe=True)) - t, m, w, d, timeline = BPSMeter.do.amounts(svr) + t, m, w, d, timeline = sabnzbd.BPSMeter.amounts(svr) if t: new[-1]["amounts"] = to_units(t), to_units(m), to_units(w), to_units(d), timeline conf["servers"] = new @@ -1626,7 +1618,7 @@ class ConfigServer: def clrServer(self, **kwargs): server = kwargs.get("server") if server: - BPSMeter.do.clear_server(server) + sabnzbd.BPSMeter.clear_server(server) raise Raiser(self.__root) @secured_expose(check_api_key=True, check_configlock=True) @@ -1637,7 +1629,7 @@ class ConfigServer: if svr: svr.enable.set(not svr.enable()) 
config.save_config() - Downloader.do.update_server(server, server) + sabnzbd.Downloader.update_server(server, server) raise Raiser(self.__root) @@ -1715,7 +1707,7 @@ def handle_server(kwargs, root=None, new_svr=False): config.ConfigServer(server, kwargs) config.save_config() - Downloader.do.update_server(old_server, server) + sabnzbd.Downloader.update_server(old_server, server) if root: if ajax: return sabnzbd.api.report("json") @@ -2420,12 +2412,12 @@ class Status: @secured_expose(check_api_key=True) def reset_quota(self, **kwargs): - BPSMeter.do.reset_quota(force=True) + sabnzbd.BPSMeter.reset_quota(force=True) raise Raiser(self.__root) @secured_expose(check_api_key=True) def disconnect(self, **kwargs): - Downloader.do.disconnect() + sabnzbd.Downloader.disconnect() raise Raiser(self.__root) @secured_expose(check_api_key=True) @@ -2487,7 +2479,7 @@ class Status: @secured_expose(check_api_key=True) def unblock_server(self, **kwargs): - Downloader.do.unblock(kwargs.get("server")) + sabnzbd.Downloader.unblock(kwargs.get("server")) # Short sleep so that UI shows new server status time.sleep(1.0) raise Raiser(self.__root) @@ -2550,7 +2542,7 @@ def orphan_delete(kwargs): def orphan_delete_all(): - paths = NzbQueue.do.scan_jobs(all_jobs=False, action=False) + paths = sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=False) for path in paths: kwargs = {"name": path} orphan_delete(kwargs) @@ -2561,11 +2553,11 @@ def orphan_add(kwargs): if path: path = os.path.join(long_path(cfg.download_dir.get_path()), path) logging.info("Re-adding orphaned job %s", path) - NzbQueue.do.repair_job(path, None, None) + sabnzbd.NzbQueue.repair_job(path, None, None) def orphan_add_all(): - paths = NzbQueue.do.scan_jobs(all_jobs=False, action=False) + paths = sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=False) for path in paths: kwargs = {"name": path} orphan_add(kwargs) diff --git a/sabnzbd/newswrapper.py b/sabnzbd/newswrapper.py index 0ba858b..8ef7de2 100644 --- a/sabnzbd/newswrapper.py 
+++ b/sabnzbd/newswrapper.py @@ -50,7 +50,7 @@ def _retrieve_info(server): else: server.bad_cons = 0 (server.info, server.request) = (info, False) - sabnzbd.downloader.Downloader.do.wakeup() + sabnzbd.Downloader.wakeup() def request_server_info(server): diff --git a/sabnzbd/nzbparser.py b/sabnzbd/nzbparser.py index 1b29420..52c888a 100644 --- a/sabnzbd/nzbparser.py +++ b/sabnzbd/nzbparser.py @@ -234,10 +234,10 @@ def process_nzb_archive_file( if nzo: if nzo_id: # Re-use existing nzo_id, when a "future" job gets it payload - sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False, delete_all_data=False) + sabnzbd.NzbQueue.remove(nzo_id, add_to_history=False, delete_all_data=False) nzo.nzo_id = nzo_id nzo_id = None - nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo)) + nzo_ids.append(sabnzbd.NzbQueue.add(nzo)) nzo.update_rating() zf.close() try: @@ -329,7 +329,7 @@ def process_single_nzb( except TypeError: # Duplicate, ignore if nzo_id: - sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False) + sabnzbd.NzbQueue.remove(nzo_id, add_to_history=False) nzo = None except ValueError: # Empty @@ -346,9 +346,9 @@ def process_single_nzb( if nzo: if nzo_id: # Re-use existing nzo_id, when a "future" job gets it payload - sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False, delete_all_data=False) + sabnzbd.NzbQueue.remove(nzo_id, add_to_history=False, delete_all_data=False) nzo.nzo_id = nzo_id - nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo, quiet=reuse)) + nzo_ids.append(sabnzbd.NzbQueue.add(nzo, quiet=reuse)) nzo.update_rating() try: diff --git a/sabnzbd/nzbqueue.py b/sabnzbd/nzbqueue.py index f542b1b..2067347 100644 --- a/sabnzbd/nzbqueue.py +++ b/sabnzbd/nzbqueue.py @@ -55,9 +55,8 @@ from sabnzbd.constants import ( import sabnzbd.cfg as cfg import sabnzbd.downloader -from sabnzbd.assembler import Assembler, file_has_articles +from sabnzbd.assembler import file_has_articles import sabnzbd.notifier as notifier -from sabnzbd.bpsmeter 
import BPSMeter class NzbQueue: @@ -70,8 +69,6 @@ class NzbQueue: self.__nzo_list: List[NzbObject] = [] self.__nzo_table: Dict[str, NzbObject] = {} - NzbQueue.do = self - def read_queue(self, repair): """Read queue from disk, supporting repair modes 0 = no repairs @@ -411,7 +408,7 @@ class NzbQueue: # Any files left? Otherwise let's disconnect if self.actives(grabs=False) == 0 and cfg.autodisconnect(): # This was the last job, close server connections - sabnzbd.downloader.Downloader.do.disconnect() + sabnzbd.Downloader.disconnect() return removed @@ -754,7 +751,7 @@ class NzbQueue: # Only start decoding if we have a filename and type # The type is only set if sabyenc could decode the article if nzf.filename and nzf.type: - Assembler.do.process((nzo, nzf, file_done)) + sabnzbd.Assembler.process((nzo, nzf, file_done)) elif nzf.filename.lower().endswith(".par2"): # Broken par2 file, try to get another one nzo.promote_par2(nzf) @@ -765,7 +762,7 @@ class NzbQueue: # Save bookkeeping in case of crash if file_done and (nzo.next_save is None or time.time() > nzo.next_save): nzo.save_to_disk() - BPSMeter.do.save() + sabnzbd.BPSMeter.save() if nzo.save_timeout is None: nzo.next_save = None else: @@ -793,7 +790,7 @@ class NzbQueue: else: # Not enough data, let postprocessor show it as failed pass - Assembler.do.process((nzo, None, None)) + sabnzbd.Assembler.process((nzo, None, None)) def actives(self, grabs=True) -> int: """Return amount of non-paused jobs, optionally with 'grabbing' items @@ -869,10 +866,10 @@ class NzbQueue: # Stall prevention by checking if all servers are in the trylist # This is a CPU-cheaper alternative to prevent stalling - if len(nzo.try_list) == sabnzbd.downloader.Downloader.do.server_nr: + if len(nzo.try_list) == sabnzbd.Downloader.server_nr: # Maybe the NZF's need a reset too? 
for nzf in nzo.files: - if len(nzf.try_list) == sabnzbd.downloader.Downloader.do.server_nr: + if len(nzf.try_list) == sabnzbd.Downloader.server_nr: # We do not want to reset all article trylists, they are good logging.info("Resetting bad trylist for file %s in job %s", nzf.filename, nzo.final_name) nzf.reset_try_list() diff --git a/sabnzbd/nzbstuff.py b/sabnzbd/nzbstuff.py index 42a7892..bf5ce43 100644 --- a/sabnzbd/nzbstuff.py +++ b/sabnzbd/nzbstuff.py @@ -85,8 +85,6 @@ import sabnzbd.config as config import sabnzbd.cfg as cfg import sabnzbd.nzbparser from sabnzbd.database import HistoryDB -from sabnzbd.articlecache import ArticleCache -from sabnzbd.rating import Rating # Name patterns SUBJECT_FN_MATCHER = re.compile(r'"([^"]*)"') @@ -112,7 +110,7 @@ class TryList: self.try_list: List[sabnzbd.downloader.Server] = [] self.fetcher_priority = 0 - def server_in_try_list(self, server): + def server_in_try_list(self, server: sabnzbd.downloader.Server): """ Return whether specified server has been tried """ with TRYLIST_LOCK: return server in self.try_list @@ -135,8 +133,8 @@ class TryList: def __setstate__(self, servers_ids): self.try_list = [] for server_id in servers_ids: - if server_id in sabnzbd.downloader.Downloader.do.server_dict: - self.add_to_try_list(sabnzbd.downloader.Downloader.do.server_dict[server_id]) + if server_id in sabnzbd.Downloader.server_dict: + self.add_to_try_list(sabnzbd.Downloader.server_dict[server_id]) ############################################################################## @@ -238,12 +236,12 @@ class Article(TryList): def search_new_server(self): # Search new server self.add_to_try_list(self.fetcher) - for server in sabnzbd.downloader.Downloader.do.servers: + for server in sabnzbd.Downloader.servers: if server.active and not self.server_in_try_list(server): if server.priority >= self.fetcher.priority: self.tries = 0 # Allow all servers for this nzo and nzf again (but not for this article) - 
sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(self, article_reset=False) + sabnzbd.NzbQueue.reset_try_lists(self, article_reset=False) return True logging.info(T("%s => missing from all servers, discarding") % self) @@ -915,8 +913,8 @@ class NzbObject(TryList): if accept == 2: self.deleted = True self.status = Status.FAILED - sabnzbd.NzbQueue.do.add(self, quiet=True) - sabnzbd.NzbQueue.do.end_job(self) + sabnzbd.NzbQueue.add(self, quiet=True) + sabnzbd.NzbQueue.end_job(self) # Raise error, so it's not added raise TypeError @@ -1576,7 +1574,7 @@ class NzbObject(TryList): if not nzf.import_finished: # Only load NZF when it's a primary server # or when it's a backup server without active primaries - if sabnzbd.highest_server(server): + if sabnzbd.Downloader.highest_server(server): nzf.finish_import() # Still not finished? Something went wrong... if not nzf.import_finished and not self.is_gone(): @@ -1598,7 +1596,7 @@ class NzbObject(TryList): # If cleanup emptied the active files list, end this job if nzf_remove_list and not self.files: - sabnzbd.NzbQueue.do.end_job(self) + sabnzbd.NzbQueue.end_job(self) if not article: # No articles for this server, block for next time @@ -1758,7 +1756,7 @@ class NzbObject(TryList): fields = {} for k in rating_types: fields[k] = _get_first_meta(k) - Rating.do.add_rating(_get_first_meta("id"), self.nzo_id, fields) + sabnzbd.Rating.add_rating(_get_first_meta("id"), self.nzo_id, fields) except: pass @@ -1798,7 +1796,7 @@ class NzbObject(TryList): self.abort_direct_unpacker() # Remove all cached files - ArticleCache.do.purge_articles(self.saved_articles) + sabnzbd.ArticleCache.purge_articles(self.saved_articles) # Delete all, or just basic files if self.futuretype: diff --git a/sabnzbd/osxmenu.py b/sabnzbd/osxmenu.py index 5afa31f..3792ac0 100644 --- a/sabnzbd/osxmenu.py +++ b/sabnzbd/osxmenu.py @@ -41,11 +41,9 @@ from sabnzbd.panic import launch_a_browser import sabnzbd.notifier as notifier from sabnzbd.api import fast_queue -from 
sabnzbd.nzbqueue import NzbQueue import sabnzbd.config as config import sabnzbd.scheduler as scheduler import sabnzbd.downloader -from sabnzbd.bpsmeter import BPSMeter status_icons = { "idle": "icons/sabnzbd_osx_idle.tiff", @@ -113,7 +111,7 @@ class SABnzbdDelegate(NSObject): # Variables self.state = "Idle" try: - self.speed = sabnzbd.downloader.Downloader.do.get_limit() + self.speed = sabnzbd.Downloader.get_limit() except: self.speed = 0 self.version_notify = 1 @@ -386,7 +384,7 @@ class SABnzbdDelegate(NSObject): def queueUpdate(self): try: - qnfo = NzbQueue.do.queue_info(start=0, limit=10) + qnfo = sabnzbd.NzbQueue.queue_info(start=0, limit=10) pnfo_list = qnfo.list bytesleftprogess = 0 @@ -407,7 +405,7 @@ class SABnzbdDelegate(NSObject): bytesleftprogess += pnfo.bytes_left bytes_total = pnfo.bytes / MEBI nzo_id = pnfo.nzo_id - timeleft = self.calc_timeleft_(bytesleftprogess, BPSMeter.do.bps) + timeleft = self.calc_timeleft_(bytesleftprogess, sabnzbd.BPSMeter.bps) job = "%s\t(%d/%d MB) %s" % (pnfo.filename, bytesleft, bytes_total, timeleft) menu_queue_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(job, "", "") @@ -546,7 +544,7 @@ class SABnzbdDelegate(NSObject): def iconUpdate(self): try: - if sabnzbd.downloader.Downloader.do.paused: + if sabnzbd.Downloader.paused: self.status_item.setImage_(self.icons["pause"]) else: self.status_item.setImage_(self.icons["idle"]) @@ -555,7 +553,7 @@ class SABnzbdDelegate(NSObject): def pauseUpdate(self): try: - if sabnzbd.downloader.Downloader.do.paused: + if sabnzbd.Downloader.paused: if self.isLeopard: self.resume_menu_item.setHidden_(NO) self.pause_menu_item.setHidden_(YES) @@ -574,7 +572,7 @@ class SABnzbdDelegate(NSObject): def speedlimitUpdate(self): try: - speed = int(sabnzbd.downloader.Downloader.do.get_limit()) + speed = int(sabnzbd.Downloader.get_limit()) if self.speed != speed: self.speed = speed speedsValues = self.menu_speed.numberOfItems() @@ -735,14 +733,14 @@ class SABnzbdDelegate(NSObject): # 
logging.info("[osx] speed limit to %s" % (sender.representedObject())) speed = int(sender.representedObject()) if speed != self.speed: - sabnzbd.downloader.Downloader.do.limit_speed("%s%%" % speed) + sabnzbd.Downloader.limit_speed("%s%%" % speed) self.speedlimitUpdate() def purgeAction_(self, sender): mode = sender.representedObject() # logging.info("[osx] purge %s" % (mode)) if mode == "queue": - NzbQueue.do.remove_all() + sabnzbd.NzbQueue.remove_all() elif mode == "history": if not self.history_db: self.history_db = sabnzbd.database.HistoryDB() @@ -754,13 +752,13 @@ class SABnzbdDelegate(NSObject): if minutes: scheduler.plan_resume(minutes) else: - sabnzbd.downloader.Downloader.do.pause() + sabnzbd.Downloader.pause() def resumeAction_(self, sender): scheduler.plan_resume(0) def watchedFolderAction_(self, sender): - sabnzbd.dirscanner.dirscan() + sabnzbd.DirScanner.scan() def rssAction_(self, sender): scheduler.force_rss() diff --git a/sabnzbd/postproc.py b/sabnzbd/postproc.py index f678630..5ab5a7f 100644 --- a/sabnzbd/postproc.py +++ b/sabnzbd/postproc.py @@ -75,7 +75,6 @@ from sabnzbd.constants import ( VERIFIED_FILE, ) from sabnzbd.nzbparser import process_single_nzb -from sabnzbd.rating import Rating import sabnzbd.emailer as emailer import sabnzbd.downloader import sabnzbd.config as config @@ -126,7 +125,6 @@ class PostProcessor(Thread): self.__stop = False self.__busy = False self.paused = False - PostProcessor.do = self def save(self): """ Save postproc queue """ @@ -162,7 +160,7 @@ class PostProcessor(Thread): nzo.work_name = "" # Mark as deleted job break - def process(self, nzo): + def process(self, nzo: sabnzbd.nzbstuff.NzbObject): """ Push on finished job in the queue """ if nzo not in self.history_queue: self.history_queue.append(nzo) @@ -273,7 +271,7 @@ class PostProcessor(Thread): # Pause downloader, if users wants that if cfg.pause_on_post_processing(): - sabnzbd.downloader.Downloader.do.wait_for_postproc() + sabnzbd.Downloader.wait_for_postproc() 
self.__busy = True @@ -292,7 +290,7 @@ class PostProcessor(Thread): check_eoq = True # Allow download to proceed - sabnzbd.downloader.Downloader.do.resume_from_postproc() + sabnzbd.Downloader.resume_from_postproc() def process_job(nzo): @@ -381,9 +379,9 @@ def process_job(nzo): return False # If we don't need extra par2, we can disconnect - if sabnzbd.nzbqueue.NzbQueue.do.actives(grabs=False) == 0 and cfg.autodisconnect(): + if sabnzbd.NzbQueue.actives(grabs=False) == 0 and cfg.autodisconnect(): # This was the last job, close server connections - sabnzbd.downloader.Downloader.do.disconnect() + sabnzbd.Downloader.disconnect() # Sanitize the resulting files if sabnzbd.WIN32: @@ -591,13 +589,13 @@ def process_job(nzo): # Update indexer with results if cfg.rating_enable(): if nzo.encrypted > 0: - Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_ENCRYPTED) + sabnzbd.Rating.update_auto_flag(nzo.nzo_id, sabnzbd.Rating.FLAG_ENCRYPTED) if empty: - hosts = [s.host for s in sabnzbd.downloader.Downloader.do.nzo_servers(nzo)] + hosts = [s.host for s in sabnzbd.Downloader.nzo_servers(nzo)] if not hosts: hosts = [None] for host in hosts: - Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_EXPIRED, host) + sabnzbd.Rating.update_auto_flag(nzo.nzo_id, sabnzbd.Rating.FLAG_EXPIRED, host) except: logging.error(T("Post Processing Failed for %s (%s)"), filename, T("see logfile")) @@ -791,8 +789,8 @@ def parring(nzo, workdir): if nzo.priority != FORCE_PRIORITY: nzo.priority = REPAIR_PRIORITY nzo.status = Status.FETCHING - sabnzbd.nzbqueue.NzbQueue.do.add(nzo) - sabnzbd.downloader.Downloader.do.resume_from_postproc() + sabnzbd.NzbQueue.add(nzo) + sabnzbd.Downloader.resume_from_postproc() sabnzbd.save_data(verified, VERIFIED_FILE, nzo.workpath) @@ -988,7 +986,7 @@ def rar_renamer(nzo, workdir): def handle_empty_queue(): """ Check if empty queue calls for action """ - if sabnzbd.nzbqueue.NzbQueue.do.actives() == 0: + if sabnzbd.NzbQueue.actives() == 0: sabnzbd.save_state() 
notifier.send_notification("SABnzbd", T("Queue finished"), "queue_done") diff --git a/sabnzbd/rating.py b/sabnzbd/rating.py index ad16c8f..076617a 100644 --- a/sabnzbd/rating.py +++ b/sabnzbd/rating.py @@ -113,7 +113,6 @@ class Rating(Thread): do = None def __init__(self): - Rating.do = self self.shutdown = False self.queue = OrderedSetQueue() try: diff --git a/sabnzbd/rss.py b/sabnzbd/rss.py index 65268e1..c6109b1 100644 --- a/sabnzbd/rss.py +++ b/sabnzbd/rss.py @@ -232,7 +232,6 @@ class RSSQueue: uris = feeds.uri() defCat = feeds.cat() - import sabnzbd.api if not notdefault(defCat) or defCat not in sabnzbd.api.list_cats(default=False): defCat = None diff --git a/sabnzbd/sabtray.py b/sabnzbd/sabtray.py index 24ce28c..bddc9f5 100644 --- a/sabnzbd/sabtray.py +++ b/sabnzbd/sabtray.py @@ -27,7 +27,6 @@ import sabnzbd from sabnzbd.panic import launch_a_browser import sabnzbd.api as api import sabnzbd.scheduler as scheduler -from sabnzbd.downloader import Downloader import sabnzbd.cfg as cfg from sabnzbd.misc import to_units @@ -194,7 +193,7 @@ class SABTrayThread(SysTrayIconThread): def pause(self): scheduler.plan_resume(0) - Downloader.do.pause() + sabnzbd.Downloader.pause() def resume(self): scheduler.plan_resume(0) diff --git a/sabnzbd/sabtraylinux.py b/sabnzbd/sabtraylinux.py index 9fb49f2..8bb9d8c 100644 --- a/sabnzbd/sabtraylinux.py +++ b/sabnzbd/sabtraylinux.py @@ -43,7 +43,6 @@ import sabnzbd from sabnzbd.panic import launch_a_browser import sabnzbd.api as api import sabnzbd.scheduler as scheduler -from sabnzbd.downloader import Downloader import sabnzbd.cfg as cfg from sabnzbd.misc import to_units @@ -196,7 +195,7 @@ class StatusIcon(Thread): def pause(self): scheduler.plan_resume(0) - Downloader.do.pause() + sabnzbd.Downloader.pause() def resume(self): scheduler.plan_resume(0) diff --git a/sabnzbd/scheduler.py b/sabnzbd/scheduler.py index 4a5d50b..1f3a812 100644 --- a/sabnzbd/scheduler.py +++ b/sabnzbd/scheduler.py @@ -30,7 +30,6 @@ import sabnzbd.dirscanner 
import sabnzbd.misc import sabnzbd.config as config import sabnzbd.cfg as cfg -from sabnzbd.postproc import PostProcessor from sabnzbd.constants import LOW_PRIORITY, NORMAL_PRIORITY, HIGH_PRIORITY @@ -47,11 +46,11 @@ def schedule_guard(): def pp_pause(): - PostProcessor.do.paused = True + sabnzbd.PostProcessor.paused = True def pp_resume(): - PostProcessor.do.paused = False + sabnzbd.PostProcessor.paused = False def pp_pause_event(): @@ -98,7 +97,7 @@ def init(): action = scheduled_resume arguments = [] elif action_name == "pause": - action = sabnzbd.downloader.Downloader.do.pause + action = sabnzbd.Downloader.pause arguments = [] elif action_name == "pause_all": action = sabnzbd.pause_all @@ -114,7 +113,7 @@ def init(): elif action_name == "resume_post": action = pp_resume elif action_name == "speedlimit" and arguments != []: - action = sabnzbd.downloader.Downloader.do.limit_speed + action = sabnzbd.Downloader.limit_speed elif action_name == "enable_server" and arguments != []: action = sabnzbd.enable_server elif action_name == "disable_server" and arguments != []: @@ -129,34 +128,34 @@ def init(): elif action_name == "remove_completed": action = sabnzbd.api.history_remove_completed elif action_name == "enable_quota": - action = sabnzbd.bpsmeter.BPSMeter.do.set_status + action = sabnzbd.BPSMeter.set_status arguments = [True] elif action_name == "disable_quota": - action = sabnzbd.bpsmeter.BPSMeter.do.set_status + action = sabnzbd.BPSMeter.set_status arguments = [False] elif action_name == "pause_all_low": - action = sabnzbd.nzbqueue.NzbQueue.do.pause_on_prio + action = sabnzbd.NzbQueue.pause_on_prio arguments = [LOW_PRIORITY] elif action_name == "pause_all_normal": - action = sabnzbd.nzbqueue.NzbQueue.do.pause_on_prio + action = sabnzbd.NzbQueue.pause_on_prio arguments = [NORMAL_PRIORITY] elif action_name == "pause_all_high": - action = sabnzbd.nzbqueue.NzbQueue.do.pause_on_prio + action = sabnzbd.NzbQueue.pause_on_prio arguments = [HIGH_PRIORITY] elif action_name 
== "resume_all_low": - action = sabnzbd.nzbqueue.NzbQueue.do.resume_on_prio + action = sabnzbd.NzbQueue.resume_on_prio arguments = [LOW_PRIORITY] elif action_name == "resume_all_normal": - action = sabnzbd.nzbqueue.NzbQueue.do.resume_on_prio + action = sabnzbd.NzbQueue.resume_on_prio arguments = [NORMAL_PRIORITY] elif action_name == "resume_all_high": - action = sabnzbd.nzbqueue.NzbQueue.do.resume_on_prio + action = sabnzbd.NzbQueue.resume_on_prio arguments = [HIGH_PRIORITY] elif action_name == "pause_cat": - action = sabnzbd.nzbqueue.NzbQueue.do.pause_on_cat + action = sabnzbd.NzbQueue.pause_on_cat arguments = [argument_list] elif action_name == "resume_cat": - action = sabnzbd.nzbqueue.NzbQueue.do.resume_on_cat + action = sabnzbd.NzbQueue.resume_on_cat arguments = [argument_list] else: logging.warning(T("Unknown action: %s"), action_name) @@ -193,7 +192,7 @@ def init(): sabnzbd.misc.check_latest_version, "VerCheck", d, None, (h, m), kronos.method.sequential, [], None ) - action, hour, minute = sabnzbd.bpsmeter.BPSMeter.do.get_quota() + action, hour, minute = sabnzbd.BPSMeter.get_quota() if action: logging.info("Setting schedule for quota check daily at %s:%s", hour, minute) __SCHED.add_daytime_task( @@ -215,7 +214,7 @@ def init(): logging.info("Setting schedule for midnight BPS reset") __SCHED.add_daytime_task( - sabnzbd.bpsmeter.midnight_action, + sabnzbd.BPSMeter.midnight, "midnight_bps", list(range(1, 8)), None, @@ -248,7 +247,7 @@ def restart(force=False): SCHEDULE_GUARD_FLAG = False stop() - analyse(sabnzbd.downloader.Downloader.do.paused) + analyse(sabnzbd.Downloader.paused) init() start() @@ -406,13 +405,13 @@ def analyse(was_paused=False, priority=None): sabnzbd.pause_all() else: sabnzbd.unpause_all() - sabnzbd.downloader.Downloader.do.set_paused_state(paused or paused_all) + sabnzbd.Downloader.set_paused_state(paused or paused_all) - PostProcessor.do.paused = pause_post + sabnzbd.PostProcessor.paused = pause_post if speedlimit is not None: - 
sabnzbd.downloader.Downloader.do.limit_speed(speedlimit) + sabnzbd.Downloader.limit_speed(speedlimit) - sabnzbd.bpsmeter.BPSMeter.do.set_status(quota, action=False) + sabnzbd.BPSMeter.set_status(quota, action=False) for serv in servers: try: @@ -420,7 +419,7 @@ def analyse(was_paused=False, priority=None): value = servers[serv] if bool(item.enable()) != bool(value): item.enable.set(value) - sabnzbd.downloader.Downloader.do.init_server(serv, serv) + sabnzbd.Downloader.init_server(serv, serv) except: pass config.save_config() @@ -457,7 +456,7 @@ def plan_resume(interval): __PAUSE_END = time.time() + (interval * 60) logging.debug("Schedule resume at %s", __PAUSE_END) __SCHED.add_single_task(__oneshot_resume, "", interval * 60, kronos.method.sequential, [__PAUSE_END], None) - sabnzbd.downloader.Downloader.do.pause() + sabnzbd.Downloader.pause() else: __PAUSE_END = None sabnzbd.unpause_all() diff --git a/sabnzbd/urlgrabber.py b/sabnzbd/urlgrabber.py index dfbcb91..daa40c2 100644 --- a/sabnzbd/urlgrabber.py +++ b/sabnzbd/urlgrabber.py @@ -36,8 +36,6 @@ import sabnzbd from sabnzbd.constants import DEF_TIMEOUT, FUTURE_Q_FOLDER, VALID_NZB_FILES, Status, VALID_ARCHIVES import sabnzbd.misc as misc import sabnzbd.filesystem -from sabnzbd.nzbqueue import NzbQueue -from sabnzbd.postproc import PostProcessor import sabnzbd.cfg as cfg import sabnzbd.emailer as emailer import sabnzbd.notifier as notifier @@ -67,11 +65,10 @@ class URLGrabber(Thread): def __init__(self): Thread.__init__(self) self.queue: queue.Queue[Tuple[str, sabnzbd.nzbstuff.NzbObject]] = queue.Queue() - for tup in NzbQueue.do.get_urls(): + for tup in sabnzbd.NzbQueue.get_urls(): url, nzo = tup self.queue.put((url, nzo)) self.shutdown = False - URLGrabber.do = self def add(self, url, future_nzo, when=None): """ Add an URL to the URLGrabber queue, 'when' is seconds from now """ @@ -330,8 +327,8 @@ class URLGrabber(Thread): nzo.cat, _, nzo.script, _ = misc.cat_to_opts(nzo.cat, script=nzo.script) # Add to history and 
run script if desired - NzbQueue.do.remove(nzo.nzo_id, add_to_history=False) - PostProcessor.do.process(nzo) + sabnzbd.NzbQueue.remove(nzo.nzo_id, add_to_history=False) + sabnzbd.PostProcessor.process(nzo) def _build_request(url):