Browse Source

Rework the naming of the main SABnzbd threads

tags/3.1.0RC2
Safihre 5 years ago
parent
commit
ab020a0654
  1. 2
      SABnzbd.py
  2. 201
      sabnzbd/__init__.py
  3. 126
      sabnzbd/api.py
  4. 6
      sabnzbd/articlecache.py
  5. 28
      sabnzbd/assembler.py
  6. 23
      sabnzbd/bpsmeter.py
  7. 22
      sabnzbd/decoder.py
  8. 7
      sabnzbd/dirscanner.py
  9. 63
      sabnzbd/downloader.py
  10. 98
      sabnzbd/interface.py
  11. 2
      sabnzbd/newswrapper.py
  12. 10
      sabnzbd/nzbparser.py
  13. 17
      sabnzbd/nzbqueue.py
  14. 24
      sabnzbd/nzbstuff.py
  15. 22
      sabnzbd/osxmenu.py
  16. 24
      sabnzbd/postproc.py
  17. 1
      sabnzbd/rating.py
  18. 1
      sabnzbd/rss.py
  19. 3
      sabnzbd/sabtray.py
  20. 3
      sabnzbd/sabtraylinux.py
  21. 47
      sabnzbd/scheduler.py
  22. 9
      sabnzbd/urlgrabber.py

2
SABnzbd.py

@ -1529,7 +1529,7 @@ def main():
# Shutdown # Shutdown
sabnzbd.shutdown_program() sabnzbd.shutdown_program()
if sabnzbd.downloader.Downloader.do.paused: if sabnzbd.Downloader.paused:
sabnzbd.RESTART_ARGS.append("-p") sabnzbd.RESTART_ARGS.append("-p")
if autorestarted: if autorestarted:
sabnzbd.RESTART_ARGS.append("--autorestarted") sabnzbd.RESTART_ARGS.append("--autorestarted")

201
sabnzbd/__init__.py

@ -75,34 +75,34 @@ elif os.name == "posix":
pass pass
# Now we can import safely # Now we can import safely
from sabnzbd.nzbqueue import NzbQueue
from sabnzbd.postproc import PostProcessor
from sabnzbd.downloader import Downloader
from sabnzbd.decoder import Decoder
from sabnzbd.assembler import Assembler
from sabnzbd.rating import Rating
import sabnzbd.misc as misc import sabnzbd.misc as misc
import sabnzbd.filesystem as filesystem import sabnzbd.filesystem as filesystem
import sabnzbd.powersup as powersup import sabnzbd.powersup as powersup
from sabnzbd.dirscanner import DirScanner
from sabnzbd.urlgrabber import URLGrabber
import sabnzbd.scheduler as scheduler import sabnzbd.scheduler as scheduler
import sabnzbd.rss as rss import sabnzbd.rss as rss
import sabnzbd.emailer as emailer
from sabnzbd.articlecache import ArticleCache
import sabnzbd.newsunpack
import sabnzbd.encoding as encoding import sabnzbd.encoding as encoding
import sabnzbd.config as config import sabnzbd.config as config
from sabnzbd.bpsmeter import BPSMeter
import sabnzbd.cfg as cfg import sabnzbd.cfg as cfg
import sabnzbd.database import sabnzbd.database
import sabnzbd.lang as lang import sabnzbd.lang as lang
import sabnzbd.par2file as par2file
import sabnzbd.nzbparser as nzbparser import sabnzbd.nzbparser as nzbparser
import sabnzbd.nzbstuff
import sabnzbd.emailer
import sabnzbd.getipaddress
import sabnzbd.api import sabnzbd.api
import sabnzbd.interface import sabnzbd.interface
import sabnzbd.nzbstuff as nzbstuff import sabnzbd.zconfig
import sabnzbd.directunpacker as directunpacker import sabnzbd.directunpacker as directunpacker
import sabnzbd.dirscanner
import sabnzbd.urlgrabber
import sabnzbd.nzbqueue
import sabnzbd.postproc
import sabnzbd.downloader
import sabnzbd.decoder
import sabnzbd.assembler
import sabnzbd.rating
import sabnzbd.articlecache
import sabnzbd.bpsmeter
from sabnzbd.decorators import synchronized from sabnzbd.decorators import synchronized
from sabnzbd.constants import ( from sabnzbd.constants import (
DEFAULT_PRIORITY, DEFAULT_PRIORITY,
@ -112,12 +112,21 @@ from sabnzbd.constants import (
QUEUE_VERSION, QUEUE_VERSION,
QUEUE_FILE_TMPL, QUEUE_FILE_TMPL,
) )
import sabnzbd.getipaddress as getipaddress
LINUX_POWER = powersup.HAVE_DBUS
# Storage for the threads, variables are filled during initialization
ArticleCache: sabnzbd.articlecache.ArticleCache
Rating: sabnzbd.rating.Rating
Assembler: sabnzbd.assembler.Assembler
Decoder: sabnzbd.decoder.Decoder
Downloader: sabnzbd.downloader.Downloader
PostProcessor: sabnzbd.postproc.PostProcessor
NzbQueue: sabnzbd.nzbqueue.NzbQueue
URLGrabber: sabnzbd.urlgrabber.URLGrabber
DirScanner: sabnzbd.dirscanner.DirScanner
BPSMeter: sabnzbd.bpsmeter.BPSMeter
# Regular constants
START = datetime.datetime.now() START = datetime.datetime.now()
MY_NAME = None MY_NAME = None
MY_FULLNAME = None MY_FULLNAME = None
RESTART_ARGS = [] RESTART_ARGS = []
@ -135,6 +144,7 @@ QUEUECOMPLETEACTION = None # stores the name of the function to be called
QUEUECOMPLETEARG = None # stores an extra arguments that need to be passed QUEUECOMPLETEARG = None # stores an extra arguments that need to be passed
DAEMON = None DAEMON = None
LINUX_POWER = powersup.HAVE_DBUS
LOGFILE = None LOGFILE = None
WEBLOGFILE = None WEBLOGFILE = None
@ -169,6 +179,7 @@ DOWNLOAD_DIR_SPEED = 0
COMPLETE_DIR_SPEED = 0 COMPLETE_DIR_SPEED = 0
INTERNET_BANDWIDTH = 0 INTERNET_BANDWIDTH = 0
# Rendering of original command line arguments in Config # Rendering of original command line arguments in Config
CMDLINE = " ".join(['"%s"' % arg for arg in sys.argv]) CMDLINE = " ".join(['"%s"' % arg for arg in sys.argv])
@ -272,11 +283,6 @@ def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0
cfg.enable_https_verification.callback(guard_https_ver) cfg.enable_https_verification.callback(guard_https_ver)
guard_https_ver() guard_https_ver()
# Set cache limit
if not cfg.cache_limit() or (cfg.cache_limit() in ("200M", "450M") and (sabnzbd.WIN32 or sabnzbd.DARWIN)):
cfg.cache_limit.set(misc.get_cache_limit())
ArticleCache.do.new_limit(cfg.cache_limit.get_int())
check_incomplete_vs_complete() check_incomplete_vs_complete()
# Set language files # Set language files
@ -322,31 +328,27 @@ def initialize(pause_downloader=False, clean_up=False, evalSched=False, repair=0
# Initialize threads # Initialize threads
rss.init() rss.init()
paused = BPSMeter.do.read() sabnzbd.ArticleCache = sabnzbd.articlecache.ArticleCache()
sabnzbd.BPSMeter = sabnzbd.bpsmeter.BPSMeter()
NzbQueue() sabnzbd.NzbQueue = sabnzbd.nzbqueue.NzbQueue()
sabnzbd.Downloader = sabnzbd.downloader.Downloader(pause_downloader or sabnzbd.BPSMeter.read())
Downloader(pause_downloader or paused) sabnzbd.Decoder = sabnzbd.decoder.Decoder()
sabnzbd.Assembler = sabnzbd.assembler.Assembler()
Decoder() sabnzbd.PostProcessor = sabnzbd.postproc.PostProcessor()
sabnzbd.DirScanner = sabnzbd.dirscanner.DirScanner()
Assembler() sabnzbd.Rating = sabnzbd.rating.Rating()
sabnzbd.URLGrabber = sabnzbd.urlgrabber.URLGrabber()
PostProcessor() sabnzbd.NzbQueue.read_queue(repair)
NzbQueue.do.read_queue(repair)
DirScanner()
Rating()
URLGrabber()
scheduler.init() scheduler.init()
if evalSched: if evalSched:
scheduler.analyse(pause_downloader) scheduler.analyse(pause_downloader)
# Set cache limit
if not cfg.cache_limit() or (cfg.cache_limit() in ("200M", "450M") and (sabnzbd.WIN32 or sabnzbd.DARWIN)):
cfg.cache_limit.set(misc.get_cache_limit())
sabnzbd.ArticleCache.new_limit(cfg.cache_limit.get_int())
logging.info("All processes started") logging.info("All processes started")
RESTART_REQ = False RESTART_REQ = False
__INITIALIZED__ = True __INITIALIZED__ = True
@ -359,26 +361,26 @@ def start():
if __INITIALIZED__: if __INITIALIZED__:
logging.debug("Starting postprocessor") logging.debug("Starting postprocessor")
PostProcessor.do.start() sabnzbd.PostProcessor.start()
logging.debug("Starting assembler") logging.debug("Starting assembler")
Assembler.do.start() sabnzbd.Assembler.start()
logging.debug("Starting downloader") logging.debug("Starting downloader")
Downloader.do.start() sabnzbd.Downloader.start()
logging.debug("Starting decoders") logging.debug("Starting decoders")
Decoder.do.start() sabnzbd.Decoder.start()
scheduler.start() scheduler.start()
logging.debug("Starting dirscanner") logging.debug("Starting dirscanner")
DirScanner.do.start() sabnzbd.DirScanner.start()
Rating.do.start() sabnzbd.Rating.start()
logging.debug("Starting urlgrabber") logging.debug("Starting urlgrabber")
URLGrabber.do.start() sabnzbd.URLGrabber.start()
@synchronized(INIT_LOCK) @synchronized(INIT_LOCK)
@ -400,23 +402,23 @@ def halt():
rss.stop() rss.stop()
logging.debug("Stopping URLGrabber") logging.debug("Stopping URLGrabber")
URLGrabber.do.stop() sabnzbd.URLGrabber.stop()
try: try:
URLGrabber.do.join() sabnzbd.URLGrabber.join()
except: except:
pass pass
logging.debug("Stopping rating") logging.debug("Stopping rating")
Rating.do.stop() sabnzbd.Rating.stop()
try: try:
Rating.do.join() sabnzbd.Rating.join()
except: except:
pass pass
logging.debug("Stopping dirscanner") logging.debug("Stopping dirscanner")
DirScanner.do.stop() sabnzbd.DirScanner.stop()
try: try:
DirScanner.do.join() sabnzbd.DirScanner.join()
except: except:
pass pass
@ -426,20 +428,20 @@ def halt():
# Decoder handles join gracefully # Decoder handles join gracefully
logging.debug("Stopping decoders") logging.debug("Stopping decoders")
Decoder.do.stop() sabnzbd.Decoder.stop()
Decoder.do.join() sabnzbd.Decoder.join()
logging.debug("Stopping assembler") logging.debug("Stopping assembler")
Assembler.do.stop() sabnzbd.Assembler.stop()
try: try:
Assembler.do.join() sabnzbd.Assembler.join()
except: except:
pass pass
logging.debug("Stopping postprocessor") logging.debug("Stopping postprocessor")
PostProcessor.do.stop() sabnzbd.PostProcessor.stop()
try: try:
PostProcessor.do.join() sabnzbd.PostProcessor.join()
except: except:
pass pass
@ -467,7 +469,7 @@ def trigger_restart(timeout=None):
time.sleep(timeout) time.sleep(timeout)
# Add extra arguments # Add extra arguments
if sabnzbd.downloader.Downloader.do.paused: if sabnzbd.Downloader.paused:
sabnzbd.RESTART_ARGS.append("-p") sabnzbd.RESTART_ARGS.append("-p")
sys.argv = sabnzbd.RESTART_ARGS sys.argv = sabnzbd.RESTART_ARGS
@ -492,7 +494,7 @@ def trigger_restart(timeout=None):
############################################################################## ##############################################################################
def new_limit(): def new_limit():
""" Callback for article cache changes """ """ Callback for article cache changes """
ArticleCache.do.new_limit(cfg.cache_limit.get_int()) sabnzbd.ArticleCache.new_limit(cfg.cache_limit.get_int())
def guard_restart(): def guard_restart():
@ -503,7 +505,7 @@ def guard_restart():
def guard_top_only(): def guard_top_only():
""" Callback for change of top_only option """ """ Callback for change of top_only option """
NzbQueue.do.set_top_only(cfg.top_only()) sabnzbd.NzbQueue.set_top_only(cfg.top_only())
def guard_pause_on_pp(): def guard_pause_on_pp():
@ -512,12 +514,12 @@ def guard_pause_on_pp():
pass # Not safe to idle downloader, because we don't know pass # Not safe to idle downloader, because we don't know
# if post-processing is active now # if post-processing is active now
else: else:
Downloader.do.resume_from_postproc() sabnzbd.Downloader.resume_from_postproc()
def guard_quota_size(): def guard_quota_size():
""" Callback for change of quota_size """ """ Callback for change of quota_size """
BPSMeter.do.change_quota() sabnzbd.BPSMeter.change_quota()
def guard_quota_dp(): def guard_quota_dp():
@ -566,33 +568,33 @@ def add_url(url, pp=None, script=None, cat=None, priority=None, nzbname=None, pa
msg = "%s - %s" % (nzbname, msg) msg = "%s - %s" % (nzbname, msg)
# Generate the placeholder # Generate the placeholder
future_nzo = NzbQueue.do.generate_future(msg, pp, script, cat, url=url, priority=priority, nzbname=nzbname) future_nzo = sabnzbd.NzbQueue.generate_future(msg, pp, script, cat, url=url, priority=priority, nzbname=nzbname)
# Set password # Set password
if not future_nzo.password: if not future_nzo.password:
future_nzo.password = password future_nzo.password = password
# Get it! # Get it!
URLGrabber.do.add(url, future_nzo) sabnzbd.URLGrabber.add(url, future_nzo)
return future_nzo.nzo_id return future_nzo.nzo_id
def save_state(): def save_state():
""" Save all internal bookkeeping to disk """ """ Save all internal bookkeeping to disk """
ArticleCache.do.flush_articles() sabnzbd.ArticleCache.flush_articles()
NzbQueue.do.save() sabnzbd.NzbQueue.save()
BPSMeter.do.save() sabnzbd.BPSMeter.save()
rss.save() rss.save()
Rating.do.save() sabnzbd.Rating.save()
DirScanner.do.save() sabnzbd.DirScanner.save()
PostProcessor.do.save() sabnzbd.PostProcessor.save()
def pause_all(): def pause_all():
""" Pause all activities than cause disk access """ """ Pause all activities than cause disk access """
global PAUSED_ALL global PAUSED_ALL
PAUSED_ALL = True PAUSED_ALL = True
Downloader.do.pause() sabnzbd.Downloader.pause()
logging.debug("PAUSED_ALL active") logging.debug("PAUSED_ALL active")
@ -600,7 +602,7 @@ def unpause_all():
""" Resume all activities """ """ Resume all activities """
global PAUSED_ALL global PAUSED_ALL
PAUSED_ALL = False PAUSED_ALL = False
Downloader.do.resume() sabnzbd.Downloader.resume()
logging.debug("PAUSED_ALL inactive") logging.debug("PAUSED_ALL inactive")
@ -746,7 +748,7 @@ def enable_server(server):
logging.warning(T("Trying to set status of non-existing server %s"), server) logging.warning(T("Trying to set status of non-existing server %s"), server)
return return
config.save_config() config.save_config()
Downloader.do.update_server(server, server) sabnzbd.Downloader.update_server(server, server)
def disable_server(server): def disable_server(server):
@ -757,7 +759,7 @@ def disable_server(server):
logging.warning(T("Trying to set status of non-existing server %s"), server) logging.warning(T("Trying to set status of non-existing server %s"), server)
return return
config.save_config() config.save_config()
Downloader.do.update_server(server, server) sabnzbd.Downloader.update_server(server, server)
def system_shutdown(): def system_shutdown():
@ -866,7 +868,7 @@ def run_script(script):
def empty_queues(): def empty_queues():
""" Return True if queues empty or non-existent """ """ Return True if queues empty or non-existent """
global __INITIALIZED__ global __INITIALIZED__
return (not __INITIALIZED__) or (PostProcessor.do.empty() and NzbQueue.do.is_empty()) return (not __INITIALIZED__) or (sabnzbd.PostProcessor.empty() and sabnzbd.NzbQueue.is_empty())
def keep_awake(): def keep_awake():
@ -875,8 +877,8 @@ def keep_awake():
if sabnzbd.cfg.keep_awake(): if sabnzbd.cfg.keep_awake():
ES_CONTINUOUS = 0x80000000 ES_CONTINUOUS = 0x80000000
ES_SYSTEM_REQUIRED = 0x00000001 ES_SYSTEM_REQUIRED = 0x00000001
if (not Downloader.do.is_paused() and not NzbQueue.do.is_empty()) or ( if (not sabnzbd.Downloader.is_paused() and not sabnzbd.NzbQueue.is_empty()) or (
not PostProcessor.do.paused and not PostProcessor.do.empty() not sabnzbd.PostProcessor.paused and not sabnzbd.PostProcessor.empty()
): ):
if KERNEL32: if KERNEL32:
# Set ES_SYSTEM_REQUIRED until the next call # Set ES_SYSTEM_REQUIRED until the next call
@ -1028,45 +1030,45 @@ def check_all_tasks():
return True return True
# Non-restartable threads, require program restart # Non-restartable threads, require program restart
if not sabnzbd.PostProcessor.do.is_alive(): if not sabnzbd.PostProcessor.is_alive():
logging.info("Restarting because of crashed postprocessor") logging.info("Restarting because of crashed postprocessor")
return False return False
if not Downloader.do.is_alive(): if not sabnzbd.Downloader.is_alive():
logging.info("Restarting because of crashed downloader") logging.info("Restarting because of crashed downloader")
return False return False
if not Decoder.do.is_alive(): if not sabnzbd.Decoder.is_alive():
logging.info("Restarting because of crashed decoder") logging.info("Restarting because of crashed decoder")
return False return False
if not Assembler.do.is_alive(): if not sabnzbd.Assembler.is_alive():
logging.info("Restarting because of crashed assembler") logging.info("Restarting because of crashed assembler")
return False return False
# Kick the downloader, in case it missed the semaphore # Kick the downloader, in case it missed the semaphore
Downloader.do.wakeup() sabnzbd.Downloader.wakeup()
# Make sure the right servers are active # Make sure the right servers are active
Downloader.do.check_timers() sabnzbd.Downloader.check_timers()
# Restartable threads # Restartable threads
if not DirScanner.do.is_alive(): if not sabnzbd.DirScanner.is_alive():
logging.info("Restarting crashed dirscanner") logging.info("Restarting crashed dirscanner")
DirScanner.do.__init__() sabnzbd.DirScanner.__init__()
if not URLGrabber.do.is_alive(): if not sabnzbd.URLGrabber.is_alive():
logging.info("Restarting crashed urlgrabber") logging.info("Restarting crashed urlgrabber")
URLGrabber.do.__init__() sabnzbd.URLGrabber.__init__()
if not Rating.do.is_alive(): if not sabnzbd.Rating.is_alive():
logging.info("Restarting crashed rating") logging.info("Restarting crashed rating")
Rating.do.__init__() sabnzbd.Rating.__init__()
if not sabnzbd.scheduler.sched_check(): if not sabnzbd.scheduler.sched_check():
logging.info("Restarting crashed scheduler") logging.info("Restarting crashed scheduler")
sabnzbd.scheduler.init() sabnzbd.scheduler.init()
sabnzbd.downloader.Downloader.do.unblock_all() sabnzbd.Downloader.unblock_all()
# Check one-shot pause # Check one-shot pause
sabnzbd.scheduler.pause_check() sabnzbd.scheduler.pause_check()
# Check (and terminate) idle jobs # Check (and terminate) idle jobs
sabnzbd.nzbqueue.NzbQueue.do.stop_idle_jobs() sabnzbd.NzbQueue.stop_idle_jobs()
return True return True
@ -1112,18 +1114,13 @@ def wait_for_download_folder():
time.sleep(2.0) time.sleep(2.0)
# Required wrapper because nzbstuff.py cannot import downloader.py
def highest_server(me):
return sabnzbd.downloader.Downloader.do.highest_server(me)
def test_ipv6(): def test_ipv6():
""" Check if external IPv6 addresses are reachable """ """ Check if external IPv6 addresses are reachable """
if not cfg.selftest_host(): if not cfg.selftest_host():
# User disabled the test, assume active IPv6 # User disabled the test, assume active IPv6
return True return True
try: try:
info = getipaddress.addresslookup6(cfg.selftest_host()) info = sabnzbd.getipaddress.addresslookup6(cfg.selftest_host())
except: except:
logging.debug( logging.debug(
"Test IPv6: Disabling IPv6, because it looks like it's not available. Reason: %s", sys.exc_info()[0] "Test IPv6: Disabling IPv6, because it looks like it's not available. Reason: %s", sys.exc_info()[0]

126
sabnzbd/api.py

@ -50,8 +50,6 @@ from sabnzbd.constants import (
) )
import sabnzbd.config as config import sabnzbd.config as config
import sabnzbd.cfg as cfg import sabnzbd.cfg as cfg
from sabnzbd.downloader import Downloader
from sabnzbd.nzbqueue import NzbQueue
import sabnzbd.scheduler as scheduler import sabnzbd.scheduler as scheduler
from sabnzbd.skintext import SKIN_TEXT from sabnzbd.skintext import SKIN_TEXT
from sabnzbd.utils.pathbrowser import folders_at_path from sabnzbd.utils.pathbrowser import folders_at_path
@ -68,11 +66,7 @@ from sabnzbd.misc import (
) )
from sabnzbd.filesystem import diskspace, get_ext, globber_full, clip_path, remove_all, userxbit from sabnzbd.filesystem import diskspace, get_ext, globber_full, clip_path, remove_all, userxbit
from sabnzbd.encoding import xml_name from sabnzbd.encoding import xml_name
from sabnzbd.postproc import PostProcessor
from sabnzbd.articlecache import ArticleCache
from sabnzbd.utils.servertests import test_nntp_server_dict from sabnzbd.utils.servertests import test_nntp_server_dict
from sabnzbd.bpsmeter import BPSMeter
from sabnzbd.rating import Rating
from sabnzbd.getipaddress import localipv4, publicipv4, ipv6, addresslookup from sabnzbd.getipaddress import localipv4, publicipv4, ipv6, addresslookup
from sabnzbd.database import build_history_info, unpack_history_info, HistoryDB from sabnzbd.database import build_history_info, unpack_history_info, HistoryDB
import sabnzbd.notifier import sabnzbd.notifier
@ -199,12 +193,12 @@ def _api_queue(name, output, kwargs):
def _api_queue_delete(output, value, kwargs): def _api_queue_delete(output, value, kwargs):
""" API: accepts output, value """ """ API: accepts output, value """
if value.lower() == "all": if value.lower() == "all":
removed = NzbQueue.do.remove_all(kwargs.get("search")) removed = sabnzbd.NzbQueue.remove_all(kwargs.get("search"))
return report(output, keyword="", data={"status": bool(removed), "nzo_ids": removed}) return report(output, keyword="", data={"status": bool(removed), "nzo_ids": removed})
elif value: elif value:
items = value.split(",") items = value.split(",")
delete_all_data = int_conv(kwargs.get("del_files")) delete_all_data = int_conv(kwargs.get("del_files"))
removed = NzbQueue.do.remove_multiple(items, delete_all_data=delete_all_data) removed = sabnzbd.NzbQueue.remove_multiple(items, delete_all_data=delete_all_data)
return report(output, keyword="", data={"status": bool(removed), "nzo_ids": removed}) return report(output, keyword="", data={"status": bool(removed), "nzo_ids": removed})
else: else:
return report(output, _MSG_NO_VALUE) return report(output, _MSG_NO_VALUE)
@ -214,7 +208,7 @@ def _api_queue_delete_nzf(output, value, kwargs):
""" API: accepts value(=nzo_id), value2(=nzf_id) """ """ API: accepts value(=nzo_id), value2(=nzf_id) """
value2 = kwargs.get("value2") value2 = kwargs.get("value2")
if value and value2: if value and value2:
removed = NzbQueue.do.remove_nzf(value, value2, force_delete=True) removed = sabnzbd.NzbQueue.remove_nzf(value, value2, force_delete=True)
return report(output, keyword="", data={"status": bool(removed), "nzf_ids": removed}) return report(output, keyword="", data={"status": bool(removed), "nzf_ids": removed})
else: else:
return report(output, _MSG_NO_VALUE2) return report(output, _MSG_NO_VALUE2)
@ -225,7 +219,7 @@ def _api_queue_rename(output, value, kwargs):
value2 = kwargs.get("value2") value2 = kwargs.get("value2")
value3 = kwargs.get("value3") value3 = kwargs.get("value3")
if value and value2: if value and value2:
ret = NzbQueue.do.change_name(value, value2, value3) ret = sabnzbd.NzbQueue.change_name(value, value2, value3)
return report(output, keyword="", data={"status": ret}) return report(output, keyword="", data={"status": ret})
else: else:
return report(output, _MSG_NO_VALUE2) return report(output, _MSG_NO_VALUE2)
@ -239,7 +233,7 @@ def _api_queue_change_complete_action(output, value, kwargs):
def _api_queue_purge(output, value, kwargs): def _api_queue_purge(output, value, kwargs):
""" API: accepts output """ """ API: accepts output """
removed = NzbQueue.do.remove_all(kwargs.get("search")) removed = sabnzbd.NzbQueue.remove_all(kwargs.get("search"))
return report(output, keyword="", data={"status": bool(removed), "nzo_ids": removed}) return report(output, keyword="", data={"status": bool(removed), "nzo_ids": removed})
@ -247,7 +241,7 @@ def _api_queue_pause(output, value, kwargs):
""" API: accepts output, value(=list of nzo_id) """ """ API: accepts output, value(=list of nzo_id) """
if value: if value:
items = value.split(",") items = value.split(",")
handled = NzbQueue.do.pause_multiple_nzo(items) handled = sabnzbd.NzbQueue.pause_multiple_nzo(items)
else: else:
handled = False handled = False
return report(output, keyword="", data={"status": bool(handled), "nzo_ids": handled}) return report(output, keyword="", data={"status": bool(handled), "nzo_ids": handled})
@ -257,7 +251,7 @@ def _api_queue_resume(output, value, kwargs):
""" API: accepts output, value(=list of nzo_id) """ """ API: accepts output, value(=list of nzo_id) """
if value: if value:
items = value.split(",") items = value.split(",")
handled = NzbQueue.do.resume_multiple_nzo(items) handled = sabnzbd.NzbQueue.resume_multiple_nzo(items)
else: else:
handled = False handled = False
return report(output, keyword="", data={"status": bool(handled), "nzo_ids": handled}) return report(output, keyword="", data={"status": bool(handled), "nzo_ids": handled})
@ -272,7 +266,7 @@ def _api_queue_priority(output, value, kwargs):
priority = int(value2) priority = int(value2)
except: except:
return report(output, _MSG_INT_VALUE) return report(output, _MSG_INT_VALUE)
pos = NzbQueue.do.set_priority(value, priority) pos = sabnzbd.NzbQueue.set_priority(value, priority)
# Returns the position in the queue, -1 is incorrect job-id # Returns the position in the queue, -1 is incorrect job-id
return report(output, keyword="position", data=pos) return report(output, keyword="position", data=pos)
except: except:
@ -286,7 +280,7 @@ def _api_queue_sort(output, value, kwargs):
sort = kwargs.get("sort") sort = kwargs.get("sort")
direction = kwargs.get("dir", "") direction = kwargs.get("dir", "")
if sort: if sort:
NzbQueue.do.sort_queue(sort, direction) sabnzbd.NzbQueue.sort_queue(sort, direction)
return report(output) return report(output)
else: else:
return report(output, _MSG_NO_VALUE2) return report(output, _MSG_NO_VALUE2)
@ -304,13 +298,13 @@ def _api_queue_default(output, value, kwargs):
def _api_queue_rating(output, value, kwargs): def _api_queue_rating(output, value, kwargs):
""" API: accepts output, value(=nzo_id), type, setting, detail """ """ API: accepts output, value(=nzo_id), type, setting, detail """
vote_map = {"up": Rating.VOTE_UP, "down": Rating.VOTE_DOWN} vote_map = {"up": sabnzbd.Rating.VOTE_UP, "down": sabnzbd.Rating.VOTE_DOWN}
flag_map = { flag_map = {
"spam": Rating.FLAG_SPAM, "spam": sabnzbd.Rating.FLAG_SPAM,
"encrypted": Rating.FLAG_ENCRYPTED, "encrypted": sabnzbd.Rating.FLAG_ENCRYPTED,
"expired": Rating.FLAG_EXPIRED, "expired": sabnzbd.Rating.FLAG_EXPIRED,
"other": Rating.FLAG_OTHER, "other": sabnzbd.Rating.FLAG_OTHER,
"comment": Rating.FLAG_COMMENT, "comment": sabnzbd.Rating.FLAG_COMMENT,
} }
content_type = kwargs.get("type") content_type = kwargs.get("type")
setting = kwargs.get("setting") setting = kwargs.get("setting")
@ -326,7 +320,7 @@ def _api_queue_rating(output, value, kwargs):
if content_type == "flag": if content_type == "flag":
flag = flag_map[setting] flag = flag_map[setting]
if cfg.rating_enable(): if cfg.rating_enable():
Rating.do.update_user_rating(value, video, audio, vote, flag, kwargs.get("detail")) sabnzbd.Rating.update_user_rating(value, video, audio, vote, flag, kwargs.get("detail"))
return report(output) return report(output)
except: except:
return report(output, _MSG_BAD_SERVER_PARMS) return report(output, _MSG_BAD_SERVER_PARMS)
@ -389,7 +383,7 @@ def _api_retry(name, output, kwargs):
def _api_cancel_pp(name, output, kwargs): def _api_cancel_pp(name, output, kwargs):
""" API: accepts name, output, value(=nzo_id) """ """ API: accepts name, output, value(=nzo_id) """
nzo_id = kwargs.get("value") nzo_id = kwargs.get("value")
if PostProcessor.do.cancel_pp(nzo_id): if sabnzbd.PostProcessor.cancel_pp(nzo_id):
return report(output, keyword="", data={"status": True, "nzo_id": nzo_id}) return report(output, keyword="", data={"status": True, "nzo_id": nzo_id})
else: else:
return report(output, _MSG_NO_ITEM) return report(output, _MSG_NO_ITEM)
@ -438,7 +432,7 @@ def _api_switch(name, output, kwargs):
value = kwargs.get("value") value = kwargs.get("value")
value2 = kwargs.get("value2") value2 = kwargs.get("value2")
if value and value2: if value and value2:
pos, prio = NzbQueue.do.switch(value, value2) pos, prio = sabnzbd.NzbQueue.switch(value, value2)
# Returns the new position and new priority (if different) # Returns the new position and new priority (if different)
return report(output, keyword="result", data={"position": pos, "priority": prio}) return report(output, keyword="result", data={"position": pos, "priority": prio})
else: else:
@ -454,7 +448,7 @@ def _api_change_cat(name, output, kwargs):
cat = value2 cat = value2
if cat == "None": if cat == "None":
cat = None cat = None
result = NzbQueue.do.change_cat(nzo_id, cat) result = sabnzbd.NzbQueue.change_cat(nzo_id, cat)
return report(output, keyword="status", data=bool(result > 0)) return report(output, keyword="status", data=bool(result > 0))
else: else:
return report(output, _MSG_NO_VALUE) return report(output, _MSG_NO_VALUE)
@ -469,7 +463,7 @@ def _api_change_script(name, output, kwargs):
script = value2 script = value2
if script.lower() == "none": if script.lower() == "none":
script = None script = None
result = NzbQueue.do.change_script(nzo_id, script) result = sabnzbd.NzbQueue.change_script(nzo_id, script)
return report(output, keyword="status", data=bool(result > 0)) return report(output, keyword="status", data=bool(result > 0))
else: else:
return report(output, _MSG_NO_VALUE) return report(output, _MSG_NO_VALUE)
@ -481,7 +475,7 @@ def _api_change_opts(name, output, kwargs):
value2 = kwargs.get("value2") value2 = kwargs.get("value2")
result = 0 result = 0
if value and value2 and value2.isdigit(): if value and value2 and value2.isdigit():
result = NzbQueue.do.change_opts(value, int(value2)) result = sabnzbd.NzbQueue.change_opts(value, int(value2))
return report(output, keyword="status", data=bool(result > 0)) return report(output, keyword="status", data=bool(result > 0))
@ -534,7 +528,7 @@ def _api_history(name, output, kwargs):
return report(output, _MSG_NO_VALUE) return report(output, _MSG_NO_VALUE)
elif not name: elif not name:
history = {} history = {}
grand, month, week, day = BPSMeter.do.get_sums() grand, month, week, day = sabnzbd.BPSMeter.get_sums()
history["total_size"], history["month_size"], history["week_size"], history["day_size"] = ( history["total_size"], history["month_size"], history["week_size"], history["day_size"] = (
to_units(grand), to_units(grand),
to_units(month), to_units(month),
@ -581,7 +575,7 @@ def _api_addurl(name, output, kwargs):
def _api_pause(name, output, kwargs): def _api_pause(name, output, kwargs):
""" API: accepts output """ """ API: accepts output """
scheduler.plan_resume(0) scheduler.plan_resume(0)
Downloader.do.pause() sabnzbd.Downloader.pause()
return report(output) return report(output)
@ -660,7 +654,7 @@ def _api_restart_repair(name, output, kwargs):
def _api_disconnect(name, output, kwargs): def _api_disconnect(name, output, kwargs):
""" API: accepts output """ """ API: accepts output """
Downloader.do.disconnect() sabnzbd.Downloader.disconnect()
return report(output) return report(output)
@ -673,7 +667,7 @@ def _api_osx_icon(name, output, kwargs):
def _api_rescan(name, output, kwargs): def _api_rescan(name, output, kwargs):
""" API: accepts output """ """ API: accepts output """
NzbQueue.do.scan_jobs(all_jobs=False, action=True) sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=True)
return report(output) return report(output)
@ -692,19 +686,19 @@ def _api_eval_sort(name, output, kwargs):
def _api_watched_now(name, output, kwargs): def _api_watched_now(name, output, kwargs):
""" API: accepts output """ """ API: accepts output """
sabnzbd.dirscanner.dirscan() sabnzbd.DirScanner.scan()
return report(output) return report(output)
def _api_resume_pp(name, output, kwargs): def _api_resume_pp(name, output, kwargs):
""" API: accepts output """ """ API: accepts output """
PostProcessor.do.paused = False sabnzbd.PostProcessor.paused = False
return report(output) return report(output)
def _api_pause_pp(name, output, kwargs): def _api_pause_pp(name, output, kwargs):
""" API: accepts output """ """ API: accepts output """
PostProcessor.do.paused = True sabnzbd.PostProcessor.paused = True
return report(output) return report(output)
@ -722,7 +716,7 @@ def _api_retry_all(name, output, kwargs):
def _api_reset_quota(name, output, kwargs): def _api_reset_quota(name, output, kwargs):
""" Reset quota left """ """ Reset quota left """
BPSMeter.do.reset_quota(force=True) sabnzbd.BPSMeter.reset_quota(force=True)
def _api_test_email(name, output, kwargs): def _api_test_email(name, output, kwargs):
@ -827,13 +821,13 @@ def _api_config_speedlimit(output, kwargs):
value = kwargs.get("value") value = kwargs.get("value")
if not value: if not value:
value = "0" value = "0"
Downloader.do.limit_speed(value) sabnzbd.Downloader.limit_speed(value)
return report(output) return report(output)
def _api_config_get_speedlimit(output, kwargs): def _api_config_get_speedlimit(output, kwargs):
""" API: accepts output """ """ API: accepts output """
return report(output, keyword="speedlimit", data=Downloader.do.get_limit()) return report(output, keyword="speedlimit", data=sabnzbd.Downloader.get_limit())
def _api_config_set_colorscheme(output, kwargs): def _api_config_set_colorscheme(output, kwargs):
@ -898,11 +892,11 @@ def _api_config_undefined(output, kwargs):
def _api_server_stats(name, output, kwargs): def _api_server_stats(name, output, kwargs):
""" API: accepts output """ """ API: accepts output """
sum_t, sum_m, sum_w, sum_d = BPSMeter.do.get_sums() sum_t, sum_m, sum_w, sum_d = sabnzbd.BPSMeter.get_sums()
stats = {"total": sum_t, "month": sum_m, "week": sum_w, "day": sum_d, "servers": {}} stats = {"total": sum_t, "month": sum_m, "week": sum_w, "day": sum_d, "servers": {}}
for svr in config.get_servers(): for svr in config.get_servers():
t, m, w, d, daily = BPSMeter.do.amounts(svr) t, m, w, d, daily = sabnzbd.BPSMeter.amounts(svr)
stats["servers"][svr] = {"total": t or 0, "month": m or 0, "week": w or 0, "day": d or 0, "daily": daily or {}} stats["servers"][svr] = {"total": t or 0, "month": m or 0, "week": w or 0, "day": d or 0, "daily": daily or {}}
return report(output, keyword="", data=stats) return report(output, keyword="", data=stats)
@ -1119,7 +1113,7 @@ def handle_server_api(output, kwargs):
else: else:
config.ConfigServer(name, kwargs) config.ConfigServer(name, kwargs)
old_name = None old_name = None
Downloader.do.update_server(old_name, name) sabnzbd.Downloader.update_server(old_name, name)
return name return name
@ -1180,7 +1174,7 @@ def build_status(skip_dashboard=False, output=None):
info["logfile"] = sabnzbd.LOGFILE info["logfile"] = sabnzbd.LOGFILE
info["weblogfile"] = sabnzbd.WEBLOGFILE info["weblogfile"] = sabnzbd.WEBLOGFILE
info["loglevel"] = str(cfg.log_level()) info["loglevel"] = str(cfg.log_level())
info["folders"] = NzbQueue.do.scan_jobs(all_jobs=False, action=False) info["folders"] = sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=False)
info["configfn"] = config.get_filename() info["configfn"] = config.get_filename()
# Dashboard: Speed of System # Dashboard: Speed of System
@ -1211,7 +1205,7 @@ def build_status(skip_dashboard=False, output=None):
info["dnslookup"] = None info["dnslookup"] = None
info["servers"] = [] info["servers"] = []
servers = sorted(Downloader.do.servers[:], key=lambda svr: "%02d%s" % (svr.priority, svr.displayname.lower())) servers = sorted(sabnzbd.Downloader.servers[:], key=lambda svr: "%02d%s" % (svr.priority, svr.displayname.lower()))
for server in servers: for server in servers:
serverconnections = [] serverconnections = []
connected = 0 connected = 0
@ -1343,7 +1337,7 @@ def build_queue(start=0, limit=0, trans=False, output=None, search=None):
slot["mb_fmt"] = locale.format_string("%d", int(mb), True) slot["mb_fmt"] = locale.format_string("%d", int(mb), True)
slot["mbdone_fmt"] = locale.format_string("%d", int(mb - mbleft), True) slot["mbdone_fmt"] = locale.format_string("%d", int(mb - mbleft), True)
if not Downloader.do.paused and status not in (Status.PAUSED, Status.FETCHING, Status.GRABBING): if not sabnzbd.Downloader.paused and status not in (Status.PAUSED, Status.FETCHING, Status.GRABBING):
if is_propagating: if is_propagating:
slot["status"] = Status.PROP slot["status"] = Status.PROP
elif status == Status.CHECKING: elif status == Status.CHECKING:
@ -1357,8 +1351,8 @@ def build_queue(start=0, limit=0, trans=False, output=None, search=None):
slot["status"] = "%s" % status slot["status"] = "%s" % status
if ( if (
Downloader.do.paused sabnzbd.Downloader.paused
or Downloader.do.postproc or sabnzbd.Downloader.postproc
or is_propagating or is_propagating
or status not in (Status.DOWNLOADING, Status.FETCHING, Status.QUEUED) or status not in (Status.DOWNLOADING, Status.FETCHING, Status.QUEUED)
) and priority != FORCE_PRIORITY: ) and priority != FORCE_PRIORITY:
@ -1381,7 +1375,7 @@ def build_queue(start=0, limit=0, trans=False, output=None, search=None):
else: else:
slot["avg_age"] = calc_age(average_date, bool(trans)) slot["avg_age"] = calc_age(average_date, bool(trans))
rating = Rating.do.get_rating_by_nzo(nzo_id) rating = sabnzbd.Rating.get_rating_by_nzo(nzo_id)
slot["has_rating"] = rating is not None slot["has_rating"] = rating is not None
if rating: if rating:
slot["rating_avg_video"] = rating.avg_video slot["rating_avg_video"] = rating.avg_video
@ -1400,9 +1394,9 @@ def build_queue(start=0, limit=0, trans=False, output=None, search=None):
def fast_queue(): def fast_queue():
""" Return paused, bytes_left, bpsnow, time_left """ """ Return paused, bytes_left, bpsnow, time_left """
    bytes_left = NzbQueue.do.remaining() bytes_left = sabnzbd.NzbQueue.remaining()
paused = Downloader.do.paused paused = sabnzbd.Downloader.paused
bpsnow = BPSMeter.do.bps bpsnow = sabnzbd.BPSMeter.bps
time_left = calc_timeleft(bytes_left, bpsnow) time_left = calc_timeleft(bytes_left, bpsnow)
return paused, bytes_left, bpsnow, time_left return paused, bytes_left, bpsnow, time_left
@ -1410,7 +1404,7 @@ def fast_queue():
def build_file_list(nzo_id: str): def build_file_list(nzo_id: str):
"""Build file lists for specified job""" """Build file lists for specified job"""
jobs = [] jobs = []
    nzo = NzbQueue.do.get_nzo(nzo_id) nzo = sabnzbd.NzbQueue.get_nzo(nzo_id)
if nzo: if nzo:
pnfo = nzo.gather_info(full=True) pnfo = nzo.gather_info(full=True)
@ -1487,7 +1481,7 @@ def retry_job(job, new_nzb=None, password=None):
nzo_id = sabnzbd.add_url(url, pp, script, cat) nzo_id = sabnzbd.add_url(url, pp, script, cat)
else: else:
path = history_db.get_path(job) path = history_db.get_path(job)
nzo_id = NzbQueue.do.repair_job(path, new_nzb, password) nzo_id = sabnzbd.NzbQueue.repair_job(path, new_nzb, password)
if nzo_id: if nzo_id:
# Only remove from history if we repaired something # Only remove from history if we repaired something
history_db.remove_history(job) history_db.remove_history(job)
@ -1516,9 +1510,9 @@ def del_job_files(job_paths):
def del_hist_job(job, del_files): def del_hist_job(job, del_files):
""" Remove history element """ """ Remove history element """
if job: if job:
path = PostProcessor.do.get_path(job) path = sabnzbd.PostProcessor.get_path(job)
if path: if path:
PostProcessor.do.delete(job, del_files=del_files) sabnzbd.PostProcessor.delete(job, del_files=del_files)
else: else:
history_db = sabnzbd.get_db_connection() history_db = sabnzbd.get_db_connection()
remove_all(history_db.get_path(job), recursive=True) remove_all(history_db.get_path(job), recursive=True)
@ -1568,10 +1562,10 @@ def build_header(webdir="", output=None, trans_functions=True):
except: except:
uptime = "-" uptime = "-"
speed_limit = Downloader.do.get_limit() speed_limit = sabnzbd.Downloader.get_limit()
if speed_limit <= 0: if speed_limit <= 0:
speed_limit = 100 speed_limit = 100
speed_limit_abs = Downloader.do.get_limit_abs() speed_limit_abs = sabnzbd.Downloader.get_limit_abs()
if speed_limit_abs <= 0: if speed_limit_abs <= 0:
speed_limit_abs = "" speed_limit_abs = ""
@ -1609,7 +1603,7 @@ def build_header(webdir="", output=None, trans_functions=True):
header["new_release"], header["new_rel_url"] = sabnzbd.NEW_VERSION header["new_release"], header["new_rel_url"] = sabnzbd.NEW_VERSION
header["version"] = sabnzbd.__version__ header["version"] = sabnzbd.__version__
header["paused"] = bool(Downloader.do.paused or Downloader.do.postproc) header["paused"] = bool(sabnzbd.Downloader.paused or sabnzbd.Downloader.postproc)
header["pause_int"] = scheduler.pause_int() header["pause_int"] = scheduler.pause_int()
header["paused_all"] = sabnzbd.PAUSED_ALL header["paused_all"] = sabnzbd.PAUSED_ALL
@ -1626,11 +1620,11 @@ def build_header(webdir="", output=None, trans_functions=True):
header["have_warnings"] = str(sabnzbd.GUIHANDLER.count()) header["have_warnings"] = str(sabnzbd.GUIHANDLER.count())
header["finishaction"] = sabnzbd.QUEUECOMPLETE header["finishaction"] = sabnzbd.QUEUECOMPLETE
header["quota"] = to_units(BPSMeter.do.quota) header["quota"] = to_units(sabnzbd.BPSMeter.quota)
header["have_quota"] = bool(BPSMeter.do.quota > 0.0) header["have_quota"] = bool(sabnzbd.BPSMeter.quota > 0.0)
header["left_quota"] = to_units(BPSMeter.do.left) header["left_quota"] = to_units(sabnzbd.BPSMeter.left)
anfo = ArticleCache.do.cache_info() anfo = sabnzbd.ArticleCache.cache_info()
header["cache_art"] = str(anfo.article_sum) header["cache_art"] = str(anfo.article_sum)
header["cache_size"] = to_units(anfo.cache_size, "B") header["cache_size"] = to_units(anfo.cache_size, "B")
header["cache_max"] = str(anfo.cache_limit) header["cache_max"] = str(anfo.cache_limit)
@ -1643,8 +1637,8 @@ def build_queue_header(search=None, start=0, limit=0, output=None):
header = build_header(output=output) header = build_header(output=output)
bytespersec = BPSMeter.do.bps bytespersec = sabnzbd.BPSMeter.bps
qnfo = NzbQueue.do.queue_info(search=search, start=start, limit=limit) qnfo = sabnzbd.NzbQueue.queue_info(search=search, start=start, limit=limit)
bytesleft = qnfo.bytes_left bytesleft = qnfo.bytes_left
bytes_total = qnfo.bytes bytes_total = qnfo.bytes
@ -1657,7 +1651,7 @@ def build_queue_header(search=None, start=0, limit=0, output=None):
header["size"] = to_units(bytes_total, "B") header["size"] = to_units(bytes_total, "B")
header["noofslots_total"] = qnfo.q_fullsize header["noofslots_total"] = qnfo.q_fullsize
if Downloader.do.paused or Downloader.do.postproc: if sabnzbd.Downloader.paused or sabnzbd.Downloader.postproc:
status = Status.PAUSED status = Status.PAUSED
elif bytespersec > 0: elif bytespersec > 0:
status = Status.DOWNLOADING status = Status.DOWNLOADING
@ -1682,7 +1676,7 @@ def build_history(start=0, limit=0, search=None, failed_only=0, categories=None)
limit = 1000000 limit = 1000000
# Grab any items that are active or queued in postproc # Grab any items that are active or queued in postproc
postproc_queue = PostProcessor.do.get_queue() postproc_queue = sabnzbd.PostProcessor.get_queue()
# Filter out any items that don't match the search term or category # Filter out any items that don't match the search term or category
if postproc_queue: if postproc_queue:
@ -1762,7 +1756,7 @@ def build_history(start=0, limit=0, search=None, failed_only=0, categories=None)
item["retry"] = True item["retry"] = True
if rating_enabled: if rating_enabled:
rating = Rating.do.get_rating_by_nzo(item["nzo_id"]) rating = sabnzbd.Rating.get_rating_by_nzo(item["nzo_id"])
item["has_rating"] = rating is not None item["has_rating"] = rating is not None
if rating: if rating:
item["rating_avg_video"] = rating.avg_video item["rating_avg_video"] = rating.avg_video
@ -1913,7 +1907,7 @@ def del_from_section(kwargs):
del item del item
config.save_config() config.save_config()
if section == "servers": if section == "servers":
Downloader.do.update_server(keyword, None) sabnzbd.Downloader.update_server(keyword, None)
return True return True
else: else:
return False return False

6
sabnzbd/articlecache.py

@ -52,8 +52,6 @@ class ArticleCache:
if sabnzbd.DARWIN or sabnzbd.WIN64 or (struct.calcsize("P") * 8) == 64: if sabnzbd.DARWIN or sabnzbd.WIN64 or (struct.calcsize("P") * 8) == 64:
self.__cache_upper_limit = 4 * GIGI self.__cache_upper_limit = 4 * GIGI
ArticleCache.do = self
def cache_info(self): def cache_info(self):
return ANFO(len(self.__article_table), abs(self.__cache_size), self.__cache_limit_org) return ANFO(len(self.__article_table), abs(self.__cache_size), self.__cache_limit_org)
@ -171,7 +169,3 @@ class ArticleCache:
# Save data, but don't complain when destination folder is missing # Save data, but don't complain when destination folder is missing
# because this flush may come after completion of the NZO. # because this flush may come after completion of the NZO.
sabnzbd.save_data(data, article.get_art_id(), nzo.workpath, do_pickle=False, silent=True) sabnzbd.save_data(data, article.get_art_id(), nzo.workpath, do_pickle=False, silent=True)
# Create the instance
ArticleCache()

28
sabnzbd/assembler.py

@ -33,13 +33,10 @@ from sabnzbd.misc import get_all_passwords
from sabnzbd.filesystem import set_permissions, clip_path, has_win_device, diskspace, get_filename, get_ext from sabnzbd.filesystem import set_permissions, clip_path, has_win_device, diskspace, get_filename, get_ext
from sabnzbd.constants import Status, GIGI, MAX_ASSEMBLER_QUEUE from sabnzbd.constants import Status, GIGI, MAX_ASSEMBLER_QUEUE
import sabnzbd.cfg as cfg import sabnzbd.cfg as cfg
from sabnzbd.articlecache import ArticleCache
from sabnzbd.postproc import PostProcessor
from sabnzbd.nzbstuff import NzbObject, NzbFile from sabnzbd.nzbstuff import NzbObject, NzbFile
import sabnzbd.downloader import sabnzbd.downloader
import sabnzbd.par2file as par2file import sabnzbd.par2file as par2file
import sabnzbd.utils.rarfile as rarfile import sabnzbd.utils.rarfile as rarfile
from sabnzbd.rating import Rating
class Assembler(Thread): class Assembler(Thread):
@ -48,7 +45,6 @@ class Assembler(Thread):
def __init__(self): def __init__(self):
Thread.__init__(self) Thread.__init__(self)
self.queue: queue.Queue[Tuple[NzbObject, NzbFile, bool]] = queue.Queue() self.queue: queue.Queue[Tuple[NzbObject, NzbFile, bool]] = queue.Queue()
Assembler.do = self
def stop(self): def stop(self):
self.process(None) self.process(None)
@ -76,10 +72,10 @@ class Assembler(Thread):
and diskspace(force=True)["download_dir"][1] < (cfg.download_free.get_float() + nzf.bytes) / GIGI and diskspace(force=True)["download_dir"][1] < (cfg.download_free.get_float() + nzf.bytes) / GIGI
): ):
# Only warn and email once # Only warn and email once
if not sabnzbd.downloader.Downloader.do.paused: if not sabnzbd.Downloader.paused:
logging.warning(T("Too little diskspace forcing PAUSE")) logging.warning(T("Too little diskspace forcing PAUSE"))
# Pause downloader, but don't save, since the disk is almost full! # Pause downloader, but don't save, since the disk is almost full!
sabnzbd.downloader.Downloader.do.pause() sabnzbd.Downloader.pause()
sabnzbd.emailer.diskfull_mail() sabnzbd.emailer.diskfull_mail()
# Abort all direct unpackers, just to be sure # Abort all direct unpackers, just to be sure
sabnzbd.directunpacker.abort_all() sabnzbd.directunpacker.abort_all()
@ -102,7 +98,7 @@ class Assembler(Thread):
# Log traceback # Log traceback
logging.info("Traceback: ", exc_info=True) logging.info("Traceback: ", exc_info=True)
# Pause without saving # Pause without saving
sabnzbd.downloader.Downloader.do.pause() sabnzbd.Downloader.pause()
continue continue
except: except:
logging.error(T("Fatal error in Assembler"), exc_info=True) logging.error(T("Fatal error in Assembler"), exc_info=True)
@ -137,7 +133,7 @@ class Assembler(Thread):
nzo.final_name, nzo.final_name,
) )
nzo.fail_msg = T("Aborted, encryption detected") nzo.fail_msg = T("Aborted, encryption detected")
sabnzbd.nzbqueue.NzbQueue.do.end_job(nzo) sabnzbd.NzbQueue.end_job(nzo)
if unwanted_file: if unwanted_file:
logging.warning( logging.warning(
@ -153,7 +149,7 @@ class Assembler(Thread):
if cfg.action_on_unwanted_extensions() == 2: if cfg.action_on_unwanted_extensions() == 2:
logging.debug("Unwanted extension ... aborting") logging.debug("Unwanted extension ... aborting")
nzo.fail_msg = T("Aborted, unwanted extension detected") nzo.fail_msg = T("Aborted, unwanted extension detected")
sabnzbd.nzbqueue.NzbQueue.do.end_job(nzo) sabnzbd.NzbQueue.end_job(nzo)
# Add to direct unpack # Add to direct unpack
nzo.add_to_direct_unpacker(nzf) nzo.add_to_direct_unpacker(nzf)
@ -177,11 +173,11 @@ class Assembler(Thread):
reason, reason,
) )
nzo.fail_msg = T("Aborted, rating filter matched (%s)") % reason nzo.fail_msg = T("Aborted, rating filter matched (%s)") % reason
sabnzbd.nzbqueue.NzbQueue.do.end_job(nzo) sabnzbd.NzbQueue.end_job(nzo)
else: else:
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo.nzo_id, add_to_history=False, cleanup=False) sabnzbd.NzbQueue.remove(nzo.nzo_id, add_to_history=False, cleanup=False)
PostProcessor.do.process(nzo) sabnzbd.PostProcessor.process(nzo)
@staticmethod @staticmethod
def assemble(nzf, file_done): def assemble(nzf, file_done):
@ -205,7 +201,7 @@ class Assembler(Thread):
# Write all decoded articles # Write all decoded articles
if article.decoded: if article.decoded:
data = ArticleCache.do.load_article(article) data = sabnzbd.ArticleCache.load_article(article)
# Could be empty in case nzo was deleted # Could be empty in case nzo was deleted
if data: if data:
fout.write(data) fout.write(data)
@ -235,7 +231,7 @@ def file_has_articles(nzf):
has = False has = False
for article in nzf.decodetable: for article in nzf.decodetable:
sleep(0.01) sleep(0.01)
data = ArticleCache.do.load_article(article) data = sabnzbd.ArticleCache.load_article(article)
if data: if data:
has = True has = True
return has return has
@ -369,8 +365,8 @@ def check_encrypted_and_unwanted_files(nzo, filepath):
def nzo_filtered_by_rating(nzo): def nzo_filtered_by_rating(nzo):
if Rating.do and cfg.rating_enable() and cfg.rating_filter_enable() and (nzo.rating_filtered < 2): if cfg.rating_enable() and cfg.rating_filter_enable() and (nzo.rating_filtered < 2):
rating = Rating.do.get_rating_by_nzo(nzo.nzo_id) rating = sabnzbd.Rating.get_rating_by_nzo(nzo.nzo_id)
if rating is not None: if rating is not None:
nzo.rating_filtered = 1 nzo.rating_filtered = 1
reason = rating_filtered(rating, nzo.filename.lower(), True) reason = rating_filtered(rating, nzo.filename.lower(), True)

23
sabnzbd/bpsmeter.py

@ -118,7 +118,6 @@ class BPSMeter:
self.q_hour = 0 # Quota reset hour self.q_hour = 0 # Quota reset hour
self.q_minute = 0 # Quota reset minute self.q_minute = 0 # Quota reset minute
self.quota_enabled = True # Scheduled quota enable/disable self.quota_enabled = True # Scheduled quota enable/disable
BPSMeter.do = self
def save(self): def save(self):
""" Save admin to disk """ """ Save admin to disk """
@ -235,8 +234,8 @@ class BPSMeter:
if self.have_quota and self.quota_enabled: if self.have_quota and self.quota_enabled:
self.left -= amount self.left -= amount
if self.left <= 0.0: if self.left <= 0.0:
                if sabnzbd.downloader.Downloader.do and not sabnzbd.downloader.Downloader.do.paused: if sabnzbd.Downloader and not sabnzbd.Downloader.paused:
sabnzbd.downloader.Downloader.do.pause() sabnzbd.Downloader.pause()
logging.warning(T("Quota spent, pausing downloading")) logging.warning(T("Quota spent, pausing downloading"))
# Speedometer # Speedometer
@ -355,8 +354,8 @@ class BPSMeter:
logging.info("Quota was reset to %s", self.quota) logging.info("Quota was reset to %s", self.quota)
if cfg.quota_resume(): if cfg.quota_resume():
logging.info("Auto-resume due to quota reset") logging.info("Auto-resume due to quota reset")
                if sabnzbd.downloader.Downloader.do: if sabnzbd.Downloader:
sabnzbd.downloader.Downloader.do.resume() sabnzbd.Downloader.resume()
self.next_reset() self.next_reset()
return False return False
else: else:
@ -464,8 +463,8 @@ class BPSMeter:
@staticmethod @staticmethod
def resume(): def resume():
""" Resume downloading """ """ Resume downloading """
if cfg.quota_resume() and sabnzbd.downloader.Downloader.do and sabnzbd.downloader.Downloader.do.paused: if cfg.quota_resume() and sabnzbd.Downloader.paused:
sabnzbd.downloader.Downloader.do.resume() sabnzbd.Downloader.resume()
def midnight(self): def midnight(self):
""" Midnight action: dummy update for all servers """ """ Midnight action: dummy update for all servers """
@ -476,12 +475,4 @@ class BPSMeter:
def quota_handler(): def quota_handler():
""" To be called from scheduler """ """ To be called from scheduler """
logging.debug("Checking quota") logging.debug("Checking quota")
BPSMeter.do.reset_quota() sabnzbd.BPSMeter.reset_quota()
def midnight_action():
if BPSMeter.do:
BPSMeter.do.midnight()
BPSMeter()

22
sabnzbd/decoder.py

@ -27,9 +27,6 @@ from typing import Tuple, List
import sabnzbd import sabnzbd
from sabnzbd.constants import SABYENC_VERSION_REQUIRED from sabnzbd.constants import SABYENC_VERSION_REQUIRED
from sabnzbd.articlecache import ArticleCache
from sabnzbd.downloader import Downloader
from sabnzbd.nzbqueue import NzbQueue
from sabnzbd.nzbstuff import Article from sabnzbd.nzbstuff import Article
import sabnzbd.cfg as cfg import sabnzbd.cfg as cfg
from sabnzbd.misc import match_str from sabnzbd.misc import match_str
@ -75,7 +72,6 @@ class Decoder:
self.decoder_workers = [] self.decoder_workers = []
for i in range(cfg.num_decoders()): for i in range(cfg.num_decoders()):
self.decoder_workers.append(DecoderWorker(self.decoder_queue)) self.decoder_workers.append(DecoderWorker(self.decoder_queue))
Decoder.do = self
def start(self): def start(self):
for decoder_worker in self.decoder_workers: for decoder_worker in self.decoder_workers:
@ -103,12 +99,12 @@ class Decoder:
def process(self, article, raw_data): def process(self, article, raw_data):
# We use reported article-size, just like sabyenc does # We use reported article-size, just like sabyenc does
ArticleCache.do.reserve_space(article.bytes) sabnzbd.ArticleCache.reserve_space(article.bytes)
self.decoder_queue.put((article, raw_data)) self.decoder_queue.put((article, raw_data))
def queue_full(self): def queue_full(self):
# Check if the queue size exceeds the limits # Check if the queue size exceeds the limits
return self.decoder_queue.qsize() >= ArticleCache.do.decoder_cache_article_limit return self.decoder_queue.qsize() >= sabnzbd.ArticleCache.decoder_cache_article_limit
class DecoderWorker(Thread): class DecoderWorker(Thread):
@ -138,7 +134,7 @@ class DecoderWorker(Thread):
art_id = article.article art_id = article.article
# Free space in the decoder-queue # Free space in the decoder-queue
ArticleCache.do.free_reserved_space(article.bytes) sabnzbd.ArticleCache.free_reserved_space(article.bytes)
# Keeping track # Keeping track
decoded_data = None decoded_data = None
@ -157,12 +153,12 @@ class DecoderWorker(Thread):
except MemoryError: except MemoryError:
logging.warning(T("Decoder failure: Out of memory")) logging.warning(T("Decoder failure: Out of memory"))
logging.info("Decoder-Queue: %d", self.decoder_queue.qsize()) logging.info("Decoder-Queue: %d", self.decoder_queue.qsize())
logging.info("Cache: %d, %d, %d", *ArticleCache.do.cache_info()) logging.info("Cache: %d, %d, %d", *sabnzbd.ArticleCache.cache_info())
logging.info("Traceback: ", exc_info=True) logging.info("Traceback: ", exc_info=True)
Downloader.do.pause() sabnzbd.Downloader.pause()
# This article should be fetched again # This article should be fetched again
NzbQueue.do.reset_try_lists(article) sabnzbd.NzbQueue.reset_try_lists(article)
continue continue
except CrcError: except CrcError:
@ -195,7 +191,7 @@ class DecoderWorker(Thread):
logme = T("UUencode detected, only yEnc encoding is supported [%s]") % nzo.final_name logme = T("UUencode detected, only yEnc encoding is supported [%s]") % nzo.final_name
logging.error(logme) logging.error(logme)
nzo.fail_msg = logme nzo.fail_msg = logme
NzbQueue.do.end_job(nzo) sabnzbd.NzbQueue.end_job(nzo)
break break
# Pre-check, proper article found so just register # Pre-check, proper article found so just register
@ -221,9 +217,9 @@ class DecoderWorker(Thread):
if decoded_data: if decoded_data:
# If the data needs to be written to disk due to full cache, this will be slow # If the data needs to be written to disk due to full cache, this will be slow
# Causing the decoder-queue to fill up and delay the downloader # Causing the decoder-queue to fill up and delay the downloader
ArticleCache.do.save_article(article, decoded_data) sabnzbd.ArticleCache.save_article(article, decoded_data)
NzbQueue.do.register_article(article, article_success) sabnzbd.NzbQueue.register_article(article, article_success)
def decode(article, raw_data): def decode(article, raw_data):

7
sabnzbd/dirscanner.py

@ -89,7 +89,6 @@ class DirScanner(threading.Thread):
self.trigger = False self.trigger = False
cfg.dirscan_dir.callback(self.newdir) cfg.dirscan_dir.callback(self.newdir)
cfg.dirscan_speed.callback(self.newspeed) cfg.dirscan_speed.callback(self.newspeed)
DirScanner.do = self
def newdir(self): def newdir(self):
""" We're notified of a dir change """ """ We're notified of a dir change """
@ -213,9 +212,3 @@ class DirScanner(threading.Thread):
if os.path.isdir(dpath) and dd.lower() in cats: if os.path.isdir(dpath) and dd.lower() in cats:
run_dir(dpath, dd.lower()) run_dir(dpath, dd.lower())
self.busy = False self.busy = False
def dirscan():
""" Wrapper required for scheduler """
logging.info("Scheduled or manual watched folder scan")
DirScanner.do.scan()

63
sabnzbd/downloader.py

@ -35,7 +35,6 @@ from sabnzbd.newswrapper import NewsWrapper, request_server_info
import sabnzbd.notifier as notifier import sabnzbd.notifier as notifier
import sabnzbd.config as config import sabnzbd.config as config
import sabnzbd.cfg as cfg import sabnzbd.cfg as cfg
from sabnzbd.bpsmeter import BPSMeter
import sabnzbd.scheduler import sabnzbd.scheduler
from sabnzbd.misc import from_units, nntp_to_msg, int_conv from sabnzbd.misc import from_units, nntp_to_msg, int_conv
from sabnzbd.utils.happyeyeballs import happyeyeballs from sabnzbd.utils.happyeyeballs import happyeyeballs
@ -80,10 +79,10 @@ class Server:
self.restart = False self.restart = False
self.displayname = displayname self.displayname = displayname
self.host = host self.host = host
self.port = port self.port: int = port
self.timeout = timeout self.timeout = timeout
self.threads = threads self.threads = threads
self.priority = priority self.priority: int = priority
self.ssl = ssl self.ssl = ssl
self.ssl_verify = ssl_verify self.ssl_verify = ssl_verify
self.ssl_ciphers = ssl_ciphers self.ssl_ciphers = ssl_ciphers
@ -211,8 +210,6 @@ class Downloader(Thread):
for server in config.get_servers(): for server in config.get_servers():
self.init_server(None, server) self.init_server(None, server)
Downloader.do = self
def init_server(self, oldserver, newserver): def init_server(self, oldserver, newserver):
"""Setup or re-setup single server """Setup or re-setup single server
When oldserver is defined and in use, delay startup. When oldserver is defined and in use, delay startup.
@ -298,7 +295,7 @@ class Downloader(Thread):
logging.info("Pausing") logging.info("Pausing")
notifier.send_notification("SABnzbd", T("Paused"), "pause_resume") notifier.send_notification("SABnzbd", T("Paused"), "pause_resume")
if self.is_paused(): if self.is_paused():
BPSMeter.do.reset() sabnzbd.BPSMeter.reset()
if cfg.autodisconnect(): if cfg.autodisconnect():
self.disconnect() self.disconnect()
@ -358,7 +355,7 @@ class Downloader(Thread):
if not self.paused: if not self.paused:
return False return False
else: else:
if sabnzbd.nzbqueue.NzbQueue.do.has_forced_items(): if sabnzbd.NzbQueue.has_forced_items():
return False return False
else: else:
return True return True
@ -404,7 +401,7 @@ class Downloader(Thread):
# Make sure server address resolution is refreshed # Make sure server address resolution is refreshed
server.info = None server.info = None
sabnzbd.nzbqueue.NzbQueue.do.reset_all_try_lists() sabnzbd.NzbQueue.reset_all_try_lists()
def decode(self, article, raw_data): def decode(self, article, raw_data):
"""Decode article and check the status of """Decode article and check the status of
@ -413,23 +410,21 @@ class Downloader(Thread):
# Handle broken articles directly # Handle broken articles directly
if not raw_data: if not raw_data:
if not article.search_new_server(): if not article.search_new_server():
sabnzbd.nzbqueue.NzbQueue.do.register_article(article, success=False) sabnzbd.NzbQueue.register_article(article, success=False)
return return
# Send to decoder-queue # Send to decoder-queue
sabnzbd.decoder.Decoder.do.process(article, raw_data) sabnzbd.Decoder.process(article, raw_data)
# See if we need to delay because the queues are full # See if we need to delay because the queues are full
logged = False logged = False
while not self.shutdown and ( while not self.shutdown and (sabnzbd.Decoder.queue_full() or sabnzbd.Assembler.queue_full()):
sabnzbd.decoder.Decoder.do.queue_full() or sabnzbd.assembler.Assembler.do.queue_full()
):
if not logged: if not logged:
# Only log once, to not waste any CPU-cycles # Only log once, to not waste any CPU-cycles
logging.debug( logging.debug(
"Delaying - Decoder queue: %s - Assembler queue: %s", "Delaying - Decoder queue: %s - Assembler queue: %s",
sabnzbd.decoder.Decoder.do.decoder_queue.qsize(), sabnzbd.Decoder.decoder_queue.qsize(),
sabnzbd.assembler.Assembler.do.queue.qsize(), sabnzbd.Assembler.queue.qsize(),
) )
logged = True logged = True
time.sleep(0.05) time.sleep(0.05)
@ -444,7 +439,7 @@ class Downloader(Thread):
logging.debug("SSL verification test: %s", sabnzbd.CERTIFICATE_VALIDATION) logging.debug("SSL verification test: %s", sabnzbd.CERTIFICATE_VALIDATION)
# Kick BPS-Meter to check quota # Kick BPS-Meter to check quota
BPSMeter.do.update() sabnzbd.BPSMeter.update()
while 1: while 1:
for server in self.servers: for server in self.servers:
@ -464,7 +459,7 @@ class Downloader(Thread):
if newid: if newid:
self.init_server(None, newid) self.init_server(None, newid)
self.__restart -= 1 self.__restart -= 1
sabnzbd.nzbqueue.NzbQueue.do.reset_all_try_lists() sabnzbd.NzbQueue.reset_all_try_lists()
# Have to leave this loop, because we removed element # Have to leave this loop, because we removed element
break break
else: else:
@ -486,12 +481,12 @@ class Downloader(Thread):
if not server.info: if not server.info:
# Only request info if there's stuff in the queue # Only request info if there's stuff in the queue
if not sabnzbd.nzbqueue.NzbQueue.do.is_empty(): if not sabnzbd.NzbQueue.is_empty():
self.maybe_block_server(server) self.maybe_block_server(server)
request_server_info(server) request_server_info(server)
break break
article = sabnzbd.nzbqueue.NzbQueue.do.get_article(server, self.servers) article = sabnzbd.NzbQueue.get_article(server, self.servers)
if not article: if not article:
break break
@ -563,26 +558,26 @@ class Downloader(Thread):
# Need to initialize the check during first 20 seconds # Need to initialize the check during first 20 seconds
if self.can_be_slowed is None or self.can_be_slowed_timer: if self.can_be_slowed is None or self.can_be_slowed_timer:
# Wait for stable speed to start testing # Wait for stable speed to start testing
if not self.can_be_slowed_timer and BPSMeter.do.get_stable_speed(timespan=10): if not self.can_be_slowed_timer and sabnzbd.BPSMeter.get_stable_speed(timespan=10):
self.can_be_slowed_timer = time.time() self.can_be_slowed_timer = time.time()
# Check 10 seconds after enabling slowdown # Check 10 seconds after enabling slowdown
if self.can_be_slowed_timer and time.time() > self.can_be_slowed_timer + 10: if self.can_be_slowed_timer and time.time() > self.can_be_slowed_timer + 10:
# Now let's check if it was stable in the last 10 seconds # Now let's check if it was stable in the last 10 seconds
self.can_be_slowed = BPSMeter.do.get_stable_speed(timespan=10) self.can_be_slowed = sabnzbd.BPSMeter.get_stable_speed(timespan=10)
self.can_be_slowed_timer = 0 self.can_be_slowed_timer = 0
logging.debug("Downloader-slowdown: %r", self.can_be_slowed) logging.debug("Downloader-slowdown: %r", self.can_be_slowed)
else: else:
read, write, error = ([], [], []) read, write, error = ([], [], [])
BPSMeter.do.reset() sabnzbd.BPSMeter.reset()
time.sleep(1.0) time.sleep(1.0)
DOWNLOADER_CV.acquire() DOWNLOADER_CV.acquire()
while ( while (
(sabnzbd.nzbqueue.NzbQueue.do.is_empty() or self.is_paused() or self.postproc) (sabnzbd.NzbQueue.is_empty() or self.is_paused() or self.postproc)
and not self.shutdown and not self.shutdown
and not self.__restart and not self.__restart
): ):
@ -603,7 +598,7 @@ class Downloader(Thread):
self.write_fds.pop(fileno) self.write_fds.pop(fileno)
if not read: if not read:
BPSMeter.do.update() sabnzbd.BPSMeter.update()
continue continue
for selected in read: for selected in read:
@ -620,7 +615,7 @@ class Downloader(Thread):
bytes_received, done, skip = (0, False, False) bytes_received, done, skip = (0, False, False)
if skip: if skip:
BPSMeter.do.update() sabnzbd.BPSMeter.update()
continue continue
if bytes_received < 1: if bytes_received < 1:
@ -630,12 +625,12 @@ class Downloader(Thread):
else: else:
if self.bandwidth_limit: if self.bandwidth_limit:
limit = self.bandwidth_limit limit = self.bandwidth_limit
if bytes_received + BPSMeter.do.bps > limit: if bytes_received + sabnzbd.BPSMeter.bps > limit:
while BPSMeter.do.bps > limit: while sabnzbd.BPSMeter.bps > limit:
time.sleep(0.05) time.sleep(0.05)
BPSMeter.do.update() sabnzbd.BPSMeter.update()
BPSMeter.do.update(server.id, bytes_received) sabnzbd.BPSMeter.update(server.id, bytes_received)
nzo.update_download_stats(BPSMeter.do.bps, server.id, bytes_received) nzo.update_download_stats(sabnzbd.BPSMeter.bps, server.id, bytes_received)
if not done and nw.status_code != 222: if not done and nw.status_code != 222:
if not nw.connected or nw.status_code == 480: if not nw.connected or nw.status_code == 480:
@ -717,7 +712,7 @@ class Downloader(Thread):
server.active = False server.active = False
if penalty and (block or server.optional): if penalty and (block or server.optional):
self.plan_server(server, penalty) self.plan_server(server, penalty)
sabnzbd.nzbqueue.NzbQueue.do.reset_all_try_lists() sabnzbd.NzbQueue.reset_all_try_lists()
self.__reset_nw(nw, None, warn=False, send_quit=True) self.__reset_nw(nw, None, warn=False, send_quit=True)
continue continue
except: except:
@ -824,7 +819,7 @@ class Downloader(Thread):
self.decode(article, None) self.decode(article, None)
else: else:
# Allow all servers to iterate over each nzo/nzf again # Allow all servers to iterate over each nzo/nzf again
sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(article) sabnzbd.NzbQueue.reset_try_lists(article)
if destroy: if destroy:
nw.terminate(quit=send_quit) nw.terminate(quit=send_quit)
@ -947,12 +942,12 @@ class Downloader(Thread):
def stop(): def stop():
DOWNLOADER_CV.acquire() DOWNLOADER_CV.acquire()
try: try:
Downloader.do.stop() sabnzbd.Downloader.stop()
finally: finally:
DOWNLOADER_CV.notify_all() DOWNLOADER_CV.notify_all()
DOWNLOADER_CV.release() DOWNLOADER_CV.release()
try: try:
Downloader.do.join() sabnzbd.Downloader.join()
except: except:
pass pass

98
sabnzbd/interface.py

@ -33,12 +33,11 @@ import functools
from threading import Thread from threading import Thread
from random import randint from random import randint
from xml.sax.saxutils import escape from xml.sax.saxutils import escape
from Cheetah.Template import Template
import sabnzbd import sabnzbd
import sabnzbd.rss import sabnzbd.rss
import sabnzbd.scheduler as scheduler import sabnzbd.scheduler as scheduler
from Cheetah.Template import Template
from sabnzbd.misc import ( from sabnzbd.misc import (
to_units, to_units,
from_units, from_units,
@ -52,24 +51,17 @@ from sabnzbd.misc import (
) )
from sabnzbd.filesystem import real_path, long_path, globber, globber_full, remove_all, clip_path, same_file from sabnzbd.filesystem import real_path, long_path, globber, globber_full, remove_all, clip_path, same_file
from sabnzbd.newswrapper import GetServerParms from sabnzbd.newswrapper import GetServerParms
from sabnzbd.bpsmeter import BPSMeter
from sabnzbd.encoding import xml_name, utob from sabnzbd.encoding import xml_name, utob
import sabnzbd.config as config import sabnzbd.config as config
import sabnzbd.cfg as cfg import sabnzbd.cfg as cfg
import sabnzbd.notifier as notifier import sabnzbd.notifier as notifier
import sabnzbd.newsunpack import sabnzbd.newsunpack
from sabnzbd.downloader import Downloader
from sabnzbd.nzbqueue import NzbQueue
from sabnzbd.utils.servertests import test_nntp_server_dict from sabnzbd.utils.servertests import test_nntp_server_dict
from sabnzbd.decoder import SABYENC_ENABLED
from sabnzbd.utils.diskspeed import diskspeedmeasure from sabnzbd.utils.diskspeed import diskspeedmeasure
from sabnzbd.utils.getperformance import getpystone from sabnzbd.utils.getperformance import getpystone
from sabnzbd.utils.internetspeed import internetspeed from sabnzbd.utils.internetspeed import internetspeed
from sabnzbd.constants import MEBI, DEF_SKIN_COLORS, DEF_STDCONFIG, DEF_MAIN_TMPL, DEFAULT_PRIORITY, CHEETAH_DIRECTIVES from sabnzbd.constants import MEBI, DEF_SKIN_COLORS, DEF_STDCONFIG, DEF_MAIN_TMPL, DEFAULT_PRIORITY, CHEETAH_DIRECTIVES
from sabnzbd.lang import list_languages from sabnzbd.lang import list_languages
from sabnzbd.api import ( from sabnzbd.api import (
list_scripts, list_scripts,
list_cats, list_cats,
@ -408,7 +400,7 @@ class MainPage:
) )
) )
bytespersec_list = BPSMeter.do.get_bps_list() bytespersec_list = sabnzbd.BPSMeter.get_bps_list()
info["bytespersec_list"] = ",".join([str(bps) for bps in bytespersec_list]) info["bytespersec_list"] = ",".join([str(bps) for bps in bytespersec_list])
template = Template( template = Template(
@ -432,7 +424,7 @@ class MainPage:
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
def pause(self, **kwargs): def pause(self, **kwargs):
scheduler.plan_resume(0) scheduler.plan_resume(0)
Downloader.do.pause() sabnzbd.Downloader.pause()
raise Raiser(self.__root) raise Raiser(self.__root)
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
@ -723,7 +715,7 @@ class NzoPage:
nzo_id = a nzo_id = a
break break
nzo = NzbQueue.do.get_nzo(nzo_id) nzo = sabnzbd.NzbQueue.get_nzo(nzo_id)
if nzo_id and nzo: if nzo_id and nzo:
info, pnfo_list, bytespersec, q_size, bytes_left_previous_page = build_queue_header() info, pnfo_list, bytespersec, q_size, bytes_left_previous_page = build_queue_header()
@ -762,7 +754,7 @@ class NzoPage:
n = 0 n = 0
for pnfo in pnfo_list: for pnfo in pnfo_list:
if pnfo.nzo_id == nzo_id: if pnfo.nzo_id == nzo_id:
nzo = NzbQueue.do.get_nzo(nzo_id) nzo = sabnzbd.NzbQueue.get_nzo(nzo_id)
repair = pnfo.repair repair = pnfo.repair
unpack = pnfo.unpack unpack = pnfo.unpack
delete = pnfo.delete delete = pnfo.delete
@ -795,7 +787,7 @@ class NzoPage:
def nzo_files(self, info, nzo_id): def nzo_files(self, info, nzo_id):
active = [] active = []
nzo = NzbQueue.do.get_nzo(nzo_id) nzo = sabnzbd.NzbQueue.get_nzo(nzo_id)
if nzo: if nzo:
pnfo = nzo.gather_info(full=True) pnfo = nzo.gather_info(full=True)
info["nzo_id"] = pnfo.nzo_id info["nzo_id"] = pnfo.nzo_id
@ -831,15 +823,15 @@ class NzoPage:
script = kwargs.get("script", None) script = kwargs.get("script", None)
cat = kwargs.get("cat", None) cat = kwargs.get("cat", None)
priority = kwargs.get("priority", None) priority = kwargs.get("priority", None)
nzo = NzbQueue.do.get_nzo(nzo_id) nzo = sabnzbd.NzbQueue.get_nzo(nzo_id)
if index is not None: if index is not None:
NzbQueue.do.switch(nzo_id, index) sabnzbd.NzbQueue.switch(nzo_id, index)
if name is not None: if name is not None:
NzbQueue.do.change_name(nzo_id, name, password) sabnzbd.NzbQueue.change_name(nzo_id, name, password)
if cat is not None and nzo.cat is not cat and not (nzo.cat == "*" and cat == "Default"): if cat is not None and nzo.cat is not cat and not (nzo.cat == "*" and cat == "Default"):
NzbQueue.do.change_cat(nzo_id, cat, priority) sabnzbd.NzbQueue.change_cat(nzo_id, cat, priority)
# Category changed, so make sure "Default" attributes aren't set again # Category changed, so make sure "Default" attributes aren't set again
if script == "Default": if script == "Default":
script = None script = None
@ -849,11 +841,11 @@ class NzoPage:
pp = None pp = None
if script is not None and nzo.script != script: if script is not None and nzo.script != script:
NzbQueue.do.change_script(nzo_id, script) sabnzbd.NzbQueue.change_script(nzo_id, script)
if pp is not None and nzo.pp != pp: if pp is not None and nzo.pp != pp:
NzbQueue.do.change_opts(nzo_id, pp) sabnzbd.NzbQueue.change_opts(nzo_id, pp)
if priority is not None and nzo.priority != int(priority): if priority is not None and nzo.priority != int(priority):
NzbQueue.do.set_priority(nzo_id, priority) sabnzbd.NzbQueue.set_priority(nzo_id, priority)
raise Raiser(urllib.parse.urljoin(self.__root, "../queue/")) raise Raiser(urllib.parse.urljoin(self.__root, "../queue/"))
@ -862,7 +854,7 @@ class NzoPage:
if kwargs["action_key"] == "Delete": if kwargs["action_key"] == "Delete":
for key in kwargs: for key in kwargs:
if kwargs[key] == "on": if kwargs[key] == "on":
NzbQueue.do.remove_nzf(nzo_id, key, force_delete=True) sabnzbd.NzbQueue.remove_nzf(nzo_id, key, force_delete=True)
elif kwargs["action_key"] in ("Top", "Up", "Down", "Bottom"): elif kwargs["action_key"] in ("Top", "Up", "Down", "Bottom"):
nzf_ids = [] nzf_ids = []
@ -871,15 +863,15 @@ class NzoPage:
nzf_ids.append(key) nzf_ids.append(key)
size = int_conv(kwargs.get("action_size", 1)) size = int_conv(kwargs.get("action_size", 1))
if kwargs["action_key"] == "Top": if kwargs["action_key"] == "Top":
NzbQueue.do.move_top_bulk(nzo_id, nzf_ids) sabnzbd.NzbQueue.move_top_bulk(nzo_id, nzf_ids)
elif kwargs["action_key"] == "Up": elif kwargs["action_key"] == "Up":
NzbQueue.do.move_up_bulk(nzo_id, nzf_ids, size) sabnzbd.NzbQueue.move_up_bulk(nzo_id, nzf_ids, size)
elif kwargs["action_key"] == "Down": elif kwargs["action_key"] == "Down":
NzbQueue.do.move_down_bulk(nzo_id, nzf_ids, size) sabnzbd.NzbQueue.move_down_bulk(nzo_id, nzf_ids, size)
elif kwargs["action_key"] == "Bottom": elif kwargs["action_key"] == "Bottom":
NzbQueue.do.move_bottom_bulk(nzo_id, nzf_ids) sabnzbd.NzbQueue.move_bottom_bulk(nzo_id, nzf_ids)
if NzbQueue.do.get_nzo(nzo_id): if sabnzbd.NzbQueue.get_nzo(nzo_id):
url = urllib.parse.urljoin(self.__root, nzo_id) url = urllib.parse.urljoin(self.__root, nzo_id)
else: else:
url = urllib.parse.urljoin(self.__root, "../queue") url = urllib.parse.urljoin(self.__root, "../queue")
@ -910,12 +902,12 @@ class QueuePage:
uid = kwargs.get("uid") uid = kwargs.get("uid")
del_files = int_conv(kwargs.get("del_files")) del_files = int_conv(kwargs.get("del_files"))
if uid: if uid:
NzbQueue.do.remove(uid, add_to_history=False, delete_all_data=del_files) sabnzbd.NzbQueue.remove(uid, add_to_history=False, delete_all_data=del_files)
raise queueRaiser(self.__root, kwargs) raise queueRaiser(self.__root, kwargs)
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
def purge(self, **kwargs): def purge(self, **kwargs):
NzbQueue.do.remove_all(kwargs.get("search")) sabnzbd.NzbQueue.remove_all(kwargs.get("search"))
raise queueRaiser(self.__root, kwargs) raise queueRaiser(self.__root, kwargs)
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
@ -932,7 +924,7 @@ class QueuePage:
uid1 = kwargs.get("uid1") uid1 = kwargs.get("uid1")
uid2 = kwargs.get("uid2") uid2 = kwargs.get("uid2")
if uid1 and uid2: if uid1 and uid2:
NzbQueue.do.switch(uid1, uid2) sabnzbd.NzbQueue.switch(uid1, uid2)
raise queueRaiser(self.__root, kwargs) raise queueRaiser(self.__root, kwargs)
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
@ -940,7 +932,7 @@ class QueuePage:
nzo_id = kwargs.get("nzo_id") nzo_id = kwargs.get("nzo_id")
pp = kwargs.get("pp", "") pp = kwargs.get("pp", "")
if nzo_id and pp and pp.isdigit(): if nzo_id and pp and pp.isdigit():
NzbQueue.do.change_opts(nzo_id, int(pp)) sabnzbd.NzbQueue.change_opts(nzo_id, int(pp))
raise queueRaiser(self.__root, kwargs) raise queueRaiser(self.__root, kwargs)
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
@ -950,7 +942,7 @@ class QueuePage:
if nzo_id and script: if nzo_id and script:
if script == "None": if script == "None":
script = None script = None
NzbQueue.do.change_script(nzo_id, script) sabnzbd.NzbQueue.change_script(nzo_id, script)
raise queueRaiser(self.__root, kwargs) raise queueRaiser(self.__root, kwargs)
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
@ -960,7 +952,7 @@ class QueuePage:
if nzo_id and cat: if nzo_id and cat:
if cat == "None": if cat == "None":
cat = None cat = None
NzbQueue.do.change_cat(nzo_id, cat) sabnzbd.NzbQueue.change_cat(nzo_id, cat)
raise queueRaiser(self.__root, kwargs) raise queueRaiser(self.__root, kwargs)
@ -972,7 +964,7 @@ class QueuePage:
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
def pause(self, **kwargs): def pause(self, **kwargs):
scheduler.plan_resume(0) scheduler.plan_resume(0)
Downloader.do.pause() sabnzbd.Downloader.pause()
raise queueRaiser(self.__root, kwargs) raise queueRaiser(self.__root, kwargs)
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
@ -984,33 +976,33 @@ class QueuePage:
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
def pause_nzo(self, **kwargs): def pause_nzo(self, **kwargs):
uid = kwargs.get("uid", "") uid = kwargs.get("uid", "")
NzbQueue.do.pause_multiple_nzo(uid.split(",")) sabnzbd.NzbQueue.pause_multiple_nzo(uid.split(","))
raise queueRaiser(self.__root, kwargs) raise queueRaiser(self.__root, kwargs)
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
def resume_nzo(self, **kwargs): def resume_nzo(self, **kwargs):
uid = kwargs.get("uid", "") uid = kwargs.get("uid", "")
NzbQueue.do.resume_multiple_nzo(uid.split(",")) sabnzbd.NzbQueue.resume_multiple_nzo(uid.split(","))
raise queueRaiser(self.__root, kwargs) raise queueRaiser(self.__root, kwargs)
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
def set_priority(self, **kwargs): def set_priority(self, **kwargs):
NzbQueue.do.set_priority(kwargs.get("nzo_id"), kwargs.get("priority")) sabnzbd.NzbQueue.set_priority(kwargs.get("nzo_id"), kwargs.get("priority"))
raise queueRaiser(self.__root, kwargs) raise queueRaiser(self.__root, kwargs)
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
def sort_by_avg_age(self, **kwargs): def sort_by_avg_age(self, **kwargs):
NzbQueue.do.sort_queue("avg_age", kwargs.get("dir")) sabnzbd.NzbQueue.sort_queue("avg_age", kwargs.get("dir"))
raise queueRaiser(self.__root, kwargs) raise queueRaiser(self.__root, kwargs)
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
def sort_by_name(self, **kwargs): def sort_by_name(self, **kwargs):
NzbQueue.do.sort_queue("name", kwargs.get("dir")) sabnzbd.NzbQueue.sort_queue("name", kwargs.get("dir"))
raise queueRaiser(self.__root, kwargs) raise queueRaiser(self.__root, kwargs)
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
def sort_by_size(self, **kwargs): def sort_by_size(self, **kwargs):
NzbQueue.do.sort_queue("size", kwargs.get("dir")) sabnzbd.NzbQueue.sort_queue("size", kwargs.get("dir"))
raise queueRaiser(self.__root, kwargs) raise queueRaiser(self.__root, kwargs)
@ -1034,7 +1026,7 @@ class HistoryPage:
history["rating_enable"] = bool(cfg.rating_enable()) history["rating_enable"] = bool(cfg.rating_enable())
postfix = T("B") # : Abbreviation for bytes, as in GB postfix = T("B") # : Abbreviation for bytes, as in GB
grand, month, week, day = BPSMeter.do.get_sums() grand, month, week, day = sabnzbd.BPSMeter.get_sums()
history["total_size"], history["month_size"], history["week_size"], history["day_size"] = ( history["total_size"], history["month_size"], history["week_size"], history["day_size"] = (
to_units(grand, postfix=postfix), to_units(grand, postfix=postfix),
to_units(month, postfix=postfix), to_units(month, postfix=postfix),
@ -1113,7 +1105,7 @@ class ConfigPage:
conf["have_unzip"] = bool(sabnzbd.newsunpack.ZIP_COMMAND) conf["have_unzip"] = bool(sabnzbd.newsunpack.ZIP_COMMAND)
conf["have_7zip"] = bool(sabnzbd.newsunpack.SEVEN_COMMAND) conf["have_7zip"] = bool(sabnzbd.newsunpack.SEVEN_COMMAND)
conf["have_sabyenc"] = SABYENC_ENABLED conf["have_sabyenc"] = sabnzbd.decoder.SABYENC_ENABLED
conf["have_mt_par2"] = sabnzbd.newsunpack.PAR2_MT conf["have_mt_par2"] = sabnzbd.newsunpack.PAR2_MT
conf["certificate_validation"] = sabnzbd.CERTIFICATE_VALIDATION conf["certificate_validation"] = sabnzbd.CERTIFICATE_VALIDATION
@ -1124,7 +1116,7 @@ class ConfigPage:
new[svr] = {} new[svr] = {}
conf["servers"] = new conf["servers"] = new
conf["folders"] = NzbQueue.do.scan_jobs(all_jobs=False, action=False) conf["folders"] = sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=False)
template = Template( template = Template(
file=os.path.join(sabnzbd.WEB_DIR_CONFIG, "config.tmpl"), file=os.path.join(sabnzbd.WEB_DIR_CONFIG, "config.tmpl"),
@ -1589,7 +1581,7 @@ class ConfigServer:
) )
for svr in server_names: for svr in server_names:
new.append(servers[svr].get_dict(safe=True)) new.append(servers[svr].get_dict(safe=True))
t, m, w, d, timeline = BPSMeter.do.amounts(svr) t, m, w, d, timeline = sabnzbd.BPSMeter.amounts(svr)
if t: if t:
new[-1]["amounts"] = to_units(t), to_units(m), to_units(w), to_units(d), timeline new[-1]["amounts"] = to_units(t), to_units(m), to_units(w), to_units(d), timeline
conf["servers"] = new conf["servers"] = new
@ -1626,7 +1618,7 @@ class ConfigServer:
def clrServer(self, **kwargs): def clrServer(self, **kwargs):
server = kwargs.get("server") server = kwargs.get("server")
if server: if server:
BPSMeter.do.clear_server(server) sabnzbd.BPSMeter.clear_server(server)
raise Raiser(self.__root) raise Raiser(self.__root)
@secured_expose(check_api_key=True, check_configlock=True) @secured_expose(check_api_key=True, check_configlock=True)
@ -1637,7 +1629,7 @@ class ConfigServer:
if svr: if svr:
svr.enable.set(not svr.enable()) svr.enable.set(not svr.enable())
config.save_config() config.save_config()
Downloader.do.update_server(server, server) sabnzbd.Downloader.update_server(server, server)
raise Raiser(self.__root) raise Raiser(self.__root)
@ -1715,7 +1707,7 @@ def handle_server(kwargs, root=None, new_svr=False):
config.ConfigServer(server, kwargs) config.ConfigServer(server, kwargs)
config.save_config() config.save_config()
Downloader.do.update_server(old_server, server) sabnzbd.Downloader.update_server(old_server, server)
if root: if root:
if ajax: if ajax:
return sabnzbd.api.report("json") return sabnzbd.api.report("json")
@ -2420,12 +2412,12 @@ class Status:
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
def reset_quota(self, **kwargs): def reset_quota(self, **kwargs):
BPSMeter.do.reset_quota(force=True) sabnzbd.BPSMeter.reset_quota(force=True)
raise Raiser(self.__root) raise Raiser(self.__root)
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
def disconnect(self, **kwargs): def disconnect(self, **kwargs):
Downloader.do.disconnect() sabnzbd.Downloader.disconnect()
raise Raiser(self.__root) raise Raiser(self.__root)
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
@ -2487,7 +2479,7 @@ class Status:
@secured_expose(check_api_key=True) @secured_expose(check_api_key=True)
def unblock_server(self, **kwargs): def unblock_server(self, **kwargs):
Downloader.do.unblock(kwargs.get("server")) sabnzbd.Downloader.unblock(kwargs.get("server"))
# Short sleep so that UI shows new server status # Short sleep so that UI shows new server status
time.sleep(1.0) time.sleep(1.0)
raise Raiser(self.__root) raise Raiser(self.__root)
@ -2550,7 +2542,7 @@ def orphan_delete(kwargs):
def orphan_delete_all(): def orphan_delete_all():
paths = NzbQueue.do.scan_jobs(all_jobs=False, action=False) paths = sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=False)
for path in paths: for path in paths:
kwargs = {"name": path} kwargs = {"name": path}
orphan_delete(kwargs) orphan_delete(kwargs)
@ -2561,11 +2553,11 @@ def orphan_add(kwargs):
if path: if path:
path = os.path.join(long_path(cfg.download_dir.get_path()), path) path = os.path.join(long_path(cfg.download_dir.get_path()), path)
logging.info("Re-adding orphaned job %s", path) logging.info("Re-adding orphaned job %s", path)
NzbQueue.do.repair_job(path, None, None) sabnzbd.NzbQueue.repair_job(path, None, None)
def orphan_add_all(): def orphan_add_all():
paths = NzbQueue.do.scan_jobs(all_jobs=False, action=False) paths = sabnzbd.NzbQueue.scan_jobs(all_jobs=False, action=False)
for path in paths: for path in paths:
kwargs = {"name": path} kwargs = {"name": path}
orphan_add(kwargs) orphan_add(kwargs)

2
sabnzbd/newswrapper.py

@ -50,7 +50,7 @@ def _retrieve_info(server):
else: else:
server.bad_cons = 0 server.bad_cons = 0
(server.info, server.request) = (info, False) (server.info, server.request) = (info, False)
sabnzbd.downloader.Downloader.do.wakeup() sabnzbd.Downloader.wakeup()
def request_server_info(server): def request_server_info(server):

10
sabnzbd/nzbparser.py

@ -234,10 +234,10 @@ def process_nzb_archive_file(
if nzo: if nzo:
if nzo_id: if nzo_id:
# Re-use existing nzo_id, when a "future" job gets it payload # Re-use existing nzo_id, when a "future" job gets it payload
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False, delete_all_data=False) sabnzbd.NzbQueue.remove(nzo_id, add_to_history=False, delete_all_data=False)
nzo.nzo_id = nzo_id nzo.nzo_id = nzo_id
nzo_id = None nzo_id = None
nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo)) nzo_ids.append(sabnzbd.NzbQueue.add(nzo))
nzo.update_rating() nzo.update_rating()
zf.close() zf.close()
try: try:
@ -329,7 +329,7 @@ def process_single_nzb(
except TypeError: except TypeError:
# Duplicate, ignore # Duplicate, ignore
if nzo_id: if nzo_id:
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False) sabnzbd.NzbQueue.remove(nzo_id, add_to_history=False)
nzo = None nzo = None
except ValueError: except ValueError:
# Empty # Empty
@ -346,9 +346,9 @@ def process_single_nzb(
if nzo: if nzo:
if nzo_id: if nzo_id:
# Re-use existing nzo_id, when a "future" job gets it payload # Re-use existing nzo_id, when a "future" job gets it payload
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False, delete_all_data=False) sabnzbd.NzbQueue.remove(nzo_id, add_to_history=False, delete_all_data=False)
nzo.nzo_id = nzo_id nzo.nzo_id = nzo_id
nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo, quiet=reuse)) nzo_ids.append(sabnzbd.NzbQueue.add(nzo, quiet=reuse))
nzo.update_rating() nzo.update_rating()
try: try:

17
sabnzbd/nzbqueue.py

@ -55,9 +55,8 @@ from sabnzbd.constants import (
import sabnzbd.cfg as cfg import sabnzbd.cfg as cfg
import sabnzbd.downloader import sabnzbd.downloader
from sabnzbd.assembler import Assembler, file_has_articles from sabnzbd.assembler import file_has_articles
import sabnzbd.notifier as notifier import sabnzbd.notifier as notifier
from sabnzbd.bpsmeter import BPSMeter
class NzbQueue: class NzbQueue:
@ -70,8 +69,6 @@ class NzbQueue:
self.__nzo_list: List[NzbObject] = [] self.__nzo_list: List[NzbObject] = []
self.__nzo_table: Dict[str, NzbObject] = {} self.__nzo_table: Dict[str, NzbObject] = {}
NzbQueue.do = self
def read_queue(self, repair): def read_queue(self, repair):
"""Read queue from disk, supporting repair modes """Read queue from disk, supporting repair modes
0 = no repairs 0 = no repairs
@ -411,7 +408,7 @@ class NzbQueue:
# Any files left? Otherwise let's disconnect # Any files left? Otherwise let's disconnect
if self.actives(grabs=False) == 0 and cfg.autodisconnect(): if self.actives(grabs=False) == 0 and cfg.autodisconnect():
# This was the last job, close server connections # This was the last job, close server connections
sabnzbd.downloader.Downloader.do.disconnect() sabnzbd.Downloader.disconnect()
return removed return removed
@ -754,7 +751,7 @@ class NzbQueue:
# Only start decoding if we have a filename and type # Only start decoding if we have a filename and type
# The type is only set if sabyenc could decode the article # The type is only set if sabyenc could decode the article
if nzf.filename and nzf.type: if nzf.filename and nzf.type:
Assembler.do.process((nzo, nzf, file_done)) sabnzbd.Assembler.process((nzo, nzf, file_done))
elif nzf.filename.lower().endswith(".par2"): elif nzf.filename.lower().endswith(".par2"):
# Broken par2 file, try to get another one # Broken par2 file, try to get another one
nzo.promote_par2(nzf) nzo.promote_par2(nzf)
@ -765,7 +762,7 @@ class NzbQueue:
# Save bookkeeping in case of crash # Save bookkeeping in case of crash
if file_done and (nzo.next_save is None or time.time() > nzo.next_save): if file_done and (nzo.next_save is None or time.time() > nzo.next_save):
nzo.save_to_disk() nzo.save_to_disk()
BPSMeter.do.save() sabnzbd.BPSMeter.save()
if nzo.save_timeout is None: if nzo.save_timeout is None:
nzo.next_save = None nzo.next_save = None
else: else:
@ -793,7 +790,7 @@ class NzbQueue:
else: else:
# Not enough data, let postprocessor show it as failed # Not enough data, let postprocessor show it as failed
pass pass
Assembler.do.process((nzo, None, None)) sabnzbd.Assembler.process((nzo, None, None))
def actives(self, grabs=True) -> int: def actives(self, grabs=True) -> int:
"""Return amount of non-paused jobs, optionally with 'grabbing' items """Return amount of non-paused jobs, optionally with 'grabbing' items
@ -869,10 +866,10 @@ class NzbQueue:
# Stall prevention by checking if all servers are in the trylist # Stall prevention by checking if all servers are in the trylist
# This is a CPU-cheaper alternative to prevent stalling # This is a CPU-cheaper alternative to prevent stalling
if len(nzo.try_list) == sabnzbd.downloader.Downloader.do.server_nr: if len(nzo.try_list) == sabnzbd.Downloader.server_nr:
# Maybe the NZF's need a reset too? # Maybe the NZF's need a reset too?
for nzf in nzo.files: for nzf in nzo.files:
if len(nzf.try_list) == sabnzbd.downloader.Downloader.do.server_nr: if len(nzf.try_list) == sabnzbd.Downloader.server_nr:
# We do not want to reset all article trylists, they are good # We do not want to reset all article trylists, they are good
logging.info("Resetting bad trylist for file %s in job %s", nzf.filename, nzo.final_name) logging.info("Resetting bad trylist for file %s in job %s", nzf.filename, nzo.final_name)
nzf.reset_try_list() nzf.reset_try_list()

24
sabnzbd/nzbstuff.py

@ -85,8 +85,6 @@ import sabnzbd.config as config
import sabnzbd.cfg as cfg import sabnzbd.cfg as cfg
import sabnzbd.nzbparser import sabnzbd.nzbparser
from sabnzbd.database import HistoryDB from sabnzbd.database import HistoryDB
from sabnzbd.articlecache import ArticleCache
from sabnzbd.rating import Rating
# Name patterns # Name patterns
SUBJECT_FN_MATCHER = re.compile(r'"([^"]*)"') SUBJECT_FN_MATCHER = re.compile(r'"([^"]*)"')
@ -112,7 +110,7 @@ class TryList:
self.try_list: List[sabnzbd.downloader.Server] = [] self.try_list: List[sabnzbd.downloader.Server] = []
self.fetcher_priority = 0 self.fetcher_priority = 0
def server_in_try_list(self, server): def server_in_try_list(self, server: sabnzbd.downloader.Server):
""" Return whether specified server has been tried """ """ Return whether specified server has been tried """
with TRYLIST_LOCK: with TRYLIST_LOCK:
return server in self.try_list return server in self.try_list
@ -135,8 +133,8 @@ class TryList:
def __setstate__(self, servers_ids): def __setstate__(self, servers_ids):
self.try_list = [] self.try_list = []
for server_id in servers_ids: for server_id in servers_ids:
if server_id in sabnzbd.downloader.Downloader.do.server_dict: if server_id in sabnzbd.Downloader.server_dict:
self.add_to_try_list(sabnzbd.downloader.Downloader.do.server_dict[server_id]) self.add_to_try_list(sabnzbd.Downloader.server_dict[server_id])
############################################################################## ##############################################################################
@ -238,12 +236,12 @@ class Article(TryList):
def search_new_server(self): def search_new_server(self):
# Search new server # Search new server
self.add_to_try_list(self.fetcher) self.add_to_try_list(self.fetcher)
for server in sabnzbd.downloader.Downloader.do.servers: for server in sabnzbd.Downloader.servers:
if server.active and not self.server_in_try_list(server): if server.active and not self.server_in_try_list(server):
if server.priority >= self.fetcher.priority: if server.priority >= self.fetcher.priority:
self.tries = 0 self.tries = 0
# Allow all servers for this nzo and nzf again (but not for this article) # Allow all servers for this nzo and nzf again (but not for this article)
sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(self, article_reset=False) sabnzbd.NzbQueue.reset_try_lists(self, article_reset=False)
return True return True
logging.info(T("%s => missing from all servers, discarding") % self) logging.info(T("%s => missing from all servers, discarding") % self)
@ -915,8 +913,8 @@ class NzbObject(TryList):
if accept == 2: if accept == 2:
self.deleted = True self.deleted = True
self.status = Status.FAILED self.status = Status.FAILED
sabnzbd.NzbQueue.do.add(self, quiet=True) sabnzbd.NzbQueue.add(self, quiet=True)
sabnzbd.NzbQueue.do.end_job(self) sabnzbd.NzbQueue.end_job(self)
# Raise error, so it's not added # Raise error, so it's not added
raise TypeError raise TypeError
@ -1576,7 +1574,7 @@ class NzbObject(TryList):
if not nzf.import_finished: if not nzf.import_finished:
# Only load NZF when it's a primary server # Only load NZF when it's a primary server
# or when it's a backup server without active primaries # or when it's a backup server without active primaries
if sabnzbd.highest_server(server): if sabnzbd.Downloader.highest_server(server):
nzf.finish_import() nzf.finish_import()
# Still not finished? Something went wrong... # Still not finished? Something went wrong...
if not nzf.import_finished and not self.is_gone(): if not nzf.import_finished and not self.is_gone():
@ -1598,7 +1596,7 @@ class NzbObject(TryList):
# If cleanup emptied the active files list, end this job # If cleanup emptied the active files list, end this job
if nzf_remove_list and not self.files: if nzf_remove_list and not self.files:
sabnzbd.NzbQueue.do.end_job(self) sabnzbd.NzbQueue.end_job(self)
if not article: if not article:
# No articles for this server, block for next time # No articles for this server, block for next time
@ -1758,7 +1756,7 @@ class NzbObject(TryList):
fields = {} fields = {}
for k in rating_types: for k in rating_types:
fields[k] = _get_first_meta(k) fields[k] = _get_first_meta(k)
Rating.do.add_rating(_get_first_meta("id"), self.nzo_id, fields) sabnzbd.Rating.add_rating(_get_first_meta("id"), self.nzo_id, fields)
except: except:
pass pass
@ -1798,7 +1796,7 @@ class NzbObject(TryList):
self.abort_direct_unpacker() self.abort_direct_unpacker()
# Remove all cached files # Remove all cached files
ArticleCache.do.purge_articles(self.saved_articles) sabnzbd.ArticleCache.purge_articles(self.saved_articles)
# Delete all, or just basic files # Delete all, or just basic files
if self.futuretype: if self.futuretype:

22
sabnzbd/osxmenu.py

@ -41,11 +41,9 @@ from sabnzbd.panic import launch_a_browser
import sabnzbd.notifier as notifier import sabnzbd.notifier as notifier
from sabnzbd.api import fast_queue from sabnzbd.api import fast_queue
from sabnzbd.nzbqueue import NzbQueue
import sabnzbd.config as config import sabnzbd.config as config
import sabnzbd.scheduler as scheduler import sabnzbd.scheduler as scheduler
import sabnzbd.downloader import sabnzbd.downloader
from sabnzbd.bpsmeter import BPSMeter
status_icons = { status_icons = {
"idle": "icons/sabnzbd_osx_idle.tiff", "idle": "icons/sabnzbd_osx_idle.tiff",
@ -113,7 +111,7 @@ class SABnzbdDelegate(NSObject):
# Variables # Variables
self.state = "Idle" self.state = "Idle"
try: try:
self.speed = sabnzbd.downloader.Downloader.do.get_limit() self.speed = sabnzbd.Downloader.get_limit()
except: except:
self.speed = 0 self.speed = 0
self.version_notify = 1 self.version_notify = 1
@ -386,7 +384,7 @@ class SABnzbdDelegate(NSObject):
def queueUpdate(self): def queueUpdate(self):
try: try:
qnfo = NzbQueue.do.queue_info(start=0, limit=10) qnfo = sabnzbd.NzbQueue.queue_info(start=0, limit=10)
pnfo_list = qnfo.list pnfo_list = qnfo.list
bytesleftprogess = 0 bytesleftprogess = 0
@ -407,7 +405,7 @@ class SABnzbdDelegate(NSObject):
bytesleftprogess += pnfo.bytes_left bytesleftprogess += pnfo.bytes_left
bytes_total = pnfo.bytes / MEBI bytes_total = pnfo.bytes / MEBI
nzo_id = pnfo.nzo_id nzo_id = pnfo.nzo_id
timeleft = self.calc_timeleft_(bytesleftprogess, BPSMeter.do.bps) timeleft = self.calc_timeleft_(bytesleftprogess, sabnzbd.BPSMeter.bps)
job = "%s\t(%d/%d MB) %s" % (pnfo.filename, bytesleft, bytes_total, timeleft) job = "%s\t(%d/%d MB) %s" % (pnfo.filename, bytesleft, bytes_total, timeleft)
menu_queue_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(job, "", "") menu_queue_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(job, "", "")
@ -546,7 +544,7 @@ class SABnzbdDelegate(NSObject):
def iconUpdate(self): def iconUpdate(self):
try: try:
if sabnzbd.downloader.Downloader.do.paused: if sabnzbd.Downloader.paused:
self.status_item.setImage_(self.icons["pause"]) self.status_item.setImage_(self.icons["pause"])
else: else:
self.status_item.setImage_(self.icons["idle"]) self.status_item.setImage_(self.icons["idle"])
@ -555,7 +553,7 @@ class SABnzbdDelegate(NSObject):
def pauseUpdate(self): def pauseUpdate(self):
try: try:
if sabnzbd.downloader.Downloader.do.paused: if sabnzbd.Downloader.paused:
if self.isLeopard: if self.isLeopard:
self.resume_menu_item.setHidden_(NO) self.resume_menu_item.setHidden_(NO)
self.pause_menu_item.setHidden_(YES) self.pause_menu_item.setHidden_(YES)
@ -574,7 +572,7 @@ class SABnzbdDelegate(NSObject):
def speedlimitUpdate(self): def speedlimitUpdate(self):
try: try:
speed = int(sabnzbd.downloader.Downloader.do.get_limit()) speed = int(sabnzbd.Downloader.get_limit())
if self.speed != speed: if self.speed != speed:
self.speed = speed self.speed = speed
speedsValues = self.menu_speed.numberOfItems() speedsValues = self.menu_speed.numberOfItems()
@ -735,14 +733,14 @@ class SABnzbdDelegate(NSObject):
# logging.info("[osx] speed limit to %s" % (sender.representedObject())) # logging.info("[osx] speed limit to %s" % (sender.representedObject()))
speed = int(sender.representedObject()) speed = int(sender.representedObject())
if speed != self.speed: if speed != self.speed:
sabnzbd.downloader.Downloader.do.limit_speed("%s%%" % speed) sabnzbd.Downloader.limit_speed("%s%%" % speed)
self.speedlimitUpdate() self.speedlimitUpdate()
def purgeAction_(self, sender): def purgeAction_(self, sender):
mode = sender.representedObject() mode = sender.representedObject()
# logging.info("[osx] purge %s" % (mode)) # logging.info("[osx] purge %s" % (mode))
if mode == "queue": if mode == "queue":
NzbQueue.do.remove_all() sabnzbd.NzbQueue.remove_all()
elif mode == "history": elif mode == "history":
if not self.history_db: if not self.history_db:
self.history_db = sabnzbd.database.HistoryDB() self.history_db = sabnzbd.database.HistoryDB()
@ -754,13 +752,13 @@ class SABnzbdDelegate(NSObject):
if minutes: if minutes:
scheduler.plan_resume(minutes) scheduler.plan_resume(minutes)
else: else:
sabnzbd.downloader.Downloader.do.pause() sabnzbd.Downloader.pause()
def resumeAction_(self, sender): def resumeAction_(self, sender):
scheduler.plan_resume(0) scheduler.plan_resume(0)
def watchedFolderAction_(self, sender): def watchedFolderAction_(self, sender):
sabnzbd.dirscanner.dirscan() sabnzbd.DirScanner.scan()
def rssAction_(self, sender): def rssAction_(self, sender):
scheduler.force_rss() scheduler.force_rss()

24
sabnzbd/postproc.py

@ -75,7 +75,6 @@ from sabnzbd.constants import (
VERIFIED_FILE, VERIFIED_FILE,
) )
from sabnzbd.nzbparser import process_single_nzb from sabnzbd.nzbparser import process_single_nzb
from sabnzbd.rating import Rating
import sabnzbd.emailer as emailer import sabnzbd.emailer as emailer
import sabnzbd.downloader import sabnzbd.downloader
import sabnzbd.config as config import sabnzbd.config as config
@ -126,7 +125,6 @@ class PostProcessor(Thread):
self.__stop = False self.__stop = False
self.__busy = False self.__busy = False
self.paused = False self.paused = False
PostProcessor.do = self
def save(self): def save(self):
""" Save postproc queue """ """ Save postproc queue """
@ -162,7 +160,7 @@ class PostProcessor(Thread):
nzo.work_name = "" # Mark as deleted job nzo.work_name = "" # Mark as deleted job
break break
def process(self, nzo): def process(self, nzo: sabnzbd.nzbstuff.NzbObject):
""" Push on finished job in the queue """ """ Push on finished job in the queue """
if nzo not in self.history_queue: if nzo not in self.history_queue:
self.history_queue.append(nzo) self.history_queue.append(nzo)
@ -273,7 +271,7 @@ class PostProcessor(Thread):
# Pause downloader, if users wants that # Pause downloader, if users wants that
if cfg.pause_on_post_processing(): if cfg.pause_on_post_processing():
sabnzbd.downloader.Downloader.do.wait_for_postproc() sabnzbd.Downloader.wait_for_postproc()
self.__busy = True self.__busy = True
@ -292,7 +290,7 @@ class PostProcessor(Thread):
check_eoq = True check_eoq = True
# Allow download to proceed # Allow download to proceed
sabnzbd.downloader.Downloader.do.resume_from_postproc() sabnzbd.Downloader.resume_from_postproc()
def process_job(nzo): def process_job(nzo):
@ -381,9 +379,9 @@ def process_job(nzo):
return False return False
# If we don't need extra par2, we can disconnect # If we don't need extra par2, we can disconnect
if sabnzbd.nzbqueue.NzbQueue.do.actives(grabs=False) == 0 and cfg.autodisconnect(): if sabnzbd.NzbQueue.actives(grabs=False) == 0 and cfg.autodisconnect():
# This was the last job, close server connections # This was the last job, close server connections
sabnzbd.downloader.Downloader.do.disconnect() sabnzbd.Downloader.disconnect()
# Sanitize the resulting files # Sanitize the resulting files
if sabnzbd.WIN32: if sabnzbd.WIN32:
@ -591,13 +589,13 @@ def process_job(nzo):
# Update indexer with results # Update indexer with results
if cfg.rating_enable(): if cfg.rating_enable():
if nzo.encrypted > 0: if nzo.encrypted > 0:
Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_ENCRYPTED) sabnzbd.Rating.update_auto_flag(nzo.nzo_id, sabnzbd.Rating.FLAG_ENCRYPTED)
if empty: if empty:
hosts = [s.host for s in sabnzbd.downloader.Downloader.do.nzo_servers(nzo)] hosts = [s.host for s in sabnzbd.Downloader.nzo_servers(nzo)]
if not hosts: if not hosts:
hosts = [None] hosts = [None]
for host in hosts: for host in hosts:
Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_EXPIRED, host) sabnzbd.Rating.update_auto_flag(nzo.nzo_id, sabnzbd.Rating.FLAG_EXPIRED, host)
except: except:
logging.error(T("Post Processing Failed for %s (%s)"), filename, T("see logfile")) logging.error(T("Post Processing Failed for %s (%s)"), filename, T("see logfile"))
@ -791,8 +789,8 @@ def parring(nzo, workdir):
if nzo.priority != FORCE_PRIORITY: if nzo.priority != FORCE_PRIORITY:
nzo.priority = REPAIR_PRIORITY nzo.priority = REPAIR_PRIORITY
nzo.status = Status.FETCHING nzo.status = Status.FETCHING
sabnzbd.nzbqueue.NzbQueue.do.add(nzo) sabnzbd.NzbQueue.add(nzo)
sabnzbd.downloader.Downloader.do.resume_from_postproc() sabnzbd.Downloader.resume_from_postproc()
sabnzbd.save_data(verified, VERIFIED_FILE, nzo.workpath) sabnzbd.save_data(verified, VERIFIED_FILE, nzo.workpath)
@ -988,7 +986,7 @@ def rar_renamer(nzo, workdir):
def handle_empty_queue(): def handle_empty_queue():
""" Check if empty queue calls for action """ """ Check if empty queue calls for action """
if sabnzbd.nzbqueue.NzbQueue.do.actives() == 0: if sabnzbd.NzbQueue.actives() == 0:
sabnzbd.save_state() sabnzbd.save_state()
notifier.send_notification("SABnzbd", T("Queue finished"), "queue_done") notifier.send_notification("SABnzbd", T("Queue finished"), "queue_done")

1
sabnzbd/rating.py

@ -113,7 +113,6 @@ class Rating(Thread):
do = None do = None
def __init__(self): def __init__(self):
Rating.do = self
self.shutdown = False self.shutdown = False
self.queue = OrderedSetQueue() self.queue = OrderedSetQueue()
try: try:

1
sabnzbd/rss.py

@ -232,7 +232,6 @@ class RSSQueue:
uris = feeds.uri() uris = feeds.uri()
defCat = feeds.cat() defCat = feeds.cat()
import sabnzbd.api
if not notdefault(defCat) or defCat not in sabnzbd.api.list_cats(default=False): if not notdefault(defCat) or defCat not in sabnzbd.api.list_cats(default=False):
defCat = None defCat = None

3
sabnzbd/sabtray.py

@ -27,7 +27,6 @@ import sabnzbd
from sabnzbd.panic import launch_a_browser from sabnzbd.panic import launch_a_browser
import sabnzbd.api as api import sabnzbd.api as api
import sabnzbd.scheduler as scheduler import sabnzbd.scheduler as scheduler
from sabnzbd.downloader import Downloader
import sabnzbd.cfg as cfg import sabnzbd.cfg as cfg
from sabnzbd.misc import to_units from sabnzbd.misc import to_units
@ -194,7 +193,7 @@ class SABTrayThread(SysTrayIconThread):
def pause(self): def pause(self):
scheduler.plan_resume(0) scheduler.plan_resume(0)
Downloader.do.pause() sabnzbd.Downloader.pause()
def resume(self): def resume(self):
scheduler.plan_resume(0) scheduler.plan_resume(0)

3
sabnzbd/sabtraylinux.py

@ -43,7 +43,6 @@ import sabnzbd
from sabnzbd.panic import launch_a_browser from sabnzbd.panic import launch_a_browser
import sabnzbd.api as api import sabnzbd.api as api
import sabnzbd.scheduler as scheduler import sabnzbd.scheduler as scheduler
from sabnzbd.downloader import Downloader
import sabnzbd.cfg as cfg import sabnzbd.cfg as cfg
from sabnzbd.misc import to_units from sabnzbd.misc import to_units
@ -196,7 +195,7 @@ class StatusIcon(Thread):
def pause(self): def pause(self):
scheduler.plan_resume(0) scheduler.plan_resume(0)
Downloader.do.pause() sabnzbd.Downloader.pause()
def resume(self): def resume(self):
scheduler.plan_resume(0) scheduler.plan_resume(0)

47
sabnzbd/scheduler.py

@ -30,7 +30,6 @@ import sabnzbd.dirscanner
import sabnzbd.misc import sabnzbd.misc
import sabnzbd.config as config import sabnzbd.config as config
import sabnzbd.cfg as cfg import sabnzbd.cfg as cfg
from sabnzbd.postproc import PostProcessor
from sabnzbd.constants import LOW_PRIORITY, NORMAL_PRIORITY, HIGH_PRIORITY from sabnzbd.constants import LOW_PRIORITY, NORMAL_PRIORITY, HIGH_PRIORITY
@ -47,11 +46,11 @@ def schedule_guard():
def pp_pause(): def pp_pause():
PostProcessor.do.paused = True sabnzbd.PostProcessor.paused = True
def pp_resume(): def pp_resume():
PostProcessor.do.paused = False sabnzbd.PostProcessor.paused = False
def pp_pause_event(): def pp_pause_event():
@ -98,7 +97,7 @@ def init():
action = scheduled_resume action = scheduled_resume
arguments = [] arguments = []
elif action_name == "pause": elif action_name == "pause":
action = sabnzbd.downloader.Downloader.do.pause action = sabnzbd.Downloader.pause
arguments = [] arguments = []
elif action_name == "pause_all": elif action_name == "pause_all":
action = sabnzbd.pause_all action = sabnzbd.pause_all
@ -114,7 +113,7 @@ def init():
elif action_name == "resume_post": elif action_name == "resume_post":
action = pp_resume action = pp_resume
elif action_name == "speedlimit" and arguments != []: elif action_name == "speedlimit" and arguments != []:
action = sabnzbd.downloader.Downloader.do.limit_speed action = sabnzbd.Downloader.limit_speed
elif action_name == "enable_server" and arguments != []: elif action_name == "enable_server" and arguments != []:
action = sabnzbd.enable_server action = sabnzbd.enable_server
elif action_name == "disable_server" and arguments != []: elif action_name == "disable_server" and arguments != []:
@ -129,34 +128,34 @@ def init():
elif action_name == "remove_completed": elif action_name == "remove_completed":
action = sabnzbd.api.history_remove_completed action = sabnzbd.api.history_remove_completed
elif action_name == "enable_quota": elif action_name == "enable_quota":
action = sabnzbd.bpsmeter.BPSMeter.do.set_status action = sabnzbd.BPSMeter.set_status
arguments = [True] arguments = [True]
elif action_name == "disable_quota": elif action_name == "disable_quota":
action = sabnzbd.bpsmeter.BPSMeter.do.set_status action = sabnzbd.BPSMeter.set_status
arguments = [False] arguments = [False]
elif action_name == "pause_all_low": elif action_name == "pause_all_low":
action = sabnzbd.nzbqueue.NzbQueue.do.pause_on_prio action = sabnzbd.NzbQueue.pause_on_prio
arguments = [LOW_PRIORITY] arguments = [LOW_PRIORITY]
elif action_name == "pause_all_normal": elif action_name == "pause_all_normal":
action = sabnzbd.nzbqueue.NzbQueue.do.pause_on_prio action = sabnzbd.NzbQueue.pause_on_prio
arguments = [NORMAL_PRIORITY] arguments = [NORMAL_PRIORITY]
elif action_name == "pause_all_high": elif action_name == "pause_all_high":
action = sabnzbd.nzbqueue.NzbQueue.do.pause_on_prio action = sabnzbd.NzbQueue.pause_on_prio
arguments = [HIGH_PRIORITY] arguments = [HIGH_PRIORITY]
elif action_name == "resume_all_low": elif action_name == "resume_all_low":
action = sabnzbd.nzbqueue.NzbQueue.do.resume_on_prio action = sabnzbd.NzbQueue.resume_on_prio
arguments = [LOW_PRIORITY] arguments = [LOW_PRIORITY]
elif action_name == "resume_all_normal": elif action_name == "resume_all_normal":
action = sabnzbd.nzbqueue.NzbQueue.do.resume_on_prio action = sabnzbd.NzbQueue.resume_on_prio
arguments = [NORMAL_PRIORITY] arguments = [NORMAL_PRIORITY]
elif action_name == "resume_all_high": elif action_name == "resume_all_high":
action = sabnzbd.nzbqueue.NzbQueue.do.resume_on_prio action = sabnzbd.NzbQueue.resume_on_prio
arguments = [HIGH_PRIORITY] arguments = [HIGH_PRIORITY]
elif action_name == "pause_cat": elif action_name == "pause_cat":
action = sabnzbd.nzbqueue.NzbQueue.do.pause_on_cat action = sabnzbd.NzbQueue.pause_on_cat
arguments = [argument_list] arguments = [argument_list]
elif action_name == "resume_cat": elif action_name == "resume_cat":
action = sabnzbd.nzbqueue.NzbQueue.do.resume_on_cat action = sabnzbd.NzbQueue.resume_on_cat
arguments = [argument_list] arguments = [argument_list]
else: else:
logging.warning(T("Unknown action: %s"), action_name) logging.warning(T("Unknown action: %s"), action_name)
@ -193,7 +192,7 @@ def init():
sabnzbd.misc.check_latest_version, "VerCheck", d, None, (h, m), kronos.method.sequential, [], None sabnzbd.misc.check_latest_version, "VerCheck", d, None, (h, m), kronos.method.sequential, [], None
) )
action, hour, minute = sabnzbd.bpsmeter.BPSMeter.do.get_quota() action, hour, minute = sabnzbd.BPSMeter.get_quota()
if action: if action:
logging.info("Setting schedule for quota check daily at %s:%s", hour, minute) logging.info("Setting schedule for quota check daily at %s:%s", hour, minute)
__SCHED.add_daytime_task( __SCHED.add_daytime_task(
@ -215,7 +214,7 @@ def init():
logging.info("Setting schedule for midnight BPS reset") logging.info("Setting schedule for midnight BPS reset")
__SCHED.add_daytime_task( __SCHED.add_daytime_task(
sabnzbd.bpsmeter.midnight_action, sabnzbd.BPSMeter.midnight,
"midnight_bps", "midnight_bps",
list(range(1, 8)), list(range(1, 8)),
None, None,
@ -248,7 +247,7 @@ def restart(force=False):
SCHEDULE_GUARD_FLAG = False SCHEDULE_GUARD_FLAG = False
stop() stop()
analyse(sabnzbd.downloader.Downloader.do.paused) analyse(sabnzbd.Downloader.paused)
init() init()
start() start()
@ -406,13 +405,13 @@ def analyse(was_paused=False, priority=None):
sabnzbd.pause_all() sabnzbd.pause_all()
else: else:
sabnzbd.unpause_all() sabnzbd.unpause_all()
sabnzbd.downloader.Downloader.do.set_paused_state(paused or paused_all) sabnzbd.Downloader.set_paused_state(paused or paused_all)
PostProcessor.do.paused = pause_post sabnzbd.PostProcessor.paused = pause_post
if speedlimit is not None: if speedlimit is not None:
sabnzbd.downloader.Downloader.do.limit_speed(speedlimit) sabnzbd.Downloader.limit_speed(speedlimit)
sabnzbd.bpsmeter.BPSMeter.do.set_status(quota, action=False) sabnzbd.BPSMeter.set_status(quota, action=False)
for serv in servers: for serv in servers:
try: try:
@ -420,7 +419,7 @@ def analyse(was_paused=False, priority=None):
value = servers[serv] value = servers[serv]
if bool(item.enable()) != bool(value): if bool(item.enable()) != bool(value):
item.enable.set(value) item.enable.set(value)
sabnzbd.downloader.Downloader.do.init_server(serv, serv) sabnzbd.Downloader.init_server(serv, serv)
except: except:
pass pass
config.save_config() config.save_config()
@ -457,7 +456,7 @@ def plan_resume(interval):
__PAUSE_END = time.time() + (interval * 60) __PAUSE_END = time.time() + (interval * 60)
logging.debug("Schedule resume at %s", __PAUSE_END) logging.debug("Schedule resume at %s", __PAUSE_END)
__SCHED.add_single_task(__oneshot_resume, "", interval * 60, kronos.method.sequential, [__PAUSE_END], None) __SCHED.add_single_task(__oneshot_resume, "", interval * 60, kronos.method.sequential, [__PAUSE_END], None)
sabnzbd.downloader.Downloader.do.pause() sabnzbd.Downloader.pause()
else: else:
__PAUSE_END = None __PAUSE_END = None
sabnzbd.unpause_all() sabnzbd.unpause_all()

9
sabnzbd/urlgrabber.py

@ -36,8 +36,6 @@ import sabnzbd
from sabnzbd.constants import DEF_TIMEOUT, FUTURE_Q_FOLDER, VALID_NZB_FILES, Status, VALID_ARCHIVES from sabnzbd.constants import DEF_TIMEOUT, FUTURE_Q_FOLDER, VALID_NZB_FILES, Status, VALID_ARCHIVES
import sabnzbd.misc as misc import sabnzbd.misc as misc
import sabnzbd.filesystem import sabnzbd.filesystem
from sabnzbd.nzbqueue import NzbQueue
from sabnzbd.postproc import PostProcessor
import sabnzbd.cfg as cfg import sabnzbd.cfg as cfg
import sabnzbd.emailer as emailer import sabnzbd.emailer as emailer
import sabnzbd.notifier as notifier import sabnzbd.notifier as notifier
@ -67,11 +65,10 @@ class URLGrabber(Thread):
def __init__(self): def __init__(self):
Thread.__init__(self) Thread.__init__(self)
self.queue: queue.Queue[Tuple[str, sabnzbd.nzbstuff.NzbObject]] = queue.Queue() self.queue: queue.Queue[Tuple[str, sabnzbd.nzbstuff.NzbObject]] = queue.Queue()
for tup in NzbQueue.do.get_urls(): for tup in sabnzbd.NzbQueue.get_urls():
url, nzo = tup url, nzo = tup
self.queue.put((url, nzo)) self.queue.put((url, nzo))
self.shutdown = False self.shutdown = False
URLGrabber.do = self
def add(self, url, future_nzo, when=None): def add(self, url, future_nzo, when=None):
""" Add an URL to the URLGrabber queue, 'when' is seconds from now """ """ Add an URL to the URLGrabber queue, 'when' is seconds from now """
@ -330,8 +327,8 @@ class URLGrabber(Thread):
nzo.cat, _, nzo.script, _ = misc.cat_to_opts(nzo.cat, script=nzo.script) nzo.cat, _, nzo.script, _ = misc.cat_to_opts(nzo.cat, script=nzo.script)
# Add to history and run script if desired # Add to history and run script if desired
NzbQueue.do.remove(nzo.nzo_id, add_to_history=False) sabnzbd.NzbQueue.remove(nzo.nzo_id, add_to_history=False)
PostProcessor.do.process(nzo) sabnzbd.PostProcessor.process(nzo)
def _build_request(url): def _build_request(url):

Loading…
Cancel
Save