From 9bcbcaefdfecc85aedfd8e2f8aaa1ca7f959404e Mon Sep 17 00:00:00 2001 From: Safihre Date: Thu, 11 Jun 2020 10:16:53 +0200 Subject: [PATCH] Black code-style everything --- .github/workflows/black.yml | 22 +- SABnzbd.py | 674 ++++++++------- po/main/SABnzbd.pot | 70 +- sabnzbd/api.py | 1245 +++++++++++++++------------- sabnzbd/articlecache.py | 11 +- sabnzbd/assembler.py | 147 ++-- sabnzbd/bpsmeter.py | 132 +-- sabnzbd/directunpacker.py | 169 ++-- sabnzbd/downloader.py | 224 +++-- sabnzbd/encoding.py | 14 +- sabnzbd/interface.py | 1870 ++++++++++++++++++++++++----------------- sabnzbd/newsunpack.py | 1127 ++++++++++++++----------- sabnzbd/newswrapper.py | 104 ++- sabnzbd/nzbqueue.py | 172 ++-- sabnzbd/nzbstuff.py | 624 +++++++++----- sabnzbd/osxmenu.py | 229 +++-- sabnzbd/panic.py | 131 ++- sabnzbd/powersup.py | 92 +- sabnzbd/rating.py | 153 ++-- sabnzbd/sabtray.py | 90 +- sabnzbd/sabtraylinux.py | 32 +- sabnzbd/scheduler.py | 183 ++-- sabnzbd/skintext.py | 1935 ++++++++++++++++++++++--------------------- sabnzbd/sorting.py | 573 +++++++------ sabnzbd/urlgrabber.py | 178 ++-- sabnzbd/zconfig.py | 38 +- 26 files changed, 5890 insertions(+), 4349 deletions(-) diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml index 62cdb50..0d2f2a3 100644 --- a/.github/workflows/black.yml +++ b/.github/workflows/black.yml @@ -10,29 +10,11 @@ jobs: uses: lgeiger/black-action@v1.0.1 with: args: > + SABnzbd.py + sabnzbd scripts tools tests - sabnzbd/utils - sabnzbd/__init__.py - sabnzbd/cfg.py - sabnzbd/config.py - sabnzbd/emailer.py - sabnzbd/constants.py - sabnzbd/decorators.py - sabnzbd/decoder.py - sabnzbd/database.py - sabnzbd/getipaddress.py - sabnzbd/filesystem.py - sabnzbd/dirscanner.py - sabnzbd/postproc.py - sabnzbd/misc.py - sabnzbd/lang.py - sabnzbd/nzbparser.py - sabnzbd/notifier.py - sabnzbd/rss.py - sabnzbd/par2file.py - sabnzbd/version.py --line-length=120 --target-version=py35 --check diff --git a/SABnzbd.py b/SABnzbd.py index 3cc2b7c..1022d3e 100755 
--- a/SABnzbd.py +++ b/SABnzbd.py @@ -16,6 +16,7 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. import sys + if sys.hexversion < 0x03050000: print("Sorry, requires Python 3.5 or above") print("You can read more at: https://sabnzbd.org/python3") @@ -35,7 +36,8 @@ import re try: import Cheetah - if Cheetah.Version[0] != '3': + + if Cheetah.Version[0] != "3": raise ValueError import feedparser import configobj @@ -58,9 +60,17 @@ import sabnzbd.lang import sabnzbd.interface from sabnzbd.constants import * import sabnzbd.newsunpack -from sabnzbd.misc import check_latest_version, exit_sab, \ - split_host, create_https_certificates, windows_variant, ip_extract, \ - set_serv_parms, get_serv_parms, get_from_url +from sabnzbd.misc import ( + check_latest_version, + exit_sab, + split_host, + create_https_certificates, + windows_variant, + ip_extract, + set_serv_parms, + get_serv_parms, + get_from_url, +) from sabnzbd.filesystem import get_ext, real_path, long_path, globber_full, remove_file from sabnzbd.panic import panic_tmpl, panic_port, panic_host, panic, launch_a_browser import sabnzbd.scheduler as scheduler @@ -78,6 +88,7 @@ try: import win32service import win32ts import pywintypes + win32api.SetConsoleCtrlHandler(sabnzbd.sig_handler, True) from sabnzbd.utils.apireg import get_connection_info, set_connection_info, del_connection_info except ImportError: @@ -108,7 +119,7 @@ class GUIHandler(logging.Handler): def emit(self, record): """ Emit a record by adding it to our private queue """ - if record.levelname == 'WARNING': + if record.levelname == "WARNING": sabnzbd.LAST_WARNING = record.msg % record.args else: sabnzbd.LAST_ERROR = record.msg % record.args @@ -118,9 +129,9 @@ class GUIHandler(logging.Handler): self.store.pop(0) try: # Append traceback, if available - warning = {'type': record.levelname, 'text': record.msg % record.args, 'time': int(time.time())} + warning = {"type": record.levelname, "text": record.msg % record.args, 
"time": int(time.time())} if record.exc_info: - warning['text'] = '%s\n%s' % (warning['text'], traceback.format_exc()) + warning["text"] = "%s\n%s" % (warning["text"], traceback.format_exc()) self.store.append(warning) except UnicodeDecodeError: # Catch elusive Unicode conversion problems @@ -179,7 +190,9 @@ def print_help(): def print_version(): - print((""" + print( + ( + """ %s-%s Copyright (C) 2007-2020 The SABnzbd-Team @@ -188,7 +201,10 @@ This is free software, and you are welcome to redistribute it under certain conditions. It is licensed under the GNU GENERAL PUBLIC LICENSE Version 2 or (at your option) any later version. -""" % (sabnzbd.MY_NAME, sabnzbd.__version__))) +""" + % (sabnzbd.MY_NAME, sabnzbd.__version__) + ) + ) def daemonize(): @@ -205,7 +221,7 @@ def daemonize(): os.setsid() # Make sure I can read my own files and shut out others prev = os.umask(0) - os.umask(prev and int('077', 8)) + os.umask(prev and int("077", 8)) try: pid = os.fork() @@ -225,19 +241,19 @@ def daemonize(): remove_file(log_path) # Replace file descriptors for stdin, stdout, and stderr - with open('/dev/null', 'rb', 0) as f: + with open("/dev/null", "rb", 0) as f: os.dup2(f.fileno(), sys.stdin.fileno()) - with open(log_path, 'ab', 0) as f: + with open(log_path, "ab", 0) as f: os.dup2(f.fileno(), sys.stdout.fileno()) - with open(log_path, 'ab', 0) as f: + with open(log_path, "ab", 0) as f: os.dup2(f.fileno(), sys.stderr.fileno()) -def abort_and_show_error(browserhost, cherryport, err=''): +def abort_and_show_error(browserhost, cherryport, err=""): """ Abort program because of CherryPy troubles """ - logging.error(T('Failed to start web-interface') + ' : ' + str(err)) + logging.error(T("Failed to start web-interface") + " : " + str(err)) if not sabnzbd.DAEMON: - if '49' in err: + if "49" in err: panic_host(browserhost, cherryport) else: panic_port(browserhost, cherryport) @@ -251,24 +267,24 @@ def identify_web_template(key, defweb, wdir): try: wdir = fix_webname(key()) except: 
- wdir = '' + wdir = "" if not wdir: wdir = defweb if key: key.set(wdir) if not wdir: # No default value defined, accept empty path - return '' + return "" full_dir = real_path(sabnzbd.DIR_INTERFACES, wdir) full_main = real_path(full_dir, DEF_MAIN_TMPL) if not os.path.exists(full_main): - logging.warning(T('Cannot find web template: %s, trying standard template'), full_main) + logging.warning(T("Cannot find web template: %s, trying standard template"), full_main) full_dir = real_path(sabnzbd.DIR_INTERFACES, DEF_STDINTF) full_main = real_path(full_dir, DEF_MAIN_TMPL) if not os.path.exists(full_main): - logging.exception('Cannot find standard template: %s', full_dir) + logging.exception("Cannot find standard template: %s", full_dir) panic_tmpl(full_dir) exit_sab(1) @@ -278,27 +294,27 @@ def identify_web_template(key, defweb, wdir): def check_template_scheme(color, web_dir): """ Check existence of color-scheme """ - if color and os.path.exists(os.path.join(web_dir, 'static', 'stylesheets', 'colorschemes', color + '.css')): + if color and os.path.exists(os.path.join(web_dir, "static", "stylesheets", "colorschemes", color + ".css")): return color - elif color and os.path.exists(os.path.join(web_dir, 'static', 'stylesheets', 'colorschemes', color)): + elif color and os.path.exists(os.path.join(web_dir, "static", "stylesheets", "colorschemes", color)): return color else: - return '' + return "" def fix_webname(name): if name: xname = name.title() else: - xname = '' - if xname in ('Default', ): - return 'Glitter' - elif xname in ('Glitter', 'Plush'): + xname = "" + if xname in ("Default",): + return "Glitter" + elif xname in ("Glitter", "Plush"): return xname - elif xname in ('Wizard', ): + elif xname in ("Wizard",): return name.lower() - elif xname in ('Config',): - return 'Glitter' + elif xname in ("Config",): + return "Glitter" else: return name @@ -321,21 +337,22 @@ def get_user_profile_paths(vista_plus): elif sabnzbd.WIN32: try: from win32com.shell import shell, 
shellcon + path = shell.SHGetFolderPath(0, shellcon.CSIDL_APPDATA, None, 0) sabnzbd.DIR_APPDATA = os.path.join(path, DEF_WORKDIR) path = shell.SHGetFolderPath(0, shellcon.CSIDL_LOCAL_APPDATA, None, 0) sabnzbd.DIR_LCLDATA = os.path.join(path, DEF_WORKDIR) - sabnzbd.DIR_HOME = os.environ['USERPROFILE'] + sabnzbd.DIR_HOME = os.environ["USERPROFILE"] except: try: if vista_plus: - root = os.environ['AppData'] - user = os.environ['USERPROFILE'] - sabnzbd.DIR_APPDATA = '%s\\%s' % (root.replace('\\Roaming', '\\Local'), DEF_WORKDIR) + root = os.environ["AppData"] + user = os.environ["USERPROFILE"] + sabnzbd.DIR_APPDATA = "%s\\%s" % (root.replace("\\Roaming", "\\Local"), DEF_WORKDIR) sabnzbd.DIR_HOME = user else: - root = os.environ['USERPROFILE'] - sabnzbd.DIR_APPDATA = '%s\\%s' % (root, DEF_WORKDIR) + root = os.environ["USERPROFILE"] + sabnzbd.DIR_APPDATA = "%s\\%s" % (root, DEF_WORKDIR) sabnzbd.DIR_HOME = root sabnzbd.DIR_LCLDATA = sabnzbd.DIR_APPDATA except: @@ -348,24 +365,23 @@ def get_user_profile_paths(vista_plus): return elif sabnzbd.DARWIN: - home = os.environ.get('HOME') + home = os.environ.get("HOME") if home: - sabnzbd.DIR_APPDATA = '%s/Library/Application Support/SABnzbd' % home + sabnzbd.DIR_APPDATA = "%s/Library/Application Support/SABnzbd" % home sabnzbd.DIR_LCLDATA = sabnzbd.DIR_APPDATA sabnzbd.DIR_HOME = home return else: # Unix/Linux - home = os.environ.get('HOME') + home = os.environ.get("HOME") if home: - sabnzbd.DIR_APPDATA = '%s/.%s' % (home, DEF_WORKDIR) + sabnzbd.DIR_APPDATA = "%s/.%s" % (home, DEF_WORKDIR) sabnzbd.DIR_LCLDATA = sabnzbd.DIR_APPDATA sabnzbd.DIR_HOME = home return # Nothing worked - panic("Cannot access the user profile.", - "Please start with sabnzbd.ini file in another location") + panic("Cannot access the user profile.", "Please start with sabnzbd.ini file in another location") exit_sab(2) @@ -378,51 +394,61 @@ def print_modules(): # Something wrong with SABYenc, so let's determine and print what: if 
sabnzbd.decoder.SABYENC_VERSION: # We have a VERSION, thus a SABYenc module, but it's not the correct version - logging.error(T("SABYenc disabled: no correct version found! (Found v%s, expecting v%s)") % (sabnzbd.decoder.SABYENC_VERSION, sabnzbd.constants.SABYENC_VERSION_REQUIRED)) + logging.error( + T("SABYenc disabled: no correct version found! (Found v%s, expecting v%s)") + % (sabnzbd.decoder.SABYENC_VERSION, sabnzbd.constants.SABYENC_VERSION_REQUIRED) + ) else: # No SABYenc module at all - logging.error(T("SABYenc module... NOT found! Expecting v%s - https://sabnzbd.org/sabyenc") % sabnzbd.constants.SABYENC_VERSION_REQUIRED) + logging.error( + T("SABYenc module... NOT found! Expecting v%s - https://sabnzbd.org/sabyenc") + % sabnzbd.constants.SABYENC_VERSION_REQUIRED + ) # Do not allow downloading sabnzbd.NO_DOWNLOADING = True - logging.info('Cryptography module (v%s)... found!', cryptography.__version__) + logging.info("Cryptography module (v%s)... found!", cryptography.__version__) if sabnzbd.newsunpack.PAR2_COMMAND: logging.info("par2 binary... found (%s)", sabnzbd.newsunpack.PAR2_COMMAND) else: - logging.error(T('par2 binary... NOT found!')) + logging.error(T("par2 binary... NOT found!")) # Do not allow downloading sabnzbd.NO_DOWNLOADING = True if sabnzbd.newsunpack.MULTIPAR_COMMAND: logging.info("MultiPar binary... found (%s)", sabnzbd.newsunpack.MULTIPAR_COMMAND) elif sabnzbd.WIN32: - logging.error('%s %s' % (T('MultiPar binary... NOT found!'), T('Verification and repair will not be possible.'))) + logging.error( + "%s %s" % (T("MultiPar binary... NOT found!"), T("Verification and repair will not be possible.")) + ) if sabnzbd.newsunpack.RAR_COMMAND: logging.info("UNRAR binary... 
found (%s)", sabnzbd.newsunpack.RAR_COMMAND) # Report problematic unrar if sabnzbd.newsunpack.RAR_PROBLEM and not sabnzbd.cfg.ignore_wrong_unrar(): - have_str = '%.2f' % (float(sabnzbd.newsunpack.RAR_VERSION) / 100) - want_str = '%.2f' % (float(sabnzbd.constants.REC_RAR_VERSION) / 100) - logging.warning(T('Your UNRAR version is %s, we recommend version %s or higher.
') % (have_str, want_str)) + have_str = "%.2f" % (float(sabnzbd.newsunpack.RAR_VERSION) / 100) + want_str = "%.2f" % (float(sabnzbd.constants.REC_RAR_VERSION) / 100) + logging.warning( + T("Your UNRAR version is %s, we recommend version %s or higher.
") % (have_str, want_str) + ) elif not (sabnzbd.WIN32 or sabnzbd.DARWIN): - logging.info('UNRAR binary version %.2f', (float(sabnzbd.newsunpack.RAR_VERSION) / 100)) + logging.info("UNRAR binary version %.2f", (float(sabnzbd.newsunpack.RAR_VERSION) / 100)) else: - logging.error(T('unrar binary... NOT found')) + logging.error(T("unrar binary... NOT found")) # Do not allow downloading sabnzbd.NO_DOWNLOADING = True if sabnzbd.newsunpack.ZIP_COMMAND: logging.info("unzip binary... found (%s)", sabnzbd.newsunpack.ZIP_COMMAND) else: - logging.info(T('unzip binary... NOT found!')) + logging.info(T("unzip binary... NOT found!")) if sabnzbd.newsunpack.SEVEN_COMMAND: logging.info("7za binary... found (%s)", sabnzbd.newsunpack.SEVEN_COMMAND) else: - logging.info(T('7za binary... NOT found!')) + logging.info(T("7za binary... NOT found!")) if not sabnzbd.WIN32: if sabnzbd.newsunpack.NICE_COMMAND: @@ -436,22 +462,22 @@ def print_modules(): # Show fatal warning if sabnzbd.NO_DOWNLOADING: - logging.error(T('Essential modules are missing, downloading cannot start.')) + logging.error(T("Essential modules are missing, downloading cannot start.")) def all_localhosts(): """ Return all unique values of localhost in order of preference """ - ips = ['127.0.0.1'] + ips = ["127.0.0.1"] try: # Check whether IPv6 is available and enabled - info = socket.getaddrinfo('::1', None) + info = socket.getaddrinfo("::1", None) af, socktype, proto, _canonname, _sa = info[0] s = socket.socket(af, socktype, proto) s.close() except socket.error: return ips try: - info = socket.getaddrinfo('localhost', None) + info = socket.getaddrinfo("localhost", None) except socket.error: # localhost does not resolve return ips @@ -462,7 +488,7 @@ def all_localhosts(): if not isinstance(item, str): continue # Only return IPv6 when enabled - if item not in ips and ('::1' not in item or sabnzbd.cfg.ipv6_hosting()): + if item not in ips and ("::1" not in item or sabnzbd.cfg.ipv6_hosting()): ips.append(item) return ips @@ 
-481,10 +507,10 @@ def get_webhost(cherryhost, cherryport, https_port): """ Determine the webhost address and port, return (host, port, browserhost) """ - if cherryhost == '0.0.0.0' and not check_resolve('127.0.0.1'): - cherryhost = '' - elif cherryhost == '::' and not check_resolve('::1'): - cherryhost = '' + if cherryhost == "0.0.0.0" and not check_resolve("127.0.0.1"): + cherryhost = "" + elif cherryhost == "::" and not check_resolve("::1"): + cherryhost = "" if cherryhost is None: cherryhost = sabnzbd.cfg.cherryhost() @@ -494,7 +520,7 @@ def get_webhost(cherryhost, cherryport, https_port): # Get IP address, but discard APIPA/IPV6 # If only APIPA's or IPV6 are found, fall back to localhost ipv4 = ipv6 = False - localhost = hostip = 'localhost' + localhost = hostip = "localhost" try: info = socket.getaddrinfo(socket.gethostname(), None) except socket.error: @@ -503,25 +529,25 @@ def get_webhost(cherryhost, cherryport, https_port): # Valid user defined name? info = socket.getaddrinfo(cherryhost, None) except socket.error: - if cherryhost not in ('localhost', '127.0.0.1', '::1'): - cherryhost = '0.0.0.0' + if cherryhost not in ("localhost", "127.0.0.1", "::1"): + cherryhost = "0.0.0.0" try: info = socket.getaddrinfo(localhost, None) except socket.error: - info = socket.getaddrinfo('127.0.0.1', None) - localhost = '127.0.0.1' + info = socket.getaddrinfo("127.0.0.1", None) + localhost = "127.0.0.1" for item in info: ip = str(item[4][0]) - if ip.startswith('169.254.'): + if ip.startswith("169.254."): pass # Automatic Private IP Addressing (APIPA) - elif ':' in ip: + elif ":" in ip: ipv6 = True - elif '.' in ip and not ipv4: + elif "." 
in ip and not ipv4: ipv4 = True hostip = ip # A blank host will use the local ip address - if cherryhost == '': + if cherryhost == "": if ipv6 and ipv4: # To protect Firefox users, use numeric IP cherryhost = hostip @@ -531,23 +557,23 @@ def get_webhost(cherryhost, cherryport, https_port): browserhost = cherryhost # 0.0.0.0 will listen on all ipv4 interfaces (no ipv6 addresses) - elif cherryhost == '0.0.0.0': + elif cherryhost == "0.0.0.0": # Just take the gamble for this - cherryhost = '0.0.0.0' + cherryhost = "0.0.0.0" browserhost = localhost # :: will listen on all ipv6 interfaces (no ipv4 addresses) - elif cherryhost in ('::', '[::]'): - cherryhost = cherryhost.strip('[').strip(']') + elif cherryhost in ("::", "[::]"): + cherryhost = cherryhost.strip("[").strip("]") # Assume '::1' == 'localhost' browserhost = localhost # IPV6 address - elif '[' in cherryhost or ':' in cherryhost: + elif "[" in cherryhost or ":" in cherryhost: browserhost = cherryhost # IPV6 numeric address - elif cherryhost.replace('.', '').isdigit(): + elif cherryhost.replace(".", "").isdigit(): # IPV4 numerical browserhost = cherryhost @@ -561,35 +587,34 @@ def get_webhost(cherryhost, cherryport, https_port): cherryhost = hostip browserhost = cherryhost - # Some systems don't like brackets in numerical ipv6 + # Some systems don't like brackets in numerical ipv6 if sabnzbd.DARWIN: - cherryhost = cherryhost.strip('[]') + cherryhost = cherryhost.strip("[]") else: try: socket.getaddrinfo(cherryhost, None) except socket.error: - cherryhost = cherryhost.strip('[]') + cherryhost = cherryhost.strip("[]") - if ipv6 and ipv4 and \ - (browserhost not in ('localhost', '127.0.0.1', '[::1]', '::1')): + if ipv6 and ipv4 and (browserhost not in ("localhost", "127.0.0.1", "[::1]", "::1")): sabnzbd.AMBI_LOCALHOST = True logging.info("IPV6 has priority on this system, potential Firefox issue") - if ipv6 and ipv4 and cherryhost == '' and sabnzbd.WIN32: - logging.warning(T('Please be aware the 0.0.0.0 hostname 
will need an IPv6 address for external access')) + if ipv6 and ipv4 and cherryhost == "" and sabnzbd.WIN32: + logging.warning(T("Please be aware the 0.0.0.0 hostname will need an IPv6 address for external access")) - if cherryhost == 'localhost' and not sabnzbd.WIN32 and not sabnzbd.DARWIN: + if cherryhost == "localhost" and not sabnzbd.WIN32 and not sabnzbd.DARWIN: # On the Ubuntu family, localhost leads to problems for CherryPy ips = ip_extract() - if '127.0.0.1' in ips and '::1' in ips: - cherryhost = '127.0.0.1' - if ips[0] != '127.0.0.1': - browserhost = '127.0.0.1' + if "127.0.0.1" in ips and "::1" in ips: + cherryhost = "127.0.0.1" + if ips[0] != "127.0.0.1": + browserhost = "127.0.0.1" # This is to please Chrome on OSX - if cherryhost == 'localhost' and sabnzbd.DARWIN: - cherryhost = '127.0.0.1' - browserhost = 'localhost' + if cherryhost == "localhost" and sabnzbd.DARWIN: + cherryhost = "127.0.0.1" + browserhost = "localhost" if cherryport is None: cherryport = sabnzbd.cfg.cherryport.get_int() @@ -606,18 +631,18 @@ def get_webhost(cherryhost, cherryport, https_port): if cherryport == https_port and sabnzbd.cfg.enable_https(): sabnzbd.cfg.enable_https.set(False) # Should have a translated message, but that's not available yet - logging.error(T('HTTP and HTTPS ports cannot be the same')) + logging.error(T("HTTP and HTTPS ports cannot be the same")) return cherryhost, cherryport, browserhost, https_port def attach_server(host, port, cert=None, key=None, chain=None): """ Define and attach server, optionally HTTPS """ - if sabnzbd.cfg.ipv6_hosting() or '::1' not in host: + if sabnzbd.cfg.ipv6_hosting() or "::1" not in host: http_server = cherrypy._cpserver.Server() http_server.bind_addr = (host, port) if cert and key: - http_server.ssl_module = 'builtin' + http_server.ssl_module = "builtin" http_server.ssl_certificate = cert http_server.ssl_private_key = key http_server.ssl_certificate_chain = chain @@ -627,12 +652,12 @@ def attach_server(host, port, cert=None, 
key=None, chain=None): def is_sabnzbd_running(url): """ Return True when there's already a SABnzbd instance running. """ try: - url = '%s&mode=version' % url + url = "%s&mode=version" % url # Do this without certificate verification, few installations will have that prev = sabnzbd.set_https_verification(False) ver = get_from_url(url) sabnzbd.set_https_verification(prev) - return ver and (re.search(r'\d+\.\d+\.', ver) or ver.strip() == sabnzbd.__version__) + return ver and (re.search(r"\d+\.\d+\.", ver) or ver.strip() == sabnzbd.__version__) except: return False @@ -660,6 +685,7 @@ def check_for_sabnzbd(url, upload_nzbs, allow_browser=True): # Upload any specified nzb files to the running instance if upload_nzbs: from sabnzbd.utils.upload import upload_file + prev = sabnzbd.set_https_verification(False) for f in upload_nzbs: upload_file(url, f) @@ -667,7 +693,7 @@ def check_for_sabnzbd(url, upload_nzbs, allow_browser=True): else: # Launch the web browser and quit since sabnzbd is already running # Trim away everything after the final slash in the URL - url = url[:url.rfind('/') + 1] + url = url[: url.rfind("/") + 1] launch_a_browser(url, force=allow_browser) exit_sab(0) return True @@ -684,11 +710,11 @@ def evaluate_inipath(path): inipath = os.path.join(path, DEF_INI_FILE) if os.path.isdir(path): return inipath - elif os.path.isfile(path) or os.path.isfile(path + '.bak'): + elif os.path.isfile(path) or os.path.isfile(path + ".bak"): return path else: _dirpart, name = os.path.split(path) - if name.find('.') < 1: + if name.find(".") < 1: return inipath else: return path @@ -699,47 +725,75 @@ def commandline_handler(): Returns: service, sab_opts, serv_opts, upload_nzbs """ - service = '' + service = "" sab_opts = [] serv_opts = [os.path.normpath(os.path.abspath(sys.argv[0]))] upload_nzbs = [] # OSX binary: get rid of the weird -psn_0_123456 parameter for arg in sys.argv: - if arg.startswith('-psn_'): + if arg.startswith("-psn_"): sys.argv.remove(arg) break # Ugly hack 
to remove the extra "SABnzbd*" parameter the Windows binary # gets when it's restarted - if len(sys.argv) > 1 and \ - 'sabnzbd' in sys.argv[1].lower() and \ - not sys.argv[1].startswith('-'): + if len(sys.argv) > 1 and "sabnzbd" in sys.argv[1].lower() and not sys.argv[1].startswith("-"): slice_start = 2 else: slice_start = 1 # Prepend options from env-variable to options - info = os.environ.get('SABnzbd', '').split() + info = os.environ.get("SABnzbd", "").split() info.extend(sys.argv[slice_start:]) try: - opts, args = getopt.getopt(info, "phdvncwl:s:f:t:b:2:", - ['pause', 'help', 'daemon', 'nobrowser', 'clean', 'logging=', - 'weblogging', 'server=', 'templates', 'ipv6_hosting=', - 'template2', 'browser=', 'config-file=', 'force', 'disable-file-log', - 'version', 'https=', 'autorestarted', 'repair', 'repair-all', - 'log-all', 'no-login', 'pid=', 'new', 'console', 'pidfile=', - # Below Win32 Service options - 'password=', 'username=', 'startup=', 'perfmonini=', 'perfmondll=', - 'interactive', 'wait=', - ]) + opts, args = getopt.getopt( + info, + "phdvncwl:s:f:t:b:2:", + [ + "pause", + "help", + "daemon", + "nobrowser", + "clean", + "logging=", + "weblogging", + "server=", + "templates", + "ipv6_hosting=", + "template2", + "browser=", + "config-file=", + "force", + "disable-file-log", + "version", + "https=", + "autorestarted", + "repair", + "repair-all", + "log-all", + "no-login", + "pid=", + "new", + "console", + "pidfile=", + # Below Win32 Service options + "password=", + "username=", + "startup=", + "perfmonini=", + "perfmondll=", + "interactive", + "wait=", + ], + ) except getopt.GetoptError: print_help() exit_sab(2) # Check for Win32 service commands - if args and args[0] in ('install', 'update', 'remove', 'start', 'stop', 'restart', 'debug'): + if args and args[0] in ("install", "update", "remove", "start", "stop", "restart", "debug"): service = args[0] serv_opts.extend(args) @@ -750,14 +804,14 @@ def commandline_handler(): 
upload_nzbs.append(os.path.abspath(entry)) for opt, arg in opts: - if opt in ('password', 'username', 'startup', 'perfmonini', 'perfmondll', 'interactive', 'wait'): + if opt in ("password", "username", "startup", "perfmonini", "perfmondll", "interactive", "wait"): # Service option, just collect if service: serv_opts.append(opt) if arg: serv_opts.append(arg) else: - if opt == '-f': + if opt == "-f": arg = os.path.normpath(os.path.abspath(arg)) sab_opts.append((opt, arg)) @@ -767,7 +821,7 @@ def commandline_handler(): def get_f_option(opts): """ Return value of the -f option """ for opt, arg in opts: - if opt == '-f': + if opt == "-f": return arg else: return None @@ -806,39 +860,39 @@ def main(): _service, sab_opts, _serv_opts, upload_nzbs = commandline_handler() for opt, arg in sab_opts: - if opt == '--servicecall': + if opt == "--servicecall": sabnzbd.MY_FULLNAME = arg - elif opt in ('-d', '--daemon'): + elif opt in ("-d", "--daemon"): if not sabnzbd.WIN32: fork = True autobrowser = False sabnzbd.DAEMON = True sabnzbd.RESTART_ARGS.append(opt) - elif opt in ('-f', '--config-file'): + elif opt in ("-f", "--config-file"): inifile = arg sabnzbd.RESTART_ARGS.append(opt) sabnzbd.RESTART_ARGS.append(arg) - elif opt in ('-h', '--help'): + elif opt in ("-h", "--help"): print_help() exit_sab(0) - elif opt in ('-t', '--templates'): + elif opt in ("-t", "--templates"): web_dir = arg - elif opt in ('-s', '--server'): + elif opt in ("-s", "--server"): (cherryhost, cherryport) = split_host(arg) - elif opt in ('-n', '--nobrowser'): + elif opt in ("-n", "--nobrowser"): autobrowser = False - elif opt in ('-b', '--browser'): + elif opt in ("-b", "--browser"): try: autobrowser = bool(int(arg)) except ValueError: autobrowser = True - elif opt == '--autorestarted': + elif opt == "--autorestarted": autorestarted = True - elif opt in ('-c', '--clean'): + elif opt in ("-c", "--clean"): clean_up = True - elif opt in ('-w', '--weblogging'): + elif opt in ("-w", "--weblogging"): 
cherrypylogging = True - elif opt in ('-l', '--logging'): + elif opt in ("-l", "--logging"): try: logging_level = int(arg) except: @@ -846,38 +900,38 @@ def main(): if logging_level < -1 or logging_level > 2: print_help() exit_sab(1) - elif opt in ('-v', '--version'): + elif opt in ("-v", "--version"): print_version() exit_sab(0) - elif opt in ('-p', '--pause'): + elif opt in ("-p", "--pause"): pause = True - elif opt == '--https': + elif opt == "--https": https_port = int(arg) sabnzbd.RESTART_ARGS.append(opt) sabnzbd.RESTART_ARGS.append(arg) - elif opt == '--repair': + elif opt == "--repair": repair = 1 pause = True - elif opt == '--repair-all': + elif opt == "--repair-all": repair = 2 pause = True - elif opt == '--log-all': + elif opt == "--log-all": sabnzbd.LOG_ALL = True - elif opt == '--disable-file-log': + elif opt == "--disable-file-log": no_file_log = True - elif opt == '--no-login': + elif opt == "--no-login": no_login = True - elif opt == '--pid': + elif opt == "--pid": pid_path = arg sabnzbd.RESTART_ARGS.append(opt) sabnzbd.RESTART_ARGS.append(arg) - elif opt == '--pidfile': + elif opt == "--pidfile": pid_file = arg sabnzbd.RESTART_ARGS.append(opt) sabnzbd.RESTART_ARGS.append(arg) - elif opt == '--new': + elif opt == "--new": new_instance = True - elif opt == '--ipv6_hosting': + elif opt == "--ipv6_hosting": ipv6_hosting = arg sabnzbd.MY_FULLNAME = os.path.normpath(os.path.abspath(sabnzbd.MY_FULLNAME)) @@ -888,7 +942,7 @@ def main(): org_dir = os.getcwd() # Need console logging for SABnzbd.py and SABnzbd-console.exe - console_logging = (not hasattr(sys, "frozen")) or (sabnzbd.MY_NAME.lower().find('-console') > 0) + console_logging = (not hasattr(sys, "frozen")) or (sabnzbd.MY_NAME.lower().find("-console") > 0) console_logging = console_logging and not sabnzbd.DAEMON LOGLEVELS = (logging.FATAL, logging.WARNING, logging.INFO, logging.DEBUG) @@ -896,12 +950,12 @@ def main(): # Setup primary logging to prevent default console logging gui_log = 
GUIHandler(MAX_WARNINGS) gui_log.setLevel(logging.WARNING) - format_gui = '%(asctime)s\n%(levelname)s\n%(message)s' + format_gui = "%(asctime)s\n%(levelname)s\n%(message)s" gui_log.setFormatter(logging.Formatter(format_gui)) sabnzbd.GUIHANDLER = gui_log # Create logger - logger = logging.getLogger('') + logger = logging.getLogger("") logger.setLevel(logging.WARNING) logger.addHandler(gui_log) @@ -929,18 +983,18 @@ def main(): # All system data dirs are relative to the place we found the INI file sabnzbd.DIR_LCLDATA = os.path.dirname(inifile) - if not os.path.exists(inifile) and not os.path.exists(inifile + '.bak') and not os.path.exists(sabnzbd.DIR_LCLDATA): + if not os.path.exists(inifile) and not os.path.exists(inifile + ".bak") and not os.path.exists(sabnzbd.DIR_LCLDATA): try: os.makedirs(sabnzbd.DIR_LCLDATA) except IOError: - panic('Cannot create folder "%s".' % sabnzbd.DIR_LCLDATA, 'Check specified INI file location.') + panic('Cannot create folder "%s".' % sabnzbd.DIR_LCLDATA, "Check specified INI file location.") exit_sab(1) sabnzbd.cfg.set_root_folders(sabnzbd.DIR_HOME, sabnzbd.DIR_LCLDATA) res, msg = config.read_config(inifile) if not res: - panic(msg, 'Specify a correct file or delete this file.') + panic(msg, "Specify a correct file or delete this file.") exit_sab(1) # Set root folders for HTTPS server file paths @@ -961,13 +1015,13 @@ def main(): except IOError: abort_and_show_error(browserhost, cherryport) except: - abort_and_show_error(browserhost, cherryport, '49') + abort_and_show_error(browserhost, cherryport, "49") try: portend.free(cherryhost, cherryport, timeout=0.05) except IOError: abort_and_show_error(browserhost, cherryport) except: - abort_and_show_error(browserhost, cherryport, '49') + abort_and_show_error(browserhost, cherryport, "49") # Windows instance is reachable through registry url = None @@ -982,11 +1036,11 @@ def main(): try: portend.free(browserhost, port, timeout=0.05) except IOError as error: - if str(error) == 'Port not 
bound.': + if str(error) == "Port not bound.": pass else: if not url: - url = 'https://%s:%s%s/api?' % (browserhost, port, sabnzbd.cfg.url_base()) + url = "https://%s:%s%s/api?" % (browserhost, port, sabnzbd.cfg.url_base()) if new_instance or not check_for_sabnzbd(url, upload_nzbs, autobrowser): # Bail out if we have fixed our ports after first start-up if sabnzbd.cfg.fixed_ports(): @@ -1004,18 +1058,18 @@ def main(): sabnzbd.cfg.cherryport.set(newport) except: # Something else wrong, probably badly specified host - abort_and_show_error(browserhost, cherryport, '49') + abort_and_show_error(browserhost, cherryport, "49") # NonSSL check if there's no HTTPS or we only use 1 port if not (enable_https and not https_port): try: portend.free(browserhost, cherryport, timeout=0.05) except IOError as error: - if str(error) == 'Port not bound.': + if str(error) == "Port not bound.": pass else: if not url: - url = 'http://%s:%s%s/api?' % (browserhost, cherryport, sabnzbd.cfg.url_base()) + url = "http://%s:%s%s/api?" 
% (browserhost, cherryport, sabnzbd.cfg.url_base()) if new_instance or not check_for_sabnzbd(url, upload_nzbs, autobrowser): # Bail out if we have fixed our ports after first start-up if sabnzbd.cfg.fixed_ports(): @@ -1027,7 +1081,7 @@ def main(): cherryport = port except: # Something else wrong, probably badly specified host - abort_and_show_error(browserhost, cherryport, '49') + abort_and_show_error(browserhost, cherryport, "49") # We found a port, now we never check again sabnzbd.cfg.fixed_ports.set(True) @@ -1057,15 +1111,14 @@ def main(): else: sabnzbd.cfg.log_level.set(logging_level) sabnzbd.LOGFILE = os.path.join(logdir, DEF_LOG_FILE) - logformat = '%(asctime)s::%(levelname)s::[%(module)s:%(lineno)d] %(message)s' + logformat = "%(asctime)s::%(levelname)s::[%(module)s:%(lineno)d] %(message)s" logger.setLevel(LOGLEVELS[logging_level + 1]) try: if not no_file_log: rollover_log = logging.handlers.RotatingFileHandler( - sabnzbd.LOGFILE, 'a+', - sabnzbd.cfg.log_size.get_int(), - sabnzbd.cfg.log_backups()) + sabnzbd.LOGFILE, "a+", sabnzbd.cfg.log_size.get_int(), sabnzbd.cfg.log_backups() + ) rollover_log.setFormatter(logging.Formatter(logformat)) logger.addHandler(rollover_log) @@ -1084,34 +1137,39 @@ def main(): console.setFormatter(logging.Formatter(logformat)) logger.addHandler(console) if no_file_log: - logging.info('Console logging only') + logging.info("Console logging only") - logging.info('--------------------------------') - logging.info('%s-%s (rev=%s)', sabnzbd.MY_NAME, sabnzbd.__version__, sabnzbd.__baseline__) - logging.info('Full executable path = %s', sabnzbd.MY_FULLNAME) + logging.info("--------------------------------") + logging.info("%s-%s (rev=%s)", sabnzbd.MY_NAME, sabnzbd.__version__, sabnzbd.__baseline__) + logging.info("Full executable path = %s", sabnzbd.MY_FULLNAME) if sabnzbd.WIN32: - suffix = '' + suffix = "" if win64: - suffix = '(win64)' + suffix = "(win64)" try: - logging.info('Platform = %s %s', platform.platform(), suffix) + 
logging.info("Platform = %s %s", platform.platform(), suffix) except: - logging.info('Platform = %s ', suffix) + logging.info("Platform = %s ", suffix) else: - logging.info('Platform = %s', os.name) - logging.info('Python-version = %s', sys.version) - logging.info('Arguments = %s', sabnzbd.CMDLINE) + logging.info("Platform = %s", os.name) + logging.info("Python-version = %s", sys.version) + logging.info("Arguments = %s", sabnzbd.CMDLINE) if sabnzbd.DOCKER: logging.info("Running inside a docker container") else: logging.info("Not inside a docker container") # Find encoding; relevant for external processing activities - logging.info('Preferred encoding = %s', sabnzbd.encoding.CODEPAGE) + logging.info("Preferred encoding = %s", sabnzbd.encoding.CODEPAGE) # On Linux/FreeBSD/Unix "UTF-8" is strongly, strongly adviced: - if not sabnzbd.WIN32 and not sabnzbd.DARWIN and not ('utf-8' in sabnzbd.encoding.CODEPAGE.lower()): - logging.warning(T("SABnzbd was started with encoding %s, this should be UTF-8. Expect problems with Unicoded file and directory names in downloads.") % sabnzbd.encoding.CODEPAGE) + if not sabnzbd.WIN32 and not sabnzbd.DARWIN and not ("utf-8" in sabnzbd.encoding.CODEPAGE.lower()): + logging.warning( + T( + "SABnzbd was started with encoding %s, this should be UTF-8. Expect problems with Unicoded file and directory names in downloads." 
+ ) + % sabnzbd.encoding.CODEPAGE + ) # SSL Information logging.info("SSL version = %s", ssl.OPENSSL_VERSION) @@ -1120,48 +1178,49 @@ def main(): if hasattr(sys, "frozen") and (sabnzbd.WIN32 or sabnzbd.DARWIN): # The certifi package brings the latest certificates on build # This will cause the create_default_context to load it automatically - os.environ["SSL_CERT_FILE"] = os.path.join(sabnzbd.DIR_PROG, 'cacert.pem') - logging.info('Loaded additional certificates from %s', os.environ["SSL_CERT_FILE"]) + os.environ["SSL_CERT_FILE"] = os.path.join(sabnzbd.DIR_PROG, "cacert.pem") + logging.info("Loaded additional certificates from %s", os.environ["SSL_CERT_FILE"]) # Extra startup info if sabnzbd.cfg.log_level() > 1: # List the number of certificates available (can take up to 1.5 seconds) ctx = ssl.create_default_context() - logging.debug('Available certificates: %s', repr(ctx.cert_store_stats())) + logging.debug("Available certificates: %s", repr(ctx.cert_store_stats())) # Show IPv4/IPv6 address from sabnzbd.getipaddress import localipv4, publicipv4, ipv6 mylocalipv4 = localipv4() if mylocalipv4: - logging.debug('My local IPv4 address = %s', mylocalipv4) + logging.debug("My local IPv4 address = %s", mylocalipv4) else: - logging.debug('Could not determine my local IPv4 address') + logging.debug("Could not determine my local IPv4 address") mypublicipv4 = publicipv4() if mypublicipv4: - logging.debug('My public IPv4 address = %s', mypublicipv4) + logging.debug("My public IPv4 address = %s", mypublicipv4) else: - logging.debug('Could not determine my public IPv4 address') + logging.debug("Could not determine my public IPv4 address") myipv6 = ipv6() if myipv6: - logging.debug('My IPv6 address = %s', myipv6) + logging.debug("My IPv6 address = %s", myipv6) else: - logging.debug('Could not determine my IPv6 address') + logging.debug("Could not determine my IPv6 address") # Measure and log system performance measured by pystone and - if possible - CPU model from 
sabnzbd.utils.getperformance import getpystone, getcpu + pystoneperf = getpystone() if pystoneperf: - logging.debug('CPU Pystone available performance = %s', pystoneperf) + logging.debug("CPU Pystone available performance = %s", pystoneperf) else: - logging.debug('CPU Pystone available performance could not be calculated') + logging.debug("CPU Pystone available performance could not be calculated") cpumodel = getcpu() # Linux only if cpumodel: - logging.debug('CPU model = %s', cpumodel) + logging.debug("CPU model = %s", cpumodel) - logging.info('Using INI file %s', inifile) + logging.info("Using INI file %s", inifile) if autobrowser is not None: sabnzbd.cfg.autobrowser.set(autobrowser) @@ -1171,8 +1230,8 @@ def main(): os.chdir(sabnzbd.DIR_PROG) sabnzbd.WEB_DIR = identify_web_template(sabnzbd.cfg.web_dir, DEF_STDINTF, fix_webname(web_dir)) - sabnzbd.WEB_DIR_CONFIG = identify_web_template(None, DEF_STDCONFIG, '') - sabnzbd.WIZARD_DIR = os.path.join(sabnzbd.DIR_INTERFACES, 'wizard') + sabnzbd.WEB_DIR_CONFIG = identify_web_template(None, DEF_STDCONFIG, "") + sabnzbd.WIZARD_DIR = os.path.join(sabnzbd.DIR_INTERFACES, "wizard") sabnzbd.WEB_COLOR = check_template_scheme(sabnzbd.cfg.web_color(), sabnzbd.WEB_DIR) sabnzbd.cfg.web_color.set(sabnzbd.WEB_COLOR) @@ -1181,13 +1240,16 @@ def main(): if sabnzbd.cfg.win_menu() and not sabnzbd.DAEMON: if sabnzbd.WIN32: import sabnzbd.sabtray + sabnzbd.WINTRAY = sabnzbd.sabtray.SABTrayThread() - elif sabnzbd.LINUX_POWER and os.environ.get('DISPLAY'): + elif sabnzbd.LINUX_POWER and os.environ.get("DISPLAY"): try: import gi - gi.require_version('Gtk', '3.0') + + gi.require_version("Gtk", "3.0") from gi.repository import Gtk import sabnzbd.sabtraylinux + sabnzbd.LINUXTRAY = sabnzbd.sabtraylinux.StatusIcon() except: logging.info("python3-gi not found, no SysTray.") @@ -1209,7 +1271,7 @@ def main(): create_https_certificates(https_cert, https_key) if not (os.path.exists(https_cert) and os.path.exists(https_key)): - 
logging.warning(T('Disabled HTTPS because of missing CERT and KEY files')) + logging.warning(T("Disabled HTTPS because of missing CERT and KEY files")) enable_https = False sabnzbd.cfg.enable_https.set(False) @@ -1219,7 +1281,7 @@ def main(): trialcontext.load_cert_chain(https_cert, https_key) logging.info("HTTPS keys are OK") except: - logging.warning(T('Disabled HTTPS because of invalid CERT and KEY files')) + logging.warning(T("Disabled HTTPS because of invalid CERT and KEY files")) logging.info("Traceback: ", exc_info=True) enable_https = False sabnzbd.cfg.enable_https.set(False) @@ -1227,14 +1289,14 @@ def main(): # Starting of the webserver # Determine if this system has multiple definitions for 'localhost' hosts = all_localhosts() - multilocal = len(hosts) > 1 and cherryhost in ('localhost', '0.0.0.0') + multilocal = len(hosts) > 1 and cherryhost in ("localhost", "0.0.0.0") # For 0.0.0.0 CherryPy will always pick IPv4, so make sure the secondary localhost is IPv6 - if multilocal and cherryhost == '0.0.0.0' and hosts[1] == '127.0.0.1': - hosts[1] = '::1' + if multilocal and cherryhost == "0.0.0.0" and hosts[1] == "127.0.0.1": + hosts[1] = "::1" # The Windows binary requires numeric localhost as primary address - if cherryhost == 'localhost': + if cherryhost == "localhost": cherryhost = hosts[0] if enable_https: @@ -1251,40 +1313,48 @@ def main(): # Extra HTTPS port for secondary localhost attach_server(hosts[1], cherryport, https_cert, https_key, https_chain) - cherrypy.config.update({'server.ssl_module': 'builtin', - 'server.ssl_certificate': https_cert, - 'server.ssl_private_key': https_key, - 'server.ssl_certificate_chain': https_chain}) + cherrypy.config.update( + { + "server.ssl_module": "builtin", + "server.ssl_certificate": https_cert, + "server.ssl_private_key": https_key, + "server.ssl_certificate_chain": https_chain, + } + ) elif multilocal: # Extra HTTP port for secondary localhost attach_server(hosts[1], cherryport) if no_login: - 
sabnzbd.cfg.username.set('') - sabnzbd.cfg.password.set('') - - mime_gzip = ('text/*', - 'application/javascript', - 'application/x-javascript', - 'application/json', - 'application/xml', - 'application/vnd.ms-fontobject', - 'application/font*', - 'image/svg+xml' - ) - cherrypy.config.update({'server.environment': 'production', - 'server.socket_host': cherryhost, - 'server.socket_port': cherryport, - 'server.shutdown_timeout': 0, - 'log.screen': False, - 'engine.autoreload.on': False, - 'tools.encode.on': True, - 'tools.gzip.on': True, - 'tools.gzip.mime_types': mime_gzip, - 'request.show_tracebacks': True, - 'error_page.401': sabnzbd.panic.error_page_401, - 'error_page.404': sabnzbd.panic.error_page_404 - }) + sabnzbd.cfg.username.set("") + sabnzbd.cfg.password.set("") + + mime_gzip = ( + "text/*", + "application/javascript", + "application/x-javascript", + "application/json", + "application/xml", + "application/vnd.ms-fontobject", + "application/font*", + "image/svg+xml", + ) + cherrypy.config.update( + { + "server.environment": "production", + "server.socket_host": cherryhost, + "server.socket_port": cherryport, + "server.shutdown_timeout": 0, + "log.screen": False, + "engine.autoreload.on": False, + "tools.encode.on": True, + "tools.gzip.on": True, + "tools.gzip.mime_types": mime_gzip, + "request.show_tracebacks": True, + "error_page.401": sabnzbd.panic.error_page_401, + "error_page.404": sabnzbd.panic.error_page_404, + } + ) # Do we want CherryPy Logging? 
Cannot be done via the config if cherrypylogging: @@ -1296,50 +1366,71 @@ def main(): cherrypy.log.access_log.propagate = False # Force mimetypes (OS might overwrite them) - forced_mime_types = {'css': 'text/css', 'js': 'application/javascript'} - - static = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(sabnzbd.WEB_DIR, 'static'), 'tools.staticdir.content_types': forced_mime_types} - staticcfg = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(sabnzbd.WEB_DIR_CONFIG, 'staticcfg'), 'tools.staticdir.content_types': forced_mime_types} - wizard_static = {'tools.staticdir.on': True, 'tools.staticdir.dir': os.path.join(sabnzbd.WIZARD_DIR, 'static'), 'tools.staticdir.content_types': forced_mime_types} - - appconfig = {'/api': { - 'tools.auth_basic.on': False, - 'tools.response_headers.on': True, - 'tools.response_headers.headers': [('Access-Control-Allow-Origin', '*')] - }, - '/static': static, - '/wizard/static': wizard_static, - '/favicon.ico': {'tools.staticfile.on': True, 'tools.staticfile.filename': os.path.join(sabnzbd.WEB_DIR_CONFIG, 'staticcfg', 'ico', 'favicon.ico')}, - '/staticcfg': staticcfg - } + forced_mime_types = {"css": "text/css", "js": "application/javascript"} + + static = { + "tools.staticdir.on": True, + "tools.staticdir.dir": os.path.join(sabnzbd.WEB_DIR, "static"), + "tools.staticdir.content_types": forced_mime_types, + } + staticcfg = { + "tools.staticdir.on": True, + "tools.staticdir.dir": os.path.join(sabnzbd.WEB_DIR_CONFIG, "staticcfg"), + "tools.staticdir.content_types": forced_mime_types, + } + wizard_static = { + "tools.staticdir.on": True, + "tools.staticdir.dir": os.path.join(sabnzbd.WIZARD_DIR, "static"), + "tools.staticdir.content_types": forced_mime_types, + } + + appconfig = { + "/api": { + "tools.auth_basic.on": False, + "tools.response_headers.on": True, + "tools.response_headers.headers": [("Access-Control-Allow-Origin", "*")], + }, + "/static": static, + "/wizard/static": wizard_static, + 
"/favicon.ico": { + "tools.staticfile.on": True, + "tools.staticfile.filename": os.path.join(sabnzbd.WEB_DIR_CONFIG, "staticcfg", "ico", "favicon.ico"), + }, + "/staticcfg": staticcfg, + } # Make available from both URLs main_page = sabnzbd.interface.MainPage() - cherrypy.tree.mount(main_page, '/', config=appconfig) + cherrypy.tree.mount(main_page, "/", config=appconfig) cherrypy.tree.mount(main_page, sabnzbd.cfg.url_base(), config=appconfig) # Set authentication for CherryPy sabnzbd.interface.set_auth(cherrypy.config) - logging.info('Starting web-interface on %s:%s', cherryhost, cherryport) + logging.info("Starting web-interface on %s:%s", cherryhost, cherryport) sabnzbd.cfg.log_level.callback(guard_loglevel) try: cherrypy.engine.start() except: - logging.error(T('Failed to start web-interface: '), exc_info=True) + logging.error(T("Failed to start web-interface: "), exc_info=True) abort_and_show_error(browserhost, cherryport) # Wait for server to become ready cherrypy.engine.wait(cherrypy.process.wspbus.states.STARTED) - if sabnzbd.WIN32: if enable_https: - mode = 's' + mode = "s" else: - mode = '' - api_url = 'http%s://%s:%s%s/api?apikey=%s' % (mode, browserhost, cherryport, sabnzbd.cfg.url_base(), sabnzbd.cfg.api_key()) + mode = "" + api_url = "http%s://%s:%s%s/api?apikey=%s" % ( + mode, + browserhost, + cherryport, + sabnzbd.cfg.url_base(), + sabnzbd.cfg.api_key(), + ) # Write URL directly to registry set_connection_info(api_url) @@ -1352,7 +1443,7 @@ def main(): return # Start all SABnzbd tasks - logging.info('Starting %s-%s', sabnzbd.MY_NAME, sabnzbd.__version__) + logging.info("Starting %s-%s", sabnzbd.MY_NAME, sabnzbd.__version__) try: sabnzbd.start() except: @@ -1362,6 +1453,7 @@ def main(): # Upload any nzb/zip/rar/nzb.gz/nzb.bz2 files from file association if upload_nzbs: from sabnzbd.utils.upload import add_local + for f in upload_nzbs: add_local(f) @@ -1374,7 +1466,7 @@ def main(): if not autorestarted: launch_a_browser(browser_url) - 
notifier.send_notification('SABnzbd', T('SABnzbd %s started') % sabnzbd.__version__, 'startup') + notifier.send_notification("SABnzbd", T("SABnzbd %s started") % sabnzbd.__version__, "startup") # Now's the time to check for a new version check_latest_version() autorestarted = False @@ -1392,11 +1484,11 @@ def main(): if sabnzbd.LAST_WARNING: msg = sabnzbd.LAST_WARNING sabnzbd.LAST_WARNING = None - sabnzbd.notifier.send_notification(T('Warning'), msg, 'warning') + sabnzbd.notifier.send_notification(T("Warning"), msg, "warning") if sabnzbd.LAST_ERROR: msg = sabnzbd.LAST_ERROR sabnzbd.LAST_ERROR = None - sabnzbd.notifier.send_notification(T('Error'), msg, 'error') + sabnzbd.notifier.send_notification(T("Error"), msg, "error") time.sleep(3) @@ -1432,9 +1524,9 @@ def main(): sabnzbd.shutdown_program() if sabnzbd.downloader.Downloader.do.paused: - sabnzbd.RESTART_ARGS.append('-p') + sabnzbd.RESTART_ARGS.append("-p") if autorestarted: - sabnzbd.RESTART_ARGS.append('--autorestarted') + sabnzbd.RESTART_ARGS.append("--autorestarted") sys.argv = sabnzbd.RESTART_ARGS os.chdir(org_dir) @@ -1443,15 +1535,15 @@ def main(): # [[NSProcessInfo processInfo] processIdentifier]] # logging.info("%s" % (NSProcessInfo.processInfo().processIdentifier())) my_pid = os.getpid() - my_name = sabnzbd.MY_FULLNAME.replace('/Contents/MacOS/SABnzbd', '') - my_args = ' '.join(sys.argv[1:]) + my_name = sabnzbd.MY_FULLNAME.replace("/Contents/MacOS/SABnzbd", "") + my_args = " ".join(sys.argv[1:]) cmd = 'kill -9 %s && open "%s" --args %s' % (my_pid, my_name, my_args) - logging.info('Launching: ', cmd) + logging.info("Launching: ", cmd) os.system(cmd) elif sabnzbd.WIN_SERVICE: # Use external service handler to do the restart # Wait 5 seconds to clean up - subprocess.Popen('timeout 5 & sc start SABnzbd', shell=True) + subprocess.Popen("timeout 5 & sc start SABnzbd", shell=True) else: cherrypy.engine._do_execv() @@ -1463,8 +1555,8 @@ def main(): del_connection_info() # Send our final goodbyes! 
- notifier.send_notification('SABnzbd', T('SABnzbd shutdown finished'), 'startup') - logging.info('Leaving SABnzbd') + notifier.send_notification("SABnzbd", T("SABnzbd shutdown finished"), "startup") + logging.info("Leaving SABnzbd") sys.stderr.flush() sys.stdout.flush() sabnzbd.pid_file() @@ -1492,13 +1584,16 @@ if sabnzbd.WIN32: class SABnzbd(win32serviceutil.ServiceFramework): """ Win32 Service Handler """ - _svc_name_ = 'SABnzbd' - _svc_display_name_ = 'SABnzbd Binary Newsreader' + + _svc_name_ = "SABnzbd" + _svc_display_name_ = "SABnzbd Binary Newsreader" _svc_deps_ = ["EventLog", "Tcpip"] - _svc_description_ = 'Automated downloading from Usenet. ' \ - 'Set to "automatic" to start the service at system startup. ' \ - 'You may need to login with a real user account when you need ' \ - 'access to network shares.' + _svc_description_ = ( + "Automated downloading from Usenet. " + 'Set to "automatic" to start the service at system startup. ' + "You may need to login with a real user account when you need " + "access to network shares." + ) # Only SABnzbd-console.exe can print to the console, so the service is installed # from there. But we run SABnzbd.exe so nothing is logged. 
Logging can cause the @@ -1512,11 +1607,11 @@ if sabnzbd.WIN32: sabnzbd.WIN_SERVICE = self def SvcDoRun(self): - msg = 'SABnzbd-service %s' % sabnzbd.__version__ - self.Logger(servicemanager.PYS_SERVICE_STARTED, msg + ' has started') + msg = "SABnzbd-service %s" % sabnzbd.__version__ + self.Logger(servicemanager.PYS_SERVICE_STARTED, msg + " has started") sys.argv = get_serv_parms(self._svc_name_) main() - self.Logger(servicemanager.PYS_SERVICE_STOPPED, msg + ' has stopped') + self.Logger(servicemanager.PYS_SERVICE_STOPPED, msg + " has stopped") def SvcStop(self): sabnzbd.shutdown_program() @@ -1524,16 +1619,19 @@ if sabnzbd.WIN32: win32event.SetEvent(self.hWaitStop) def Logger(self, state, msg): - win32evtlogutil.ReportEvent(self._svc_display_name_, - state, 0, - servicemanager.EVENTLOG_INFORMATION_TYPE, - (self._svc_name_, msg)) + win32evtlogutil.ReportEvent( + self._svc_display_name_, state, 0, servicemanager.EVENTLOG_INFORMATION_TYPE, (self._svc_name_, msg) + ) def ErrLogger(self, msg, text): - win32evtlogutil.ReportEvent(self._svc_display_name_, - servicemanager.PYS_SERVICE_STOPPED, 0, - servicemanager.EVENTLOG_ERROR_TYPE, - (self._svc_name_, msg), text) + win32evtlogutil.ReportEvent( + self._svc_display_name_, + servicemanager.PYS_SERVICE_STOPPED, + 0, + servicemanager.EVENTLOG_ERROR_TYPE, + (self._svc_name_, msg), + text, + ) SERVICE_MSG = """ @@ -1561,12 +1659,11 @@ def handle_windows_service(): service, sab_opts, serv_opts, _upload_nzbs = commandline_handler() if service: - if service in ('install', 'update'): + if service in ("install", "update"): # In this case check for required parameters path = get_f_option(sab_opts) if not path: - print(('The -f parameter is required.\n' \ - 'Use: -f %s' % service)) + print(("The -f parameter is required.\n" "Use: -f %s" % service)) return True # First run the service installed, because this will @@ -1577,7 +1674,7 @@ def handle_windows_service(): if set_serv_parms(SABnzbd._svc_name_, sab_opts): print(SERVICE_MSG) 
else: - print('ERROR: Cannot set required registry info.') + print("ERROR: Cannot set required registry info.") else: # Pass the other commands directly win32serviceutil.HandleCommandLine(SABnzbd) @@ -1590,7 +1687,7 @@ def handle_windows_service(): ############################################################################## -if __name__ == '__main__': +if __name__ == "__main__": # We can only register these in the main thread signal.signal(signal.SIGINT, sabnzbd.sig_handler) signal.signal(signal.SIGTERM, sabnzbd.sig_handler) @@ -1612,12 +1709,11 @@ if __name__ == '__main__': # This code is made with trial-and-error, please improve! class startApp(Thread): def run(self): - logging.info('[osx] sabApp Starting - starting main thread') + logging.info("[osx] sabApp Starting - starting main thread") main() - logging.info('[osx] sabApp Stopping - main thread quit ') + logging.info("[osx] sabApp Stopping - main thread quit ") AppHelper.stopEventLoop() - sabApp = startApp() sabApp.start() diff --git a/po/main/SABnzbd.pot b/po/main/SABnzbd.pot index 81c1e04..d504ed5 100644 --- a/po/main/SABnzbd.pot +++ b/po/main/SABnzbd.pot @@ -12,7 +12,7 @@ msgstr "" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=ASCII\n" "Content-Transfer-Encoding: 7bit\n" -"POT-Creation-Date: 2020-06-07 09:41+0200\n" +"POT-Creation-Date: 2020-06-11 10:12+0200\n" "Generated-By: pygettext.py 1.5\n" @@ -24,11 +24,11 @@ msgstr "" msgid "Cannot find web template: %s, trying standard template" msgstr "" -#: SABnzbd.py [Error message] +#: SABnzbd.py msgid "SABYenc disabled: no correct version found! (Found v%s, expecting v%s)" msgstr "" -#: SABnzbd.py [Error message] +#: SABnzbd.py msgid "SABYenc module... NOT found! Expecting v%s - https://sabnzbd.org/sabyenc" msgstr "" @@ -36,15 +36,15 @@ msgstr "" msgid "par2 binary... NOT found!" msgstr "" -#: SABnzbd.py [Error message] +#: SABnzbd.py msgid "MultiPar binary... NOT found!" 
msgstr "" -#: SABnzbd.py [Error message] +#: SABnzbd.py msgid "Verification and repair will not be possible." msgstr "" -#: SABnzbd.py [Warning message] +#: SABnzbd.py msgid "Your UNRAR version is %s, we recommend version %s or higher.
" msgstr "" @@ -72,7 +72,7 @@ msgstr "" msgid "HTTP and HTTPS ports cannot be the same" msgstr "" -#: SABnzbd.py [Warning message] +#: SABnzbd.py msgid "SABnzbd was started with encoding %s, this should be UTF-8. Expect problems with Unicoded file and directory names in downloads." msgstr "" @@ -236,11 +236,11 @@ msgstr "" msgid "Fatal error in Assembler" msgstr "" -#: sabnzbd/assembler.py [Warning message] +#: sabnzbd/assembler.py msgid "WARNING: Paused job \"%s\" because of encrypted RAR file (if supplied, all passwords were tried)" msgstr "" -#: sabnzbd/assembler.py [Warning message] +#: sabnzbd/assembler.py msgid "WARNING: Aborted job \"%s\" because of encrypted RAR file (if supplied, all passwords were tried)" msgstr "" @@ -248,7 +248,7 @@ msgstr "" msgid "Aborted, encryption detected" msgstr "" -#: sabnzbd/assembler.py [Warning message] +#: sabnzbd/assembler.py msgid "WARNING: In \"%s\" unwanted extension in RAR file. Unwanted file is %s " msgstr "" @@ -260,11 +260,11 @@ msgstr "" msgid "Aborted, unwanted extension detected" msgstr "" -#: sabnzbd/assembler.py [Warning message] +#: sabnzbd/assembler.py msgid "WARNING: Paused job \"%s\" because of rating (%s)" msgstr "" -#: sabnzbd/assembler.py [Warning message] +#: sabnzbd/assembler.py msgid "WARNING: Aborted job \"%s\" because of rating (%s)" msgstr "" @@ -272,7 +272,7 @@ msgstr "" msgid "Aborted, rating filter matched (%s)" msgstr "" -#: sabnzbd/assembler.py [Warning message] +#: sabnzbd/assembler.py msgid "Job \"%s\" is probably encrypted due to RAR with same name inside this RAR" msgstr "" @@ -376,10 +376,6 @@ msgstr "" msgid "UUencode detected, only yEnc encoding is supported [%s]" msgstr "" -#: sabnzbd/decoder.py -msgid "Badly formed yEnc article in %s" -msgstr "" - #: sabnzbd/decoder.py [Warning message] msgid "Unknown Error while decoding %s" msgstr "" @@ -396,11 +392,11 @@ msgstr "" msgid "Unpacked %s files/folders in %s" msgstr "" -#: sabnzbd/directunpacker.py [Warning message] +#: 
sabnzbd/directunpacker.py msgid "Direct Unpack was automatically enabled." msgstr "" -#: sabnzbd/directunpacker.py [Warning message] # sabnzbd/skintext.py +#: sabnzbd/directunpacker.py # sabnzbd/skintext.py msgid "Jobs will start unpacking during the downloading to reduce post-processing time. Only works for jobs that do not need repair." msgstr "" @@ -444,7 +440,7 @@ msgstr "" msgid "Server %s will be ignored for %s minutes" msgstr "" -#: sabnzbd/downloader.py [Error message] +#: sabnzbd/downloader.py msgid "Failed to initialize %s@%s with reason: %s" msgstr "" @@ -460,7 +456,7 @@ msgstr "" msgid "Failed login for server %s" msgstr "" -#: sabnzbd/downloader.py [Error message] +#: sabnzbd/downloader.py msgid "Connecting %s@%s failed, message=%s" msgstr "" @@ -646,15 +642,15 @@ msgstr "" msgid "Category folder cannot be a subfolder of the Temporary Download Folder." msgstr "" -#: sabnzbd/interface.py -msgid "Back" -msgstr "" - #: sabnzbd/interface.py # sabnzbd/skintext.py msgid "ERROR:" msgstr "" #: sabnzbd/interface.py +msgid "Back" +msgstr "" + +#: sabnzbd/interface.py msgid "Incorrect value for %s: %s" msgstr "" @@ -1014,7 +1010,7 @@ msgstr "" msgid "Incompatible queuefile found, cannot proceed" msgstr "" -#: sabnzbd/nzbqueue.py [Error message] +#: sabnzbd/nzbqueue.py msgid "Error loading %s, corrupt file detected" msgstr "" @@ -1291,11 +1287,11 @@ msgid "Program did not start!" msgstr "" #: sabnzbd/panic.py -msgid "Unable to bind to port %s on %s. Some other software uses the port or SABnzbd is already running." +msgid "Fatal error" msgstr "" #: sabnzbd/panic.py -msgid "Fatal error" +msgid "Unable to bind to port %s on %s. Some other software uses the port or SABnzbd is already running." 
msgstr "" #: sabnzbd/panic.py [Warning message] @@ -2210,7 +2206,7 @@ msgstr "" msgid "Purge Failed NZBs" msgstr "" -#: sabnzbd/skintext.py [Button to delete all failed jobs in History, including files] +#: sabnzbd/skintext.py msgid "Purge Failed NZBs & Delete Files" msgstr "" @@ -2254,7 +2250,7 @@ msgstr "" msgid "Force Disconnect" msgstr "" -#: sabnzbd/skintext.py [Status page button text] +#: sabnzbd/skintext.py msgid "Disconnect all active connections to usenet servers. Connections will be reopened after a few seconds if there are items in the queue." msgstr "" @@ -3322,7 +3318,7 @@ msgstr "" msgid "Optional" msgstr "" -#: sabnzbd/skintext.py [Explain server optional tickbox] +#: sabnzbd/skintext.py msgid "For unreliable servers, will be ignored longer in case of failures" msgstr "" @@ -3606,7 +3602,7 @@ msgstr "" msgid "Emergency retry" msgstr "" -#: sabnzbd/skintext.py [Pushover settings] +#: sabnzbd/skintext.py msgid "How often (in seconds) the same notification will be sent" msgstr "" @@ -3614,7 +3610,7 @@ msgstr "" msgid "Emergency expire" msgstr "" -#: sabnzbd/skintext.py [Pushover settings] +#: sabnzbd/skintext.py msgid "How many seconds your notification will continue to be retried" msgstr "" @@ -4110,7 +4106,7 @@ msgstr "" msgid "Update Available!" msgstr "" -#: sabnzbd/skintext.py [Don't translate LocalStorage] +#: sabnzbd/skintext.py msgid "LocalStorage (cookies) are disabled in your browser, interface settings will be lost after you close the browser!" msgstr "" @@ -4386,11 +4382,11 @@ msgstr "" msgid "Closing any browser windows/tabs will NOT close SABnzbd." msgstr "" -#: sabnzbd/skintext.py [Wizard tip] +#: sabnzbd/skintext.py msgid "It is recommended you right click and bookmark this location and use this bookmark to access SABnzbd when it is running in the background." 
msgstr "" -#: sabnzbd/skintext.py [Will be appended with a wiki-link, adjust word order accordingly] +#: sabnzbd/skintext.py msgid "Further help can be found on our" msgstr "" @@ -4426,7 +4422,7 @@ msgstr "" msgid "Error getting TV info (%s)" msgstr "" -#: sabnzbd/sorting.py [Error message] +#: sabnzbd/sorting.py [Error message] # sabnzbd/sorting.py msgid "Failed to rename: %s to %s" msgstr "" diff --git a/sabnzbd/api.py b/sabnzbd/api.py index 98fcbce..d31975d 100644 --- a/sabnzbd/api.py +++ b/sabnzbd/api.py @@ -37,9 +37,19 @@ except ImportError: pass import sabnzbd -from sabnzbd.constants import VALID_ARCHIVES, VALID_NZB_FILES, Status, \ - TOP_PRIORITY, REPAIR_PRIORITY, HIGH_PRIORITY, NORMAL_PRIORITY, LOW_PRIORITY, \ - KIBI, MEBI, GIGI +from sabnzbd.constants import ( + VALID_ARCHIVES, + VALID_NZB_FILES, + Status, + TOP_PRIORITY, + REPAIR_PRIORITY, + HIGH_PRIORITY, + NORMAL_PRIORITY, + LOW_PRIORITY, + KIBI, + MEBI, + GIGI, +) import sabnzbd.config as config import sabnzbd.cfg as cfg from sabnzbd.downloader import Downloader @@ -48,10 +58,17 @@ import sabnzbd.scheduler as scheduler from sabnzbd.skintext import SKIN_TEXT from sabnzbd.utils.pathbrowser import folders_at_path from sabnzbd.utils.getperformance import getcpu -from sabnzbd.misc import loadavg, to_units, int_conv, time_format, \ - cat_convert, create_https_certificates, calc_age, opts_to_pp -from sabnzbd.filesystem import diskspace, get_ext, get_filename, \ - globber_full, clip_path, remove_all +from sabnzbd.misc import ( + loadavg, + to_units, + int_conv, + time_format, + cat_convert, + create_https_certificates, + calc_age, + opts_to_pp, +) +from sabnzbd.filesystem import diskspace, get_ext, get_filename, globber_full, clip_path, remove_all from sabnzbd.encoding import xml_name from sabnzbd.postproc import PostProcessor from sabnzbd.articlecache import ArticleCache @@ -68,21 +85,21 @@ import sabnzbd.emailer ############################################################################## # API error 
messages ############################################################################## -_MSG_NO_VALUE = 'expects one parameter' -_MSG_NO_VALUE2 = 'expects two parameters' -_MSG_INT_VALUE = 'expects integer value' -_MSG_NO_ITEM = 'item does not exist' -_MSG_NOT_IMPLEMENTED = 'not implemented' -_MSG_NO_FILE = 'no file given' -_MSG_NO_PATH = 'file does not exist' -_MSG_OUTPUT_FORMAT = 'Format not supported' -_MSG_NO_SUCH_CONFIG = 'Config item does not exist' -_MSG_CONFIG_LOCKED = 'Configuration locked' -_MSG_BAD_SERVER_PARMS = 'Incorrect server settings' +_MSG_NO_VALUE = "expects one parameter" +_MSG_NO_VALUE2 = "expects two parameters" +_MSG_INT_VALUE = "expects integer value" +_MSG_NO_ITEM = "item does not exist" +_MSG_NOT_IMPLEMENTED = "not implemented" +_MSG_NO_FILE = "no file given" +_MSG_NO_PATH = "file does not exist" +_MSG_OUTPUT_FORMAT = "Format not supported" +_MSG_NO_SUCH_CONFIG = "Config item does not exist" +_MSG_CONFIG_LOCKED = "Configuration locked" +_MSG_BAD_SERVER_PARMS = "Incorrect server settings" # For Windows: determine executable extensions -if os.name == 'nt': - PATHEXT = os.environ.get('PATHEXT', '').lower().split(';') +if os.name == "nt": + PATHEXT = os.environ.get("PATHEXT", "").lower().split(";") else: PATHEXT = [] @@ -91,24 +108,33 @@ def api_handler(kwargs): """ API Dispatcher """ if cfg.api_logging(): # Was it proxy forwarded? 
- xff = cherrypy.request.headers.get('X-Forwarded-For') + xff = cherrypy.request.headers.get("X-Forwarded-For") if xff: - logging.debug('API-call from %s (X-Forwarded-For: %s) [%s] %s', cherrypy.request.remote.ip, - xff, cherrypy.request.headers.get('User-Agent', '??'), kwargs) + logging.debug( + "API-call from %s (X-Forwarded-For: %s) [%s] %s", + cherrypy.request.remote.ip, + xff, + cherrypy.request.headers.get("User-Agent", "??"), + kwargs, + ) else: - logging.debug('API-call from %s [%s] %s', cherrypy.request.remote.ip, - cherrypy.request.headers.get('User-Agent', '??'), kwargs) + logging.debug( + "API-call from %s [%s] %s", + cherrypy.request.remote.ip, + cherrypy.request.headers.get("User-Agent", "??"), + kwargs, + ) # Clean-up the arguments - for vr in ('mode', 'output', 'name'): + for vr in ("mode", "output", "name"): if isinstance(kwargs.get(vr, None), list): kwargs[vr] = vr[0] - mode = kwargs.get('mode', '') - output = kwargs.get('output', '') - name = kwargs.get('name', '') + mode = kwargs.get("mode", "") + output = kwargs.get("output", "") + name = kwargs.get("name", "") - if mode not in ('version', 'auth'): + if mode not in ("version", "auth"): msg = sabnzbd.interface.check_apikey(kwargs) if msg: return report(output, msg) @@ -119,38 +145,38 @@ def api_handler(kwargs): def _api_get_config(name, output, kwargs): """ API: accepts output, keyword, section """ - _, data = config.get_dconfig(kwargs.get('section'), kwargs.get('keyword')) - return report(output, keyword='config', data=data) + _, data = config.get_dconfig(kwargs.get("section"), kwargs.get("keyword")) + return report(output, keyword="config", data=data) def _api_set_config(name, output, kwargs): """ API: accepts output, keyword, section """ if cfg.configlock(): return report(output, _MSG_CONFIG_LOCKED) - if kwargs.get('section') == 'servers': - kwargs['keyword'] = handle_server_api(output, kwargs) - elif kwargs.get('section') == 'rss': - kwargs['keyword'] = handle_rss_api(output, kwargs) - elif 
kwargs.get('section') == 'categories': - kwargs['keyword'] = handle_cat_api(output, kwargs) + if kwargs.get("section") == "servers": + kwargs["keyword"] = handle_server_api(output, kwargs) + elif kwargs.get("section") == "rss": + kwargs["keyword"] = handle_rss_api(output, kwargs) + elif kwargs.get("section") == "categories": + kwargs["keyword"] = handle_cat_api(output, kwargs) else: res = config.set_config(kwargs) if not res: return report(output, _MSG_NO_SUCH_CONFIG) config.save_config() - res, data = config.get_dconfig(kwargs.get('section'), kwargs.get('keyword')) - return report(output, keyword='config', data=data) + res, data = config.get_dconfig(kwargs.get("section"), kwargs.get("keyword")) + return report(output, keyword="config", data=data) def _api_set_config_default(name, output, kwargs): """ API: Reset requested config variables back to defaults. Currently only for misc-section """ if cfg.configlock(): return report(output, _MSG_CONFIG_LOCKED) - keywords = kwargs.get('keyword', []) + keywords = kwargs.get("keyword", []) if not isinstance(keywords, list): keywords = [keywords] for keyword in keywords: - item = config.get_config('misc', keyword) + item = config.get_config("misc", keyword) if item: item.set(item.default()) config.save_config() @@ -169,41 +195,41 @@ def _api_del_config(name, output, kwargs): def _api_queue(name, output, kwargs): """ API: Dispatcher for mode=queue """ - value = kwargs.get('value', '') + value = kwargs.get("value", "") return _api_queue_table.get(name, (_api_queue_default, 2))[0](output, value, kwargs) def _api_queue_delete(output, value, kwargs): """ API: accepts output, value """ - if value.lower() == 'all': - removed = NzbQueue.do.remove_all(kwargs.get('search')) - return report(output, keyword='', data={'status': bool(removed), 'nzo_ids': removed}) + if value.lower() == "all": + removed = NzbQueue.do.remove_all(kwargs.get("search")) + return report(output, keyword="", data={"status": bool(removed), "nzo_ids": removed}) elif 
value: - items = value.split(',') - delete_all_data = int_conv(kwargs.get('del_files')) + items = value.split(",") + delete_all_data = int_conv(kwargs.get("del_files")) removed = NzbQueue.do.remove_multiple(items, delete_all_data=delete_all_data) - return report(output, keyword='', data={'status': bool(removed), 'nzo_ids': removed}) + return report(output, keyword="", data={"status": bool(removed), "nzo_ids": removed}) else: return report(output, _MSG_NO_VALUE) def _api_queue_delete_nzf(output, value, kwargs): """ API: accepts value(=nzo_id), value2(=nzf_id) """ - value2 = kwargs.get('value2') + value2 = kwargs.get("value2") if value and value2: removed = NzbQueue.do.remove_nzf(value, value2, force_delete=True) - return report(output, keyword='', data={'status': bool(removed), 'nzf_ids': removed}) + return report(output, keyword="", data={"status": bool(removed), "nzf_ids": removed}) else: return report(output, _MSG_NO_VALUE2) def _api_queue_rename(output, value, kwargs): """ API: accepts output, value(=old name), value2(=new name), value3(=password) """ - value2 = kwargs.get('value2') - value3 = kwargs.get('value3') + value2 = kwargs.get("value2") + value3 = kwargs.get("value3") if value and value2: ret = NzbQueue.do.change_name(value, value2, value3) - return report(output, keyword='', data={'status': ret}) + return report(output, keyword="", data={"status": ret}) else: return report(output, _MSG_NO_VALUE2) @@ -216,33 +242,33 @@ def _api_queue_change_complete_action(output, value, kwargs): def _api_queue_purge(output, value, kwargs): """ API: accepts output """ - removed = NzbQueue.do.remove_all(kwargs.get('search')) - return report(output, keyword='', data={'status': bool(removed), 'nzo_ids': removed}) + removed = NzbQueue.do.remove_all(kwargs.get("search")) + return report(output, keyword="", data={"status": bool(removed), "nzo_ids": removed}) def _api_queue_pause(output, value, kwargs): """ API: accepts output, value(=list of nzo_id) """ if value: - items = 
value.split(',') + items = value.split(",") handled = NzbQueue.do.pause_multiple_nzo(items) else: handled = False - return report(output, keyword='', data={'status': bool(handled), 'nzo_ids': handled}) + return report(output, keyword="", data={"status": bool(handled), "nzo_ids": handled}) def _api_queue_resume(output, value, kwargs): """ API: accepts output, value(=list of nzo_id) """ if value: - items = value.split(',') + items = value.split(",") handled = NzbQueue.do.resume_multiple_nzo(items) else: handled = False - return report(output, keyword='', data={'status': bool(handled), 'nzo_ids': handled}) + return report(output, keyword="", data={"status": bool(handled), "nzo_ids": handled}) def _api_queue_priority(output, value, kwargs): """ API: accepts output, value(=nzo_id), value2(=priority) """ - value2 = kwargs.get('value2') + value2 = kwargs.get("value2") if value and value2: try: try: @@ -251,7 +277,7 @@ def _api_queue_priority(output, value, kwargs): return report(output, _MSG_INT_VALUE) pos = NzbQueue.do.set_priority(value, priority) # Returns the position in the queue, -1 is incorrect job-id - return report(output, keyword='position', data=pos) + return report(output, keyword="position", data=pos) except: return report(output, _MSG_NO_VALUE2) else: @@ -260,8 +286,8 @@ def _api_queue_priority(output, value, kwargs): def _api_queue_sort(output, value, kwargs): """ API: accepts output, sort, dir """ - sort = kwargs.get('sort') - direction = kwargs.get('dir', '') + sort = kwargs.get("sort") + direction = kwargs.get("dir", "") if sort: NzbQueue.do.sort_queue(sort, direction) return report(output) @@ -271,33 +297,39 @@ def _api_queue_sort(output, value, kwargs): def _api_queue_default(output, value, kwargs): """ API: accepts output, sort, dir, start, limit """ - start = int_conv(kwargs.get('start')) - limit = int_conv(kwargs.get('limit')) - search = kwargs.get('search') + start = int_conv(kwargs.get("start")) + limit = int_conv(kwargs.get("limit")) + search = 
kwargs.get("search") info, pnfo_list, bytespersec = build_queue(start=start, limit=limit, output=output, search=search) - return report(output, keyword='queue', data=info) + return report(output, keyword="queue", data=info) def _api_queue_rating(output, value, kwargs): """ API: accepts output, value(=nzo_id), type, setting, detail """ - vote_map = {'up': Rating.VOTE_UP, 'down': Rating.VOTE_DOWN} - flag_map = {'spam': Rating.FLAG_SPAM, 'encrypted': Rating.FLAG_ENCRYPTED, 'expired': Rating.FLAG_EXPIRED, 'other': Rating.FLAG_OTHER, 'comment': Rating.FLAG_COMMENT} - content_type = kwargs.get('type') - setting = kwargs.get('setting') + vote_map = {"up": Rating.VOTE_UP, "down": Rating.VOTE_DOWN} + flag_map = { + "spam": Rating.FLAG_SPAM, + "encrypted": Rating.FLAG_ENCRYPTED, + "expired": Rating.FLAG_EXPIRED, + "other": Rating.FLAG_OTHER, + "comment": Rating.FLAG_COMMENT, + } + content_type = kwargs.get("type") + setting = kwargs.get("setting") if value: try: video = audio = vote = flag = None - if content_type == 'video' and setting != "-": + if content_type == "video" and setting != "-": video = setting - if content_type == 'audio' and setting != "-": + if content_type == "audio" and setting != "-": audio = setting - if content_type == 'vote': + if content_type == "vote": vote = vote_map[setting] - if content_type == 'flag': + if content_type == "flag": flag = flag_map[setting] if cfg.rating_enable(): - Rating.do.update_user_rating(value, video, audio, vote, flag, kwargs.get('detail')) + Rating.do.update_user_rating(value, video, audio, vote, flag, kwargs.get("detail")) return report(output) except: return report(output, _MSG_BAD_SERVER_PARMS) @@ -312,65 +344,66 @@ def _api_options(name, output, kwargs): def _api_translate(name, output, kwargs): """ API: accepts output, value(=acronym) """ - return report(output, keyword='value', data=T(kwargs.get('value', ''))) + return report(output, keyword="value", data=T(kwargs.get("value", ""))) def _api_addfile(name, output, 
kwargs): """ API: accepts name, output, pp, script, cat, priority, nzbname """ # Normal upload will send the nzb in a kw arg called nzbfile if name is None or isinstance(name, str): - name = kwargs.get('nzbfile') - if hasattr(name, 'getvalue'): + name = kwargs.get("nzbfile") + if hasattr(name, "getvalue"): # Side effect of next line is that attribute .value is created # which is needed to make add_nzbfile() work size = name.length - elif hasattr(name, 'file') and hasattr(name, 'filename') and name.filename: + elif hasattr(name, "file") and hasattr(name, "filename") and name.filename: # CherryPy 3.2.2 object - if hasattr(name.file, 'file'): + if hasattr(name.file, "file"): name.value = name.file.file.read() else: name.value = name.file.read() size = len(name.value) - elif hasattr(name, 'value'): + elif hasattr(name, "value"): size = len(name.value) else: size = 0 if name is not None and size and name.filename: - cat = kwargs.get('cat') - xcat = kwargs.get('xcat') + cat = kwargs.get("cat") + xcat = kwargs.get("xcat") if not cat and xcat: # Indexer category, so do mapping cat = cat_convert(xcat) - res = sabnzbd.add_nzbfile(name, kwargs.get('pp'), kwargs.get('script'), cat, - kwargs.get('priority'), kwargs.get('nzbname')) - return report(output, keyword='', data={'status': res[0] == 0, 'nzo_ids': res[1]}) + res = sabnzbd.add_nzbfile( + name, kwargs.get("pp"), kwargs.get("script"), cat, kwargs.get("priority"), kwargs.get("nzbname") + ) + return report(output, keyword="", data={"status": res[0] == 0, "nzo_ids": res[1]}) else: return report(output, _MSG_NO_VALUE) def _api_retry(name, output, kwargs): """ API: accepts name, output, value(=nzo_id), nzbfile(=optional NZB), password (optional) """ - value = kwargs.get('value') + value = kwargs.get("value") # Normal upload will send the nzb in a kw arg called nzbfile if name is None or isinstance(name, str): - name = kwargs.get('nzbfile') - password = kwargs.get('password') + name = kwargs.get("nzbfile") + password = 
kwargs.get("password") password = password[0] if isinstance(password, list) else password nzo_id = retry_job(value, name, password) if nzo_id: if isinstance(nzo_id, list): nzo_id = nzo_id[0] - return report(output, keyword='', data={'status': True, 'nzo_id': nzo_id}) + return report(output, keyword="", data={"status": True, "nzo_id": nzo_id}) else: return report(output, _MSG_NO_ITEM) def _api_cancel_pp(name, output, kwargs): """ API: accepts name, output, value(=nzo_id) """ - nzo_id = kwargs.get('value') + nzo_id = kwargs.get("value") if PostProcessor.do.cancel_pp(nzo_id): - return report(output, keyword='', data={'status': True, 'nzo_id': nzo_id}) + return report(output, keyword="", data={"status": True, "nzo_id": nzo_id}) else: return report(output, _MSG_NO_ITEM) @@ -381,105 +414,107 @@ def _api_addlocalfile(name, output, kwargs): if os.path.exists(name): fn = get_filename(name) if fn: - pp = kwargs.get('pp') - script = kwargs.get('script') - cat = kwargs.get('cat') - xcat = kwargs.get('xcat') + pp = kwargs.get("pp") + script = kwargs.get("script") + cat = kwargs.get("cat") + xcat = kwargs.get("xcat") if not cat and xcat: # Indexer category, so do mapping cat = cat_convert(xcat) - priority = kwargs.get('priority') - nzbname = kwargs.get('nzbname') + priority = kwargs.get("priority") + nzbname = kwargs.get("nzbname") if get_ext(name) in VALID_ARCHIVES: res = sabnzbd.dirscanner.process_nzb_archive_file( - fn, name, pp=pp, script=script, cat=cat, priority=priority, keep=True, nzbname=nzbname) + fn, name, pp=pp, script=script, cat=cat, priority=priority, keep=True, nzbname=nzbname + ) elif get_ext(name) in VALID_NZB_FILES: res = sabnzbd.dirscanner.process_single_nzb( - fn, name, pp=pp, script=script, cat=cat, priority=priority, keep=True, nzbname=nzbname) + fn, name, pp=pp, script=script, cat=cat, priority=priority, keep=True, nzbname=nzbname + ) else: logging.info('API-call addlocalfile: "%s" not a proper file name', name) return report(output, _MSG_NO_FILE) else: 
logging.info('API-call addlocalfile: file "%s" not found', name) return report(output, _MSG_NO_PATH) - return report(output, keyword='', data={'status': res[0] == 0, 'nzo_ids': res[1]}) + return report(output, keyword="", data={"status": res[0] == 0, "nzo_ids": res[1]}) else: - logging.info('API-call addlocalfile: no file name given') + logging.info("API-call addlocalfile: no file name given") return report(output, _MSG_NO_VALUE) def _api_switch(name, output, kwargs): """ API: accepts output, value(=first id), value2(=second id) """ - value = kwargs.get('value') - value2 = kwargs.get('value2') + value = kwargs.get("value") + value2 = kwargs.get("value2") if value and value2: pos, prio = NzbQueue.do.switch(value, value2) # Returns the new position and new priority (if different) - return report(output, keyword='result', data={'position': pos, 'priority': prio}) + return report(output, keyword="result", data={"position": pos, "priority": prio}) else: return report(output, _MSG_NO_VALUE2) def _api_change_cat(name, output, kwargs): """ API: accepts output, value(=nzo_id), value2(=category) """ - value = kwargs.get('value') - value2 = kwargs.get('value2') + value = kwargs.get("value") + value2 = kwargs.get("value2") if value and value2: nzo_id = value cat = value2 - if cat == 'None': + if cat == "None": cat = None result = NzbQueue.do.change_cat(nzo_id, cat) - return report(output, keyword='status', data=bool(result > 0)) + return report(output, keyword="status", data=bool(result > 0)) else: return report(output, _MSG_NO_VALUE) def _api_change_script(name, output, kwargs): """ API: accepts output, value(=nzo_id), value2(=script) """ - value = kwargs.get('value') - value2 = kwargs.get('value2') + value = kwargs.get("value") + value2 = kwargs.get("value2") if value and value2: nzo_id = value script = value2 - if script.lower() == 'none': + if script.lower() == "none": script = None result = NzbQueue.do.change_script(nzo_id, script) - return report(output, 
keyword='status', data=bool(result > 0)) + return report(output, keyword="status", data=bool(result > 0)) else: return report(output, _MSG_NO_VALUE) def _api_change_opts(name, output, kwargs): """ API: accepts output, value(=nzo_id), value2(=pp) """ - value = kwargs.get('value') - value2 = kwargs.get('value2') + value = kwargs.get("value") + value2 = kwargs.get("value2") result = 0 if value and value2 and value2.isdigit(): result = NzbQueue.do.change_opts(value, int(value2)) - return report(output, keyword='status', data=bool(result > 0)) + return report(output, keyword="status", data=bool(result > 0)) def _api_fullstatus(name, output, kwargs): """ API: full history status""" - status = build_status(skip_dashboard=kwargs.get('skip_dashboard', 1), output=output) - return report(output, keyword='status', data=status) + status = build_status(skip_dashboard=kwargs.get("skip_dashboard", 1), output=output) + return report(output, keyword="status", data=status) def _api_history(name, output, kwargs): """ API: accepts output, value(=nzo_id), start, limit, search """ - value = kwargs.get('value', '') - start = int_conv(kwargs.get('start')) - limit = int_conv(kwargs.get('limit')) - last_history_update = int_conv(kwargs.get('last_history_update', 0)) - search = kwargs.get('search') - failed_only = kwargs.get('failed_only') - categories = kwargs.get('category') + value = kwargs.get("value", "") + start = int_conv(kwargs.get("start")) + limit = int_conv(kwargs.get("limit")) + last_history_update = int_conv(kwargs.get("last_history_update", 0)) + search = kwargs.get("search") + failed_only = kwargs.get("failed_only") + categories = kwargs.get("category") # Do we need to send anything? 
if last_history_update == sabnzbd.LAST_HISTORY_UPDATE: - return report(output, keyword='history', data=False) + return report(output, keyword="history", data=False) if categories and not isinstance(categories, list): categories = [categories] @@ -487,21 +522,21 @@ def _api_history(name, output, kwargs): if not limit: limit = cfg.history_limit() - if name == 'delete': + if name == "delete": special = value.lower() - del_files = bool(int_conv(kwargs.get('del_files'))) - if special in ('all', 'failed', 'completed'): + del_files = bool(int_conv(kwargs.get("del_files"))) + if special in ("all", "failed", "completed"): history_db = sabnzbd.get_db_connection() - if special in ('all', 'failed'): + if special in ("all", "failed"): if del_files: del_job_files(history_db.get_failed_paths(search)) history_db.remove_failed(search) - if special in ('all', 'completed'): + if special in ("all", "completed"): history_db.remove_completed(search) sabnzbd.history_updated() return report(output) elif value: - jobs = value.split(',') + jobs = value.split(",") for job in jobs: del_hist_job(job, del_files) sabnzbd.history_updated() @@ -511,42 +546,44 @@ def _api_history(name, output, kwargs): elif not name: history = {} grand, month, week, day = BPSMeter.do.get_sums() - history['total_size'], history['month_size'], history['week_size'], history['day_size'] = \ - to_units(grand), to_units(month), to_units(week), to_units(day) - history['slots'], fetched_items, history['noofslots'] = build_history(start=start, - limit=limit, - search=search, failed_only=failed_only, - categories=categories, - output=output) - history['last_history_update'] = sabnzbd.LAST_HISTORY_UPDATE - history['version'] = sabnzbd.__version__ - return report(output, keyword='history', data=history) + history["total_size"], history["month_size"], history["week_size"], history["day_size"] = ( + to_units(grand), + to_units(month), + to_units(week), + to_units(day), + ) + history["slots"], fetched_items, history["noofslots"] 
= build_history( + start=start, limit=limit, search=search, failed_only=failed_only, categories=categories, output=output + ) + history["last_history_update"] = sabnzbd.LAST_HISTORY_UPDATE + history["version"] = sabnzbd.__version__ + return report(output, keyword="history", data=history) else: return report(output, _MSG_NOT_IMPLEMENTED) def _api_get_files(name, output, kwargs): """ API: accepts output, value(=nzo_id) """ - value = kwargs.get('value') + value = kwargs.get("value") if value: - return report(output, keyword='files', data=build_file_list(value)) + return report(output, keyword="files", data=build_file_list(value)) else: return report(output, _MSG_NO_VALUE) def _api_addurl(name, output, kwargs): """ API: accepts name, output, pp, script, cat, priority, nzbname """ - pp = kwargs.get('pp') - script = kwargs.get('script') - cat = kwargs.get('cat') - priority = kwargs.get('priority') - nzbname = kwargs.get('nzbname', '') + pp = kwargs.get("pp") + script = kwargs.get("script") + cat = kwargs.get("cat") + priority = kwargs.get("priority") + nzbname = kwargs.get("nzbname", "") if name: nzo_id = sabnzbd.add_url(name, pp, script, cat, priority, nzbname) - return report(output, keyword='', data={'status': True, 'nzo_ids': nzo_id}) + return report(output, keyword="", data={"status": True, "nzo_ids": nzo_id}) else: - logging.info('API-call addurl: no URLs recieved') + logging.info("API-call addurl: no URLs recieved") return report(output, _MSG_NO_VALUE) @@ -572,9 +609,9 @@ def _api_shutdown(name, output, kwargs): def _api_warnings(name, output, kwargs): """ API: accepts name, output """ - if name == 'clear': + if name == "clear": return report(output, keyword="warnings", data=sabnzbd.GUIHANDLER.clear()) - elif name == 'show': + elif name == "show": return report(output, keyword="warnings", data=sabnzbd.GUIHANDLER.content()) elif name: return report(output, _MSG_NOT_IMPLEMENTED) @@ -593,38 +630,38 @@ def _api_get_scripts(name, output, kwargs): def _api_version(name, 
output, kwargs): """ API: accepts output """ - return report(output, keyword='version', data=sabnzbd.__version__) + return report(output, keyword="version", data=sabnzbd.__version__) def _api_auth(name, output, kwargs): """ API: accepts output """ - auth = 'None' + auth = "None" if not cfg.disable_key(): - auth = 'badkey' - key = kwargs.get('key', '') + auth = "badkey" + key = kwargs.get("key", "") if not key: - auth = 'apikey' + auth = "apikey" else: if key == cfg.nzb_key(): - auth = 'nzbkey' + auth = "nzbkey" if key == cfg.api_key(): - auth = 'apikey' + auth = "apikey" elif cfg.username() and cfg.password(): - auth = 'login' - return report(output, keyword='auth', data=auth) + auth = "login" + return report(output, keyword="auth", data=auth) def _api_restart(name, output, kwargs): """ API: accepts output """ - logging.info('Restart requested by API') + logging.info("Restart requested by API") # Do the shutdown async to still send goodbye to browser - Thread(target=sabnzbd.trigger_restart, kwargs={'timeout': 1}).start() + Thread(target=sabnzbd.trigger_restart, kwargs={"timeout": 1}).start() return report(output) def _api_restart_repair(name, output, kwargs): """ API: accepts output """ - logging.info('Queue repair requested by API') + logging.info("Queue repair requested by API") sabnzbd.request_repair() sabnzbd.trigger_restart() return report(output) @@ -638,8 +675,8 @@ def _api_disconnect(name, output, kwargs): def _api_osx_icon(name, output, kwargs): """ API: accepts output, value """ - value = kwargs.get('value', '1').strip() - cfg.osx_menu.set(value != '0') + value = kwargs.get("value", "1").strip() + cfg.osx_menu.set(value != "0") return report(output) @@ -651,15 +688,15 @@ def _api_rescan(name, output, kwargs): def _api_eval_sort(name, output, kwargs): """ API: evaluate sorting expression """ - name = kwargs.get('name', '') - value = kwargs.get('value', '') - title = kwargs.get('title') - multipart = kwargs.get('movieextra', '') + name = kwargs.get("name", 
"") + value = kwargs.get("value", "") + title = kwargs.get("title") + multipart = kwargs.get("movieextra", "") path = sabnzbd.sorting.eval_sort(value, title, name, multipart) if path is None: return report(output, _MSG_NOT_IMPLEMENTED) else: - return report(output, keyword='result', data=path) + return report(output, keyword="result", data=path) def _api_watched_now(name, output, kwargs): @@ -689,7 +726,7 @@ def _api_rss_now(name, output, kwargs): def _api_retry_all(name, output, kwargs): """ API: Retry all failed items in History """ - return report(output, keyword='status', data=retry_all_jobs()) + return report(output, keyword="status", data=retry_all_jobs()) def _api_reset_quota(name, output, kwargs): @@ -700,12 +737,21 @@ def _api_reset_quota(name, output, kwargs): def _api_test_email(name, output, kwargs): """ API: send a test email, return result """ logging.info("Sending test email") - pack = {'download': ['action 1', 'action 2'], 'unpack': ['action 1', 'action 2']} - res = sabnzbd.emailer.endjob('I had a d\xe8ja vu', 'unknown', True, - os.path.normpath(os.path.join(cfg.complete_dir.get_path(), '/unknown/I had a d\xe8ja vu')), - 123 * MEBI, None, pack, 'my_script', 'Line 1\nLine 2\nLine 3\nd\xe8ja vu\n', 0, - test=kwargs) - if res == T('Email succeeded'): + pack = {"download": ["action 1", "action 2"], "unpack": ["action 1", "action 2"]} + res = sabnzbd.emailer.endjob( + "I had a d\xe8ja vu", + "unknown", + True, + os.path.normpath(os.path.join(cfg.complete_dir.get_path(), "/unknown/I had a d\xe8ja vu")), + 123 * MEBI, + None, + pack, + "my_script", + "Line 1\nLine 2\nLine 3\nd\xe8ja vu\n", + 0, + test=kwargs, + ) + if res == T("Email succeeded"): res = None return report(output, error=res) @@ -713,49 +759,49 @@ def _api_test_email(name, output, kwargs): def _api_test_windows(name, output, kwargs): """ API: send a test to Windows, return result """ logging.info("Sending test notification") - res = sabnzbd.notifier.send_windows('SABnzbd', T('Test 
Notification'), 'other') + res = sabnzbd.notifier.send_windows("SABnzbd", T("Test Notification"), "other") return report(output, error=res) def _api_test_notif(name, output, kwargs): """ API: send a test to Notification Center, return result """ logging.info("Sending test notification") - res = sabnzbd.notifier.send_notification_center('SABnzbd', T('Test Notification'), 'other') + res = sabnzbd.notifier.send_notification_center("SABnzbd", T("Test Notification"), "other") return report(output, error=res) def _api_test_osd(name, output, kwargs): """ API: send a test OSD notification, return result """ logging.info("Sending OSD notification") - res = sabnzbd.notifier.send_notify_osd('SABnzbd', T('Test Notification')) + res = sabnzbd.notifier.send_notify_osd("SABnzbd", T("Test Notification")) return report(output, error=res) def _api_test_prowl(name, output, kwargs): """ API: send a test Prowl notification, return result """ logging.info("Sending Prowl notification") - res = sabnzbd.notifier.send_prowl('SABnzbd', T('Test Notification'), 'other', force=True, test=kwargs) + res = sabnzbd.notifier.send_prowl("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs) return report(output, error=res) def _api_test_pushover(name, output, kwargs): """ API: send a test Pushover notification, return result """ logging.info("Sending Pushover notification") - res = sabnzbd.notifier.send_pushover('SABnzbd', T('Test Notification'), 'other', force=True, test=kwargs) + res = sabnzbd.notifier.send_pushover("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs) return report(output, error=res) def _api_test_pushbullet(name, output, kwargs): """ API: send a test Pushbullet notification, return result """ logging.info("Sending Pushbullet notification") - res = sabnzbd.notifier.send_pushbullet('SABnzbd', T('Test Notification'), 'other', force=True, test=kwargs) + res = sabnzbd.notifier.send_pushbullet("SABnzbd", T("Test Notification"), "other", force=True, 
test=kwargs) return report(output, error=res) def _api_test_nscript(name, output, kwargs): """ API: execute a test notification script, return result """ logging.info("Executing notification script") - res = sabnzbd.notifier.send_nscript('SABnzbd', T('Test Notification'), 'other', force=True, test=kwargs) + res = sabnzbd.notifier.send_nscript("SABnzbd", T("Test Notification"), "other", force=True, test=kwargs) return report(output, error=res) @@ -766,16 +812,16 @@ def _api_undefined(name, output, kwargs): def _api_browse(name, output, kwargs): """ Return tree of local path """ - compact = kwargs.get('compact') + compact = kwargs.get("compact") - if compact and compact == '1': - name = kwargs.get('term', '') - paths = [entry['path'] for entry in folders_at_path(os.path.dirname(name)) if 'path' in entry] - return report(output, keyword='', data=paths) + if compact and compact == "1": + name = kwargs.get("term", "") + paths = [entry["path"] for entry in folders_at_path(os.path.dirname(name)) if "path" in entry] + return report(output, keyword="", data=paths) else: - show_hidden = kwargs.get('show_hidden_folders') + show_hidden = kwargs.get("show_hidden_folders") paths = folders_at_path(name, True, show_hidden) - return report(output, keyword='paths', data=paths) + return report(output, keyword="paths", data=paths) def _api_config(name, output, kwargs): @@ -787,21 +833,21 @@ def _api_config(name, output, kwargs): def _api_config_speedlimit(output, kwargs): """ API: accepts output, value(=speed) """ - value = kwargs.get('value') + value = kwargs.get("value") if not value: - value = '0' + value = "0" Downloader.do.limit_speed(value) return report(output) def _api_config_get_speedlimit(output, kwargs): """ API: accepts output """ - return report(output, keyword='speedlimit', data=Downloader.do.get_limit()) + return report(output, keyword="speedlimit", data=Downloader.do.get_limit()) def _api_config_set_colorscheme(output, kwargs): """ API: accepts output""" - value = 
kwargs.get('value') + value = kwargs.get("value") if value: cfg.web_color.set(value) return report(output) @@ -811,7 +857,7 @@ def _api_config_set_colorscheme(output, kwargs): def _api_config_set_pause(output, kwargs): """ API: accepts output, value(=pause interval) """ - value = kwargs.get('value') + value = kwargs.get("value") scheduler.plan_resume(int_conv(value)) return report(output) @@ -820,20 +866,23 @@ def _api_config_set_apikey(output, kwargs): """ API: accepts output """ cfg.api_key.set(config.create_api_key()) config.save_config() - return report(output, keyword='apikey', data=cfg.api_key()) + return report(output, keyword="apikey", data=cfg.api_key()) def _api_config_set_nzbkey(output, kwargs): """ API: accepts output """ cfg.nzb_key.set(config.create_api_key()) config.save_config() - return report(output, keyword='nzbkey', data=cfg.nzb_key()) + return report(output, keyword="nzbkey", data=cfg.nzb_key()) def _api_config_regenerate_certs(output, kwargs): # Make sure we only over-write default locations result = False - if sabnzbd.cfg.https_cert() is sabnzbd.cfg.https_cert.default() and sabnzbd.cfg.https_key() is sabnzbd.cfg.https_key.default(): + if ( + sabnzbd.cfg.https_cert() is sabnzbd.cfg.https_cert.default() + and sabnzbd.cfg.https_key() is sabnzbd.cfg.https_key.default() + ): https_cert = sabnzbd.cfg.https_cert.get_path() https_key = sabnzbd.cfg.https_key.get_path() result = create_https_certificates(https_cert, https_key) @@ -844,7 +893,7 @@ def _api_config_regenerate_certs(output, kwargs): def _api_config_test_server(output, kwargs): """ API: accepts output, server-params """ result, msg = test_nntp_server_dict(kwargs) - response = {'result': result, 'message': msg} + response = {"result": result, "message": msg} if output: return report(output, data=response) else: @@ -859,93 +908,93 @@ def _api_config_undefined(output, kwargs): def _api_server_stats(name, output, kwargs): """ API: accepts output """ sum_t, sum_m, sum_w, sum_d = 
BPSMeter.do.get_sums() - stats = {'total': sum_t, 'month': sum_m, 'week': sum_w, 'day': sum_d, 'servers': {}} + stats = {"total": sum_t, "month": sum_m, "week": sum_w, "day": sum_d, "servers": {}} for svr in config.get_servers(): t, m, w, d, daily = BPSMeter.do.amounts(svr) - stats['servers'][svr] = {'total': t or 0, 'month': m or 0, 'week': w or 0, 'day': d or 0, 'daily': daily or {}} + stats["servers"][svr] = {"total": t or 0, "month": m or 0, "week": w or 0, "day": d or 0, "daily": daily or {}} - return report(output, keyword='', data=stats) + return report(output, keyword="", data=stats) ############################################################################## _api_table = { - 'server_stats': (_api_server_stats, 2), - 'get_config': (_api_get_config, 3), - 'set_config': (_api_set_config, 3), - 'set_config_default': (_api_set_config_default, 3), - 'del_config': (_api_del_config, 3), - 'queue': (_api_queue, 2), - 'options': (_api_options, 2), - 'translate': (_api_translate, 2), - 'addfile': (_api_addfile, 1), - 'retry': (_api_retry, 2), - 'cancel_pp': (_api_cancel_pp, 2), - 'addlocalfile': (_api_addlocalfile, 1), - 'switch': (_api_switch, 2), - 'change_cat': (_api_change_cat, 2), - 'change_script': (_api_change_script, 2), - 'change_opts': (_api_change_opts, 2), - 'fullstatus': (_api_fullstatus, 2), - 'history': (_api_history, 2), - 'get_files': (_api_get_files, 2), - 'addurl': (_api_addurl, 1), - 'addid': (_api_addurl, 1), - 'pause': (_api_pause, 2), - 'resume': (_api_resume, 2), - 'shutdown': (_api_shutdown, 3), - 'warnings': (_api_warnings, 2), - 'config': (_api_config, 2), - 'get_cats': (_api_get_cats, 2), - 'get_scripts': (_api_get_scripts, 2), - 'version': (_api_version, 1), - 'auth': (_api_auth, 1), - 'restart': (_api_restart, 3), - 'restart_repair': (_api_restart_repair, 2), - 'disconnect': (_api_disconnect, 2), - 'osx_icon': (_api_osx_icon, 3), - 'rescan': (_api_rescan, 2), - 'eval_sort': (_api_eval_sort, 2), - 'watched_now': (_api_watched_now, 2), - 
'resume_pp': (_api_resume_pp, 2), - 'pause_pp': (_api_pause_pp, 2), - 'rss_now': (_api_rss_now, 2), - 'browse': (_api_browse, 2), - 'retry_all': (_api_retry_all, 2), - 'reset_quota': (_api_reset_quota, 2), - 'test_email': (_api_test_email, 2), - 'test_windows': (_api_test_windows, 2), - 'test_notif': (_api_test_notif, 2), - 'test_osd': (_api_test_osd, 2), - 'test_pushover': (_api_test_pushover, 2), - 'test_pushbullet': (_api_test_pushbullet, 2), - 'test_prowl': (_api_test_prowl, 2), - 'test_nscript': (_api_test_nscript, 2), + "server_stats": (_api_server_stats, 2), + "get_config": (_api_get_config, 3), + "set_config": (_api_set_config, 3), + "set_config_default": (_api_set_config_default, 3), + "del_config": (_api_del_config, 3), + "queue": (_api_queue, 2), + "options": (_api_options, 2), + "translate": (_api_translate, 2), + "addfile": (_api_addfile, 1), + "retry": (_api_retry, 2), + "cancel_pp": (_api_cancel_pp, 2), + "addlocalfile": (_api_addlocalfile, 1), + "switch": (_api_switch, 2), + "change_cat": (_api_change_cat, 2), + "change_script": (_api_change_script, 2), + "change_opts": (_api_change_opts, 2), + "fullstatus": (_api_fullstatus, 2), + "history": (_api_history, 2), + "get_files": (_api_get_files, 2), + "addurl": (_api_addurl, 1), + "addid": (_api_addurl, 1), + "pause": (_api_pause, 2), + "resume": (_api_resume, 2), + "shutdown": (_api_shutdown, 3), + "warnings": (_api_warnings, 2), + "config": (_api_config, 2), + "get_cats": (_api_get_cats, 2), + "get_scripts": (_api_get_scripts, 2), + "version": (_api_version, 1), + "auth": (_api_auth, 1), + "restart": (_api_restart, 3), + "restart_repair": (_api_restart_repair, 2), + "disconnect": (_api_disconnect, 2), + "osx_icon": (_api_osx_icon, 3), + "rescan": (_api_rescan, 2), + "eval_sort": (_api_eval_sort, 2), + "watched_now": (_api_watched_now, 2), + "resume_pp": (_api_resume_pp, 2), + "pause_pp": (_api_pause_pp, 2), + "rss_now": (_api_rss_now, 2), + "browse": (_api_browse, 2), + "retry_all": (_api_retry_all, 
2), + "reset_quota": (_api_reset_quota, 2), + "test_email": (_api_test_email, 2), + "test_windows": (_api_test_windows, 2), + "test_notif": (_api_test_notif, 2), + "test_osd": (_api_test_osd, 2), + "test_pushover": (_api_test_pushover, 2), + "test_pushbullet": (_api_test_pushbullet, 2), + "test_prowl": (_api_test_prowl, 2), + "test_nscript": (_api_test_nscript, 2), } _api_queue_table = { - 'delete': (_api_queue_delete, 2), - 'delete_nzf': (_api_queue_delete_nzf, 2), - 'rename': (_api_queue_rename, 2), - 'change_complete_action': (_api_queue_change_complete_action, 2), - 'purge': (_api_queue_purge, 2), - 'pause': (_api_queue_pause, 2), - 'resume': (_api_queue_resume, 2), - 'priority': (_api_queue_priority, 2), - 'sort': (_api_queue_sort, 2), - 'rating': (_api_queue_rating, 2) + "delete": (_api_queue_delete, 2), + "delete_nzf": (_api_queue_delete_nzf, 2), + "rename": (_api_queue_rename, 2), + "change_complete_action": (_api_queue_change_complete_action, 2), + "purge": (_api_queue_purge, 2), + "pause": (_api_queue_pause, 2), + "resume": (_api_queue_resume, 2), + "priority": (_api_queue_priority, 2), + "sort": (_api_queue_sort, 2), + "rating": (_api_queue_rating, 2), } _api_config_table = { - 'speedlimit': (_api_config_speedlimit, 2), - 'set_speedlimit': (_api_config_speedlimit, 2), - 'get_speedlimit': (_api_config_get_speedlimit, 2), - 'set_colorscheme': (_api_config_set_colorscheme, 2), - 'set_pause': (_api_config_set_pause, 2), - 'set_apikey': (_api_config_set_apikey, 3), - 'set_nzbkey': (_api_config_set_nzbkey, 3), - 'regenerate_certs': (_api_config_regenerate_certs, 3), - 'test_server': (_api_config_test_server, 2) + "speedlimit": (_api_config_speedlimit, 2), + "set_speedlimit": (_api_config_speedlimit, 2), + "get_speedlimit": (_api_config_get_speedlimit, 2), + "set_colorscheme": (_api_config_set_colorscheme, 2), + "set_pause": (_api_config_set_pause, 2), + "set_apikey": (_api_config_set_apikey, 3), + "set_nzbkey": (_api_config_set_nzbkey, 3), + 
"regenerate_certs": (_api_config_regenerate_certs, 3), + "test_server": (_api_config_test_server, 2), } @@ -953,42 +1002,42 @@ def api_level(cmd, name): """ Return access level required for this API call """ if cmd in _api_table: return _api_table[cmd][1] - if name == 'queue' and cmd in _api_queue_table: + if name == "queue" and cmd in _api_queue_table: return _api_queue_table[cmd][1] - if name == 'config' and cmd in _api_config_table: + if name == "config" and cmd in _api_config_table: return _api_config_table[cmd][1] return 4 -def report(output, error=None, keyword='value', data=None): +def report(output, error=None, keyword="value", data=None): """ Report message in json, xml or plain text If error is set, only an status/error report is made. If no error and no data, only a status report is made. Else, a data report is made (optional 'keyword' for outer XML section). """ - if output == 'json': + if output == "json": content = "application/json;charset=UTF-8" if error: - info = {'status': False, 'error': error} + info = {"status": False, "error": error} elif data is None: - info = {'status': True} + info = {"status": True} else: - if hasattr(data, '__iter__') and not keyword: + if hasattr(data, "__iter__") and not keyword: info = data else: info = {keyword: data} - response = json.dumps(info).encode('utf-8') + response = json.dumps(info).encode("utf-8") - elif output == 'xml': + elif output == "xml": if not keyword: # xml always needs an outer keyword, even when json doesn't - keyword = 'result' + keyword = "result" content = "text/xml" xmlmaker = xml_factory() if error: - status_str = xmlmaker.run('result', {'status': False, 'error': error}) + status_str = xmlmaker.run("result", {"status": False, "error": error}) elif data is None: - status_str = xmlmaker.run('result', {'status': True}) + status_str = xmlmaker.run("result", {"status": True}) else: status_str = xmlmaker.run(keyword, data) response = '\n%s\n' % status_str @@ -998,12 +1047,12 @@ def report(output, 
error=None, keyword='value', data=None): if error: response = "error: %s\n" % error elif not data: - response = 'ok\n' + response = "ok\n" else: - response = '%s\n' % str(data) + response = "%s\n" % str(data) - cherrypy.response.headers['Content-Type'] = content - cherrypy.response.headers['Pragma'] = 'no-cache' + cherrypy.response.headers["Content-Type"] = content + cherrypy.response.headers["Pragma"] = "no-cache" return response @@ -1015,41 +1064,41 @@ class xml_factory: """ def __init__(self): - self.__text = '' + self.__text = "" def _tuple(self, keyw, lst): text = [] for item in lst: text.append(self.run(keyw, item)) - return ''.join(text) + return "".join(text) def _dict(self, keyw, lst): text = [] for key in lst.keys(): text.append(self.run(key, lst[key])) if keyw: - return '<%s>%s\n' % (keyw, ''.join(text), keyw) + return "<%s>%s\n" % (keyw, "".join(text), keyw) else: - return '' + return "" def _list(self, keyw, lst): text = [] for cat in lst: if isinstance(cat, dict): - text.append(self._dict(plural_to_single(keyw, 'slot'), cat)) + text.append(self._dict(plural_to_single(keyw, "slot"), cat)) elif isinstance(cat, list): - text.append(self._list(plural_to_single(keyw, 'list'), cat)) + text.append(self._list(plural_to_single(keyw, "list"), cat)) elif isinstance(cat, tuple): - text.append(self._tuple(plural_to_single(keyw, 'tuple'), cat)) + text.append(self._tuple(plural_to_single(keyw, "tuple"), cat)) else: if not isinstance(cat, str): cat = str(cat) - name = plural_to_single(keyw, 'item') - text.append('<%s>%s\n' % (name, xml_name(cat), name)) + name = plural_to_single(keyw, "item") + text.append("<%s>%s\n" % (name, xml_name(cat), name)) if keyw: - return '<%s>%s\n' % (keyw, ''.join(text), keyw) + return "<%s>%s\n" % (keyw, "".join(text), keyw) else: - return '' + return "" def run(self, keyw, lst): if isinstance(lst, dict): @@ -1059,20 +1108,20 @@ class xml_factory: elif isinstance(lst, tuple): text = self._tuple(keyw, lst) elif keyw: - text = '<%s>%s\n' % 
(keyw, xml_name(lst), keyw) + text = "<%s>%s\n" % (keyw, xml_name(lst), keyw) else: - text = '' + text = "" return text def handle_server_api(output, kwargs): """ Special handler for API-call 'set_config' [servers] """ - name = kwargs.get('keyword') + name = kwargs.get("keyword") if not name: - name = kwargs.get('name') + name = kwargs.get("name") if name: - server = config.get_config('servers', name) + server = config.get_config("servers", name) if server: server.set_dict(kwargs) old_name = name @@ -1085,32 +1134,32 @@ def handle_server_api(output, kwargs): def handle_rss_api(output, kwargs): """ Special handler for API-call 'set_config' [rss] """ - name = kwargs.get('keyword') + name = kwargs.get("keyword") if not name: - name = kwargs.get('name') + name = kwargs.get("name") if not name: return None - feed = config.get_config('rss', name) + feed = config.get_config("rss", name) if feed: feed.set_dict(kwargs) else: config.ConfigRSS(name, kwargs) - action = kwargs.get('filter_action') - if action in ('add', 'update'): + action = kwargs.get("filter_action") + if action in ("add", "update"): # Use the general function, but catch the redirect-raise try: - kwargs['feed'] = name - sabnzbd.interface.ConfigRss('/').internal_upd_rss_filter(**kwargs) + kwargs["feed"] = name + sabnzbd.interface.ConfigRss("/").internal_upd_rss_filter(**kwargs) except cherrypy.HTTPRedirect: pass - elif action == 'delete': + elif action == "delete": # Use the general function, but catch the redirect-raise try: - kwargs['feed'] = name - sabnzbd.interface.ConfigRss('/').internal_del_rss_filter(**kwargs) + kwargs["feed"] = name + sabnzbd.interface.ConfigRss("/").internal_del_rss_filter(**kwargs) except cherrypy.HTTPRedirect: pass @@ -1119,13 +1168,13 @@ def handle_rss_api(output, kwargs): def handle_cat_api(output, kwargs): """ Special handler for API-call 'set_config' [categories] """ - name = kwargs.get('keyword') + name = kwargs.get("keyword") if not name: - name = kwargs.get('name') + name = 
kwargs.get("name") if not name: return None - feed = config.get_config('categories', name) + feed = config.get_config("categories", name) if feed: feed.set_dict(kwargs) else: @@ -1137,41 +1186,41 @@ def build_status(skip_dashboard=False, output=None): # build up header full of basic information info = build_header(trans_functions=not output) - info['logfile'] = sabnzbd.LOGFILE - info['weblogfile'] = sabnzbd.WEBLOGFILE - info['loglevel'] = str(cfg.log_level()) - info['folders'] = NzbQueue.do.scan_jobs(all_jobs=False, action=False) - info['configfn'] = config.get_filename() + info["logfile"] = sabnzbd.LOGFILE + info["weblogfile"] = sabnzbd.WEBLOGFILE + info["loglevel"] = str(cfg.log_level()) + info["folders"] = NzbQueue.do.scan_jobs(all_jobs=False, action=False) + info["configfn"] = config.get_filename() # Dashboard: Speed of System - info['cpumodel'] = getcpu() - info['pystone'] = sabnzbd.PYSTONE_SCORE + info["cpumodel"] = getcpu() + info["pystone"] = sabnzbd.PYSTONE_SCORE # Dashboard: Speed of Download directory: - info['downloaddir'] = cfg.download_dir.get_clipped_path() - info['downloaddirspeed'] = sabnzbd.DOWNLOAD_DIR_SPEED + info["downloaddir"] = cfg.download_dir.get_clipped_path() + info["downloaddirspeed"] = sabnzbd.DOWNLOAD_DIR_SPEED # Dashboard: Speed of Complete directory: - info['completedir'] = cfg.complete_dir.get_clipped_path() - info['completedirspeed'] = sabnzbd.COMPLETE_DIR_SPEED + info["completedir"] = cfg.complete_dir.get_clipped_path() + info["completedirspeed"] = sabnzbd.COMPLETE_DIR_SPEED # Dashboard: Measured download-speed - info['internetbandwidth'] = sabnzbd.INTERNET_BANDWIDTH + info["internetbandwidth"] = sabnzbd.INTERNET_BANDWIDTH # Dashboard: Connection information if not int_conv(skip_dashboard): - info['localipv4'] = localipv4() - info['publicipv4'] = publicipv4() - info['ipv6'] = ipv6() + info["localipv4"] = localipv4() + info["publicipv4"] = publicipv4() + info["ipv6"] = ipv6() # Dashboard: DNS-check try: 
addresslookup(cfg.selftest_host()) - info['dnslookup'] = "OK" + info["dnslookup"] = "OK" except: - info['dnslookup'] = None + info["dnslookup"] = None - info['servers'] = [] - servers = sorted(Downloader.do.servers[:], key=lambda svr: '%02d%s' % (svr.priority, svr.displayname.lower())) + info["servers"] = [] + servers = sorted(Downloader.do.servers[:], key=lambda svr: "%02d%s" % (svr.priority, svr.displayname.lower())) for server in servers: serverconnections = [] connected = 0 @@ -1200,10 +1249,7 @@ def build_status(skip_dashboard=False, output=None): # For the templates or for JSON if output: - thread_info = {'thrdnum': nw.thrdnum, - 'art_name': art_name, - 'nzf_name': nzf_name, - 'nzo_name': nzo_name} + thread_info = {"thrdnum": nw.thrdnum, "art_name": art_name, "nzf_name": nzf_name, "nzo_name": nzo_name} serverconnections.append(thread_info) else: serverconnections.append((nw.thrdnum, art_name, nzf_name, nzo_name)) @@ -1215,47 +1261,68 @@ def build_status(skip_dashboard=False, output=None): connected = server.warning if server.request and not server.info: - connected = T(' Resolving address').replace(' ', '') + connected = T(" Resolving address").replace(" ", "") # For the templates or for JSON if output: - server_info = {'servername': server.displayname, - 'serveractiveconn': connected, - 'servertotalconn': server.threads, - 'serverconnections': serverconnections, - 'serverssl': server.ssl, - 'serversslinfo': server.ssl_info, - 'serveractive': server.active, - 'servererror': server.errormsg, - 'serverpriority': server.priority, - 'serveroptional': server.optional} - info['servers'].append(server_info) + server_info = { + "servername": server.displayname, + "serveractiveconn": connected, + "servertotalconn": server.threads, + "serverconnections": serverconnections, + "serverssl": server.ssl, + "serversslinfo": server.ssl_info, + "serveractive": server.active, + "servererror": server.errormsg, + "serverpriority": server.priority, + "serveroptional": 
server.optional, + } + info["servers"].append(server_info) else: - info['servers'].append((server.displayname, '', connected, serverconnections, server.ssl, - server.active, server.errormsg, server.priority, server.optional)) - - info['warnings'] = sabnzbd.GUIHANDLER.content() + info["servers"].append( + ( + server.displayname, + "", + connected, + serverconnections, + server.ssl, + server.active, + server.errormsg, + server.priority, + server.optional, + ) + ) + + info["warnings"] = sabnzbd.GUIHANDLER.content() return info def build_queue(start=0, limit=0, trans=False, output=None, search=None): # build up header full of basic information - info, pnfo_list, bytespersec, q_size, bytes_left_previous_page = build_queue_header(search=search, start=start, limit=limit, output=output) + info, pnfo_list, bytespersec, q_size, bytes_left_previous_page = build_queue_header( + search=search, start=start, limit=limit, output=output + ) datestart = datetime.datetime.now() - priorities = {TOP_PRIORITY: 'Force', REPAIR_PRIORITY: 'Repair', HIGH_PRIORITY: 'High', NORMAL_PRIORITY: 'Normal', LOW_PRIORITY: 'Low'} + priorities = { + TOP_PRIORITY: "Force", + REPAIR_PRIORITY: "Repair", + HIGH_PRIORITY: "High", + NORMAL_PRIORITY: "Normal", + LOW_PRIORITY: "Low", + } limit = int_conv(limit) start = int_conv(start) - info['refresh_rate'] = str(cfg.refresh_rate()) if cfg.refresh_rate() > 0 else '' - info['scripts'] = list_scripts() - info['categories'] = list_cats(output is None) - info['rating_enable'] = bool(cfg.rating_enable()) - info['noofslots'] = q_size - info['start'] = start - info['limit'] = limit - info['finish'] = info['start'] + info['limit'] + info["refresh_rate"] = str(cfg.refresh_rate()) if cfg.refresh_rate() > 0 else "" + info["scripts"] = list_scripts() + info["categories"] = list_cats(output is None) + info["rating_enable"] = bool(cfg.rating_enable()) + info["noofslots"] = q_size + info["start"] = start + info["limit"] = limit + info["finish"] = info["start"] + 
info["limit"] n = start running_bytes = bytes_left_previous_page @@ -1268,77 +1335,81 @@ def build_queue(start=0, limit=0, trans=False, output=None, search=None): is_propagating = (pnfo.avg_stamp + float(cfg.propagation_delay() * 60)) > time.time() status = pnfo.status priority = pnfo.priority - mbleft = (bytesleft / MEBI) - mb = (bytes_total / MEBI) + mbleft = bytesleft / MEBI + mb = bytes_total / MEBI slot = {} - slot['index'] = n - slot['nzo_id'] = str(nzo_id) - slot['unpackopts'] = str(opts_to_pp(pnfo.repair, pnfo.unpack, pnfo.delete)) - slot['priority'] = priorities[priority] if priority >= LOW_PRIORITY else priorities[NORMAL_PRIORITY] - slot['script'] = pnfo.script if pnfo.script else 'None' - slot['filename'] = pnfo.filename - slot['labels'] = pnfo.labels - slot['password'] = pnfo.password if pnfo.password else '' - slot['cat'] = pnfo.category if pnfo.category else 'None' - slot['mbleft'] = "%.2f" % mbleft - slot['mb'] = "%.2f" % mb - slot['size'] = format_bytes(bytes_total) - slot['sizeleft'] = format_bytes(bytesleft) - slot['percentage'] = "%s" % (int(((mb - mbleft) / mb) * 100)) if mb != mbleft else '0' - slot['mbmissing'] = "%.2f" % (pnfo.bytes_missing / MEBI) - slot['direct_unpack'] = pnfo.direct_unpack + slot["index"] = n + slot["nzo_id"] = str(nzo_id) + slot["unpackopts"] = str(opts_to_pp(pnfo.repair, pnfo.unpack, pnfo.delete)) + slot["priority"] = priorities[priority] if priority >= LOW_PRIORITY else priorities[NORMAL_PRIORITY] + slot["script"] = pnfo.script if pnfo.script else "None" + slot["filename"] = pnfo.filename + slot["labels"] = pnfo.labels + slot["password"] = pnfo.password if pnfo.password else "" + slot["cat"] = pnfo.category if pnfo.category else "None" + slot["mbleft"] = "%.2f" % mbleft + slot["mb"] = "%.2f" % mb + slot["size"] = format_bytes(bytes_total) + slot["sizeleft"] = format_bytes(bytesleft) + slot["percentage"] = "%s" % (int(((mb - mbleft) / mb) * 100)) if mb != mbleft else "0" + slot["mbmissing"] = "%.2f" % (pnfo.bytes_missing 
/ MEBI) + slot["direct_unpack"] = pnfo.direct_unpack if not output: - slot['mb_fmt'] = locale.format_string('%d', int(mb), True) - slot['mbdone_fmt'] = locale.format_string('%d', int(mb - mbleft), True) + slot["mb_fmt"] = locale.format_string("%d", int(mb), True) + slot["mbdone_fmt"] = locale.format_string("%d", int(mb - mbleft), True) if not Downloader.do.paused and status not in (Status.PAUSED, Status.FETCHING, Status.GRABBING): if is_propagating: - slot['status'] = Status.PROP + slot["status"] = Status.PROP elif status == Status.CHECKING: - slot['status'] = Status.CHECKING + slot["status"] = Status.CHECKING else: - slot['status'] = Status.DOWNLOADING + slot["status"] = Status.DOWNLOADING else: # Ensure compatibility of API status if status == Status.DELETED or priority == TOP_PRIORITY: status = Status.DOWNLOADING - slot['status'] = "%s" % status - - if (Downloader.do.paused or Downloader.do.postproc or is_propagating or - status not in (Status.DOWNLOADING, Status.FETCHING, Status.QUEUED)) and priority != TOP_PRIORITY: - slot['timeleft'] = '0:00:00' - slot['eta'] = 'unknown' + slot["status"] = "%s" % status + + if ( + Downloader.do.paused + or Downloader.do.postproc + or is_propagating + or status not in (Status.DOWNLOADING, Status.FETCHING, Status.QUEUED) + ) and priority != TOP_PRIORITY: + slot["timeleft"] = "0:00:00" + slot["eta"] = "unknown" else: running_bytes += bytesleft - slot['timeleft'] = calc_timeleft(running_bytes, bytespersec) + slot["timeleft"] = calc_timeleft(running_bytes, bytespersec) try: datestart = datestart + datetime.timedelta(seconds=bytesleft / bytespersec) # new eta format: 16:00 Fri 07 Feb - slot['eta'] = datestart.strftime(time_format('%H:%M %a %d %b')) + slot["eta"] = datestart.strftime(time_format("%H:%M %a %d %b")) except: datestart = datetime.datetime.now() - slot['eta'] = 'unknown' + slot["eta"] = "unknown" # Do not show age when it's not known if average_date.year < 2000: - slot['avg_age'] = '-' + slot["avg_age"] = "-" else: - 
slot['avg_age'] = calc_age(average_date, bool(trans)) + slot["avg_age"] = calc_age(average_date, bool(trans)) rating = Rating.do.get_rating_by_nzo(nzo_id) - slot['has_rating'] = rating is not None + slot["has_rating"] = rating is not None if rating: - slot['rating_avg_video'] = rating.avg_video - slot['rating_avg_audio'] = rating.avg_audio + slot["rating_avg_video"] = rating.avg_video + slot["rating_avg_audio"] = rating.avg_audio slotinfo.append(slot) n += 1 if slotinfo: - info['slots'] = slotinfo + info["slots"] = slotinfo else: - info['slots'] = [] + info["slots"] = [] return info, pnfo_list, bytespersec @@ -1365,46 +1436,63 @@ def build_file_list(nzo_id): queued_files = pnfo.queued_files for nzf in finished_files: - jobs.append({'filename': nzf.filename if nzf.filename else nzf.subject, - 'mbleft': "%.2f" % (nzf.bytes_left / MEBI), - 'mb': "%.2f" % (nzf.bytes / MEBI), - 'bytes': "%.2f" % nzf.bytes, - 'age': calc_age(nzf.date), - 'nzf_id': nzf.nzf_id, - 'status': 'finished'}) + jobs.append( + { + "filename": nzf.filename if nzf.filename else nzf.subject, + "mbleft": "%.2f" % (nzf.bytes_left / MEBI), + "mb": "%.2f" % (nzf.bytes / MEBI), + "bytes": "%.2f" % nzf.bytes, + "age": calc_age(nzf.date), + "nzf_id": nzf.nzf_id, + "status": "finished", + } + ) for nzf in active_files: - jobs.append({'filename': nzf.filename if nzf.filename else nzf.subject, - 'mbleft': "%.2f" % (nzf.bytes_left / MEBI), - 'mb': "%.2f" % (nzf.bytes / MEBI), - 'bytes': "%.2f" % nzf.bytes, - 'age': calc_age(nzf.date), - 'nzf_id': nzf.nzf_id, - 'status': 'active'}) + jobs.append( + { + "filename": nzf.filename if nzf.filename else nzf.subject, + "mbleft": "%.2f" % (nzf.bytes_left / MEBI), + "mb": "%.2f" % (nzf.bytes / MEBI), + "bytes": "%.2f" % nzf.bytes, + "age": calc_age(nzf.date), + "nzf_id": nzf.nzf_id, + "status": "active", + } + ) for nzf in queued_files: - jobs.append({'filename': nzf.filename if nzf.filename else nzf.subject, - 'set': nzf.setname, - 'mbleft': "%.2f" % (nzf.bytes_left / 
MEBI), - 'mb': "%.2f" % (nzf.bytes / MEBI), - 'bytes': "%.2f" % nzf.bytes, - 'age': calc_age(nzf.date), - 'nzf_id': nzf.nzf_id, - 'status': 'queued'}) + jobs.append( + { + "filename": nzf.filename if nzf.filename else nzf.subject, + "set": nzf.setname, + "mbleft": "%.2f" % (nzf.bytes_left / MEBI), + "mb": "%.2f" % (nzf.bytes / MEBI), + "bytes": "%.2f" % nzf.bytes, + "age": calc_age(nzf.date), + "nzf_id": nzf.nzf_id, + "status": "queued", + } + ) return jobs + def options_list(output): - return report(output, keyword='options', data={ - 'sabyenc': sabnzbd.decoder.SABYENC_ENABLED, - 'par2': sabnzbd.newsunpack.PAR2_COMMAND, - 'multipar': sabnzbd.newsunpack.MULTIPAR_COMMAND, - 'rar': sabnzbd.newsunpack.RAR_COMMAND, - 'zip': sabnzbd.newsunpack.ZIP_COMMAND, - '7zip': sabnzbd.newsunpack.SEVEN_COMMAND, - 'nice': sabnzbd.newsunpack.NICE_COMMAND, - 'ionice': sabnzbd.newsunpack.IONICE_COMMAND - }) + return report( + output, + keyword="options", + data={ + "sabyenc": sabnzbd.decoder.SABYENC_ENABLED, + "par2": sabnzbd.newsunpack.PAR2_COMMAND, + "multipar": sabnzbd.newsunpack.MULTIPAR_COMMAND, + "rar": sabnzbd.newsunpack.RAR_COMMAND, + "zip": sabnzbd.newsunpack.ZIP_COMMAND, + "7zip": sabnzbd.newsunpack.SEVEN_COMMAND, + "nice": sabnzbd.newsunpack.NICE_COMMAND, + "ionice": sabnzbd.newsunpack.IONICE_COMMAND, + }, + ) def retry_job(job, new_nzb=None, password=None): @@ -1430,8 +1518,8 @@ def retry_all_jobs(): items = sabnzbd.api.build_history()[0] nzo_ids = [] for item in items: - if item['retry']: - nzo_ids.append(retry_job(item['nzo_id'])) + if item["retry"]: + nzo_ids.append(retry_job(item["nzo_id"])) return nzo_ids @@ -1456,15 +1544,17 @@ def del_hist_job(job, del_files): def Tspec(txt): """ Translate special terms """ - if txt == 'None': - return T('None') - elif txt in ('Default', '*'): - return T('Default') + if txt == "None": + return T("None") + elif txt in ("Default", "*"): + return T("Default") else: return txt _SKIN_CACHE = {} # Stores pre-translated acronyms + + def 
Ttemplate(txt): """ Translation function for Skin texts This special is to be used in interface.py for template processing @@ -1476,7 +1566,7 @@ def Ttemplate(txt): else: # We need to remove the " and ' to be JS/JSON-string-safe # Saving it in dictionary is 20x faster on next look-up - tra = T(SKIN_TEXT.get(txt, txt)).replace('"', '"').replace("'", ''') + tra = T(SKIN_TEXT.get(txt, txt)).replace('"', """).replace("'", "'") _SKIN_CACHE[txt] = tra return tra @@ -1488,7 +1578,7 @@ def clear_trans_cache(): sabnzbd.WEBUI_READY = True -def build_header(webdir='', output=None, trans_functions=True): +def build_header(webdir="", output=None, trans_functions=True): """ Build the basic header """ try: uptime = calc_age(sabnzbd.START) @@ -1500,7 +1590,7 @@ def build_header(webdir='', output=None, trans_functions=True): speed_limit = 100 speed_limit_abs = Downloader.do.get_limit_abs() if speed_limit_abs <= 0: - speed_limit_abs = '' + speed_limit_abs = "" diskspace_info = diskspace() @@ -1510,57 +1600,57 @@ def build_header(webdir='', output=None, trans_functions=True): if not output: # These are functions, and cause problems for JSON if trans_functions: - header['T'] = Ttemplate - header['Tspec'] = Tspec + header["T"] = Ttemplate + header["Tspec"] = Tspec - header['uptime'] = uptime - header['color_scheme'] = sabnzbd.WEB_COLOR or '' - header['helpuri'] = 'https://sabnzbd.org/wiki/' + header["uptime"] = uptime + header["color_scheme"] = sabnzbd.WEB_COLOR or "" + header["helpuri"] = "https://sabnzbd.org/wiki/" - header['restart_req'] = sabnzbd.RESTART_REQ - header['pid'] = os.getpid() - header['active_lang'] = cfg.language() + header["restart_req"] = sabnzbd.RESTART_REQ + header["pid"] = os.getpid() + header["active_lang"] = cfg.language() - header['my_lcldata'] = clip_path(sabnzbd.DIR_LCLDATA) - header['my_home'] = clip_path(sabnzbd.DIR_HOME) - header['webdir'] = webdir or sabnzbd.WEB_DIR - header['url_base'] = cfg.url_base() + header["my_lcldata"] = 
clip_path(sabnzbd.DIR_LCLDATA) + header["my_home"] = clip_path(sabnzbd.DIR_HOME) + header["webdir"] = webdir or sabnzbd.WEB_DIR + header["url_base"] = cfg.url_base() - header['nt'] = sabnzbd.WIN32 - header['darwin'] = sabnzbd.DARWIN + header["nt"] = sabnzbd.WIN32 + header["darwin"] = sabnzbd.DARWIN - header['power_options'] = sabnzbd.WIN32 or sabnzbd.DARWIN or sabnzbd.LINUX_POWER - header['pp_pause_event'] = sabnzbd.scheduler.pp_pause_event() + header["power_options"] = sabnzbd.WIN32 or sabnzbd.DARWIN or sabnzbd.LINUX_POWER + header["pp_pause_event"] = sabnzbd.scheduler.pp_pause_event() - header['apikey'] = cfg.api_key() - header['new_release'], header['new_rel_url'] = sabnzbd.NEW_VERSION + header["apikey"] = cfg.api_key() + header["new_release"], header["new_rel_url"] = sabnzbd.NEW_VERSION - header['version'] = sabnzbd.__version__ - header['paused'] = Downloader.do.paused or Downloader.do.postproc - header['pause_int'] = scheduler.pause_int() - header['paused_all'] = sabnzbd.PAUSED_ALL + header["version"] = sabnzbd.__version__ + header["paused"] = Downloader.do.paused or Downloader.do.postproc + header["pause_int"] = scheduler.pause_int() + header["paused_all"] = sabnzbd.PAUSED_ALL - header['diskspace1'] = "%.2f" % diskspace_info['download_dir'][1] - header['diskspace2'] = "%.2f" % diskspace_info['complete_dir'][1] - header['diskspace1_norm'] = to_units(diskspace_info['download_dir'][1] * GIGI) - header['diskspace2_norm'] = to_units(diskspace_info['complete_dir'][1] * GIGI) - header['diskspacetotal1'] = "%.2f" % diskspace_info['download_dir'][0] - header['diskspacetotal2'] = "%.2f" % diskspace_info['complete_dir'][0] - header['loadavg'] = loadavg() - header['speedlimit'] = "{1:0.{0}f}".format(int(speed_limit % 1 > 0), speed_limit) - header['speedlimit_abs'] = "%s" % speed_limit_abs + header["diskspace1"] = "%.2f" % diskspace_info["download_dir"][1] + header["diskspace2"] = "%.2f" % diskspace_info["complete_dir"][1] + header["diskspace1_norm"] = 
to_units(diskspace_info["download_dir"][1] * GIGI) + header["diskspace2_norm"] = to_units(diskspace_info["complete_dir"][1] * GIGI) + header["diskspacetotal1"] = "%.2f" % diskspace_info["download_dir"][0] + header["diskspacetotal2"] = "%.2f" % diskspace_info["complete_dir"][0] + header["loadavg"] = loadavg() + header["speedlimit"] = "{1:0.{0}f}".format(int(speed_limit % 1 > 0), speed_limit) + header["speedlimit_abs"] = "%s" % speed_limit_abs - header['have_warnings'] = str(sabnzbd.GUIHANDLER.count()) - header['finishaction'] = sabnzbd.QUEUECOMPLETE + header["have_warnings"] = str(sabnzbd.GUIHANDLER.count()) + header["finishaction"] = sabnzbd.QUEUECOMPLETE - header['quota'] = to_units(BPSMeter.do.quota) - header['have_quota'] = bool(BPSMeter.do.quota > 0.0) - header['left_quota'] = to_units(BPSMeter.do.left) + header["quota"] = to_units(BPSMeter.do.quota) + header["have_quota"] = bool(BPSMeter.do.quota > 0.0) + header["left_quota"] = to_units(BPSMeter.do.left) anfo = ArticleCache.do.cache_info() - header['cache_art'] = str(anfo.article_sum) - header['cache_size'] = format_bytes(anfo.cache_size) - header['cache_max'] = str(anfo.cache_limit) + header["cache_art"] = str(anfo.article_sum) + header["cache_size"] = format_bytes(anfo.cache_size) + header["cache_max"] = str(anfo.cache_limit) return header @@ -1576,34 +1666,34 @@ def build_queue_header(search=None, start=0, limit=0, output=None): bytesleft = qnfo.bytes_left bytes_total = qnfo.bytes - header['kbpersec'] = "%.2f" % (bytespersec / KIBI) - header['speed'] = to_units(bytespersec) - header['mbleft'] = "%.2f" % (bytesleft / MEBI) - header['mb'] = "%.2f" % (bytes_total / MEBI) - header['sizeleft'] = format_bytes(bytesleft) - header['size'] = format_bytes(bytes_total) - header['noofslots_total'] = qnfo.q_fullsize + header["kbpersec"] = "%.2f" % (bytespersec / KIBI) + header["speed"] = to_units(bytespersec) + header["mbleft"] = "%.2f" % (bytesleft / MEBI) + header["mb"] = "%.2f" % (bytes_total / MEBI) + 
header["sizeleft"] = format_bytes(bytesleft) + header["size"] = format_bytes(bytes_total) + header["noofslots_total"] = qnfo.q_fullsize if Downloader.do.paused or Downloader.do.postproc: status = Status.PAUSED elif bytespersec > 0: status = Status.DOWNLOADING else: - status = 'Idle' - header['status'] = status - header['timeleft'] = calc_timeleft(bytesleft, bytespersec) + status = "Idle" + header["status"] = status + header["timeleft"] = calc_timeleft(bytesleft, bytespersec) try: datestart = datetime.datetime.now() + datetime.timedelta(seconds=bytesleft / bytespersec) # new eta format: 16:00 Fri 07 Feb - header['eta'] = datestart.strftime(time_format('%H:%M %a %d %b')) + header["eta"] = datestart.strftime(time_format("%H:%M %a %d %b")) except: - header['eta'] = T('unknown') + header["eta"] = T("unknown") return header, qnfo.list, bytespersec, qnfo.q_fullsize, qnfo.bytes_left_previous_page -def build_history(start=None, limit=None,search=None, failed_only=0, categories=None, output=None): +def build_history(start=None, limit=None, search=None, failed_only=0, categories=None, output=None): limit = int_conv(limit) if not limit: limit = 1000000 @@ -1612,11 +1702,11 @@ def build_history(start=None, limit=None,search=None, failed_only=0, categories= def matches_search(text, search_text): # Replace * with .* and ' ' with . - search_text = search_text.strip().replace('*', '.*').replace(' ', '.*') + '.*?' + search_text = search_text.strip().replace("*", ".*").replace(" ", ".*") + ".*?" 
try: re_search = re.compile(search_text, re.I) except: - logging.error(T('Failed to compile regex for search term: %s'), search_text) + logging.error(T("Failed to compile regex for search term: %s"), search_text) return False return re_search.search(text) @@ -1636,7 +1726,7 @@ def build_history(start=None, limit=None,search=None, failed_only=0, categories= else: try: if limit: - queue = queue[start:start + limit] + queue = queue[start : start + limit] else: queue = queue[start:] except: @@ -1672,32 +1762,32 @@ def build_history(start=None, limit=None,search=None, failed_only=0, categories= items.reverse() for item in items: - item['size'] = format_bytes(item['bytes']) + item["size"] = format_bytes(item["bytes"]) - if 'loaded' not in item: - item['loaded'] = False + if "loaded" not in item: + item["loaded"] = False - path = item.get('path', '') + path = item.get("path", "") - item['retry'] = int_conv(item.get('status') == Status.FAILED and path and os.path.exists(path)) + item["retry"] = int_conv(item.get("status") == Status.FAILED and path and os.path.exists(path)) # Retry of failed URL-fetch - if item['report'] == 'future': - item['retry'] = True + if item["report"] == "future": + item["retry"] = True if Rating.do: - rating = Rating.do.get_rating_by_nzo(item['nzo_id']) + rating = Rating.do.get_rating_by_nzo(item["nzo_id"]) else: rating = None - item['has_rating'] = rating is not None + item["has_rating"] = rating is not None if rating: - item['rating_avg_video'] = rating.avg_video - item['rating_avg_audio'] = rating.avg_audio - item['rating_avg_vote_up'] = rating.avg_vote_up - item['rating_avg_vote_down'] = rating.avg_vote_down - item['rating_user_video'] = rating.user_video - item['rating_user_audio'] = rating.user_audio - item['rating_user_vote'] = rating.user_vote + item["rating_avg_video"] = rating.avg_video + item["rating_avg_audio"] = rating.avg_audio + item["rating_avg_vote_up"] = rating.avg_vote_up + item["rating_avg_vote_down"] = rating.avg_vote_down + 
item["rating_user_video"] = rating.user_video + item["rating_user_audio"] = rating.user_audio + item["rating_user_vote"] = rating.user_vote total_items += full_queue_size fetched_items = len(items) @@ -1718,19 +1808,41 @@ def get_active_history(queue=None, items=None): for nzo in queue: history = build_history_info(nzo) item = {} - item['completed'], item['name'], item['nzb_name'], item['category'], item['pp'], item['script'], item['report'], \ - item['url'], item['status'], item['nzo_id'], item['storage'], item['path'], item['script_log'], \ - item['script_line'], item['download_time'], item['postproc_time'], item['stage_log'], \ - item['downloaded'], item['completeness'], item['fail_message'], item['url_info'], item['bytes'], \ - _, _, item['password'] = history - item['action_line'] = nzo.action_line + ( + item["completed"], + item["name"], + item["nzb_name"], + item["category"], + item["pp"], + item["script"], + item["report"], + item["url"], + item["status"], + item["nzo_id"], + item["storage"], + item["path"], + item["script_log"], + item["script_line"], + item["download_time"], + item["postproc_time"], + item["stage_log"], + item["downloaded"], + item["completeness"], + item["fail_message"], + item["url_info"], + item["bytes"], + _, + _, + item["password"], + ) = history + item["action_line"] = nzo.action_line item = unpack_history_info(item) - item['loaded'] = nzo.pp_active - if item['bytes']: - item['size'] = format_bytes(item['bytes']) + item["loaded"] = nzo.pp_active + if item["bytes"]: + item["size"] = format_bytes(item["bytes"]) else: - item['size'] = '' + item["size"] = "" items.append(item) return items @@ -1738,33 +1850,33 @@ def get_active_history(queue=None, items=None): def format_bytes(bytes_string): b = to_units(bytes_string) - if b == '': + if b == "": return b else: - return b + 'B' + return b + "B" def calc_timeleft(bytesleft, bps): """ Calculate the time left in the format HH:MM:SS """ try: if bytesleft <= 0: - return '0:00:00' + return 
"0:00:00" totalseconds = int(bytesleft / bps) minutes, seconds = divmod(totalseconds, 60) hours, minutes = divmod(minutes, 60) days, hours = divmod(hours, 24) if minutes < 10: - minutes = '0%s' % minutes + minutes = "0%s" % minutes if seconds < 10: - seconds = '0%s' % seconds + seconds = "0%s" % seconds if days > 0: if hours < 10: - hours = '0%s' % hours - return '%s:%s:%s:%s' % (days, hours, minutes, seconds) + hours = "0%s" % hours + return "%s:%s:%s:%s" % (days, hours, minutes, seconds) else: - return '%s:%s:%s' % (hours, minutes, seconds) + return "%s:%s:%s" % (hours, minutes, seconds) except: - return '0:00:00' + return "0:00:00" def list_scripts(default=False, none=True): @@ -1774,15 +1886,20 @@ def list_scripts(default=False, none=True): if path and os.access(path, os.R_OK): for script in globber_full(path): if os.path.isfile(script): - if (sabnzbd.WIN32 and os.path.splitext(script)[1].lower() in PATHEXT and - not win32api.GetFileAttributes(script) & win32file.FILE_ATTRIBUTE_HIDDEN) or \ - script.endswith('.py') or \ - (not sabnzbd.WIN32 and userxbit(script) and not os.path.basename(script).startswith('.')): + if ( + ( + sabnzbd.WIN32 + and os.path.splitext(script)[1].lower() in PATHEXT + and not win32api.GetFileAttributes(script) & win32file.FILE_ATTRIBUTE_HIDDEN + ) + or script.endswith(".py") + or (not sabnzbd.WIN32 and userxbit(script) and not os.path.basename(script).startswith(".")) + ): lst.append(os.path.basename(script)) if none: - lst.insert(0, 'None') + lst.insert(0, "None") if default: - lst.insert(0, 'Default') + lst.insert(0, "Default") return lst @@ -1790,25 +1907,25 @@ def list_cats(default=True): """ Return list of (ordered) categories, when default==False use '*' for Default category """ - lst = [cat['name'] for cat in config.get_ordered_categories()] + lst = [cat["name"] for cat in config.get_ordered_categories()] if default: - lst.remove('*') - lst.insert(0, 'Default') + lst.remove("*") + lst.insert(0, "Default") return lst 
_PLURAL_TO_SINGLE = { - 'categories': 'category', - 'servers': 'server', - 'rss': 'feed', - 'scripts': 'script', - 'warnings': 'warning', - 'files': 'file', - 'jobs': 'job' + "categories": "category", + "servers": "server", + "rss": "feed", + "scripts": "script", + "warnings": "warning", + "files": "file", + "jobs": "job", } -def plural_to_single(kw, def_kw=''): +def plural_to_single(kw, def_kw=""): try: return _PLURAL_TO_SINGLE[kw] except KeyError: @@ -1817,16 +1934,16 @@ def plural_to_single(kw, def_kw=''): def del_from_section(kwargs): """ Remove keyword in section """ - section = kwargs.get('section', '') - if section in ('servers', 'rss', 'categories'): - keyword = kwargs.get('keyword') + section = kwargs.get("section", "") + if section in ("servers", "rss", "categories"): + keyword = kwargs.get("keyword") if keyword: item = config.get_config(section, keyword) if item: item.delete() del item config.save_config() - if section == 'servers': + if section == "servers": Downloader.do.update_server(keyword, None) return True else: @@ -1835,7 +1952,7 @@ def del_from_section(kwargs): def history_remove_failed(): """ Remove all failed jobs from history, including files """ - logging.info('Scheduled removal of all failed jobs') + logging.info("Scheduled removal of all failed jobs") history_db = HistoryDB() del_job_files(history_db.get_failed_paths()) history_db.remove_failed() @@ -1844,7 +1961,7 @@ def history_remove_failed(): def history_remove_completed(): """ Remove all completed jobs from history """ - logging.info('Scheduled removal of all completed jobs') + logging.info("Scheduled removal of all completed jobs") history_db = HistoryDB() history_db.remove_completed() history_db.close() diff --git a/sabnzbd/articlecache.py b/sabnzbd/articlecache.py index 14ad4ef..d79ad08 100644 --- a/sabnzbd/articlecache.py +++ b/sabnzbd/articlecache.py @@ -39,8 +39,8 @@ class ArticleCache: self.__cache_limit_org = 0 self.__cache_limit = 0 self.__cache_size = 0 - self.__article_list 
= [] # List of buffered articles - self.__article_table = {} # Dict of buffered articles + self.__article_list = [] # List of buffered articles + self.__article_table = {} # Dict of buffered articles # Limit for the decoder is based on the total available cache # so it can be larger on memory-rich systems @@ -50,7 +50,7 @@ class ArticleCache: # For 64 bit we allow up to 4GB, in case somebody wants that self.__cache_upper_limit = GIGI if sabnzbd.DARWIN or sabnzbd.WIN64 or (struct.calcsize("P") * 8) == 64: - self.__cache_upper_limit = 4*GIGI + self.__cache_upper_limit = 4 * GIGI ArticleCache.do = self @@ -67,7 +67,7 @@ class ArticleCache: # The decoder-limit should not be larger than 1/3th of the whole cache # Calculated in number of articles, assuming 1 article = 1MB max - decoder_cache_limit = int(min(self.__cache_limit/3/MEBI, LIMIT_DECODE_QUEUE)) + decoder_cache_limit = int(min(self.__cache_limit / 3 / MEBI, LIMIT_DECODE_QUEUE)) # The cache should also not be too small self.decoder_cache_article_limit = max(decoder_cache_limit, MIN_DECODE_QUEUE) @@ -128,8 +128,7 @@ class ArticleCache: self.__article_list.remove(article) self.free_reserved_space(len(data)) elif article.art_id: - data = sabnzbd.load_data(article.art_id, nzo.workpath, remove=True, - do_pickle=False, silent=True) + data = sabnzbd.load_data(article.art_id, nzo.workpath, remove=True, do_pickle=False, silent=True) nzo.remove_saved_article(article) return data diff --git a/sabnzbd/assembler.py b/sabnzbd/assembler.py index 7cf3454..fdcf247 100644 --- a/sabnzbd/assembler.py +++ b/sabnzbd/assembler.py @@ -29,8 +29,7 @@ import hashlib import sabnzbd from sabnzbd.misc import get_all_passwords -from sabnzbd.filesystem import set_permissions, clip_path, has_win_device, \ - diskspace, get_filename, get_ext +from sabnzbd.filesystem import set_permissions, clip_path, has_win_device, diskspace, get_filename, get_ext from sabnzbd.constants import Status, GIGI, MAX_ASSEMBLER_QUEUE import sabnzbd.cfg as cfg from 
sabnzbd.articlecache import ArticleCache @@ -70,10 +69,13 @@ class Assembler(Thread): if nzf: # Check if enough disk space is free after each file is done # If not enough space left, pause downloader and send email - if file_done and diskspace(force=True)['download_dir'][1] < (cfg.download_free.get_float() + nzf.bytes) / GIGI: + if ( + file_done + and diskspace(force=True)["download_dir"][1] < (cfg.download_free.get_float() + nzf.bytes) / GIGI + ): # Only warn and email once if not sabnzbd.downloader.Downloader.do.paused: - logging.warning(T('Too little diskspace forcing PAUSE')) + logging.warning(T("Too little diskspace forcing PAUSE")) # Pause downloader, but don't save, since the disk is almost full! sabnzbd.downloader.Downloader.do.pause() sabnzbd.emailer.diskfull_mail() @@ -84,7 +86,7 @@ class Assembler(Thread): filepath = nzf.prepare_filepath() if filepath: - logging.debug('Decoding part of %s', filepath) + logging.debug("Decoding part of %s", filepath) try: self.assemble(nzf, file_done) except IOError as err: @@ -92,16 +94,16 @@ class Assembler(Thread): if not nzo.deleted and not nzo.is_gone() and not nzo.pp_active: # 28 == disk full => pause downloader if err.errno == 28: - logging.error(T('Disk full! Forcing Pause')) + logging.error(T("Disk full! 
Forcing Pause")) else: - logging.error(T('Disk error on creating file %s'), clip_path(filepath)) + logging.error(T("Disk error on creating file %s"), clip_path(filepath)) # Log traceback - logging.info('Traceback: ', exc_info=True) + logging.info("Traceback: ", exc_info=True) # Pause without saving sabnzbd.downloader.Downloader.do.pause() continue except: - logging.error(T('Fatal error in Assembler'), exc_info=True) + logging.error(T("Fatal error in Assembler"), exc_info=True) break # Continue after partly written data @@ -109,7 +111,7 @@ class Assembler(Thread): continue # Clean-up admin data - logging.info('Decoding finished %s', filepath) + logging.info("Decoding finished %s", filepath) nzf.remove_admin() # Do rar-related processing @@ -118,23 +120,43 @@ class Assembler(Thread): rar_encrypted, unwanted_file = check_encrypted_and_unwanted_files(nzo, filepath) if rar_encrypted: if cfg.pause_on_pwrar() == 1: - logging.warning(remove_warning_label(T('WARNING: Paused job "%s" because of encrypted RAR file (if supplied, all passwords were tried)')), nzo.final_name) + logging.warning( + remove_warning_label( + T( + 'WARNING: Paused job "%s" because of encrypted RAR file (if supplied, all passwords were tried)' + ) + ), + nzo.final_name, + ) nzo.pause() else: - logging.warning(remove_warning_label(T('WARNING: Aborted job "%s" because of encrypted RAR file (if supplied, all passwords were tried)')), nzo.final_name) - nzo.fail_msg = T('Aborted, encryption detected') + logging.warning( + remove_warning_label( + T( + 'WARNING: Aborted job "%s" because of encrypted RAR file (if supplied, all passwords were tried)' + ) + ), + nzo.final_name, + ) + nzo.fail_msg = T("Aborted, encryption detected") sabnzbd.nzbqueue.NzbQueue.do.end_job(nzo) if unwanted_file: - logging.warning(remove_warning_label(T('WARNING: In "%s" unwanted extension in RAR file. 
Unwanted file is %s ')), nzo.final_name, unwanted_file) - logging.debug(T('Unwanted extension is in rar file %s'), filepath) + logging.warning( + remove_warning_label( + T('WARNING: In "%s" unwanted extension in RAR file. Unwanted file is %s ') + ), + nzo.final_name, + unwanted_file, + ) + logging.debug(T("Unwanted extension is in rar file %s"), filepath) if cfg.action_on_unwanted_extensions() == 1 and nzo.unwanted_ext == 0: - logging.debug('Unwanted extension ... pausing') + logging.debug("Unwanted extension ... pausing") nzo.unwanted_ext = 1 nzo.pause() if cfg.action_on_unwanted_extensions() == 2: - logging.debug('Unwanted extension ... aborting') - nzo.fail_msg = T('Aborted, unwanted extension detected') + logging.debug("Unwanted extension ... aborting") + nzo.fail_msg = T("Aborted, unwanted extension detected") sabnzbd.nzbqueue.NzbQueue.do.end_job(nzo) # Add to direct unpack @@ -146,11 +168,19 @@ class Assembler(Thread): filter_output, reason = nzo_filtered_by_rating(nzo) if filter_output == 1: - logging.warning(remove_warning_label(T('WARNING: Paused job "%s" because of rating (%s)')), nzo.final_name, reason) + logging.warning( + remove_warning_label(T('WARNING: Paused job "%s" because of rating (%s)')), + nzo.final_name, + reason, + ) nzo.pause() elif filter_output == 2: - logging.warning(remove_warning_label(T('WARNING: Aborted job "%s" because of rating (%s)')), nzo.final_name, reason) - nzo.fail_msg = T('Aborted, rating filter matched (%s)') % reason + logging.warning( + remove_warning_label(T('WARNING: Aborted job "%s" because of rating (%s)')), + nzo.final_name, + reason, + ) + nzo.fail_msg = T("Aborted, rating filter matched (%s)") % reason sabnzbd.nzbqueue.NzbQueue.do.end_job(nzo) else: @@ -166,7 +196,7 @@ class Assembler(Thread): if not nzf.md5: nzf.md5 = hashlib.md5() - with open(nzf.filepath, 'ab') as fout: + with open(nzf.filepath, "ab") as fout: for article in nzf.decodetable: # Break if deleted during writing if nzf.nzo.status is Status.DELETED: 
@@ -214,21 +244,31 @@ def file_has_articles(nzf): return has -RE_SUBS = re.compile(r'\W+sub|subs|subpack|subtitle|subtitles(?![a-z])', re.I) -SAFE_EXTS = ('.mkv', '.mp4', '.avi', '.wmv', '.mpg', '.webm') +RE_SUBS = re.compile(r"\W+sub|subs|subpack|subtitle|subtitles(?![a-z])", re.I) +SAFE_EXTS = (".mkv", ".mp4", ".avi", ".wmv", ".mpg", ".webm") + + def is_cloaked(nzo, path, names): """ Return True if this is likely to be a cloaked encrypted post """ fname = os.path.splitext(get_filename(path.lower()))[0] for name in names: name = get_filename(name.lower()) name, ext = os.path.splitext(name) - if ext == '.rar' and fname.startswith(name) and (len(fname) - len(name)) < 8 and len(names) < 3 and not RE_SUBS.search(fname): + if ( + ext == ".rar" + and fname.startswith(name) + and (len(fname) - len(name)) < 8 + and len(names) < 3 + and not RE_SUBS.search(fname) + ): # Only warn once if nzo.encrypted == 0: - logging.warning(T('Job "%s" is probably encrypted due to RAR with same name inside this RAR'), nzo.final_name) + logging.warning( + T('Job "%s" is probably encrypted due to RAR with same name inside this RAR'), nzo.final_name + ) nzo.encrypted = 1 return True - elif 'password' in name and ext not in SAFE_EXTS: + elif "password" in name and ext not in SAFE_EXTS: # Only warn once if nzo.encrypted == 0: logging.warning(T('Job "%s" is probably encrypted: "password" in filename "%s"'), nzo.final_name, name) @@ -242,7 +282,9 @@ def check_encrypted_and_unwanted_files(nzo, filepath): encrypted = False unwanted = None - if (cfg.unwanted_extensions() and cfg.action_on_unwanted_extensions()) or (nzo.encrypted == 0 and cfg.pause_on_pwrar()): + if (cfg.unwanted_extensions() and cfg.action_on_unwanted_extensions()) or ( + nzo.encrypted == 0 and cfg.pause_on_pwrar() + ): # These checks should not break the assembler try: # Rarfile freezes on Windows special names, so don't try those! 
@@ -256,7 +298,11 @@ def check_encrypted_and_unwanted_files(nzo, filepath): zf = rarfile.RarFile(filepath, single_file_check=True) # Check for encryption - if nzo.encrypted == 0 and cfg.pause_on_pwrar() and (zf.needs_password() or is_cloaked(nzo, filepath, zf.namelist())): + if ( + nzo.encrypted == 0 + and cfg.pause_on_pwrar() + and (zf.needs_password() or is_cloaked(nzo, filepath, zf.namelist())) + ): # Load all passwords passwords = get_all_passwords(nzo) @@ -290,7 +336,7 @@ def check_encrypted_and_unwanted_files(nzo, filepath): break except Exception as e: # Did we start from the right volume? - if 'need to start extraction from a previous volume' in str(e): + if "need to start extraction from a previous volume" in str(e): return encrypted, unwanted # This one failed pass @@ -312,15 +358,15 @@ def check_encrypted_and_unwanted_files(nzo, filepath): # Check for unwanted extensions if cfg.unwanted_extensions() and cfg.action_on_unwanted_extensions(): for somefile in zf.namelist(): - logging.debug('File contains: %s', somefile) - if get_ext(somefile).replace('.', '').lower() in cfg.unwanted_extensions(): - logging.debug('Unwanted file %s', somefile) + logging.debug("File contains: %s", somefile) + if get_ext(somefile).replace(".", "").lower() in cfg.unwanted_extensions(): + logging.debug("Unwanted file %s", somefile) unwanted = somefile zf.close() del zf except: - logging.info('Error during inspection of RAR-file %s', filepath) - logging.debug('Traceback: ', exc_info=True) + logging.info("Error during inspection of RAR-file %s", filepath) + logging.debug("Traceback: ", exc_info=True) return encrypted, unwanted @@ -343,32 +389,39 @@ def rating_filtered(rating, filename, abort): def check_keyword(keyword): clean_keyword = keyword.strip().lower() return (len(clean_keyword) > 0) and (clean_keyword in filename) + audio = cfg.rating_filter_abort_audio() if abort else cfg.rating_filter_pause_audio() video = cfg.rating_filter_abort_video() if abort else 
cfg.rating_filter_pause_video() spam = cfg.rating_filter_abort_spam() if abort else cfg.rating_filter_pause_spam() spam_confirm = cfg.rating_filter_abort_spam_confirm() if abort else cfg.rating_filter_pause_spam_confirm() encrypted = cfg.rating_filter_abort_encrypted() if abort else cfg.rating_filter_pause_encrypted() - encrypted_confirm = cfg.rating_filter_abort_encrypted_confirm() if abort else cfg.rating_filter_pause_encrypted_confirm() + encrypted_confirm = ( + cfg.rating_filter_abort_encrypted_confirm() if abort else cfg.rating_filter_pause_encrypted_confirm() + ) downvoted = cfg.rating_filter_abort_downvoted() if abort else cfg.rating_filter_pause_downvoted() keywords = cfg.rating_filter_abort_keywords() if abort else cfg.rating_filter_pause_keywords() if (video > 0) and (rating.avg_video > 0) and (rating.avg_video <= video): - return T('video') + return T("video") if (audio > 0) and (rating.avg_audio > 0) and (rating.avg_audio <= audio): - return T('audio') - if (spam and ((rating.avg_spam_cnt > 0) or rating.avg_encrypted_confirm)) or (spam_confirm and rating.avg_spam_confirm): - return T('spam') - if (encrypted and ((rating.avg_encrypted_cnt > 0) or rating.avg_encrypted_confirm)) or (encrypted_confirm and rating.avg_encrypted_confirm): - return T('passworded') + return T("audio") + if (spam and ((rating.avg_spam_cnt > 0) or rating.avg_encrypted_confirm)) or ( + spam_confirm and rating.avg_spam_confirm + ): + return T("spam") + if (encrypted and ((rating.avg_encrypted_cnt > 0) or rating.avg_encrypted_confirm)) or ( + encrypted_confirm and rating.avg_encrypted_confirm + ): + return T("passworded") if downvoted and (rating.avg_vote_up < rating.avg_vote_down): - return T('downvoted') - if any(check_keyword(k) for k in keywords.split(',')): - return T('keywords') + return T("downvoted") + if any(check_keyword(k) for k in keywords.split(",")): + return T("keywords") return None def remove_warning_label(msg): """ Standardize errors by removing obsolete "WARNING:" 
part in all languages """ - if ':' in msg: - return msg.split(':')[1].strip() + if ":" in msg: + return msg.split(":")[1].strip() return msg diff --git a/sabnzbd/bpsmeter.py b/sabnzbd/bpsmeter.py index 5ec2157..5dd10e9 100644 --- a/sabnzbd/bpsmeter.py +++ b/sabnzbd/bpsmeter.py @@ -62,6 +62,8 @@ def this_month(t): _DAYS = (0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31) + + def last_month_day(tm): """ Return last day of this month """ year, month = tm[:2] @@ -105,31 +107,40 @@ class BPSMeter: self.timeline_total = {} self.day_label = time.strftime("%Y-%m-%d") - self.end_of_day = tomorrow(t) # Time that current day will end - self.end_of_week = next_week(t) # Time that current day will end + self.end_of_day = tomorrow(t) # Time that current day will end + self.end_of_week = next_week(t) # Time that current day will end self.end_of_month = next_month(t) # Time that current month will end - self.q_day = 1 # Day of quota reset - self.q_period = 'm' # Daily/Weekly/Monthly quota = d/w/m - self.quota = self.left = 0.0 # Quota and remaining quota - self.have_quota = False # Flag for quota active - self.q_time = 0 # Next reset time for quota - self.q_hour = 0 # Quota reset hour - self.q_minute = 0 # Quota reset minute - self.quota_enabled = True # Scheduled quota enable/disable + self.q_day = 1 # Day of quota reset + self.q_period = "m" # Daily/Weekly/Monthly quota = d/w/m + self.quota = self.left = 0.0 # Quota and remaining quota + self.have_quota = False # Flag for quota active + self.q_time = 0 # Next reset time for quota + self.q_hour = 0 # Quota reset hour + self.q_minute = 0 # Quota reset minute + self.quota_enabled = True # Scheduled quota enable/disable BPSMeter.do = self def save(self): """ Save admin to disk """ - data = (self.last_update, self.grand_total, - self.day_total, self.week_total, self.month_total, - self.end_of_day, self.end_of_week, self.end_of_month, - self.quota, self.left, self.q_time, self.timeline_total - ) + data = ( + self.last_update, + 
self.grand_total, + self.day_total, + self.week_total, + self.month_total, + self.end_of_day, + self.end_of_week, + self.end_of_month, + self.quota, + self.left, + self.q_time, + self.timeline_total, + ) sabnzbd.save_admin(data, BYTES_FILE_NAME) def defaults(self): """ Get the latest data from the database and assign to a fake server """ - logging.debug('Setting default BPS meter values') + logging.debug("Setting default BPS meter values") history_db = sabnzbd.database.HistoryDB() grand, month, week = history_db.get_history_size() history_db.close() @@ -138,11 +149,11 @@ class BPSMeter: self.week_total = {} self.day_total = {} if grand: - self.grand_total['x'] = grand + self.grand_total["x"] = grand if month: - self.month_total['x'] = month + self.month_total["x"] = month if week: - self.week_total['x'] = week + self.week_total["x"] = week self.quota = self.left = cfg.quota_size.get_float() def read(self): @@ -152,10 +163,20 @@ class BPSMeter: self.have_quota = bool(cfg.quota_size()) data = sabnzbd.load_admin(BYTES_FILE_NAME) try: - self.last_update, self.grand_total, \ - self.day_total, self.week_total, self.month_total, \ - self.end_of_day, self.end_of_week, self.end_of_month, \ - self.quota, self.left, self.q_time, self.timeline_total = data + ( + self.last_update, + self.grand_total, + self.day_total, + self.week_total, + self.month_total, + self.end_of_day, + self.end_of_week, + self.end_of_month, + self.quota, + self.left, + self.q_time, + self.timeline_total, + ) = data if abs(quota - self.quota) > 0.5: self.change_quota() res = self.reset_quota() @@ -210,7 +231,7 @@ class BPSMeter: if server not in self.timeline_total: self.timeline_total[server] = {} if self.day_label not in self.timeline_total[server]: - self.timeline_total[server][self.day_label]= 0 + self.timeline_total[server][self.day_label] = 0 self.timeline_total[server][self.day_label] += amount # Quota check @@ -219,7 +240,7 @@ class BPSMeter: if self.left <= 0.0: if 
sabnzbd.downloader.Downloader.do and not sabnzbd.downloader.Downloader.do.paused: sabnzbd.downloader.Downloader.do.pause() - logging.warning(T('Quota spent, pausing downloading')) + logging.warning(T("Quota spent, pausing downloading")) # Speedometer try: @@ -261,22 +282,26 @@ class BPSMeter: # Always trim the list to the max-length if len(self.bps_list) > self.bps_list_max: - self.bps_list = self.bps_list[len(self.bps_list) - self.bps_list_max:] + self.bps_list = self.bps_list[len(self.bps_list) - self.bps_list_max :] def get_sums(self): """ return tuple of grand, month, week, day totals """ - return (sum([v for v in self.grand_total.values()]), - sum([v for v in self.month_total.values()]), - sum([v for v in self.week_total.values()]), - sum([v for v in self.day_total.values()])) + return ( + sum([v for v in self.grand_total.values()]), + sum([v for v in self.month_total.values()]), + sum([v for v in self.week_total.values()]), + sum([v for v in self.day_total.values()]), + ) def amounts(self, server): """ Return grand, month, week, day totals for specified server """ - return self.grand_total.get(server, 0), \ - self.month_total.get(server, 0), \ - self.week_total.get(server, 0), \ - self.day_total.get(server, 0), \ - self.timeline_total.get(server, {}) + return ( + self.grand_total.get(server, 0), + self.month_total.get(server, 0), + self.week_total.get(server, 0), + self.day_total.get(server, 0), + self.timeline_total.get(server, {}), + ) def clear_server(self, server): """ Clean counters for specified server """ @@ -330,9 +355,9 @@ class BPSMeter: """ if force or (self.have_quota and time.time() > (self.q_time - 50)): self.quota = self.left = cfg.quota_size.get_float() - logging.info('Quota was reset to %s', self.quota) + logging.info("Quota was reset to %s", self.quota) if cfg.quota_resume(): - logging.info('Auto-resume due to quota reset') + logging.info("Auto-resume due to quota reset") if sabnzbd.downloader.Downloader.do: 
sabnzbd.downloader.Downloader.do.resume() self.next_reset() @@ -344,20 +369,24 @@ class BPSMeter: """ Determine next reset time """ t = t or time.time() tm = time.localtime(t) - if self.q_period == 'd': + if self.q_period == "d": nx = (tm[0], tm[1], tm[2], self.q_hour, self.q_minute, 0, 0, 0, tm[8]) if (tm.tm_hour * 60 + tm.tm_min) >= (self.q_hour * 60 + self.q_minute): # If today's moment has passed, it will happen tomorrow t = time.mktime(nx) + 24 * 3600 tm = time.localtime(t) - elif self.q_period == 'w': - if self.q_day < tm.tm_wday + 1 or (self.q_day == tm.tm_wday + 1 and (tm.tm_hour * 60 + tm.tm_min) >= (self.q_hour * 60 + self.q_minute)): + elif self.q_period == "w": + if self.q_day < tm.tm_wday + 1 or ( + self.q_day == tm.tm_wday + 1 and (tm.tm_hour * 60 + tm.tm_min) >= (self.q_hour * 60 + self.q_minute) + ): tm = time.localtime(next_week(t)) dif = abs(self.q_day - tm.tm_wday - 1) t = time.mktime(tm) + dif * 24 * 3600 tm = time.localtime(t) - elif self.q_period == 'm': - if self.q_day < tm.tm_mday or (self.q_day == tm.tm_mday and (tm.tm_hour * 60 + tm.tm_min) >= (self.q_hour * 60 + self.q_minute)): + elif self.q_period == "m": + if self.q_day < tm.tm_mday or ( + self.q_day == tm.tm_mday and (tm.tm_hour * 60 + tm.tm_min) >= (self.q_hour * 60 + self.q_minute) + ): tm = time.localtime(next_month(t)) day = min(last_month_day(tm), self.q_day) tm = (tm[0], tm[1], day, self.q_hour, self.q_minute, 0, 0, 0, tm[8]) @@ -365,7 +394,7 @@ class BPSMeter: return tm = (tm[0], tm[1], tm[2], self.q_hour, self.q_minute, 0, 0, 0, tm[8]) self.q_time = time.mktime(tm) - logging.debug('Will reset quota at %s', tm) + logging.debug("Will reset quota at %s", tm) def change_quota(self, allow_resume=True): """ Update quota, potentially pausing downloader """ @@ -373,11 +402,11 @@ class BPSMeter: # Never set, use last period's size per = cfg.quota_period() sums = self.get_sums() - if per == 'd': + if per == "d": self.left = sums[3] - elif per == 'w': + elif per == "w": self.left = 
sums[2] - elif per == 'm': + elif per == "m": self.left = sums[1] self.have_quota = bool(cfg.quota_size()) @@ -399,8 +428,9 @@ class BPSMeter: # Pattern = # The and part can both be optional - __re_day = re.compile(r'^\s*(\d+)[^:]*') - __re_hm = re.compile(r'(\d+):(\d+)\s*$') + __re_day = re.compile(r"^\s*(\d+)[^:]*") + __re_hm = re.compile(r"(\d+):(\d+)\s*$") + def get_quota(self): """ If quota active, return check-function, hour, minute """ if self.have_quota: @@ -415,10 +445,10 @@ class BPSMeter: if m: self.q_hour = int(m.group(1)) self.q_minute = int(m.group(2)) - if self.q_period == 'w': + if self.q_period == "w": self.q_day = max(1, self.q_day) self.q_day = min(7, self.q_day) - elif self.q_period == 'm': + elif self.q_period == "m": self.q_day = max(1, self.q_day) self.q_day = min(31, self.q_day) else: @@ -447,7 +477,7 @@ class BPSMeter: def quota_handler(): """ To be called from scheduler """ - logging.debug('Checking quota') + logging.debug("Checking quota") BPSMeter.do.reset_quota() diff --git a/sabnzbd/directunpacker.py b/sabnzbd/directunpacker.py index d375516..e98b71a 100644 --- a/sabnzbd/directunpacker.py +++ b/sabnzbd/directunpacker.py @@ -44,11 +44,10 @@ START_STOP_LOCK = threading.RLock() ACTIVE_UNPACKERS = [] -RAR_NR = re.compile(r'(.*?)(\.part(\d*).rar|\.r(\d*))$', re.IGNORECASE) +RAR_NR = re.compile(r"(.*?)(\.part(\d*).rar|\.r(\d*))$", re.IGNORECASE) class DirectUnpacker(threading.Thread): - def __init__(self, nzo): threading.Thread.__init__(self) @@ -94,7 +93,13 @@ class DirectUnpacker(threading.Thread): self.rarfile_nzf = None def check_requirements(self): - if not cfg.direct_unpack() or self.killed or not self.nzo.unpack or self.nzo.bad_articles or sabnzbd.newsunpack.RAR_PROBLEM: + if ( + not cfg.direct_unpack() + or self.killed + or not self.nzo.unpack + or self.nzo.bad_articles + or sabnzbd.newsunpack.RAR_PROBLEM + ): return False return True @@ -137,12 +142,12 @@ class DirectUnpacker(threading.Thread): # Are we doing this set? 
if self.cur_setname and self.cur_setname == nzf.setname: - logging.debug('DirectUnpack queued %s for %s', nzf.filename, self.cur_setname) + logging.debug("DirectUnpack queued %s for %s", nzf.filename, self.cur_setname) # Is this the first one of the first set? if not self.active_instance and not self.is_alive() and self.have_next_volume(): # Too many runners already? if len(ACTIVE_UNPACKERS) >= cfg.direct_unpack_threads(): - logging.info('Too many DirectUnpackers currently to start %s', self.cur_setname) + logging.info("Too many DirectUnpackers currently to start %s", self.cur_setname) return # Start the unrar command and the loop @@ -158,8 +163,8 @@ class DirectUnpacker(threading.Thread): def run(self): # Input and output - linebuf = '' - last_volume_linebuf = '' + linebuf = "" + last_volume_linebuf = "" unrar_log = [] rarfiles = [] extracted = [] @@ -179,17 +184,30 @@ class DirectUnpacker(threading.Thread): linebuf += char # Error? Let PP-handle it - if linebuf.endswith(('ERROR: ', 'Cannot create', 'in the encrypted file', 'CRC failed', 'checksum failed', - 'You need to start extraction from a previous volume', 'password is incorrect', - 'Incorrect password', 'Write error', 'checksum error', 'Cannot open', - 'start extraction from a previous volume', 'Unexpected end of archive')): - logging.info('Error in DirectUnpack of %s: %s', self.cur_setname, linebuf.strip()) + if linebuf.endswith( + ( + "ERROR: ", + "Cannot create", + "in the encrypted file", + "CRC failed", + "checksum failed", + "You need to start extraction from a previous volume", + "password is incorrect", + "Incorrect password", + "Write error", + "checksum error", + "Cannot open", + "start extraction from a previous volume", + "Unexpected end of archive", + ) + ): + logging.info("Error in DirectUnpack of %s: %s", self.cur_setname, linebuf.strip()) self.abort() - if linebuf.endswith('\n'): + if linebuf.endswith("\n"): # List files we used - if linebuf.startswith('Extracting from'): - filename = 
(re.search(EXTRACTFROM_RE, linebuf.strip()).group(1)) + if linebuf.startswith("Extracting from"): + filename = re.search(EXTRACTFROM_RE, linebuf.strip()).group(1) if filename not in rarfiles: rarfiles.append(filename) @@ -203,27 +221,30 @@ class DirectUnpacker(threading.Thread): extracted.append(real_path(self.unpack_dir_info[0], unpacked_file)) # Did we reach the end? - if linebuf.endswith('All OK'): + if linebuf.endswith("All OK"): # Stop timer and finish self.unpack_time += time.time() - start_time ACTIVE_UNPACKERS.remove(self) # Add to success rarfile_path = os.path.join(self.nzo.downpath, self.rarfile_nzf.filename) - self.success_sets[self.cur_setname] = (rar_volumelist(rarfile_path, self.nzo.password, rarfiles), extracted) - logging.info('DirectUnpack completed for %s', self.cur_setname) - self.nzo.set_action_line(T('Direct Unpack'), T('Completed')) + self.success_sets[self.cur_setname] = ( + rar_volumelist(rarfile_path, self.nzo.password, rarfiles), + extracted, + ) + logging.info("DirectUnpack completed for %s", self.cur_setname) + self.nzo.set_action_line(T("Direct Unpack"), T("Completed")) # List success in history-info - msg = T('Unpacked %s files/folders in %s') % (len(extracted), format_time_string(self.unpack_time)) - msg = '%s - %s' % (T('Direct Unpack'), msg) - self.nzo.set_unpack_info('Unpack', msg, self.cur_setname) + msg = T("Unpacked %s files/folders in %s") % (len(extracted), format_time_string(self.unpack_time)) + msg = "%s - %s" % (T("Direct Unpack"), msg) + self.nzo.set_unpack_info("Unpack", msg, self.cur_setname) # Write current log and clear unrar_log.append(linebuf.strip()) - linebuf = '' - last_volume_linebuf = '' - logging.debug('DirectUnpack Unrar output %s', '\n'.join(unrar_log)) + linebuf = "" + last_volume_linebuf = "" + logging.debug("DirectUnpack Unrar output %s", "\n".join(unrar_log)) unrar_log = [] rarfiles = [] extracted = [] @@ -247,7 +268,7 @@ class DirectUnpacker(threading.Thread): self.killed = True break - if 
linebuf.endswith('[C]ontinue, [Q]uit '): + if linebuf.endswith("[C]ontinue, [Q]uit "): # Stop timer self.unpack_time += time.time() - start_time @@ -259,7 +280,7 @@ class DirectUnpacker(threading.Thread): # If unrar stopped or is killed somehow, writing will cause a crash try: # Give unrar some time to do it's thing - self.active_instance.stdin.write(b'C\n') + self.active_instance.stdin.write(b"C\n") start_time = time.time() time.sleep(0.1) except IOError: @@ -270,14 +291,14 @@ class DirectUnpacker(threading.Thread): if not last_volume_linebuf or last_volume_linebuf != linebuf: # Next volume self.cur_volume += 1 - self.nzo.set_action_line(T('Direct Unpack'), self.get_formatted_stats()) - logging.info('DirectUnpacked volume %s for %s', self.cur_volume, self.cur_setname) + self.nzo.set_action_line(T("Direct Unpack"), self.get_formatted_stats()) + logging.info("DirectUnpacked volume %s for %s", self.cur_volume, self.cur_setname) # If lines did not change and we don't have the next volume, this download is missing files! 
# In rare occasions we can get stuck forever with repeating lines if last_volume_linebuf == linebuf: if not self.have_next_volume() or self.duplicate_lines > 10: - logging.info('DirectUnpack failed due to missing files %s', self.cur_setname) + logging.info("DirectUnpack failed due to missing files %s", self.cur_setname) self.abort() else: logging.debug('Duplicate output line detected: "%s"', last_volume_linebuf) @@ -287,13 +308,13 @@ class DirectUnpacker(threading.Thread): last_volume_linebuf = linebuf # Show the log - if linebuf.endswith('\n'): + if linebuf.endswith("\n"): unrar_log.append(linebuf.strip()) - linebuf = '' + linebuf = "" # Add last line unrar_log.append(linebuf.strip()) - logging.debug('DirectUnpack Unrar output %s', '\n'.join(unrar_log)) + logging.debug("DirectUnpack Unrar output %s", "\n".join(unrar_log)) # Make more space self.reset_active() @@ -309,7 +330,7 @@ class DirectUnpacker(threading.Thread): Make sure that files are 100% written to disk by checking md5sum """ for nzf_search in reversed(self.nzo.finished_files): - if nzf_search.setname == self.cur_setname and nzf_search.vol == (self.cur_volume+1) and nzf_search.md5sum: + if nzf_search.setname == self.cur_setname and nzf_search.vol == (self.cur_volume + 1) and nzf_search.md5sum: return nzf_search return False @@ -338,14 +359,14 @@ class DirectUnpacker(threading.Thread): # Set options if self.nzo.password: - password_command = '-p%s' % self.nzo.password + password_command = "-p%s" % self.nzo.password else: - password_command = '-p-' + password_command = "-p-" if one_folder or cfg.flat_unpack(): - action = 'e' + action = "e" else: - action = 'x' + action = "x" # The first NZF self.rarfile_nzf = self.have_next_volume() @@ -360,36 +381,60 @@ class DirectUnpacker(threading.Thread): if sabnzbd.WIN32: # For Unrar to support long-path, we need to cricumvent Python's list2cmdline # See: https://github.com/sabnzbd/sabnzbd/issues/1043 - command = ['%s' % sabnzbd.newsunpack.RAR_COMMAND, action, '-vp', 
'-idp', '-o+', '-ai', password_command, - '%s' % clip_path(rarfile_path), '%s\\' % long_path(extraction_path)] + command = [ + "%s" % sabnzbd.newsunpack.RAR_COMMAND, + action, + "-vp", + "-idp", + "-o+", + "-ai", + password_command, + "%s" % clip_path(rarfile_path), + "%s\\" % long_path(extraction_path), + ] else: # Don't use "-ai" (not needed for non-Windows) - command = ['%s' % sabnzbd.newsunpack.RAR_COMMAND, action, '-vp', '-idp', '-o+', password_command, - '%s' % rarfile_path, '%s/' % extraction_path] + command = [ + "%s" % sabnzbd.newsunpack.RAR_COMMAND, + action, + "-vp", + "-idp", + "-o+", + password_command, + "%s" % rarfile_path, + "%s/" % extraction_path, + ] if cfg.ignore_unrar_dates(): - command.insert(3, '-tsm-') + command.insert(3, "-tsm-") # Let's start from the first one! self.cur_volume = 1 stup, need_shell, command, creationflags = build_command(command, flatten_command=True) - logging.debug('Running unrar for DirectUnpack %s', command) + logging.debug("Running unrar for DirectUnpack %s", command) # Need to disable buffer to have direct feedback - self.active_instance = Popen(command, shell=False, stdin=subprocess.PIPE, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - startupinfo=stup, creationflags=creationflags, bufsize=0) + self.active_instance = Popen( + command, + shell=False, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + startupinfo=stup, + creationflags=creationflags, + bufsize=0, + ) # Add to runners ACTIVE_UNPACKERS.append(self) # Doing the first - logging.info('DirectUnpacked volume %s for %s', self.cur_volume, self.cur_setname) + logging.info("DirectUnpacked volume %s for %s", self.cur_volume, self.cur_setname) @synchronized(START_STOP_LOCK) def abort(self): """ Abort running instance and delete generated files """ if not self.killed and self.cur_setname: - logging.info('Aborting DirectUnpack for %s', self.cur_setname) + logging.info("Aborting DirectUnpack for %s", self.cur_setname) self.killed = 
True # Save reference to the first rarfile @@ -399,7 +444,7 @@ class DirectUnpacker(threading.Thread): if self.active_instance: # First we try to abort gracefully try: - self.active_instance.stdin.write(b'Q\n') + self.active_instance.stdin.write(b"Q\n") time.sleep(0.2) except IOError: pass @@ -427,14 +472,18 @@ class DirectUnpacker(threading.Thread): if one_folder: # RarFile can fail for mysterious reasons try: - rar_contents = RarFile(os.path.join(self.nzo.downpath, rarfile_nzf.filename), single_file_check=True).filelist() + rar_contents = RarFile( + os.path.join(self.nzo.downpath, rarfile_nzf.filename), single_file_check=True + ).filelist() for rm_file in rar_contents: # Flat-unpack, so remove foldername from RarFile output f = os.path.join(extraction_path, os.path.basename(rm_file)) remove_file(f) except: # The user will have to remove it themselves - logging.info('Failed to clean Direct Unpack after aborting %s', rarfile_nzf.filename, exc_info=True) + logging.info( + "Failed to clean Direct Unpack after aborting %s", rarfile_nzf.filename, exc_info=True + ) else: # We can just remove the whole path remove_all(extraction_path, recursive=True) @@ -449,7 +498,7 @@ class DirectUnpacker(threading.Thread): if self.cur_setname and self.cur_setname in self.total_volumes: # This won't work on obfuscated posts if self.total_volumes[self.cur_setname] >= self.cur_volume and self.cur_volume: - return '%02d/%02d' % (self.cur_volume, self.total_volumes[self.cur_setname]) + return "%02d/%02d" % (self.cur_volume, self.total_volumes[self.cur_setname]) return self.cur_volume @@ -465,14 +514,14 @@ def analyze_rar_filename(filename): return m.group(1), int_conv(m.group(3)) else: # Detect if first of "rxx" set - if filename.endswith('.rar'): + if filename.endswith(".rar"): return os.path.splitext(filename)[0], 1 return None, None def abort_all(): """ Abort all running DirectUnpackers """ - logging.info('Aborting all DirectUnpackers') + logging.info("Aborting all DirectUnpackers") for 
direct_unpacker in ACTIVE_UNPACKERS: direct_unpacker.abort() @@ -483,8 +532,14 @@ def test_disk_performance(): """ if diskspeedmeasure(sabnzbd.cfg.download_dir.get_path()) > 40: cfg.direct_unpack.set(True) - logging.warning(T('Direct Unpack was automatically enabled.') + ' ' + T('Jobs will start unpacking during the downloading to reduce post-processing time. Only works for jobs that do not need repair.')) + logging.warning( + T("Direct Unpack was automatically enabled.") + + " " + + T( + "Jobs will start unpacking during the downloading to reduce post-processing time. Only works for jobs that do not need repair." + ) + ) else: - logging.info('Direct Unpack was not enabled, incomplete folder disk speed below 40MB/s') + logging.info("Direct Unpack was not enabled, incomplete folder disk speed below 40MB/s") cfg.direct_unpack_tested.set(True) sabnzbd.config.save_config() diff --git a/sabnzbd/downloader.py b/sabnzbd/downloader.py index 6a1319d..ff61787 100644 --- a/sabnzbd/downloader.py +++ b/sabnzbd/downloader.py @@ -41,13 +41,13 @@ from sabnzbd.utils.happyeyeballs import happyeyeballs # Timeout penalty in minutes for each cause -_PENALTY_UNKNOWN = 3 # Unknown cause -_PENALTY_502 = 5 # Unknown 502 -_PENALTY_TIMEOUT = 10 # Server doesn't give an answer (multiple times) -_PENALTY_SHARE = 10 # Account sharing detected -_PENALTY_TOOMANY = 10 # Too many connections -_PENALTY_PERM = 10 # Permanent error, like bad username/password -_PENALTY_SHORT = 1 # Minimal penalty when no_penalties is set +_PENALTY_UNKNOWN = 3 # Unknown cause +_PENALTY_502 = 5 # Unknown 502 +_PENALTY_TIMEOUT = 10 # Server doesn't give an answer (multiple times) +_PENALTY_SHARE = 10 # Account sharing detected +_PENALTY_TOOMANY = 10 # Too many connections +_PENALTY_PERM = 10 # Permanent error, like bad username/password +_PENALTY_SHORT = 1 # Minimal penalty when no_penalties is set _PENALTY_VERYSHORT = 0.1 # Error 400 without cause clues @@ -55,9 +55,24 @@ TIMER_LOCK = RLock() class Server: - - def 
__init__(self, server_id, displayname, host, port, timeout, threads, priority, ssl, ssl_verify, ssl_ciphers, - send_group, username=None, password=None, optional=False, retention=0): + def __init__( + self, + server_id, + displayname, + host, + port, + timeout, + threads, + priority, + ssl, + ssl_verify, + ssl_ciphers, + send_group, + username=None, + password=None, + optional=False, + retention=0, + ): self.id = server_id self.newid = None @@ -82,12 +97,12 @@ class Server: self.idle_threads = [] self.active = True self.bad_cons = 0 - self.errormsg = '' - self.warning = '' - self.info = None # Will hold getaddrinfo() list - self.ssl_info = '' # Will hold the type and cipher of SSL connection + self.errormsg = "" + self.warning = "" + self.info = None # Will hold getaddrinfo() list + self.ssl_info = "" # Will hold the type and cipher of SSL connection self.request = False # True if a getaddrinfo() request is pending - self.have_body = 'free.xsusenet.com' not in host + self.have_body = "free.xsusenet.com" not in host self.have_stat = True # Assume server has "STAT", until proven otherwise for i in range(threads): @@ -105,30 +120,30 @@ class Server: # Check if already a successful ongoing connection if self.busy_threads and self.busy_threads[0].nntp: # Re-use that IP - logging.debug('%s: Re-using address %s', self.host, self.busy_threads[0].nntp.host) + logging.debug("%s: Re-using address %s", self.host, self.busy_threads[0].nntp.host) return self.busy_threads[0].nntp.host # Determine new IP if cfg.load_balancing() == 0 and self.info: # Just return the first one, so all next threads use the same IP ip = self.info[0][4][0] - logging.debug('%s: Connecting to address %s', self.host, ip) + logging.debug("%s: Connecting to address %s", self.host, ip) elif cfg.load_balancing() == 1 and self.info and len(self.info) > 1: # Return a random entry from the possible IPs rnd = random.randint(0, len(self.info) - 1) ip = self.info[rnd][4][0] - logging.debug('%s: Connecting to 
address %s', self.host, ip) + logging.debug("%s: Connecting to address %s", self.host, ip) elif cfg.load_balancing() == 2 and self.info and len(self.info) > 1: # RFC6555 / Happy Eyeballs: ip = happyeyeballs(self.host, port=self.port, ssl=self.ssl) if ip: - logging.debug('%s: Connecting to address %s', self.host, ip) + logging.debug("%s: Connecting to address %s", self.host, ip) else: # nothing returned, so there was a connection problem ip = self.host - logging.debug('%s: No successful IP connection was possible', self.host) + logging.debug("%s: No successful IP connection was possible", self.host) else: ip = self.host return ip @@ -152,6 +167,7 @@ class Server: class Downloader(Thread): """ Singleton Downloader Thread """ + do = None def __init__(self, paused=False): @@ -187,7 +203,7 @@ class Downloader(Thread): self.write_fds = {} self.servers = [] - self.server_dict = {} # For faster lookups, but is not updated later! + self.server_dict = {} # For faster lookups, but is not updated later! 
self.server_nr = 0 self._timers = {} @@ -235,8 +251,23 @@ class Downloader(Thread): break if create and enabled and host and port and threads: - server = Server(newserver, displayname, host, port, timeout, threads, priority, ssl, ssl_verify, - ssl_ciphers, send_group, username, password, optional, retention) + server = Server( + newserver, + displayname, + host, + port, + timeout, + threads, + priority, + ssl, + ssl_verify, + ssl_ciphers, + send_group, + username, + password, + optional, + retention, + ) self.servers.append(server) self.server_dict[newserver] = server @@ -255,7 +286,7 @@ class Downloader(Thread): # Do not notify when SABnzbd is still starting if self.paused and sabnzbd.WEB_DIR: logging.info("Resuming") - notifier.send_notification("SABnzbd", T('Resuming'), 'download') + notifier.send_notification("SABnzbd", T("Resuming"), "download") self.paused = False @NzbQueueLocker @@ -264,7 +295,7 @@ class Downloader(Thread): if not self.paused: self.paused = True logging.info("Pausing") - notifier.send_notification("SABnzbd", T('Paused'), 'download') + notifier.send_notification("SABnzbd", T("Paused"), "download") if self.is_paused(): BPSMeter.do.reset() if cfg.autodisconnect(): @@ -289,13 +320,13 @@ class Downloader(Thread): """ if value: mx = cfg.bandwidth_max.get_int() - if '%' in str(value) or (0 < from_units(value) < 101): - limit = value.strip(' %') + if "%" in str(value) or (0 < from_units(value) < 101): + limit = value.strip(" %") self.bandwidth_perc = from_units(limit) if mx: self.bandwidth_limit = mx * self.bandwidth_perc / 100 else: - logging.warning(T('You must set a maximum bandwidth before you can set a bandwidth limit')) + logging.warning(T("You must set a maximum bandwidth before you can set a bandwidth limit")) else: self.bandwidth_limit = from_units(value) if mx: @@ -347,11 +378,11 @@ class Downloader(Thread): # Was it resolving problem? 
if server.info is False: # Warn about resolving issues - errormsg = T('Cannot connect to server %s [%s]') % (server.host, T('Server name does not resolve')) + errormsg = T("Cannot connect to server %s [%s]") % (server.host, T("Server name does not resolve")) if server.errormsg != errormsg: server.errormsg = errormsg logging.warning(errormsg) - logging.warning(T('Server %s will be ignored for %s minutes'), server.host, _PENALTY_TIMEOUT) + logging.warning(T("Server %s will be ignored for %s minutes"), server.host, _PENALTY_TIMEOUT) # Not fully the same as the code below for optional servers server.bad_cons = 0 @@ -363,7 +394,7 @@ class Downloader(Thread): if server.optional and server.active and (server.bad_cons / server.threads) > 3: server.bad_cons = 0 server.active = False - logging.warning(T('Server %s will be ignored for %s minutes'), server.host, _PENALTY_TIMEOUT) + logging.warning(T("Server %s will be ignored for %s minutes"), server.host, _PENALTY_TIMEOUT) self.plan_server(server, _PENALTY_TIMEOUT) # Remove all connections to server @@ -389,23 +420,27 @@ class Downloader(Thread): # See if we need to delay because the queues are full logged = False - while not self.shutdown and (sabnzbd.decoder.Decoder.do.queue_full() or sabnzbd.assembler.Assembler.do.queue_full()): + while not self.shutdown and ( + sabnzbd.decoder.Decoder.do.queue_full() or sabnzbd.assembler.Assembler.do.queue_full() + ): if not logged: # Only log once, to not waste any CPU-cycles - logging.debug("Delaying - Decoder queue: %s - Assembler queue: %s", - sabnzbd.decoder.Decoder.do.decoder_queue.qsize(), - sabnzbd.assembler.Assembler.do.queue.qsize()) + logging.debug( + "Delaying - Decoder queue: %s - Assembler queue: %s", + sabnzbd.decoder.Decoder.do.decoder_queue.qsize(), + sabnzbd.assembler.Assembler.do.queue.qsize(), + ) logged = True time.sleep(0.05) def run(self): # First check IPv6 connectivity sabnzbd.EXTERNAL_IPV6 = sabnzbd.test_ipv6() - logging.debug('External IPv6 test result: %s', 
sabnzbd.EXTERNAL_IPV6) + logging.debug("External IPv6 test result: %s", sabnzbd.EXTERNAL_IPV6) # Then we check SSL certificate checking sabnzbd.CERTIFICATE_VALIDATION = sabnzbd.test_cert_checking() - logging.debug('SSL verification test: %s', sabnzbd.CERTIFICATE_VALIDATION) + logging.debug("SSL verification test: %s", sabnzbd.CERTIFICATE_VALIDATION) # Kick BPS-Meter to check quota BPSMeter.do.update() @@ -462,7 +497,7 @@ class Downloader(Thread): if server.retention and article.nzf.nzo.avg_stamp < time.time() - server.retention: # Let's get rid of all the articles for this server at once - logging.info('Job %s too old for %s, moving on', article.nzf.nzo.final_name, server.host) + logging.info("Job %s too old for %s, moving on", article.nzf.nzo.final_name, server.host) while article: self.decode(article, None) article = article.nzf.nzo.get_article(server, self.servers) @@ -480,7 +515,12 @@ class Downloader(Thread): logging.info("%s@%s: Initiating connection", nw.thrdnum, server.host) nw.init_connect(self.write_fds) except: - logging.error(T('Failed to initialize %s@%s with reason: %s'), nw.thrdnum, server.host, sys.exc_info()[1]) + logging.error( + T("Failed to initialize %s@%s with reason: %s"), + nw.thrdnum, + server.host, + sys.exc_info()[1], + ) self.__reset_nw(nw, "failed to initialize") # Exit-point @@ -530,7 +570,7 @@ class Downloader(Thread): # Now let's check if it was stable in the last 10 seconds self.can_be_slowed = BPSMeter.do.get_stable_speed(timespan=10) self.can_be_slowed_timer = 0 - logging.debug('Downloader-slowdown: %r', self.can_be_slowed) + logging.debug("Downloader-slowdown: %r", self.can_be_slowed) else: read, write, error = ([], [], []) @@ -540,8 +580,11 @@ class Downloader(Thread): time.sleep(1.0) DOWNLOADER_CV.acquire() - while (sabnzbd.nzbqueue.NzbQueue.do.is_empty() or self.is_paused() or self.postproc) and not \ - self.shutdown and not self.__restart: + while ( + (sabnzbd.nzbqueue.NzbQueue.do.is_empty() or self.is_paused() or 
self.postproc) + and not self.shutdown + and not self.__restart + ): DOWNLOADER_CV.wait() DOWNLOADER_CV.release() @@ -599,7 +642,9 @@ class Downloader(Thread): try: nw.finish_connect(nw.status_code) if sabnzbd.LOG_ALL: - logging.debug("%s@%s last message -> %s", nw.thrdnum, nw.server.host, nntp_to_msg(nw.data)) + logging.debug( + "%s@%s last message -> %s", nw.thrdnum, nw.server.host, nntp_to_msg(nw.data) + ) nw.clear_data() except NNTPPermanentError as error: # Handle login problems @@ -607,45 +652,45 @@ class Downloader(Thread): penalty = 0 msg = error.response ecode = int_conv(msg[:3]) - display_msg = ' [%s]' % msg - logging.debug('Server login problem: %s, %s', ecode, msg) + display_msg = " [%s]" % msg + logging.debug("Server login problem: %s, %s", ecode, msg) if ecode in (502, 400, 481, 482) and clues_too_many(msg): # Too many connections: remove this thread and reduce thread-setting for server # Plan to go back to the full number after a penalty timeout if server.active: - errormsg = T('Too many connections to server %s') % display_msg + errormsg = T("Too many connections to server %s") % display_msg if server.errormsg != errormsg: server.errormsg = errormsg - logging.warning(T('Too many connections to server %s'), server.host) + logging.warning(T("Too many connections to server %s"), server.host) self.__reset_nw(nw, None, warn=False, destroy=True, send_quit=True) self.plan_server(server, _PENALTY_TOOMANY) server.threads -= 1 elif ecode in (502, 481, 482) and clues_too_many_ip(msg): # Account sharing? 
if server.active: - errormsg = T('Probable account sharing') + display_msg + errormsg = T("Probable account sharing") + display_msg if server.errormsg != errormsg: server.errormsg = errormsg - name = ' (%s)' % server.host - logging.warning(T('Probable account sharing') + name) + name = " (%s)" % server.host + logging.warning(T("Probable account sharing") + name) penalty = _PENALTY_SHARE block = True elif ecode in (452, 481, 482, 381) or (ecode == 502 and clues_login(msg)): # Cannot login, block this server if server.active: - errormsg = T('Failed login for server %s') % display_msg + errormsg = T("Failed login for server %s") % display_msg if server.errormsg != errormsg: server.errormsg = errormsg - logging.error(T('Failed login for server %s'), server.host) + logging.error(T("Failed login for server %s"), server.host) penalty = _PENALTY_PERM block = True elif ecode in (502, 482): # Cannot connect (other reasons), block this server if server.active: - errormsg = T('Cannot connect to server %s [%s]') % ('', display_msg) + errormsg = T("Cannot connect to server %s [%s]") % ("", display_msg) if server.errormsg != errormsg: server.errormsg = errormsg - logging.warning(T('Cannot connect to server %s [%s]'), server.host, msg) + logging.warning(T("Cannot connect to server %s [%s]"), server.host, msg) if clues_pay(msg): penalty = _PENALTY_PERM else: @@ -654,16 +699,16 @@ class Downloader(Thread): elif ecode == 400: # Temp connection problem? 
if server.active: - logging.debug('Unspecified error 400 from server %s', server.host) + logging.debug("Unspecified error 400 from server %s", server.host) penalty = _PENALTY_VERYSHORT block = True else: # Unknown error, just keep trying if server.active: - errormsg = T('Cannot connect to server %s [%s]') % ('', display_msg) + errormsg = T("Cannot connect to server %s [%s]") % ("", display_msg) if server.errormsg != errormsg: server.errormsg = errormsg - logging.warning(T('Cannot connect to server %s [%s]'), server.host, msg) + logging.warning(T("Cannot connect to server %s [%s]"), server.host, msg) penalty = _PENALTY_UNKNOWN block = True if block or (penalty and server.optional): @@ -675,8 +720,12 @@ class Downloader(Thread): self.__reset_nw(nw, None, warn=False, send_quit=True) continue except: - logging.error(T('Connecting %s@%s failed, message=%s'), - nw.thrdnum, nw.server.host, nntp_to_msg(nw.data)) + logging.error( + T("Connecting %s@%s failed, message=%s"), + nw.thrdnum, + nw.server.host, + nntp_to_msg(nw.data), + ) # No reset-warning needed, above logging is sufficient self.__reset_nw(nw, None, warn=False) @@ -686,7 +735,7 @@ class Downloader(Thread): elif nw.status_code == 223: done = True - logging.debug('Article <%s> is present', article.article) + logging.debug("Article <%s> is present", article.article) elif nw.status_code == 211: done = False @@ -697,27 +746,32 @@ class Downloader(Thread): elif nw.status_code in (411, 423, 430): done = True - logging.debug('Thread %s@%s: Article %s missing (error=%s)', - nw.thrdnum, nw.server.host, article.article, nw.status_code) + logging.debug( + "Thread %s@%s: Article %s missing (error=%s)", + nw.thrdnum, + nw.server.host, + article.article, + nw.status_code, + ) nw.clear_data() elif nw.status_code == 500: if nzo.precheck: # Assume "STAT" command is not supported server.have_stat = False - logging.debug('Server %s does not support STAT', server.host) + logging.debug("Server %s does not support STAT", server.host) 
else: # Assume "BODY" command is not supported server.have_body = False - logging.debug('Server %s does not support BODY', server.host) + logging.debug("Server %s does not support BODY", server.host) nw.clear_data() self.__request_article(nw) if done: server.bad_cons = 0 # Successful data, clear "bad" counter - server.errormsg = server.warning = '' + server.errormsg = server.warning = "" if sabnzbd.LOG_ALL: - logging.debug('Thread %s@%s: %s done', nw.thrdnum, server.host, article.article) + logging.debug("Thread %s@%s: %s done", nw.thrdnum, server.host, article.article) self.decode(article, nw.data) nw.soft_reset() @@ -749,9 +803,9 @@ class Downloader(Thread): if warn and reset_msg: server.warning = reset_msg - logging.info('Thread %s@%s: %s', nw.thrdnum, server.host, reset_msg) + logging.info("Thread %s@%s: %s", nw.thrdnum, server.host, reset_msg) elif reset_msg: - logging.debug('Thread %s@%s: %s', nw.thrdnum, server.host, reset_msg) + logging.debug("Thread %s@%s: %s", nw.thrdnum, server.host, reset_msg) if nw in server.busy_threads: server.busy_threads.remove(nw) @@ -777,7 +831,7 @@ class Downloader(Thread): nw.hard_reset(wait, send_quit=send_quit) # Empty SSL info, it might change on next connect - server.ssl_info = '' + server.ssl_info = "" def __request_article(self, nw): try: @@ -785,25 +839,25 @@ class Downloader(Thread): if nw.server.send_group and nzo.group != nw.group: group = nzo.group if sabnzbd.LOG_ALL: - logging.debug('Thread %s@%s: GROUP <%s>', nw.thrdnum, nw.server.host, group) + logging.debug("Thread %s@%s: GROUP <%s>", nw.thrdnum, nw.server.host, group) nw.send_group(group) else: if sabnzbd.LOG_ALL: - logging.debug('Thread %s@%s: BODY %s', nw.thrdnum, nw.server.host, nw.article.article) + logging.debug("Thread %s@%s: BODY %s", nw.thrdnum, nw.server.host, nw.article.article) nw.body(nzo.precheck) fileno = nw.nntp.sock.fileno() if fileno not in self.read_fds: self.read_fds[fileno] = nw except socket.error as err: - logging.info('Looks like server 
closed connection: %s', err) + logging.info("Looks like server closed connection: %s", err) self.__reset_nw(nw, "server broke off connection", send_quit=False) except: - logging.error(T('Suspect error in downloader')) + logging.error(T("Suspect error in downloader")) logging.info("Traceback: ", exc_info=True) self.__reset_nw(nw, "server broke off connection", send_quit=False) - #------------------------------------------------------------------------------ + # ------------------------------------------------------------------------------ # Timed restart of servers admin. # For each server all planned events are kept in a list. # When the first timer of a server fires, all other existing timers @@ -817,7 +871,7 @@ class Downloader(Thread): # Overwrite in case of no_penalties interval = _PENALTY_SHORT - logging.debug('Set planned server resume %s in %s mins', server.host, interval) + logging.debug("Set planned server resume %s in %s mins", server.host, interval) if server.id not in self._timers: self._timers[server.id] = [] stamp = time.time() + 60.0 * interval @@ -828,7 +882,7 @@ class Downloader(Thread): @synchronized(TIMER_LOCK) def trigger_server(self, server_id, timestamp): """ Called by scheduler, start server if timer still valid """ - logging.debug('Trigger planned server resume for server-id %s', server_id) + logging.debug("Trigger planned server resume for server-id %s", server_id) if server_id in self._timers: if timestamp in self._timers[server_id]: del self._timers[server_id] @@ -845,7 +899,7 @@ class Downloader(Thread): # Activate server if it was inactive for server in self.servers: if server.id == server_id and not server.active: - logging.debug('Unblock server %s', server.host) + logging.debug("Unblock server %s", server.host) self.init_server(server_id, server_id) break @@ -862,7 +916,7 @@ class Downloader(Thread): kicked = [] for server_id in self._timers.keys(): if not [stamp for stamp in self._timers[server_id] if stamp >= now]: - 
logging.debug('Forcing re-evaluation of server-id %s', server_id) + logging.debug("Forcing re-evaluation of server-id %s", server_id) del self._timers[server_id] self.init_server(server_id, server_id) kicked.append(server_id) @@ -870,7 +924,7 @@ class Downloader(Thread): for server in self.servers: if server.id not in self._timers: if server.id not in kicked and not server.active: - logging.debug('Forcing activation of server %s', server.host) + logging.debug("Forcing activation of server %s", server.host) self.init_server(server.id, server.id) def update_server(self, oldserver, newserver): @@ -886,7 +940,7 @@ class Downloader(Thread): def stop(self): self.shutdown = True - notifier.send_notification("SABnzbd", T('Shutting down'), 'startup') + notifier.send_notification("SABnzbd", T("Shutting down"), "startup") def stop(): @@ -905,7 +959,7 @@ def stop(): def clues_login(text): """ Check for any "failed login" clues in the response code """ text = text.lower() - for clue in ('username', 'password', 'invalid', 'authen', 'access denied'): + for clue in ("username", "password", "invalid", "authen", "access denied"): if clue in text: return True return False @@ -914,9 +968,9 @@ def clues_login(text): def clues_too_many(text): """ Check for any "too many connections" clues in the response code """ text = text.lower() - for clue in ('exceed', 'connections', 'too many', 'threads', 'limit'): + for clue in ("exceed", "connections", "too many", "threads", "limit"): # Not 'download limit exceeded' error - if (clue in text) and ('download' not in text) and ('byte' not in text): + if (clue in text) and ("download" not in text) and ("byte" not in text): return True return False @@ -924,7 +978,7 @@ def clues_too_many(text): def clues_too_many_ip(text): """ Check for any "account sharing" clues in the response code """ text = text.lower() - for clue in ('simultaneous ip', 'multiple ip'): + for clue in ("simultaneous ip", "multiple ip"): if clue in text: return True return False @@ 
-933,7 +987,7 @@ def clues_too_many_ip(text): def clues_pay(text): """ Check for messages about payments """ text = text.lower() - for clue in ('credits', 'paym', 'expired', 'exceeded'): + for clue in ("credits", "paym", "expired", "exceeded"): if clue in text: return True return False diff --git a/sabnzbd/encoding.py b/sabnzbd/encoding.py index fc44b34..df31003 100644 --- a/sabnzbd/encoding.py +++ b/sabnzbd/encoding.py @@ -31,14 +31,14 @@ def utob(str_in): """ Shorthand for converting UTF-8 to bytes """ if isinstance(str_in, bytes): return str_in - return str_in.encode('utf-8') + return str_in.encode("utf-8") def ubtou(str_in): """ Shorthand for converting unicode bytes to UTF-8 """ if not isinstance(str_in, bytes): return str_in - return str_in.decode('utf-8') + return str_in.decode("utf-8") def platform_btou(str_in): @@ -50,7 +50,7 @@ def platform_btou(str_in): try: return ubtou(str_in) except UnicodeDecodeError: - return str_in.decode(CODEPAGE, errors='replace').replace('?', '!') + return str_in.decode(CODEPAGE, errors="replace").replace("?", "!") else: return str_in @@ -63,7 +63,7 @@ def correct_unknown_encoding(str_or_bytes_in): """ # If already string, back to bytes if not isinstance(str_or_bytes_in, bytes): - str_or_bytes_in = str_or_bytes_in.encode('utf-8', 'surrogateescape') + str_or_bytes_in = str_or_bytes_in.encode("utf-8", "surrogateescape") # Try simple bytes-to-string try: @@ -71,14 +71,12 @@ def correct_unknown_encoding(str_or_bytes_in): except UnicodeDecodeError: try: # Try using 8-bit ASCII, if came from Windows - return str_or_bytes_in.decode('ISO-8859-1') + return str_or_bytes_in.decode("ISO-8859-1") except ValueError: # Last resort we use the slow chardet package - return str_or_bytes_in.decode(chardet.detect(str_or_bytes_in)['encoding']) + return str_or_bytes_in.decode(chardet.detect(str_or_bytes_in)["encoding"]) def xml_name(p): """ Prepare name for use in HTML/XML contect """ return escape(str(p)) - - diff --git a/sabnzbd/interface.py 
b/sabnzbd/interface.py index cbcbbc4..d8a33a1 100644 --- a/sabnzbd/interface.py +++ b/sabnzbd/interface.py @@ -39,10 +39,18 @@ import sabnzbd.rss import sabnzbd.scheduler as scheduler from Cheetah.Template import Template -from sabnzbd.misc import to_units, from_units, time_format, calc_age, \ - int_conv, get_base_url, probablyipv4, probablyipv6, opts_to_pp -from sabnzbd.filesystem import real_path, long_path, globber, globber_full, remove_all, \ - clip_path, same_file +from sabnzbd.misc import ( + to_units, + from_units, + time_format, + calc_age, + int_conv, + get_base_url, + probablyipv4, + probablyipv6, + opts_to_pp, +) +from sabnzbd.filesystem import real_path, long_path, globber, globber_full, remove_all, clip_path, same_file from sabnzbd.newswrapper import GetServerParms from sabnzbd.bpsmeter import BPSMeter from sabnzbd.encoding import xml_name, utob @@ -58,14 +66,26 @@ from sabnzbd.utils.diskspeed import diskspeedmeasure from sabnzbd.utils.getperformance import getpystone from sabnzbd.utils.internetspeed import internetspeed -from sabnzbd.constants import MEBI, DEF_SKIN_COLORS, \ - DEF_STDCONFIG, DEF_MAIN_TMPL, DEFAULT_PRIORITY, CHEETAH_DIRECTIVES +from sabnzbd.constants import MEBI, DEF_SKIN_COLORS, DEF_STDCONFIG, DEF_MAIN_TMPL, DEFAULT_PRIORITY, CHEETAH_DIRECTIVES from sabnzbd.lang import list_languages -from sabnzbd.api import list_scripts, list_cats, del_from_section, \ - api_handler, build_queue, build_status, retry_job, build_header, build_history, \ - format_bytes, report, del_hist_job, Ttemplate, build_queue_header +from sabnzbd.api import ( + list_scripts, + list_cats, + del_from_section, + api_handler, + build_queue, + build_status, + retry_job, + build_header, + build_history, + format_bytes, + report, + del_hist_job, + Ttemplate, + build_queue_header, +) ############################################################################## # Global constants @@ -87,26 +107,26 @@ def secured_expose(wrap_func=None, check_configlock=False, 
check_api_key=False): def internal_wrap(*args, **kwargs): # Add X-Frame-Headers headers to page-requests if cfg.x_frame_options(): - cherrypy.response.headers['X-Frame-Options'] = 'SameOrigin' + cherrypy.response.headers["X-Frame-Options"] = "SameOrigin" # Check if config is locked if check_configlock and cfg.configlock(): cherrypy.response.status = 403 - return 'Access denied - Configuration locked' + return "Access denied - Configuration locked" # Check if external access if not check_access(): cherrypy.response.status = 403 - return 'Access denied' + return "Access denied" # Verify login status, only for non-key pages if not check_login() and not check_api_key: - raise Raiser('/login/') + raise Raiser("/login/") # Verify host used for the visit if not check_hostname(): cherrypy.response.status = 403 - return 'Access denied - Hostname verification failed: https://sabnzbd.org/hostname-check' + return "Access denied - Hostname verification failed: https://sabnzbd.org/hostname-check" # Some pages need correct API key if check_api_key: @@ -116,6 +136,7 @@ def secured_expose(wrap_func=None, check_configlock=False, check_api_key=False): # All good, cool! 
return wrap_func(*args, **kwargs) + return internal_wrap @@ -131,10 +152,12 @@ def check_access(access_type=4): # CherryPy will report ::ffff:192.168.0.10 on dual-stack situation # It will always contain that ::ffff: prefix - range_ok = not cfg.local_ranges() or bool([1 for r in cfg.local_ranges() if (referrer.startswith(r) or referrer.replace('::ffff:', '').startswith(r))]) - allowed = referrer in ('127.0.0.1', '::ffff:127.0.0.1', '::1') or range_ok or access_type <= cfg.inet_exposure() + range_ok = not cfg.local_ranges() or bool( + [1 for r in cfg.local_ranges() if (referrer.startswith(r) or referrer.replace("::ffff:", "").startswith(r))] + ) + allowed = referrer in ("127.0.0.1", "::ffff:127.0.0.1", "::1") or range_ok or access_type <= cfg.inet_exposure() if not allowed: - logging.debug('Refused connection from %s', referrer) + logging.debug("Refused connection from %s", referrer) return allowed @@ -148,16 +171,16 @@ def check_hostname(): return True # Don't allow requests without Host - host = cherrypy.request.headers.get('Host') + host = cherrypy.request.headers.get("Host") if not host: return False # Remove the port-part (like ':8080'), if it is there, always on the right hand side. 
# Not to be confused with IPv6 colons (within square brackets) - host = re.sub(':[0123456789]+$', '', host).lower() + host = re.sub(":[0123456789]+$", "", host).lower() # Fine if localhost or IP - if host == 'localhost' or probablyipv4(host) or probablyipv6(host): + if host == "localhost" or probablyipv4(host) or probablyipv6(host): return True # Check on the whitelist @@ -166,7 +189,7 @@ def check_hostname(): # Fine if ends with ".local" or ".local.", aka mDNS name # See rfc6762 Multicast DNS - if host.endswith(('.local', '.local.')): + if host.endswith((".local", ".local.")): return True # Ohoh, bad @@ -175,7 +198,8 @@ def check_hostname(): # Create a more unique ID for each instance -COOKIE_SECRET = str(randint(1000,100000)*os.getpid()) +COOKIE_SECRET = str(randint(1000, 100000) * os.getpid()) + def set_login_cookie(remove=False, remember_me=False): """ We try to set a cookie as unique as possible @@ -185,34 +209,34 @@ def set_login_cookie(remove=False, remember_me=False): """ salt = randint(1, 1000) cookie_str = utob(str(salt) + cherrypy.request.remote.ip + COOKIE_SECRET) - cherrypy.response.cookie['login_cookie'] = hashlib.sha1(cookie_str).hexdigest() - cherrypy.response.cookie['login_cookie']['path'] = '/' - cherrypy.response.cookie['login_cookie']['httponly'] = 1 - cherrypy.response.cookie['login_salt'] = salt - cherrypy.response.cookie['login_salt']['path'] = '/' - cherrypy.response.cookie['login_salt']['httponly'] = 1 + cherrypy.response.cookie["login_cookie"] = hashlib.sha1(cookie_str).hexdigest() + cherrypy.response.cookie["login_cookie"]["path"] = "/" + cherrypy.response.cookie["login_cookie"]["httponly"] = 1 + cherrypy.response.cookie["login_salt"] = salt + cherrypy.response.cookie["login_salt"]["path"] = "/" + cherrypy.response.cookie["login_salt"]["httponly"] = 1 # If we want to be remembered if remember_me: - cherrypy.response.cookie['login_cookie']['max-age'] = 3600*24*14 - cherrypy.response.cookie['login_salt']['max-age'] = 3600*24*14 + 
cherrypy.response.cookie["login_cookie"]["max-age"] = 3600 * 24 * 14 + cherrypy.response.cookie["login_salt"]["max-age"] = 3600 * 24 * 14 # To remove if remove: - cherrypy.response.cookie['login_cookie']['expires'] = 0 - cherrypy.response.cookie['login_salt']['expires'] = 0 + cherrypy.response.cookie["login_cookie"]["expires"] = 0 + cherrypy.response.cookie["login_salt"]["expires"] = 0 else: # Notify about new login - notifier.send_notification(T('User logged in'), T('User logged in to the web interface'), 'new_login') + notifier.send_notification(T("User logged in"), T("User logged in to the web interface"), "new_login") def check_login_cookie(): # Do we have everything? - if 'login_cookie' not in cherrypy.request.cookie or 'login_salt' not in cherrypy.request.cookie: + if "login_cookie" not in cherrypy.request.cookie or "login_salt" not in cherrypy.request.cookie: return False - cookie_str = utob(str(cherrypy.request.cookie['login_salt'].value) + cherrypy.request.remote.ip + COOKIE_SECRET) - return cherrypy.request.cookie['login_cookie'].value == hashlib.sha1(cookie_str).hexdigest() + cookie_str = utob(str(cherrypy.request.cookie["login_salt"].value) + cherrypy.request.remote.ip + COOKIE_SECRET) + return cherrypy.request.cookie["login_cookie"].value == hashlib.sha1(cookie_str).hexdigest() def check_login(): @@ -240,21 +264,27 @@ def encrypt_pwd(pwd): def set_auth(conf): """ Set the authentication for CherryPy """ if cfg.username() and cfg.password() and not cfg.html_login(): - conf.update({'tools.auth_basic.on': True, 'tools.auth_basic.realm': 'SABnzbd', - 'tools.auth_basic.users': get_users, 'tools.auth_basic.encrypt': encrypt_pwd}) - conf.update({'/api': {'tools.auth_basic.on': False}, - '%s/api' % cfg.url_base(): {'tools.auth_basic.on': False}, - }) + conf.update( + { + "tools.auth_basic.on": True, + "tools.auth_basic.realm": "SABnzbd", + "tools.auth_basic.users": get_users, + "tools.auth_basic.encrypt": encrypt_pwd, + } + ) + conf.update( + {"/api": 
{"tools.auth_basic.on": False}, "%s/api" % cfg.url_base(): {"tools.auth_basic.on": False},} + ) else: - conf.update({'tools.auth_basic.on': False}) + conf.update({"tools.auth_basic.on": False}) def check_apikey(kwargs): """ Check API-key or NZB-key Return None when OK, otherwise an error message """ - mode = kwargs.get('mode', '') - name = kwargs.get('name', '') + mode = kwargs.get("mode", "") + name = kwargs.get("name", "") # Lookup required access level, returns 4 for config-things req_access = sabnzbd.api.api_level(mode, name) @@ -263,49 +293,62 @@ def check_apikey(kwargs): # NZB-only actions pass elif not check_access(req_access): - return 'Access denied' + return "Access denied" # First check API-key, if OK that's sufficient if not cfg.disable_key(): - key = kwargs.get('apikey') + key = kwargs.get("apikey") if not key: if cfg.api_warnings(): - log_warning_and_ip(T('API Key missing, please enter the api key from Config->General into your 3rd party program:')) - return 'API Key Required' + log_warning_and_ip( + T("API Key missing, please enter the api key from Config->General into your 3rd party program:") + ) + return "API Key Required" elif req_access == 1 and key == cfg.nzb_key(): return None elif key == cfg.api_key(): return None else: - log_warning_and_ip(T('API Key incorrect, Use the api key from Config->General in your 3rd party program:')) - return 'API Key Incorrect' + log_warning_and_ip(T("API Key incorrect, Use the api key from Config->General in your 3rd party program:")) + return "API Key Incorrect" # No active API-key, check web credentials instead if cfg.username() and cfg.password(): - if check_login() or (kwargs.get('ma_username') == cfg.username() and kwargs.get('ma_password') == cfg.password()): + if check_login() or ( + kwargs.get("ma_username") == cfg.username() and kwargs.get("ma_password") == cfg.password() + ): pass else: if cfg.api_warnings(): - log_warning_and_ip(T('Authentication missing, please enter username/password from 
Config->General into your 3rd party program:')) - return 'Missing authentication' + log_warning_and_ip( + T( + "Authentication missing, please enter username/password from Config->General into your 3rd party program:" + ) + ) + return "Missing authentication" return None def log_warning_and_ip(txt): """ Include the IP and the Proxy-IP for warnings """ # Was it proxy forwarded? - xff = cherrypy.request.headers.get('X-Forwarded-For') + xff = cherrypy.request.headers.get("X-Forwarded-For") if xff: - txt = '%s %s (X-Forwarded-For: %s)>%s' % (txt, cherrypy.request.remote.ip, xff, cherrypy.request.headers.get('User-Agent', '??')) + txt = "%s %s (X-Forwarded-For: %s)>%s" % ( + txt, + cherrypy.request.remote.ip, + xff, + cherrypy.request.headers.get("User-Agent", "??"), + ) else: - txt = '%s %s>%s' % (txt, cherrypy.request.remote.ip, cherrypy.request.headers.get('User-Agent', '??')) - logging.warning('%s', txt) + txt = "%s %s>%s" % (txt, cherrypy.request.remote.ip, cherrypy.request.headers.get("User-Agent", "??")) + logging.warning("%s", txt) ############################################################################## # Helper raiser functions ############################################################################## -def Raiser(root='', **kwargs): +def Raiser(root="", **kwargs): args = {} for key in kwargs: val = kwargs.get(key) @@ -313,7 +356,7 @@ def Raiser(root='', **kwargs): args[key] = val # Add extras if args: - root = '%s?%s' % (root, urllib.parse.urlencode(args)) + root = "%s?%s" % (root, urllib.parse.urlencode(args)) # Optionally add the leading /sabnzbd/ (or what the user set) if not root.startswith(cfg.url_base()): root = cherrypy.request.script_name + root @@ -322,68 +365,73 @@ def Raiser(root='', **kwargs): def queueRaiser(root, kwargs): - return Raiser(root, start=kwargs.get('start'), - limit=kwargs.get('limit'), - search=kwargs.get('search')) + return Raiser(root, start=kwargs.get("start"), limit=kwargs.get("limit"), search=kwargs.get("search")) def 
rssRaiser(root, kwargs): - return Raiser(root, feed=kwargs.get('feed')) + return Raiser(root, feed=kwargs.get("feed")) ############################################################################## # Page definitions ############################################################################## class MainPage: - def __init__(self): - self.__root = '/' + self.__root = "/" # Add all sub-pages self.login = LoginPage() - self.queue = QueuePage('/queue/') - self.history = HistoryPage('/history/') - self.status = Status('/status/') - self.config = ConfigPage('/config/') - self.nzb = NzoPage('/nzb/') - self.wizard = Wizard('/wizard/') + self.queue = QueuePage("/queue/") + self.history = HistoryPage("/history/") + self.status = Status("/status/") + self.config = ConfigPage("/config/") + self.nzb = NzoPage("/nzb/") + self.wizard = Wizard("/wizard/") @secured_expose def index(self, **kwargs): # Redirect to wizard if no servers are set - if kwargs.get('skip_wizard') or config.get_servers(): + if kwargs.get("skip_wizard") or config.get_servers(): info = build_header() - info['scripts'] = list_scripts(default=True) - info['script'] = 'Default' + info["scripts"] = list_scripts(default=True) + info["script"] = "Default" - info['cat'] = 'Default' - info['categories'] = list_cats(True) - info['have_rss_defined'] = bool(config.get_rss()) - info['have_watched_dir'] = bool(cfg.dirscan_dir()) + info["cat"] = "Default" + info["categories"] = list_cats(True) + info["have_rss_defined"] = bool(config.get_rss()) + info["have_watched_dir"] = bool(cfg.dirscan_dir()) # Have logout only with HTML and if inet=5, only when we are external - info['have_logout'] = cfg.username() and cfg.password() and (cfg.html_login() and (cfg.inet_exposure() < 5 or (cfg.inet_exposure() == 5 and not check_access(access_type=6)))) + info["have_logout"] = ( + cfg.username() + and cfg.password() + and ( + cfg.html_login() + and (cfg.inet_exposure() < 5 or (cfg.inet_exposure() == 5 and not 
check_access(access_type=6))) + ) + ) bytespersec_list = BPSMeter.do.get_bps_list() - info['bytespersec_list'] = ','.join([str(bps) for bps in bytespersec_list]) + info["bytespersec_list"] = ",".join([str(bps) for bps in bytespersec_list]) - template = Template(file=os.path.join(sabnzbd.WEB_DIR, 'main.tmpl'), - searchList=[info], compilerSettings=CHEETAH_DIRECTIVES) + template = Template( + file=os.path.join(sabnzbd.WEB_DIR, "main.tmpl"), searchList=[info], compilerSettings=CHEETAH_DIRECTIVES + ) return template.respond() else: # Redirect to the setup wizard - raise cherrypy.HTTPRedirect('%s/wizard/' % cfg.url_base()) + raise cherrypy.HTTPRedirect("%s/wizard/" % cfg.url_base()) @secured_expose(check_api_key=True) def shutdown(self, **kwargs): # Check for PID - pid_in = kwargs.get('pid') + pid_in = kwargs.get("pid") if pid_in and int(pid_in) != os.getpid(): return "Incorrect PID for this instance, remove PID from URL to initiate shutdown." sabnzbd.shutdown_program() - return T('SABnzbd shutdown finished') + return T("SABnzbd shutdown finished") @secured_expose(check_api_key=True) def pause(self, **kwargs): @@ -406,7 +454,7 @@ class MainPage: def scriptlog(self, **kwargs): """ Needed for all skins, URL is fixed due to postproc """ # No session key check, due to fixed URLs - name = kwargs.get('name') + name = kwargs.get("name") if name: history_db = sabnzbd.get_db_connection() return ShowString(history_db.get_name(name), history_db.get_script_log(name)) @@ -416,31 +464,31 @@ class MainPage: @secured_expose(check_api_key=True) def retry(self, **kwargs): """ Duplicate of retry of History, needed for some skins """ - job = kwargs.get('job', '') - url = kwargs.get('url', '').strip() - pp = kwargs.get('pp') - cat = kwargs.get('cat') - script = kwargs.get('script') + job = kwargs.get("job", "") + url = kwargs.get("url", "").strip() + pp = kwargs.get("pp") + cat = kwargs.get("cat") + script = kwargs.get("script") if url: - sabnzbd.add_url(url, pp, script, cat, 
nzbname=kwargs.get('nzbname')) + sabnzbd.add_url(url, pp, script, cat, nzbname=kwargs.get("nzbname")) del_hist_job(job, del_files=True) raise Raiser(self.__root) @secured_expose(check_api_key=True) def retry_pp(self, **kwargs): # Duplicate of History/retry_pp to please the SMPL skin :( - retry_job(kwargs.get('job'), kwargs.get('nzbfile'), kwargs.get('password')) + retry_job(kwargs.get("job"), kwargs.get("nzbfile"), kwargs.get("password")) raise Raiser(self.__root) @secured_expose def robots_txt(self, **kwargs): """ Keep web crawlers out """ - cherrypy.response.headers['Content-Type'] = 'text/plain' - return 'User-agent: *\nDisallow: /\n' + cherrypy.response.headers["Content-Type"] = "text/plain" + return "User-agent: *\nDisallow: /\n" + ############################################################################## class Wizard: - def __init__(self, root): self.__root = root @@ -449,54 +497,61 @@ class Wizard: """ Show the language selection page """ if sabnzbd.WIN32: from sabnzbd.utils.apireg import get_install_lng + cfg.language.set(get_install_lng()) logging.debug('Installer language code "%s"', cfg.language()) info = build_header(sabnzbd.WIZARD_DIR) - info['languages'] = list_languages() - template = Template(file=os.path.join(sabnzbd.WIZARD_DIR, 'index.html'), - searchList=[info], compilerSettings=CHEETAH_DIRECTIVES) + info["languages"] = list_languages() + template = Template( + file=os.path.join(sabnzbd.WIZARD_DIR, "index.html"), searchList=[info], compilerSettings=CHEETAH_DIRECTIVES + ) return template.respond() @secured_expose(check_configlock=True) def one(self, **kwargs): """ Accept language and show server page """ - if kwargs.get('lang'): - cfg.language.set(kwargs.get('lang')) + if kwargs.get("lang"): + cfg.language.set(kwargs.get("lang")) # Always setup Glitter - change_web_dir('Glitter - Default') + change_web_dir("Glitter - Default") info = build_header(sabnzbd.WIZARD_DIR) - info['certificate_validation'] = sabnzbd.CERTIFICATE_VALIDATION + 
info["certificate_validation"] = sabnzbd.CERTIFICATE_VALIDATION # Just in case, add server servers = config.get_servers() if not servers: - info['host'] = '' - info['port'] = '' - info['username'] = '' - info['password'] = '' - info['connections'] = '' - info['ssl'] = 0 - info['ssl_verify'] = 2 + info["host"] = "" + info["port"] = "" + info["username"] = "" + info["password"] = "" + info["connections"] = "" + info["ssl"] = 0 + info["ssl_verify"] = 2 else: # Sort servers to get the first enabled one - server_names = sorted(servers.keys(), key=lambda svr: '%d%02d%s' % (int(not servers[svr].enable()), servers[svr].priority(), servers[svr].displayname().lower())) + server_names = sorted( + servers.keys(), + key=lambda svr: "%d%02d%s" + % (int(not servers[svr].enable()), servers[svr].priority(), servers[svr].displayname().lower()), + ) for server in server_names: # If there are multiple servers, just use the first enabled one s = servers[server] - info['host'] = s.host() - info['port'] = s.port() - info['username'] = s.username() - info['password'] = s.password.get_stars() - info['connections'] = s.connections() - info['ssl'] = s.ssl() - info['ssl_verify'] = s.ssl_verify() + info["host"] = s.host() + info["port"] = s.port() + info["username"] = s.username() + info["password"] = s.password.get_stars() + info["connections"] = s.connections() + info["ssl"] = s.ssl() + info["ssl_verify"] = s.ssl_verify() if s.enable(): break - template = Template(file=os.path.join(sabnzbd.WIZARD_DIR, 'one.html'), - searchList=[info], compilerSettings=CHEETAH_DIRECTIVES) + template = Template( + file=os.path.join(sabnzbd.WIZARD_DIR, "one.html"), searchList=[info], compilerSettings=CHEETAH_DIRECTIVES + ) return template.respond() @secured_expose(check_configlock=True) @@ -504,7 +559,7 @@ class Wizard: """ Accept server and show the final page for restart """ # Save server details if kwargs: - kwargs['enable'] = 1 + kwargs["enable"] = 1 handle_server(kwargs) config.save_config() @@ -512,27 
+567,29 @@ class Wizard: # Show Restart screen info = build_header(sabnzbd.WIZARD_DIR) - info['access_url'], info['urls'] = get_access_info() - info['download_dir'] = cfg.download_dir.get_clipped_path() - info['complete_dir'] = cfg.complete_dir.get_clipped_path() + info["access_url"], info["urls"] = get_access_info() + info["download_dir"] = cfg.download_dir.get_clipped_path() + info["complete_dir"] = cfg.complete_dir.get_clipped_path() - template = Template(file=os.path.join(sabnzbd.WIZARD_DIR, 'two.html'), - searchList=[info], compilerSettings=CHEETAH_DIRECTIVES) + template = Template( + file=os.path.join(sabnzbd.WIZARD_DIR, "two.html"), searchList=[info], compilerSettings=CHEETAH_DIRECTIVES + ) return template.respond() @secured_expose def exit(self, **kwargs): """ Stop SABnzbd """ sabnzbd.shutdown_program() - return T('SABnzbd shutdown finished') + return T("SABnzbd shutdown finished") + def get_access_info(): """ Build up a list of url's that sabnzbd can be accessed from """ # Access_url is used to provide the user a link to sabnzbd depending on the host - access_uri = 'localhost' + access_uri = "localhost" cherryhost = cfg.cherryhost() - if cherryhost == '0.0.0.0': + if cherryhost == "0.0.0.0": host = socket.gethostname() socks = [host] # Grab a list of all ips for the hostname @@ -543,17 +600,17 @@ def get_access_info(): for addr in addresses: address = addr[4][0] # Filter out ipv6 addresses (should not be allowed) - if ':' not in address and address not in socks: + if ":" not in address and address not in socks: socks.append(address) if "host" in cherrypy.request.headers: - host = cherrypy.request.headers['host'] - host = host.rsplit(':')[0] + host = cherrypy.request.headers["host"] + host = host.rsplit(":")[0] access_uri = host socks.insert(0, host) else: - socks.insert(0, 'localhost') + socks.insert(0, "localhost") - elif cherryhost == '::': + elif cherryhost == "::": host = socket.gethostname() socks = [host] # Grab a list of all ips for the hostname @@ 
-561,17 +618,17 @@ def get_access_info(): for addr in addresses: address = addr[4][0] # Only ipv6 addresses will work - if ':' in address: - address = '[%s]' % address + if ":" in address: + address = "[%s]" % address if address not in socks: socks.append(address) if "host" in cherrypy.request.headers: - host = cherrypy.request.headers['host'] - host = host.rsplit(':')[0] + host = cherrypy.request.headers["host"] + host = host.rsplit(":")[0] access_uri = host socks.insert(0, host) else: - socks.insert(0, 'localhost') + socks.insert(0, "localhost") elif not cherryhost: socks = [socket.gethostname()] @@ -584,72 +641,74 @@ def get_access_info(): for sock in socks: if sock: if cfg.enable_https() and cfg.https_port(): - url = 'https://%s:%s%s' % (sock, cfg.https_port(), cfg.url_base()) + url = "https://%s:%s%s" % (sock, cfg.https_port(), cfg.url_base()) elif cfg.enable_https(): - url = 'https://%s:%s%s' % (sock, cfg.cherryport(), cfg.url_base()) + url = "https://%s:%s%s" % (sock, cfg.cherryport(), cfg.url_base()) else: - url = 'http://%s:%s%s' % (sock, cfg.cherryport(), cfg.url_base()) + url = "http://%s:%s%s" % (sock, cfg.cherryport(), cfg.url_base()) urls.append(url) if cfg.enable_https() and cfg.https_port(): - access_url = 'https://%s:%s%s' % (sock, cfg.https_port(), cfg.url_base()) + access_url = "https://%s:%s%s" % (sock, cfg.https_port(), cfg.url_base()) elif cfg.enable_https(): - access_url = 'https://%s:%s%s' % (access_uri, cfg.cherryport(), cfg.url_base()) + access_url = "https://%s:%s%s" % (access_uri, cfg.cherryport(), cfg.url_base()) else: - access_url = 'http://%s:%s%s' % (access_uri, cfg.cherryport(), cfg.url_base()) + access_url = "http://%s:%s%s" % (access_uri, cfg.cherryport(), cfg.url_base()) return access_url, urls + ############################################################################## class LoginPage: - @cherrypy.expose def index(self, **kwargs): # Base output var info = build_header(sabnzbd.WEB_DIR_CONFIG) - info['error'] = '' + 
info["error"] = "" # Logout? - if kwargs.get('logout'): + if kwargs.get("logout"): set_login_cookie(remove=True) raise Raiser() # Check if there's even a username/password set if check_login(): - raise Raiser(cherrypy.request.script_name + '/') + raise Raiser(cherrypy.request.script_name + "/") # Was it proxy forwarded? - xff = cherrypy.request.headers.get('X-Forwarded-For') + xff = cherrypy.request.headers.get("X-Forwarded-For") # Check login info - if kwargs.get('username') == cfg.username() and kwargs.get('password') == cfg.password(): + if kwargs.get("username") == cfg.username() and kwargs.get("password") == cfg.password(): # Save login cookie - set_login_cookie(remember_me=kwargs.get('remember_me', False)) + set_login_cookie(remember_me=kwargs.get("remember_me", False)) # Log the succes if xff: - logging.info('Successful login from %s (X-Forwarded-For: %s)', cherrypy.request.remote.ip, xff) + logging.info("Successful login from %s (X-Forwarded-For: %s)", cherrypy.request.remote.ip, xff) else: - logging.info('Successful login from %s', cherrypy.request.remote.ip) + logging.info("Successful login from %s", cherrypy.request.remote.ip) # Redirect - raise Raiser(cherrypy.request.script_name + '/') - elif kwargs.get('username') or kwargs.get('password'): - info['error'] = T('Authentication failed, check username/password.') + raise Raiser(cherrypy.request.script_name + "/") + elif kwargs.get("username") or kwargs.get("password"): + info["error"] = T("Authentication failed, check username/password.") # Warn about the potential security problem - fail_msg = T('Unsuccessful login attempt from %s') % cherrypy.request.remote.ip + fail_msg = T("Unsuccessful login attempt from %s") % cherrypy.request.remote.ip if xff: - fail_msg = '%s (X-Forwarded-For: %s)' % (fail_msg, xff) + fail_msg = "%s (X-Forwarded-For: %s)" % (fail_msg, xff) logging.warning(fail_msg) # Show login - template = Template(file=os.path.join(sabnzbd.WEB_DIR_CONFIG, 'login', 'main.tmpl'), - 
searchList=[info], compilerSettings=CHEETAH_DIRECTIVES) + template = Template( + file=os.path.join(sabnzbd.WEB_DIR_CONFIG, "login", "main.tmpl"), + searchList=[info], + compilerSettings=CHEETAH_DIRECTIVES, + ) return template.respond() ############################################################################## class NzoPage: - def __init__(self, root): self.__root = root self.__cached_selection = {} # None @@ -664,7 +723,7 @@ class NzoPage: # /nzb/SABnzbd_nzo_xxxxx/save nzo_id = None for a in args: - if a.startswith('SABnzbd_nzo'): + if a.startswith("SABnzbd_nzo"): nzo_id = a break @@ -673,19 +732,19 @@ class NzoPage: info, pnfo_list, bytespersec, q_size, bytes_left_previous_page = build_queue_header() # /SABnzbd_nzo_xxxxx/bulk_operation - if 'bulk_operation' in args: + if "bulk_operation" in args: return self.bulk_operation(nzo_id, kwargs) # /SABnzbd_nzo_xxxxx/details - elif 'details' in args: + elif "details" in args: info = self.nzo_details(info, pnfo_list, nzo_id) # /SABnzbd_nzo_xxxxx/files - elif 'files' in args: + elif "files" in args: info = self.nzo_files(info, nzo_id) # /SABnzbd_nzo_xxxxx/save - elif 'save' in args: + elif "save" in args: self.save_details(nzo_id, args, kwargs) return # never reached @@ -694,12 +753,13 @@ class NzoPage: info = self.nzo_details(info, pnfo_list, nzo_id) info = self.nzo_files(info, nzo_id) - template = Template(file=os.path.join(sabnzbd.WEB_DIR, 'nzo.tmpl'), - searchList=[info], compilerSettings=CHEETAH_DIRECTIVES) + template = Template( + file=os.path.join(sabnzbd.WEB_DIR, "nzo.tmpl"), searchList=[info], compilerSettings=CHEETAH_DIRECTIVES + ) return template.respond() else: # Job no longer exists, go to main page - raise Raiser(urllib.parse.urljoin(self.__root, '../queue/')) + raise Raiser(urllib.parse.urljoin(self.__root, "../queue/")) def nzo_details(self, info, pnfo_list, nzo_id): slot = {} @@ -713,27 +773,27 @@ class NzoPage: unpackopts = opts_to_pp(repair, unpack, delete) script = pnfo.script if script is None: - 
script = 'None' + script = "None" cat = pnfo.category if not cat: - cat = 'None' - - slot['nzo_id'] = str(nzo_id) - slot['cat'] = cat - slot['filename'] = nzo.final_name - slot['filename_clean'] = nzo.final_name - slot['password'] = nzo.password or '' - slot['script'] = script - slot['priority'] = str(pnfo.priority) - slot['unpackopts'] = str(unpackopts) - info['index'] = n + cat = "None" + + slot["nzo_id"] = str(nzo_id) + slot["cat"] = cat + slot["filename"] = nzo.final_name + slot["filename_clean"] = nzo.final_name + slot["password"] = nzo.password or "" + slot["script"] = script + slot["priority"] = str(pnfo.priority) + slot["unpackopts"] = str(unpackopts) + info["index"] = n break n += 1 - info['slot'] = slot - info['scripts'] = list_scripts() - info['categories'] = list_cats() - info['noofslots'] = len(pnfo_list) + info["slot"] = slot + info["scripts"] = list_scripts() + info["categories"] = list_cats() + info["noofslots"] = len(pnfo_list) return info @@ -742,36 +802,39 @@ class NzoPage: nzo = NzbQueue.do.get_nzo(nzo_id) if nzo: pnfo = nzo.gather_info(full=True) - info['nzo_id'] = pnfo.nzo_id - info['filename'] = pnfo.filename + info["nzo_id"] = pnfo.nzo_id + info["filename"] = pnfo.filename for nzf in pnfo.active_files: checked = False - if nzf.nzf_id in self.__cached_selection and \ - self.__cached_selection[nzf.nzf_id] == 'on': + if nzf.nzf_id in self.__cached_selection and self.__cached_selection[nzf.nzf_id] == "on": checked = True - active.append({'filename': nzf.filename if nzf.filename else nzf.subject, - 'mbleft': "%.2f" % (nzf.bytes_left / MEBI), - 'mb': "%.2f" % (nzf.bytes / MEBI), - 'size': format_bytes(nzf.bytes), - 'sizeleft': format_bytes(nzf.bytes_left), - 'nzf_id': nzf.nzf_id, - 'age': calc_age(nzf.date), - 'checked': checked}) - - info['active_files'] = active + active.append( + { + "filename": nzf.filename if nzf.filename else nzf.subject, + "mbleft": "%.2f" % (nzf.bytes_left / MEBI), + "mb": "%.2f" % (nzf.bytes / MEBI), + "size": 
format_bytes(nzf.bytes), + "sizeleft": format_bytes(nzf.bytes_left), + "nzf_id": nzf.nzf_id, + "age": calc_age(nzf.date), + "checked": checked, + } + ) + + info["active_files"] = active return info def save_details(self, nzo_id, args, kwargs): - index = kwargs.get('index', None) - name = kwargs.get('name', None) - password = kwargs.get('password', None) + index = kwargs.get("index", None) + name = kwargs.get("name", None) + password = kwargs.get("password", None) if password == "": password = None - pp = kwargs.get('pp', None) - script = kwargs.get('script', None) - cat = kwargs.get('cat', None) - priority = kwargs.get('priority', None) + pp = kwargs.get("pp", None) + script = kwargs.get("script", None) + cat = kwargs.get("cat", None) + priority = kwargs.get("priority", None) nzo = NzbQueue.do.get_nzo(nzo_id) if index is not None: @@ -779,14 +842,14 @@ class NzoPage: if name is not None: NzbQueue.do.change_name(nzo_id, name, password) - if cat is not None and nzo.cat is not cat and not (nzo.cat == '*' and cat == 'Default'): + if cat is not None and nzo.cat is not cat and not (nzo.cat == "*" and cat == "Default"): NzbQueue.do.change_cat(nzo_id, cat, priority) # Category changed, so make sure "Default" attributes aren't set again - if script == 'Default': + if script == "Default": script = None - if priority == 'Default': + if priority == "Default": priority = None - if pp == 'Default': + if pp == "Default": pp = None if script is not None and nzo.script != script: @@ -796,67 +859,67 @@ class NzoPage: if priority is not None and nzo.priority != int(priority): NzbQueue.do.set_priority(nzo_id, priority) - raise Raiser(urllib.parse.urljoin(self.__root, '../queue/')) + raise Raiser(urllib.parse.urljoin(self.__root, "../queue/")) def bulk_operation(self, nzo_id, kwargs): self.__cached_selection = kwargs - if kwargs['action_key'] == 'Delete': + if kwargs["action_key"] == "Delete": for key in kwargs: - if kwargs[key] == 'on': + if kwargs[key] == "on": 
NzbQueue.do.remove_nzf(nzo_id, key, force_delete=True) - elif kwargs['action_key'] in ('Top', 'Up', 'Down', 'Bottom'): + elif kwargs["action_key"] in ("Top", "Up", "Down", "Bottom"): nzf_ids = [] for key in kwargs: - if kwargs[key] == 'on': + if kwargs[key] == "on": nzf_ids.append(key) - size = int_conv(kwargs.get('action_size', 1)) - if kwargs['action_key'] == 'Top': + size = int_conv(kwargs.get("action_size", 1)) + if kwargs["action_key"] == "Top": NzbQueue.do.move_top_bulk(nzo_id, nzf_ids) - elif kwargs['action_key'] == 'Up': + elif kwargs["action_key"] == "Up": NzbQueue.do.move_up_bulk(nzo_id, nzf_ids, size) - elif kwargs['action_key'] == 'Down': + elif kwargs["action_key"] == "Down": NzbQueue.do.move_down_bulk(nzo_id, nzf_ids, size) - elif kwargs['action_key'] == 'Bottom': + elif kwargs["action_key"] == "Bottom": NzbQueue.do.move_bottom_bulk(nzo_id, nzf_ids) if NzbQueue.do.get_nzo(nzo_id): url = urllib.parse.urljoin(self.__root, nzo_id) else: - url = urllib.parse.urljoin(self.__root, '../queue') - if url and not url.endswith('/'): - url += '/' + url = urllib.parse.urljoin(self.__root, "../queue") + if url and not url.endswith("/"): + url += "/" raise Raiser(url) ############################################################################## class QueuePage: - def __init__(self, root): self.__root = root @secured_expose def index(self, **kwargs): - start = int_conv(kwargs.get('start')) - limit = int_conv(kwargs.get('limit')) - search = kwargs.get('search') + start = int_conv(kwargs.get("start")) + limit = int_conv(kwargs.get("limit")) + search = kwargs.get("search") info, _pnfo_list, _bytespersec = build_queue(start=start, limit=limit, trans=True, search=search) - template = Template(file=os.path.join(sabnzbd.WEB_DIR, 'queue.tmpl'), - searchList=[info], compilerSettings=CHEETAH_DIRECTIVES) + template = Template( + file=os.path.join(sabnzbd.WEB_DIR, "queue.tmpl"), searchList=[info], compilerSettings=CHEETAH_DIRECTIVES + ) return template.respond() 
@secured_expose(check_api_key=True) def delete(self, **kwargs): - uid = kwargs.get('uid') - del_files = int_conv(kwargs.get('del_files')) + uid = kwargs.get("uid") + del_files = int_conv(kwargs.get("del_files")) if uid: NzbQueue.do.remove(uid, add_to_history=False, delete_all_data=del_files) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) def purge(self, **kwargs): - NzbQueue.do.remove_all(kwargs.get('search')) + NzbQueue.do.remove_all(kwargs.get("search")) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) @@ -864,42 +927,42 @@ class QueuePage: """ Action or script to be performed once the queue has been completed Scripts are prefixed with 'script_' """ - action = kwargs.get('action') + action = kwargs.get("action") sabnzbd.change_queue_complete_action(action) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) def switch(self, **kwargs): - uid1 = kwargs.get('uid1') - uid2 = kwargs.get('uid2') + uid1 = kwargs.get("uid1") + uid2 = kwargs.get("uid2") if uid1 and uid2: NzbQueue.do.switch(uid1, uid2) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) def change_opts(self, **kwargs): - nzo_id = kwargs.get('nzo_id') - pp = kwargs.get('pp', '') + nzo_id = kwargs.get("nzo_id") + pp = kwargs.get("pp", "") if nzo_id and pp and pp.isdigit(): NzbQueue.do.change_opts(nzo_id, int(pp)) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) def change_script(self, **kwargs): - nzo_id = kwargs.get('nzo_id') - script = kwargs.get('script', '') + nzo_id = kwargs.get("nzo_id") + script = kwargs.get("script", "") if nzo_id and script: - if script == 'None': + if script == "None": script = None NzbQueue.do.change_script(nzo_id, script) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) def change_cat(self, **kwargs): - nzo_id = kwargs.get('nzo_id') - cat = kwargs.get('cat', '') + nzo_id = kwargs.get("nzo_id") + cat = kwargs.get("cat", "") if 
nzo_id and cat: - if cat == 'None': + if cat == "None": cat = None NzbQueue.do.change_cat(nzo_id, cat) @@ -908,7 +971,7 @@ class QueuePage: @secured_expose(check_api_key=True) def shutdown(self, **kwargs): sabnzbd.shutdown_program() - return T('SABnzbd shutdown finished') + return T("SABnzbd shutdown finished") @secured_expose(check_api_key=True) def pause(self, **kwargs): @@ -924,81 +987,88 @@ class QueuePage: @secured_expose(check_api_key=True) def pause_nzo(self, **kwargs): - uid = kwargs.get('uid', '') - NzbQueue.do.pause_multiple_nzo(uid.split(',')) + uid = kwargs.get("uid", "") + NzbQueue.do.pause_multiple_nzo(uid.split(",")) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) def resume_nzo(self, **kwargs): - uid = kwargs.get('uid', '') - NzbQueue.do.resume_multiple_nzo(uid.split(',')) + uid = kwargs.get("uid", "") + NzbQueue.do.resume_multiple_nzo(uid.split(",")) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) def set_priority(self, **kwargs): - NzbQueue.do.set_priority(kwargs.get('nzo_id'), kwargs.get('priority')) + NzbQueue.do.set_priority(kwargs.get("nzo_id"), kwargs.get("priority")) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) def sort_by_avg_age(self, **kwargs): - NzbQueue.do.sort_queue('avg_age', kwargs.get('dir')) + NzbQueue.do.sort_queue("avg_age", kwargs.get("dir")) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) def sort_by_name(self, **kwargs): - NzbQueue.do.sort_queue('name', kwargs.get('dir')) + NzbQueue.do.sort_queue("name", kwargs.get("dir")) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) def sort_by_size(self, **kwargs): - NzbQueue.do.sort_queue('size', kwargs.get('dir')) + NzbQueue.do.sort_queue("size", kwargs.get("dir")) raise queueRaiser(self.__root, kwargs) ############################################################################## class HistoryPage: - def __init__(self, root): self.__root = root 
self.__failed_only = False @secured_expose def index(self, **kwargs): - start = int_conv(kwargs.get('start')) - limit = int_conv(kwargs.get('limit')) - search = kwargs.get('search') - failed_only = kwargs.get('failed_only') + start = int_conv(kwargs.get("start")) + limit = int_conv(kwargs.get("limit")) + search = kwargs.get("search") + failed_only = kwargs.get("failed_only") if failed_only is None: failed_only = self.__failed_only history = build_header() - history['failed_only'] = failed_only - history['rating_enable'] = bool(cfg.rating_enable()) + history["failed_only"] = failed_only + history["rating_enable"] = bool(cfg.rating_enable()) - postfix = T('B') # : Abbreviation for bytes, as in GB + postfix = T("B") # : Abbreviation for bytes, as in GB grand, month, week, day = BPSMeter.do.get_sums() - history['total_size'], history['month_size'], history['week_size'], history['day_size'] = \ - to_units(grand, postfix=postfix), to_units(month, postfix=postfix), \ - to_units(week, postfix=postfix), to_units(day, postfix=postfix) + history["total_size"], history["month_size"], history["week_size"], history["day_size"] = ( + to_units(grand, postfix=postfix), + to_units(month, postfix=postfix), + to_units(week, postfix=postfix), + to_units(day, postfix=postfix), + ) - history['lines'], history['fetched'], history['noofslots'] = build_history(limit=limit, start=start, search=search, failed_only=failed_only) + history["lines"], history["fetched"], history["noofslots"] = build_history( + limit=limit, start=start, search=search, failed_only=failed_only + ) if search: - history['search'] = escape(search) + history["search"] = escape(search) else: - history['search'] = '' - - history['start'] = int_conv(start) - history['limit'] = int_conv(limit) - history['finish'] = history['start'] + history['limit'] - if history['finish'] > history['noofslots']: - history['finish'] = history['noofslots'] - if not history['finish']: - history['finish'] = history['fetched'] - 
history['time_format'] = time_format - - template = Template(file=os.path.join(sabnzbd.WEB_DIR, 'history.tmpl'), - searchList=[history], compilerSettings=CHEETAH_DIRECTIVES) + history["search"] = "" + + history["start"] = int_conv(start) + history["limit"] = int_conv(limit) + history["finish"] = history["start"] + history["limit"] + if history["finish"] > history["noofslots"]: + history["finish"] = history["noofslots"] + if not history["finish"]: + history["finish"] = history["fetched"] + history["time_format"] = time_format + + template = Template( + file=os.path.join(sabnzbd.WEB_DIR, "history.tmpl"), + searchList=[history], + compilerSettings=CHEETAH_DIRECTIVES, + ) return template.respond() @secured_expose(check_api_key=True) @@ -1009,88 +1079,102 @@ class HistoryPage: @secured_expose(check_api_key=True) def delete(self, **kwargs): - job = kwargs.get('job') - del_files = int_conv(kwargs.get('del_files')) + job = kwargs.get("job") + del_files = int_conv(kwargs.get("del_files")) if job: - jobs = job.split(',') + jobs = job.split(",") for job in jobs: del_hist_job(job, del_files=del_files) raise queueRaiser(self.__root, kwargs) @secured_expose(check_api_key=True) def retry_pp(self, **kwargs): - retry_job(kwargs.get('job'), kwargs.get('nzbfile'), kwargs.get('password')) + retry_job(kwargs.get("job"), kwargs.get("nzbfile"), kwargs.get("password")) raise queueRaiser(self.__root, kwargs) ############################################################################## class ConfigPage: - def __init__(self, root): self.__root = root - self.folders = ConfigFolders('/config/folders/') - self.notify = ConfigNotify('/config/notify/') - self.general = ConfigGeneral('/config/general/') - self.rss = ConfigRss('/config/rss/') - self.scheduling = ConfigScheduling('/config/scheduling/') - self.server = ConfigServer('/config/server/') - self.switches = ConfigSwitches('/config/switches/') - self.categories = ConfigCats('/config/categories/') - self.sorting = 
ConfigSorting('/config/sorting/') - self.special = ConfigSpecial('/config/special/') + self.folders = ConfigFolders("/config/folders/") + self.notify = ConfigNotify("/config/notify/") + self.general = ConfigGeneral("/config/general/") + self.rss = ConfigRss("/config/rss/") + self.scheduling = ConfigScheduling("/config/scheduling/") + self.server = ConfigServer("/config/server/") + self.switches = ConfigSwitches("/config/switches/") + self.categories = ConfigCats("/config/categories/") + self.sorting = ConfigSorting("/config/sorting/") + self.special = ConfigSpecial("/config/special/") @secured_expose(check_configlock=True) def index(self, **kwargs): conf = build_header(sabnzbd.WEB_DIR_CONFIG) - conf['configfn'] = clip_path(config.get_filename()) - conf['cmdline'] = sabnzbd.CMDLINE - conf['build'] = sabnzbd.version.__baseline__[:7] + conf["configfn"] = clip_path(config.get_filename()) + conf["cmdline"] = sabnzbd.CMDLINE + conf["build"] = sabnzbd.version.__baseline__[:7] - conf['have_unzip'] = bool(sabnzbd.newsunpack.ZIP_COMMAND) - conf['have_7zip'] = bool(sabnzbd.newsunpack.SEVEN_COMMAND) - conf['have_sabyenc'] = SABYENC_ENABLED - conf['have_mt_par2'] = sabnzbd.newsunpack.PAR2_MT + conf["have_unzip"] = bool(sabnzbd.newsunpack.ZIP_COMMAND) + conf["have_7zip"] = bool(sabnzbd.newsunpack.SEVEN_COMMAND) + conf["have_sabyenc"] = SABYENC_ENABLED + conf["have_mt_par2"] = sabnzbd.newsunpack.PAR2_MT - conf['certificate_validation'] = sabnzbd.CERTIFICATE_VALIDATION - conf['ssl_version'] = ssl.OPENSSL_VERSION + conf["certificate_validation"] = sabnzbd.CERTIFICATE_VALIDATION + conf["ssl_version"] = ssl.OPENSSL_VERSION new = {} for svr in config.get_servers(): new[svr] = {} - conf['servers'] = new + conf["servers"] = new - conf['folders'] = NzbQueue.do.scan_jobs(all_jobs=False, action=False) + conf["folders"] = NzbQueue.do.scan_jobs(all_jobs=False, action=False) - template = Template(file=os.path.join(sabnzbd.WEB_DIR_CONFIG, 'config.tmpl'), - searchList=[conf], 
compilerSettings=CHEETAH_DIRECTIVES) + template = Template( + file=os.path.join(sabnzbd.WEB_DIR_CONFIG, "config.tmpl"), + searchList=[conf], + compilerSettings=CHEETAH_DIRECTIVES, + ) return template.respond() @secured_expose(check_api_key=True) def restart(self, **kwargs): - logging.info('Restart requested by interface') + logging.info("Restart requested by interface") # Do the shutdown async to still send goodbye to browser - Thread(target=sabnzbd.trigger_restart, kwargs={'timeout': 1}).start() - return T(' 
SABnzbd shutdown finished.
Wait for about 5 second and then click the button below.

Refresh
') + Thread(target=sabnzbd.trigger_restart, kwargs={"timeout": 1}).start() + return T( + ' 
SABnzbd shutdown finished.
Wait for about 5 second and then click the button below.

Refresh
' + ) @secured_expose(check_api_key=True) def repair(self, **kwargs): - logging.info('Queue repair requested by interface') + logging.info("Queue repair requested by interface") sabnzbd.request_repair() # Do the shutdown async to still send goodbye to browser - Thread(target=sabnzbd.trigger_restart, kwargs={'timeout': 1}).start() - return T(' 
SABnzbd shutdown finished.
Wait for about 5 second and then click the button below.

Refresh
') + Thread(target=sabnzbd.trigger_restart, kwargs={"timeout": 1}).start() + return T( + ' 
SABnzbd shutdown finished.
Wait for about 5 second and then click the button below.

Refresh
' + ) ############################################################################## LIST_DIRPAGE = ( - 'download_dir', 'download_free', 'complete_dir', 'admin_dir', - 'nzb_backup_dir', 'dirscan_dir', 'dirscan_speed', 'script_dir', - 'email_dir', 'permissions', 'log_dir', 'password_file' + "download_dir", + "download_free", + "complete_dir", + "admin_dir", + "nzb_backup_dir", + "dirscan_dir", + "dirscan_speed", + "script_dir", + "email_dir", + "permissions", + "log_dir", + "password_file", ) class ConfigFolders: - def __init__(self, root): self.__root = root @@ -1099,10 +1183,13 @@ class ConfigFolders: conf = build_header(sabnzbd.WEB_DIR_CONFIG) for kw in LIST_DIRPAGE: - conf[kw] = config.get_config('misc', kw)() + conf[kw] = config.get_config("misc", kw)() - template = Template(file=os.path.join(sabnzbd.WEB_DIR_CONFIG, 'config_folders.tmpl'), - searchList=[conf], compilerSettings=CHEETAH_DIRECTIVES) + template = Template( + file=os.path.join(sabnzbd.WEB_DIR_CONFIG, "config_folders.tmpl"), + searchList=[conf], + compilerSettings=CHEETAH_DIRECTIVES, + ) return template.respond() @secured_expose(check_api_key=True, check_configlock=True) @@ -1110,45 +1197,86 @@ class ConfigFolders: for kw in LIST_DIRPAGE: value = kwargs.get(kw) if value is not None: - if kw in ('complete_dir', 'dirscan_dir'): - msg = config.get_config('misc', kw).set(value, create=True) + if kw in ("complete_dir", "dirscan_dir"): + msg = config.get_config("misc", kw).set(value, create=True) else: - msg = config.get_config('misc', kw).set(value) + msg = config.get_config("misc", kw).set(value) if msg: # return sabnzbd.api.report('json', error=msg) - return badParameterResponse(msg, kwargs.get('ajax')) + return badParameterResponse(msg, kwargs.get("ajax")) sabnzbd.check_incomplete_vs_complete() config.save_config() - if kwargs.get('ajax'): - return sabnzbd.api.report('json') + if kwargs.get("ajax"): + return sabnzbd.api.report("json") else: raise Raiser(self.__root) 
############################################################################## -SWITCH_LIST = \ - ('par_option', 'top_only', 'direct_unpack', 'enable_meta', 'win_process_prio', - 'auto_sort', 'propagation_delay', 'auto_disconnect', 'flat_unpack', - 'safe_postproc', 'no_dupes', 'replace_spaces', 'replace_dots', - 'ignore_samples', 'pause_on_post_processing', 'nice', 'ionice', - 'pre_script', 'pause_on_pwrar', 'sfv_check', 'folder_rename', 'load_balancing', - 'quota_size', 'quota_day', 'quota_resume', 'quota_period', 'history_retention', - 'pre_check', 'max_art_tries', 'fail_hopeless_jobs', 'enable_all_par', - 'enable_recursive', 'no_series_dupes', 'series_propercheck', 'script_can_fail', - 'new_nzb_on_failure', 'unwanted_extensions', 'action_on_unwanted_extensions', 'sanitize_safe', - 'rating_enable', 'rating_api_key', 'rating_filter_enable', - 'rating_filter_abort_audio', 'rating_filter_abort_video', 'rating_filter_abort_encrypted', - 'rating_filter_abort_encrypted_confirm', 'rating_filter_abort_spam', 'rating_filter_abort_spam_confirm', - 'rating_filter_abort_downvoted', 'rating_filter_abort_keywords', - 'rating_filter_pause_audio', 'rating_filter_pause_video', 'rating_filter_pause_encrypted', - 'rating_filter_pause_encrypted_confirm', 'rating_filter_pause_spam', 'rating_filter_pause_spam_confirm', - 'rating_filter_pause_downvoted', 'rating_filter_pause_keywords' - ) +SWITCH_LIST = ( + "par_option", + "top_only", + "direct_unpack", + "enable_meta", + "win_process_prio", + "auto_sort", + "propagation_delay", + "auto_disconnect", + "flat_unpack", + "safe_postproc", + "no_dupes", + "replace_spaces", + "replace_dots", + "ignore_samples", + "pause_on_post_processing", + "nice", + "ionice", + "pre_script", + "pause_on_pwrar", + "sfv_check", + "folder_rename", + "load_balancing", + "quota_size", + "quota_day", + "quota_resume", + "quota_period", + "history_retention", + "pre_check", + "max_art_tries", + "fail_hopeless_jobs", + "enable_all_par", + "enable_recursive", + 
"no_series_dupes", + "series_propercheck", + "script_can_fail", + "new_nzb_on_failure", + "unwanted_extensions", + "action_on_unwanted_extensions", + "sanitize_safe", + "rating_enable", + "rating_api_key", + "rating_filter_enable", + "rating_filter_abort_audio", + "rating_filter_abort_video", + "rating_filter_abort_encrypted", + "rating_filter_abort_encrypted_confirm", + "rating_filter_abort_spam", + "rating_filter_abort_spam_confirm", + "rating_filter_abort_downvoted", + "rating_filter_abort_keywords", + "rating_filter_pause_audio", + "rating_filter_pause_video", + "rating_filter_pause_encrypted", + "rating_filter_pause_encrypted_confirm", + "rating_filter_pause_spam", + "rating_filter_pause_spam_confirm", + "rating_filter_pause_downvoted", + "rating_filter_pause_keywords", +) class ConfigSwitches: - def __init__(self, root): self.__root = root @@ -1156,33 +1284,36 @@ class ConfigSwitches: def index(self, **kwargs): conf = build_header(sabnzbd.WEB_DIR_CONFIG) - conf['certificate_validation'] = sabnzbd.CERTIFICATE_VALIDATION - conf['have_nice'] = bool(sabnzbd.newsunpack.NICE_COMMAND) - conf['have_ionice'] = bool(sabnzbd.newsunpack.IONICE_COMMAND) - conf['cleanup_list'] = cfg.cleanup_list.get_string() + conf["certificate_validation"] = sabnzbd.CERTIFICATE_VALIDATION + conf["have_nice"] = bool(sabnzbd.newsunpack.NICE_COMMAND) + conf["have_ionice"] = bool(sabnzbd.newsunpack.IONICE_COMMAND) + conf["cleanup_list"] = cfg.cleanup_list.get_string() for kw in SWITCH_LIST: - conf[kw] = config.get_config('misc', kw)() - conf['unwanted_extensions'] = cfg.unwanted_extensions.get_string() + conf[kw] = config.get_config("misc", kw)() + conf["unwanted_extensions"] = cfg.unwanted_extensions.get_string() - conf['scripts'] = list_scripts() or ['None'] + conf["scripts"] = list_scripts() or ["None"] - template = Template(file=os.path.join(sabnzbd.WEB_DIR_CONFIG, 'config_switches.tmpl'), - searchList=[conf], compilerSettings=CHEETAH_DIRECTIVES) + template = Template( + 
file=os.path.join(sabnzbd.WEB_DIR_CONFIG, "config_switches.tmpl"), + searchList=[conf], + compilerSettings=CHEETAH_DIRECTIVES, + ) return template.respond() @secured_expose(check_api_key=True, check_configlock=True) def saveSwitches(self, **kwargs): for kw in SWITCH_LIST: - item = config.get_config('misc', kw) + item = config.get_config("misc", kw) value = kwargs.get(kw) - if kw == 'unwanted_extensions' and value: - value = value.lower().replace('.', '') + if kw == "unwanted_extensions" and value: + value = value.lower().replace(".", "") msg = item.set(value) if msg: return badParameterResponse(msg) - cleanup_list = kwargs.get('cleanup_list') + cleanup_list = kwargs.get("cleanup_list") if cleanup_list and sabnzbd.WIN32: cleanup_list = cleanup_list.lower() cfg.cleanup_list.set(cleanup_list) @@ -1192,42 +1323,92 @@ class ConfigSwitches: ############################################################################## -SPECIAL_BOOL_LIST = \ - ('start_paused', 'no_penalties', 'fast_fail', 'ignore_wrong_unrar', 'overwrite_files', 'enable_par_cleanup', - 'queue_complete_pers', 'api_warnings', 'ampm', 'enable_unrar', 'enable_unzip', 'enable_7zip', - 'enable_filejoin', 'enable_tsjoin', 'ignore_unrar_dates', 'multipar', 'osx_menu', 'osx_speed', 'win_menu', - 'allow_incomplete_nzb', 'rss_filenames', 'ipv6_hosting', 'keep_awake', 'empty_postproc', 'html_login', - 'wait_for_dfolder', 'max_art_opt', 'warn_empty_nzb', 'enable_bonjour', 'warn_dupl_jobs', - 'replace_illegal', 'backup_for_duplicates', 'disable_api_key', 'api_logging', 'x_frame_options', - 'require_modern_tls' - ) -SPECIAL_VALUE_LIST = \ - ('size_limit', 'movie_rename_limit', 'nomedia_marker', 'max_url_retries', 'req_completion_rate', 'wait_ext_drive', - 'show_sysload', 'url_base', 'direct_unpack_threads', 'ipv6_servers', 'selftest_host', 'rating_host' - ) -SPECIAL_LIST_LIST = ('rss_odd_titles', 'quick_check_ext_ignore', 'host_whitelist') +SPECIAL_BOOL_LIST = ( + "start_paused", + "no_penalties", + "fast_fail", + 
"ignore_wrong_unrar", + "overwrite_files", + "enable_par_cleanup", + "queue_complete_pers", + "api_warnings", + "ampm", + "enable_unrar", + "enable_unzip", + "enable_7zip", + "enable_filejoin", + "enable_tsjoin", + "ignore_unrar_dates", + "multipar", + "osx_menu", + "osx_speed", + "win_menu", + "allow_incomplete_nzb", + "rss_filenames", + "ipv6_hosting", + "keep_awake", + "empty_postproc", + "html_login", + "wait_for_dfolder", + "max_art_opt", + "warn_empty_nzb", + "enable_bonjour", + "warn_dupl_jobs", + "replace_illegal", + "backup_for_duplicates", + "disable_api_key", + "api_logging", + "x_frame_options", + "require_modern_tls", +) +SPECIAL_VALUE_LIST = ( + "size_limit", + "movie_rename_limit", + "nomedia_marker", + "max_url_retries", + "req_completion_rate", + "wait_ext_drive", + "show_sysload", + "url_base", + "direct_unpack_threads", + "ipv6_servers", + "selftest_host", + "rating_host", +) +SPECIAL_LIST_LIST = ("rss_odd_titles", "quick_check_ext_ignore", "host_whitelist") class ConfigSpecial: - def __init__(self, root): self.__root = root @secured_expose(check_configlock=True) def index(self, **kwargs): conf = build_header(sabnzbd.WEB_DIR_CONFIG) - conf['switches'] = [(kw, config.get_config('misc', kw)(), config.get_config('misc', kw).default()) for kw in SPECIAL_BOOL_LIST] - conf['entries'] = [(kw, config.get_config('misc', kw)(), config.get_config('misc', kw).default()) for kw in SPECIAL_VALUE_LIST] - conf['entries'].extend([(kw, config.get_config('misc', kw).get_string(), config.get_config('misc', kw).default_string()) for kw in SPECIAL_LIST_LIST]) - - template = Template(file=os.path.join(sabnzbd.WEB_DIR_CONFIG, 'config_special.tmpl'), - searchList=[conf], compilerSettings=CHEETAH_DIRECTIVES) + conf["switches"] = [ + (kw, config.get_config("misc", kw)(), config.get_config("misc", kw).default()) for kw in SPECIAL_BOOL_LIST + ] + conf["entries"] = [ + (kw, config.get_config("misc", kw)(), config.get_config("misc", kw).default()) for kw in SPECIAL_VALUE_LIST 
+ ] + conf["entries"].extend( + [ + (kw, config.get_config("misc", kw).get_string(), config.get_config("misc", kw).default_string()) + for kw in SPECIAL_LIST_LIST + ] + ) + + template = Template( + file=os.path.join(sabnzbd.WEB_DIR_CONFIG, "config_special.tmpl"), + searchList=[conf], + compilerSettings=CHEETAH_DIRECTIVES, + ) return template.respond() @secured_expose(check_api_key=True, check_configlock=True) def saveSpecial(self, **kwargs): for kw in SPECIAL_BOOL_LIST + SPECIAL_VALUE_LIST + SPECIAL_LIST_LIST: - item = config.get_config('misc', kw) + item = config.get_config("misc", kw) value = kwargs.get(kw) msg = item.set(value) if msg: @@ -1239,15 +1420,26 @@ class ConfigSpecial: ############################################################################## GENERAL_LIST = ( - 'host', 'port', 'username', 'refresh_rate', 'language', 'cache_limit', - 'local_ranges', 'inet_exposure', 'enable_https', 'https_port', - 'https_cert', 'https_key', 'https_chain', 'enable_https_verification', - 'auto_browser', 'check_new_rel' + "host", + "port", + "username", + "refresh_rate", + "language", + "cache_limit", + "local_ranges", + "inet_exposure", + "enable_https", + "https_port", + "https_cert", + "https_key", + "https_chain", + "enable_https_verification", + "auto_browser", + "check_new_rel", ) class ConfigGeneral: - def __init__(self, root): self.__root = root @@ -1256,11 +1448,11 @@ class ConfigGeneral: def ListColors(web_dir): lst = [] web_dir = os.path.join(sabnzbd.DIR_INTERFACES, web_dir) - dd = os.path.abspath(web_dir + '/templates/static/stylesheets/colorschemes') + dd = os.path.abspath(web_dir + "/templates/static/stylesheets/colorschemes") if (not dd) or (not os.access(dd, os.R_OK)): return lst for color in globber(dd): - col = color.replace('.css', '') + col = color.replace(".css", "") lst.append(col) return lst @@ -1271,14 +1463,14 @@ class ConfigGeneral: color = DEF_SKIN_COLORS[skin_dir.lower()] except KeyError: return skin_dir - return '%s - %s' % (skin_dir, 
color) + return "%s - %s" % (skin_dir, color) else: - return '' + return "" conf = build_header(sabnzbd.WEB_DIR_CONFIG) - conf['configfn'] = config.get_filename() - conf['certificate_validation'] = sabnzbd.CERTIFICATE_VALIDATION + conf["configfn"] = config.get_filename() + conf["certificate_validation"] = sabnzbd.CERTIFICATE_VALIDATION wlist = [] interfaces = globber_full(sabnzbd.DIR_INTERFACES) @@ -1296,79 +1488,82 @@ class ConfigGeneral: wlist.append(add_color(rweb, col)) else: wlist.append(rweb) - conf['web_list'] = wlist - conf['web_dir'] = add_color(cfg.web_dir(), cfg.web_color()) - conf['password'] = cfg.password.get_stars() + conf["web_list"] = wlist + conf["web_dir"] = add_color(cfg.web_dir(), cfg.web_color()) + conf["password"] = cfg.password.get_stars() - conf['language'] = cfg.language() + conf["language"] = cfg.language() lang_list = list_languages() if len(lang_list) < 2: lang_list = [] - conf['lang_list'] = lang_list + conf["lang_list"] = lang_list for kw in GENERAL_LIST: - conf[kw] = config.get_config('misc', kw)() - - conf['bandwidth_max'] = cfg.bandwidth_max() - conf['bandwidth_perc'] = cfg.bandwidth_perc() - conf['nzb_key'] = cfg.nzb_key() - conf['local_ranges'] = cfg.local_ranges.get_string() - conf['my_lcldata'] = cfg.admin_dir.get_clipped_path() - conf['caller_url'] = cherrypy.request.base + cfg.url_base() - - template = Template(file=os.path.join(sabnzbd.WEB_DIR_CONFIG, 'config_general.tmpl'), - searchList=[conf], compilerSettings=CHEETAH_DIRECTIVES) + conf[kw] = config.get_config("misc", kw)() + + conf["bandwidth_max"] = cfg.bandwidth_max() + conf["bandwidth_perc"] = cfg.bandwidth_perc() + conf["nzb_key"] = cfg.nzb_key() + conf["local_ranges"] = cfg.local_ranges.get_string() + conf["my_lcldata"] = cfg.admin_dir.get_clipped_path() + conf["caller_url"] = cherrypy.request.base + cfg.url_base() + + template = Template( + file=os.path.join(sabnzbd.WEB_DIR_CONFIG, "config_general.tmpl"), + searchList=[conf], + compilerSettings=CHEETAH_DIRECTIVES, + 
) return template.respond() @secured_expose(check_api_key=True, check_configlock=True) def saveGeneral(self, **kwargs): # Handle general options for kw in GENERAL_LIST: - item = config.get_config('misc', kw) + item = config.get_config("misc", kw) value = kwargs.get(kw) msg = item.set(value) if msg: return badParameterResponse(msg) # Handle special options - cfg.password.set(kwargs.get('password')) + cfg.password.set(kwargs.get("password")) - web_dir = kwargs.get('web_dir') + web_dir = kwargs.get("web_dir") change_web_dir(web_dir) - bandwidth_max = kwargs.get('bandwidth_max') + bandwidth_max = kwargs.get("bandwidth_max") if bandwidth_max is not None: cfg.bandwidth_max.set(bandwidth_max) - bandwidth_perc = kwargs.get('bandwidth_perc') + bandwidth_perc = kwargs.get("bandwidth_perc") if bandwidth_perc is not None: cfg.bandwidth_perc.set(bandwidth_perc) bandwidth_perc = cfg.bandwidth_perc() if bandwidth_perc and not bandwidth_max: - logging.warning(T('You must set a maximum bandwidth before you can set a bandwidth limit')) + logging.warning(T("You must set a maximum bandwidth before you can set a bandwidth limit")) config.save_config() # Update CherryPy authentication set_auth(cherrypy.config) - if kwargs.get('ajax'): - return sabnzbd.api.report('json', data={'success': True, 'restart_req': sabnzbd.RESTART_REQ}) + if kwargs.get("ajax"): + return sabnzbd.api.report("json", data={"success": True, "restart_req": sabnzbd.RESTART_REQ}) else: raise Raiser(self.__root) def change_web_dir(web_dir): try: - web_dir, web_color = web_dir.split(' - ') + web_dir, web_color = web_dir.split(" - ") except: try: web_color = DEF_SKIN_COLORS[web_dir.lower()] except: - web_color = '' + web_color = "" web_dir_path = real_path(sabnzbd.DIR_INTERFACES, web_dir) if not os.path.exists(web_dir_path): - return badParameterResponse('Cannot find web template: %s' % web_dir_path) + return badParameterResponse("Cannot find web template: %s" % web_dir_path) else: cfg.web_dir.set(web_dir) 
cfg.web_color.set(web_color) @@ -1376,7 +1571,6 @@ def change_web_dir(web_dir): ############################################################################## class ConfigServer: - def __init__(self, root): self.__root = root @@ -1385,18 +1579,25 @@ class ConfigServer: conf = build_header(sabnzbd.WEB_DIR_CONFIG) new = [] servers = config.get_servers() - server_names = sorted(list(servers.keys()), key=lambda svr: '%d%02d%s' % (int(not servers[svr].enable()), servers[svr].priority(), servers[svr].displayname().lower())) + server_names = sorted( + list(servers.keys()), + key=lambda svr: "%d%02d%s" + % (int(not servers[svr].enable()), servers[svr].priority(), servers[svr].displayname().lower()), + ) for svr in server_names: new.append(servers[svr].get_dict(safe=True)) t, m, w, d, timeline = BPSMeter.do.amounts(svr) if t: - new[-1]['amounts'] = to_units(t), to_units(m), to_units(w), to_units(d), timeline - conf['servers'] = new - conf['cats'] = list_cats(default=True) - conf['certificate_validation'] = sabnzbd.CERTIFICATE_VALIDATION - - template = Template(file=os.path.join(sabnzbd.WEB_DIR_CONFIG, 'config_server.tmpl'), - searchList=[conf], compilerSettings=CHEETAH_DIRECTIVES) + new[-1]["amounts"] = to_units(t), to_units(m), to_units(w), to_units(d), timeline + conf["servers"] = new + conf["cats"] = list_cats(default=True) + conf["certificate_validation"] = sabnzbd.CERTIFICATE_VALIDATION + + template = Template( + file=os.path.join(sabnzbd.WEB_DIR_CONFIG, "config_server.tmpl"), + searchList=[conf], + compilerSettings=CHEETAH_DIRECTIVES, + ) return template.respond() @secured_expose(check_api_key=True, check_configlock=True) @@ -1413,23 +1614,23 @@ class ConfigServer: @secured_expose(check_api_key=True, check_configlock=True) def delServer(self, **kwargs): - kwargs['section'] = 'servers' - kwargs['keyword'] = kwargs.get('server') + kwargs["section"] = "servers" + kwargs["keyword"] = kwargs.get("server") del_from_section(kwargs) raise Raiser(self.__root) 
@secured_expose(check_api_key=True, check_configlock=True) def clrServer(self, **kwargs): - server = kwargs.get('server') + server = kwargs.get("server") if server: BPSMeter.do.clear_server(server) raise Raiser(self.__root) @secured_expose(check_api_key=True, check_configlock=True) def toggleServer(self, **kwargs): - server = kwargs.get('server') + server = kwargs.get("server") if server: - svr = config.get_config('servers', server) + svr = config.get_config("servers", server) if svr: svr.enable.set(not svr.enable()) config.save_config() @@ -1444,18 +1645,18 @@ def unique_svr_name(server): new_name = server while svr: if num: - new_name = '%s@%d' % (server, num) + new_name = "%s@%d" % (server, num) else: - new_name = '%s' % server - svr = config.get_config('servers', new_name) + new_name = "%s" % server + svr = config.get_config("servers", new_name) num += 1 return new_name def check_server(host, port, ajax): """ Check if server address resolves properly """ - if host.lower() == 'localhost' and sabnzbd.AMBI_LOCALHOST: - return badParameterResponse(T('Warning: LOCALHOST is ambiguous, use numerical IP-address.'), ajax) + if host.lower() == "localhost" and sabnzbd.AMBI_LOCALHOST: + return badParameterResponse(T("Warning: LOCALHOST is ambiguous, use numerical IP-address."), ajax) if GetServerParms(host, int_conv(port)): return "" @@ -1465,23 +1666,23 @@ def check_server(host, port, ajax): def handle_server(kwargs, root=None, new_svr=False): """ Internal server handler """ - ajax = kwargs.get('ajax') - host = kwargs.get('host', '').strip() + ajax = kwargs.get("ajax") + host = kwargs.get("host", "").strip() if not host: - return badParameterResponse(T('Server address required'), ajax) + return badParameterResponse(T("Server address required"), ajax) - port = kwargs.get('port', '').strip() + port = kwargs.get("port", "").strip() if not port: - if not kwargs.get('ssl', '').strip(): - port = '119' + if not kwargs.get("ssl", "").strip(): + port = "119" else: - port = '563' - 
kwargs['port'] = port + port = "563" + kwargs["port"] = port - if kwargs.get('connections', '').strip() == '': - kwargs['connections'] = '1' + if kwargs.get("connections", "").strip() == "": + kwargs["connections"] = "1" - if kwargs.get('enable') == '1': + if kwargs.get("enable") == "1": msg = check_server(host, port, ajax) if msg: return msg @@ -1490,18 +1691,18 @@ def handle_server(kwargs, root=None, new_svr=False): server = host svr = None - old_server = kwargs.get('server') + old_server = kwargs.get("server") if old_server: - svr = config.get_config('servers', old_server) + svr = config.get_config("servers", old_server) if svr: server = old_server else: - svr = config.get_config('servers', server) + svr = config.get_config("servers", server) if new_svr: server = unique_svr_name(server) - for kw in ('ssl', 'send_group', 'enable', 'optional'): + for kw in ("ssl", "send_group", "enable", "optional"): if kw not in kwargs.keys(): kwargs[kw] = None if svr and not new_svr: @@ -1514,7 +1715,7 @@ def handle_server(kwargs, root=None, new_svr=False): Downloader.do.update_server(old_server, server) if root: if ajax: - return sabnzbd.api.report('json') + return sabnzbd.api.report("json") else: raise Raiser(root) @@ -1526,61 +1727,65 @@ def handle_server_test(kwargs, root): ############################################################################## class ConfigRss: - def __init__(self, root): self.__root = root - self.__refresh_readout = None # Set to URL when new readout is needed - self.__refresh_download = False # True when feed needs to be read - self.__refresh_force = False # True if forced download of all matches is required - self.__refresh_ignore = False # True if first batch of new feed must be ignored - self.__evaluate = False # True if feed needs to be re-filtered - self.__show_eval_button = False # True if the "Apply filers" button should be shown - self.__last_msg = '' # Last error message from RSS reader + self.__refresh_readout = None # Set to URL when new 
readout is needed + self.__refresh_download = False # True when feed needs to be read + self.__refresh_force = False # True if forced download of all matches is required + self.__refresh_ignore = False # True if first batch of new feed must be ignored + self.__evaluate = False # True if feed needs to be re-filtered + self.__show_eval_button = False # True if the "Apply filers" button should be shown + self.__last_msg = "" # Last error message from RSS reader @secured_expose(check_configlock=True) def index(self, **kwargs): conf = build_header(sabnzbd.WEB_DIR_CONFIG) - conf['scripts'] = list_scripts(default=True) - pick_script = conf['scripts'] != [] + conf["scripts"] = list_scripts(default=True) + pick_script = conf["scripts"] != [] - conf['categories'] = list_cats(default=True) - pick_cat = conf['categories'] != [] + conf["categories"] = list_cats(default=True) + pick_cat = conf["categories"] != [] - conf['rss_rate'] = cfg.rss_rate() + conf["rss_rate"] = cfg.rss_rate() rss = {} feeds = config.get_rss() for feed in feeds: rss[feed] = feeds[feed].get_dict() filters = feeds[feed].filters() - rss[feed]['filters'] = filters - rss[feed]['filter_states'] = [bool(sabnzbd.rss.convert_filter(f[4])) for f in filters] - rss[feed]['filtercount'] = len(filters) + rss[feed]["filters"] = filters + rss[feed]["filter_states"] = [bool(sabnzbd.rss.convert_filter(f[4])) for f in filters] + rss[feed]["filtercount"] = len(filters) - rss[feed]['pick_cat'] = pick_cat - rss[feed]['pick_script'] = pick_script - rss[feed]['link'] = urllib.parse.quote_plus(feed.encode('utf-8')) - rss[feed]['baselink'] = [get_base_url(uri) for uri in rss[feed]['uri']] - rss[feed]['uris'] = feeds[feed].uri.get_string() + rss[feed]["pick_cat"] = pick_cat + rss[feed]["pick_script"] = pick_script + rss[feed]["link"] = urllib.parse.quote_plus(feed.encode("utf-8")) + rss[feed]["baselink"] = [get_base_url(uri) for uri in rss[feed]["uri"]] + rss[feed]["uris"] = feeds[feed].uri.get_string() - active_feed = 
kwargs.get('feed', '') - conf['active_feed'] = active_feed - conf['rss'] = rss - conf['rss_next'] = time.strftime(time_format('%H:%M'), time.localtime(sabnzbd.rss.next_run())) + active_feed = kwargs.get("feed", "") + conf["active_feed"] = active_feed + conf["rss"] = rss + conf["rss_next"] = time.strftime(time_format("%H:%M"), time.localtime(sabnzbd.rss.next_run())) if active_feed: readout = bool(self.__refresh_readout) - logging.debug('RSS READOUT = %s', readout) + logging.debug("RSS READOUT = %s", readout) if not readout: self.__refresh_download = False self.__refresh_force = False self.__refresh_ignore = False if self.__evaluate: - msg = sabnzbd.rss.run_feed(active_feed, download=self.__refresh_download, force=self.__refresh_force, - ignoreFirst=self.__refresh_ignore, readout=readout) + msg = sabnzbd.rss.run_feed( + active_feed, + download=self.__refresh_download, + force=self.__refresh_force, + ignoreFirst=self.__refresh_ignore, + readout=readout, + ) else: - msg = '' + msg = "" self.__evaluate = False if readout: sabnzbd.rss.save() @@ -1588,28 +1793,31 @@ class ConfigRss: else: msg = self.__last_msg self.__refresh_readout = None - conf['evalButton'] = self.__show_eval_button - conf['error'] = msg + conf["evalButton"] = self.__show_eval_button + conf["error"] = msg - conf['downloaded'], conf['matched'], conf['unmatched'] = GetRssLog(active_feed) + conf["downloaded"], conf["matched"], conf["unmatched"] = GetRssLog(active_feed) else: - self.__last_msg = '' + self.__last_msg = "" # Find a unique new Feed name unum = 1 - txt = T('Feed') # : Used as default Feed name in Config->RSS + txt = T("Feed") # : Used as default Feed name in Config->RSS while txt + str(unum) in feeds: unum += 1 - conf['feed'] = txt + str(unum) + conf["feed"] = txt + str(unum) - template = Template(file=os.path.join(sabnzbd.WEB_DIR_CONFIG, 'config_rss.tmpl'), - searchList=[conf], compilerSettings=CHEETAH_DIRECTIVES) + template = Template( + file=os.path.join(sabnzbd.WEB_DIR_CONFIG, 
"config_rss.tmpl"), + searchList=[conf], + compilerSettings=CHEETAH_DIRECTIVES, + ) return template.respond() @secured_expose(check_api_key=True, check_configlock=True) def save_rss_rate(self, **kwargs): """ Save changed RSS automatic readout rate """ - cfg.rss_rate.set(kwargs.get('rss_rate')) + cfg.rss_rate.set(kwargs.get("rss_rate")) config.save_config() scheduler.restart() raise rssRaiser(self.__root, kwargs) @@ -1619,15 +1827,15 @@ class ConfigRss: """ Update Feed level attributes, legacy version: ignores 'enable' parameter """ - if kwargs.get('enable') is not None: - del kwargs['enable'] + if kwargs.get("enable") is not None: + del kwargs["enable"] try: - cf = config.get_rss()[kwargs.get('feed')] + cf = config.get_rss()[kwargs.get("feed")] except KeyError: cf = None - uri = Strip(kwargs.get('uri')) + uri = Strip(kwargs.get("uri")) if cf and uri: - kwargs['uri'] = uri + kwargs["uri"] = uri cf.set_dict(kwargs) config.save_config() @@ -1639,14 +1847,14 @@ class ConfigRss: def save_rss_feed(self, **kwargs): """ Update Feed level attributes """ try: - cf = config.get_rss()[kwargs.get('feed')] + cf = config.get_rss()[kwargs.get("feed")] except KeyError: cf = None - if 'enable' not in kwargs: - kwargs['enable'] = 0 - uri = Strip(kwargs.get('uri')) + if "enable" not in kwargs: + kwargs["enable"] = 0 + uri = Strip(kwargs.get("uri")) if cf and uri: - kwargs['uri'] = uri + kwargs["uri"] = uri cf.set_dict(kwargs) config.save_config() @@ -1656,13 +1864,13 @@ class ConfigRss: def toggle_rss_feed(self, **kwargs): """ Toggle automatic read-out flag of Feed """ try: - item = config.get_rss()[kwargs.get('feed')] + item = config.get_rss()[kwargs.get("feed")] except KeyError: item = None if cfg: item.enable.set(not item.enable()) config.save_config() - if kwargs.get('table'): + if kwargs.get("table"): raise Raiser(self.__root) else: raise rssRaiser(self.__root, kwargs) @@ -1670,16 +1878,16 @@ class ConfigRss: @secured_expose(check_api_key=True, check_configlock=True) def 
add_rss_feed(self, **kwargs): """ Add one new RSS feed definition """ - feed = Strip(kwargs.get('feed')).strip('[]') - uri = Strip(kwargs.get('uri')) + feed = Strip(kwargs.get("feed")).strip("[]") + uri = Strip(kwargs.get("uri")) if feed and uri: try: cfg = config.get_rss()[feed] except KeyError: cfg = None if (not cfg) and uri: - kwargs['feed'] = feed - kwargs['uri'] = uri + kwargs["feed"] = feed + kwargs["uri"] = uri config.ConfigRSS(feed, kwargs) # Clear out any existing reference to this feed name # Otherwise first-run detection can fail @@ -1704,26 +1912,27 @@ class ConfigRss: def internal_upd_rss_filter(self, **kwargs): """ Save updated filter definition """ try: - feed_cfg = config.get_rss()[kwargs.get('feed')] + feed_cfg = config.get_rss()[kwargs.get("feed")] except KeyError: raise rssRaiser(self.__root, kwargs) - pp = kwargs.get('pp') + pp = kwargs.get("pp") if IsNone(pp): - pp = '' - script = ConvertSpecials(kwargs.get('script')) - cat = ConvertSpecials(kwargs.get('cat')) - prio = ConvertSpecials(kwargs.get('priority')) - filt = kwargs.get('filter_text') - enabled = kwargs.get('enabled', '0') + pp = "" + script = ConvertSpecials(kwargs.get("script")) + cat = ConvertSpecials(kwargs.get("cat")) + prio = ConvertSpecials(kwargs.get("priority")) + filt = kwargs.get("filter_text") + enabled = kwargs.get("enabled", "0") if filt: - feed_cfg.filters.update(int(kwargs.get('index', 0)), (cat, pp, script, kwargs.get('filter_type'), - filt, prio, enabled)) + feed_cfg.filters.update( + int(kwargs.get("index", 0)), (cat, pp, script, kwargs.get("filter_type"), filt, prio, enabled) + ) # Move filter if requested - index = int_conv(kwargs.get('index', '')) - new_index = kwargs.get('new_index', '') + index = int_conv(kwargs.get("index", "")) + new_index = kwargs.get("new_index", "") if new_index and int_conv(new_index) != index: feed_cfg.filters.move(int(index), int_conv(new_index)) @@ -1735,10 +1944,10 @@ class ConfigRss: @secured_expose(check_api_key=True, 
check_configlock=True) def del_rss_feed(self, *args, **kwargs): """ Remove complete RSS feed """ - kwargs['section'] = 'rss' - kwargs['keyword'] = kwargs.get('feed') + kwargs["section"] = "rss" + kwargs["keyword"] = kwargs.get("feed") del_from_section(kwargs) - sabnzbd.rss.clear_feed(kwargs.get('feed')) + sabnzbd.rss.clear_feed(kwargs.get("feed")) raise Raiser(self.__root) @secured_expose(check_api_key=True, check_configlock=True) @@ -1749,11 +1958,11 @@ class ConfigRss: def internal_del_rss_filter(self, **kwargs): """ Remove one RSS filter """ try: - feed_cfg = config.get_rss()[kwargs.get('feed')] + feed_cfg = config.get_rss()[kwargs.get("feed")] except KeyError: raise rssRaiser(self.__root, kwargs) - feed_cfg.filters.delete(int(kwargs.get('index', 0))) + feed_cfg.filters.delete(int(kwargs.get("index", 0))) config.save_config() self.__evaluate = False self.__show_eval_button = True @@ -1762,8 +1971,8 @@ class ConfigRss: @secured_expose(check_api_key=True, check_configlock=True) def download_rss_feed(self, *args, **kwargs): """ Force download of all matching jobs in a feed """ - if 'feed' in kwargs: - feed = kwargs['feed'] + if "feed" in kwargs: + feed = kwargs["feed"] self.__refresh_readout = feed self.__refresh_download = True self.__refresh_force = True @@ -1774,15 +1983,15 @@ class ConfigRss: @secured_expose(check_api_key=True, check_configlock=True) def clean_rss_jobs(self, *args, **kwargs): """ Remove processed RSS jobs from UI """ - sabnzbd.rss.clear_downloaded(kwargs['feed']) + sabnzbd.rss.clear_downloaded(kwargs["feed"]) self.__evaluate = True raise rssRaiser(self.__root, kwargs) @secured_expose(check_api_key=True, check_configlock=True) def test_rss_feed(self, *args, **kwargs): """ Read the feed content again and show results """ - if 'feed' in kwargs: - feed = kwargs['feed'] + if "feed" in kwargs: + feed = kwargs["feed"] self.__refresh_readout = feed self.__refresh_download = False self.__refresh_force = False @@ -1794,7 +2003,7 @@ class ConfigRss: 
@secured_expose(check_api_key=True, check_configlock=True) def eval_rss_feed(self, *args, **kwargs): """ Re-apply the filters to the feed """ - if 'feed' in kwargs: + if "feed" in kwargs: self.__refresh_download = False self.__refresh_force = False self.__refresh_ignore = False @@ -1806,15 +2015,15 @@ class ConfigRss: @secured_expose(check_api_key=True, check_configlock=True) def download(self, **kwargs): """ Download NZB from provider (Download button) """ - feed = kwargs.get('feed') - url = kwargs.get('url') - nzbname = kwargs.get('nzbname') + feed = kwargs.get("feed") + url = kwargs.get("url") + nzbname = kwargs.get("nzbname") att = sabnzbd.rss.lookup_url(feed, url) if att: - pp = att.get('pp') - cat = att.get('cat') - script = att.get('script') - prio = att.get('prio') + pp = att.get("pp") + cat = att.get("cat") + script = att.get("script") + prio = att.get("prio") if url: sabnzbd.add_url(url, pp, script, cat, prio, nzbname) @@ -1832,15 +2041,15 @@ class ConfigRss: def ConvertSpecials(p): """ Convert None to 'None' and 'Default' to '' """ if p is None: - p = 'None' - elif p.lower() == T('Default').lower(): - p = '' + p = "None" + elif p.lower() == T("Default").lower(): + p = "" return p def IsNone(value): """ Return True if either None, 'None' or '' """ - return value is None or value == "" or value.lower() == 'none' + return value is None or value == "" or value.lower() == "none" def Strip(txt): @@ -1852,24 +2061,47 @@ def Strip(txt): ############################################################################## -_SCHED_ACTIONS = ('resume', 'pause', 'pause_all', 'shutdown', 'restart', 'speedlimit', - 'pause_post', 'resume_post', 'scan_folder', 'rss_scan', 'remove_failed', - 'remove_completed', 'pause_all_low', 'pause_all_normal', 'pause_all_high', - 'resume_all_low', 'resume_all_normal', 'resume_all_high', - 'enable_quota', 'disable_quota' - ) +_SCHED_ACTIONS = ( + "resume", + "pause", + "pause_all", + "shutdown", + "restart", + "speedlimit", + "pause_post", + 
"resume_post", + "scan_folder", + "rss_scan", + "remove_failed", + "remove_completed", + "pause_all_low", + "pause_all_normal", + "pause_all_high", + "resume_all_low", + "resume_all_normal", + "resume_all_high", + "enable_quota", + "disable_quota", +) class ConfigScheduling: - def __init__(self, root): self.__root = root @secured_expose(check_configlock=True) def index(self, **kwargs): def get_days(): - days = {"*": T('Daily'), "1": T('Monday'), "2": T('Tuesday'), "3": T('Wednesday'), "4": T('Thursday'), - "5": T('Friday'), "6": T('Saturday'), "7": T('Sunday')} + days = { + "*": T("Daily"), + "1": T("Monday"), + "2": T("Tuesday"), + "3": T("Wednesday"), + "4": T("Thursday"), + "5": T("Friday"), + "6": T("Saturday"), + "7": T("Sunday"), + } return days conf = build_header(sabnzbd.WEB_DIR_CONFIG) @@ -1879,42 +2111,42 @@ class ConfigScheduling: day_names = get_days() categories = list_cats(False) snum = 1 - conf['schedlines'] = [] - conf['taskinfo'] = [] + conf["schedlines"] = [] + conf["taskinfo"] = [] for ev in scheduler.sort_schedules(all_events=False): line = ev[3] - conf['schedlines'].append(line) + conf["schedlines"].append(line) try: - enabled, m, h, day_numbers, action = line.split(' ', 4) + enabled, m, h, day_numbers, action = line.split(" ", 4) except: continue action = action.strip() try: - action, value = action.split(' ', 1) + action, value = action.split(" ", 1) except: - value = '' + value = "" value = value.strip() - if value and not value.lower().strip('0123456789kmgtp%.'): - if '%' not in value and from_units(value) < 1.0: - value = T('off') # : "Off" value for speedlimit in scheduler + if value and not value.lower().strip("0123456789kmgtp%."): + if "%" not in value and from_units(value) < 1.0: + value = T("off") # : "Off" value for speedlimit in scheduler else: - if '%' not in value and 1 < int_conv(value) < 101: - value += '%' + if "%" not in value and 1 < int_conv(value) < 101: + value += "%" value = value.upper() if action in actions: action = 
Ttemplate("sch-" + action) else: - if action in ('enable_server', 'disable_server'): + if action in ("enable_server", "disable_server"): try: value = '"%s"' % config.get_servers()[value].displayname() except KeyError: - value = '"%s" <<< %s' % (value, T('Undefined server!')) + value = '"%s" <<< %s' % (value, T("Undefined server!")) action = Ttemplate("sch-" + action) - if action in ('pause_cat', 'resume_cat'): + if action in ("pause_cat", "resume_cat"): action = Ttemplate("sch-" + action) if value not in categories: # Category name change - value = '"%s" <<< %s' % (value, T('Incorrect parameter')) + value = '"%s" <<< %s' % (value, T("Incorrect parameter")) else: value = '"%s"' % value @@ -1927,9 +2159,9 @@ class ConfigScheduling: else: days_of_week = ", ".join([day_names.get(i, "**") for i in day_numbers]) - item = (snum, '%02d' % int(h), '%02d' % int(m), days_of_week, '%s %s' % (action, value), enabled) + item = (snum, "%02d" % int(h), "%02d" % int(m), days_of_week, "%s %s" % (action, value), enabled) - conf['taskinfo'].append(item) + conf["taskinfo"].append(item) snum += 1 actions_lng = {} @@ -1941,47 +2173,50 @@ class ConfigScheduling: for srv in servers: actions_servers[srv] = servers[srv].displayname() - conf['actions_servers'] = actions_servers - conf['actions'] = actions - conf['actions_lng'] = actions_lng - conf['categories'] = categories + conf["actions_servers"] = actions_servers + conf["actions"] = actions + conf["actions_lng"] = actions_lng + conf["categories"] = categories - template = Template(file=os.path.join(sabnzbd.WEB_DIR_CONFIG, 'config_scheduling.tmpl'), - searchList=[conf], compilerSettings=CHEETAH_DIRECTIVES) + template = Template( + file=os.path.join(sabnzbd.WEB_DIR_CONFIG, "config_scheduling.tmpl"), + searchList=[conf], + compilerSettings=CHEETAH_DIRECTIVES, + ) return template.respond() @secured_expose(check_api_key=True, check_configlock=True) def addSchedule(self, **kwargs): servers = config.get_servers() - minute = kwargs.get('minute') 
- hour = kwargs.get('hour') - days_of_week = ''.join([str(x) for x in kwargs.get('daysofweek', '')]) + minute = kwargs.get("minute") + hour = kwargs.get("hour") + days_of_week = "".join([str(x) for x in kwargs.get("daysofweek", "")]) if not days_of_week: - days_of_week = '1234567' - action = kwargs.get('action') - arguments = kwargs.get('arguments') + days_of_week = "1234567" + action = kwargs.get("action") + arguments = kwargs.get("arguments") arguments = arguments.strip().lower() - if arguments in ('on', 'enable'): - arguments = '1' - elif arguments in ('off', 'disable'): - arguments = '0' + if arguments in ("on", "enable"): + arguments = "1" + elif arguments in ("off", "disable"): + arguments = "0" if minute and hour and days_of_week and action: - if action == 'speedlimit': - if not arguments or arguments.strip('0123456789kmgtp%.'): + if action == "speedlimit": + if not arguments or arguments.strip("0123456789kmgtp%."): arguments = 0 elif action in _SCHED_ACTIONS: - arguments = '' + arguments = "" elif action in servers: - if arguments == '1': + if arguments == "1": arguments = action - action = 'enable_server' + action = "enable_server" else: arguments = action - action = 'disable_server' + action = "disable_server" - elif action in ('pause_cat', 'resume_cat'): + elif action in ("pause_cat", "resume_cat"): # Need original category name, not lowercased arguments = arguments.strip() else: @@ -1990,8 +2225,7 @@ class ConfigScheduling: if action: sched = cfg.schedules() - sched.append('%s %s %s %s %s %s' % - (1, minute, hour, days_of_week, action, arguments)) + sched.append("%s %s %s %s %s %s" % (1, minute, hour, days_of_week, action, arguments)) cfg.schedules.set(sched) config.save_config() @@ -2001,7 +2235,7 @@ class ConfigScheduling: @secured_expose(check_api_key=True, check_configlock=True) def delSchedule(self, **kwargs): schedules = cfg.schedules() - line = kwargs.get('line') + line = kwargs.get("line") if line and line in schedules: schedules.remove(line) 
cfg.schedules.set(schedules) @@ -2012,14 +2246,14 @@ class ConfigScheduling: @secured_expose(check_api_key=True, check_configlock=True) def toggleSchedule(self, **kwargs): schedules = cfg.schedules() - line = kwargs.get('line') + line = kwargs.get("line") if line: for i, schedule in enumerate(schedules): if schedule == line: # Toggle the schedule schedule_split = schedule.split() - schedule_split[0] = '%d' % (schedule_split[0] == '0') - schedules[i] = ' '.join(schedule_split) + schedule_split[0] = "%d" % (schedule_split[0] == "0") + schedules[i] = " ".join(schedule_split) break cfg.schedules.set(schedules) config.save_config() @@ -2029,7 +2263,6 @@ class ConfigScheduling: ############################################################################## class ConfigCats: - def __init__(self, root): self.__root = root @@ -2037,48 +2270,59 @@ class ConfigCats: def index(self, **kwargs): conf = build_header(sabnzbd.WEB_DIR_CONFIG) - conf['scripts'] = list_scripts(default=True) - conf['defdir'] = cfg.complete_dir.get_clipped_path() + conf["scripts"] = list_scripts(default=True) + conf["defdir"] = cfg.complete_dir.get_clipped_path() categories = config.get_ordered_categories() - conf['have_cats'] = len(categories) > 1 + conf["have_cats"] = len(categories) > 1 slotinfo = [] for cat in categories: - cat['newzbin'] = cat['newzbin'].replace('"', '"') + cat["newzbin"] = cat["newzbin"].replace('"', """) slotinfo.append(cat) # Add empty line - empty = {'name': '', 'order': '0', 'pp': '-1', 'script': '', 'dir': '', 'newzbin': '', 'priority': DEFAULT_PRIORITY} + empty = { + "name": "", + "order": "0", + "pp": "-1", + "script": "", + "dir": "", + "newzbin": "", + "priority": DEFAULT_PRIORITY, + } slotinfo.insert(1, empty) - conf['slotinfo'] = slotinfo + conf["slotinfo"] = slotinfo - template = Template(file=os.path.join(sabnzbd.WEB_DIR_CONFIG, 'config_cat.tmpl'), - searchList=[conf], compilerSettings=CHEETAH_DIRECTIVES) + template = Template( + 
file=os.path.join(sabnzbd.WEB_DIR_CONFIG, "config_cat.tmpl"), + searchList=[conf], + compilerSettings=CHEETAH_DIRECTIVES, + ) return template.respond() @secured_expose(check_api_key=True, check_configlock=True) def delete(self, **kwargs): - kwargs['section'] = 'categories' - kwargs['keyword'] = kwargs.get('name') + kwargs["section"] = "categories" + kwargs["keyword"] = kwargs.get("name") del_from_section(kwargs) raise Raiser(self.__root) @secured_expose(check_api_key=True, check_configlock=True) def save(self, **kwargs): - name = kwargs.get('name', '*') - if name == '*': + name = kwargs.get("name", "*") + if name == "*": newname = name else: - newname = re.sub('"', '', kwargs.get('newname', '')) + newname = re.sub('"', "", kwargs.get("newname", "")) if newname: # Check if this cat-dir is not sub-folder of incomplete - if same_file(cfg.download_dir.get_path(), real_path(cfg.complete_dir.get_path(), kwargs['dir'])): - return T('Category folder cannot be a subfolder of the Temporary Download Folder.') + if same_file(cfg.download_dir.get_path(), real_path(cfg.complete_dir.get_path(), kwargs["dir"])): + return T("Category folder cannot be a subfolder of the Temporary Download Folder.") # Delete current one and replace with new one if name: - config.delete('categories', name) + config.delete("categories", name) config.ConfigCat(newname.lower(), kwargs) config.save_config() @@ -2087,47 +2331,57 @@ class ConfigCats: ############################################################################## SORT_LIST = ( - 'enable_tv_sorting', 'tv_sort_string', 'tv_categories', - 'enable_movie_sorting', 'movie_sort_string', 'movie_sort_extra', 'movie_extra_folder', - 'enable_date_sorting', 'date_sort_string', 'movie_categories', 'date_categories' + "enable_tv_sorting", + "tv_sort_string", + "tv_categories", + "enable_movie_sorting", + "movie_sort_string", + "movie_sort_extra", + "movie_extra_folder", + "enable_date_sorting", + "date_sort_string", + "movie_categories", + 
"date_categories", ) class ConfigSorting: - def __init__(self, root): self.__root = root @secured_expose(check_configlock=True) def index(self, **kwargs): conf = build_header(sabnzbd.WEB_DIR_CONFIG) - conf['complete_dir'] = cfg.complete_dir.get_clipped_path() + conf["complete_dir"] = cfg.complete_dir.get_clipped_path() for kw in SORT_LIST: - conf[kw] = config.get_config('misc', kw)() - conf['categories'] = list_cats(False) - - template = Template(file=os.path.join(sabnzbd.WEB_DIR_CONFIG, 'config_sorting.tmpl'), - searchList=[conf], compilerSettings=CHEETAH_DIRECTIVES) + conf[kw] = config.get_config("misc", kw)() + conf["categories"] = list_cats(False) + + template = Template( + file=os.path.join(sabnzbd.WEB_DIR_CONFIG, "config_sorting.tmpl"), + searchList=[conf], + compilerSettings=CHEETAH_DIRECTIVES, + ) return template.respond() @secured_expose(check_api_key=True, check_configlock=True) def saveSorting(self, **kwargs): try: - kwargs['movie_categories'] = kwargs['movie_cat'] + kwargs["movie_categories"] = kwargs["movie_cat"] except: pass try: - kwargs['date_categories'] = kwargs['date_cat'] + kwargs["date_categories"] = kwargs["date_cat"] except: pass try: - kwargs['tv_categories'] = kwargs['tv_cat'] + kwargs["tv_categories"] = kwargs["tv_cat"] except: pass for kw in SORT_LIST: - item = config.get_config('misc', kw) + item = config.get_config("misc", kw) value = kwargs.get(kw) msg = item.set(value) if msg: @@ -2142,19 +2396,23 @@ LOG_API_RE = re.compile(rb"(apikey|api)(=|:)[\w]+", re.I) LOG_API_JSON_RE = re.compile(rb"'(apikey|api)': '[\w]+'", re.I) LOG_USER_RE = re.compile(rb"(user|username)\s?=\s?[\S]+", re.I) LOG_PASS_RE = re.compile(rb"(password)\s?=\s?[\S]+", re.I) -LOG_INI_HIDE_RE = re.compile(rb"(email_pwd|email_account|email_to|rating_api_key|pushover_token|pushover_userkey|pushbullet_apikey|prowl_apikey|growl_password|growl_server|IPv[4|6] address)\s?=\s?[\S]+", re.I) +LOG_INI_HIDE_RE = re.compile( + 
rb"(email_pwd|email_account|email_to|rating_api_key|pushover_token|pushover_userkey|pushbullet_apikey|prowl_apikey|growl_password|growl_server|IPv[4|6] address)\s?=\s?[\S]+", + re.I, +) LOG_HASH_RE = re.compile(rb"([a-fA-F\d]{25})", re.I) -class Status: +class Status: def __init__(self, root): self.__root = root @secured_expose(check_configlock=True) def index(self, **kwargs): - header = build_status(skip_dashboard=kwargs.get('skip_dashboard')) - template = Template(file=os.path.join(sabnzbd.WEB_DIR, 'status.tmpl'), - searchList=[header], compilerSettings=CHEETAH_DIRECTIVES) + header = build_status(skip_dashboard=kwargs.get("skip_dashboard")) + template = Template( + file=os.path.join(sabnzbd.WEB_DIR, "status.tmpl"), searchList=[header], compilerSettings=CHEETAH_DIRECTIVES + ) return template.respond() @secured_expose(check_api_key=True) @@ -2180,20 +2438,20 @@ class Status: pass # Fetch the INI and the log-data and add a message at the top - log_data = b'--------------------------------\n\n' - log_data += b'The log includes a copy of your sabnzbd.ini with\nall usernames, passwords and API-keys removed.' - log_data += b'\n\n--------------------------------\n' + log_data = b"--------------------------------\n\n" + log_data += b"The log includes a copy of your sabnzbd.ini with\nall usernames, passwords and API-keys removed." 
+ log_data += b"\n\n--------------------------------\n" with open(sabnzbd.LOGFILE, "rb") as f: log_data += f.read() - with open(config.get_filename(), 'rb') as f: + with open(config.get_filename(), "rb") as f: log_data += f.read() # We need to remove all passwords/usernames/api-keys log_data = LOG_API_RE.sub(b"apikey=", log_data) log_data = LOG_API_JSON_RE.sub(b"'apikey':'", log_data) - log_data = LOG_USER_RE.sub(b'\\g<1>=', log_data) + log_data = LOG_USER_RE.sub(b"\\g<1>=", log_data) log_data = LOG_PASS_RE.sub(b"password=", log_data) log_data = LOG_INI_HIDE_RE.sub(b"\\1 = ", log_data) log_data = LOG_HASH_RE.sub(b"", log_data) @@ -2201,14 +2459,15 @@ class Status: # Try to replace the username try: import getpass + cur_user = getpass.getuser() if cur_user: - log_data = log_data.replace(utob(cur_user), b'') + log_data = log_data.replace(utob(cur_user), b"") except: pass # Set headers - cherrypy.response.headers['Content-Type'] = 'application/x-download;charset=utf-8' - cherrypy.response.headers['Content-Disposition'] = 'attachment;filename="sabnzbd.log"' + cherrypy.response.headers["Content-Type"] = "application/x-download;charset=utf-8" + cherrypy.response.headers["Content-Disposition"] = 'attachment;filename="sabnzbd.log"' return log_data @secured_expose(check_api_key=True) @@ -2218,14 +2477,14 @@ class Status: @secured_expose(check_api_key=True) def change_loglevel(self, **kwargs): - cfg.log_level.set(kwargs.get('loglevel')) + cfg.log_level.set(kwargs.get("loglevel")) config.save_config() raise Raiser(self.__root) @secured_expose(check_api_key=True) def unblock_server(self, **kwargs): - Downloader.do.unblock(kwargs.get('server')) + Downloader.do.unblock(kwargs.get("server")) # Short sleep so that UI shows new server status time.sleep(1.0) raise Raiser(self.__root) @@ -2280,41 +2539,41 @@ class Status: def orphan_delete(kwargs): - path = kwargs.get('name') + path = kwargs.get("name") if path: path = os.path.join(long_path(cfg.download_dir.get_path()), path) - 
logging.info('Removing orphaned job %s', path) + logging.info("Removing orphaned job %s", path) remove_all(path, recursive=True) def orphan_delete_all(): paths = NzbQueue.do.scan_jobs(all_jobs=False, action=False) for path in paths: - kwargs = {'name': path} + kwargs = {"name": path} orphan_delete(kwargs) def orphan_add(kwargs): - path = kwargs.get('name') + path = kwargs.get("name") if path: path = os.path.join(long_path(cfg.download_dir.get_path()), path) - logging.info('Re-adding orphaned job %s', path) + logging.info("Re-adding orphaned job %s", path) NzbQueue.do.repair_job(path, None, None) def orphan_add_all(): paths = NzbQueue.do.scan_jobs(all_jobs=False, action=False) for path in paths: - kwargs = {'name': path} + kwargs = {"name": path} orphan_add(kwargs) def badParameterResponse(msg, ajax=None): """ Return a html page with error message and a 'back' button """ if ajax: - return sabnzbd.api.report('json', error=msg) + return sabnzbd.api.report("json", error=msg) else: - return ''' + return """ @@ -2327,12 +2586,18 @@ def badParameterResponse(msg, ajax=None):
-''' % (sabnzbd.__version__, T('ERROR:'), T('Incorrect parameter'), msg, T('Back')) +""" % ( + sabnzbd.__version__, + T("ERROR:"), + T("Incorrect parameter"), + msg, + T("Back"), + ) def ShowString(name, msg): """ Return a html page listing a file and a 'back' button """ - return ''' + return """ @@ -2344,7 +2609,12 @@ def ShowString(name, msg):
%s
-''' % (xml_name(name), T('Back'), xml_name(name), escape(msg)) +""" % ( + xml_name(name), + T("Back"), + xml_name(name), + escape(msg), + ) def GetRssLog(feed): @@ -2353,61 +2623,61 @@ def GetRssLog(feed): job = job.copy() # Now we apply some formatting - job['title'] = job['title'] - job['skip'] = '*' * int(job.get('status', '').endswith('*')) + job["title"] = job["title"] + job["skip"] = "*" * int(job.get("status", "").endswith("*")) # These fields could be empty - job['cat'] = job.get('cat', '') - job['size'] = job.get('size', '') - job['infourl'] = job.get('infourl', '') + job["cat"] = job.get("cat", "") + job["size"] = job.get("size", "") + job["infourl"] = job.get("infourl", "") # Auto-fetched jobs didn't have these fields set - if job.get('url'): - job['baselink'] = get_base_url(job.get('url')) - if sabnzbd.rss.special_rss_site(job.get('url')): - job['nzbname'] = '' + if job.get("url"): + job["baselink"] = get_base_url(job.get("url")) + if sabnzbd.rss.special_rss_site(job.get("url")): + job["nzbname"] = "" else: - job['nzbname'] = job['title'] + job["nzbname"] = job["title"] else: - job['baselink'] = '' - job['nzbname'] = job['title'] + job["baselink"] = "" + job["nzbname"] = job["title"] - if job.get('size', 0): - job['size_units'] = to_units(job['size']) + if job.get("size", 0): + job["size_units"] = to_units(job["size"]) else: - job['size_units'] = '-' + job["size_units"] = "-" # And we add extra fields for sorting - if job.get('age', 0): - job['age_ms'] = (job['age'] - datetime.utcfromtimestamp(0)).total_seconds() - job['age'] = calc_age(job['age'], True) + if job.get("age", 0): + job["age_ms"] = (job["age"] - datetime.utcfromtimestamp(0)).total_seconds() + job["age"] = calc_age(job["age"], True) else: - job['age_ms'] = '' - job['age'] = '' + job["age_ms"] = "" + job["age"] = "" - if job.get('time_downloaded'): - job['time_downloaded_ms'] = time.mktime(job['time_downloaded']) - job['time_downloaded'] = time.strftime(time_format('%H:%M %a %d %b'), 
job['time_downloaded']) + if job.get("time_downloaded"): + job["time_downloaded_ms"] = time.mktime(job["time_downloaded"]) + job["time_downloaded"] = time.strftime(time_format("%H:%M %a %d %b"), job["time_downloaded"]) else: - job['time_downloaded_ms'] = '' - job['time_downloaded'] = '' + job["time_downloaded_ms"] = "" + job["time_downloaded"] = "" return job jobs = list(sabnzbd.rss.show_result(feed).values()) good, bad, done = ([], [], []) for job in jobs: - if job['status'][0] == 'G': + if job["status"][0] == "G": good.append(make_item(job)) - elif job['status'][0] == 'B': + elif job["status"][0] == "B": bad.append(make_item(job)) - elif job['status'] == 'D': + elif job["status"] == "D": done.append(make_item(job)) try: # Sort based on actual age, in try-catch just to be sure - good.sort(key=lambda job: job['age_ms'], reverse=True) - bad.sort(key=lambda job: job['age_ms'], reverse=True) - done.sort(key=lambda job: job['time_downloaded_ms'], reverse=True) + good.sort(key=lambda job: job["age_ms"], reverse=True) + bad.sort(key=lambda job: job["age_ms"], reverse=True) + done.sort(key=lambda job: job["time_downloaded_ms"], reverse=True) except: # Let the javascript do it then.. 
pass @@ -2417,42 +2687,134 @@ def GetRssLog(feed): ############################################################################## LIST_EMAIL = ( - 'email_endjob', 'email_cats', 'email_full', - 'email_server', 'email_to', 'email_from', - 'email_account', 'email_pwd', 'email_rss' + "email_endjob", + "email_cats", + "email_full", + "email_server", + "email_to", + "email_from", + "email_account", + "email_pwd", + "email_rss", +) +LIST_NCENTER = ( + "ncenter_enable", + "ncenter_cats", + "ncenter_prio_startup", + "ncenter_prio_download", + "ncenter_prio_pp", + "ncenter_prio_complete", + "ncenter_prio_failed", + "ncenter_prio_disk_full", + "ncenter_prio_warning", + "ncenter_prio_error", + "ncenter_prio_queue_done", + "ncenter_prio_other", + "ncenter_prio_new_login", +) +LIST_ACENTER = ( + "acenter_enable", + "acenter_cats", + "acenter_prio_startup", + "acenter_prio_download", + "acenter_prio_pp", + "acenter_prio_complete", + "acenter_prio_failed", + "acenter_prio_disk_full", + "acenter_prio_warning", + "acenter_prio_error", + "acenter_prio_queue_done", + "acenter_prio_other", + "acenter_prio_new_login", +) +LIST_NTFOSD = ( + "ntfosd_enable", + "ntfosd_cats", + "ntfosd_prio_startup", + "ntfosd_prio_download", + "ntfosd_prio_pp", + "ntfosd_prio_complete", + "ntfosd_prio_failed", + "ntfosd_prio_disk_full", + "ntfosd_prio_warning", + "ntfosd_prio_error", + "ntfosd_prio_queue_done", + "ntfosd_prio_other", + "ntfosd_prio_new_login", +) +LIST_PROWL = ( + "prowl_enable", + "prowl_cats", + "prowl_apikey", + "prowl_prio_startup", + "prowl_prio_download", + "prowl_prio_pp", + "prowl_prio_complete", + "prowl_prio_failed", + "prowl_prio_disk_full", + "prowl_prio_warning", + "prowl_prio_error", + "prowl_prio_queue_done", + "prowl_prio_other", + "prowl_prio_new_login", +) +LIST_PUSHOVER = ( + "pushover_enable", + "pushover_cats", + "pushover_token", + "pushover_userkey", + "pushover_device", + "pushover_prio_startup", + "pushover_prio_download", + "pushover_prio_pp", + 
"pushover_prio_complete", + "pushover_prio_failed", + "pushover_prio_disk_full", + "pushover_prio_warning", + "pushover_prio_error", + "pushover_prio_queue_done", + "pushover_prio_other", + "pushover_prio_new_login", + "pushover_emergency_retry", + "pushover_emergency_expire", +) +LIST_PUSHBULLET = ( + "pushbullet_enable", + "pushbullet_cats", + "pushbullet_apikey", + "pushbullet_device", + "pushbullet_prio_startup", + "pushbullet_prio_download", + "pushbullet_prio_pp", + "pushbullet_prio_complete", + "pushbullet_prio_failed", + "pushbullet_prio_disk_full", + "pushbullet_prio_warning", + "pushbullet_prio_error", + "pushbullet_prio_queue_done", + "pushbullet_prio_other", + "pushbullet_prio_new_login", +) +LIST_NSCRIPT = ( + "nscript_enable", + "nscript_cats", + "nscript_script", + "nscript_parameters", + "nscript_prio_startup", + "nscript_prio_download", + "nscript_prio_pp", + "nscript_prio_complete", + "nscript_prio_failed", + "nscript_prio_disk_full", + "nscript_prio_warning", + "nscript_prio_error", + "nscript_prio_queue_done", + "nscript_prio_other", + "nscript_prio_new_login", ) -LIST_NCENTER = ('ncenter_enable', 'ncenter_cats', - 'ncenter_prio_startup', 'ncenter_prio_download', 'ncenter_prio_pp', 'ncenter_prio_complete', 'ncenter_prio_failed', - 'ncenter_prio_disk_full', 'ncenter_prio_warning', 'ncenter_prio_error', 'ncenter_prio_queue_done', 'ncenter_prio_other', - 'ncenter_prio_new_login') -LIST_ACENTER = ('acenter_enable', 'acenter_cats', - 'acenter_prio_startup', 'acenter_prio_download', 'acenter_prio_pp', 'acenter_prio_complete', 'acenter_prio_failed', - 'acenter_prio_disk_full', 'acenter_prio_warning', 'acenter_prio_error', 'acenter_prio_queue_done', 'acenter_prio_other', - 'acenter_prio_new_login') -LIST_NTFOSD = ('ntfosd_enable', 'ntfosd_cats', - 'ntfosd_prio_startup', 'ntfosd_prio_download', 'ntfosd_prio_pp', 'ntfosd_prio_complete', 'ntfosd_prio_failed', - 'ntfosd_prio_disk_full', 'ntfosd_prio_warning', 'ntfosd_prio_error', 'ntfosd_prio_queue_done', 
'ntfosd_prio_other', - 'ntfosd_prio_new_login') -LIST_PROWL = ('prowl_enable', 'prowl_cats', 'prowl_apikey', - 'prowl_prio_startup', 'prowl_prio_download', 'prowl_prio_pp', 'prowl_prio_complete', 'prowl_prio_failed', - 'prowl_prio_disk_full', 'prowl_prio_warning', 'prowl_prio_error', 'prowl_prio_queue_done', 'prowl_prio_other', - 'prowl_prio_new_login') -LIST_PUSHOVER = ('pushover_enable', 'pushover_cats', 'pushover_token', 'pushover_userkey', 'pushover_device', - 'pushover_prio_startup', 'pushover_prio_download', 'pushover_prio_pp', 'pushover_prio_complete', 'pushover_prio_failed', - 'pushover_prio_disk_full', 'pushover_prio_warning', 'pushover_prio_error', 'pushover_prio_queue_done', 'pushover_prio_other', - 'pushover_prio_new_login', 'pushover_emergency_retry', 'pushover_emergency_expire') -LIST_PUSHBULLET = ('pushbullet_enable', 'pushbullet_cats', 'pushbullet_apikey', 'pushbullet_device', - 'pushbullet_prio_startup', 'pushbullet_prio_download', 'pushbullet_prio_pp', 'pushbullet_prio_complete', 'pushbullet_prio_failed', - 'pushbullet_prio_disk_full', 'pushbullet_prio_warning', 'pushbullet_prio_error', 'pushbullet_prio_queue_done', 'pushbullet_prio_other', - 'pushbullet_prio_new_login') -LIST_NSCRIPT = ('nscript_enable', 'nscript_cats', 'nscript_script', 'nscript_parameters', - 'nscript_prio_startup', 'nscript_prio_download', 'nscript_prio_pp', 'nscript_prio_complete', 'nscript_prio_failed', - 'nscript_prio_disk_full', 'nscript_prio_warning', 'nscript_prio_error', 'nscript_prio_queue_done', 'nscript_prio_other', - 'nscript_prio_new_login') class ConfigNotify: - def __init__(self, root): self.__root = root self.__lastmail = None @@ -2461,76 +2823,78 @@ class ConfigNotify: def index(self, **kwargs): conf = build_header(sabnzbd.WEB_DIR_CONFIG) - conf['categories'] = list_cats(False) - conf['lastmail'] = self.__lastmail - conf['have_ntfosd'] = sabnzbd.notifier.have_ntfosd() - conf['have_ncenter'] = sabnzbd.DARWIN and sabnzbd.FOUNDATION - conf['scripts'] = 
list_scripts(default=False, none=True) + conf["categories"] = list_cats(False) + conf["lastmail"] = self.__lastmail + conf["have_ntfosd"] = sabnzbd.notifier.have_ntfosd() + conf["have_ncenter"] = sabnzbd.DARWIN and sabnzbd.FOUNDATION + conf["scripts"] = list_scripts(default=False, none=True) for kw in LIST_EMAIL: - conf[kw] = config.get_config('misc', kw).get_string() + conf[kw] = config.get_config("misc", kw).get_string() for kw in LIST_PROWL: - conf[kw] = config.get_config('prowl', kw)() + conf[kw] = config.get_config("prowl", kw)() for kw in LIST_PUSHOVER: - conf[kw] = config.get_config('pushover', kw)() + conf[kw] = config.get_config("pushover", kw)() for kw in LIST_PUSHBULLET: - conf[kw] = config.get_config('pushbullet', kw)() + conf[kw] = config.get_config("pushbullet", kw)() for kw in LIST_NCENTER: - conf[kw] = config.get_config('ncenter', kw)() + conf[kw] = config.get_config("ncenter", kw)() for kw in LIST_ACENTER: - conf[kw] = config.get_config('acenter', kw)() + conf[kw] = config.get_config("acenter", kw)() for kw in LIST_NTFOSD: - conf[kw] = config.get_config('ntfosd', kw)() + conf[kw] = config.get_config("ntfosd", kw)() for kw in LIST_NSCRIPT: - conf[kw] = config.get_config('nscript', kw)() - conf['notify_keys'] = sabnzbd.constants.NOTIFY_KEYS - conf['notify_texts'] = sabnzbd.notifier.NOTIFICATION - - template = Template(file=os.path.join(sabnzbd.WEB_DIR_CONFIG, 'config_notify.tmpl'), - searchList=[conf], compilerSettings=CHEETAH_DIRECTIVES) + conf[kw] = config.get_config("nscript", kw)() + conf["notify_keys"] = sabnzbd.constants.NOTIFY_KEYS + conf["notify_texts"] = sabnzbd.notifier.NOTIFICATION + + template = Template( + file=os.path.join(sabnzbd.WEB_DIR_CONFIG, "config_notify.tmpl"), + searchList=[conf], + compilerSettings=CHEETAH_DIRECTIVES, + ) return template.respond() @secured_expose(check_api_key=True, check_configlock=True) def saveEmail(self, **kwargs): - ajax = kwargs.get('ajax') + ajax = kwargs.get("ajax") for kw in LIST_EMAIL: - msg = 
config.get_config('misc', kw).set(kwargs.get(kw)) + msg = config.get_config("misc", kw).set(kwargs.get(kw)) if msg: - return badParameterResponse(T('Incorrect value for %s: %s') % (kw, msg), ajax) + return badParameterResponse(T("Incorrect value for %s: %s") % (kw, msg), ajax) for kw in LIST_NCENTER: - msg = config.get_config('ncenter', kw).set(kwargs.get(kw)) + msg = config.get_config("ncenter", kw).set(kwargs.get(kw)) if msg: - return badParameterResponse(T('Incorrect value for %s: %s') % (kw, msg), ajax) + return badParameterResponse(T("Incorrect value for %s: %s") % (kw, msg), ajax) for kw in LIST_ACENTER: - msg = config.get_config('acenter', kw).set(kwargs.get(kw)) + msg = config.get_config("acenter", kw).set(kwargs.get(kw)) if msg: - return badParameterResponse(T('Incorrect value for %s: %s') % (kw, msg), ajax) + return badParameterResponse(T("Incorrect value for %s: %s") % (kw, msg), ajax) for kw in LIST_NTFOSD: - msg = config.get_config('ntfosd', kw).set(kwargs.get(kw)) + msg = config.get_config("ntfosd", kw).set(kwargs.get(kw)) if msg: - return badParameterResponse(T('Incorrect value for %s: %s') % (kw, msg), ajax) + return badParameterResponse(T("Incorrect value for %s: %s") % (kw, msg), ajax) for kw in LIST_PROWL: - msg = config.get_config('prowl', kw).set(kwargs.get(kw)) + msg = config.get_config("prowl", kw).set(kwargs.get(kw)) if msg: - return badParameterResponse(T('Incorrect value for %s: %s') % (kw, msg), ajax) + return badParameterResponse(T("Incorrect value for %s: %s") % (kw, msg), ajax) for kw in LIST_PUSHOVER: - msg = config.get_config('pushover', kw).set(kwargs.get(kw)) + msg = config.get_config("pushover", kw).set(kwargs.get(kw)) if msg: - return badParameterResponse(T('Incorrect value for %s: %s') % (kw, msg), ajax) + return badParameterResponse(T("Incorrect value for %s: %s") % (kw, msg), ajax) for kw in LIST_PUSHBULLET: - msg = config.get_config('pushbullet', kw).set(kwargs.get(kw, 0)) + msg = config.get_config("pushbullet", 
kw).set(kwargs.get(kw, 0)) if msg: - return badParameterResponse(T('Incorrect value for %s: %s') % (kw, msg), ajax) + return badParameterResponse(T("Incorrect value for %s: %s") % (kw, msg), ajax) for kw in LIST_NSCRIPT: - msg = config.get_config('nscript', kw).set(kwargs.get(kw, 0)) + msg = config.get_config("nscript", kw).set(kwargs.get(kw, 0)) if msg: - return badParameterResponse(T('Incorrect value for %s: %s') % (kw, msg), ajax) + return badParameterResponse(T("Incorrect value for %s: %s") % (kw, msg), ajax) config.save_config() self.__lastmail = None if ajax: - return sabnzbd.api.report('json') + return sabnzbd.api.report("json") else: raise Raiser(self.__root) - diff --git a/sabnzbd/newsunpack.py b/sabnzbd/newsunpack.py index d6c422a..94f11b1 100644 --- a/sabnzbd/newsunpack.py +++ b/sabnzbd/newsunpack.py @@ -33,10 +33,20 @@ from subprocess import Popen import sabnzbd from sabnzbd.encoding import platform_btou, correct_unknown_encoding, ubtou import sabnzbd.utils.rarfile as rarfile -from sabnzbd.misc import format_time_string, find_on_path, int_conv, \ - get_all_passwords, calc_age, cmp, caller_name -from sabnzbd.filesystem import make_script_path, real_path, globber, globber_full, \ - renamer, clip_path, long_path, remove_file, recursive_listdir, setname_from_path, get_ext +from sabnzbd.misc import format_time_string, find_on_path, int_conv, get_all_passwords, calc_age, cmp, caller_name +from sabnzbd.filesystem import ( + make_script_path, + real_path, + globber, + globber_full, + renamer, + clip_path, + long_path, + remove_file, + recursive_listdir, + setname_from_path, + get_ext, +) from sabnzbd.sorting import SeriesSorter import sabnzbd.cfg as cfg from sabnzbd.constants import Status @@ -48,25 +58,29 @@ if sabnzbd.WIN32: import win32process # Define scheduling priorities - WIN_SCHED_PRIOS = {1: win32process.IDLE_PRIORITY_CLASS, 2: win32process.BELOW_NORMAL_PRIORITY_CLASS, - 3: win32process.NORMAL_PRIORITY_CLASS, 4: 
win32process.ABOVE_NORMAL_PRIORITY_CLASS,} + WIN_SCHED_PRIOS = { + 1: win32process.IDLE_PRIORITY_CLASS, + 2: win32process.BELOW_NORMAL_PRIORITY_CLASS, + 3: win32process.NORMAL_PRIORITY_CLASS, + 4: win32process.ABOVE_NORMAL_PRIORITY_CLASS, + } except ImportError: pass # Regex globals -RAR_RE = re.compile(r'\.(?P<ext>part\d*\.rar|rar|r\d\d|s\d\d|t\d\d|u\d\d|v\d\d|\d\d\d?\d)$', re.I) -RAR_RE_V3 = re.compile(r'\.(?P<ext>part\d*)$', re.I) +RAR_RE = re.compile(r"\.(?P<ext>part\d*\.rar|rar|r\d\d|s\d\d|t\d\d|u\d\d|v\d\d|\d\d\d?\d)$", re.I) +RAR_RE_V3 = re.compile(r"\.(?P<ext>part\d*)$", re.I) LOADING_RE = re.compile(r'^Loading "(.+)"') TARGET_RE = re.compile(r'^(?:File|Target): "(.+)" -') -EXTRACTFROM_RE = re.compile(r'^Extracting\sfrom\s(.+)') -EXTRACTED_RE = re.compile(r'^(Extracting|Creating|...)\s+(.*?)\s+OK\s*$') -SPLITFILE_RE = re.compile(r'\.(\d\d\d?\d$)', re.I) -ZIP_RE = re.compile(r'\.(zip$)', re.I) -SEVENZIP_RE = re.compile(r'\.7z$', re.I) -SEVENMULTI_RE = re.compile(r'\.7z\.\d+$', re.I) -TS_RE = re.compile(r'\.(\d+)\.(ts$)', re.I) +EXTRACTFROM_RE = re.compile(r"^Extracting\sfrom\s(.+)") +EXTRACTED_RE = re.compile(r"^(Extracting|Creating|...)\s+(.*?)\s+OK\s*$") +SPLITFILE_RE = re.compile(r"\.(\d\d\d?\d$)", re.I) +ZIP_RE = re.compile(r"\.(zip$)", re.I) +SEVENZIP_RE = re.compile(r"\.7z$", re.I) +SEVENMULTI_RE = re.compile(r"\.7z\.\d+$", re.I) +TS_RE = re.compile(r"\.(\d+)\.(ts$)", re.I) PAR2_COMMAND = None MULTIPAR_COMMAND = None @@ -82,6 +96,7 @@ RAR_VERSION = 0 def find_programs(curdir): """ Find external programs """ + def check(path, program): p = os.path.abspath(os.path.join(path, program)) if os.access(p, os.X_OK): @@ -90,34 +105,34 @@ def find_programs(curdir): return None if sabnzbd.DARWIN: - sabnzbd.newsunpack.PAR2_COMMAND = check(curdir, 'osx/par2/par2-sl64') - sabnzbd.newsunpack.RAR_COMMAND = check(curdir, 'osx/unrar/unrar') - sabnzbd.newsunpack.SEVEN_COMMAND = check(curdir, 'osx/7zip/7za') + sabnzbd.newsunpack.PAR2_COMMAND = check(curdir, "osx/par2/par2-sl64") +
sabnzbd.newsunpack.RAR_COMMAND = check(curdir, "osx/unrar/unrar") + sabnzbd.newsunpack.SEVEN_COMMAND = check(curdir, "osx/7zip/7za") if sabnzbd.WIN32: if sabnzbd.WIN64: # 64 bit versions - sabnzbd.newsunpack.MULTIPAR_COMMAND = check(curdir, 'win/par2/multipar/par2j64.exe') - sabnzbd.newsunpack.RAR_COMMAND = check(curdir, 'win/unrar/x64/UnRAR.exe') + sabnzbd.newsunpack.MULTIPAR_COMMAND = check(curdir, "win/par2/multipar/par2j64.exe") + sabnzbd.newsunpack.RAR_COMMAND = check(curdir, "win/unrar/x64/UnRAR.exe") else: # 32 bit versions - sabnzbd.newsunpack.MULTIPAR_COMMAND = check(curdir, 'win/par2/multipar/par2j.exe') - sabnzbd.newsunpack.RAR_COMMAND = check(curdir, 'win/unrar/UnRAR.exe') - sabnzbd.newsunpack.PAR2_COMMAND = check(curdir, 'win/par2/par2.exe') - sabnzbd.newsunpack.SEVEN_COMMAND = check(curdir, 'win/7zip/7za.exe') + sabnzbd.newsunpack.MULTIPAR_COMMAND = check(curdir, "win/par2/multipar/par2j.exe") + sabnzbd.newsunpack.RAR_COMMAND = check(curdir, "win/unrar/UnRAR.exe") + sabnzbd.newsunpack.PAR2_COMMAND = check(curdir, "win/par2/par2.exe") + sabnzbd.newsunpack.SEVEN_COMMAND = check(curdir, "win/7zip/7za.exe") else: if not sabnzbd.newsunpack.PAR2_COMMAND: - sabnzbd.newsunpack.PAR2_COMMAND = find_on_path('par2') + sabnzbd.newsunpack.PAR2_COMMAND = find_on_path("par2") if not sabnzbd.newsunpack.RAR_COMMAND: - sabnzbd.newsunpack.RAR_COMMAND = find_on_path(('unrar', 'rar', 'unrar3', 'rar3',)) - sabnzbd.newsunpack.NICE_COMMAND = find_on_path('nice') - sabnzbd.newsunpack.IONICE_COMMAND = find_on_path('ionice') + sabnzbd.newsunpack.RAR_COMMAND = find_on_path(("unrar", "rar", "unrar3", "rar3",)) + sabnzbd.newsunpack.NICE_COMMAND = find_on_path("nice") + sabnzbd.newsunpack.IONICE_COMMAND = find_on_path("ionice") if not sabnzbd.newsunpack.ZIP_COMMAND: - sabnzbd.newsunpack.ZIP_COMMAND = find_on_path('unzip') + sabnzbd.newsunpack.ZIP_COMMAND = find_on_path("unzip") if not sabnzbd.newsunpack.SEVEN_COMMAND: - sabnzbd.newsunpack.SEVEN_COMMAND = find_on_path('7za') + 
sabnzbd.newsunpack.SEVEN_COMMAND = find_on_path("7za") if not sabnzbd.newsunpack.SEVEN_COMMAND: - sabnzbd.newsunpack.SEVEN_COMMAND = find_on_path('7z') + sabnzbd.newsunpack.SEVEN_COMMAND = find_on_path("7z") if not (sabnzbd.WIN32 or sabnzbd.DARWIN): # Run check on rar version @@ -129,38 +144,87 @@ def find_programs(curdir): sabnzbd.newsunpack.PAR2_MT = par2_mt_check(sabnzbd.newsunpack.PAR2_COMMAND) -ENV_NZO_FIELDS = ['bytes', 'bytes_downloaded', 'bytes_tried', 'cat', 'duplicate', 'encrypted', - 'fail_msg', 'filename', 'final_name', 'group', 'nzo_id', 'oversized', 'password', 'pp', - 'priority', 'repair', 'script', 'status', 'unpack', 'unwanted_ext', 'url'] +ENV_NZO_FIELDS = [ + "bytes", + "bytes_downloaded", + "bytes_tried", + "cat", + "duplicate", + "encrypted", + "fail_msg", + "filename", + "final_name", + "group", + "nzo_id", + "oversized", + "password", + "pp", + "priority", + "repair", + "script", + "status", + "unpack", + "unwanted_ext", + "url", +] def external_processing(extern_proc, nzo, complete_dir, nicename, status): """ Run a user postproc script, return console output and exit value """ - failure_url = nzo.nzo_info.get('failure', '') + failure_url = nzo.nzo_info.get("failure", "") # Items can be bool or null, causing POpen to fail - command = [str(extern_proc), str(complete_dir), str(nzo.filename), str(nicename), '', - str(nzo.cat), str(nzo.group), str(status), str(failure_url)] + command = [ + str(extern_proc), + str(complete_dir), + str(nzo.filename), + str(nicename), + "", + str(nzo.cat), + str(nzo.group), + str(status), + str(failure_url), + ] # Add path to original NZB - nzb_paths = globber_full(nzo.workpath, '*.gz') + nzb_paths = globber_full(nzo.workpath, "*.gz") # Fields not in the NZO directly - extra_env_fields = {'failure_url': failure_url, - 'complete_dir': complete_dir, - 'pp_status': status, - 'download_time': nzo.nzo_info.get('download_time', ''), - 'avg_bps': int(nzo.avg_bps_total / nzo.avg_bps_freq) if nzo.avg_bps_freq else 0, - 
'age': calc_age(nzo.avg_date), - 'orig_nzb_gz': clip_path(nzb_paths[0]) if nzb_paths else ''} + extra_env_fields = { + "failure_url": failure_url, + "complete_dir": complete_dir, + "pp_status": status, + "download_time": nzo.nzo_info.get("download_time", ""), + "avg_bps": int(nzo.avg_bps_total / nzo.avg_bps_freq) if nzo.avg_bps_freq else 0, + "age": calc_age(nzo.avg_date), + "orig_nzb_gz": clip_path(nzb_paths[0]) if nzb_paths else "", + } try: stup, need_shell, command, creationflags = build_command(command) env = create_env(nzo, extra_env_fields) - logging.info('Running external script %s(%s, %s, %s, %s, %s, %s, %s, %s)', - extern_proc, complete_dir, nzo.filename, nicename, '', nzo.cat, nzo.group, status, failure_url) - p = Popen(command, shell=need_shell, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - startupinfo=stup, env=env, creationflags=creationflags) + logging.info( + "Running external script %s(%s, %s, %s, %s, %s, %s, %s, %s)", + extern_proc, + complete_dir, + nzo.filename, + nicename, + "", + nzo.cat, + nzo.group, + status, + failure_url, + ) + p = Popen( + command, + shell=need_shell, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + startupinfo=stup, + env=env, + creationflags=creationflags, + ) # Follow the output, so we can abort it proc = p.stdout @@ -176,20 +240,20 @@ def external_processing(extern_proc, nzo, complete_dir, nicename, status): lines.append(line) # Show current line in history - nzo.set_action_line(T('Running script'), line) + nzo.set_action_line(T("Running script"), line) # Check if we should still continue if not nzo.pp_active: p.kill() - lines.append(T('PostProcessing was aborted (%s)') % T('Script')) + lines.append(T("PostProcessing was aborted (%s)") % T("Script")) # Print at least what we got - output = '\n'.join(lines) + output = "\n".join(lines) return output, 1 except: logging.debug("Failed script %s, Traceback: ", extern_proc, exc_info=True) return "Cannot run script 
%s\r\n" % extern_proc, -1 - output = '\n'.join(lines) + output = "\n".join(lines) ret = p.wait() return output, ret @@ -201,9 +265,17 @@ def external_script(script, p1, p2, p3=None, p4=None): try: stup, need_shell, command, creationflags = build_command(command) env = create_env() - logging.info('Running user script %s(%s, %s)', script, p1, p2) - p = Popen(command, shell=need_shell, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - startupinfo=stup, env=env, creationflags=creationflags) + logging.info("Running user script %s(%s, %s)", script, p1, p2) + p = Popen( + command, + shell=need_shell, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + startupinfo=stup, + env=env, + creationflags=creationflags, + ) except: logging.debug("Failed script %s, Traceback: ", script, exc_info=True) return "Cannot run script %s\r\n" % script, -1 @@ -216,7 +288,7 @@ def external_script(script, p1, p2, p3=None, p4=None): def unpack_magic(nzo, workdir, workdir_complete, dele, one_folder, joinables, zips, rars, sevens, ts, depth=0): """ Do a recursive unpack from all archives in 'workdir' to 'workdir_complete' """ if depth > 5: - logging.warning(T('Unpack nesting too deep [%s]'), nzo.final_name) + logging.warning(T("Unpack nesting too deep [%s]"), nzo.final_name) return False, [] depth += 1 @@ -234,50 +306,50 @@ def unpack_magic(nzo, workdir, workdir_complete, dele, one_folder, joinables, zi if cfg.enable_filejoin(): new_joins = [jn for jn in xjoinables if jn not in joinables] if new_joins: - logging.info('Filejoin starting on %s', workdir) + logging.info("Filejoin starting on %s", workdir) error, newf = file_join(nzo, workdir, workdir_complete, dele, new_joins) if newf: newfiles.extend(newf) - logging.info('Filejoin finished on %s', workdir) + logging.info("Filejoin finished on %s", workdir) if cfg.enable_unrar(): new_rars = [rar for rar in xrars if rar not in rars] if new_rars: - logging.info('Unrar starting on %s', workdir) + 
logging.info("Unrar starting on %s", workdir) error, newf = rar_unpack(nzo, workdir, workdir_complete, dele, one_folder, new_rars) if newf: newfiles.extend(newf) - logging.info('Unrar finished on %s', workdir) + logging.info("Unrar finished on %s", workdir) if cfg.enable_7zip(): new_sevens = [seven for seven in xsevens if seven not in sevens] if new_sevens: - logging.info('7za starting on %s', workdir) + logging.info("7za starting on %s", workdir) error, newf = unseven(nzo, workdir, workdir_complete, dele, one_folder, new_sevens) if newf: newfiles.extend(newf) - logging.info('7za finished on %s', workdir) + logging.info("7za finished on %s", workdir) if cfg.enable_unzip(): new_zips = [zip for zip in xzips if zip not in zips] if new_zips: - logging.info('Unzip starting on %s', workdir) + logging.info("Unzip starting on %s", workdir) if SEVEN_COMMAND: error, newf = unseven(nzo, workdir, workdir_complete, dele, one_folder, new_zips) else: error, newf = unzip(nzo, workdir, workdir_complete, dele, one_folder, new_zips) if newf: newfiles.extend(newf) - logging.info('Unzip finished on %s', workdir) + logging.info("Unzip finished on %s", workdir) if cfg.enable_tsjoin(): new_ts = [_ts for _ts in xts if _ts not in ts] if new_ts: - logging.info('TS Joining starting on %s', workdir) + logging.info("TS Joining starting on %s", workdir) error, newf = file_join(nzo, workdir, workdir_complete, dele, new_ts) if newf: newfiles.extend(newf) - logging.info('TS Joining finished on %s', workdir) + logging.info("TS Joining finished on %s", workdir) # Refresh history and set output nzo.set_action_line() @@ -302,8 +374,9 @@ def unpack_magic(nzo, workdir, workdir_complete, dele, one_folder, joinables, zi xjoinables, xzips, xrars, xsevens, xts = ([], [], [], [], []) if rerun and (cfg.enable_recursive() or new_ts or new_joins or force_rerun): - z, y = unpack_magic(nzo, workdir, workdir_complete, dele, one_folder, - xjoinables, xzips, xrars, xsevens, xts, depth) + z, y = unpack_magic( + nzo, 
workdir, workdir_complete, dele, one_folder, xjoinables, xzips, xrars, xsevens, xts, depth + ) if z: error = z if y: @@ -319,14 +392,14 @@ def match_ts(file): """ Return True if file is a joinable TS file """ match = TS_RE.search(file) if not match: - return False, '', 0 + return False, "", 0 num = int(match.group(1)) try: - set = file[:match.start()] - set += '.ts' + set = file[: match.start()] + set += ".ts" except: - set = '' + set = "" return match, set, num @@ -349,7 +422,7 @@ def clean_up_joinables(names): def get_seq_number(name): """ Return sequence number if name as an int """ head, tail = os.path.splitext(name) - if tail == '.ts': + if tail == ".ts": match, set, num = match_ts(name) else: num = tail[1:] @@ -371,7 +444,7 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables): joinable_set = None for joinable in joinables: head, tail = os.path.splitext(joinable) - if tail == '.ts': + if tail == ".ts": head = match_ts(joinable)[1] if head not in joinable_sets: joinable_sets[head] = [] @@ -404,7 +477,7 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables): logging.debug("file_join(): Assembling %s", filename) # Join the segments - with open(filename, 'ab') as joined_file: + with open(filename, "ab") as joined_file: n = get_seq_number(current[0]) seq_error = n > 1 for joinable in current: @@ -412,8 +485,8 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables): seq_error = True perc = (100.0 / size) * n logging.debug("Processing %s", joinable) - nzo.set_action_line(T('Joining'), '%.0f%%' % perc) - with open(joinable, 'rb') as f: + nzo.set_action_line(T("Joining"), "%.0f%%" % perc) + with open(joinable, "rb") as f: shutil.copyfileobj(f, joined_file, bufsize) if delete: remove_file(joinable) @@ -427,18 +500,20 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables): setname = setname_from_path(joinable_set) if seq_error: - msg = T('Incomplete sequence of joinable files') - nzo.fail_msg = T('File join of %s 
failed') % setname - nzo.set_unpack_info('Filejoin', T('[%s] Error "%s" while joining files') % (setname, msg)) + msg = T("Incomplete sequence of joinable files") + nzo.fail_msg = T("File join of %s failed") % setname + nzo.set_unpack_info("Filejoin", T('[%s] Error "%s" while joining files') % (setname, msg)) logging.error(T('Error "%s" while running file_join on %s'), msg, nzo.final_name) return True, [] else: - msg = T('[%s] Joined %s files') % (joinable_set, size) - nzo.set_unpack_info('Filejoin', msg, setname) + msg = T("[%s] Joined %s files") % (joinable_set, size) + nzo.set_unpack_info("Filejoin", msg, setname) except: msg = sys.exc_info()[1] - nzo.fail_msg = T('File join of %s failed') % msg - nzo.set_unpack_info('Filejoin', T('[%s] Error "%s" while joining files') % (setname_from_path(joinable_set), msg)) + nzo.fail_msg = T("File join of %s failed") % msg + nzo.set_unpack_info( + "Filejoin", T('[%s] Error "%s" while joining files') % (setname_from_path(joinable_set), msg) + ) logging.error(T('Error "%s" while running file_join on %s'), msg, nzo.final_name) return True, [] @@ -464,7 +539,7 @@ def rar_unpack(nzo, workdir, workdir_complete, delete, one_folder, rars): rar_sets[rar_set] = [] rar_sets[rar_set].append(rar) - logging.debug('Rar_sets: %s', rar_sets) + logging.debug("Rar_sets: %s", rar_sets) for rar_set in rar_sets: # Run the RAR extractor @@ -482,7 +557,7 @@ def rar_unpack(nzo, workdir, workdir_complete, delete, one_folder, rars): wait_count = 0 last_stats = nzo.direct_unpacker.get_formatted_stats() while nzo.direct_unpacker.is_alive(): - logging.debug('DirectUnpacker still alive for %s: %s', nzo.final_name, last_stats) + logging.debug("DirectUnpacker still alive for %s: %s", nzo.final_name, last_stats) # Bump the file-lock in case it's stuck with nzo.direct_unpacker.next_file_lock: @@ -506,11 +581,11 @@ def rar_unpack(nzo, workdir, workdir_complete, delete, one_folder, rars): success = True rars, newfiles = 
nzo.direct_unpacker.success_sets.pop(rar_set) else: - logging.info("Extracting rarfile %s (belonging to %s) to %s", - rarpath, rar_set, extraction_path) + logging.info("Extracting rarfile %s (belonging to %s) to %s", rarpath, rar_set, extraction_path) try: - fail, newfiles, rars = rar_extract(rarpath, len(rar_sets[rar_set]), - one_folder, nzo, rar_set, extraction_path) + fail, newfiles, rars = rar_extract( + rarpath, len(rar_sets[rar_set]), one_folder, nzo, rar_set, extraction_path + ) # Was it aborted? if not nzo.pp_active: fail = True @@ -520,31 +595,31 @@ def rar_unpack(nzo, workdir, workdir_complete, delete, one_folder, rars): success = False fail = True msg = sys.exc_info()[1] - nzo.fail_msg = T('Unpacking failed, %s') % msg + nzo.fail_msg = T("Unpacking failed, %s") % msg setname = nzo.final_name - nzo.set_unpack_info('Unpack', T('[%s] Error "%s" while unpacking RAR files') % (setname, msg)) + nzo.set_unpack_info("Unpack", T('[%s] Error "%s" while unpacking RAR files') % (setname, msg)) logging.error(T('Error "%s" while running rar_unpack on %s'), msg, setname) logging.debug("Traceback: ", exc_info=True) if success: - logging.debug('rar_unpack(): Rars: %s', rars) - logging.debug('rar_unpack(): Newfiles: %s', newfiles) + logging.debug("rar_unpack(): Rars: %s", rars) + logging.debug("rar_unpack(): Newfiles: %s", newfiles) extracted_files.extend(newfiles) # Do not fail if this was a recursive unpack if fail and rarpath.startswith(workdir_complete): # Do not delete the files, leave it to user! 
- logging.info('Ignoring failure to do recursive unpack of %s', rarpath) + logging.info("Ignoring failure to do recursive unpack of %s", rarpath) fail = 0 success = True newfiles = [] # Do not fail if this was maybe just some duplicate fileset # Multipar and par2tbb will detect and log them, par2cmdline will not - if fail and rar_set.endswith(('.1', '.2')): + if fail and rar_set.endswith((".1", ".2")): # Just in case, we leave the raw files - logging.info('Ignoring failure of unpack for possible duplicate file %s', rarpath) + logging.info("Ignoring failure of unpack for possible duplicate file %s", rarpath) fail = 0 success = True newfiles = [] @@ -556,9 +631,9 @@ def rar_unpack(nzo, workdir, workdir_complete, delete, one_folder, rars): remove_file(rar) except OSError: if os.path.exists(rar): - logging.warning(T('Deleting %s failed!'), rar) + logging.warning(T("Deleting %s failed!"), rar) - brokenrar = '%s.1' % rar + brokenrar = "%s.1" % rar if os.path.exists(brokenrar): logging.info("Deleting %s", brokenrar) @@ -566,7 +641,7 @@ def rar_unpack(nzo, workdir, workdir_complete, delete, one_folder, rars): remove_file(brokenrar) except OSError: if os.path.exists(brokenrar): - logging.warning(T('Deleting %s failed!'), brokenrar) + logging.warning(T("Deleting %s failed!"), brokenrar) return fail, extracted_files @@ -586,13 +661,15 @@ def rar_extract(rarfile_path, numrars, one_folder, nzo, setname, extraction_path logging.debug('Trying unrar with password "%s"', password) msg = T('Trying unrar with password "%s"') % password nzo.fail_msg = msg - nzo.set_unpack_info('Unpack', msg, setname) - fail, new_files, rars = rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction_path, password) + nzo.set_unpack_info("Unpack", msg, setname) + fail, new_files, rars = rar_extract_core( + rarfile_path, numrars, one_folder, nzo, setname, extraction_path, password + ) if fail != 2: break if fail == 2: - logging.error('%s (%s)', T('Unpacking failed, archive requires a 
password'), os.path.split(rarfile_path)[1]) + logging.error("%s (%s)", T("Unpacking failed, archive requires a password"), os.path.split(rarfile_path)[1]) return fail, new_files, rars @@ -605,54 +682,85 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction logging.debug("rar_extract(): Extractionpath: %s", extraction_path) if password: - password_command = '-p%s' % password + password_command = "-p%s" % password else: - password_command = '-p-' + password_command = "-p-" ############################################################################ if one_folder or cfg.flat_unpack(): - action = 'e' + action = "e" else: - action = 'x' + action = "x" if cfg.overwrite_files(): - overwrite = '-o+' # Enable overwrite - rename = '-o+' # Dummy + overwrite = "-o+" # Enable overwrite + rename = "-o+" # Dummy else: - overwrite = '-o-' # Disable overwrite - rename = '-or' # Auto renaming + overwrite = "-o-" # Disable overwrite + rename = "-or" # Auto renaming if sabnzbd.WIN32: # For Unrar to support long-path, we need to cricumvent Python's list2cmdline # See: https://github.com/sabnzbd/sabnzbd/issues/1043 - command = ['%s' % RAR_COMMAND, action, '-idp', overwrite, rename, '-ai', password_command, - '%s' % clip_path(rarfile_path), '%s\\' % long_path(extraction_path)] + command = [ + "%s" % RAR_COMMAND, + action, + "-idp", + overwrite, + rename, + "-ai", + password_command, + "%s" % clip_path(rarfile_path), + "%s\\" % long_path(extraction_path), + ] elif RAR_PROBLEM: # Use only oldest options (specifically no "-or") - command = ['%s' % RAR_COMMAND, action, '-idp', overwrite, password_command, - '%s' % rarfile_path, '%s/' % extraction_path] + command = [ + "%s" % RAR_COMMAND, + action, + "-idp", + overwrite, + password_command, + "%s" % rarfile_path, + "%s/" % extraction_path, + ] else: # Don't use "-ai" (not needed for non-Windows) - command = ['%s' % RAR_COMMAND, action, '-idp', overwrite, rename, password_command, - '%s' % rarfile_path, '%s/' % 
extraction_path] + command = [ + "%s" % RAR_COMMAND, + action, + "-idp", + overwrite, + rename, + password_command, + "%s" % rarfile_path, + "%s/" % extraction_path, + ] if cfg.ignore_unrar_dates(): - command.insert(3, '-tsm-') + command.insert(3, "-tsm-") stup, need_shell, command, creationflags = build_command(command, flatten_command=True) # Get list of all the volumes part of this set logging.debug("Analyzing rar file ... %s found", rarfile.is_rarfile(rarfile_path)) logging.debug("Running unrar %s", command) - p = Popen(command, shell=need_shell, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - startupinfo=stup, creationflags=creationflags) + p = Popen( + command, + shell=need_shell, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + startupinfo=stup, + creationflags=creationflags, + ) proc = p.stdout if p.stdin: p.stdin.close() - nzo.set_action_line(T('Unpacking'), '00/%02d' % numrars) + nzo.set_action_line(T("Unpacking"), "00/%02d" % numrars) # Loop over the output from rar! 
curr = 0 @@ -670,107 +778,110 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction # Check if we should still continue if not nzo.pp_active: p.kill() - msg = T('PostProcessing was aborted (%s)') % T('Unpack') + msg = T("PostProcessing was aborted (%s)") % T("Unpack") nzo.fail_msg = msg - nzo.set_unpack_info('Unpack', msg, setname) + nzo.set_unpack_info("Unpack", msg, setname) nzo.status = Status.FAILED return fail, (), () line = line.strip() lines.append(line) - if line.startswith('Extracting from'): - filename = (re.search(EXTRACTFROM_RE, line).group(1)) + if line.startswith("Extracting from"): + filename = re.search(EXTRACTFROM_RE, line).group(1) if filename not in rarfiles: rarfiles.append(filename) curr += 1 - nzo.set_action_line(T('Unpacking'), '%02d/%02d' % (curr, numrars)) + nzo.set_action_line(T("Unpacking"), "%02d/%02d" % (curr, numrars)) - elif line.find('recovery volumes found') > -1: + elif line.find("recovery volumes found") > -1: inrecovery = True # and thus start ignoring "Cannot find volume" for a while logging.debug("unrar recovery start: %s" % line) - elif line.startswith('Reconstruct'): + elif line.startswith("Reconstruct"): # end of reconstruction: 'Reconstructing... 100%' or 'Reconstructing... 
' (both success), or 'Reconstruction impossible' inrecovery = False logging.debug("unrar recovery result: %s" % line) - elif line.startswith('Cannot find volume') and not inrecovery: + elif line.startswith("Cannot find volume") and not inrecovery: filename = os.path.basename(line[19:]) - msg = T('Unpacking failed, unable to find %s') % filename + msg = T("Unpacking failed, unable to find %s") % filename nzo.fail_msg = msg - nzo.set_unpack_info('Unpack', msg, setname) + nzo.set_unpack_info("Unpack", msg, setname) logging.warning(T('ERROR: unable to find "%s"'), filename) fail = 1 - elif line.endswith('- CRC failed'): - msg = T('Unpacking failed, CRC error') + elif line.endswith("- CRC failed"): + msg = T("Unpacking failed, CRC error") nzo.fail_msg = msg - nzo.set_unpack_info('Unpack', msg, setname) + nzo.set_unpack_info("Unpack", msg, setname) logging.warning(T('ERROR: CRC failed in "%s"'), setname) fail = 2 # Older unrar versions report a wrong password as a CRC error - elif line.startswith('File too large'): - msg = T('Unpacking failed, file too large for filesystem (FAT?)') + elif line.startswith("File too large"): + msg = T("Unpacking failed, file too large for filesystem (FAT?)") nzo.fail_msg = msg - nzo.set_unpack_info('Unpack', msg, setname) + nzo.set_unpack_info("Unpack", msg, setname) # ERROR: File too large for file system (bigfile-5000MB) - logging.error(T('ERROR: File too large for filesystem (%s)'), setname) + logging.error(T("ERROR: File too large for filesystem (%s)"), setname) fail = 1 - elif line.startswith('Write error'): - msg = T('Unpacking failed, write error or disk is full?') + elif line.startswith("Write error"): + msg = T("Unpacking failed, write error or disk is full?") nzo.fail_msg = msg - nzo.set_unpack_info('Unpack', msg, setname) - logging.error(T('ERROR: write error (%s)'), line[11:]) + nzo.set_unpack_info("Unpack", msg, setname) + logging.error(T("ERROR: write error (%s)"), line[11:]) fail = 1 - elif line.startswith('Cannot create'): 
+ elif line.startswith("Cannot create"): line2 = platform_btou(proc.readline()) - if 'must not exceed 260' in line2: - msg = '%s: %s' % (T('Unpacking failed, path is too long'), line[13:]) + if "must not exceed 260" in line2: + msg = "%s: %s" % (T("Unpacking failed, path is too long"), line[13:]) nzo.fail_msg = msg - logging.error(T('ERROR: path too long (%s)'), line[13:]) + logging.error(T("ERROR: path too long (%s)"), line[13:]) else: - msg = '%s: %s' % (T('Unpacking failed, write error or disk is full?'), line[13:]) + msg = "%s: %s" % (T("Unpacking failed, write error or disk is full?"), line[13:]) nzo.fail_msg = msg - logging.error(T('ERROR: write error (%s)'), line[13:]) - nzo.set_unpack_info('Unpack', msg, setname) + logging.error(T("ERROR: write error (%s)"), line[13:]) + nzo.set_unpack_info("Unpack", msg, setname) fail = 1 # Kill the process (can stay in endless loop on Windows Server) p.kill() - elif line.startswith('ERROR: '): - msg = T('ERROR: %s' % line[7:]) + elif line.startswith("ERROR: "): + msg = T("ERROR: %s" % line[7:]) nzo.fail_msg = msg logging.warning(msg) - nzo.set_unpack_info('Unpack', msg, setname) + nzo.set_unpack_info("Unpack", msg, setname) fail = 1 - elif 'The specified password is incorrect' in line or 'Incorrect password' in line or \ - ('ncrypted file' in line and (('CRC failed' in line) or ('Checksum error' in line))): + elif ( + "The specified password is incorrect" in line + or "Incorrect password" in line + or ("ncrypted file" in line and (("CRC failed" in line) or ("Checksum error" in line))) + ): # unrar 3.x: "Encrypted file: CRC failed in oLKQfrcNVivzdzSG22a2xo7t001.part1.rar (password incorrect ?)" # unrar 4.x: "CRC failed in the encrypted file oLKQfrcNVivzdzSG22a2xo7t001.part1.rar. Corrupt file or wrong password." # unrar 5.x: "Checksum error in the encrypted file oLKQfrcNVivzdzSG22a2xo7t001.part1.rar. Corrupt file or wrong password." # unrar 5.01: "The specified password is incorrect." 
# unrar 5.80: "Incorrect password for oLKQfrcNVivzdzSG22a2xo7t001.part1.rar" - msg = T('Unpacking failed, archive requires a password') + msg = T("Unpacking failed, archive requires a password") nzo.fail_msg = msg - nzo.set_unpack_info('Unpack', msg, setname) + nzo.set_unpack_info("Unpack", msg, setname) fail = 2 - elif 'is not RAR archive' in line: + elif "is not RAR archive" in line: # Unrecognizable RAR file - msg = T('Unusable RAR file') + msg = T("Unusable RAR file") nzo.fail_msg = msg - nzo.set_unpack_info('Unpack', msg, setname) + nzo.set_unpack_info("Unpack", msg, setname) fail = 3 - elif 'checksum error' in line or 'Unexpected end of archive' in line: + elif "checksum error" in line or "Unexpected end of archive" in line: # Corrupt archive or passworded, we can't know # packed data checksum error in volume FILE - msg = T('Corrupt RAR file') + msg = T("Corrupt RAR file") nzo.fail_msg = msg - nzo.set_unpack_info('Unpack', msg, setname) + nzo.set_unpack_info("Unpack", msg, setname) fail = 3 else: @@ -786,7 +897,7 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction if proc: proc.close() p.wait() - logging.debug('UNRAR output %s', '\n'.join(lines)) + logging.debug("UNRAR output %s", "\n".join(lines)) return fail, (), () if proc: @@ -796,11 +907,11 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction # Which files did we use to extract this? 
rarfiles = rar_volumelist(rarfile_path, password, rarfiles) - logging.debug('UNRAR output %s', '\n'.join(lines)) - nzo.fail_msg = '' - msg = T('Unpacked %s files/folders in %s') % (str(len(extracted)), format_time_string(time.time() - start)) - nzo.set_unpack_info('Unpack', msg, setname) - logging.info('%s', msg) + logging.debug("UNRAR output %s", "\n".join(lines)) + nzo.fail_msg = "" + msg = T("Unpacked %s files/folders in %s") % (str(len(extracted)), format_time_string(time.time() - start)) + nzo.set_unpack_info("Unpack", msg, setname) + logging.info("%s", msg) return 0, extracted, rarfiles @@ -823,7 +934,7 @@ def unzip(nzo, workdir, workdir_complete, delete, one_folder, zips): for _zip in zips: logging.info("Starting extract on zipfile: %s ", _zip) - nzo.set_action_line(T('Unpacking'), '%s' % setname_from_path(_zip)) + nzo.set_action_line(T("Unpacking"), "%s" % setname_from_path(_zip)) if workdir_complete and _zip.startswith(workdir): extraction_path = workdir_complete @@ -835,8 +946,8 @@ def unzip(nzo, workdir, workdir_complete, delete, one_folder, zips): else: i += 1 - msg = T('%s files in %s') % (str(i), format_time_string(time.time() - tms)) - nzo.set_unpack_info('Unpack', msg) + msg = T("%s files in %s") % (str(i), format_time_string(time.time() - tms)) + nzo.set_unpack_info("Unpack", msg) # What's new? 
new_files = list(set(orig_dir_content + recursive_listdir(workdir_complete))) @@ -850,41 +961,46 @@ def unzip(nzo, workdir, workdir_complete, delete, one_folder, zips): remove_file(_zip) i += 1 except OSError: - logging.warning(T('Deleting %s failed!'), _zip) + logging.warning(T("Deleting %s failed!"), _zip) - brokenzip = '%s.1' % _zip + brokenzip = "%s.1" % _zip if os.path.exists(brokenzip): try: remove_file(brokenzip) i += 1 except OSError: - logging.warning(T('Deleting %s failed!'), brokenzip) + logging.warning(T("Deleting %s failed!"), brokenzip) return unzip_failed, new_files except: msg = sys.exc_info()[1] - nzo.fail_msg = T('Unpacking failed, %s') % msg + nzo.fail_msg = T("Unpacking failed, %s") % msg logging.error(T('Error "%s" while running unzip() on %s'), msg, nzo.final_name) return True, [] def ZIP_Extract(zipfile, extraction_path, one_folder): """ Unzip single zip set 'zipfile' to 'extraction_path' """ - command = ['%s' % ZIP_COMMAND, '-o', '-Pnone', '%s' % clip_path(zipfile), - '-d%s' % extraction_path] + command = ["%s" % ZIP_COMMAND, "-o", "-Pnone", "%s" % clip_path(zipfile), "-d%s" % extraction_path] if one_folder or cfg.flat_unpack(): - command.insert(3, '-j') # Unpack without folders + command.insert(3, "-j") # Unpack without folders stup, need_shell, command, creationflags = build_command(command) - logging.debug('Starting unzip: %s', command) - p = Popen(command, shell=need_shell, stdin=subprocess.PIPE, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - startupinfo=stup, creationflags=creationflags) + logging.debug("Starting unzip: %s", command) + p = Popen( + command, + shell=need_shell, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + startupinfo=stup, + creationflags=creationflags, + ) output = platform_btou(p.stdout.read()) - logging.debug('unzip output: \n%s', output) + logging.debug("unzip output: \n%s", output) ret = p.wait() @@ -907,7 +1023,7 @@ def unseven(nzo, workdir, workdir_complete, delete, 
one_folder, sevens): sets = {} for seven in sevens: name, ext = os.path.splitext(seven) - ext = ext.strip('.') + ext = ext.strip(".") if not ext.isdigit(): name = seven ext = None @@ -920,7 +1036,7 @@ def unseven(nzo, workdir, workdir_complete, delete, one_folder, sevens): for seven in sets: extensions = sets[seven] logging.info("Starting extract on 7zip set/file: %s ", seven) - nzo.set_action_line(T('Unpacking'), '%s' % setname_from_path(seven)) + nzo.set_action_line(T("Unpacking"), "%s" % setname_from_path(seven)) if workdir_complete and seven.startswith(workdir): extraction_path = workdir_complete @@ -930,14 +1046,14 @@ def unseven(nzo, workdir, workdir_complete, delete, one_folder, sevens): res, new_files_set, msg = seven_extract(nzo, seven, extensions, extraction_path, one_folder, delete) if res: unseven_failed = True - nzo.set_unpack_info('Unpack', msg, setname_from_path(seven)) + nzo.set_unpack_info("Unpack", msg, setname_from_path(seven)) else: i += 1 new_files.extend(new_files_set) if not unseven_failed: - msg = T('%s files in %s') % (str(i), format_time_string(time.time() - tms)) - nzo.set_unpack_info('Unpack', msg) + msg = T("%s files in %s") % (str(i), format_time_string(time.time() - tms)) + nzo.set_unpack_info("Unpack", msg) return unseven_failed, new_files @@ -960,14 +1076,14 @@ def seven_extract(nzo, sevenset, extensions, extraction_path, one_folder, delete msg = T('Trying 7zip with password "%s"') % password logging.debug(msg) nzo.fail_msg = msg - nzo.set_unpack_info('Unpack', msg, setname_from_path(sevenset)) + nzo.set_unpack_info("Unpack", msg, setname_from_path(sevenset)) fail, new_files, msg = seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete, password) if fail != 2: break - nzo.fail_msg = '' + nzo.fail_msg = "" if fail == 2: - msg = '%s (%s)' % (T('Unpacking failed, archive requires a password'), os.path.basename(sevenset)) + msg = "%s (%s)" % (T("Unpacking failed, archive requires a password"), 
os.path.basename(sevenset)) if fail > 0: nzo.fail_msg = msg nzo.status = Status.FAILED @@ -980,28 +1096,28 @@ def seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete Return fail==0(ok)/fail==1(error)/fail==2(wrong password), new_files, message """ if one_folder: - method = 'e' # Unpack without folders + method = "e" # Unpack without folders else: - method = 'x' # Unpack with folders + method = "x" # Unpack with folders if sabnzbd.WIN32 or sabnzbd.DARWIN: - case = '-ssc-' # Case insensitive + case = "-ssc-" # Case insensitive else: - case = '-ssc' # Case sensitive + case = "-ssc" # Case sensitive if cfg.overwrite_files(): - overwrite = '-aoa' + overwrite = "-aoa" else: - overwrite = '-aou' + overwrite = "-aou" if password: - password = '-p%s' % password + password = "-p%s" % password else: - password = '-p' + password = "-p" if len(extensions) > 0: - name = '%s.001' % sevenset - parm = '-tsplit' + name = "%s.001" % sevenset + parm = "-tsplit" else: name = sevenset - parm = '-tzip' if sevenset.lower().endswith('.zip') else '-t7z' + parm = "-tzip" if sevenset.lower().endswith(".zip") else "-t7z" if not os.path.exists(name): return 1, [], T('7ZIP set "%s" is incomplete, cannot unpack') % setname_from_path(sevenset) @@ -1009,28 +1125,33 @@ def seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete # For file-bookkeeping orig_dir_content = recursive_listdir(extraction_path) - command = [SEVEN_COMMAND, method, '-y', overwrite, parm, case, password, - '-o%s' % extraction_path, name] + command = [SEVEN_COMMAND, method, "-y", overwrite, parm, case, password, "-o%s" % extraction_path, name] stup, need_shell, command, creationflags = build_command(command) - logging.debug('Starting 7za: %s', command) - p = Popen(command, shell=need_shell, stdin=subprocess.PIPE, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - startupinfo=stup, creationflags=creationflags) + logging.debug("Starting 7za: %s", command) + p = Popen( + command, + 
shell=need_shell, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + startupinfo=stup, + creationflags=creationflags, + ) output = platform_btou(p.stdout.read()) - logging.debug('7za output: %s', output) + logging.debug("7za output: %s", output) ret = p.wait() # Return-code for CRC and Password is the same - if ret == 2 and 'ERROR: CRC Failed' in output: + if ret == 2 and "ERROR: CRC Failed" in output: # We can output a more general error ret = 1 msg = T('ERROR: CRC failed in "%s"') % setname_from_path(sevenset) else: # Default message - msg = T('Could not unpack %s') % setname_from_path(sevenset) + msg = T("Could not unpack %s") % setname_from_path(sevenset) # What's new? new_files = list(set(orig_dir_content + recursive_listdir(extraction_path))) @@ -1038,16 +1159,16 @@ def seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete if ret == 0 and delete: if extensions: for ext in extensions: - path = '%s.%s' % (sevenset, ext) + path = "%s.%s" % (sevenset, ext) try: remove_file(path) except: - logging.warning(T('Deleting %s failed!'), path) + logging.warning(T("Deleting %s failed!"), path) else: try: remove_file(sevenset) except: - logging.warning(T('Deleting %s failed!'), sevenset) + logging.warning(T("Deleting %s failed!"), sevenset) # Always return an error message, even when return code is 0 return ret, new_files, msg @@ -1082,11 +1203,11 @@ def par2_repair(parfile_nzf, nzo, workdir, setname, single): # Start QuickCheck nzo.status = Status.QUICK_CHECK - nzo.set_action_line(T('Repair'), T('Quick Checking')) + nzo.set_action_line(T("Repair"), T("Quick Checking")) qc_result = quick_check_set(setname, nzo) if qc_result: logging.info("Quick-check for %s is OK, skipping repair", setname) - nzo.set_unpack_info('Repair', T('[%s] Quick Check OK') % setname) + nzo.set_unpack_info("Repair", T("[%s] Quick Check OK") % setname) result = True if not result and cfg.enable_all_par(): @@ -1105,25 +1226,29 @@ def 
par2_repair(parfile_nzf, nzo, workdir, setname, single): result = False readd = False try: - nzo.set_action_line(T('Repair'), T('Starting Repair')) + nzo.set_action_line(T("Repair"), T("Starting Repair")) logging.info('Scanning "%s"', parfile) joinables, zips, rars, sevens, ts = build_filelists(workdir, check_rar=False) # Multipar or not? if sabnzbd.WIN32 and cfg.multipar(): - finished, readd, datafiles, used_joinables, used_for_repair = MultiPar_Verify(parfile, nzo, setname, joinables, single=single) + finished, readd, datafiles, used_joinables, used_for_repair = MultiPar_Verify( + parfile, nzo, setname, joinables, single=single + ) else: - finished, readd, datafiles, used_joinables, used_for_repair = PAR_Verify(parfile, nzo, setname, joinables, single=single) + finished, readd, datafiles, used_joinables, used_for_repair = PAR_Verify( + parfile, nzo, setname, joinables, single=single + ) if finished: result = True - logging.info('Par verify finished ok on %s!', parfile) + logging.info("Par verify finished ok on %s!", parfile) # Remove this set so we don't try to check it again nzo.remove_parset(parfile_nzf.setname) else: - logging.info('Par verify failed on %s!', parfile) + logging.info("Par verify failed on %s!", parfile) if not readd: # Failed to repair -> remove this set @@ -1131,8 +1256,8 @@ def par2_repair(parfile_nzf, nzo, workdir, setname, single): return readd, False except: msg = sys.exc_info()[1] - nzo.fail_msg = T('Repairing failed, %s') % msg - logging.error(T('Error %s while running par2_repair on set %s'), msg, setname) + nzo.fail_msg = T("Repairing failed, %s") % msg + logging.error(T("Error %s while running par2_repair on set %s"), msg, setname) logging.info("Traceback: ", exc_info=True) return readd, result @@ -1143,10 +1268,10 @@ def par2_repair(parfile_nzf, nzo, workdir, setname, single): # Remove extra files created during repair and par2 base files for path in new_dir_content: - if os.path.splitext(path)[1] == '.1' and path not in 
old_dir_content: + if os.path.splitext(path)[1] == ".1" and path not in old_dir_content: deletables.append(os.path.join(workdir, path)) - deletables.append(os.path.join(workdir, setname + '.par2')) - deletables.append(os.path.join(workdir, setname + '.PAR2')) + deletables.append(os.path.join(workdir, setname + ".par2")) + deletables.append(os.path.join(workdir, setname + ".PAR2")) deletables.append(parfile) # Add output of par2-repair to remove @@ -1163,10 +1288,10 @@ def par2_repair(parfile_nzf, nzo, workdir, setname, single): try: remove_file(filepath) except OSError: - logging.warning(T('Deleting %s failed!'), filepath) + logging.warning(T("Deleting %s failed!"), filepath) except: msg = sys.exc_info()[1] - nzo.fail_msg = T('Repairing failed, %s') % msg + nzo.fail_msg = T("Repairing failed, %s") % msg logging.error(T('Error "%s" while running par2_repair on set %s'), msg, setname, exc_info=True) return readd, result @@ -1175,7 +1300,7 @@ def par2_repair(parfile_nzf, nzo, workdir, setname, single): _RE_BLOCK_FOUND = re.compile(r'File: "([^"]+)" - found \d+ of \d+ data blocks from "([^"]+)"') _RE_IS_MATCH_FOR = re.compile(r'File: "([^"]+)" - is a match for "([^"]+)"') _RE_LOADING_PAR2 = re.compile(r'Loading "([^"]+)"\.') -_RE_LOADED_PAR2 = re.compile(r'Loaded (\d+) new packets') +_RE_LOADED_PAR2 = re.compile(r"Loaded (\d+) new packets") def PAR_Verify(parfile, nzo, setname, joinables, single=False): @@ -1187,16 +1312,16 @@ def PAR_Verify(parfile, nzo, setname, joinables, single=False): start = time.time() options = cfg.par_option().strip() - command = [str(PAR2_COMMAND), 'r', options, parfile] + command = [str(PAR2_COMMAND), "r", options, parfile] # Append the wildcard for this set parfolder = os.path.split(parfile)[0] - if single or len(globber(parfolder, setname + '*')) < 2: + if single or len(globber(parfolder, setname + "*")) < 2: # Support bizarre naming conventions - wildcard = '*' + wildcard = "*" else: # Normal case, everything is named after set - wildcard 
= setname + '*' + wildcard = setname + "*" if sabnzbd.WIN32 or sabnzbd.DARWIN: command.append(os.path.join(parfolder, wildcard)) @@ -1209,13 +1334,13 @@ def PAR_Verify(parfile, nzo, setname, joinables, single=False): # Or the one that complains about basepath # Only if we're not doing multicore if not sabnzbd.WIN32 and not sabnzbd.DARWIN: - par2text = run_simple([command[0], '-h']) - if 'No data skipping' in par2text: - logging.info('Detected par2cmdline version that skips blocks, adding -N parameter') - command.insert(2, '-N') - if 'Set the basepath' in par2text: - logging.info('Detected par2cmdline version that needs basepath, adding -B parameter') - command.insert(2, '-B') + par2text = run_simple([command[0], "-h"]) + if "No data skipping" in par2text: + logging.info("Detected par2cmdline version that skips blocks, adding -N parameter") + command.insert(2, "-N") + if "Set the basepath" in par2text: + logging.info("Detected par2cmdline version that needs basepath, adding -B parameter") + command.insert(2, "-B") command.insert(3, parfolder) stup, need_shell, command, creationflags = build_command(command) @@ -1223,13 +1348,19 @@ def PAR_Verify(parfile, nzo, setname, joinables, single=False): # par2multicore wants to see \\.\ paths on Windows # See: https://github.com/sabnzbd/sabnzbd/pull/771 if sabnzbd.WIN32: - command = [clip_path(x) if x.startswith('\\\\?\\') else x for x in command] + command = [clip_path(x) if x.startswith("\\\\?\\") else x for x in command] # Run the external command - logging.info('Starting par2: %s', command) - p = Popen(command, shell=need_shell, stdin=subprocess.PIPE, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - startupinfo=stup, creationflags=creationflags) + logging.info("Starting par2: %s", command) + p = Popen( + command, + shell=need_shell, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + startupinfo=stup, + creationflags=creationflags, + ) proc = p.stdout if p.stdin: @@ -1241,7 +1372,7 @@ def 
PAR_Verify(parfile, nzo, setname, joinables, single=False): renames = {} reconstructed = [] - linebuf = '' + linebuf = "" finished = 0 readd = False @@ -1258,58 +1389,56 @@ def PAR_Verify(parfile, nzo, setname, joinables, single=False): break # Line not complete yet - if char not in ('\n', '\r'): + if char not in ("\n", "\r"): linebuf += char continue line = linebuf.strip() - linebuf = '' + linebuf = "" # Check if we should still continue if not nzo.pp_active: p.kill() - msg = T('PostProcessing was aborted (%s)') % T('Repair') + msg = T("PostProcessing was aborted (%s)") % T("Repair") nzo.fail_msg = msg - nzo.set_unpack_info('Repair', msg, setname) + nzo.set_unpack_info("Repair", msg, setname) nzo.status = Status.FAILED readd = False break # Skip empty lines - if line == '': + if line == "": continue - if 'Repairing:' not in line: + if "Repairing:" not in line: lines.append(line) - if line.startswith(('Invalid option specified', 'Invalid thread option', 'Cannot specify recovery file count')): - msg = T('[%s] PAR2 received incorrect options, check your Config->Switches settings') % setname - nzo.set_unpack_info('Repair', msg) + if line.startswith(("Invalid option specified", "Invalid thread option", "Cannot specify recovery file count")): + msg = T("[%s] PAR2 received incorrect options, check your Config->Switches settings") % setname + nzo.set_unpack_info("Repair", msg) nzo.status = Status.FAILED logging.error(msg) - elif line.startswith('All files are correct'): - msg = T('[%s] Verified in %s, all files correct') % (setname, format_time_string(time.time() - start)) - nzo.set_unpack_info('Repair', msg) - logging.info('Verified in %s, all files correct', - format_time_string(time.time() - start)) + elif line.startswith("All files are correct"): + msg = T("[%s] Verified in %s, all files correct") % (setname, format_time_string(time.time() - start)) + nzo.set_unpack_info("Repair", msg) + logging.info("Verified in %s, all files correct", format_time_string(time.time() 
- start)) finished = 1 - elif line.startswith('Repair is required'): - msg = T('[%s] Verified in %s, repair is required') % (setname, format_time_string(time.time() - start)) - nzo.set_unpack_info('Repair', msg) - logging.info('Verified in %s, repair is required', - format_time_string(time.time() - start)) + elif line.startswith("Repair is required"): + msg = T("[%s] Verified in %s, repair is required") % (setname, format_time_string(time.time() - start)) + nzo.set_unpack_info("Repair", msg) + logging.info("Verified in %s, repair is required", format_time_string(time.time() - start)) start = time.time() verified = 1 # Reset to use them again for verification of repair verifytotal = 0 verifynum = 0 - elif line.startswith('Main packet not found') or 'The recovery file does not exist' in line: + elif line.startswith("Main packet not found") or "The recovery file does not exist" in line: # Initialparfile probably didn't decode properly or bad user parameters # We will try to get another par2 file, but 99% of time it's user parameters - msg = T('Invalid par2 files or invalid PAR2 parameters, cannot verify or repair') + msg = T("Invalid par2 files or invalid PAR2 parameters, cannot verify or repair") logging.info(msg) logging.info("Extra pars = %s", nzo.extrapars[setname]) @@ -1329,10 +1458,10 @@ def PAR_Verify(parfile, nzo, setname, joinables, single=False): readd = True else: nzo.fail_msg = msg - nzo.set_unpack_info('Repair', msg, setname) + nzo.set_unpack_info("Repair", msg, setname) nzo.status = Status.FAILED - elif line.startswith('You need'): + elif line.startswith("You need"): # We need more blocks, but are they available? 
chunks = line.split() needed_blocks = int(chunks[2]) @@ -1340,47 +1469,47 @@ def PAR_Verify(parfile, nzo, setname, joinables, single=False): # Check if we have enough blocks added_blocks = nzo.get_extra_blocks(setname, needed_blocks) if added_blocks: - msg = T('Fetching %s blocks...') % str(added_blocks) - nzo.set_action_line(T('Fetching'), msg) + msg = T("Fetching %s blocks...") % str(added_blocks) + nzo.set_action_line(T("Fetching"), msg) readd = True else: # Failed - msg = T('Repair failed, not enough repair blocks (%s short)') % str(needed_blocks) + msg = T("Repair failed, not enough repair blocks (%s short)") % str(needed_blocks) nzo.fail_msg = msg - nzo.set_unpack_info('Repair', msg, setname) + nzo.set_unpack_info("Repair", msg, setname) nzo.status = Status.FAILED - elif line.startswith('Repair is possible'): + elif line.startswith("Repair is possible"): start = time.time() - nzo.set_action_line(T('Repairing'), '%2d%%' % 0) + nzo.set_action_line(T("Repairing"), "%2d%%" % 0) - elif line.startswith('Repairing:'): + elif line.startswith("Repairing:"): chunks = line.split() per = float(chunks[-1][:-1]) - nzo.set_action_line(T('Repairing'), '%2d%%' % per) + nzo.set_action_line(T("Repairing"), "%2d%%" % per) nzo.status = Status.REPAIRING - elif line.startswith('Repair complete'): - msg = T('[%s] Repaired in %s') % (setname, format_time_string(time.time() - start)) - nzo.set_unpack_info('Repair', msg) - logging.info('Repaired in %s', format_time_string(time.time() - start)) + elif line.startswith("Repair complete"): + msg = T("[%s] Repaired in %s") % (setname, format_time_string(time.time() - start)) + nzo.set_unpack_info("Repair", msg) + logging.info("Repaired in %s", format_time_string(time.time() - start)) finished = 1 - elif verified and line.endswith(('are missing.', 'exist but are damaged.')): + elif verified and line.endswith(("are missing.", "exist but are damaged.")): # Files that will later be verified after repair chunks = line.split() verifytotal += 
int(chunks[0]) - elif line.startswith('Verifying repaired files'): + elif line.startswith("Verifying repaired files"): in_verify_repaired = True - nzo.set_action_line(T('Verifying repair'), '%02d/%02d' % (verifynum, verifytotal)) + nzo.set_action_line(T("Verifying repair"), "%02d/%02d" % (verifynum, verifytotal)) - elif in_verify_repaired and line.startswith('Target'): + elif in_verify_repaired and line.startswith("Target"): verifynum += 1 if verifynum <= verifytotal: - nzo.set_action_line(T('Verifying repair'), '%02d/%02d' % (verifynum, verifytotal)) + nzo.set_action_line(T("Verifying repair"), "%02d/%02d" % (verifynum, verifytotal)) - elif line.startswith('File:') and line.find('data blocks from') > 0: + elif line.startswith("File:") and line.find("data blocks from") > 0: m = _RE_BLOCK_FOUND.search(line) if m: workdir = os.path.split(parfile)[0] @@ -1394,13 +1523,13 @@ def PAR_Verify(parfile, nzo, setname, joinables, single=False): break # Special case of joined RAR files, the "of" and "from" must both be RAR files # This prevents the joined rars files from being seen as an extra rar-set - if '.rar' in old_name.lower() and '.rar' in new_name.lower(): + if ".rar" in old_name.lower() and ".rar" in new_name.lower(): used_joinables.append(os.path.join(workdir, old_name)) else: logging.debug('PAR2 will reconstruct "%s" from "%s"', new_name, old_name) reconstructed.append(os.path.join(workdir, old_name)) - elif 'Could not write' in line and 'at offset 0:' in line: + elif "Could not write" in line and "at offset 0:" in line: # If there are joinables, this error will only happen in case of 100% complete files # We can just skip the retry, because par2cmdline will fail in those cases # becauses it refuses to scan the ".001" file @@ -1408,21 +1537,21 @@ def PAR_Verify(parfile, nzo, setname, joinables, single=False): finished = 1 used_joinables = [] - elif ' cannot be renamed to ' in line: + elif " cannot be renamed to " in line: msg = line.strip() nzo.fail_msg = msg - 
nzo.set_unpack_info('Repair', msg, setname) + nzo.set_unpack_info("Repair", msg, setname) nzo.status = Status.FAILED - elif 'There is not enough space on the disk' in line: + elif "There is not enough space on the disk" in line: # Oops, disk is full! - msg = T('Repairing failed, %s') % T('Disk full') + msg = T("Repairing failed, %s") % T("Disk full") nzo.fail_msg = msg - nzo.set_unpack_info('Repair', msg, setname) + nzo.set_unpack_info("Repair", msg, setname) nzo.status = Status.FAILED # File: "oldname.rar" - is a match for "newname.rar". - elif 'is a match for' in line: + elif "is a match for" in line: m = _RE_IS_MATCH_FOR.search(line) if m: old_name = m.group(1) @@ -1433,32 +1562,32 @@ def PAR_Verify(parfile, nzo, setname, joinables, single=False): # Show progress if verifytotal == 0 or verifynum < verifytotal: verifynum += 1 - nzo.set_action_line(T('Verifying'), '%02d/%02d' % (verifynum, verifytotal)) + nzo.set_action_line(T("Verifying"), "%02d/%02d" % (verifynum, verifytotal)) - elif 'Scanning extra files' in line: + elif "Scanning extra files" in line: # Obfuscated post most likely, so reset counter to show progress verifynum = 1 - elif 'No details available for recoverable file' in line: + elif "No details available for recoverable file" in line: msg = line.strip() nzo.fail_msg = msg - nzo.set_unpack_info('Repair', msg, setname) + nzo.set_unpack_info("Repair", msg, setname) nzo.status = Status.FAILED - elif line.startswith('Repair Failed.'): + elif line.startswith("Repair Failed."): # Unknown repair problem - msg = T('Repairing failed, %s') % line + msg = T("Repairing failed, %s") % line nzo.fail_msg = msg - nzo.set_unpack_info('Repair', msg, setname) + nzo.set_unpack_info("Repair", msg, setname) nzo.status = Status.FAILED finished = 0 elif not verified: - if line.startswith('Verifying source files'): - nzo.set_action_line(T('Verifying'), '01/%02d' % verifytotal) + if line.startswith("Verifying source files"): + nzo.set_action_line(T("Verifying"), "01/%02d" % 
verifytotal) nzo.status = Status.VERIFYING - elif line.startswith('Scanning:'): + elif line.startswith("Scanning:"): pass # Target files @@ -1467,19 +1596,19 @@ def PAR_Verify(parfile, nzo, setname, joinables, single=False): nzo.status = Status.VERIFYING verifynum += 1 if verifytotal == 0 or verifynum < verifytotal: - nzo.set_action_line(T('Verifying'), '%02d/%02d' % (verifynum, verifytotal)) + nzo.set_action_line(T("Verifying"), "%02d/%02d" % (verifynum, verifytotal)) else: - nzo.set_action_line(T('Checking extra files'), '%02d' % verifynum) + nzo.set_action_line(T("Checking extra files"), "%02d" % verifynum) # Remove redundant extra files that are just duplicates of original ones - if 'duplicate data blocks' in line: + if "duplicate data blocks" in line: used_for_repair.append(m.group(1)) else: datafiles.append(m.group(1)) continue # Verify done - m = re.match(r'There are (\d+) recoverable files', line) + m = re.match(r"There are (\d+) recoverable files", line) if m: verifytotal = int(m.group(1)) @@ -1489,7 +1618,7 @@ def PAR_Verify(parfile, nzo, setname, joinables, single=False): if nzo.fail_msg: logging.info(nzo.fail_msg) - logging.debug('PAR2 output was\n%s', '\n'.join(lines)) + logging.debug("PAR2 output was\n%s", "\n".join(lines)) # If successful, add renamed files to the collection if finished and renames: @@ -1502,8 +1631,10 @@ def PAR_Verify(parfile, nzo, setname, joinables, single=False): return finished, readd, datafiles, used_joinables, used_for_repair + _RE_FILENAME = re.compile(r'"([^"]+)"') + def MultiPar_Verify(parfile, nzo, setname, joinables, single=False): """ Run par2 on par-set """ parfolder = os.path.split(parfile)[0] @@ -1516,35 +1647,42 @@ def MultiPar_Verify(parfile, nzo, setname, joinables, single=False): # Caching of verification implemented by adding: # But not really required due to prospective-par2 - command = [str(MULTIPAR_COMMAND), 'r', '-vs2', '-vd%s' % parfolder, parfile] + command = [str(MULTIPAR_COMMAND), "r", "-vs2", "-vd%s" % 
parfolder, parfile] # Check if there are maybe par2cmdline/par2tbb commands supplied - if '-t' in cfg.par_option() or '-p' in cfg.par_option(): - logging.info('Removing old par2cmdline/par2tbb options for MultiPar') - cfg.par_option.set('') + if "-t" in cfg.par_option() or "-p" in cfg.par_option(): + logging.info("Removing old par2cmdline/par2tbb options for MultiPar") + cfg.par_option.set("") # Only add user-options if supplied options = cfg.par_option().strip() if options: # We wrongly instructed users to use /x parameter style instead of -x - options = options.replace('/', '-', 1) + options = options.replace("/", "-", 1) command.insert(2, options) # Append the wildcard for this set - if single or len(globber(parfolder, setname + '*')) < 2: + if single or len(globber(parfolder, setname + "*")) < 2: # Support bizarre naming conventions - wildcard = '*' + wildcard = "*" else: # Normal case, everything is named after set - wildcard = setname + '*' + wildcard = setname + "*" command.append(os.path.join(parfolder, wildcard)) stup, need_shell, command, creationflags = build_command(command) - logging.info('Starting MultiPar: %s', command) + logging.info("Starting MultiPar: %s", command) lines = [] - p = Popen(command, shell=need_shell, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - startupinfo=stup, creationflags=creationflags) + p = Popen( + command, + shell=need_shell, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + startupinfo=stup, + creationflags=creationflags, + ) proc = p.stdout @@ -1556,7 +1694,7 @@ def MultiPar_Verify(parfile, nzo, setname, joinables, single=False): renames = {} reconstructed = [] - linebuf = '' + linebuf = "" finished = 0 readd = False @@ -1577,42 +1715,42 @@ def MultiPar_Verify(parfile, nzo, setname, joinables, single=False): break # Line not complete yet - if char not in ('\n', '\r'): + if char not in ("\n", "\r"): linebuf += char continue line = linebuf.strip() - linebuf = '' + 
linebuf = "" # Check if we should still continue if not nzo.pp_active: p.kill() - msg = T('PostProcessing was aborted (%s)') % T('Repair') + msg = T("PostProcessing was aborted (%s)") % T("Repair") nzo.fail_msg = msg - nzo.set_unpack_info('Repair', msg, setname) + nzo.set_unpack_info("Repair", msg, setname) nzo.status = Status.FAILED readd = False break # Skip empty lines - if line == '': + if line == "": continue # Save it all lines.append(line) # ----------------- Startup - if line.startswith('invalid option'): + if line.startswith("invalid option"): # Option error - msg = T('[%s] PAR2 received incorrect options, check your Config->Switches settings') % setname - nzo.set_unpack_info('Repair', msg) + msg = T("[%s] PAR2 received incorrect options, check your Config->Switches settings") % setname + nzo.set_unpack_info("Repair", msg) nzo.status = Status.FAILED logging.error(msg) - elif line.startswith('valid file is not found'): + elif line.startswith("valid file is not found"): # Initialparfile probably didn't decode properly, or bad user parameters # We will try to get another par2 file, but 99% of time it's user parameters - msg = T('Invalid par2 files or invalid PAR2 parameters, cannot verify or repair') + msg = T("Invalid par2 files or invalid PAR2 parameters, cannot verify or repair") logging.info(msg) logging.info("Extra pars = %s", nzo.extrapars[setname]) @@ -1632,40 +1770,40 @@ def MultiPar_Verify(parfile, nzo, setname, joinables, single=False): readd = True else: nzo.fail_msg = msg - nzo.set_unpack_info('Repair', msg, setname) + nzo.set_unpack_info("Repair", msg, setname) nzo.status = Status.FAILED - elif line.startswith('There is not enough space on the disk'): - msg = T('Repairing failed, %s') % T('Disk full') + elif line.startswith("There is not enough space on the disk"): + msg = T("Repairing failed, %s") % T("Disk full") nzo.fail_msg = msg - nzo.set_unpack_info('Repair', msg, setname) + nzo.set_unpack_info("Repair", msg, setname) nzo.status = 
Status.FAILED # ----------------- Start check/verify stage - elif line.startswith('Recovery Set ID'): + elif line.startswith("Recovery Set ID"): # Remove files were MultiPar stores verification result when repaired succesfull recovery_id = line.split()[-1] - used_for_repair.append('2_%s.bin' % recovery_id) - used_for_repair.append('2_%s.ini' % recovery_id) + used_for_repair.append("2_%s.bin" % recovery_id) + used_for_repair.append("2_%s.ini" % recovery_id) - elif line.startswith('Input File total count'): + elif line.startswith("Input File total count"): # How many files will it try to find? verifytotal = int(line.split()[-1]) # ----------------- Misnamed-detection stage # Misnamed files - elif line.startswith('Searching misnamed file'): + elif line.startswith("Searching misnamed file"): # We are in the misnamed files block misnamed_files = True verifynum = 0 - elif misnamed_files and 'Found' in line: + elif misnamed_files and "Found" in line: # First it reports the current filename m = _RE_FILENAME.search(line) if m: verifynum += 1 - nzo.set_action_line(T('Checking'), '%02d/%02d' % (verifynum, verifytotal)) + nzo.set_action_line(T("Checking"), "%02d/%02d" % (verifynum, verifytotal)) old_name = m.group(1) - elif misnamed_files and 'Misnamed' in line: + elif misnamed_files and "Misnamed" in line: # Then it finds the actual m = _RE_FILENAME.search(line) if m and old_name: @@ -1678,40 +1816,40 @@ def MultiPar_Verify(parfile, nzo, setname, joinables, single=False): # ----------------- Checking stage # Checking input files - elif line.startswith('Complete file count'): + elif line.startswith("Complete file count"): in_check = False verifynum = 0 old_name = None - elif line.startswith('Verifying Input File'): + elif line.startswith("Verifying Input File"): in_check = True nzo.status = Status.VERIFYING elif in_check: m = _RE_FILENAME.search(line) if m: # Only increase counter if it was really the detection line - if line.startswith('= ') or '%' not in line: + if 
line.startswith("= ") or "%" not in line: verifynum += 1 - nzo.set_action_line(T('Checking'), '%02d/%02d' % (verifynum, verifytotal)) + nzo.set_action_line(T("Checking"), "%02d/%02d" % (verifynum, verifytotal)) old_name = m.group(1) # ----------------- Verify stage # Which files need extra verification? - elif line.startswith('Damaged file count'): + elif line.startswith("Damaged file count"): verifytotal = int(line.split()[-1]) - elif line.startswith('Missing file count'): + elif line.startswith("Missing file count"): verifytotal += int(line.split()[-1]) # Actual verification - elif line.startswith('Input File Slice found'): + elif line.startswith("Input File Slice found"): # End of verification AND end of misnamed file search in_verify = False misnamed_files = False old_name = None - elif line.startswith('Finding available slice'): + elif line.startswith("Finding available slice"): # The actual scanning of the files in_verify = True - nzo.set_action_line(T('Verifying'), T('Checking')) + nzo.set_action_line(T("Verifying"), T("Checking")) elif in_verify: m = _RE_FILENAME.search(line) if m: @@ -1719,20 +1857,20 @@ def MultiPar_Verify(parfile, nzo, setname, joinables, single=False): # 'datafiles' will not contain all data-files in par-set, only the # ones that got scanned, but it's ouput is never used! 
nzo.status = Status.VERIFYING - if line.split()[1] in ('Damaged', 'Found'): + if line.split()[1] in ("Damaged", "Found"): verifynum += 1 datafiles.append(m.group(1)) # Set old_name in case it was misnamed and found (not when we are joining) old_name = None - if line.split()[1] == 'Found' and not joinables: + if line.split()[1] == "Found" and not joinables: old_name = m.group(1) # Sometimes we don't know the total (filejoin) if verifytotal <= 1: - nzo.set_action_line(T('Verifying'), '%02d' % verifynum) + nzo.set_action_line(T("Verifying"), "%02d" % verifynum) else: - nzo.set_action_line(T('Verifying'), '%02d/%02d' % (verifynum, verifytotal)) + nzo.set_action_line(T("Verifying"), "%02d/%02d" % (verifynum, verifytotal)) elif old_name and old_name != m.group(1): # Hey we found another misnamed one! @@ -1752,7 +1890,7 @@ def MultiPar_Verify(parfile, nzo, setname, joinables, single=False): # For damaged files it reports the filename twice, so only then start verifynum += 1 if verifynum / 2 > verifytotal: - nzo.set_action_line(T('Checking extra files'), '%02d' % verifynum) + nzo.set_action_line(T("Checking extra files"), "%02d" % verifynum) if joinables: # Find out if a joinable file has been used for joining @@ -1762,7 +1900,7 @@ def MultiPar_Verify(parfile, nzo, setname, joinables, single=False): datafiles.append(m.group(1)) break - elif line.startswith('Need'): + elif line.startswith("Need"): # We need more blocks, but are they available? 
chunks = line.split() needed_blocks = int(chunks[1]) @@ -1770,14 +1908,14 @@ def MultiPar_Verify(parfile, nzo, setname, joinables, single=False): # Check if we have enough blocks added_blocks = nzo.get_extra_blocks(setname, needed_blocks) if added_blocks: - msg = T('Fetching %s blocks...') % str(added_blocks) - nzo.set_action_line(T('Fetching'), msg) + msg = T("Fetching %s blocks...") % str(added_blocks) + nzo.set_action_line(T("Fetching"), msg) readd = True else: # Failed - msg = T('Repair failed, not enough repair blocks (%s short)') % str(needed_blocks) + msg = T("Repair failed, not enough repair blocks (%s short)") % str(needed_blocks) nzo.fail_msg = msg - nzo.set_unpack_info('Repair', msg, setname) + nzo.set_unpack_info("Repair", msg, setname) nzo.status = Status.FAILED # MultiPar can say 'PAR File(s) Incomplete' also when it needs more blocks @@ -1785,41 +1923,39 @@ def MultiPar_Verify(parfile, nzo, setname, joinables, single=False): finished = 0 # Result of verification - elif line.startswith('All Files Complete') or line.endswith('PAR File(s) Incomplete'): + elif line.startswith("All Files Complete") or line.endswith("PAR File(s) Incomplete"): # Completed without damage! # 'PAR File(s) Incomplete' is reported for success # but when there are very similar filenames in the folder - msg = T('[%s] Verified in %s, all files correct') % (setname, format_time_string(time.time() - start)) - nzo.set_unpack_info('Repair', msg) - logging.info('Verified in %s, all files correct', - format_time_string(time.time() - start)) + msg = T("[%s] Verified in %s, all files correct") % (setname, format_time_string(time.time() - start)) + nzo.set_unpack_info("Repair", msg) + logging.info("Verified in %s, all files correct", format_time_string(time.time() - start)) finished = 1 - elif line.startswith(('Ready to repair', 'Ready to rejoin')): + elif line.startswith(("Ready to repair", "Ready to rejoin")): # Ready to repair! 
# Or we are re-joining a split file when there's no damage but takes time - msg = T('[%s] Verified in %s, repair is required') % (setname, format_time_string(time.time() - start)) - nzo.set_unpack_info('Repair', msg) - logging.info('Verified in %s, repair is required', - format_time_string(time.time() - start)) + msg = T("[%s] Verified in %s, repair is required") % (setname, format_time_string(time.time() - start)) + nzo.set_unpack_info("Repair", msg) + logging.info("Verified in %s, repair is required", format_time_string(time.time() - start)) start = time.time() # Set message for user in case of joining - if line.startswith('Ready to rejoin'): - nzo.set_action_line(T('Joining'), '%2d' % len(used_joinables)) + if line.startswith("Ready to rejoin"): + nzo.set_action_line(T("Joining"), "%2d" % len(used_joinables)) else: # If we are repairing a joinable set, it won't actually # do the joining. So we can't remove those files! used_joinables = [] # ----------------- Repair stage - elif 'Recovering slice' in line: + elif "Recovering slice" in line: # Before this it will calculate matrix, here is where it starts start = time.time() in_repair = True - nzo.set_action_line(T('Repairing'), '%2d%%' % 0) + nzo.set_action_line(T("Repairing"), "%2d%%" % 0) - elif in_repair and line.startswith('Verifying repair'): + elif in_repair and line.startswith("Verifying repair"): in_repair = False in_verify_repaired = True # How many will be checked? 
@@ -1830,36 +1966,36 @@ def MultiPar_Verify(parfile, nzo, setname, joinables, single=False): try: # Line with percentage of repair (nothing else) per = float(line[:-1]) - nzo.set_action_line(T('Repairing'), '%2d%%' % per) + nzo.set_action_line(T("Repairing"), "%2d%%" % per) nzo.status = Status.REPAIRING except: # Checksum error - if 'checksum' in line: + if "checksum" in line: # Failed due to checksum error of multipar - msg = T('Repairing failed, %s') % line + msg = T("Repairing failed, %s") % line nzo.fail_msg = msg - nzo.set_unpack_info('Repair', msg, setname) + nzo.set_unpack_info("Repair", msg, setname) nzo.status = Status.FAILED else: # Not sure, log error logging.info("Traceback: ", exc_info=True) - elif line.startswith('Repaired successfully'): - msg = T('[%s] Repaired in %s') % (setname, format_time_string(time.time() - start)) - nzo.set_unpack_info('Repair', msg) - logging.info('Repaired in %s', format_time_string(time.time() - start)) + elif line.startswith("Repaired successfully"): + msg = T("[%s] Repaired in %s") % (setname, format_time_string(time.time() - start)) + nzo.set_unpack_info("Repair", msg) + logging.info("Repaired in %s", format_time_string(time.time() - start)) finished = 1 - elif in_verify_repaired and line.startswith('Repaired :'): + elif in_verify_repaired and line.startswith("Repaired :"): # Track verification of repaired files (can sometimes take a while) verifynum += 1 - nzo.set_action_line(T('Verifying repair'), '%02d/%02d' % (verifynum, verifytotal)) + nzo.set_action_line(T("Verifying repair"), "%02d/%02d" % (verifynum, verifytotal)) - elif line.startswith('Failed to repair'): + elif line.startswith("Failed to repair"): # Unknown repair problem - msg = T('Repairing failed, %s') % line + msg = T("Repairing failed, %s") % line nzo.fail_msg = msg - nzo.set_unpack_info('Repair', msg, setname) + nzo.set_unpack_info("Repair", msg, setname) nzo.status = Status.FAILED finished = 0 @@ -1869,7 +2005,7 @@ def MultiPar_Verify(parfile, nzo, 
setname, joinables, single=False): if nzo.fail_msg: logging.info(nzo.fail_msg) - logging.debug('MultiPar output was\n%s', '\n'.join(lines)) + logging.debug("MultiPar output was\n%s", "\n".join(lines)) # Add renamed files to the collection # MultiPar always(!!) renames automatically whatever it can in the 'Searching misnamed file:'-section @@ -1889,6 +2025,7 @@ def MultiPar_Verify(parfile, nzo, setname, joinables, single=False): return finished, readd, datafiles, used_joinables, used_for_repair + def create_env(nzo=None, extra_env_fields={}): """ Modify the environment for pp-scripts with extra information OSX: Return copy of environment without PYTHONPATH and PYTHONHOME @@ -1904,40 +2041,44 @@ def create_env(nzo=None, extra_env_fields={}): field_value = getattr(nzo, field) # Special filters for Python types if field_value is None: - env['SAB_' + field.upper()] = '' + env["SAB_" + field.upper()] = "" elif isinstance(field_value, bool): - env['SAB_' + field.upper()] = str(field_value*1) + env["SAB_" + field.upper()] = str(field_value * 1) else: - env['SAB_' + field.upper()] = str(field_value) + env["SAB_" + field.upper()] = str(field_value) except: # Catch key errors pass # Always supply basic info - extra_env_fields.update({'program_dir': sabnzbd.DIR_PROG, - 'par2_command': sabnzbd.newsunpack.PAR2_COMMAND, - 'multipar_command': sabnzbd.newsunpack.MULTIPAR_COMMAND, - 'rar_command': sabnzbd.newsunpack.RAR_COMMAND, - 'zip_command': sabnzbd.newsunpack.ZIP_COMMAND, - '7zip_command': sabnzbd.newsunpack.SEVEN_COMMAND, - 'version': sabnzbd.__version__}) + extra_env_fields.update( + { + "program_dir": sabnzbd.DIR_PROG, + "par2_command": sabnzbd.newsunpack.PAR2_COMMAND, + "multipar_command": sabnzbd.newsunpack.MULTIPAR_COMMAND, + "rar_command": sabnzbd.newsunpack.RAR_COMMAND, + "zip_command": sabnzbd.newsunpack.ZIP_COMMAND, + "7zip_command": sabnzbd.newsunpack.SEVEN_COMMAND, + "version": sabnzbd.__version__, + } + ) # Add extra fields for field in extra_env_fields: try: if 
extra_env_fields[field] is not None: - env['SAB_' + field.upper()] = str(extra_env_fields[field]) + env["SAB_" + field.upper()] = str(extra_env_fields[field]) else: - env['SAB_' + field.upper()] = '' + env["SAB_" + field.upper()] = "" except: # Catch key errors pass if sabnzbd.DARWIN: - if 'PYTHONPATH' in env: - del env['PYTHONPATH'] - if 'PYTHONHOME' in env: - del env['PYTHONHOME'] + if "PYTHONPATH" in env: + del env["PYTHONPATH"] + if "PYTHONHOME" in env: + del env["PYTHONHOME"] elif not nzo: # No modification return None @@ -1952,8 +2093,8 @@ def userxbit(filename): # rwx rwx rwx # 876 543 210 # we want bit 6 from the right, counting from 0 - userxbit = 1<<6 # bit 6 - rwxbits = os.stat(filename)[0] # the first element of os.stat() is "mode" + userxbit = 1 << 6 # bit 6 + rwxbits = os.stat(filename)[0] # the first element of os.stat() is "mode" # do logical AND, check if it is not 0: xbitset = (rwxbits & userxbit) > 0 return xbitset @@ -1965,19 +2106,19 @@ def build_command(command, flatten_command=False): """ # command[0] should be set, and thus not None if not command[0]: - logging.error(T('[%s] The command in build_command is undefined.'), caller_name()) + logging.error(T("[%s] The command in build_command is undefined."), caller_name()) raise IOError if not sabnzbd.WIN32: - if command[0].endswith('.py'): - with open(command[0], 'r') as script_file: + if command[0].endswith(".py"): + with open(command[0], "r") as script_file: if not userxbit(command[0]): # Inform user that Python scripts need x-bit and then stop logging.error(T('Python script "%s" does not have execute (+x) permission set'), command[0]) raise IOError - elif script_file.read(2) != '#!': + elif script_file.read(2) != "#!": # No shebang (#!) 
defined, add default python - command.insert(0, 'python') + command.insert(0, "python") if IONICE_COMMAND and cfg.ionice().strip(): lst = cfg.ionice().split() @@ -1997,10 +2138,10 @@ def build_command(command, flatten_command=False): else: # For Windows we always need to add python interpreter - if command[0].endswith('.py'): - command.insert(0, 'python') + if command[0].endswith(".py"): + command.insert(0, "python") - need_shell = os.path.splitext(command[0])[1].lower() not in ('.exe', '.com') + need_shell = os.path.splitext(command[0])[1].lower() not in (".exe", ".com") stup = subprocess.STARTUPINFO() stup.dwFlags = win32process.STARTF_USESHOWWINDOW stup.wShowWindow = win32con.SW_HIDE @@ -2044,14 +2185,14 @@ def rar_volumelist(rarfile_path, password, known_volumes): # Sort the various RAR filename formats properly :\ def rar_sort(a, b): """ Define sort method for rar file names """ - aext = a.split('.')[-1] - bext = b.split('.')[-1] + aext = a.split(".")[-1] + bext = b.split(".")[-1] - if aext == 'rar' and bext == 'rar': + if aext == "rar" and bext == "rar": return cmp(a, b) - elif aext == 'rar': + elif aext == "rar": return -1 - elif bext == 'rar': + elif bext == "rar": return 1 else: return cmp(a, b) @@ -2119,27 +2260,27 @@ def quick_check_set(set, nzo): for file in md5pack: found = False - file_to_ignore = get_ext(file).replace('.', '') in ignore_ext + file_to_ignore = get_ext(file).replace(".", "") in ignore_ext for nzf in nzf_list: # Do a simple filename based check if file == nzf.filename: found = True if (nzf.md5sum is not None) and nzf.md5sum == md5pack[file]: - logging.debug('Quick-check of file %s OK', file) + logging.debug("Quick-check of file %s OK", file) result &= True elif file_to_ignore: # We don't care about these files - logging.debug('Quick-check ignoring file %s', file) + logging.debug("Quick-check ignoring file %s", file) result &= True else: - logging.info('Quick-check of file %s failed!', file) + logging.info("Quick-check of file %s 
failed!", file) result = False break # Now lets do obfuscation check if nzf.md5sum == md5pack[file]: try: - logging.debug('Quick-check will rename %s to %s', nzf.filename, file) + logging.debug("Quick-check will rename %s to %s", nzf.filename, file) renamer(os.path.join(nzo.downpath, nzf.filename), os.path.join(nzo.downpath, file)) renames[file] = nzf.filename nzf.filename = file @@ -2153,10 +2294,10 @@ def quick_check_set(set, nzo): if not found: if file_to_ignore: # We don't care about these files - logging.debug('Quick-check ignoring missing file %s', file) + logging.debug("Quick-check ignoring missing file %s", file) continue - logging.info('Cannot Quick-check missing file %s!', file) + logging.info("Cannot Quick-check missing file %s!", file) result = False # Save renames @@ -2172,7 +2313,7 @@ def unrar_check(rar): (version, original) """ version = 0 - original = '' + original = "" if rar: try: version = run_simple(rar) @@ -2190,9 +2331,9 @@ def unrar_check(rar): def par2_mt_check(par2_path): """ Detect if we have multicore par2 variants """ try: - par2_version = run_simple([par2_path, '-h']) + par2_version = run_simple([par2_path, "-h"]) # Look for a threads option - if '-t<' in par2_version: + if "-t<" in par2_version: return True except: pass @@ -2264,7 +2405,7 @@ def sfv_check(sfvs, nzo, workdir): verifynum = 0 for nzf in nzf_list: verifynum += 1 - nzo.set_action_line(T('Verifying'), '%02d/%02d' % (verifynum, verifytotal)) + nzo.set_action_line(T("Verifying"), "%02d/%02d" % (verifynum, verifytotal)) calculated_crc32[nzf.filename] = crc_calculate(os.path.join(workdir, nzf.filename)) sfv_parse_results = {} @@ -2279,27 +2420,27 @@ def sfv_check(sfvs, nzo, workdir): for file in sfv_parse_results: found = False - file_to_ignore = get_ext(file).replace('.', '') in ignore_ext + file_to_ignore = get_ext(file).replace(".", "") in ignore_ext for nzf in nzf_list: # Do a simple filename based check if file == nzf.filename: found = True if nzf.filename in 
calculated_crc32 and calculated_crc32[nzf.filename] == sfv_parse_results[file]: - logging.debug('SFV-check of file %s OK', file) + logging.debug("SFV-check of file %s OK", file) result &= True elif file_to_ignore: # We don't care about these files - logging.debug('SFV-check ignoring file %s', file) + logging.debug("SFV-check ignoring file %s", file) result &= True else: - logging.info('SFV-check of file %s failed!', file) + logging.info("SFV-check of file %s failed!", file) result = False break # Now lets do obfuscation check if nzf.filename in calculated_crc32 and calculated_crc32[nzf.filename] == sfv_parse_results[file]: try: - logging.debug('SFV-check will rename %s to %s', nzf.filename, file) + logging.debug("SFV-check will rename %s to %s", nzf.filename, file) renamer(os.path.join(nzo.downpath, nzf.filename), os.path.join(nzo.downpath, file)) renames[file] = nzf.filename nzf.filename = file @@ -2313,10 +2454,10 @@ def sfv_check(sfvs, nzo, workdir): if not found: if file_to_ignore: # We don't care about these files - logging.debug('SVF-check ignoring missing file %s', file) + logging.debug("SVF-check ignoring missing file %s", file) continue - logging.info('Cannot SFV-check missing file %s!', file) + logging.info("Cannot SFV-check missing file %s!", file) result = False # Save renames @@ -2351,7 +2492,7 @@ def crc_calculate(path): if not data: break crc = zlib.crc32(data, crc) - return b"%08x" % (crc & 0xffffffff) + return b"%08x" % (crc & 0xFFFFFFFF) def analyse_show(name): @@ -2361,67 +2502,83 @@ def analyse_show(name): if job.is_match(): job.get_values() info = job.show_info - show_name = info.get('show_name', '').replace('.', ' ').replace('_', ' ') - show_name = show_name.replace(' ', ' ') - return show_name, \ - info.get('season_num', ''), \ - info.get('episode_num', ''), \ - info.get('ep_name', '') + show_name = info.get("show_name", "").replace(".", " ").replace("_", " ") + show_name = show_name.replace(" ", " ") + return show_name, 
info.get("season_num", ""), info.get("episode_num", ""), info.get("ep_name", "") def pre_queue(nzo, pp, cat): """ Run pre-queue script (if any) and process results. pp and cat are supplied seperate since they can change. """ + def fix(p): # If added via API, some items can still be "None" (as a string) - if not p or str(p).lower() == 'none': - return '' + if not p or str(p).lower() == "none": + return "" return str(p) values = [1, nzo.final_name_with_password, pp, cat, nzo.script, nzo.priority, None] script_path = make_script_path(cfg.pre_script()) if script_path: # Basic command-line parameters - command = [script_path, nzo.final_name_with_password, pp, cat, nzo.script, nzo.priority, str(nzo.bytes), ' '.join(nzo.groups)] + command = [ + script_path, + nzo.final_name_with_password, + pp, + cat, + nzo.script, + nzo.priority, + str(nzo.bytes), + " ".join(nzo.groups), + ] command.extend(analyse_show(nzo.final_name_with_password)) command = [fix(arg) for arg in command] # Fields not in the NZO directly - extra_env_fields = {'groups': ' '.join(nzo.groups), - 'show_name': command[8], - 'show_season': command[9], - 'show_episode': command[10], - 'show_episode_name': command[11]} + extra_env_fields = { + "groups": " ".join(nzo.groups), + "show_name": command[8], + "show_season": command[9], + "show_episode": command[10], + "show_episode_name": command[11], + } try: stup, need_shell, command, creationflags = build_command(command) env = create_env(nzo, extra_env_fields) - logging.info('Running pre-queue script %s', command) - p = Popen(command, shell=need_shell, stdin=subprocess.PIPE, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, startupinfo=stup, env=env, - creationflags=creationflags) + logging.info("Running pre-queue script %s", command) + p = Popen( + command, + shell=need_shell, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + startupinfo=stup, + env=env, + creationflags=creationflags, + ) except: logging.debug("Failed script %s, 
Traceback: ", script_path, exc_info=True) return values output = platform_btou(p.stdout.read()) ret = p.wait() - logging.info('Pre-queue script returns %s and output=\n%s', ret, output) + logging.info("Pre-queue script returns %s and output=\n%s", ret, output) if ret == 0: n = 0 - for line in output.split('\n'): - line = line.strip('\r\n \'"') + for line in output.split("\n"): + line = line.strip("\r\n '\"") if n < len(values) and line: values[n] = line n += 1 accept = int_conv(values[0]) if accept < 1: - logging.info('Pre-Q refuses %s', nzo.final_name) + logging.info("Pre-Q refuses %s", nzo.final_name) elif accept == 2: - logging.info('Pre-Q accepts&fails %s', nzo.final_name) + logging.info("Pre-Q accepts&fails %s", nzo.final_name) else: - logging.info('Pre-Q accepts %s', nzo.final_name) + logging.info("Pre-Q accepts %s", nzo.final_name) return values @@ -2434,12 +2591,12 @@ def list2cmdline(lst): nlst.append('""') else: nlst.append('"%s"' % arg) - return ' '.join(nlst) + return " ".join(nlst) def is_sevenfile(path): """ Return True if path has proper extension and 7Zip is installed """ - return SEVEN_COMMAND and os.path.splitext(path)[1].lower() == '.7z' + return SEVEN_COMMAND and os.path.splitext(path)[1].lower() == ".7z" class SevenZip: @@ -2452,19 +2609,26 @@ class SevenZip: """ Return list of names in 7Zip """ names = [] # Future extension: use '-sccUTF-8' to get names in UTF8 encoding - command = [SEVEN_COMMAND, 'l', '-p', '-y', '-slt', self.path] + command = [SEVEN_COMMAND, "l", "-p", "-y", "-slt", self.path] stup, need_shell, command, creationflags = build_command(command) - p = Popen(command, shell=need_shell, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - startupinfo=stup, creationflags=creationflags) + p = Popen( + command, + shell=need_shell, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + startupinfo=stup, + creationflags=creationflags, + ) output = platform_btou(p.stdout.read()) _ = 
p.wait() - re_path = re.compile('^Path = (.+)') - for line in output.split('\n'): + re_path = re.compile("^Path = (.+)") + for line in output.split("\n"): m = re_path.search(line) if m: - names.append(m.group(1).strip('\r')) + names.append(m.group(1).strip("\r")) if names: # Remove name of archive itself del names[0] @@ -2472,12 +2636,19 @@ class SevenZip: def read(self, name): """ Read named file from 7Zip and return data """ - command = [SEVEN_COMMAND, 'e', '-p', '-y', '-so', self.path, name] + command = [SEVEN_COMMAND, "e", "-p", "-y", "-so", self.path, name] stup, need_shell, command, creationflags = build_command(command) # Ignore diagnostic output, otherwise it will be appended to content - p = Popen(command, shell=need_shell, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, - startupinfo=stup, creationflags=creationflags) + p = Popen( + command, + shell=need_shell, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + startupinfo=stup, + creationflags=creationflags, + ) output = platform_btou(p.stdout.read()) _ = p.wait() return output diff --git a/sabnzbd/newswrapper.py b/sabnzbd/newswrapper.py index 34b9e28..fcbe707 100644 --- a/sabnzbd/newswrapper.py +++ b/sabnzbd/newswrapper.py @@ -43,7 +43,7 @@ socket.setdefaulttimeout(DEF_TIMEOUT) def _retrieve_info(server): """ Async attempt to run getaddrinfo() for specified server """ - logging.debug('Retrieving server address information for %s', server.host) + logging.debug("Retrieving server address information for %s", server.host) info = GetServerParms(server.host, server.port) if not info: server.bad_cons += server.threads @@ -67,12 +67,12 @@ def GetServerParms(host, port): except: port = 119 opt = sabnzbd.cfg.ipv6_servers() - ''' ... with the following meaning for 'opt': + """ ... with the following meaning for 'opt': Control the use of IPv6 Usenet server addresses. 
Meaning: 0 = don't use 1 = use when available and reachable (DEFAULT) 2 = force usage (when SABnzbd's detection fails) - ''' + """ try: # Standard IPV4 or IPV6 ips = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM) @@ -82,13 +82,14 @@ def GetServerParms(host, port): return ips else: # IPv6 unreachable or not allowed by user, so only return IPv4 address(es): - return [ip for ip in ips if ':' not in ip[4][0]] + return [ip for ip in ips if ":" not in ip[4][0]] except: if opt == 2 or (opt == 1 and sabnzbd.EXTERNAL_IPV6) or (opt == 1 and sabnzbd.cfg.load_balancing() == 2): try: # Try IPV6 explicitly - return socket.getaddrinfo(host, port, socket.AF_INET6, - socket.SOCK_STREAM, socket.IPPROTO_IP, socket.AI_CANONNAME) + return socket.getaddrinfo( + host, port, socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_IP, socket.AI_CANONNAME + ) except: # Nothing found! pass @@ -101,8 +102,9 @@ def con(sock, host, port, sslenabled, write_fds, nntp): sock.setblocking(0) if sslenabled: # Log SSL/TLS info - logging.info("%s@%s: Connected using %s (%s)", - nntp.nw.thrdnum, nntp.nw.server.host, sock.version(), sock.cipher()[0]) + logging.info( + "%s@%s: Connected using %s (%s)", nntp.nw.thrdnum, nntp.nw.server.host, sock.version(), sock.cipher()[0] + ) nntp.nw.server.ssl_info = "%s (%s)" % (sock.version(), sock.cipher()[0]) # Now it's safe to add the socket to the list of active sockets. 
@@ -132,7 +134,7 @@ def con(sock, host, port, sslenabled, write_fds, nntp): class NNTP: # Pre-define attributes to save memory - __slots__ = ('host', 'port', 'nw', 'blocking', 'error_msg', 'sock') + __slots__ = ("host", "port", "nw", "blocking", "error_msg", "sock") def __init__(self, host, port, info, sslenabled, nw, block=False, write_fds=None): self.host = host @@ -192,8 +194,13 @@ class NNTP: self.sock.connect((self.host, self.port)) if sslenabled: # Log SSL/TLS info - logging.info("%s@%s: Connected using %s (%s)", - self.nw.thrdnum, self.nw.server.host, self.sock.version(), self.sock.cipher()[0]) + logging.info( + "%s@%s: Connected using %s (%s)", + self.nw.thrdnum, + self.nw.server.host, + self.sock.version(), + self.sock.cipher()[0], + ) self.nw.server.ssl_info = "%s (%s)" % (self.sock.version(), self.sock.cipher()[0]) except (ssl.SSLError, ssl.CertificateError) as e: @@ -216,23 +223,25 @@ class NNTP: def error(self, error): raw_error_str = str(error) - if 'SSL23_GET_SERVER_HELLO' in str(error) or 'SSL3_GET_RECORD' in raw_error_str: - error = T('This server does not allow SSL on this port') + if "SSL23_GET_SERVER_HELLO" in str(error) or "SSL3_GET_RECORD" in raw_error_str: + error = T("This server does not allow SSL on this port") # Catch certificate errors - if type(error) == ssl.CertificateError or 'CERTIFICATE_VERIFY_FAILED' in raw_error_str: + if type(error) == ssl.CertificateError or "CERTIFICATE_VERIFY_FAILED" in raw_error_str: # Log the raw message for debug purposes - logging.info('Certificate error for host %s: %s', self.nw.server.host, raw_error_str) + logging.info("Certificate error for host %s: %s", self.nw.server.host, raw_error_str) # Try to see if we should catch this message and provide better text - if 'hostname' in raw_error_str: - raw_error_str = T('Certificate hostname mismatch: the server hostname is not listed in the certificate. 
This is a server issue.') - elif 'certificate verify failed' in raw_error_str: - raw_error_str = T('Certificate not valid. This is most probably a server issue.') + if "hostname" in raw_error_str: + raw_error_str = T( + "Certificate hostname mismatch: the server hostname is not listed in the certificate. This is a server issue." + ) + elif "certificate verify failed" in raw_error_str: + raw_error_str = T("Certificate not valid. This is most probably a server issue.") # Reformat error - error = T('Server %s uses an untrusted certificate [%s]') % (self.nw.server.host, raw_error_str) - error = '%s - %s: %s' % (error, T('Wiki'), 'https://sabnzbd.org/certificate-errors') + error = T("Server %s uses an untrusted certificate [%s]") % (self.nw.server.host, raw_error_str) + error = "%s - %s: %s" % (error, T("Wiki"), "https://sabnzbd.org/certificate-errors") # Prevent throwing a lot of errors or when testing server if error not in self.nw.server.warning and not self.blocking: @@ -254,8 +263,24 @@ class NNTP: class NewsWrapper: # Pre-define attributes to save memory - __slots__ = ('server', 'thrdnum', 'blocking', 'timeout', 'article', 'data', 'last_line', 'nntp', - 'recv', 'connected', 'user_sent', 'pass_sent', 'group', 'user_ok', 'pass_ok', 'force_login') + __slots__ = ( + "server", + "thrdnum", + "blocking", + "timeout", + "article", + "data", + "last_line", + "nntp", + "recv", + "connected", + "user_sent", + "pass_sent", + "group", + "user_ok", + "pass_ok", + "force_login", + ) def __init__(self, server, thrdnum, block=False): self.server = server @@ -265,7 +290,7 @@ class NewsWrapper: self.timeout = None self.article = None self.data = [] - self.last_line = '' + self.last_line = "" self.nntp = None self.recv = None @@ -296,8 +321,9 @@ class NewsWrapper: self.server.info = GetServerParms(self.server.host, self.server.port) # Construct NNTP object and shorthands - self.nntp = NNTP(self.server.hostip, self.server.port, self.server.info, self.server.ssl, - self, 
self.blocking, write_fds) + self.nntp = NNTP( + self.server.hostip, self.server.port, self.server.info, self.server.ssl, self, self.blocking, write_fds + ) self.recv = self.nntp.sock.recv self.timeout = time.time() + self.server.timeout @@ -312,7 +338,7 @@ class NewsWrapper: if code == 501 and self.user_sent: # Change to a sensible text code = 481 - self.data[0] = "%d %s" % (code, T('Authentication failed, check username/password.')) + self.data[0] = "%d %s" % (code, T("Authentication failed, check username/password.")) self.user_ok = True self.pass_sent = True @@ -327,7 +353,7 @@ class NewsWrapper: if code in (400, 502): raise NNTPPermanentError(nntp_to_msg(self.data)) elif not self.user_sent: - command = utob('authinfo user %s\r\n' % self.server.username) + command = utob("authinfo user %s\r\n" % self.server.username) self.nntp.sock.sendall(command) self.data = [] self.user_sent = True @@ -342,7 +368,7 @@ class NewsWrapper: self.connected = True if self.user_ok and not self.pass_sent: - command = utob('authinfo pass %s\r\n' % self.server.password) + command = utob("authinfo pass %s\r\n" % self.server.password) self.nntp.sock.sendall(command) self.data = [] self.pass_sent = True @@ -359,19 +385,19 @@ class NewsWrapper: self.timeout = time.time() + self.server.timeout if precheck: if self.server.have_stat: - command = utob('STAT <%s>\r\n' % (self.article.article)) + command = utob("STAT <%s>\r\n" % (self.article.article)) else: - command = utob('HEAD <%s>\r\n' % (self.article.article)) + command = utob("HEAD <%s>\r\n" % (self.article.article)) elif self.server.have_body: - command = utob('BODY <%s>\r\n' % (self.article.article)) + command = utob("BODY <%s>\r\n" % (self.article.article)) else: - command = utob('ARTICLE <%s>\r\n' % (self.article.article)) + command = utob("ARTICLE <%s>\r\n" % (self.article.article)) self.nntp.sock.sendall(command) self.data = [] def send_group(self, group): self.timeout = time.time() + self.server.timeout - command = utob('GROUP 
%s\r\n' % (group)) + command = utob("GROUP %s\r\n" % (group)) self.nntp.sock.sendall(command) self.data = [] @@ -403,13 +429,13 @@ class NewsWrapper: # Official end-of-article is ".\r\n" but sometimes it can get lost between 2 chunks chunk_len = len(chunk) - if chunk[-5:] == b'\r\n.\r\n': + if chunk[-5:] == b"\r\n.\r\n": return (chunk_len, True, False) elif chunk_len < 5 and len(self.data) > 1: # We need to make sure the end is not split over 2 chunks # This is faster than join() combine_chunk = self.data[-2][-5:] + chunk - if combine_chunk[-5:] == b'\r\n.\r\n': + if combine_chunk[-5:] == b"\r\n.\r\n": return (chunk_len, True, False) # Still in middle of data, so continue! @@ -422,13 +448,13 @@ class NewsWrapper: def clear_data(self): self.data = [] - self.last_line = '' + self.last_line = "" def hard_reset(self, wait=True, send_quit=True): if self.nntp: try: if send_quit: - self.nntp.sock.sendall(b'QUIT\r\n') + self.nntp.sock.sendall(b"QUIT\r\n") time.sleep(0.1) self.nntp.sock.close() except: @@ -449,7 +475,7 @@ class NewsWrapper: if self.nntp: try: if quit: - self.nntp.sock.sendall(b'QUIT\r\n') + self.nntp.sock.sendall(b"QUIT\r\n") time.sleep(0.1) self.nntp.sock.close() except: diff --git a/sabnzbd/nzbqueue.py b/sabnzbd/nzbqueue.py index 4a17ad2..463205c 100644 --- a/sabnzbd/nzbqueue.py +++ b/sabnzbd/nzbqueue.py @@ -32,10 +32,23 @@ from sabnzbd.filesystem import get_admin_path, remove_all, globber_full, remove_ from sabnzbd.panic import panic_queue import sabnzbd.database as database from sabnzbd.decorators import NzbQueueLocker -from sabnzbd.constants import QUEUE_FILE_NAME, QUEUE_VERSION, FUTURE_Q_FOLDER, \ - JOB_ADMIN, LOW_PRIORITY, NORMAL_PRIORITY, HIGH_PRIORITY, TOP_PRIORITY, \ - REPAIR_PRIORITY, STOP_PRIORITY, VERIFIED_FILE, \ - Status, IGNORED_FOLDERS, QNFO, DIRECT_WRITE_TRIGGER +from sabnzbd.constants import ( + QUEUE_FILE_NAME, + QUEUE_VERSION, + FUTURE_Q_FOLDER, + JOB_ADMIN, + LOW_PRIORITY, + NORMAL_PRIORITY, + HIGH_PRIORITY, + TOP_PRIORITY, + 
REPAIR_PRIORITY, + STOP_PRIORITY, + VERIFIED_FILE, + Status, + IGNORED_FOLDERS, + QNFO, + DIRECT_WRITE_TRIGGER, +) import sabnzbd.cfg as cfg import sabnzbd.downloader @@ -47,6 +60,7 @@ from sabnzbd.dirscanner import process_single_nzb class NzbQueue: """ Singleton NzbQueue """ + do = None def __init__(self): @@ -71,14 +85,16 @@ class NzbQueue: queue_vers, nzo_ids, _ = data if not queue_vers == QUEUE_VERSION: nzo_ids = [] - logging.error(T('Incompatible queuefile found, cannot proceed')) + logging.error(T("Incompatible queuefile found, cannot proceed")) if not repair: panic_queue(os.path.join(cfg.admin_dir.get_path(), QUEUE_FILE_NAME)) exit_sab(2) except: nzo_ids = [] - logging.error(T('Error loading %s, corrupt file detected'), - os.path.join(cfg.admin_dir.get_path(), QUEUE_FILE_NAME)) + logging.error( + T("Error loading %s, corrupt file detected"), + os.path.join(cfg.admin_dir.get_path(), QUEUE_FILE_NAME), + ) # First handle jobs in the queue file folders = [] @@ -103,7 +119,7 @@ class NzbQueue: for item in globber_full(os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)): path, nzo_id = os.path.split(item) if nzo_id not in self.__nzo_table: - if nzo_id.startswith('SABnzbd_nzo'): + if nzo_id.startswith("SABnzbd_nzo"): nzo = sabnzbd.load_data(nzo_id, path, remove=True) if nzo: self.add(nzo, save=True) @@ -130,20 +146,25 @@ class NzbQueue: # Retryable folders from History items = sabnzbd.api.build_history(output=True)[0] # Anything waiting or active or retryable is a known item - registered.extend([os.path.basename(item['path']) - for item in items if item['retry'] or item['loaded'] or item['status'] == Status.QUEUED]) + registered.extend( + [ + os.path.basename(item["path"]) + for item in items + if item["retry"] or item["loaded"] or item["status"] == Status.QUEUED + ] + ) # Repair unregistered folders for folder in globber_full(cfg.download_dir.get_path()): name = os.path.basename(folder) if os.path.isdir(folder) and name not in registered and name not in 
IGNORED_FOLDERS: if action: - logging.info('Repairing job %s', folder) + logging.info("Repairing job %s", folder) self.repair_job(folder) result.append(os.path.basename(folder)) else: if action: - logging.info('Skipping repair for job %s', folder) + logging.info("Skipping repair for job %s", folder) return result def repair_job(self, folder, new_nzb=None, password=None): @@ -154,40 +175,49 @@ class NzbQueue: name = os.path.basename(folder) path = os.path.join(folder, JOB_ADMIN) - if hasattr(new_nzb, 'filename'): + if hasattr(new_nzb, "filename"): filename = new_nzb.filename else: - filename = '' + filename = "" if not filename: # Was this file already post-processed? verified = sabnzbd.load_data(VERIFIED_FILE, path, remove=False) if not verified or not all(verified[x] for x in verified): - filename = globber_full(path, '*.gz') + filename = globber_full(path, "*.gz") if len(filename) > 0: - logging.debug('Repair job %s by re-parsing stored NZB', name) - nzo_id = sabnzbd.add_nzbfile(filename[0], pp=None, script=None, cat=None, priority=None, nzbname=name, - reuse=True, password=password)[1] + logging.debug("Repair job %s by re-parsing stored NZB", name) + nzo_id = sabnzbd.add_nzbfile( + filename[0], + pp=None, + script=None, + cat=None, + priority=None, + nzbname=name, + reuse=True, + password=password, + )[1] else: - logging.debug('Repair job %s without stored NZB', name) - nzo = NzbObject(name, pp=None, script=None, nzb='', cat=None, priority=None, nzbname=name, reuse=True) + logging.debug("Repair job %s without stored NZB", name) + nzo = NzbObject(name, pp=None, script=None, nzb="", cat=None, priority=None, nzbname=name, reuse=True) nzo.password = password self.add(nzo) nzo_id = nzo.nzo_id else: - remove_all(path, '*.gz') - logging.debug('Repair job %s with new NZB (%s)', name, filename) - nzo_id = sabnzbd.add_nzbfile(new_nzb, pp=None, script=None, cat=None, priority=None, nzbname=name, - reuse=True, password=password)[1] + remove_all(path, "*.gz") + 
logging.debug("Repair job %s with new NZB (%s)", name, filename) + nzo_id = sabnzbd.add_nzbfile( + new_nzb, pp=None, script=None, cat=None, priority=None, nzbname=name, reuse=True, password=password + )[1] return nzo_id @NzbQueueLocker def send_back(self, nzo): """ Send back job to queue after successful pre-check """ try: - nzb_path = globber_full(nzo.workpath, '*.gz')[0] + nzb_path = globber_full(nzo.workpath, "*.gz")[0] except: - logging.debug('Failed to find NZB file after pre-check (%s)', nzo.nzo_id) + logging.debug("Failed to find NZB file after pre-check (%s)", nzo.nzo_id) return # Need to remove it first, otherwise it might still be downloading @@ -221,14 +251,25 @@ class NzbQueue: def generate_future(self, msg, pp=None, script=None, cat=None, url=None, priority=NORMAL_PRIORITY, nzbname=None): """ Create and return a placeholder nzo object """ - logging.debug('Creating placeholder NZO') - future_nzo = NzbObject(msg, pp, script, None, futuretype=True, cat=cat, url=url, priority=priority, nzbname=nzbname, status=Status.GRABBING) + logging.debug("Creating placeholder NZO") + future_nzo = NzbObject( + msg, + pp, + script, + None, + futuretype=True, + cat=cat, + url=url, + priority=priority, + nzbname=nzbname, + status=Status.GRABBING, + ) self.add(future_nzo) return future_nzo def change_opts(self, nzo_ids, pp): result = 0 - for nzo_id in [item.strip() for item in nzo_ids.split(',')]: + for nzo_id in [item.strip() for item in nzo_ids.split(",")]: if nzo_id in self.__nzo_table: self.__nzo_table[nzo_id].set_pp(pp) result += 1 @@ -236,20 +277,20 @@ class NzbQueue: def change_script(self, nzo_ids, script): result = 0 - for nzo_id in [item.strip() for item in nzo_ids.split(',')]: + for nzo_id in [item.strip() for item in nzo_ids.split(",")]: if nzo_id in self.__nzo_table: self.__nzo_table[nzo_id].script = script - logging.info('Set script=%s for job %s', script, self.__nzo_table[nzo_id].final_name) + logging.info("Set script=%s for job %s", script, 
self.__nzo_table[nzo_id].final_name) result += 1 return result def change_cat(self, nzo_ids, cat, explicit_priority=None): result = 0 - for nzo_id in [item.strip() for item in nzo_ids.split(',')]: + for nzo_id in [item.strip() for item in nzo_ids.split(",")]: if nzo_id in self.__nzo_table: nzo = self.__nzo_table[nzo_id] nzo.cat, pp, nzo.script, prio = cat_to_opts(cat) - logging.info('Set cat=%s for job %s', cat, nzo.final_name) + logging.info("Set cat=%s for job %s", cat, nzo.final_name) nzo.set_pp(pp) if explicit_priority is None: self.set_priority(nzo_id, prio) @@ -261,7 +302,7 @@ class NzbQueue: def change_name(self, nzo_id, name, password=None): if nzo_id in self.__nzo_table: nzo = self.__nzo_table[nzo_id] - logging.info('Renaming %s to %s', nzo.final_name, name) + logging.info("Renaming %s to %s", nzo.final_name, name) # Abort any ongoing unpacking if the name changed (dirs change) nzo.abort_direct_unpacker() if not nzo.futuretype: @@ -283,12 +324,12 @@ class NzbQueue: @NzbQueueLocker def add(self, nzo, save=True, quiet=False): if not nzo.nzo_id: - nzo.nzo_id = sabnzbd.get_new_id('nzo', nzo.workpath, self.__nzo_table) + nzo.nzo_id = sabnzbd.get_new_id("nzo", nzo.workpath, self.__nzo_table) # If no files are to be downloaded anymore, send to postproc if not nzo.files and not nzo.futuretype: self.end_job(nzo) - return '' + return "" # Reset try_lists nzo.reset_try_list() @@ -330,7 +371,7 @@ class NzbQueue: self.save(nzo) if not (quiet or nzo.status == Status.FETCHING): - notifier.send_notification(T('NZB added to queue'), nzo.filename, 'download', nzo.cat) + notifier.send_notification(T("NZB added to queue"), nzo.filename, "download", nzo.cat) if not quiet and cfg.auto_sort(): self.sort_by_avg_age() @@ -344,7 +385,7 @@ class NzbQueue: """ if nzo_id in self.__nzo_table: nzo = self.__nzo_table.pop(nzo_id) - logging.info('[%s] Removing job %s', caller_name(), nzo.final_name) + logging.info("[%s] Removing job %s", caller_name(), nzo.final_name) # Set statuses 
nzo.deleted = True @@ -410,10 +451,10 @@ class NzbQueue: elif force_delete: # Force-remove all trace nzo.bytes -= nzf.bytes - nzo.bytes_tried -= (nzf.bytes - nzf.bytes_left) + nzo.bytes_tried -= nzf.bytes - nzf.bytes_left del nzo.files_table[nzf_id] nzo.finished_files.remove(nzf) - logging.info('Removed NZFs %s from job %s', removed, nzo.final_name) + logging.info("Removed NZFs %s from job %s", removed, nzo.final_name) return removed def pause_multiple_nzo(self, nzo_ids): @@ -492,7 +533,13 @@ class NzbQueue: item_id_pos2 = i if (item_id_pos1 > -1) and (item_id_pos2 > -1): item = self.__nzo_list[item_id_pos1] - logging.info('Switching job [%s] %s => [%s] %s', item_id_pos1, item.final_name, item_id_pos2, self.__nzo_list[item_id_pos2].final_name) + logging.info( + "Switching job [%s] %s => [%s] %s", + item_id_pos1, + item.final_name, + item_id_pos2, + self.__nzo_list[item_id_pos2].final_name, + ) del self.__nzo_list[item_id_pos1] self.__nzo_list.insert(item_id_pos2, item) return item_id_pos2, nzo1.priority @@ -538,17 +585,17 @@ class NzbQueue: def sort_queue(self, field, reverse=None): if isinstance(reverse, str): - if reverse.lower() == 'desc': + if reverse.lower() == "desc": reverse = True else: reverse = False if reverse is None: reverse = False - if field.lower() == 'name': + if field.lower() == "name": self.sort_by_name(reverse) - elif field.lower() == 'size' or field.lower() == 'bytes': + elif field.lower() == "size" or field.lower() == "bytes": self.sort_by_size(reverse) - elif field.lower() == 'avg_age': + elif field.lower() == "avg_age": self.sort_by_avg_age(reverse) else: logging.debug("Sort: %s not recognized", field) @@ -578,8 +625,11 @@ class NzbQueue: return nzo_id_pos1 nzo.set_priority(priority) - if sabnzbd.scheduler.analyse(False, priority) and \ - nzo.status in (Status.CHECKING, Status.DOWNLOADING, Status.QUEUED): + if sabnzbd.scheduler.analyse(False, priority) and nzo.status in ( + Status.CHECKING, + Status.DOWNLOADING, + Status.QUEUED, + ): 
nzo.status = Status.PAUSED elif nzo.status == Status.PAUSED: nzo.status = Status.QUEUED @@ -620,7 +670,9 @@ class NzbQueue: self.__nzo_list.append(nzo) pos = 0 - logging.info('Set priority=%s for job %s => position=%s ', priority, self.__nzo_table[nzo_id].final_name, pos) + logging.info( + "Set priority=%s for job %s => position=%s ", priority, self.__nzo_table[nzo_id].final_name, pos + ) return pos except: @@ -630,7 +682,7 @@ class NzbQueue: def set_priority(self, nzo_ids, priority): try: n = -1 - for nzo_id in [item.strip() for item in nzo_ids.split(',')]: + for nzo_id in [item.strip() for item in nzo_ids.split(",")]: n = self.__set_priority(nzo_id, priority) return n except: @@ -667,7 +719,11 @@ class NzbQueue: # Not when queue paused and not a forced item if nzo.status not in (Status.PAUSED, Status.GRABBING) or nzo.priority == TOP_PRIORITY: # Check if past propagation delay, or forced - if not propagtion_delay or nzo.priority == TOP_PRIORITY or (nzo.avg_stamp + propagtion_delay) < time.time(): + if ( + not propagtion_delay + or nzo.priority == TOP_PRIORITY + or (nzo.avg_stamp + propagtion_delay) < time.time() + ): if not nzo.server_in_try_list(server): article = nzo.get_article(server, servers) if article: @@ -690,7 +746,7 @@ class NzbQueue: articles_left, file_done, post_done = nzo.remove_article(article, success) if nzo.is_gone(): - logging.debug('Discarding article for file %s, no longer in queue', nzf.filename) + logging.debug("Discarding article for file %s, no longer in queue", nzf.filename) else: # Write data if file is done or at trigger time if file_done or (articles_left and (articles_left % DIRECT_WRITE_TRIGGER) == 0): @@ -699,12 +755,12 @@ class NzbQueue: # The type is only set if sabyenc could decode the article if nzf.filename and nzf.type: Assembler.do.process((nzo, nzf, file_done)) - elif nzf.filename.lower().endswith('.par2'): + elif nzf.filename.lower().endswith(".par2"): # Broken par2 file, try to get another one nzo.promote_par2(nzf) else: 
if file_has_articles(nzf): - logging.warning(T('%s -> Unknown encoding'), nzf.filename) + logging.warning(T("%s -> Unknown encoding"), nzf.filename) # Save bookkeeping in case of crash if file_done and (nzo.next_save is None or time.time() > nzo.next_save): @@ -721,7 +777,7 @@ class NzbQueue: def end_job(self, nzo): """ Send NZO to the post-processing queue """ - logging.info('[%s] Ending job %s', caller_name(), nzo.final_name) + logging.info("[%s] Ending job %s", caller_name(), nzo.final_name) # Notify assembler to call postprocessor if not nzo.deleted: @@ -808,7 +864,7 @@ class NzbQueue: empty = [] for nzo in self.__nzo_list: if not nzo.futuretype and not nzo.files and nzo.status not in (Status.PAUSED, Status.GRABBING): - logging.info('Found idle job %s', nzo.final_name) + logging.info("Found idle job %s", nzo.final_name) empty.append(nzo) # Stall prevention by checking if all servers are in the trylist @@ -818,11 +874,11 @@ class NzbQueue: for nzf in nzo.files: if len(nzf.try_list) == sabnzbd.downloader.Downloader.do.server_nr: # We do not want to reset all article trylists, they are good - logging.info('Resetting bad trylist for file %s in job %s', nzf.filename, nzo.final_name) + logging.info("Resetting bad trylist for file %s in job %s", nzf.filename, nzo.final_name) nzf.reset_try_list() # Reset main trylist, minimal performance impact - logging.info('Resetting bad trylist for job %s', nzo.final_name) + logging.info("Resetting bad trylist for job %s", nzo.final_name) nzo.reset_try_list() for nzo in empty: @@ -859,7 +915,7 @@ class NzbQueue: nzo = self.__nzo_table[nzo_id] if nzo.futuretype: url = nzo.url - if nzo.futuretype and url.lower().startswith('http'): + if nzo.futuretype and url.lower().startswith("http"): lst.append((url, nzo)) return lst diff --git a/sabnzbd/nzbstuff.py b/sabnzbd/nzbstuff.py index e3bb513..c9687d6 100644 --- a/sabnzbd/nzbstuff.py +++ b/sabnzbd/nzbstuff.py @@ -30,16 +30,55 @@ import difflib # SABnzbd modules import sabnzbd -from 
sabnzbd.constants import GIGI, ATTRIB_FILE, JOB_ADMIN, \ - REPAIR_PRIORITY, TOP_PRIORITY, HIGH_PRIORITY, NORMAL_PRIORITY, \ - LOW_PRIORITY, DEFAULT_PRIORITY, PAUSED_PRIORITY, DUP_PRIORITY, STOP_PRIORITY, \ - RENAMES_FILE, MAX_BAD_ARTICLES, Status, PNFO -from sabnzbd.misc import to_units, cat_to_opts, cat_convert, int_conv, \ - format_time_string, calc_age, cmp, caller_name, opts_to_pp, pp_to_opts -from sabnzbd.filesystem import sanitize_foldername, get_unique_path, get_admin_path, \ - remove_all, sanitize_filename, globber_full, set_permissions, long_path, \ - trim_win_path, fix_unix_encoding, is_obfuscated_filename, get_ext, get_filename, \ - get_unique_filename, renamer, remove_file, get_filepath +from sabnzbd.constants import ( + GIGI, + ATTRIB_FILE, + JOB_ADMIN, + REPAIR_PRIORITY, + TOP_PRIORITY, + HIGH_PRIORITY, + NORMAL_PRIORITY, + LOW_PRIORITY, + DEFAULT_PRIORITY, + PAUSED_PRIORITY, + DUP_PRIORITY, + STOP_PRIORITY, + RENAMES_FILE, + MAX_BAD_ARTICLES, + Status, + PNFO, +) +from sabnzbd.misc import ( + to_units, + cat_to_opts, + cat_convert, + int_conv, + format_time_string, + calc_age, + cmp, + caller_name, + opts_to_pp, + pp_to_opts, +) +from sabnzbd.filesystem import ( + sanitize_foldername, + get_unique_path, + get_admin_path, + remove_all, + sanitize_filename, + globber_full, + set_permissions, + long_path, + trim_win_path, + fix_unix_encoding, + is_obfuscated_filename, + get_ext, + get_filename, + get_unique_filename, + renamer, + remove_file, + get_filepath, +) from sabnzbd.decorators import synchronized import sabnzbd.config as config import sabnzbd.cfg as cfg @@ -50,10 +89,10 @@ from sabnzbd.rating import Rating # Name patterns SUBJECT_FN_MATCHER = re.compile(r'"([^"]*)"') -RE_NORMAL_NAME = re.compile(r'\.\w{1,5}$') # Test reasonably sized extension at the end -RE_QUICK_PAR2_CHECK = re.compile(r'\.par2\W*', re.I) -RE_RAR = re.compile(r'(\.rar|\.r\d\d|\.s\d\d|\.t\d\d|\.u\d\d|\.v\d\d)$', re.I) -RE_PROPER = re.compile(r'(^|[\. 
_-])(PROPER|REAL|REPACK)([\. _-]|$)') +RE_NORMAL_NAME = re.compile(r"\.\w{1,5}$") # Test reasonably sized extension at the end +RE_QUICK_PAR2_CHECK = re.compile(r"\.par2\W*", re.I) +RE_RAR = re.compile(r"(\.rar|\.r\d\d|\.s\d\d|\.t\d\d|\.u\d\d|\.v\d\d)$", re.I) +RE_PROPER = re.compile(r"(^|[\. _-])(PROPER|REAL|REPACK)([\. _-]|$)") ############################################################################## @@ -62,11 +101,13 @@ RE_PROPER = re.compile(r'(^|[\. _-])(PROPER|REAL|REPACK)([\. _-]|$)') TRYLIST_LOCK = threading.Lock() + class TryList: """ TryList keeps track of which servers have been tried for a specific article """ + # Pre-define attributes to save memory - __slots__ = ('try_list', 'fetcher_priority') + __slots__ = ("try_list", "fetcher_priority") def __init__(self): self.try_list = [] @@ -102,15 +143,14 @@ class TryList: ############################################################################## # Article ############################################################################## -ArticleSaver = ( - 'article', 'art_id', 'bytes', 'lowest_partnum', 'decoded', 'on_disk', 'nzf' -) +ArticleSaver = ("article", "art_id", "bytes", "lowest_partnum", "decoded", "on_disk", "nzf") class Article(TryList): """ Representation of one article """ + # Pre-define attributes to save memory - __slots__ = ArticleSaver + ('fetcher', 'fetcher_priority', 'tries') + __slots__ = ArticleSaver + ("fetcher", "fetcher_priority", "tries") def __init__(self, article, article_bytes, nzf): TryList.__init__(self) @@ -129,39 +169,48 @@ class Article(TryList): log = sabnzbd.LOG_ALL if not self.fetcher and not self.server_in_try_list(server): if log: - logging.debug('Article %s | Server: %s | in second if', self.article, server.host) + logging.debug("Article %s | Server: %s | in second if", self.article, server.host) # Is the current selected server of the same priority as this article? 
if log: - logging.debug('Article %s | Server: %s | Article priority: %s', self.article, server.host, self.fetcher_priority) + logging.debug( + "Article %s | Server: %s | Article priority: %s", self.article, server.host, self.fetcher_priority + ) if log: - logging.debug('Article %s | Server: %s | Server priority: %s', self.article, server.host, server.priority) + logging.debug( + "Article %s | Server: %s | Server priority: %s", self.article, server.host, server.priority + ) if server.priority == self.fetcher_priority: if log: - logging.debug('Article %s | Server: %s | same priority, use it', self.article, server.host) + logging.debug("Article %s | Server: %s | same priority, use it", self.article, server.host) self.fetcher = server self.tries += 1 if log: - logging.debug('Article %s | Server: %s | Article-try: %s', self.article, server.host, self.tries) + logging.debug("Article %s | Server: %s | Article-try: %s", self.article, server.host, self.tries) return self else: if log: - logging.debug('Article %s | Server: %s | not the same priority', self.article, server.host) + logging.debug("Article %s | Server: %s | not the same priority", self.article, server.host) # No, so is it a lower priority? if server.priority > self.fetcher_priority: if log: - logging.debug('Article %s | Server: %s | lower priority', self.article, server.host) + logging.debug("Article %s | Server: %s | lower priority", self.article, server.host) # Is there an available server that is a higher priority? 
found_priority = 1000 # for server_check in config.get_servers(): for server_check in servers: if log: - logging.debug('Article %s | Server: %s | checking', self.article, server.host) + logging.debug("Article %s | Server: %s | checking", self.article, server.host) # if (server_check.priority() < found_priority and server_check.priority() < server.priority and not self.server_in_try_list(server_check)): if server_check.active and (server_check.priority < found_priority): if server_check.priority < server.priority: if not self.server_in_try_list(server_check): if log: - logging.debug('Article %s | Server: %s | setting found priority to %s', self.article, server.host, server_check.priority) + logging.debug( + "Article %s | Server: %s | setting found priority to %s", + self.article, + server.host, + server_check.priority, + ) found_priority = server_check.priority if found_priority == 1000: # If no higher priority servers, use this server @@ -169,15 +218,17 @@ class Article(TryList): self.fetcher = server self.tries += 1 if log: - logging.debug('Article %s | Server: %s | Article-try: %s', self.article, server.host, self.tries) + logging.debug( + "Article %s | Server: %s | Article-try: %s", self.article, server.host, self.tries + ) return self else: # There is a higher priority server, so set article priority if log: - logging.debug('Article %s | Server: %s | setting self priority', self.article, server.host) + logging.debug("Article %s | Server: %s | setting self priority", self.article, server.host) self.fetcher_priority = found_priority if log: - logging.debug('Article %s | Server: %s | Returning None', self.article, server.host) + logging.debug("Article %s | Server: %s | Returning None", self.article, server.host) return None def get_art_id(self): @@ -206,7 +257,7 @@ class Article(TryList): dict_ = {} for item in ArticleSaver: dict_[item] = getattr(self, item) - dict_['try_list'] = TryList.__getstate__(self) + dict_["try_list"] = TryList.__getstate__(self) return 
dict_ def __setstate__(self, dict_): @@ -217,7 +268,7 @@ class Article(TryList): except KeyError: # Handle new attributes setattr(self, item, None) - TryList.__setstate__(self, dict_.get('try_list', [])) + TryList.__setstate__(self, dict_.get("try_list", [])) self.fetcher_priority = 0 self.fetcher = None self.tries = 0 @@ -234,25 +285,42 @@ class Article(TryList): return id(self) def __repr__(self): - return "" % \ - (self.article, self.bytes, self.art_id) + return "" % (self.article, self.bytes, self.art_id) ############################################################################## # NzbFile ############################################################################## NzbFileSaver = ( - 'date', 'subject', 'filename', 'filename_checked', 'filepath', 'type', - 'is_par2', 'vol', 'blocks', 'setname', 'articles', 'decodetable', 'bytes', - 'bytes_left', 'nzo', 'nzf_id', 'deleted', 'valid', 'import_finished', 'md5sum', - 'md5of16k' + "date", + "subject", + "filename", + "filename_checked", + "filepath", + "type", + "is_par2", + "vol", + "blocks", + "setname", + "articles", + "decodetable", + "bytes", + "bytes_left", + "nzo", + "nzf_id", + "deleted", + "valid", + "import_finished", + "md5sum", + "md5of16k", ) class NzbFile(TryList): """ Representation of one file consisting of multiple articles """ - # Pre-define attributes to save memory - __slots__ = NzbFileSaver + ('md5',) + + # Pre-define attributes to save memory + __slots__ = NzbFileSaver + ("md5",) def __init__(self, date, subject, raw_article_db, file_bytes, nzo): """ Setup object """ @@ -381,7 +449,7 @@ class NzbFile(TryList): def remove_admin(self): """ Remove article database from disk (sabnzbd_nzf_)""" try: - logging.debug('Removing article database for %s', self.nzf_id) + logging.debug("Removing article database for %s", self.nzf_id) remove_file(os.path.join(self.nzo.workpath, self.nzf_id)) except: pass @@ -391,7 +459,7 @@ class NzbFile(TryList): dict_ = {} for item in NzbFileSaver: dict_[item] = 
getattr(self, item) - dict_['try_list'] = TryList.__getstate__(self) + dict_["try_list"] = TryList.__getstate__(self) return dict_ def __setstate__(self, dict_): @@ -402,7 +470,7 @@ class NzbFile(TryList): except KeyError: # Handle new attributes setattr(self, item, None) - TryList.__setstate__(self, dict_.get('try_list', [])) + TryList.__setstate__(self, dict_.get("try_list", [])) # Convert 2.x.x jobs if isinstance(self.decodetable, dict): @@ -430,14 +498,61 @@ class NzbFile(TryList): # NzbObject ############################################################################## NzbObjectSaver = ( - 'filename', 'work_name', 'final_name', 'created', 'bytes', 'bytes_downloaded', 'bytes_tried', - 'bytes_missing', 'repair', 'unpack', 'delete', 'script', 'cat', 'url', 'groups', 'avg_date', - 'md5of16k', 'partable', 'extrapars', 'md5packs', 'files', 'files_table', 'finished_files', - 'status', 'avg_bps_freq', 'avg_bps_total', 'priority', 'saved_articles', 'nzo_id', - 'futuretype', 'deleted', 'parsed', 'action_line', 'unpack_info', 'fail_msg', 'nzo_info', - 'custom_name', 'password', 'next_save', 'save_timeout', 'encrypted', 'bad_articles', - 'duplicate', 'oversized', 'precheck', 'incomplete', 'reuse', 'meta', 'first_articles', - 'first_articles_count', 'md5sum', 'servercount', 'unwanted_ext', 'renames', 'rating_filtered' + "filename", + "work_name", + "final_name", + "created", + "bytes", + "bytes_downloaded", + "bytes_tried", + "bytes_missing", + "repair", + "unpack", + "delete", + "script", + "cat", + "url", + "groups", + "avg_date", + "md5of16k", + "partable", + "extrapars", + "md5packs", + "files", + "files_table", + "finished_files", + "status", + "avg_bps_freq", + "avg_bps_total", + "priority", + "saved_articles", + "nzo_id", + "futuretype", + "deleted", + "parsed", + "action_line", + "unpack_info", + "fail_msg", + "nzo_info", + "custom_name", + "password", + "next_save", + "save_timeout", + "encrypted", + "bad_articles", + "duplicate", + "oversized", + "precheck", + 
"incomplete", + "reuse", + "meta", + "first_articles", + "first_articles_count", + "md5sum", + "servercount", + "unwanted_ext", + "renames", + "rating_filtered", ) # Lock to prevent errors when saving the NZO data @@ -445,16 +560,27 @@ NZO_LOCK = threading.RLock() class NzbObject(TryList): - - def __init__(self, filename, pp, script, nzb=None, - futuretype=False, cat=None, url=None, - priority=NORMAL_PRIORITY, nzbname=None, status=Status.QUEUED, nzo_info=None, - reuse=False, dup_check=True): + def __init__( + self, + filename, + pp, + script, + nzb=None, + futuretype=False, + cat=None, + url=None, + priority=NORMAL_PRIORITY, + nzbname=None, + status=Status.QUEUED, + nzo_info=None, + reuse=False, + dup_check=True, + ): TryList.__init__(self) - self.filename = filename # Original filename + self.filename = filename # Original filename if nzbname and nzb: - self.work_name = nzbname # Use nzbname if set and only for non-future slot + self.work_name = nzbname # Use nzbname if set and only for non-future slot else: self.work_name = filename @@ -483,40 +609,40 @@ class NzbObject(TryList): else: r, u, d = pp_to_opts(pp) - self.set_priority(priority) # Parse priority of input - self.repair = r # True if we want to repair this set - self.unpack = u # True if we want to unpack this set - self.delete = d # True if we want to delete this set - self.script = script # External script for this set - self.cat = cat # User-set category + self.set_priority(priority) # Parse priority of input + self.repair = r # True if we want to repair this set + self.unpack = u # True if we want to unpack this set + self.delete = d # True if we want to delete this set + self.script = script # External script for this set + self.cat = cat # User-set category # Information fields self.url = url or filename self.groups = [] self.avg_date = datetime.datetime(1970, 1, 1, 1, 0) - self.avg_stamp = 0.0 # Avg age in seconds (calculated from avg_age) + self.avg_stamp = 0.0 # Avg age in seconds (calculated 
from avg_age) # Bookkeeping values self.meta = {} - self.servercount = {} # Dict to keep bytes per server - self.created = False # dirprefixes + work_name created - self.direct_unpacker = None # Holds the DirectUnpacker instance - self.bytes = 0 # Original bytesize - self.bytes_downloaded = 0 # Downloaded byte - self.bytes_tried = 0 # Which bytes did we try - self.bytes_missing = 0 # Bytes missing - self.bad_articles = 0 # How many bad (non-recoverable) articles - - self.partable = {} # Holds one parfile-name for each set - self.extrapars = {} # Holds the extra parfile names for all sets - self.md5packs = {} # Holds the md5pack for each set (name: hash) - self.md5of16k = {} # Holds the md5s of the first-16k of all files in the NZB (hash: name) - - self.files = [] # List of all NZFs - self.files_table = {} # Dictionary of NZFs indexed using NZF_ID - self.renames = {} # Dictionary of all renamed files - - self.finished_files = [] # List of all finished NZFs + self.servercount = {} # Dict to keep bytes per server + self.created = False # dirprefixes + work_name created + self.direct_unpacker = None # Holds the DirectUnpacker instance + self.bytes = 0 # Original bytesize + self.bytes_downloaded = 0 # Downloaded byte + self.bytes_tried = 0 # Which bytes did we try + self.bytes_missing = 0 # Bytes missing + self.bad_articles = 0 # How many bad (non-recoverable) articles + + self.partable = {} # Holds one parfile-name for each set + self.extrapars = {} # Holds the extra parfile names for all sets + self.md5packs = {} # Holds the md5pack for each set (name: hash) + self.md5of16k = {} # Holds the md5s of the first-16k of all files in the NZB (hash: name) + + self.files = [] # List of all NZFs + self.files_table = {} # Dictionary of NZFs indexed using NZF_ID + self.renames = {} # Dictionary of all renamed files + + self.finished_files = [] # List of all finished NZFs # The current status of the nzo eg: # Queued, Downloading, Repairing, Unpacking, Failed, Complete @@ -547,11 
+673,11 @@ class NzbObject(TryList): self.status = Status.CHECKING # Store one line responses for filejoin/par2/unrar/unzip here for history display - self.action_line = '' + self.action_line = "" # Store the results from various filejoin/par2/unrar/unzip stages self.unpack_info = {} # Stores one line containing the last failure - self.fail_msg = '' + self.fail_msg = "" # Stores various info about the nzo to be self.nzo_info = nzo_info or {} @@ -572,11 +698,11 @@ class NzbObject(TryList): # Apply conversion option to final folder if cfg.replace_spaces(): - logging.info('Replacing spaces with underscores in %s', self.final_name) - self.final_name = self.final_name.replace(' ', '_') + logging.info("Replacing spaces with underscores in %s", self.final_name) + self.final_name = self.final_name.replace(" ", "_") if cfg.replace_dots(): - logging.info('Replacing dots with spaces in %s', self.final_name) - self.final_name = self.final_name.replace('.', ' ') + logging.info("Replacing dots with spaces in %s", self.final_name) + self.final_name = self.final_name.replace(".", " ") # Determine "incomplete" folder wdir = long_path(os.path.join(cfg.download_dir.get_path(), self.work_name)) @@ -589,8 +715,8 @@ class NzbObject(TryList): duplicate = series = 0 if reuse: - remove_all(adir, 'SABnzbd_nz?_*', keep_folder=True) - remove_all(adir, 'SABnzbd_article_*', keep_folder=True) + remove_all(adir, "SABnzbd_nz?_*", keep_folder=True) + remove_all(adir, "SABnzbd_article_*", keep_folder=True) else: wdir = trim_win_path(wdir) wdir = get_unique_path(wdir, create_dir=True) @@ -602,12 +728,12 @@ class NzbObject(TryList): _, self.work_name = os.path.split(wdir) self.created = True - if nzb and ' 0.5 and self.bytes > limit: - logging.info('Job too large, forcing low prio and paused (%s)', self.final_name) + logging.info("Job too large, forcing low prio and paused (%s)", self.final_name) self.pause() self.oversized = True self.priority = LOW_PRIORITY @@ -716,14 +842,14 @@ class 
NzbObject(TryList): if cfg.warn_dupl_jobs(): logging.warning(T('Failing duplicate NZB "%s"'), filename) # Move to history, utilizing the same code as accept&fail from pre-queue script - self.fail_msg = T('Duplicate NZB') + self.fail_msg = T("Duplicate NZB") accept = 2 duplicate = False if duplicate or self.priority == DUP_PRIORITY: if cfg.no_dupes() == 4 or cfg.no_series_dupes() == 4: if cfg.warn_dupl_jobs(): - logging.warning('%s: "%s"', T('Duplicate NZB'), filename) + logging.warning('%s: "%s"', T("Duplicate NZB"), filename) self.duplicate = True self.priority = NORMAL_PRIORITY else: @@ -751,8 +877,8 @@ class NzbObject(TryList): self.nzo_info[kw] = self.meta[kw][0] # Show first meta-password (if any), when there's no explicit password - if not self.password and self.meta.get('password'): - self.password = self.meta.get('password', [None])[0] + if not self.password and self.meta.get("password"): + self.password = self.meta.get("password", [None])[0] # Set nzo save-delay to minimum 120 seconds self.save_timeout = max(120, min(6.0 * float(self.bytes) / GIGI, 300.0)) @@ -800,27 +926,27 @@ class NzbObject(TryList): # So put the last rar immediately after the first rar file so that it gets detected early if cfg.unwanted_extensions() and not cfg.auto_sort(): # ... 
only useful if there are unwanted extensions defined and there is no sorting on date - logging.debug('Unwanted Extension: putting last rar after first rar') + logging.debug("Unwanted Extension: putting last rar after first rar") nzfposcounter = firstrarpos = lastrarpos = 0 for nzf in self.files: nzfposcounter += 1 - if '.rar' in str(nzf): + if ".rar" in str(nzf): # a NZF found with '.rar' in the name if firstrarpos == 0: # this is the first .rar found, so remember this position firstrarpos = nzfposcounter lastrarpos = nzfposcounter - lastrarnzf = nzf # The NZF itself + lastrarnzf = nzf # The NZF itself if firstrarpos != lastrarpos: # at least two different .rar's found - logging.debug('Unwanted Extension: First rar at %s, Last rar at %s', firstrarpos, lastrarpos) - logging.debug('Unwanted Extension: Last rar is %s', str(lastrarnzf)) + logging.debug("Unwanted Extension: First rar at %s, Last rar at %s", firstrarpos, lastrarpos) + logging.debug("Unwanted Extension: Last rar is %s", str(lastrarnzf)) try: - self.files.remove(lastrarnzf) # first remove. NB: remove() does searches for lastrarnzf - self.files.insert(firstrarpos, lastrarnzf) # ... and only then add after position firstrarpos + self.files.remove(lastrarnzf) # first remove. NB: remove() does searches for lastrarnzf + self.files.insert(firstrarpos, lastrarnzf) # ... 
and only then add after position firstrarpos except: - logging.debug('The lastrar swap did not go well') + logging.debug("The lastrar swap did not go well") def reset_all_try_lists(self): for nzf in self.files: @@ -848,7 +974,7 @@ class NzbObject(TryList): if setname and block and matcher(lparset, setname.lower()): xnzf.set_par2(parset, vol, block) # Don't postpone if all par2 are desired and should be kept or not repairing - if self.repair and not(cfg.enable_all_par() and not cfg.enable_par_cleanup()): + if self.repair and not (cfg.enable_all_par() and not cfg.enable_par_cleanup()): self.extrapars[parset].append(xnzf) self.files.remove(xnzf) # Already count these bytes as done @@ -877,7 +1003,7 @@ class NzbObject(TryList): # If we couldn't parse it, we ignore it if pack: if pack not in self.md5packs.values(): - logging.debug('Got md5pack for set %s', nzf.setname) + logging.debug("Got md5pack for set %s", nzf.setname) self.md5packs[setname] = pack # See if we need to postpone some pars self.postpone_pars(nzf, setname) @@ -889,7 +1015,7 @@ class NzbObject(TryList): # Change the properties nzf.set_par2(setname, vol, block) - logging.debug('Got additional md5pack for set %s', nzf.setname) + logging.debug("Got additional md5pack for set %s", nzf.setname) # Make sure it exists, could be removed by newsunpack if setname not in self.extrapars: @@ -901,10 +1027,10 @@ class NzbObject(TryList): self.promote_par2(nzf) # Is it an obfuscated file? 
- if get_ext(nzf.filename) != '.par2': + if get_ext(nzf.filename) != ".par2": # Do cheap renaming so it gets better picked up by par2 # Only basename has to be the same - new_fname = get_unique_filename(os.path.join(self.downpath, '%s.par2' % setname)) + new_fname = get_unique_filename(os.path.join(self.downpath, "%s.par2" % setname)) renamer(filepath, new_fname) self.renamed_file(get_filename(new_fname), nzf.filename) nzf.filename = get_filename(new_fname) @@ -936,7 +1062,7 @@ class NzbObject(TryList): In case of duplicate files for the same set, we might add too little par2 on the first add-run, but that's a risk we need to take. """ - logging.info('Need %s more blocks, checking blocks', needed_blocks) + logging.info("Need %s more blocks, checking blocks", needed_blocks) avail_blocks = 0 block_list = [] @@ -953,7 +1079,7 @@ class NzbObject(TryList): # Sort by smallest blocks last, to be popped first block_list.sort(key=lambda x: x.blocks, reverse=True) - logging.info('%s blocks available', avail_blocks) + logging.info("%s blocks available", avail_blocks) # Enough? 
if avail_blocks >= needed_blocks: @@ -963,7 +1089,7 @@ class NzbObject(TryList): self.add_parfile(new_nzf) added_blocks += new_nzf.blocks - logging.info('Added %s blocks to %s', added_blocks, self.final_name) + logging.info("Added %s blocks to %s", added_blocks, self.final_name) return added_blocks else: # Not enough @@ -1009,8 +1135,8 @@ class NzbObject(TryList): # Set the nzo status to return "Queued" self.status = Status.QUEUED self.set_download_report() - self.fail_msg = T('Aborted, cannot be completed') + ' - https://sabnzbd.org/not-complete' - self.set_unpack_info('Download', self.fail_msg, unique=False) + self.fail_msg = T("Aborted, cannot be completed") + " - https://sabnzbd.org/not-complete" + self.set_unpack_info("Download", self.fail_msg, unique=False) logging.debug('Abort job "%s", due to impossibility to complete it', self.final_name) return True, True, True @@ -1120,9 +1246,9 @@ class NzbObject(TryList): # Process par2 files if sabnzbd.par2file.is_parfile(filepath): self.handle_par2(nzf, filepath) - logging.info('Existing file %s added to job', filename) + logging.info("Existing file %s added to job", filename) except: - logging.debug('Bad NZB handling') + logging.debug("Bad NZB handling") logging.info("Traceback: ", exc_info=True) @property @@ -1134,7 +1260,7 @@ class NzbObject(TryList): def set_pp(self, value): self.repair, self.unpack, self.delete = pp_to_opts(value) - logging.info('Set pp=%s for job %s', value, self.final_name) + logging.info("Set pp=%s for job %s", value, self.final_name) # Abort unpacking if not desired anymore if not self.unpack: self.abort_direct_unpacker() @@ -1142,14 +1268,23 @@ class NzbObject(TryList): def set_priority(self, value): """ Check if this is a valid priority """ # When unknown (0 is a known one), set to DEFAULT - if value == '' or value is None: + if value == "" or value is None: self.priority = DEFAULT_PRIORITY return # Convert input value = int_conv(value) - if value in (REPAIR_PRIORITY, TOP_PRIORITY, 
HIGH_PRIORITY, NORMAL_PRIORITY, - LOW_PRIORITY, DEFAULT_PRIORITY, PAUSED_PRIORITY, DUP_PRIORITY, STOP_PRIORITY): + if value in ( + REPAIR_PRIORITY, + TOP_PRIORITY, + HIGH_PRIORITY, + NORMAL_PRIORITY, + LOW_PRIORITY, + DEFAULT_PRIORITY, + PAUSED_PRIORITY, + DUP_PRIORITY, + STOP_PRIORITY, + ): self.priority = value return @@ -1161,35 +1296,35 @@ class NzbObject(TryList): """ Return (translated) labels of job """ labels = [] if self.duplicate: - labels.append(T('DUPLICATE')) + labels.append(T("DUPLICATE")) if self.encrypted > 0: - labels.append(T('ENCRYPTED')) + labels.append(T("ENCRYPTED")) if self.oversized: - labels.append(T('TOO LARGE')) + labels.append(T("TOO LARGE")) if self.incomplete: - labels.append(T('INCOMPLETE')) + labels.append(T("INCOMPLETE")) if self.unwanted_ext: - labels.append(T('UNWANTED')) + labels.append(T("UNWANTED")) if self.rating_filtered: - labels.append(T('FILTERED')) + labels.append(T("FILTERED")) # Waiting for URL fetching if isinstance(self.url_wait, float): dif = int(self.url_wait - time.time() + 0.5) if dif > 0: - labels.append(T('WAIT %s sec') % dif) + labels.append(T("WAIT %s sec") % dif) # Propagation delay label if (self.avg_stamp + float(cfg.propagation_delay() * 60)) > time.time() and self.priority != TOP_PRIORITY: wait_time = int((self.avg_stamp + float(cfg.propagation_delay() * 60) - time.time()) / 60 + 0.5) - labels.append(T('PROPAGATING %s min') % wait_time) # Queue indicator while waiting for propagation of post + labels.append(T("PROPAGATING %s min") % wait_time) # Queue indicator while waiting for propagation of post return labels @property def final_name_with_password(self): if self.password: - return '%s / %s' % (self.final_name, self.password) + return "%s / %s" % (self.final_name, self.password) else: return self.final_name @@ -1258,7 +1393,7 @@ class NzbObject(TryList): This fails in case of multi-sets with identical setnames """ # Make sure to also select a parset if it was in the original filename - original_filename 
= self.renames.get(nzf.filename, '') + original_filename = self.renames.get(nzf.filename, "") # Get some blocks! if not nzf.is_par2: @@ -1271,7 +1406,7 @@ class NzbObject(TryList): blocks_new += new_nzf.blocks # Enough now? if blocks_new >= self.bad_articles: - logging.info('Prospectively added %s repair blocks to %s', blocks_new, self.final_name) + logging.info("Prospectively added %s repair blocks to %s", blocks_new, self.final_name) break # Reset NZO TryList self.reset_try_list() @@ -1293,7 +1428,7 @@ class NzbObject(TryList): """ # Few missing articles in RAR-only job might still work if self.bad_articles <= MAX_BAD_ARTICLES: - logging.debug('Download Quality: bad-articles=%s', self.bad_articles) + logging.debug("Download Quality: bad-articles=%s", self.bad_articles) return True, 200 # Do the full check @@ -1316,7 +1451,7 @@ class NzbObject(TryList): enough = ratio * 100.0 >= (req_ratio or float(cfg.req_completion_rate())) else: enough = have >= need - logging.debug('Download Quality: enough=%s, have=%s, need=%s, ratio=%s', enough, have, need, ratio) + logging.debug("Download Quality: enough=%s, have=%s, need=%s, ratio=%s", enough, have, need, ratio) return enough, ratio def check_first_article_availability(self): @@ -1339,9 +1474,17 @@ class NzbObject(TryList): if self.servercount: # Sort the servers first servers = config.get_servers() - server_names = sorted(servers.keys(), key=lambda svr: '%d%02d%s' % (int(not servers[svr].enable()), servers[svr].priority(), servers[svr].displayname().lower())) - msgs = ['%s=%sB' % (servers[server_name].displayname(), to_units(self.servercount[server_name])) for server_name in server_names if server_name in self.servercount] - self.set_unpack_info('Servers', ', '.join(msgs), unique=True) + server_names = sorted( + servers.keys(), + key=lambda svr: "%d%02d%s" + % (int(not servers[svr].enable()), servers[svr].priority(), servers[svr].displayname().lower()), + ) + msgs = [ + "%s=%sB" % (servers[server_name].displayname(), 
to_units(self.servercount[server_name])) + for server_name in server_names + if server_name in self.servercount + ] + self.set_unpack_info("Servers", ", ".join(msgs), unique=True) # In case there were no bytes available at all of this download # we list the number of bytes we used while trying @@ -1354,27 +1497,29 @@ class NzbObject(TryList): # Get the seconds it took to complete the download avg_bps = self.avg_bps_total / self.avg_bps_freq download_time = int_conv(self.bytes_downloaded / (avg_bps * 1024)) - self.nzo_info['download_time'] = download_time + self.nzo_info["download_time"] = download_time # Format the total time the download took, in days, hours, and minutes, or seconds. complete_time = format_time_string(download_time) - download_msgs.append(T('Downloaded in %s at an average of %sB/s') % (complete_time, to_units(avg_bps * 1024))) - download_msgs.append(T('Age') + ': ' + calc_age(self.avg_date, True)) + download_msgs.append( + T("Downloaded in %s at an average of %sB/s") % (complete_time, to_units(avg_bps * 1024)) + ) + download_msgs.append(T("Age") + ": " + calc_age(self.avg_date, True)) - bad = self.nzo_info.get('bad_articles', 0) - miss = self.nzo_info.get('missing_articles', 0) - dups = self.nzo_info.get('duplicate_articles', 0) + bad = self.nzo_info.get("bad_articles", 0) + miss = self.nzo_info.get("missing_articles", 0) + dups = self.nzo_info.get("duplicate_articles", 0) if bad: - download_msgs.append(T('%s articles were malformed') % bad) + download_msgs.append(T("%s articles were malformed") % bad) if miss: - download_msgs.append(T('%s articles were missing') % miss) + download_msgs.append(T("%s articles were missing") % miss) if dups: - download_msgs.append(T('%s articles had non-matching duplicates') % dups) - self.set_unpack_info('Download', u'
'.join(download_msgs), unique=True) + download_msgs.append(T("%s articles had non-matching duplicates") % dups) + self.set_unpack_info("Download", "
".join(download_msgs), unique=True) if self.url: - self.set_unpack_info('Source', self.url, unique=True) + self.set_unpack_info("Source", self.url, unique=True) @synchronized(NZO_LOCK) def increase_bad_articles_counter(self, article_type): @@ -1399,7 +1544,7 @@ class NzbObject(TryList): if not article: for nzf in self.files: if nzf.deleted: - logging.debug('Skipping existing file %s', nzf.filename or nzf.subject) + logging.debug("Skipping existing file %s", nzf.filename or nzf.subject) else: # Don't try to get an article if server is in try_list of nzf if not nzf.server_in_try_list(server): @@ -1410,7 +1555,7 @@ class NzbObject(TryList): nzf.finish_import() # Still not finished? Something went wrong... if not nzf.import_finished and not self.is_gone(): - logging.error(T('Error importing %s'), nzf) + logging.error(T("Error importing %s"), nzf) nzf_remove_list.append(nzf) nzf.nzo.status = Status.PAUSED continue @@ -1522,24 +1667,29 @@ class NzbObject(TryList): new_filename = self.md5of16k[nzf.md5of16k] # Was it even new? 
if new_filename != nzf.filename: - logging.info('Detected filename based on par2: %s -> %s', nzf.filename, new_filename) + logging.info("Detected filename based on par2: %s -> %s", nzf.filename, new_filename) self.renamed_file(new_filename, nzf.filename) nzf.filename = new_filename return # Fallback to yenc/nzb name (also when there is no partnum=1) # We also keep the NZB name in case it ends with ".par2" (usually correct) - if yenc_filename and yenc_filename != nzf.filename and not is_obfuscated_filename(yenc_filename) and not nzf.filename.endswith('.par2'): - logging.info('Detected filename from yenc: %s -> %s', nzf.filename, yenc_filename) + if ( + yenc_filename + and yenc_filename != nzf.filename + and not is_obfuscated_filename(yenc_filename) + and not nzf.filename.endswith(".par2") + ): + logging.info("Detected filename from yenc: %s -> %s", nzf.filename, yenc_filename) self.renamed_file(yenc_filename, nzf.filename) nzf.filename = yenc_filename def verify_all_filenames_and_resort(self): """ Verify all filenames based on par2 info and then re-sort files """ - logging.info('Checking all filenames for %s', self.final_name) + logging.info("Checking all filenames for %s", self.final_name) for nzf_verify in self.files: self.verify_nzf_filename(nzf_verify) - logging.info('Re-sorting %s after getting filename information', self.final_name) + logging.info("Re-sorting %s after getting filename information", self.final_name) self.sort_nzfs() @synchronized(NZO_LOCK) @@ -1559,15 +1709,31 @@ class NzbObject(TryList): def update_rating(self): if cfg.rating_enable(): try: + def _get_first_meta(rating_type): - values = self.nzo_info.get('x-oznzb-rating-' + rating_type, None) or self.nzo_info.get('x-rating-' + rating_type, None) + values = self.nzo_info.get("x-oznzb-rating-" + rating_type, None) or self.nzo_info.get( + "x-rating-" + rating_type, None + ) return values[0] if values and isinstance(values, list) else values - rating_types = ['url', 'host', 'video', 'videocnt', 
'audio', 'audiocnt', 'voteup', - 'votedown', 'spam', 'confirmed-spam', 'passworded', 'confirmed-passworded'] + + rating_types = [ + "url", + "host", + "video", + "videocnt", + "audio", + "audiocnt", + "voteup", + "votedown", + "spam", + "confirmed-spam", + "passworded", + "confirmed-passworded", + ] fields = {} for k in rating_types: fields[k] = _get_first_meta(k) - Rating.do.add_rating(_get_first_meta('id'), self.nzo_id, fields) + Rating.do.add_rating(_get_first_meta("id"), self.nzo_id, fields) except: pass @@ -1583,7 +1749,7 @@ class NzbObject(TryList): def downpath(self): """ Return the full path for my download folder """ if self.futuretype: - return '' + return "" else: return long_path(os.path.join(cfg.download_dir.get_path(), self.work_name)) @@ -1602,7 +1768,9 @@ class NzbObject(TryList): @synchronized(NZO_LOCK) def purge_data(self, delete_all_data=True): """ Remove (all) job data """ - logging.info('[%s] Purging data for job %s (delete_all_data=%s)', caller_name(), self.final_name, delete_all_data) + logging.info( + "[%s] Purging data for job %s (delete_all_data=%s)", caller_name(), self.final_name, delete_all_data + ) # Abort DirectUnpack and let it remove files self.abort_direct_unpacker() @@ -1615,8 +1783,8 @@ class NzbObject(TryList): remove_all(self.downpath, recursive=True) else: # We remove any saved articles and save the renames file - remove_all(self.downpath, 'SABnzbd_nz?_*', keep_folder=True) - remove_all(self.downpath, 'SABnzbd_article_*', keep_folder=True) + remove_all(self.downpath, "SABnzbd_nz?_*", keep_folder=True) + remove_all(self.downpath, "SABnzbd_article_*", keep_folder=True) sabnzbd.save_data(self.renames, RENAMES_FILE, self.workpath, silent=True) def gather_info(self, full=False): @@ -1630,11 +1798,31 @@ class NzbObject(TryList): if not nzf.completed and nzf not in self.files: queued_files.append(nzf) - return PNFO(self.repair, self.unpack, self.delete, self.script, self.nzo_id, - self.final_name, self.labels, self.password, {}, '', 
self.cat, self.url, self.remaining, - self.bytes, self.avg_stamp, self.avg_date, self.finished_files if full else [], - self.files if full else [], queued_files, self.status, self.priority, - self.bytes_missing, self.direct_unpacker.get_formatted_stats() if self.direct_unpacker else 0) + return PNFO( + self.repair, + self.unpack, + self.delete, + self.script, + self.nzo_id, + self.final_name, + self.labels, + self.password, + {}, + "", + self.cat, + self.url, + self.remaining, + self.bytes, + self.avg_stamp, + self.avg_date, + self.finished_files if full else [], + self.files if full else [], + queued_files, + self.status, + self.priority, + self.bytes_missing, + self.direct_unpacker.get_formatted_stats() if self.direct_unpacker else 0, + ) def get_nzf_by_id(self, nzf_id): if nzf_id in self.files_table: @@ -1659,9 +1847,9 @@ class NzbObject(TryList): def set_action_line(self, action=None, msg=None): if action and msg: - self.action_line = '%s: %s' % (action, msg) + self.action_line = "%s: %s" % (action, msg) else: - self.action_line = '' + self.action_line = "" # Make sure it's updated in the interface sabnzbd.history_updated() @@ -1677,7 +1865,9 @@ class NzbObject(TryList): sabnzbd.save_data(self, self.nzo_id, self.workpath) def save_attribs(self): - set_attrib_file(self.workpath, (self.cat, self.pp, self.script, self.priority, self.final_name, self.password, self.url)) + set_attrib_file( + self.workpath, (self.cat, self.pp, self.script, self.priority, self.final_name, self.password, self.url) + ) @synchronized(NZO_LOCK) def build_pos_nzf_table(self, nzf_ids): @@ -1720,18 +1910,26 @@ class NzbObject(TryList): # dupe check off nzb contents if no_dupes: res = history_db.have_name_or_md5sum(self.final_name, self.md5sum) - logging.debug('Dupe checking NZB in history: filename=%s, md5sum=%s, result=%s', self.filename, self.md5sum, res) + logging.debug( + "Dupe checking NZB in history: filename=%s, md5sum=%s, result=%s", self.filename, self.md5sum, res + ) if not res 
and cfg.backup_for_duplicates(): res = sabnzbd.backup_exists(self.filename) - logging.debug('Dupe checking NZB against backup: filename=%s, result=%s', self.filename, res) + logging.debug("Dupe checking NZB against backup: filename=%s, result=%s", self.filename, res) # dupe check off nzb filename if not res and no_series_dupes: series, season, episode, misc = sabnzbd.newsunpack.analyse_show(self.final_name) if RE_PROPER.match(misc) and series_propercheck: - logging.debug('Dupe checking series+season+ep in history aborted due to PROPER/REAL/REPACK found') + logging.debug("Dupe checking series+season+ep in history aborted due to PROPER/REAL/REPACK found") else: res = history_db.have_episode(series, season, episode) - logging.debug('Dupe checking series+season+ep in history: series=%s, season=%s, episode=%s, result=%s', series, season, episode, res) + logging.debug( + "Dupe checking series+season+ep in history: series=%s, season=%s, episode=%s, result=%s", + series, + season, + episode, + res, + ) history_db.close() return res, series @@ -1745,7 +1943,7 @@ class NzbObject(TryList): dict_ = {} for item in NzbObjectSaver: dict_[item] = getattr(self, item) - dict_['try_list'] = TryList.__getstate__(self) + dict_["try_list"] = TryList.__getstate__(self) return dict_ def __setstate__(self, dict_): @@ -1756,7 +1954,7 @@ class NzbObject(TryList): except KeyError: # Handle new attributes setattr(self, item, None) - TryList.__setstate__(self, dict_.get('try_list', [])) + TryList.__setstate__(self, dict_.get("try_list", [])) # Set non-transferable values self.pp_active = False @@ -1799,7 +1997,7 @@ def nzf_get_filename(nzf): if not name: name = nzf.subject if not name: - name = '' + name = "" return name.lower() @@ -1817,13 +2015,13 @@ def nzf_cmp_name(nzf1, nzf2, name=True): name2 = nzf_get_filename(nzf2) # Determine vol-pars - is_par1 = '.vol' in name1 and '.par2' in name1 - is_par2 = '.vol' in name2 and '.par2' in name2 + is_par1 = ".vol" in name1 and ".par2" in name1 + 
is_par2 = ".vol" in name2 and ".par2" in name2 # mini-par2 in front - if not is_par1 and name1.endswith('.par2'): + if not is_par1 and name1.endswith(".par2"): return -1 - if not is_par2 and name2.endswith('.par2'): + if not is_par2 and name2.endswith(".par2"): return 1 # vol-pars go to the back @@ -1841,10 +2039,10 @@ def nzf_cmp_name(nzf1, nzf2, name=True): elif m2 and not (is_par1 or m1): return 1 # Force .rar to come before 'r00' - if m1 and m1.group(1) == '.rar': - name1 = name1.replace('.rar', '.r//') - if m2 and m2.group(1) == '.rar': - name2 = name2.replace('.rar', '.r//') + if m1 and m1.group(1) == ".rar": + name1 = name1.replace(".rar", ".r//") + if m2 and m2.group(1) == ".rar": + name2 = name2.replace(".rar", ".r//") return cmp(name1, name2) else: # Do date comparison @@ -1853,9 +2051,9 @@ def nzf_cmp_name(nzf1, nzf2, name=True): def create_work_name(name): """ Remove ".nzb" and ".par(2)" and sanitize """ - strip_ext = ['.nzb', '.par', '.par2'] + strip_ext = [".nzb", ".par", ".par2"] name = sanitize_foldername(name.strip()) - if name.find('://') < 0: + if name.find("://") < 0: name_base, ext = os.path.splitext(name) # In case it was one of these, there might be more while ext.lower() in strip_ext: @@ -1868,34 +2066,34 @@ def create_work_name(name): def scan_password(name): """ Get password (if any) from the title """ - if 'http://' in name or 'https://' in name: + if "http://" in name or "https://" in name: return name, None - braces = name.find('{{') + braces = name.find("{{") if braces < 0: braces = len(name) - slash = name.find('/') + slash = name.find("/") # Look for name/password, but make sure that '/' comes before any {{ - if 0 <= slash < braces and 'password=' not in name: + if 0 <= slash < braces and "password=" not in name: # Is it maybe in 'name / password' notation? - if slash == name.find(' / ') + 1: + if slash == name.find(" / ") + 1: # Remove the extra space after name and before password - return name[:slash - 1].strip('. 
'), name[slash + 2:] - return name[:slash].strip('. '), name[slash + 1:] + return name[: slash - 1].strip(". "), name[slash + 2 :] + return name[:slash].strip(". "), name[slash + 1 :] # Look for "name password=password" - pw = name.find('password=') + pw = name.find("password=") if pw >= 0: - return name[:pw].strip('. '), name[pw + 9:] + return name[:pw].strip(". "), name[pw + 9 :] # Look for name{{password}} - if braces < len(name) and name.endswith('}}'): - return name[:braces].strip('. '), name[braces + 2:len(name) - 2] + if braces < len(name) and name.endswith("}}"): + return name[:braces].strip(". "), name[braces + 2 : len(name) - 2] # Look again for name/password if slash >= 0: - return name[:slash].strip('. '), name[slash + 1:] + return name[:slash].strip(". "), name[slash + 1 :] # No password found return name, None @@ -1906,14 +2104,14 @@ def get_attrib_file(path, size): attribs = [] path = os.path.join(path, ATTRIB_FILE) try: - f = open(path, 'r', encoding="utf-8") + f = open(path, "r", encoding="utf-8") except: return [None for unused in range(size)] for unused in range(size): - line = f.readline().strip('\r\n ') + line = f.readline().strip("\r\n ") if line: - if line.lower() == 'none': + if line.lower() == "none": line = None try: line = int(line) @@ -1930,12 +2128,12 @@ def set_attrib_file(path, attribs): """ Write job's attributes to file """ path = os.path.join(path, ATTRIB_FILE) try: - f = open(path, 'w', encoding="utf-8") + f = open(path, "w", encoding="utf-8") except: return for item in attribs: - f.write('%s\n' % item) + f.write("%s\n" % item) f.close() @@ -1952,7 +2150,7 @@ def name_extractor(subject): def matcher(pattern, txt): """ Return True if `pattern` is sufficiently equal to `txt` """ if txt.endswith(pattern): - txt = txt[:txt.rfind(pattern)].strip() + txt = txt[: txt.rfind(pattern)].strip() return (not txt) or txt.endswith('"') else: return False diff --git a/sabnzbd/osxmenu.py b/sabnzbd/osxmenu.py index a188425..76da048 100644 --- 
a/sabnzbd/osxmenu.py +++ b/sabnzbd/osxmenu.py @@ -49,9 +49,9 @@ import sabnzbd.dirscanner as dirscanner from sabnzbd.bpsmeter import BPSMeter status_icons = { - 'idle': 'icons/sabnzbd_osx_idle.tiff', - 'pause': 'icons/sabnzbd_osx_pause.tiff', - 'clicked': 'icons/sabnzbd_osx_clicked.tiff' + "idle": "icons/sabnzbd_osx_idle.tiff", + "pause": "icons/sabnzbd_osx_pause.tiff", + "clicked": "icons/sabnzbd_osx_clicked.tiff", } start_time = NSDate.date() debug = 0 @@ -71,7 +71,9 @@ class SABnzbdDelegate(NSObject): NSLog("[osx] awake") self.buildMenu() # Timer for updating menu - self.timer = NSTimer.alloc().initWithFireDate_interval_target_selector_userInfo_repeats_(start_time, 3.0, self, 'updateAction:', None, True) + self.timer = NSTimer.alloc().initWithFireDate_interval_target_selector_userInfo_repeats_( + start_time, 3.0, self, "updateAction:", None, True + ) NSRunLoop.currentRunLoop().addTimer_forMode_(self.timer, NSDefaultRunLoopMode) NSRunLoop.currentRunLoop().addTimer_forMode_(self.timer, NSEventTrackingRunLoopMode) # NSRunLoop.currentRunLoop().addTimer_forMode_(self.timer, NSModalPanelRunLoopMode) @@ -86,15 +88,15 @@ class SABnzbdDelegate(NSObject): icon_path = status_icons[icon] if hasattr(sys, "frozen"): # Path is modified for the binary - icon_path = os.path.join(os.path.dirname(sys.executable), '..', 'Resources', status_icons[icon]) + icon_path = os.path.join(os.path.dirname(sys.executable), "..", "Resources", status_icons[icon]) self.icons[icon] = NSImage.alloc().initByReferencingFile_(icon_path) if sabnzbd.DARWIN_VERSION > 9: # Support for Yosemite Dark Mode self.icons[icon].setTemplate_(YES) - self.status_item.setImage_(self.icons['idle']) - self.status_item.setAlternateImage_(self.icons['clicked']) + self.status_item.setImage_(self.icons["idle"]) + self.status_item.setAlternateImage_(self.icons["clicked"]) self.status_item.setHighlightMode_(1) - self.status_item.setToolTip_('SABnzbd') + self.status_item.setToolTip_("SABnzbd") 
self.status_item.setEnabled_(YES) if debug == 1: @@ -125,7 +127,7 @@ class SABnzbdDelegate(NSObject): self.menu = NSMenu.alloc().init() try: - menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Dummy", '', '') + menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Dummy", "", "") menu_item.setHidden_(YES) self.isLeopard = 1 except: @@ -135,7 +137,9 @@ class SABnzbdDelegate(NSObject): NSLog("[osx] menu 3 construction") # Warnings Item - self.warnings_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Warnings'), 'openBrowserAction:', '') + self.warnings_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_( + T("Warnings"), "openBrowserAction:", "" + ) if self.isLeopard: self.warnings_menu_item.setHidden_(YES) else: @@ -147,7 +151,9 @@ class SABnzbdDelegate(NSObject): NSLog("[osx] menu 4 warning added") # State Item - self.state_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Idle'), 'openBrowserAction:', '') + self.state_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_( + T("Idle"), "openBrowserAction:", "" + ) self.state_menu_item.setRepresentedObject_("") self.menu.addItem_(self.state_menu_item) @@ -155,7 +161,7 @@ class SABnzbdDelegate(NSObject): NSLog("[osx] menu 5 state added") # Config Item - menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Configuration'), 'openBrowserAction:', '') + menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T("Configuration"), "openBrowserAction:", "") menu_item.setRepresentedObject_("config/general/") menu_item.setAlternate_(YES) menu_item.setKeyEquivalentModifierMask_(NSAlternateKeyMask) @@ -165,7 +171,9 @@ class SABnzbdDelegate(NSObject): NSLog("[osx] menu 6 config added") # Queue Item - self.queue_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Queue'), 'openBrowserAction:', '') + self.queue_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_( + 
T("Queue"), "openBrowserAction:", "" + ) self.queue_menu_item.setRepresentedObject_("") self.menu.addItem_(self.queue_menu_item) @@ -173,7 +181,9 @@ class SABnzbdDelegate(NSObject): NSLog("[osx] menu 7 queue added") # Purge Queue Item - self.purgequeue_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Purge Queue'), 'purgeAction:', '') + self.purgequeue_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_( + T("Purge Queue"), "purgeAction:", "" + ) self.purgequeue_menu_item.setRepresentedObject_("queue") self.purgequeue_menu_item.setAlternate_(YES) self.purgequeue_menu_item.setKeyEquivalentModifierMask_(NSAlternateKeyMask) @@ -183,7 +193,9 @@ class SABnzbdDelegate(NSObject): NSLog("[osx] menu 8 purge queue added") # History Item - self.history_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('History'), 'openBrowserAction:', '') + self.history_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_( + T("History"), "openBrowserAction:", "" + ) self.history_menu_item.setRepresentedObject_("") self.menu.addItem_(self.history_menu_item) @@ -191,7 +203,9 @@ class SABnzbdDelegate(NSObject): NSLog("[osx] menu 9 history added") # Purge History Item - self.purgehistory_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Purge History'), 'purgeAction:', '') + self.purgehistory_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_( + T("Purge History"), "purgeAction:", "" + ) self.purgehistory_menu_item.setRepresentedObject_("history") self.purgehistory_menu_item.setAlternate_(YES) self.purgehistory_menu_item.setKeyEquivalentModifierMask_(NSAlternateKeyMask) @@ -204,16 +218,27 @@ class SABnzbdDelegate(NSObject): self.menu.addItem_(self.separator_menu_item) # Limit Speed Item & Submenu - self.speed_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Limit Speed'), '', '') + self.speed_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T("Limit 
Speed"), "", "") self.menu_speed = NSMenu.alloc().init() - speeds = {10: '10%', 20: '20%', 30: '30%', 40: '40%', 50: '50%', - 60: '60%', 70: '70%', 80: '80%', 90: '90%', 100: '100%' - } + speeds = { + 10: "10%", + 20: "20%", + 30: "30%", + 40: "40%", + 50: "50%", + 60: "60%", + 70: "70%", + 80: "80%", + 90: "90%", + 100: "100%", + } for speed in sorted(speeds.keys()): - menu_speed_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_('%s' % (speeds[speed]), 'speedlimitAction:', '') + menu_speed_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_( + "%s" % (speeds[speed]), "speedlimitAction:", "" + ) menu_speed_item.setRepresentedObject_("%s" % speed) self.menu_speed.addItem_(menu_speed_item) @@ -224,13 +249,15 @@ class SABnzbdDelegate(NSObject): NSLog("[osx] menu 11 limit speed added") # Pause Item & Submenu - self.pause_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Pause'), 'pauseAction:', '') - self.pause_menu_item.setRepresentedObject_('0') + self.pause_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T("Pause"), "pauseAction:", "") + self.pause_menu_item.setRepresentedObject_("0") self.menu_pause = NSMenu.alloc().init() for i in range(6): - menu_pause_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("%s %s" % ((i + 1) * 10, T('min.')), 'pauseAction:', '') + menu_pause_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_( + "%s %s" % ((i + 1) * 10, T("min.")), "pauseAction:", "" + ) menu_pause_item.setRepresentedObject_("%s" % ((i + 1) * 10)) self.menu_pause.addItem_(menu_pause_item) @@ -241,7 +268,7 @@ class SABnzbdDelegate(NSObject): NSLog("[osx] menu 12 pause added") # Resume Item - self.resume_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Resume'), 'resumeAction:', '') + self.resume_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T("Resume"), "resumeAction:", "") if self.isLeopard: self.resume_menu_item.setHidden_(YES) else: @@ -252,7 
+279,9 @@ class SABnzbdDelegate(NSObject): NSLog("[osx] menu 13 resume added") # Watched folder Item - self.watched_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Scan watched folder'), 'watchedFolderAction:', '') + self.watched_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_( + T("Scan watched folder"), "watchedFolderAction:", "" + ) if self.isLeopard: self.watched_menu_item.setHidden_(YES) else: @@ -260,7 +289,9 @@ class SABnzbdDelegate(NSObject): self.menu.addItem_(self.watched_menu_item) # All RSS feeds - self.rss_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Read all RSS feeds'), 'rssAction:', '') + self.rss_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_( + T("Read all RSS feeds"), "rssAction:", "" + ) if self.isLeopard: self.rss_menu_item.setHidden_(YES) else: @@ -274,12 +305,16 @@ class SABnzbdDelegate(NSObject): NSLog("[osx] menu 14 watched folder added") # Complete Folder Item - self.completefolder_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Complete Folder') + '\t\t\t', 'openFolderAction:', '') + self.completefolder_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_( + T("Complete Folder") + "\t\t\t", "openFolderAction:", "" + ) self.completefolder_menu_item.setRepresentedObject_(sabnzbd.cfg.complete_dir.get_path()) self.menu.addItem_(self.completefolder_menu_item) # Incomplete Folder Item - self.incompletefolder_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Incomplete Folder') + '\t\t', 'openFolderAction:', '') + self.incompletefolder_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_( + T("Incomplete Folder") + "\t\t", "openFolderAction:", "" + ) self.incompletefolder_menu_item.setRepresentedObject_(sabnzbd.cfg.download_dir.get_path()) self.menu.addItem_(self.incompletefolder_menu_item) @@ -289,14 +324,15 @@ class SABnzbdDelegate(NSObject): self.menu.addItem_(NSMenuItem.separatorItem()) # 
Set diagnostic menu - self.diagnostic_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Troubleshoot'), '', '') + self.diagnostic_menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T("Troubleshoot"), "", "") self.menu_diagnostic = NSMenu.alloc().init() - diag_items = ((T('Restart'), 'restartAction:'), - (T('Restart') + ' - 127.0.0.1:8080', 'restartSafeHost:'), - (T('Restart without login'), 'restartNoLogin:') - ) + diag_items = ( + (T("Restart"), "restartAction:"), + (T("Restart") + " - 127.0.0.1:8080", "restartSafeHost:"), + (T("Restart without login"), "restartNoLogin:"), + ) for item in diag_items: - menu_diag_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(item[0], item[1], '') + menu_diag_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(item[0], item[1], "") menu_diag_item.setRepresentedObject_(item[0]) self.menu_diagnostic.addItem_(menu_diag_item) @@ -307,7 +343,7 @@ class SABnzbdDelegate(NSObject): NSLog("[osx] menu 16 Diagnostic added") # Quit Item - menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Quit'), 'terminate:', '') + menu_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T("Quit"), "terminate:", "") self.menu.addItem_(menu_item) if debug == 1: @@ -361,7 +397,9 @@ class SABnzbdDelegate(NSObject): if len(pnfo_list): - menu_queue_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Queue First 10 Items'), '', '') + menu_queue_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_( + T("Queue First 10 Items"), "", "" + ) self.menu_queue.addItem_(menu_queue_item) self.menu_queue.addItem_(NSMenuItem.separatorItem()) @@ -373,13 +411,17 @@ class SABnzbdDelegate(NSObject): timeleft = self.calc_timeleft_(bytesleftprogess, BPSMeter.do.bps) job = "%s\t(%d/%d MB) %s" % (pnfo.filename, bytesleft, bytes_total, timeleft) - menu_queue_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(job, '', '') + menu_queue_item = 
NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(job, "", "") self.menu_queue.addItem_(menu_queue_item) - self.info = "%d nzb(s)\t( %d / %d MB )" % (qnfo.q_size_list, (qnfo.bytes_left / MEBI), (qnfo.bytes / MEBI)) + self.info = "%d nzb(s)\t( %d / %d MB )" % ( + qnfo.q_size_list, + (qnfo.bytes_left / MEBI), + (qnfo.bytes / MEBI), + ) else: - menu_queue_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Empty'), '', '') + menu_queue_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T("Empty"), "", "") self.menu_queue.addItem_(menu_queue_item) self.queue_menu_item.setSubmenu_(self.menu_queue) @@ -395,33 +437,40 @@ class SABnzbdDelegate(NSObject): items, fetched_items, _total_items = self.history_db.fetch_history(0, 10, None) self.menu_history = NSMenu.alloc().init() - self.failedAttributes = {NSForegroundColorAttributeName: NSColor.redColor(), NSFontAttributeName: NSFont.menuFontOfSize_(14.0)} + self.failedAttributes = { + NSForegroundColorAttributeName: NSColor.redColor(), + NSFontAttributeName: NSFont.menuFontOfSize_(14.0), + } - menu_history_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('History Last 10 Items'), '', '') + menu_history_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_( + T("History Last 10 Items"), "", "" + ) self.menu_history.addItem_(menu_history_item) self.menu_history.addItem_(NSMenuItem.separatorItem()) if fetched_items: for history in items: # logging.info("[osx] history : %s" % (history)) - job = "%s" % (history['name']) + job = "%s" % (history["name"]) path = "" - if os.path.isdir(history['storage']) or os.path.isfile(history['storage']): - if os.path.isfile(history['storage']): - path = os.path.dirname(history['storage']) + if os.path.isdir(history["storage"]) or os.path.isfile(history["storage"]): + if os.path.isfile(history["storage"]): + path = os.path.dirname(history["storage"]) else: - path = history['storage'] + path = history["storage"] if path: - menu_history_item = 
NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(job, 'openFolderAction:', '') + menu_history_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_( + job, "openFolderAction:", "" + ) else: - menu_history_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(job, '', '') - if history['status'] != Status.COMPLETED: + menu_history_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(job, "", "") + if history["status"] != Status.COMPLETED: jobfailed = NSAttributedString.alloc().initWithString_attributes_(job, self.failedAttributes) menu_history_item.setAttributedTitle_(jobfailed) menu_history_item.setRepresentedObject_("%s" % path) self.menu_history.addItem_(menu_history_item) else: - menu_history_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T('Empty'), '', '') + menu_history_item = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(T("Empty"), "", "") self.menu_history.addItem_(menu_history_item) self.history_menu_item.setSubmenu_(self.menu_history) @@ -434,10 +483,12 @@ class SABnzbdDelegate(NSObject): if warnings: warningsAttributes = { NSForegroundColorAttributeName: NSColor.redColor(), - NSFontAttributeName: NSFont.menuFontOfSize_(14.0) + NSFontAttributeName: NSFont.menuFontOfSize_(14.0), } - warningsTitle = NSAttributedString.alloc().initWithString_attributes_("%s : %s" % (T('Warnings'), warnings), warningsAttributes) + warningsTitle = NSAttributedString.alloc().initWithString_attributes_( + "%s : %s" % (T("Warnings"), warnings), warningsAttributes + ) self.warnings_menu_item.setAttributedTitle_(warningsTitle) if self.isLeopard: @@ -445,7 +496,7 @@ class SABnzbdDelegate(NSObject): else: self.warnings_menu_item.setEnabled_(YES) else: - self.warnings_menu_item.setTitle_("%s : 0" % (T('Warnings'))) + self.warnings_menu_item.setTitle_("%s : 0" % (T("Warnings"))) if self.isLeopard: self.warnings_menu_item.setHidden_(YES) else: @@ -458,7 +509,7 @@ class SABnzbdDelegate(NSObject): paused, bytes_left, bpsnow, time_left 
= fast_queue() if paused: - self.state = T('Paused') + self.state = T("Paused") if sabnzbd.scheduler.pause_int() != "0": self.setMenuTitle_("\n\n%s\n" % (sabnzbd.scheduler.pause_int())) else: @@ -467,8 +518,8 @@ class SABnzbdDelegate(NSObject): self.state = "" speed = to_units(bpsnow) # "10.1 MB/s" doesn't fit, remove space char - if 'M' in speed and len(speed) > 5: - speed = speed.replace(' ', '') + if "M" in speed and len(speed) > 5: + speed = speed.replace(" ", "") time_left = (bpsnow > 10 and time_left) or "------" statusbarText = "\n\n%s\n%sB/s\n" % (time_left, speed) @@ -481,7 +532,7 @@ class SABnzbdDelegate(NSObject): self.setMenuTitle_(statusbarText) else: - self.state = T('Idle') + self.state = T("Idle") self.setMenuTitle_("") if self.state != "" and self.info != "": @@ -497,9 +548,9 @@ class SABnzbdDelegate(NSObject): def iconUpdate(self): try: if sabnzbd.downloader.Downloader.do.paused: - self.status_item.setImage_(self.icons['pause']) + self.status_item.setImage_(self.icons["pause"]) else: - self.status_item.setImage_(self.icons['idle']) + self.status_item.setImage_(self.icons["idle"]) except: logging.info("[osx] iconUpdate Exception %s" % (sys.exc_info()[0])) @@ -542,7 +593,7 @@ class SABnzbdDelegate(NSObject): if sabnzbd.NEW_VERSION and self.version_notify: # logging.info("[osx] New Version : %s" % (sabnzbd.NEW_VERSION)) new_release, _new_rel_url = sabnzbd.NEW_VERSION - notifier.send_notification("SABnzbd", "%s : %s" % (T('New release available'), new_release), 'other') + notifier.send_notification("SABnzbd", "%s : %s" % (T("New release available"), new_release), "other") self.version_notify = 0 except: logging.info("[osx] versionUpdate Exception %s" % (sys.exc_info()[0])) @@ -574,7 +625,7 @@ class SABnzbdDelegate(NSObject): def serverUpdate(self): try: if not config.get_servers(): - self.state_menu_item.setTitle_(T('Go to wizard')) + self.state_menu_item.setTitle_(T("Go to wizard")) hide = YES alternate = NO value = 0 @@ -622,8 +673,12 @@ class 
SABnzbdDelegate(NSObject): def diskspaceUpdate(self): try: - self.completefolder_menu_item.setTitle_("%s%.2f GB" % (T('Complete Folder') + '\t\t\t', diskspace()['complete_dir'][1])) - self.incompletefolder_menu_item.setTitle_("%s%.2f GB" % (T('Incomplete Folder') + '\t\t', diskspace()['download_dir'][1])) + self.completefolder_menu_item.setTitle_( + "%s%.2f GB" % (T("Complete Folder") + "\t\t\t", diskspace()["complete_dir"][1]) + ) + self.incompletefolder_menu_item.setTitle_( + "%s%.2f GB" % (T("Incomplete Folder") + "\t\t", diskspace()["download_dir"][1]) + ) except: logging.info("[osx] diskspaceUpdate Exception %s" % (sys.exc_info()[0])) @@ -648,7 +703,7 @@ class SABnzbdDelegate(NSObject): NSBaselineOffsetAttributeName: 5.0, NSFontAttributeName: NSFont.menuFontOfSize_(9.0), NSParagraphStyleAttributeName: style - #,NSForegroundColorAttributeName: titleColor + # ,NSForegroundColorAttributeName: titleColor } title = NSAttributedString.alloc().initWithString_attributes_(text, titleAttributes) @@ -663,12 +718,12 @@ class SABnzbdDelegate(NSObject): minutes, seconds = divmod(totalseconds, 60) hours, minutes = divmod(minutes, 60) if minutes < 10: - minutes = '0%s' % minutes + minutes = "0%s" % minutes if seconds < 10: - seconds = '0%s' % seconds - return '%s:%s:%s' % (hours, minutes, seconds) + seconds = "0%s" % seconds + return "%s:%s:%s" % (hours, minutes, seconds) except: - return '0:00:00' + return "0:00:00" def openBrowserAction_(self, sender): if sender.representedObject: @@ -681,7 +736,7 @@ class SABnzbdDelegate(NSObject): # logging.info("[osx] speed limit to %s" % (sender.representedObject())) speed = int(sender.representedObject()) if speed != self.speed: - sabnzbd.downloader.Downloader.do.limit_speed('%s%%' % speed) + sabnzbd.downloader.Downloader.do.limit_speed("%s%%" % speed) self.speedlimitUpdate() def purgeAction_(self, sender): @@ -717,38 +772,38 @@ class SABnzbdDelegate(NSObject): NSLog("[osx] %@", folder2open) os.system('open "%s"' % folder2open) -# def 
aboutAction_(self, sender): -# app = NSApplication.sharedApplication() -# app.orderFrontStandardAboutPanel_(nil) + # def aboutAction_(self, sender): + # app = NSApplication.sharedApplication() + # app.orderFrontStandardAboutPanel_(nil) def restartAction_(self, sender): - self.setMenuTitle_("\n\n%s\n" % (T('Stopping...'))) - logging.info('Restart requested by tray') + self.setMenuTitle_("\n\n%s\n" % (T("Stopping..."))) + logging.info("Restart requested by tray") sabnzbd.trigger_restart() - self.setMenuTitle_("\n\n%s\n" % (T('Stopping...'))) + self.setMenuTitle_("\n\n%s\n" % (T("Stopping..."))) def restartSafeHost_(self, sender): - sabnzbd.cfg.cherryhost.set('127.0.0.1') - sabnzbd.cfg.cherryport.set('8080') + sabnzbd.cfg.cherryhost.set("127.0.0.1") + sabnzbd.cfg.cherryport.set("8080") sabnzbd.cfg.enable_https.set(False) sabnzbd.config.save_config() - self.setMenuTitle_("\n\n%s\n" % (T('Stopping...'))) + self.setMenuTitle_("\n\n%s\n" % (T("Stopping..."))) sabnzbd.trigger_restart() - self.setMenuTitle_("\n\n%s\n" % (T('Stopping...'))) + self.setMenuTitle_("\n\n%s\n" % (T("Stopping..."))) def restartNoLogin_(self, sender): - sabnzbd.cfg.username.set('') - sabnzbd.cfg.password.set('') + sabnzbd.cfg.username.set("") + sabnzbd.cfg.password.set("") sabnzbd.config.save_config() - self.setMenuTitle_("\n\n%s\n" % (T('Stopping...'))) + self.setMenuTitle_("\n\n%s\n" % (T("Stopping..."))) sabnzbd.trigger_restart() - self.setMenuTitle_("\n\n%s\n" % (T('Stopping...'))) + self.setMenuTitle_("\n\n%s\n" % (T("Stopping..."))) def application_openFiles_(self, nsapp, filenames): # logging.info('[osx] file open') # logging.info('[osx] file : %s' % (filenames)) for name in filenames: - logging.info('[osx] receiving from OSX : %s', name) + logging.info("[osx] receiving from OSX : %s", name) if os.path.exists(name): fn = get_filename(name) # logging.info('[osx] filename : %s' % (fn)) @@ -762,8 +817,8 @@ class SABnzbdDelegate(NSObject): # logging.info('opening done') def 
applicationShouldTerminate_(self, sender): - logging.info('[osx] application terminating') - self.setMenuTitle_("\n\n%s\n" % (T('Stopping...'))) + logging.info("[osx] application terminating") + self.setMenuTitle_("\n\n%s\n" % (T("Stopping..."))) self.status_item.setHighlightMode_(NO) self.osx_icon = False sabnzbd.shutdown_program() diff --git a/sabnzbd/panic.py b/sabnzbd/panic.py index 2b01adc..51eea09 100644 --- a/sabnzbd/panic.py +++ b/sabnzbd/panic.py @@ -42,10 +42,13 @@ PANIC_HOST = 8 def MSG_BAD_NEWS(): - return r''' + return ( + r""" - ''' + T('Problem with') + ''' %s %s + """ + + T("Problem with") + + """ %s %s

%s %s

@@ -57,83 +60,100 @@ def MSG_BAD_NEWS():
%s
-''' +""" + ) def MSG_BAD_PORT(): - return T(r''' + return ( + T( + r""" SABnzbd needs a free tcp/ip port for its internal web server.
Port %s on %s was tried , but it is not available.
Some other software uses the port or SABnzbd is already running.

- Please restart SABnzbd with a different port number.''') + \ - '''
+ Please restart SABnzbd with a different port number.""" + ) + + """

%s
    %s --server %s:%s
-
''' + \ - T(r'If you get this error message again, please try a different number.
') +
""" + + T(r"If you get this error message again, please try a different number.
") + ) def MSG_BAD_HOST(): - return T(r''' + return ( + T( + r""" SABnzbd needs a valid host address for its internal web server.
You have specified an invalid address.
Safe values are localhost and 0.0.0.0

- Please restart SABnzbd with a proper host address.''') + \ - '''
+ Please restart SABnzbd with a proper host address.""" + ) + + """

%s
    %s --server %s:%s

-''' +""" + ) def MSG_BAD_QUEUE(): - return T(r''' + return ( + T( + r""" SABnzbd detected saved data from an other SABnzbd version
but cannot re-use the data of the other program.

You may want to finish your queue first with the other program.

After that, start this program with the "--clean" option.
This will erase the current queue and history!
- SABnzbd read the file "%s".''') + \ - '''
+ SABnzbd read the file "%s".""" + ) + + """

%s
    %s --clean

-''' +""" + ) def MSG_BAD_TEMPL(): - return T(r''' + return T( + r""" SABnzbd cannot find its web interface files in %s.
Please install the program again.

-''') +""" + ) def MSG_OTHER(): - return T('SABnzbd detected a fatal error:') + '
%s

%s
' + return T("SABnzbd detected a fatal error:") + "
%s

%s
" def MSG_SQLITE(): - return T(r''' + return T( + r""" SABnzbd detected that the file sqlite3.dll is missing.

Some poorly designed virus-scanners remove this file.
Please check your virus-scanner, try to re-install SABnzbd and complain to your virus-scanner vendor.

-''') +""" + ) def panic_message(panic_code, a=None, b=None): """ Create the panic message from templates """ if sabnzbd.WIN32: - os_str = T('Press Startkey+R and type the line (example):') + os_str = T("Press Startkey+R and type the line (example):") prog_path = '"%s"' % sabnzbd.MY_FULLNAME else: - os_str = T('Open a Terminal window and type the line (example):') + os_str = T("Open a Terminal window and type the line (example):") prog_path = sabnzbd.MY_FULLNAME if panic_code == PANIC_PORT: @@ -147,27 +167,40 @@ def panic_message(panic_code, a=None, b=None): elif panic_code == PANIC_SQLITE: msg = MSG_SQLITE() elif panic_code == PANIC_HOST: - msg = MSG_BAD_HOST() % (os_str, prog_path, 'localhost', b) + msg = MSG_BAD_HOST() % (os_str, prog_path, "localhost", b) else: msg = MSG_OTHER() % (a, b) - msg = MSG_BAD_NEWS() % (sabnzbd.MY_NAME, sabnzbd.__version__, sabnzbd.MY_NAME, sabnzbd.__version__, - msg, T('Program did not start!')) + msg = MSG_BAD_NEWS() % ( + sabnzbd.MY_NAME, + sabnzbd.__version__, + sabnzbd.MY_NAME, + sabnzbd.__version__, + msg, + T("Program did not start!"), + ) if sabnzbd.WIN_SERVICE: - sabnzbd.WIN_SERVICE.ErrLogger('Panic exit', msg) + sabnzbd.WIN_SERVICE.ErrLogger("Panic exit", msg) if (not cfg.autobrowser()) or sabnzbd.DAEMON: return - msgfile, url = tempfile.mkstemp(suffix='.html') + msgfile, url = tempfile.mkstemp(suffix=".html") os.write(msgfile, utob(msg)) os.close(msgfile) return url def panic_port(host, port): - show_error_dialog("\n%s:\n %s" % (T('Fatal error'), T('Unable to bind to port %s on %s. Some other software uses the port or SABnzbd is already running.') % (port, host))) + show_error_dialog( + "\n%s:\n %s" + % ( + T("Fatal error"), + T("Unable to bind to port %s on %s. 
Some other software uses the port or SABnzbd is already running.") + % (port, host), + ) + ) launch_a_browser(panic_message(PANIC_PORT, host, port)) @@ -184,7 +217,7 @@ def panic_tmpl(name): def panic(reason, remedy=""): - show_error_dialog("\n%s:\n %s\n%s" % (T('Fatal error'), reason, remedy)) + show_error_dialog("\n%s:\n %s\n%s" % (T("Fatal error"), reason, remedy)) launch_a_browser(panic_message(PANIC_OTHER, reason, remedy)) @@ -193,26 +226,26 @@ def launch_a_browser(url, force=False): if not force and not cfg.autobrowser() or sabnzbd.DAEMON: return - if '::1' in url and '[::1]' not in url: + if "::1" in url and "[::1]" not in url: # Get around idiosyncrasy in Python runtime - url = url.replace('::1', '[::1]') + url = url.replace("::1", "[::1]") if cfg.enable_https() and not cfg.https_port.get_int(): # Must use https, because http is not available - url = url.replace('http:', 'https:') + url = url.replace("http:", "https:") - if 'localhost' in url and not cfg.ipv6_hosting(): - url = url.replace('localhost', '127.0.0.1') + if "localhost" in url and not cfg.ipv6_hosting(): + url = url.replace("localhost", "127.0.0.1") logging.info("Launching browser with %s", url) try: - if url and not url.startswith('http'): - url = 'file:///%s' % url + if url and not url.startswith("http"): + url = "file:///%s" % url if webbrowser: webbrowser.open(url, 2, 1) else: - logging.info('Not showing panic message in webbrowser, no support found') + logging.info("Not showing panic message in webbrowser, no support found") except: - logging.warning(T('Cannot launch the browser, probably not found')) + logging.warning(T("Cannot launch the browser, probably not found")) logging.info("Traceback: ", exc_info=True) @@ -221,15 +254,15 @@ def show_error_dialog(msg): Windows-only, otherwise only print to console """ if sabnzbd.WIN32: - ctypes.windll.user32.MessageBoxW(0, msg, T('Fatal error'), 0) + ctypes.windll.user32.MessageBoxW(0, msg, T("Fatal error"), 0) print(msg) def error_page_401(status, 
message, traceback, version): """ Custom handler for 401 error """ - title = T('Access denied') - body = T('Error %s: You need to provide a valid username and password.') % status - return r''' + title = T("Access denied") + body = T("Error %s: You need to provide a valid username and password.") % status + return r""" %s @@ -239,12 +272,16 @@ def error_page_401(status, message, traceback, version): %s -''' % (title, body) +""" % ( + title, + body, + ) def error_page_404(status, message, traceback, version): """ Custom handler for 404 error, redirect to main page """ - return r''' + return ( + r"""