diff --git a/SABnzbd.py b/SABnzbd.py
index c572be3..0b74af4 100755
--- a/SABnzbd.py
+++ b/SABnzbd.py
@@ -113,8 +113,8 @@ logging.warning_helpful = warning_helpful
class GUIHandler(logging.Handler):
- """ Logging handler collects the last warnings/errors/exceptions
- to be displayed in the web-gui
+ """Logging handler collects the last warnings/errors/exceptions
+ to be displayed in the web-gui
"""
def __init__(self, size):
@@ -507,8 +507,8 @@ def check_resolve(host):
def get_webhost(cherryhost, cherryport, https_port):
- """ Determine the webhost address and port,
- return (host, port, browserhost)
+ """Determine the webhost address and port,
+ return (host, port, browserhost)
"""
if cherryhost == "0.0.0.0" and not check_resolve("127.0.0.1"):
cherryhost = ""
@@ -679,8 +679,8 @@ def find_free_port(host, currentport):
def check_for_sabnzbd(url, upload_nzbs, allow_browser=True):
- """ Check for a running instance of sabnzbd on this port
- allow_browser==True|None will launch the browser, False will not.
+ """Check for a running instance of sabnzbd on this port
+ allow_browser==True|None will launch the browser, False will not.
"""
if allow_browser is None:
allow_browser = True
@@ -702,10 +702,10 @@ def check_for_sabnzbd(url, upload_nzbs, allow_browser=True):
def evaluate_inipath(path):
- """ Derive INI file path from a partial path.
- Full file path: if file does not exist the name must contain a dot
- but not a leading dot.
- foldername is enough, the standard name will be appended.
+ """Derive INI file path from a partial path.
+ Full file path: if file does not exist the name must contain a dot
+ but not a leading dot.
+ foldername is enough, the standard name will be appended.
"""
path = os.path.normpath(os.path.abspath(path))
inipath = os.path.join(path, DEF_INI_FILE)
@@ -722,9 +722,9 @@ def evaluate_inipath(path):
def commandline_handler():
- """ Split win32-service commands are true parameters
- Returns:
- service, sab_opts, serv_opts, upload_nzbs
+ """Split win32-service commands from the true parameters
+ Returns:
+ service, sab_opts, serv_opts, upload_nzbs
"""
service = ""
sab_opts = []
@@ -1648,9 +1648,9 @@ https://sabnzbd.org/wiki/advanced/sabnzbd-as-a-windows-service
def handle_windows_service():
- """ Handle everything for Windows Service
- Returns True when any service commands were detected or
- when we have started as a service.
+ """Handle everything for Windows Service
+ Returns True when any service commands were detected or
+ when we have started as a service.
"""
# Detect if running as Windows Service (only Vista and above!)
# Adapted from https://stackoverflow.com/a/55248281/5235502
diff --git a/sabnzbd/__init__.py b/sabnzbd/__init__.py
index 6aeb38f..8dfd0d7 100644
--- a/sabnzbd/__init__.py
+++ b/sabnzbd/__init__.py
@@ -531,8 +531,8 @@ def guard_language():
def set_https_verification(value):
- """ Set HTTPS-verification state while returning current setting
- False = disable verification
+ """Set HTTPS-verification state while returning current setting
+ False = disable verification
"""
prev = ssl._create_default_https_context == ssl.create_default_context
if value:
@@ -660,8 +660,8 @@ def add_nzbfile(
password=None,
nzo_id=None,
):
- """ Add file, either a single NZB-file or an archive.
- All other parameters are passed to the NZO-creation.
+ """Add file, either a single NZB-file or an archive.
+ All other parameters are passed to the NZO-creation.
"""
if pp == "-1":
pp = None
@@ -815,9 +815,9 @@ def restart_program():
def change_queue_complete_action(action, new=True):
- """ Action or script to be performed once the queue has been completed
- Scripts are prefixed with 'script_'
- When "new" is False, check whether non-script actions are acceptable
+ """Action or script to be performed once the queue has been completed
+ Scripts are prefixed with 'script_'
+ When "new" is False, check whether non-script actions are acceptable
"""
global QUEUECOMPLETE, QUEUECOMPLETEACTION, QUEUECOMPLETEARG
@@ -896,8 +896,8 @@ def keep_awake():
def get_new_id(prefix, folder, check_list=None):
- """ Return unique prefixed admin identifier within folder
- optionally making sure that id is not in the check_list.
+ """Return unique prefixed admin identifier within folder
+ optionally making sure that id is not in the check_list.
"""
for n in range(100):
try:
@@ -1020,8 +1020,8 @@ def check_repair_request():
def check_all_tasks():
- """ Check every task and restart safe ones, else restart program
- Return True when everything is under control
+ """Check every task and restart safe ones, else restart program
+ Return True when everything is under control
"""
if __SHUTTING_DOWN__ or not __INITIALIZED__:
return True
diff --git a/sabnzbd/api.py b/sabnzbd/api.py
index 137a93c..62bf5b4 100644
--- a/sabnzbd/api.py
+++ b/sabnzbd/api.py
@@ -1003,10 +1003,10 @@ def api_level(cmd, name):
def report(output, error=None, keyword="value", data=None):
- """ Report message in json, xml or plain text
- If error is set, only an status/error report is made.
- If no error and no data, only a status report is made.
- Else, a data report is made (optional 'keyword' for outer XML section).
+ """Report message in json, xml or plain text
+ If error is set, only an status/error report is made.
+ If no error and no data, only a status report is made.
+ Else, a data report is made (optional 'keyword' for outer XML section).
"""
if output == "json":
content = "application/json;charset=UTF-8"
@@ -1050,10 +1050,10 @@ def report(output, error=None, keyword="value", data=None):
class xml_factory:
- """ Recursive xml string maker. Feed it a mixed tuple/dict/item object and will output into an xml string
- Current limitations:
- In Two tiered lists hard-coded name of "item": -
- In Three tiered lists hard-coded name of "slot":
+ """Recursive xml string maker. Feed it a mixed tuple/dict/item object and will output into an xml string
+ Current limitations:
+ In Two tiered lists hard-coded name of "item": -
+ In Three tiered lists hard-coded name of "slot":
"""
def __init__(self):
@@ -1417,8 +1417,7 @@ def fast_queue():
def build_file_list(nzo_id):
- """ Build file lists for specified job
- """
+ """Build file lists for specified job"""
jobs = []
nzo = NzbQueue.do.get_nzo(nzo_id)
if nzo:
@@ -1549,9 +1548,9 @@ _SKIN_CACHE = {} # Stores pre-translated acronyms
def Ttemplate(txt):
- """ Translation function for Skin texts
- This special is to be used in interface.py for template processing
- to be passed for the $T function: so { ..., 'T' : Ttemplate, ...}
+ """Translation function for Skin texts
+ This special is to be used in interface.py for template processing
+ to be passed for the $T function: so { ..., 'T' : Ttemplate, ...}
"""
global _SKIN_CACHE
if txt in _SKIN_CACHE:
@@ -1897,8 +1896,8 @@ def list_scripts(default=False, none=True):
def list_cats(default=True):
- """ Return list of (ordered) categories,
- when default==False use '*' for Default category
+ """Return list of (ordered) categories,
+ when default==False use '*' for Default category
"""
lst = [cat["name"] for cat in config.get_ordered_categories()]
if default:
diff --git a/sabnzbd/assembler.py b/sabnzbd/assembler.py
index 6e85600..d29273b 100644
--- a/sabnzbd/assembler.py
+++ b/sabnzbd/assembler.py
@@ -189,9 +189,9 @@ class Assembler(Thread):
@staticmethod
def assemble(nzf, file_done):
- """ Assemble a NZF from its table of articles
- 1) Partial write: write what we have
- 2) Nothing written before: write all
+ """Assemble a NZF from its table of articles
+ 1) Partial write: write what we have
+ 2) Nothing written before: write all
"""
# New hash-object needed?
if not nzf.md5:
@@ -233,8 +233,8 @@ class Assembler(Thread):
def file_has_articles(nzf):
- """ Do a quick check to see if any articles are present for this file.
- Destructive: only to be used to differentiate between unknown encoding and no articles.
+ """Do a quick check to see if any articles are present for this file.
+ Destructive: only to be used to differentiate between unknown encoding and no articles.
"""
has = False
for article in nzf.decodetable:
@@ -421,8 +421,8 @@ def rating_filtered(rating, filename, abort):
def remove_warning_label(msg):
- """ Standardize errors by removing obsolete
- "WARNING:" part in all languages """
+ """Standardize errors by removing obsolete
+ "WARNING:" part in all languages"""
if ":" in msg:
return msg.split(":")[1].strip()
return msg
diff --git a/sabnzbd/bpsmeter.py b/sabnzbd/bpsmeter.py
index 47f8f01..73de361 100644
--- a/sabnzbd/bpsmeter.py
+++ b/sabnzbd/bpsmeter.py
@@ -321,9 +321,9 @@ class BPSMeter:
return self.bps_list[::refresh_rate]
def get_stable_speed(self, timespan=10):
- """ See if there is a stable speed the last <timespan> seconds
- None: indicates it can't determine yet
- False: the speed was not stable during <timespan>
+ """See if there is a stable speed the last <timespan> seconds
+ None: indicates it can't determine yet
+ False: the speed was not stable during <timespan>
"""
if len(self.bps_list) < timespan:
return None
@@ -347,8 +347,8 @@ class BPSMeter:
return None
def reset_quota(self, force=False):
- """ Check if it's time to reset the quota, optionally resuming
- Return True, when still paused
+ """Check if it's time to reset the quota, optionally resuming
+ Return True, when still paused
"""
if force or (self.have_quota and time.time() > (self.q_time - 50)):
self.quota = self.left = cfg.quota_size.get_float()
diff --git a/sabnzbd/config.py b/sabnzbd/config.py
index b69c6b0..ba46781 100644
--- a/sabnzbd/config.py
+++ b/sabnzbd/config.py
@@ -54,13 +54,13 @@ class Option:
""" Basic option class, basic fields """
def __init__(self, section, keyword, default_val=None, add=True, protect=False):
- """ Basic option
- `section` : single section or comma-separated list of sections
- a list will be a hierarchy: "foo, bar" --> [foo][[bar]]
- `keyword` : keyword in the (last) section
- `default_val` : value returned when no value has been set
- `callback` : procedure to call when value is successfully changed
- `protect` : Do not allow setting via the API (specifically set_dict)
+ """Basic option
+ `section` : single section or comma-separated list of sections
+ a list will be a hierarchy: "foo, bar" --> [foo][[bar]]
+ `keyword` : keyword in the (last) section
+ `default_val` : value returned when no value has been set
+ `callback` : procedure to call when value is successfully changed
+ `protect` : Do not allow setting via the API (specifically set_dict)
"""
self.__sections = section.split(",")
self.__keyword = keyword
@@ -230,10 +230,10 @@ class OptionDir(Option):
self.__root = root
def set(self, value, create=False):
- """ Set new dir value, validate and create if needed
- Return None when directory is accepted
- Return error-string when not accepted, value will not be changed
- 'create' means try to create (but don't set permanent create flag)
+ """Set new dir value, validate and create if needed
+ Return None when directory is accepted
+ Return error-string when not accepted, value will not be changed
+ 'create' means try to create (but don't set permanent create flag)
"""
error = None
if value and (value != self.get() or create):
@@ -533,8 +533,8 @@ class OptionFilters(Option):
self.set(lst)
def update(self, pos, value):
- """ Update filter 'pos' definition, value is a list
- Append if 'pos' outside list
+ """Update filter 'pos' definition, value is a list
+ Append if 'pos' outside list
"""
lst = self.get()
try:
@@ -637,8 +637,8 @@ class ConfigRSS:
def get_dconfig(section, keyword, nested=False):
- """ Return a config values dictionary,
- Single item or slices based on 'section', 'keyword'
+ """Return a config values dictionary,
+ Single item or slices based on 'section', 'keyword'
"""
data = {}
if not section:
@@ -712,15 +712,15 @@ def delete(section, keyword):
##############################################################################
@synchronized(SAVE_CONFIG_LOCK)
def read_config(path):
- """ Read the complete INI file and check its version number
- if OK, pass values to config-database
+ """Read the complete INI file and check its version number
+ if OK, pass values to config-database
"""
return _read_config(path)
def _read_config(path, try_backup=False):
- """ Read the complete INI file and check its version number
- if OK, pass values to config-database
+ """Read the complete INI file and check its version number
+ if OK, pass values to config-database
"""
global CFG, database, modified
@@ -873,8 +873,8 @@ def save_config(force=False):
def define_servers():
- """ Define servers listed in the Setup file
- return a list of ConfigServer instances
+ """Define servers listed in the Setup file
+ return a list of ConfigServer instances
"""
global CFG
try:
@@ -901,8 +901,8 @@ def get_servers():
def define_categories():
- """ Define categories listed in the Setup file
- return a list of ConfigCat instances
+ """Define categories listed in the Setup file
+ return a list of ConfigCat instances
"""
global CFG, categories
try:
@@ -913,9 +913,9 @@ def define_categories():
def get_categories(cat=0):
- """ Return link to categories section.
- This section will always contain special category '*'
- When 'cat' is given, a link to that category or to '*' is returned
+ """Return link to categories section.
+ This section will always contain special category '*'
+ When 'cat' is given, a link to that category or to '*' is returned
"""
global database
if "categories" not in database:
@@ -942,8 +942,8 @@ def get_categories(cat=0):
def get_ordered_categories():
- """ Return list-copy of categories section that's ordered
- by user's ordering including Default-category
+ """Return list-copy of categories section that's ordered
+ by user's ordering including Default-category
"""
database_cats = get_categories()
@@ -961,8 +961,8 @@ def get_ordered_categories():
def define_rss():
- """ Define rss-feeds listed in the Setup file
- return a list of ConfigRSS instances
+ """Define rss-feeds listed in the Setup file
+ return a list of ConfigRSS instances
"""
global CFG
try:
@@ -1033,8 +1033,8 @@ def encode_password(pw):
def decode_password(pw, name):
- """ Decode hexadecimal encoded password
- but only decode when prefixed
+ """Decode hexadecimal encoded password
+ but only decode when prefixed
"""
decPW = ""
if pw and pw.startswith(__PW_PREFIX):
@@ -1102,8 +1102,8 @@ def validate_no_unc(root, value, default):
def validate_safedir(root, value, default):
- """ Allow only when queues are empty and no UNC
- On Windows path should be small
+ """Allow only when queues are empty and no UNC
+ On Windows path should be small
"""
if sabnzbd.WIN32 and value and len(real_path(root, value)) >= MAX_WIN_DFOLDER:
return T("Error: Path length should be below %s.") % MAX_WIN_DFOLDER, None
@@ -1122,8 +1122,8 @@ def validate_notempty(root, value, default):
def validate_single_tag(value):
- """ Don't split single indexer tags like "TV > HD"
- into ['TV', '>', 'HD']
+ """Don't split single indexer tags like "TV > HD"
+ into ['TV', '>', 'HD']
"""
if len(value) == 3:
if value[1] == ">":
diff --git a/sabnzbd/database.py b/sabnzbd/database.py
index 0ab0ac2..32ca1a7 100644
--- a/sabnzbd/database.py
+++ b/sabnzbd/database.py
@@ -61,10 +61,10 @@ def convert_search(search):
class HistoryDB:
- """ Class to access the History database
- Each class-instance will create an access channel that
- can be used in one thread.
- Each thread needs its own class-instance!
+ """Class to access the History database
+ Each class-instance will create an access channel that
+ can be used in one thread.
+ Each thread needs its own class-instance!
"""
# These class attributes will be accessed directly because
@@ -363,8 +363,8 @@ class HistoryDB:
return total > 0
def get_history_size(self):
- """ Returns the total size of the history and
- amounts downloaded in the last month and week
+ """Returns the total size of the history and
+ amounts downloaded in the last month and week
"""
# Total Size of the history
total = 0
@@ -526,8 +526,8 @@ def build_history_info(
def unpack_history_info(item):
- """ Expands the single line stage_log from the DB
- into a python dictionary for use in the history display
+ """Expands the single line stage_log from the DB
+ into a python dictionary for use in the history display
"""
# Stage Name is separated by ::: stage lines by ; and stages by \r\n
lst = item["stage_log"]
diff --git a/sabnzbd/deobfuscate_filenames.py b/sabnzbd/deobfuscate_filenames.py
index 72e1a41..b7f4254 100755
--- a/sabnzbd/deobfuscate_filenames.py
+++ b/sabnzbd/deobfuscate_filenames.py
@@ -83,8 +83,8 @@ def entropy(string):
def is_probably_obfuscated(myinputfilename):
- """ Returns boolean if filename is probably obfuscated
- myinputfilename can be a plain file name, or a full path """
+ """Returns boolean if filename is probably obfuscated
+ myinputfilename can be a plain file name, or a full path"""
# Find filebasename
path, filename = os.path.split(myinputfilename)
diff --git a/sabnzbd/directunpacker.py b/sabnzbd/directunpacker.py
index 4a1a6ec..d5793ae 100644
--- a/sabnzbd/directunpacker.py
+++ b/sabnzbd/directunpacker.py
@@ -323,9 +323,9 @@ class DirectUnpacker(threading.Thread):
self.killed = True
def have_next_volume(self):
- """ Check if next volume of set is available, start
- from the end of the list where latest completed files are
- Make sure that files are 100% written to disk by checking md5sum
+ """Check if next volume of set is available, start
+ from the end of the list where latest completed files are
+ Make sure that files are 100% written to disk by checking md5sum
"""
for nzf_search in reversed(self.nzo.finished_files):
if nzf_search.setname == self.cur_setname and nzf_search.vol == (self.cur_volume + 1) and nzf_search.md5sum:
@@ -333,8 +333,8 @@ class DirectUnpacker(threading.Thread):
return False
def wait_for_next_volume(self):
- """ Wait for the correct volume to appear
- But stop if it was killed or the NZB is done
+ """Wait for the correct volume to appear
+ But stop if it was killed or the NZB is done
"""
while not self.have_next_volume() and not self.killed and self.nzo.files:
with self.next_file_lock:
@@ -492,8 +492,8 @@ class DirectUnpacker(threading.Thread):
def analyze_rar_filename(filename):
- """ Extract volume number and setname from rar-filenames
- Both ".part01.rar" and ".r01"
+ """Extract volume number and setname from rar-filenames
+ Both ".part01.rar" and ".r01"
"""
m = RAR_NR.search(filename)
if m:
@@ -516,8 +516,8 @@ def abort_all():
def test_disk_performance():
- """ Test the incomplete-dir performance and enable
- Direct Unpack if good enough (> 40MB/s)
+ """Test the incomplete-dir performance and enable
+ Direct Unpack if good enough (> 40MB/s)
"""
if diskspeedmeasure(sabnzbd.cfg.download_dir.get_path()) > 40:
cfg.direct_unpack.set(True)
diff --git a/sabnzbd/dirscanner.py b/sabnzbd/dirscanner.py
index 3e55be1..4eba481 100644
--- a/sabnzbd/dirscanner.py
+++ b/sabnzbd/dirscanner.py
@@ -59,10 +59,10 @@ def clean_file_list(inp_list, folder, files):
class DirScanner(threading.Thread):
- """ Thread that periodically scans a given directory and picks up any
- valid NZB, NZB.GZ ZIP-with-only-NZB and even NZB.GZ named as .NZB
- Candidates which turned out wrong, will be remembered and skipped in
- subsequent scans, unless changed.
+ """Thread that periodically scans a given directory and picks up any
+ valid NZB, NZB.GZ ZIP-with-only-NZB and even NZB.GZ named as .NZB
+ Candidates which turned out wrong, will be remembered and skipped in
+ subsequent scans, unless changed.
"""
do = None # Access to instance of DirScanner
diff --git a/sabnzbd/downloader.py b/sabnzbd/downloader.py
index 485cf98..43411aa 100644
--- a/sabnzbd/downloader.py
+++ b/sabnzbd/downloader.py
@@ -110,12 +110,12 @@ class Server:
@property
def hostip(self):
- """ In case a server still has active connections, we use the same IP again
- If new connection then based on value of load_balancing() and self.info:
- 0 - return the first entry, so all threads use the same IP
- 1 - and self.info has more than 1 entry (read: IP address): Return a random entry from the possible IPs
- 2 - and self.info has more than 1 entry (read: IP address): Return the quickest IP based on the happyeyeballs algorithm
- In case of problems: return the host name itself
+ """In case a server still has active connections, we use the same IP again
+ If new connection then based on value of load_balancing() and self.info:
+ 0 - return the first entry, so all threads use the same IP
+ 1 - and self.info has more than 1 entry (read: IP address): Return a random entry from the possible IPs
+ 2 - and self.info has more than 1 entry (read: IP address): Return the quickest IP based on the happyeyeballs algorithm
+ In case of problems: return the host name itself
"""
# Check if already a successful ongoing connection
if self.busy_threads and self.busy_threads[0].nntp:
@@ -213,9 +213,9 @@ class Downloader(Thread):
Downloader.do = self
def init_server(self, oldserver, newserver):
- """ Setup or re-setup single server
- When oldserver is defined and in use, delay startup.
- Note that the server names are "host:port" strings!
+ """Setup or re-setup single server
+ When oldserver is defined and in use, delay startup.
+ Note that the server names are "host:port" strings!
"""
create = False
@@ -314,9 +314,9 @@ class Downloader(Thread):
self.force_disconnect = True
def limit_speed(self, value):
- """ Set the actual download speed in Bytes/sec
- When 'value' ends with a '%' sign or is within 1-100, it is interpreted as a pecentage of the maximum bandwidth
- When no '%' is found, it is interpreted as an absolute speed (including KMGT notation).
+ """Set the actual download speed in Bytes/sec
+ When 'value' ends with a '%' sign or is within 1-100, it is interpreted as a percentage of the maximum bandwidth
+ When no '%' is found, it is interpreted as an absolute speed (including KMGT notation).
"""
if value:
mx = cfg.bandwidth_max.get_int()
@@ -363,8 +363,8 @@ class Downloader(Thread):
return True
def highest_server(self, me):
- """ Return True when this server has the highest priority of the active ones
- 0 is the highest priority
+ """Return True when this server has the highest priority of the active ones
+ 0 is the highest priority
"""
for server in self.servers:
if server is not me and server.active and server.priority < me.priority:
@@ -406,8 +406,8 @@ class Downloader(Thread):
sabnzbd.nzbqueue.NzbQueue.do.reset_all_try_lists()
def decode(self, article, raw_data):
- """ Decode article and check the status of
- the decoder and the assembler
+ """Decode article and check the status of
+ the decoder and the assembler
"""
# Handle broken articles directly
if not raw_data:
@@ -928,8 +928,8 @@ class Downloader(Thread):
self.init_server(server.id, server.id)
def update_server(self, oldserver, newserver):
- """ Update the server and make sure we trigger
- the update in the loop to do housekeeping """
+ """Update the server and make sure we trigger
+ the update in the loop to do housekeeping"""
self.init_server(oldserver, newserver)
self.wakeup()
diff --git a/sabnzbd/encoding.py b/sabnzbd/encoding.py
index df31003..b479ce9 100644
--- a/sabnzbd/encoding.py
+++ b/sabnzbd/encoding.py
@@ -42,9 +42,9 @@ def ubtou(str_in):
def platform_btou(str_in):
- """ Return Unicode, if not already Unicode, decode with locale encoding.
- NOTE: Used for POpen because universal_newlines/text parameter doesn't
- always work! We cannot use encoding-parameter because it's Python 3.7+
+ """Return Unicode, if not already Unicode, decode with locale encoding.
+ NOTE: Used for POpen because universal_newlines/text parameter doesn't
+ always work! We cannot use encoding-parameter because it's Python 3.7+
"""
if isinstance(str_in, bytes):
try:
@@ -56,10 +56,10 @@ def platform_btou(str_in):
def correct_unknown_encoding(str_or_bytes_in):
- """ Files created on Windows but unpacked/repaired on
- linux can result in invalid filenames. Try to fix this
- encoding by going to bytes and then back to unicode again.
- Last resort we use chardet package
+ """Files created on Windows but unpacked/repaired on
+ linux can result in invalid filenames. Try to fix this
+ encoding by going to bytes and then back to unicode again.
+ As a last resort we use the chardet package
"""
# If already string, back to bytes
if not isinstance(str_or_bytes_in, bytes):
diff --git a/sabnzbd/filesystem.py b/sabnzbd/filesystem.py
index 7c6842f..917fa10 100644
--- a/sabnzbd/filesystem.py
+++ b/sabnzbd/filesystem.py
@@ -93,9 +93,9 @@ _DEVICES = (
def replace_win_devices(name):
- """ Remove reserved Windows device names from a name.
- aux.txt ==> _aux.txt
- txt.aux ==> txt.aux
+ """Remove reserved Windows device names from a name.
+ aux.txt ==> _aux.txt
+ txt.aux ==> txt.aux
"""
if name:
lname = name.lower()
@@ -112,8 +112,8 @@ def replace_win_devices(name):
def has_win_device(p):
- """ Return True if filename part contains forbidden name
- Before and after sanitizing
+ """Return True if filename part contains forbidden name
+ Before and after sanitizing
"""
p = os.path.split(p)[1].lower()
for dev in _DEVICES:
@@ -129,8 +129,8 @@ CH_LEGAL_WIN = "++{}!@#'+-"
def sanitize_filename(name):
- """ Return filename with illegal chars converted to legal ones
- and with the par2 extension always in lowercase
+ """Return filename with illegal chars converted to legal ones
+ and with the par2 extension always in lowercase
"""
if not name:
return name
@@ -168,8 +168,8 @@ def sanitize_filename(name):
def sanitize_foldername(name):
- """ Return foldername with dodgy chars converted to safe ones
- Remove any leading and trailing dot and space characters
+ """Return foldername with dodgy chars converted to safe ones
+ Remove any leading and trailing dot and space characters
"""
if not name:
return name
@@ -233,8 +233,7 @@ def sanitize_and_trim_path(path):
def sanitize_files_in_folder(folder):
- """ Sanitize each file in the folder, return list of new names
- """
+ """Sanitize each file in the folder, return list of new names"""
lst = []
for root, _, files in os.walk(folder):
for file_ in files:
@@ -251,16 +250,16 @@ def sanitize_files_in_folder(folder):
def is_obfuscated_filename(filename):
- """ Check if this file has an extension, if not, it's
- probably obfuscated and we don't use it
+ """Check if this file has an extension, if not, it's
+ probably obfuscated and we don't use it
"""
return len(get_ext(filename)) < 2
def real_path(loc, path):
- """ When 'path' is relative, return normalized join of 'loc' and 'path'
- When 'path' is absolute, return normalized path
- A path starting with ~ will be located in the user's Home folder
+ """When 'path' is relative, return normalized join of 'loc' and 'path'
+ When 'path' is absolute, return normalized path
+ A path starting with ~ will be located in the user's Home folder
"""
# The Windows part is a bit convoluted because
# C: and C:\ are 2 different things
@@ -292,12 +291,12 @@ def real_path(loc, path):
def create_real_path(name, loc, path, umask=False, writable=True):
- """ When 'path' is relative, create join of 'loc' and 'path'
- When 'path' is absolute, create normalized path
- 'name' is used for logging.
- Optional 'umask' will be applied.
- 'writable' means that an existing folder should be writable
- Returns ('success', 'full path', 'error_msg')
+ """When 'path' is relative, create join of 'loc' and 'path'
+ When 'path' is absolute, create normalized path
+ 'name' is used for logging.
+ Optional 'umask' will be applied.
+ 'writable' means that an existing folder should be writable
+ Returns ('success', 'full path', 'error_msg')
"""
if path:
my_dir = real_path(loc, path)
@@ -320,9 +319,9 @@ def create_real_path(name, loc, path, umask=False, writable=True):
def same_file(a, b):
- """ Return 0 if A and B have nothing in common
- return 1 if A and B are actually the same path
- return 2 if B is a subfolder of A
+ """Return 0 if A and B have nothing in common
+ return 1 if A and B are actually the same path
+ return 2 if B is a subfolder of A
"""
if sabnzbd.WIN32 or sabnzbd.DARWIN:
a = clip_path(a.lower())
@@ -349,7 +348,7 @@ def same_file(a, b):
def is_archive(path):
- """ Check if file in path is an ZIP, RAR or 7z file
+ """Check if file in path is an ZIP, RAR or 7z file
:param path: path to file
:return: (zf, status, expected_extension)
status: -1==Error/Retry, 0==OK, 1==Ignore
@@ -383,8 +382,8 @@ def is_archive(path):
def check_mount(path):
- """ Return False if volume isn't mounted on Linux or OSX
- Retry 6 times with an interval of 1 sec.
+ """Return False if volume isn't mounted on Linux or OSX
+ Retry 6 times with an interval of 1 sec.
"""
if sabnzbd.DARWIN:
m = re.search(r"^(/Volumes/[^/]+)", path, re.I)
@@ -403,8 +402,8 @@ def check_mount(path):
def safe_fnmatch(f, pattern):
- """ fnmatch will fail if the pattern contains any of it's
- key characters, like [, ] or !.
+ """fnmatch will fail if the pattern contains any of its
+ key characters, like [, ] or !.
"""
try:
return fnmatch.fnmatch(f, pattern)
@@ -440,9 +439,9 @@ def trim_win_path(path):
def fix_unix_encoding(folder):
- """ Fix bad name encoding for Unix systems
- This happens for example when files are created
- on Windows but unpacked/repaired on linux
+ """Fix bad name encoding for Unix systems
+ This happens for example when files are created
+ on Windows but unpacked/repaired on linux
"""
if not sabnzbd.WIN32 and not sabnzbd.DARWIN:
for root, dirs, files in os.walk(folder):
@@ -471,8 +470,8 @@ def make_script_path(script):
def get_admin_path(name, future):
- """ Return news-style full path to job-admin folder of names job
- or else the old cache path
+ """Return news-style full path to job-admin folder of names job
+ or else the old cache path
"""
if future:
return os.path.join(sabnzbd.cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)
@@ -524,9 +523,9 @@ def set_permissions(path, recursive=True):
def userxbit(filename):
- """ Returns boolean if the x-bit for user is set on the given file.
- This is a workaround: os.access(filename, os.X_OK) does not work
- on certain mounted file systems. Does not work at all on Windows.
+ """Returns boolean if the x-bit for user is set on the given file.
+ This is a workaround: os.access(filename, os.X_OK) does not work
+ on certain mounted file systems. Does not work at all on Windows.
"""
# rwx rwx rwx
# 876 543 210 # we want bit 6 from the right, counting from 0
@@ -564,9 +563,9 @@ DIR_LOCK = threading.RLock()
@synchronized(DIR_LOCK)
def create_all_dirs(path, apply_umask=False):
- """ Create all required path elements and set umask on all
- The umask argument is ignored on Windows
- Return path if elements could be made or exists
+ """Create all required path elements and set umask on all
+ The umask argument is ignored on Windows
+ Return path if elements could be made or already exist
"""
try:
logging.info("Creating directories: %s", path)
@@ -619,8 +618,8 @@ def get_unique_path(dirpath, n=0, create_dir=True):
@synchronized(DIR_LOCK)
def get_unique_filename(path):
- """ Check if path is unique.
- If not, add number like: "/path/name.NUM.ext".
+ """Check if path is unique.
+ If not, add number like: "/path/name.NUM.ext".
"""
num = 1
new_path, fname = os.path.split(path)
@@ -648,8 +647,8 @@ def listdir_full(input_dir, recursive=True):
@synchronized(DIR_LOCK)
def move_to_path(path, new_path):
- """ Move a file to a new path, optionally give unique filename
- Return (ok, new_path)
+ """Move a file to a new path, optionally give unique filename
+ Return (ok, new_path)
"""
ok = True
overwrite = sabnzbd.cfg.overwrite_files()
diff --git a/sabnzbd/getipaddress.py b/sabnzbd/getipaddress.py
index 2213285..3ee2b7f 100644
--- a/sabnzbd/getipaddress.py
+++ b/sabnzbd/getipaddress.py
@@ -76,9 +76,9 @@ def localipv4():
def publicipv4():
- """ Because of dual IPv4/IPv6 clients, finding the
- public ipv4 needs special attention, meaning forcing
- IPv4 connections, and not allowing IPv6 connections
+ """Because of dual IPv4/IPv6 clients, finding the
+ public ipv4 needs special attention, meaning forcing
+ IPv4 connections, and not allowing IPv6 connections
"""
public_ipv4 = None
try:
diff --git a/sabnzbd/interface.py b/sabnzbd/interface.py
index b2fb580..fe14650 100644
--- a/sabnzbd/interface.py
+++ b/sabnzbd/interface.py
@@ -141,12 +141,12 @@ def secured_expose(wrap_func=None, check_configlock=False, check_api_key=False):
def check_access(access_type=4):
- """ Check if external address is allowed given access_type:
- 1=nzb
- 2=api
- 3=full_api
- 4=webui
- 5=webui with login for external
+ """Check if external address is allowed given access_type:
+ 1=nzb
+ 2=api
+ 3=full_api
+ 4=webui
+ 5=webui with login for external
"""
referrer = cherrypy.request.remote.ip
@@ -162,9 +162,9 @@ def check_access(access_type=4):
def check_hostname():
- """ Check if hostname is allowed, to mitigate DNS-rebinding attack.
- Similar to CVE-2019-5702, we need to add protection even
- if only allowed to be accessed via localhost.
+ """Check if hostname is allowed, to mitigate DNS-rebinding attack.
+ Similar to CVE-2019-5702, we need to add protection even
+ if only allowed to be accessed via localhost.
"""
# If login is enabled, no API-key can be deducted
if cfg.username() and cfg.password():
@@ -202,10 +202,10 @@ COOKIE_SECRET = str(randint(1000, 100000) * os.getpid())
def set_login_cookie(remove=False, remember_me=False):
- """ We try to set a cookie as unique as possible
- to the current user. Based on it's IP and the
- current process ID of the SAB instance and a random
- number, so cookies cannot be re-used
+ """We try to set a cookie as unique as possible
+ to the current user. Based on its IP and the
+ current process ID of the SAB instance and a random
+ number, so cookies cannot be re-used
"""
salt = randint(1, 1000)
cookie_str = utob(str(salt) + cherrypy.request.remote.ip + COOKIE_SECRET)
@@ -268,15 +268,18 @@ def set_auth(conf):
}
)
conf.update(
- {"/api": {"tools.auth_basic.on": False}, "%s/api" % cfg.url_base(): {"tools.auth_basic.on": False},}
+ {
+ "/api": {"tools.auth_basic.on": False},
+ "%s/api" % cfg.url_base(): {"tools.auth_basic.on": False},
+ }
)
else:
conf.update({"tools.auth_basic.on": False})
def check_apikey(kwargs):
- """ Check API-key or NZB-key
- Return None when OK, otherwise an error message
+ """Check API-key or NZB-key
+ Return None when OK, otherwise an error message
"""
mode = kwargs.get("mode", "")
name = kwargs.get("name", "")
@@ -919,8 +922,8 @@ class QueuePage:
@secured_expose(check_api_key=True)
def change_queue_complete_action(self, **kwargs):
- """ Action or script to be performed once the queue has been completed
- Scripts are prefixed with 'script_'
+ """Action or script to be performed once the queue has been completed
+ Scripts are prefixed with 'script_'
"""
action = kwargs.get("action")
sabnzbd.change_queue_complete_action(action)
@@ -1821,8 +1824,8 @@ class ConfigRss:
@secured_expose(check_api_key=True, check_configlock=True)
def upd_rss_feed(self, **kwargs):
- """ Update Feed level attributes,
- legacy version: ignores 'enable' parameter
+ """Update Feed level attributes,
+ legacy version: ignores 'enable' parameter
"""
if kwargs.get("enable") is not None:
del kwargs["enable"]
diff --git a/sabnzbd/lang.py b/sabnzbd/lang.py
index aac3296..1158a06 100644
--- a/sabnzbd/lang.py
+++ b/sabnzbd/lang.py
@@ -62,10 +62,10 @@ def set_language(language=None):
def list_languages():
- """ Return sorted list of (lang-code, lang-string) pairs,
- representing the available languages.
- When any language file is found, the default tuple ('en', 'English')
- will be included. Otherwise an empty list is returned.
+ """Return sorted list of (lang-code, lang-string) pairs,
+ representing the available languages.
+ When any language file is found, the default tuple ('en', 'English')
+ will be included. Otherwise an empty list is returned.
"""
# Find all the MO files.
lst = []
diff --git a/sabnzbd/misc.py b/sabnzbd/misc.py
index b38ac46..4d0a7b9 100644
--- a/sabnzbd/misc.py
+++ b/sabnzbd/misc.py
@@ -71,9 +71,9 @@ def time_format(fmt):
def calc_age(date, trans=False):
- """ Calculate the age difference between now and date.
- Value is returned as either days, hours, or minutes.
- When 'trans' is True, time symbols will be translated.
+ """Calculate the age difference between now and date.
+ Value is returned as either days, hours, or minutes.
+ When 'trans' is True, time symbols will be translated.
"""
if trans:
d = T("d") # : Single letter abbreviation of day
@@ -146,9 +146,9 @@ def name_to_cat(fname, cat=None):
def cat_to_opts(cat, pp=None, script=None, priority=None):
- """ Derive options from category, if options not already defined.
- Specified options have priority over category-options.
- If no valid category is given, special category '*' will supply default values
+ """Derive options from category, if options not already defined.
+ Specified options have priority over category-options.
+ If no valid category is given, special category '*' will supply default values
"""
def_cat = config.get_categories("*")
cat = safe_lower(cat)
@@ -230,10 +230,10 @@ def wildcard_to_re(text):
def cat_convert(cat):
- """ Convert indexer's category/group-name to user categories.
- If no match found, but indexer-cat equals user-cat, then return user-cat
- If no match found, but the indexer-cat starts with the user-cat, return user-cat
- If no match found, return None
+ """Convert indexer's category/group-name to user categories.
+ If no match found, but indexer-cat equals user-cat, then return user-cat
+ If no match found, but the indexer-cat starts with the user-cat, return user-cat
+ If no match found, return None
"""
if cat and cat.lower() != "none":
cats = config.get_ordered_categories()
@@ -270,8 +270,8 @@ def cat_convert(cat):
def windows_variant():
- """ Determine Windows variant
- Return vista_plus, x64
+ """Determine Windows variant
+ Return vista_plus, x64
"""
from win32api import GetVersionEx
from win32con import VER_PLATFORM_WIN32_NT
@@ -375,26 +375,26 @@ def convert_version(text):
def check_latest_version():
- """ Do an online check for the latest version
-
- Perform an online version check
- Syntax of online version file:
- <current-final-release>
- <url-of-current-final-release>
- <latest-alpha/beta-or-rc>
- <url-of-latest-alpha/beta-or-rc-release>
- The latter two lines are only present when an alpha/beta/rc is available.
- Formula for the version numbers (line 1 and 3).
- <major>.<minor>.<bugfix>[rc|beta|alpha]<cand>
-
- The <cand> value for a final version is assumned to be 99.
- The <cand> value for the beta/rc version is 1..98, with RC getting
- a boost of 80 and Beta of 40.
- This is done to signal alpha/beta/rc users of availability of the final
- version (which is implicitly 99).
- People will only be informed to upgrade to a higher alpha/beta/rc version, if
- they are already using an alpha/beta/rc.
- RC's are valued higher than Beta's, which are valued higher than Alpha's.
+ """Do an online check for the latest version
+
+ Perform an online version check
+ Syntax of online version file:
+ <current-final-release>
+ <url-of-current-final-release>
+ <latest-alpha/beta-or-rc>
+ <url-of-latest-alpha/beta-or-rc-release>
+ The latter two lines are only present when an alpha/beta/rc is available.
+ Formula for the version numbers (line 1 and 3).
+ <major>.<minor>.<bugfix>[rc|beta|alpha]<cand>
+
+ The <cand> value for a final version is assumed to be 99.
+ The <cand> value for the beta/rc version is 1..98, with RC getting
+ a boost of 80 and Beta of 40.
+ This is done to signal alpha/beta/rc users of availability of the final
+ version (which is implicitly 99).
+ People will only be informed to upgrade to a higher alpha/beta/rc version, if
+ they are already using an alpha/beta/rc.
+ RC's are valued higher than Beta's, which are valued higher than Alpha's.
"""
if not cfg.version_check():
@@ -503,8 +503,8 @@ def from_units(val):
def to_units(val, postfix=""):
- """ Convert number to K/M/G/T/P notation
- Show single decimal for M and higher
+ """Convert number to K/M/G/T/P notation
+ Show single decimal for M and higher
"""
dec_limit = 1
if val < 0:
@@ -533,8 +533,8 @@ def to_units(val, postfix=""):
def caller_name(skip=2):
"""Get a name of a caller in the format module.method
- Originally used: https://gist.github.com/techtonik/2151727
- Adapted for speed by using sys calls directly
+ Originally used: https://gist.github.com/techtonik/2151727
+ Adapted for speed by using sys calls directly
"""
# Only do the tracing on Debug (function is always called)
if cfg.log_level() != 2:
@@ -591,9 +591,9 @@ def split_host(srv):
def get_cache_limit():
- """ Depending on OS, calculate cache limits.
- In ArticleCache it will make sure we stay
- within system limits for 32/64 bit
+ """Depending on OS, calculate cache limits.
+ In ArticleCache it will make sure we stay
+ within system limits for 32/64 bit
"""
# Calculate, if possible
try:
@@ -890,8 +890,8 @@ def ip_extract():
def get_base_url(url):
- """ Return only the true root domain for the favicon, so api.oznzb.com -> oznzb.com
- But also api.althub.co.za -> althub.co.za
+ """Return only the true root domain for the favicon, so api.oznzb.com -> oznzb.com
+ But also api.althub.co.za -> althub.co.za
"""
url_host = urllib.parse.urlparse(url).hostname
if url_host:
@@ -927,10 +927,10 @@ def nntp_to_msg(text):
def build_and_run_command(command, flatten_command=False, **kwargs):
- """ Builds and then runs command with nessecary flags and optional
- IONice and Nice commands. Optional Popen arguments can be supplied.
- On Windows we need to run our own list2cmdline for Unrar.
- Returns the Popen-instance.
+ """Builds and then runs command with necessary flags and optional
+ IONice and Nice commands. Optional Popen arguments can be supplied.
+ On Windows we need to run our own list2cmdline for Unrar.
+ Returns the Popen-instance.
"""
# command[0] should be set, and thus not None
if not command[0]:
diff --git a/sabnzbd/newsunpack.py b/sabnzbd/newsunpack.py
index ec45f90..83252f3 100644
--- a/sabnzbd/newsunpack.py
+++ b/sabnzbd/newsunpack.py
@@ -115,7 +115,14 @@ def find_programs(curdir):
if not sabnzbd.newsunpack.PAR2_COMMAND:
sabnzbd.newsunpack.PAR2_COMMAND = find_on_path("par2")
if not sabnzbd.newsunpack.RAR_COMMAND:
- sabnzbd.newsunpack.RAR_COMMAND = find_on_path(("unrar", "rar", "unrar3", "rar3",))
+ sabnzbd.newsunpack.RAR_COMMAND = find_on_path(
+ (
+ "unrar",
+ "rar",
+ "unrar3",
+ "rar3",
+ )
+ )
sabnzbd.newsunpack.NICE_COMMAND = find_on_path("nice")
sabnzbd.newsunpack.IONICE_COMMAND = find_on_path("ionice")
if not sabnzbd.newsunpack.ZIP_COMMAND:
@@ -374,8 +381,8 @@ def get_seq_number(name):
def file_join(nzo, workdir, workdir_complete, delete, joinables):
- """ Join and joinable files in 'workdir' to 'workdir_complete' and
- when successful, delete originals
+ """Join any joinable files in 'workdir' to 'workdir_complete' and
+ when successful, delete originals
"""
newfiles = []
bufsize = 24 * 1024 * 1024
@@ -465,9 +472,9 @@ def file_join(nzo, workdir, workdir_complete, delete, joinables):
# (Un)Rar Functions
##############################################################################
def rar_unpack(nzo, workdir, workdir_complete, delete, one_folder, rars):
- """ Unpack multiple sets 'rars' of RAR files from 'workdir' to 'workdir_complete.
- When 'delete' is set, originals will be deleted.
- When 'one_folder' is set, all files will be in a single folder
+ """Unpack multiple sets 'rars' of RAR files from 'workdir' to 'workdir_complete'.
+ When 'delete' is set, originals will be deleted.
+ When 'one_folder' is set, all files will be in a single folder
"""
newfiles = extracted_files = []
rar_sets = {}
@@ -588,9 +595,9 @@ def rar_unpack(nzo, workdir, workdir_complete, delete, one_folder, rars):
def rar_extract(rarfile_path, numrars, one_folder, nzo, setname, extraction_path):
- """ Unpack single rar set 'rarfile' to 'extraction_path',
- with password tries
- Return fail==0(ok)/fail==1(error)/fail==2(wrong password), new_files, rars
+ """Unpack single rar set 'rarfile' to 'extraction_path',
+ with password tries
+ Return fail==0(ok)/fail==1(error)/fail==2(wrong password), new_files, rars
"""
fail = 0
new_files = None
@@ -615,8 +622,8 @@ def rar_extract(rarfile_path, numrars, one_folder, nzo, setname, extraction_path
def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction_path, password):
- """ Unpack single rar set 'rarfile_path' to 'extraction_path'
- Return fail==0(ok)/fail==1(error)/fail==2(wrong password)/fail==3(crc-error), new_files, rars
+ """Unpack single rar set 'rarfile_path' to 'extraction_path'
+ Return fail==0(ok)/fail==1(error)/fail==2(wrong password)/fail==3(crc-error), new_files, rars
"""
start = time.time()
@@ -850,8 +857,8 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction
# (Un)Zip Functions
##############################################################################
def unzip(nzo, workdir, workdir_complete, delete, one_folder, zips):
- """ Unpack multiple sets 'zips' of ZIP files from 'workdir' to 'workdir_complete.
- When 'delete' is ste, originals will be deleted.
+ """Unpack multiple sets 'zips' of ZIP files from 'workdir' to 'workdir_complete'.
+ When 'delete' is set, originals will be deleted.
"""
try:
@@ -928,8 +935,8 @@ def ZIP_Extract(zipfile, extraction_path, one_folder):
# 7Zip Functions
##############################################################################
def unseven(nzo, workdir, workdir_complete, delete, one_folder, sevens):
- """ Unpack multiple sets '7z' of 7Zip files from 'workdir' to 'workdir_complete.
- When 'delete' is set, originals will be deleted.
+ """Unpack multiple sets '7z' of 7Zip files from 'workdir' to 'workdir_complete'.
+ When 'delete' is set, originals will be deleted.
"""
i = 0
unseven_failed = False
@@ -976,8 +983,8 @@ def unseven(nzo, workdir, workdir_complete, delete, one_folder, sevens):
def seven_extract(nzo, sevenset, extensions, extraction_path, one_folder, delete):
- """ Unpack single set 'sevenset' to 'extraction_path', with password tries
- Return fail==0(ok)/fail==1(error)/fail==2(wrong password), new_files, sevens
+ """Unpack single set 'sevenset' to 'extraction_path', with password tries
+ Return fail==0(ok)/fail==1(error)/fail==2(wrong password), new_files, sevens
"""
# Before we start, make sure the 7z binary SEVEN_COMMAND is defined
if not SEVEN_COMMAND:
@@ -1009,8 +1016,8 @@ def seven_extract(nzo, sevenset, extensions, extraction_path, one_folder, delete
def seven_extract_core(sevenset, extensions, extraction_path, one_folder, delete, password):
- """ Unpack single 7Z set 'sevenset' to 'extraction_path'
- Return fail==0(ok)/fail==1(error)/fail==2(wrong password), new_files, message
+ """Unpack single 7Z set 'sevenset' to 'extraction_path'
+ Return fail==0(ok)/fail==1(error)/fail==2(wrong password), new_files, message
"""
if one_folder:
method = "e" # Unpack without folders
@@ -1909,9 +1916,9 @@ def MultiPar_Verify(parfile, nzo, setname, joinables, single=False):
def create_env(nzo=None, extra_env_fields={}):
- """ Modify the environment for pp-scripts with extra information
- OSX: Return copy of environment without PYTHONPATH and PYTHONHOME
- other: return None
+ """Modify the environment for pp-scripts with extra information
+ OSX: Return copy of environment without PYTHONPATH and PYTHONHOME
+ other: return None
"""
env = os.environ.copy()
@@ -1969,8 +1976,8 @@ def create_env(nzo=None, extra_env_fields={}):
def rar_volumelist(rarfile_path, password, known_volumes):
- """ Extract volumes that are part of this rarset
- and merge them with existing list, removing duplicates
+ """Extract volumes that are part of this rarset
+ and merge them with existing list, removing duplicates
"""
# UnRar is required to read some RAR files
# RarFile can fail in special cases
@@ -2014,9 +2021,9 @@ def rar_sort(a, b):
def build_filelists(workdir, workdir_complete=None, check_both=False, check_rar=True):
- """ Build filelists, if workdir_complete has files, ignore workdir.
- Optionally scan both directories.
- Optionally test content to establish RAR-ness
+ """Build filelists, if workdir_complete has files, ignore workdir.
+ Optionally scan both directories.
+ Optionally test content to establish RAR-ness
"""
sevens, joinables, zips, rars, ts, filelist = ([], [], [], [], [], [])
@@ -2123,9 +2130,9 @@ def quick_check_set(set, nzo):
def unrar_check(rar):
- """ Return version number of unrar, where "5.01" returns 501
- Also return whether an original version is found
- (version, original)
+ """Return version number of unrar, where "5.01" returns 501
+ Also return whether an original version is found
+ (version, original)
"""
version = 0
original = ""
@@ -2323,8 +2330,8 @@ def analyse_show(name):
def pre_queue(nzo, pp, cat):
- """ Run pre-queue script (if any) and process results.
- pp and cat are supplied seperate since they can change.
+ """Run pre-queue script (if any) and process results.
+ pp and cat are supplied separately since they can change.
"""
def fix(p):
diff --git a/sabnzbd/notifier.py b/sabnzbd/notifier.py
index 7cb4a48..fdf3559 100644
--- a/sabnzbd/notifier.py
+++ b/sabnzbd/notifier.py
@@ -102,8 +102,8 @@ def get_prio(gtype, section):
def check_cat(section, job_cat, keyword=None):
- """ Check if `job_cat` is enabled in `section`.
- * = All, if no other categories selected.
+ """Check if `job_cat` is enabled in `section`.
+ * = All, if no other categories selected.
"""
if not job_cat:
return True
diff --git a/sabnzbd/nzbparser.py b/sabnzbd/nzbparser.py
index ef44499..1b29420 100644
--- a/sabnzbd/nzbparser.py
+++ b/sabnzbd/nzbparser.py
@@ -169,10 +169,10 @@ def process_nzb_archive_file(
password=None,
nzo_id=None,
):
- """ Analyse ZIP file and create job(s).
- Accepts ZIP files with ONLY nzb/nfo/folder files in it.
- returns (status, nzo_ids)
- status: -1==Error, 0==OK, 1==Ignore
+ """Analyse ZIP file and create job(s).
+ Accepts ZIP files with ONLY nzb/nfo/folder files in it.
+ returns (status, nzo_ids)
+ status: -1==Error, 0==OK, 1==Ignore
"""
nzo_ids = []
if catdir is None:
@@ -270,10 +270,10 @@ def process_single_nzb(
password=None,
nzo_id=None,
):
- """ Analyze file and create a job from it
- Supports NZB, NZB.BZ2, NZB.GZ and GZ.NZB-in-disguise
- returns (status, nzo_ids)
- status: -2==Error/retry, -1==Error, 0==OK
+ """Analyze file and create a job from it
+ Supports NZB, NZB.BZ2, NZB.GZ and GZ.NZB-in-disguise
+ returns (status, nzo_ids)
+ status: -2==Error/retry, -1==Error, 0==OK
"""
nzo_ids = []
if catdir is None:
diff --git a/sabnzbd/nzbqueue.py b/sabnzbd/nzbqueue.py
index 5ab323a..0212a48 100644
--- a/sabnzbd/nzbqueue.py
+++ b/sabnzbd/nzbqueue.py
@@ -72,10 +72,10 @@ class NzbQueue:
NzbQueue.do = self
def read_queue(self, repair):
- """ Read queue from disk, supporting repair modes
- 0 = no repairs
- 1 = use existing queue, add missing "incomplete" folders
- 2 = Discard all queue admin, reconstruct from "incomplete" folders
+ """Read queue from disk, supporting repair modes
+ 0 = no repairs
+ 1 = use existing queue, add missing "incomplete" folders
+ 2 = Discard all queue admin, reconstruct from "incomplete" folders
"""
nzo_ids = []
if repair < 2:
@@ -132,10 +132,10 @@ class NzbQueue:
@NzbQueueLocker
def scan_jobs(self, all_jobs=False, action=True):
- """ Scan "incomplete" for missing folders,
- 'all' is True: Include active folders
- 'action' is True, do the recovery action
- returns list of orphaned folders
+ """Scan "incomplete" for missing folders,
+ 'all' is True: Include active folders
+ 'action' is True, do the recovery action
+ returns list of orphaned folders
"""
result = []
# Folders from the download queue
@@ -374,9 +374,9 @@ class NzbQueue:
@NzbQueueLocker
def remove(self, nzo_id, add_to_history=True, cleanup=True, delete_all_data=True):
- """ Remove NZO from queue.
- It can be added to history directly.
- Or, we do some clean-up, sometimes leaving some data.
+ """Remove NZO from queue.
+ It can be added to history directly.
+ Or, we do some clean-up, sometimes leaving some data.
"""
if nzo_id in self.__nzo_table:
nzo = self.__nzo_table.pop(nzo_id)
@@ -579,8 +579,8 @@ class NzbQueue:
self.__nzo_list = sort_queue_function(self.__nzo_list, _nzo_size_cmp, reverse)
def sort_queue(self, field, reverse=None):
- """ Sort queue by field: "name", "size" or "avg_age"
- Direction is specified as "desc"/True or "asc"/False
+ """Sort queue by field: "name", "size" or "avg_age"
+ Direction is specified as "desc"/True or "asc"/False
"""
if isinstance(reverse, str):
if reverse.lower() == "desc":
@@ -700,8 +700,8 @@ class NzbQueue:
nzo.reset_all_try_lists()
def has_forced_items(self):
- """ Check if the queue contains any Forced
- Priority items to download while paused
+ """Check if the queue contains any Forced
+ Priority items to download while paused
"""
for nzo in self.__nzo_list:
if nzo.priority == TOP_PRIORITY and nzo.status not in (Status.PAUSED, Status.GRABBING):
@@ -709,8 +709,8 @@ class NzbQueue:
return False
def get_article(self, server, servers):
- """ Get next article for jobs in the queue
- Not locked for performance, since it only reads the queue
+ """Get next article for jobs in the queue
+ Not locked for performance, since it only reads the queue
"""
# Pre-calculate propagation delay
propagtion_delay = float(cfg.propagation_delay() * 60)
@@ -732,8 +732,8 @@ class NzbQueue:
return
def register_article(self, article, success=True):
- """ Register the articles we tried
- Not locked for performance, since it only modifies individual NZOs
+ """Register the articles we tried
+ Not locked for performance, since it only modifies individual NZOs
"""
nzf = article.nzf
nzo = nzf.nzo
@@ -795,8 +795,8 @@ class NzbQueue:
Assembler.do.process((nzo, None, None))
def actives(self, grabs=True):
- """ Return amount of non-paused jobs, optionally with 'grabbing' items
- Not locked for performance, only reads the queue
+ """Return amount of non-paused jobs, optionally with 'grabbing' items
+ Not locked for performance, only reads the queue
"""
n = 0
for nzo in self.__nzo_list:
@@ -808,9 +808,9 @@ class NzbQueue:
return n
def queue_info(self, search=None, start=0, limit=0):
- """ Return list of queued jobs,
- optionally filtered by 'search' and limited by start and limit.
- Not locked for performance, only reads the queue
+ """Return list of queued jobs,
+ optionally filtered by 'search' and limited by start and limit.
+ Not locked for performance, only reads the queue
"""
if search:
search = search.lower()
@@ -841,8 +841,8 @@ class NzbQueue:
return QNFO(bytes_total, bytes_left, bytes_left_previous_page, pnfo_list, q_size, n)
def remaining(self):
- """ Return bytes left in the queue by non-paused items
- Not locked for performance, only reads the queue
+ """Return bytes left in the queue by non-paused items
+ Not locked for performance, only reads the queue
"""
bytes_left = 0
for nzo in self.__nzo_list:
diff --git a/sabnzbd/nzbstuff.py b/sabnzbd/nzbstuff.py
index 2585c4e..875afc9 100644
--- a/sabnzbd/nzbstuff.py
+++ b/sabnzbd/nzbstuff.py
@@ -479,8 +479,8 @@ class NzbFile(TryList):
self.md5 = None
def __eq__(self, other):
- """ Assume it's the same file if the numer bytes and first article
- are the same or if there are no articles left, use the filenames
+ """Assume it's the same file if the number of bytes and first article
+ are the same or if there are no articles left, use the filenames
"""
if self.bytes == other.bytes:
if self.decodetable and other.decodetable:
diff --git a/sabnzbd/panic.py b/sabnzbd/panic.py
index 51eea09..416e622 100644
--- a/sabnzbd/panic.py
+++ b/sabnzbd/panic.py
@@ -250,8 +250,8 @@ def launch_a_browser(url, force=False):
def show_error_dialog(msg):
- """ Show a pop-up when program cannot start
- Windows-only, otherwise only print to console
+ """Show a pop-up when program cannot start
+ Windows-only, otherwise only print to console
"""
if sabnzbd.WIN32:
ctypes.windll.user32.MessageBoxW(0, msg, T("Fatal error"), 0)
diff --git a/sabnzbd/par2file.py b/sabnzbd/par2file.py
index 64723a7..fa18feb 100644
--- a/sabnzbd/par2file.py
+++ b/sabnzbd/par2file.py
@@ -34,8 +34,8 @@ PAR_RECOVERY_ID = b"RecvSlic"
def is_parfile(filename):
- """ Check quickly whether file has par2 signature
- or if the filename has '.par2' in it
+ """Check quickly whether file has par2 signature
+ or if the filename has '.par2' in it
"""
if os.path.exists(filename):
try:
@@ -50,9 +50,9 @@ def is_parfile(filename):
def analyse_par2(name, filepath=None):
- """ Check if file is a par2-file and determine vol/block
- return setname, vol, block
- setname is empty when not a par2 file
+ """Check if file is a par2-file and determine vol/block
+ return setname, vol, block
+ setname is empty when not a par2 file
"""
name = name.strip()
vol = block = 0
@@ -83,12 +83,12 @@ def analyse_par2(name, filepath=None):
def parse_par2_file(fname, md5of16k):
- """ Get the hash table and the first-16k hash table from a PAR2 file
- Return as dictionary, indexed on names or hashes for the first-16 table
- The input md5of16k is modified in place and thus not returned!
+ """Get the hash table and the first-16k hash table from a PAR2 file
+ Return as dictionary, indexed on names or hashes for the first-16k table
+ The input md5of16k is modified in place and thus not returned!
- For a full description of the par2 specification, visit:
- http://parchive.sourceforge.net/docs/specifications/parity-volume-spec/article-spec.html
+ For a full description of the par2 specification, visit:
+ http://parchive.sourceforge.net/docs/specifications/parity-volume-spec/article-spec.html
"""
table = {}
duplicates16k = []
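For reference, walking the packets of a PAR2 file only needs the fixed 64-byte packet header described in the spec linked above. The helper below is an illustrative sketch (a robust parser would also re-sync on the magic bytes when it hits junk), not SABnzbd's own reader:

import struct

PAR2_PKT_MAGIC = b"PAR2\x00PKT"

def walk_par2_packets(fname):
    # Header layout per the spec: magic(8) + packet length(8, little-endian)
    # + packet md5(16) + recovery set id(16) + packet type(16) = 64 bytes
    with open(fname, "rb") as fp:
        while True:
            header = fp.read(64)
            if len(header) < 64 or not header.startswith(PAR2_PKT_MAGIC):
                return
            (length,) = struct.unpack("<Q", header[8:16])
            if length < 64:
                return  # corrupt length field
            packet_type = header[48:64]
            yield packet_type, fp.read(length - 64)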
diff --git a/sabnzbd/postproc.py b/sabnzbd/postproc.py
index 40082bb..0b325fe 100644
--- a/sabnzbd/postproc.py
+++ b/sabnzbd/postproc.py
@@ -485,7 +485,9 @@ def process_job(nzo):
newfiles = rename_and_collapse_folder(tmp_workdir_complete, workdir_complete, newfiles)
except:
logging.error(
- T('Error renaming "%s" to "%s"'), clip_path(tmp_workdir_complete), clip_path(workdir_complete),
+ T('Error renaming "%s" to "%s"'),
+ clip_path(tmp_workdir_complete),
+ clip_path(workdir_complete),
)
logging.info("Traceback: ", exc_info=True)
# Better disable sorting because filenames are all off now
@@ -662,9 +664,9 @@ def process_job(nzo):
def prepare_extraction_path(nzo):
- """ Based on the information that we have, generate
- the extraction path and create the directory.
- Separated so it can be called from DirectUnpacker
+ """Based on the information that we have, generate
+ the extraction path and create the directory.
+ Separated so it can be called from DirectUnpacker
"""
one_folder = False
marker_file = None
@@ -796,8 +798,8 @@ def parring(nzo, workdir):
def try_sfv_check(nzo, workdir):
- """ Attempt to verify set using SFV file
- Return None if no SFV-sets, True/False based on verification
+ """Attempt to verify set using SFV file
+ Return None if no SFV-sets, True/False based on verification
"""
# Get list of SFV names
sfvs = globber_full(workdir, "*.sfv")
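Since an SFV file is just "filename CRC32" lines (';' starts a comment), the heart of such a check can be sketched in a few lines. Helper names here are hypothetical; the real code also records which files failed and why:

import os
import zlib

def crc32_of_file(path):
    crc = 0
    with open(path, "rb") as fp:
        for chunk in iter(lambda: fp.read(128 * 1024), b""):
            crc = zlib.crc32(chunk, crc)
    return crc & 0xFFFFFFFF

def verify_sfv(sfv_path):
    # True only if every listed file exists and its CRC32 matches
    folder = os.path.dirname(sfv_path)
    with open(sfv_path, "rt", errors="ignore") as fp:
        for line in fp:
            line = line.strip()
            if not line or line.startswith(";"):
                continue
            filename, _, crc_hex = line.rpartition(" ")
            target = os.path.join(folder, filename.strip())
            if not os.path.isfile(target) or crc32_of_file(target) != int(crc_hex, 16):
                return False
    return True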
@@ -828,10 +830,10 @@ def try_sfv_check(nzo, workdir):
def try_rar_check(nzo, rars):
- """ Attempt to verify set using the RARs
- Return True if verified, False when failed
- When setname is '', all RAR files will be used, otherwise only the matching one
- If no RAR's are found, returns True
+ """Attempt to verify set using the RARs
+ Return True if verified, False when failed
+ When setname is '', all RAR files will be used, otherwise only the matching one
+ If no RARs are found, returns True
"""
# Sort for better processing
rars.sort(key=functools.cmp_to_key(rar_sort))
@@ -1000,8 +1002,8 @@ def handle_empty_queue():
def cleanup_list(wdir, skip_nzb):
- """ Remove all files whose extension matches the cleanup list,
- optionally ignoring the nzb extension
+ """Remove all files whose extension matches the cleanup list,
+ optionally ignoring the nzb extension
"""
if cfg.cleanup_list():
try:
@@ -1026,17 +1028,17 @@ def cleanup_list(wdir, skip_nzb):
def prefix(path, pre):
- """ Apply prefix to last part of path
- '/my/path' and 'hi_' will give '/my/hi_path'
+ """Apply prefix to last part of path
+ '/my/path' and 'hi_' will give '/my/hi_path'
"""
p, d = os.path.split(path)
return os.path.join(p, pre + d)
def nzb_redirect(wdir, nzbname, pp, script, cat, priority):
- """ Check if this job contains only NZB files,
- if so send to queue and remove if on clean-up list
- Returns list of processed NZB's
+ """Check if this job contains only NZB files,
+ if so send to queue and remove if on clean-up list
+ Returns list of processed NZBs
"""
files = listdir_full(wdir)
@@ -1098,8 +1100,8 @@ def get_last_line(txt):
def remove_samples(path):
- """ Remove all files that match the sample pattern
- Skip deleting if it matches all files or there is only 1 file
+ """Remove all files that match the sample pattern
+ Skip deleting if it matches all files or there is only 1 file
"""
files_to_delete = []
nr_files = 0
@@ -1123,9 +1125,9 @@ def remove_samples(path):
def rename_and_collapse_folder(oldpath, newpath, files):
- """ Rename folder, collapsing when there's just a single subfolder
- oldpath --> newpath OR oldpath/subfolder --> newpath
- Modify list of filenames accordingly
+ """Rename folder, collapsing when there's just a single subfolder
+ oldpath --> newpath OR oldpath/subfolder --> newpath
+ Modify list of filenames accordingly
"""
orgpath = oldpath
items = globber(oldpath)
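Stripped of the filename-list bookkeeping, the collapse logic amounts to: if the old folder contains exactly one subfolder and nothing else, move that subfolder into place and drop the empty shell. A minimal sketch with a hypothetical helper:

import os

def rename_and_collapse(oldpath, newpath):
    source = oldpath
    items = os.listdir(oldpath)
    if len(items) == 1 and os.path.isdir(os.path.join(oldpath, items[0])):
        # Single subfolder inside: collapse oldpath/subfolder --> newpath
        source = os.path.join(oldpath, items[0])
    os.rename(source, newpath)
    if source != oldpath:
        os.rmdir(oldpath)  # remove the now-empty outer folder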
diff --git a/sabnzbd/rss.py b/sabnzbd/rss.py
index 9a63170..2d6d499 100644
--- a/sabnzbd/rss.py
+++ b/sabnzbd/rss.py
@@ -128,9 +128,9 @@ def notdefault(item):
def convert_filter(text):
- """ Return compiled regex.
- If string starts with re: it's a real regex
- else quote all regex specials, replace '*' by '.*'
+ """Return compiled regex.
+ If string starts with re: it's a real regex
+ else quote all regex specials, replace '*' by '.*'
"""
text = text.strip().lower()
if text.startswith("re:"):
@@ -145,8 +145,8 @@ def convert_filter(text):
def remove_obsolete(jobs, new_jobs):
- """ Expire G/B links that are not in new_jobs (mark them 'X')
- Expired links older than 3 days are removed from 'jobs'
+ """Expire G/B links that are not in new_jobs (mark them 'X')
+ Expired links older than 3 days are removed from 'jobs'
"""
now = time.time()
limit = now - 259200 # 3days (3x24x3600)
@@ -625,8 +625,8 @@ class RSSQueue:
self.jobs[feed][item]["status"] = "D-"
def check_duplicate(self, title):
- """ Check if this title was in this or other feeds
- Return matching feed name
+ """Check if this title was in this or other feeds
+ Return matching feed name
"""
title = title.lower()
for fd in self.jobs:
@@ -638,8 +638,8 @@ class RSSQueue:
def patch_feedparser():
- """ Apply options that work for SABnzbd
- Add additional parsing of attributes
+ """Apply options that work for SABnzbd
+ Add additional parsing of attributes
"""
feedparser.SANITIZE_HTML = 0
feedparser.PARSE_MICROFORMATS = 0
@@ -728,8 +728,8 @@ def _HandleLink(
def _get_link(entry):
- """ Retrieve the post link from this entry
- Returns (link, category, size)
+ """Retrieve the post link from this entry
+ Returns (link, category, size)
"""
size = 0
age = datetime.datetime.now()
@@ -809,8 +809,8 @@ def special_rss_site(url):
def ep_match(season, episode, expr, title=None):
- """ Return True if season, episode is at or above expected
- Optionally `title` can be matched
+ """Return True if season, episode is at or above expected
+ Optionally `title` can be matched
"""
m = _RE_SP.search(expr)
if m:
diff --git a/sabnzbd/scheduler.py b/sabnzbd/scheduler.py
index 8e0c299..4a5d50b 100644
--- a/sabnzbd/scheduler.py
+++ b/sabnzbd/scheduler.py
@@ -276,10 +276,10 @@ def abort():
def sort_schedules(all_events, now=None):
- """ Sort the schedules, based on order of happening from now
- `all_events=True`: Return an event for each active day
- `all_events=False`: Return only first occurring event of the week
- `now` : for testing: simulated localtime()
+ """Sort the schedules, based on order of happening from now
+ `all_events=True`: Return an event for each active day
+ `all_events=False`: Return only first occurring event of the week
+ `now` : for testing: simulated localtime()
"""
day_min = 24 * 60
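The sort key itself is just "minutes from now until the event, wrapping around the week". A rough sketch of that idea, assuming each schedule expands to (weekday 1-7, hour, minute) occurrences; the tuple layout is an assumption for illustration:

import time

WEEK_MINUTES = 7 * 24 * 60

def minutes_from_now(weekday, hour, minute, now=None):
    # weekday: 1 = Monday ... 7 = Sunday, matching time.localtime().tm_wday + 1
    now = now or time.localtime()
    now_min = now.tm_wday * 24 * 60 + now.tm_hour * 60 + now.tm_min
    event_min = (weekday - 1) * 24 * 60 + hour * 60 + minute
    return (event_min - now_min) % WEEK_MINUTES

events = [(1, 8, 0, "resume"), (5, 23, 30, "pause")]
events.sort(key=lambda ev: minutes_from_now(*ev[:3]))  # soonest event first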
@@ -321,8 +321,8 @@ def sort_schedules(all_events, now=None):
def analyse(was_paused=False, priority=None):
- """ Determine what pause/resume state we would have now.
- 'priority': evaluate only effect for given priority, return True for paused
+ """Determine what pause/resume state we would have now.
+ 'priority': evaluate only effect for given priority, return True for paused
"""
global PP_PAUSE_EVENT
PP_PAUSE_EVENT = False
@@ -438,8 +438,8 @@ def scheduled_resume():
def __oneshot_resume(when):
- """ Called by delayed resume schedule
- Only resumes if call comes at the planned time
+ """Called by delayed resume schedule
+ Only resumes if call comes at the planned time
"""
global __PAUSE_END
if __PAUSE_END is not None and (when > __PAUSE_END - 5) and (when < __PAUSE_END + 55):
diff --git a/sabnzbd/sorting.py b/sabnzbd/sorting.py
index e5a3382..c70a08a 100644
--- a/sabnzbd/sorting.py
+++ b/sabnzbd/sorting.py
@@ -979,10 +979,10 @@ class DateSorter:
def path_subst(path, mapping):
- """ Replace the sort sting elements by real values.
- Non-elements are copied literally.
- path = the sort string
- mapping = array of tuples that maps all elements to their values
+ """Replace the sort sting elements by real values.
+ Non-elements are copied literally.
+ path = the sort string
+ mapping = array of tuples that maps all elements to their values
"""
# Added ugly hack to prevent %ext from being masked by %e
newpath = []
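The "%ext masked by %e" remark is the classic reason to scan the sort string once and try the longest element names first. A minimal sketch of that idea (the element names in the example mapping are illustrative):

def substitute(sort_string, mapping):
    # mapping: list of (element, value) tuples; longest keys first so that
    # "%e" cannot eat the start of "%ext"
    ordered = sorted(mapping, key=lambda kv: len(kv[0]), reverse=True)
    out, i = [], 0
    while i < len(sort_string):
        for key, value in ordered:
            if sort_string.startswith(key, i):
                out.append(str(value))
                i += len(key)
                break
        else:
            out.append(sort_string[i])
            i += 1
    return "".join(out)

print(substitute("%t - S%sE%e%ext", [("%t", "Show"), ("%s", "01"), ("%e", "04"), ("%ext", ".mkv")]))
# Show - S01E04.mkv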
@@ -1002,11 +1002,11 @@ def path_subst(path, mapping):
def get_titles(nzo, match, name, titleing=False):
- """ The title will be the part before the match
- Clean it up and title() it
+ """The title will be the part before the match
+ Clean it up and title() it
- ''.title() isn't very good under python so this contains
- a lot of little hacks to make it better and for more control
+ ''.title() isn't very good under Python, so this contains
+ a lot of little hacks to make it better and for more control
"""
if nzo:
title = nzo.nzo_info.get("propername")
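The complaint about ''.title() is easy to reproduce: it capitalizes after every non-letter, so apostrophes and digits give odd results. A tiny demonstration plus one common workaround (a regex-based word capitalizer, shown as a sketch rather than the exact hacks used here):

import re

print("it's a man's world".title())  # It'S A Man'S World  <- the problem
print("x264-group".title())          # X264-Group

def smart_title(text):
    # Capitalize only the first character of each whitespace-separated word
    return re.sub(r"\S+", lambda m: m.group(0)[:1].upper() + m.group(0)[1:], text)

print(smart_title("it's a man's world"))  # It's A Man's World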
@@ -1088,9 +1088,9 @@ def replace_word(word_input, one, two):
def get_descriptions(nzo, match, name):
- """ If present, get a description from the nzb name.
- A description has to be after the matched item, separated either
- like ' - Description' or '_-_Description'
+ """If present, get a description from the nzb name.
+ A description has to be after the matched item, separated
+ like ' - Description' or '_-_Description'
"""
if nzo:
ep_name = nzo.nzo_info.get("episodename")
@@ -1151,8 +1151,8 @@ def to_lowercase(path):
def strip_folders(path):
- """ Return 'path' without leading and trailing spaces and underscores in each element
- For Windows, also remove leading and trailing dots
+ """Return 'path' without leading and trailing spaces and underscores in each element
+ For Windows, also remove leading and trailing dots
"""
unc = sabnzbd.WIN32 and (path.startswith("//") or path.startswith("\\\\"))
f = path.strip("/").split("/")
@@ -1179,10 +1179,10 @@ def strip_folders(path):
def rename_similar(folder, skip_ext, name, skipped_files):
- """ Rename all other files in the 'folder' hierarchy after 'name'
- and move them to the root of 'folder'.
- Files having extension 'skip_ext' will be moved, but not renamed.
- Don't touch files in list `skipped_files`
+ """Rename all other files in the 'folder' hierarchy after 'name'
+ and move them to the root of 'folder'.
+ Files having extension 'skip_ext' will be moved, but not renamed.
+ Don't touch files in list `skipped_files`
"""
logging.debug('Give files in set "%s" matching names.', name)
folder = os.path.normpath(folder)
@@ -1214,9 +1214,9 @@ def rename_similar(folder, skip_ext, name, skipped_files):
def check_regexs(filename, matchers):
- """ Regular Expression match for a list of regexes
- Returns the MatchObject if a match is made
- This version checks for an additional match
+ """Regular Expression match for a list of regexes
+ Returns the MatchObject if a match is made
+ This version checks for an additional match
"""
extras = []
for expressions in matchers:
@@ -1237,8 +1237,8 @@ def check_regexs(filename, matchers):
def check_for_date(filename, matcher):
- """ Regular Expression match for date based files
- Returns the MatchObject if a match is made
+ """Regular Expression match for date based files
+ Returns the MatchObject if a match is made
"""
x = 0
if matcher:
diff --git a/sabnzbd/urlgrabber.py b/sabnzbd/urlgrabber.py
index 2ec70f2..b538888 100644
--- a/sabnzbd/urlgrabber.py
+++ b/sabnzbd/urlgrabber.py
@@ -301,9 +301,9 @@ class URLGrabber(Thread):
@staticmethod
def fail_to_history(nzo, url, msg="", content=False):
- """ Create History entry for failed URL Fetch
- msg: message to be logged
- content: report in history that cause is a bad NZB file
+ """Create History entry for failed URL Fetch
+ msg: message to be logged
+ content: report in history that cause is a bad NZB file
"""
# Remove the "Trying to fetch" part
if url:
@@ -358,8 +358,8 @@ def _build_request(url):
def _analyse(fetch_request, future_nzo):
- """ Analyze response of indexer
- returns fetch_request|None, error-message|None, retry, wait-seconds, data
+ """Analyze response of indexer
+ returns fetch_request|None, error-message|None, retry, wait-seconds, data
"""
data = None
if not fetch_request or fetch_request.code != 200:
diff --git a/sabnzbd/utils/apireg.py b/sabnzbd/utils/apireg.py
index 6d38390..a86e202 100644
--- a/sabnzbd/utils/apireg.py
+++ b/sabnzbd/utils/apireg.py
@@ -36,8 +36,8 @@ def reg_info(user):
def get_connection_info(user=True):
- """ Return URL of the API running SABnzbd instance
- 'user' == True will first try user's registry, otherwise system is used
+ """Return URL of the API running SABnzbd instance
+ 'user' == True will first try user's registry, otherwise system is used
"""
section, keypath = reg_info(user)
url = None
diff --git a/sabnzbd/utils/certgen.py b/sabnzbd/utils/certgen.py
index 86002fe..722fa0f 100644
--- a/sabnzbd/utils/certgen.py
+++ b/sabnzbd/utils/certgen.py
@@ -17,8 +17,8 @@ from sabnzbd.getipaddress import localipv4
def generate_key(key_size=2048, output_file="key.pem"):
- """ Generate the private-key file for the self-signed certificate
- Ported from cryptography docs/x509/tutorial.rst (set with no encryption)
+ """Generate the private-key file for the self-signed certificate
+ Ported from cryptography docs/x509/tutorial.rst (set with no encryption)
"""
# Generate our key
private_key = rsa.generate_private_key(public_exponent=65537, key_size=key_size, backend=default_backend())
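For context, the tutorial snippet this is ported from continues by writing the key out unencrypted in PEM form, roughly as below (a sketch based on the cryptography tutorial; the output file name matches the default argument above):

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend())
with open("key.pem", "wb") as key_file:
    key_file.write(
        private_key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.TraditionalOpenSSL,
            encryption_algorithm=serialization.NoEncryption(),  # "set with no encryption"
        )
    )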
@@ -38,8 +38,8 @@ def generate_key(key_size=2048, output_file="key.pem"):
def generate_local_cert(private_key, days_valid=3560, output_file="cert.cert", LN="SABnzbd", ON="SABnzbd"):
- """ Generate a certificate, using basic information.
- Ported from cryptography docs/x509/tutorial.rst
+ """Generate a certificate, using basic information.
+ Ported from cryptography docs/x509/tutorial.rst
"""
# Various details about who we are. For a self-signed certificate the
# subject and issuer are always the same.
diff --git a/sabnzbd/utils/checkdir.py b/sabnzbd/utils/checkdir.py
index 41566f3..4d2500f 100644
--- a/sabnzbd/utils/checkdir.py
+++ b/sabnzbd/utils/checkdir.py
@@ -18,9 +18,9 @@ def getcmdoutput(cmd):
def isFAT(check_dir):
- """ Check if "check_dir" is on FAT. FAT considered harmful (for big files)
- Works for Linux, Windows, MacOS
- NB: On Windows, full path with drive letter is needed!
+ """Check if "check_dir" is on FAT. FAT considered harmful (for big files)
+ Works for Linux, Windows, MacOS
+ NB: On Windows, full path with drive letter is needed!
"""
if not (os.path.isdir(check_dir) or os.path.isfile(check_dir)):
# Not a dir, not a file ... so not FAT:
@@ -71,12 +71,12 @@ def isFAT(check_dir):
# MacOS formerly known as OSX
"""
MacOS needs a two-step approach:
-
+
# First: directory => device
server:~ sander$ df /Volumes/CARTUNES/Tuna/
Filesystem 512-blocks Used Available Capacity iused ifree %iused Mounted on
/dev/disk9s1 120815744 108840000 11975744 91% 0 0 100% /Volumes/CARTUNES
-
+
# Then: device => filesystem type
server:~ sander$ mount | grep /dev/disk9s1
/dev/disk9s1 on /Volumes/CARTUNES (msdos, local, nodev, nosuid, noowners)
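Scripted, that two-step lookup looks roughly like this (an illustrative sketch; the real function shells out via its own helper and parses the output more defensively):

import subprocess

def macos_dir_is_fat(path):
    # Step 1: directory => device (last line of 'df', first column)
    df_out = subprocess.run(["df", path], capture_output=True, text=True).stdout
    device = df_out.strip().splitlines()[-1].split()[0]
    # Step 2: device => filesystem type via 'mount'
    mount_out = subprocess.run(["mount"], capture_output=True, text=True).stdout
    for line in mount_out.splitlines():
        if line.startswith(device + " "):
            return "msdos" in line  # FAT volumes show up as 'msdos'
    return False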
diff --git a/sabnzbd/utils/diskspeed.py b/sabnzbd/utils/diskspeed.py
index 0eb6a7c..66458cd 100644
--- a/sabnzbd/utils/diskspeed.py
+++ b/sabnzbd/utils/diskspeed.py
@@ -9,10 +9,10 @@ _DUMP_DATA = os.urandom(_DUMP_DATA_SIZE)
def diskspeedmeasure(dirname):
- """ Returns writing speed to dirname in MB/s
- method: keep writing a file, until 1 second is passed.
- Then divide bytes written by time passed
- In case of problems (ie non-writable dir or file), return None
+ """Returns writing speed to dirname in MB/s
+ method: keep writing a file until 1 second has passed,
+ then divide the bytes written by the time passed
+ In case of problems (i.e. non-writable dir or file), return None
"""
maxtime = 1.0 # sec
total_written = 0
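A stripped-down version of that measurement loop, as a sketch (the temp-file name is hypothetical; the real code uses its own dump-data buffer and cleanup):

import os
import time

def diskspeed_sketch(dirname, maxtime=1.0):
    testfile = os.path.join(dirname, "disk_speed_test.tmp")  # hypothetical name
    chunk = os.urandom(1024 * 1024)
    written = 0
    try:
        start = time.time()
        with open(testfile, "wb") as fp:
            while time.time() - start < maxtime:
                fp.write(chunk)
                fp.flush()
                os.fsync(fp.fileno())
                written += len(chunk)
        return written / (time.time() - start) / 1024 / 1024  # MB/s
    except OSError:
        return None  # non-writable dir/file or other I/O problem
    finally:
        try:
            os.remove(testfile)
        except OSError:
            pass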
diff --git a/sabnzbd/utils/pathbrowser.py b/sabnzbd/utils/pathbrowser.py
index a629f6b..d9502b0 100644
--- a/sabnzbd/utils/pathbrowser.py
+++ b/sabnzbd/utils/pathbrowser.py
@@ -56,8 +56,8 @@ _JUNKFOLDERS = (
def get_win_drives():
- """ Return list of detected drives, adapted from:
- http://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python/827490
+ """Return list of detected drives, adapted from:
+ http://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python/827490
"""
assert NT
drives = []
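The linked answer boils down to reading the drive bitmask from the Win32 API; a Windows-only sketch of that approach:

import ctypes
import string

def list_drive_letters():
    # Bit 0 of GetLogicalDrives() is A:, bit 1 is B:, and so on
    bitmask = ctypes.windll.kernel32.GetLogicalDrives()
    return [letter + ":\\" for i, letter in enumerate(string.ascii_uppercase) if bitmask & (1 << i)]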
@@ -70,9 +70,9 @@ def get_win_drives():
def folders_at_path(path, include_parent=False, show_hidden=False):
- """ Returns a list of dictionaries with the folders contained at the given path
- Give the empty string as the path to list the contents of the root path
- under Unix this means "/", on Windows this will be a list of drive letters)
+ """Returns a list of dictionaries with the folders contained at the given path
+ Give the empty string as the path to list the contents of the root path
+ (under Unix this means "/", on Windows this will be a list of drive letters)
"""
if path == "":
if NT:
diff --git a/sabnzbd/utils/pybonjour.py b/sabnzbd/utils/pybonjour.py
index 21db62e..88654b3 100644
--- a/sabnzbd/utils/pybonjour.py
+++ b/sabnzbd/utils/pybonjour.py
@@ -802,7 +802,9 @@ def _length_and_void_p_to_string(length, void_p):
################################################################################
-def DNSServiceProcessResult(sdRef,):
+def DNSServiceProcessResult(
+ sdRef,
+):
"""
diff --git a/sabnzbd/utils/rarfile.py b/sabnzbd/utils/rarfile.py
index 42bbe0f..736eff4 100644
--- a/sabnzbd/utils/rarfile.py
+++ b/sabnzbd/utils/rarfile.py
@@ -154,8 +154,7 @@ except ImportError:
if sys.hexversion < 0x3000000:
def rar_crc32(data, prev=0):
- """CRC32 with unsigned values.
- """
+ """CRC32 with unsigned values."""
if (prev > 0) and (prev & 0x80000000):
prev -= 1 << 32
res = crc32(data, prev)
@@ -390,8 +389,7 @@ BSIZE = 32 * 1024
def _get_rar_version(xfile):
- """Check quickly whether file is rar archive.
- """
+ """Check quickly whether file is rar archive."""
with XFile(xfile) as fd:
buf = fd.read(len(RAR5_ID))
if buf.startswith(RAR_ID):
@@ -407,8 +405,7 @@ def _get_rar_version(xfile):
def is_rarfile(xfile):
- """Check quickly whether file is rar archive.
- """
+ """Check quickly whether file is rar archive."""
rar_ver = _get_rar_version(xfile)
if rar_ver:
return "RAR%d" % rar_ver
@@ -641,23 +638,20 @@ class RarInfo(object):
type = None
def isdir(self):
- """Returns True if entry is a directory.
- """
+ """Returns True if entry is a directory."""
if self.type == RAR_BLOCK_FILE:
return (self.flags & RAR_FILE_DIRECTORY) == RAR_FILE_DIRECTORY
return False
def needs_password(self):
- """Returns True if data is stored password-protected.
- """
+ """Returns True if data is stored password-protected."""
if self.type == RAR_BLOCK_FILE:
return (self.flags & RAR_FILE_PASSWORD) > 0
return False
class RarFile(object):
- """Parse RAR structure, provide access to files in archive.
- """
+ """Parse RAR structure, provide access to files in archive."""
#: Archive comment. Unicode string or None.
comment = None
@@ -752,8 +746,7 @@ class RarFile(object):
return self._file_parser.volumelist()
def getinfo(self, fname):
- """Return RarInfo for file.
- """
+ """Return RarInfo for file."""
return self._file_parser.getinfo(fname)
def open(self, fname, mode="r", psw=None):
@@ -864,8 +857,7 @@ class RarFile(object):
self._extract(fnlist, path, pwd)
def testrar(self):
- """Let 'unrar' test the archive.
- """
+ """Let 'unrar' test the archive."""
# Modified for SABnzbd by clipping paths
from sabnzbd.filesystem import clip_path
@@ -981,8 +973,7 @@ class CommonParser(object):
self._vol_list = []
def has_header_encryption(self):
- """Returns True if headers are encrypted
- """
+ """Returns True if headers are encrypted"""
if self._hdrenc_main:
return True
if self._main:
@@ -1007,13 +998,11 @@ class CommonParser(object):
return self._parse_error
def infolist(self):
- """List of RarInfo records.
- """
+ """List of RarInfo records."""
return self._info_list
def getinfo(self, fname):
- """Return RarInfo for filename
- """
+ """Return RarInfo for filename"""
# accept both ways here
if PATH_SEP == "/":
fname2 = fname.replace("\\", "/")
@@ -1289,8 +1278,7 @@ class Rar3Info(RarInfo):
class RAR3Parser(CommonParser):
- """Parse RAR3 file format.
- """
+ """Parse RAR3 file format."""
_expect_sig = RAR_ID
_last_aes_key = (None, None, None) # (salt, key, iv)
@@ -1553,8 +1541,7 @@ class RAR3Parser(CommonParser):
class Rar5Info(RarInfo):
- """Shared fields for RAR5 records.
- """
+ """Shared fields for RAR5 records."""
extract_version = 50
header_crc = None
@@ -1578,8 +1565,7 @@ class Rar5Info(RarInfo):
class Rar5BaseFile(Rar5Info):
- """Shared sturct for file & service record.
- """
+ """Shared sturct for file & service record."""
type = -1
file_flags = None
@@ -1603,22 +1589,19 @@ class Rar5BaseFile(Rar5Info):
class Rar5FileInfo(Rar5BaseFile):
- """RAR5 file record.
- """
+ """RAR5 file record."""
type = RAR_BLOCK_FILE
class Rar5ServiceInfo(Rar5BaseFile):
- """RAR5 service record.
- """
+ """RAR5 service record."""
type = RAR_BLOCK_SUB
class Rar5MainInfo(Rar5Info):
- """RAR5 archive main record.
- """
+ """RAR5 archive main record."""
type = RAR_BLOCK_MAIN
main_flags = None
@@ -1631,8 +1614,7 @@ class Rar5MainInfo(Rar5Info):
class Rar5EncryptionInfo(Rar5Info):
- """RAR5 archive header encryption record.
- """
+ """RAR5 archive header encryption record."""
type = RAR5_BLOCK_ENCRYPTION
encryption_algo = None
@@ -1646,16 +1628,14 @@ class Rar5EncryptionInfo(Rar5Info):
class Rar5EndArcInfo(Rar5Info):
- """RAR5 end of archive record.
- """
+ """RAR5 end of archive record."""
type = RAR_BLOCK_ENDARC
endarc_flags = None
class RAR5Parser(CommonParser):
- """Parse RAR5 format.
- """
+ """Parse RAR5 format."""
_expect_sig = RAR5_ID
_hdrenc_main = None
@@ -1981,8 +1961,7 @@ class RAR5Parser(CommonParser):
class UnicodeFilename(object):
- """Handle RAR3 unicode filename decompression.
- """
+ """Handle RAR3 unicode filename decompression."""
def __init__(self, name, encdata):
self.std_name = bytearray(name)
@@ -2309,8 +2288,7 @@ class PipeReader(RarExtFile):
class DirectReader(RarExtFile):
- """Read uncompressed data directly from archive.
- """
+ """Read uncompressed data directly from archive."""
_cur = None
_cur_avail = None
@@ -2326,8 +2304,7 @@ class DirectReader(RarExtFile):
self._cur_avail = self._cur.add_size
def _skip(self, cnt):
- """RAR Seek, skipping through rar files to get to correct position
- """
+ """RAR Seek, skipping through rar files to get to correct position"""
while cnt > 0:
# next vol needed?
@@ -2479,8 +2456,7 @@ class HeaderDecrypt(object):
# handle (filename|filelike) object
class XFile(object):
- """Input may be filename or file object.
- """
+ """Input may be filename or file object."""
__slots__ = ("_fd", "_need_close")
@@ -2561,8 +2537,7 @@ class CRC32Context(object):
class Blake2SP(object):
- """Blake2sp hash context.
- """
+ """Blake2sp hash context."""
__slots__ = ["_thread", "_buf", "_cur", "_digest"]
digest_size = 32
@@ -2592,8 +2567,7 @@ class Blake2SP(object):
self._cur = (self._cur + 1) % self.parallelism
def update(self, data):
- """Hash data.
- """
+ """Hash data."""
view = memoryview(data)
bs = self.block_size
if self._buf:
@@ -2609,8 +2583,7 @@ class Blake2SP(object):
self._buf = view.tobytes()
def digest(self):
- """Return final digest value.
- """
+ """Return final digest value."""
if self._digest is None:
if self._buf:
self._add_block(self._buf)
@@ -2785,8 +2758,7 @@ def _parse_xtime(flag, data, pos, basetime=None):
def is_filelike(obj):
- """Filename or file object?
- """
+ """Filename or file object?"""
if isinstance(obj, str) or isinstance(obj, str):
return False
res = True
@@ -2798,8 +2770,7 @@ def is_filelike(obj):
def rar3_s2k(psw, salt):
- """String-to-key hash for RAR3.
- """
+ """String-to-key hash for RAR3."""
if not isinstance(psw, str):
psw = psw.decode("utf8")
seed = psw.encode("utf-16le") + salt
@@ -2868,8 +2839,7 @@ def rar3_decompress(vers, meth, data, declen=0, flags=0, crc=0, psw=None, salt=N
def to_datetime(t):
- """Convert 6-part time tuple into datetime object.
- """
+ """Convert 6-part time tuple into datetime object."""
if t is None:
return None
@@ -2907,8 +2877,7 @@ def to_datetime(t):
def parse_dos_time(stamp):
- """Parse standard 32-bit DOS timestamp.
- """
+ """Parse standard 32-bit DOS timestamp."""
sec, stamp = stamp & 0x1F, stamp >> 5
mn, stamp = stamp & 0x3F, stamp >> 6
hr, stamp = stamp & 0x1F, stamp >> 5
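For reference, the full 32-bit DOS timestamp packs, from low to high bits: seconds/2 (5), minutes (6), hours (5), day (5), month (4) and year-1980 (7). A compact sketch of the complete decode:

def dos_time_to_tuple(stamp):
    sec, stamp = (stamp & 0x1F) * 2, stamp >> 5   # stored as seconds/2
    minute, stamp = stamp & 0x3F, stamp >> 6
    hour, stamp = stamp & 0x1F, stamp >> 5
    day, stamp = stamp & 0x1F, stamp >> 5
    month, stamp = stamp & 0x0F, stamp >> 4
    year = (stamp & 0x7F) + 1980
    return year, month, day, hour, minute, sec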
@@ -2919,8 +2888,7 @@ def parse_dos_time(stamp):
def custom_popen(cmd):
- """Disconnect cmd from parent fds, read only from stdout.
- """
+ """Disconnect cmd from parent fds, read only from stdout."""
# needed for py2exe
creationflags = 0
if sys.platform == "win32":
@@ -2937,8 +2905,7 @@ def custom_popen(cmd):
def custom_check(cmd, ignore_retcode=False):
- """Run command, collect output, raise error if needed.
- """
+ """Run command, collect output, raise error if needed."""
p = custom_popen(cmd)
out, _ = p.communicate()
if p.returncode and not ignore_retcode:
@@ -2947,8 +2914,7 @@ def custom_check(cmd, ignore_retcode=False):
def add_password_arg(cmd, psw, ___required=False):
- """Append password switch to commandline.
- """
+ """Append password switch to commandline."""
if UNRAR_TOOL == ALT_TOOL:
return
if psw is not None:
@@ -2958,8 +2924,7 @@ def add_password_arg(cmd, psw, ___required=False):
def check_returncode(p, out):
- """Raise exception according to unrar exit code.
- """
+ """Raise exception according to unrar exit code."""
code = p.returncode
if code == 0:
return
diff --git a/sabnzbd/utils/sleepless.py b/sabnzbd/utils/sleepless.py
index 1df2d2d..63f14c5 100644
--- a/sabnzbd/utils/sleepless.py
+++ b/sabnzbd/utils/sleepless.py
@@ -40,9 +40,9 @@ assertion_id = None
def keep_awake(reason):
- """ Tell OS to stay awake. One argument: text to send to OS.
- Stays in effect until next 'allow_sleep' call.
- Multiple calls allowed.
+ """Tell OS to stay awake. One argument: text to send to OS.
+ Stays in effect until next 'allow_sleep' call.
+ Multiple calls allowed.
"""
global assertion_id
diff --git a/tests/test_config.py b/tests/test_config.py
index b27ae90..6284437 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -24,9 +24,9 @@ import sabnzbd.config as config
class TestValidators:
def test_clean_nice_ionice_parameters_allowed(self):
- """ Allowed nice and ionice parameters
- https://linux.die.net/man/1/nice
- https://linux.die.net/man/1/ionice
+ """Allowed nice and ionice parameters
+ https://linux.die.net/man/1/nice
+ https://linux.die.net/man/1/ionice
"""
def assert_allowed(inp_value):
diff --git a/tests/test_functional_downloads.py b/tests/test_functional_downloads.py
index 0c54d74..b314617 100644
--- a/tests/test_functional_downloads.py
+++ b/tests/test_functional_downloads.py
@@ -24,8 +24,8 @@ from tests.testhelper import *
class TestDownloadFlow(SABnzbdBaseTest):
def is_server_configured(self):
- """ Check if the wizard was already performed.
- If not: run the wizard!
+ """Check if the wizard was already performed.
+ If not: run the wizard!
"""
with open(os.path.join(SAB_CACHE_DIR, "sabnzbd.ini"), "r") as config_file:
if SAB_NEWSSERVER_HOST not in config_file.read():
diff --git a/tests/test_functional_misc.py b/tests/test_functional_misc.py
index 42931b6..f3a762c 100644
--- a/tests/test_functional_misc.py
+++ b/tests/test_functional_misc.py
@@ -104,9 +104,9 @@ class TestExtractPot:
@pytest.mark.skipif(sys.platform.startswith("darwin"), reason="Fails for now due to PyObjC problem")
class TestDaemonizing(SABnzbdBaseTest):
def test_daemonizing(self):
- """ Simple test to see if daemon-mode still works.
- Also test removal of large "sabnzbd.error.log"
- We inherit from SABnzbdBaseTest so we can use it's clean-up logic!
+ """Simple test to see if daemon-mode still works.
+ Also test removal of large "sabnzbd.error.log"
+ We inherit from SABnzbdBaseTest so we can use its clean-up logic!
"""
daemon_host = "localhost"
daemon_port = 23456
diff --git a/tests/test_rss.py b/tests/test_rss.py
index b2f09e6..f6e7ca6 100644
--- a/tests/test_rss.py
+++ b/tests/test_rss.py
@@ -42,8 +42,8 @@ class TestRSS:
ConfigCat("movies", {})
def test_rss_newznab_parser(self):
- """ Test basic RSS-parsing of custom elements
- Harder to test in functional test
+ """Test basic RSS-parsing of custom elements
+ Harder to test in functional test
"""
feed_name = "TestFeedNewznab"
self.setup_rss(feed_name, "https://sabnzbd.org/tests/rss_newznab_test.xml")
diff --git a/tests/test_utils/test_happyeyeballs.py b/tests/test_utils/test_happyeyeballs.py
index bd5da50..7072e81 100644
--- a/tests/test_utils/test_happyeyeballs.py
+++ b/tests/test_utils/test_happyeyeballs.py
@@ -26,9 +26,9 @@ from sabnzbd.utils.happyeyeballs import happyeyeballs
@flaky
class TestHappyEyeballs:
- """ Tests of happyeyeballs() against various websites/servers
- happyeyeballs() returns the quickest IP address (IPv4, or IPv6 if available end-to-end),
- or None (if not resolvable, or not reachable)
+ """Tests of happyeyeballs() against various websites/servers
+ happyeyeballs() returns the quickest IP address (IPv4, or IPv6 if available end-to-end),
+ or None (if not resolvable, or not reachable)
"""
def test_google_http(self):
diff --git a/tests/test_utils/test_internetspeed.py b/tests/test_utils/test_internetspeed.py
index e62f95c..66b4bdd 100644
--- a/tests/test_utils/test_internetspeed.py
+++ b/tests/test_utils/test_internetspeed.py
@@ -23,8 +23,8 @@ from sabnzbd.utils.internetspeed import internetspeed, measurespeed, SizeUrlList
class TestInternetSpeed:
- """ This class contains tests to measure internet speed
- with an active and inactive connection
+ """This class contains tests to measure internet speed
+ with an active and inactive connection
"""
def test_measurespeed_invalid_url(self):
diff --git a/tests/test_win_utils.py b/tests/test_win_utils.py
index 5688587..06fdd8c 100644
--- a/tests/test_win_utils.py
+++ b/tests/test_win_utils.py
@@ -31,8 +31,8 @@ import sabnzbd.utils.apireg as ar
class TestAPIReg:
def test_set_get_connection_info_user(self):
- """ Test the saving of the URL in USER-registery
- We can't test the SYSTEM one.
+ """Test the saving of the URL in USER-registery
+ We can't test the SYSTEM one.
"""
test_url = "sab_test:8080"