From c6054e622c94e0f28c414ff84b7acc75380e058c Mon Sep 17 00:00:00 2001 From: Safihre Date: Sun, 8 Oct 2017 13:26:52 +0200 Subject: [PATCH] Py3: First 2to3 conversion --- SABHelper.py | 10 +-- SABnzbd.py | 125 ++++++++++++++++----------------- sabnzbd/__init__.py | 24 +++---- sabnzbd/api.py | 30 ++++---- sabnzbd/assembler.py | 10 +-- sabnzbd/bpsmeter.py | 36 +++++----- sabnzbd/config.py | 32 ++++----- sabnzbd/database.py | 10 +-- sabnzbd/decoder.py | 20 +++--- sabnzbd/dirscanner.py | 2 +- sabnzbd/downloader.py | 20 +++--- sabnzbd/emailer.py | 2 +- sabnzbd/encoding.py | 30 ++++---- sabnzbd/getipaddress.py | 6 +- sabnzbd/interface.py | 14 ++-- sabnzbd/lang.py | 12 ++-- sabnzbd/misc.py | 78 ++++++++++----------- sabnzbd/newsunpack.py | 47 ++++++++----- sabnzbd/newswrapper.py | 6 +- sabnzbd/notifier.py | 27 +++---- sabnzbd/nzbqueue.py | 8 +-- sabnzbd/nzbstuff.py | 48 ++++++------- sabnzbd/osxmenu.py | 4 +- sabnzbd/panic.py | 2 +- sabnzbd/postproc.py | 16 ++--- sabnzbd/powersup.py | 8 +-- sabnzbd/rating.py | 24 +++---- sabnzbd/rss.py | 26 +++---- sabnzbd/scheduler.py | 10 +-- sabnzbd/tvsort.py | 14 ++-- sabnzbd/urlgrabber.py | 20 +++--- sabnzbd/utils/certgen.py | 12 ++-- sabnzbd/utils/checkdir.py | 20 +++--- sabnzbd/utils/diskspeed.py | 18 ++--- sabnzbd/utils/getperformance.py | 8 +-- sabnzbd/utils/happyeyeballs.py | 32 ++++----- sabnzbd/utils/json.py | 16 ++--- sabnzbd/utils/kronos.py | 18 ++--- sabnzbd/utils/pathbrowser.py | 4 +- sabnzbd/utils/pybonjour.py | 16 ++--- sabnzbd/utils/pystone.py | 16 ++--- sabnzbd/utils/rarfile.py | 14 ++-- sabnzbd/utils/rsslib.py | 2 +- sabnzbd/utils/servertests.py | 12 ++-- sabnzbd/utils/systrayiconthread.py | 6 +- sabnzbd/utils/upload.py | 4 +- scripts/Deobfuscate.py | 36 +++++----- tests/conftest.py | 2 +- tests/testhelper.py | 2 +- tools/extract_pot.py | 8 +-- tools/make_mo.py | 30 ++++---- tools/msgfmt.py | 139 ++++++++++++++++++++----------------- util/apireg.py | 60 ++++++++-------- util/mailslot.py | 8 +-- 54 files changed, 616 insertions(+), 588 deletions(-) diff --git a/SABHelper.py b/SABHelper.py index 62777f5..7196932 100644 --- a/SABHelper.py +++ b/SABHelper.py @@ -17,7 +17,7 @@ import sys if sys.version_info[:2] < (2, 6) or sys.version_info[:2] >= (3, 0): - print "Sorry, requires Python 2.6 or 2.7." + print("Sorry, requires Python 2.6 or 2.7.") sys.exit(1) import os @@ -34,7 +34,7 @@ try: import win32service import pywintypes except ImportError: - print "Sorry, requires Python module PyWin32." + print("Sorry, requires Python module PyWin32.") sys.exit(1) from util.mailslot import MailSlot @@ -137,14 +137,14 @@ class SABHelper(win32serviceutil.ServiceFramework): win32evtlogutil.ReportEvent(self._svc_display_name_, state, 0, servicemanager.EVENTLOG_INFORMATION_TYPE, - (self._svc_name_, unicode(msg))) + (self._svc_name_, str(msg))) def ErrLogger(self, msg, text): win32evtlogutil.ReportEvent(self._svc_display_name_, servicemanager.PYS_SERVICE_STOPPED, 0, servicemanager.EVENTLOG_ERROR_TYPE, - (self._svc_name_, unicode(msg)), - unicode(text)) + (self._svc_name_, str(msg)), + str(text)) ############################################################################## diff --git a/SABnzbd.py b/SABnzbd.py index 2cb806b..7edb217 100755 --- a/SABnzbd.py +++ b/SABnzbd.py @@ -16,18 +16,19 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. import sys +import imp if sys.version_info[:2] < (2, 7) or sys.version_info[:2] >= (3, 0): - print "Sorry, requires Python 2.7." 
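# A minimal Python 3 sketch of the print conversion applied throughout this
# patch: the print statement becomes the print() function. 2to3 wraps the
# original expression in parentheses, which is why some converted lines end
# up with a redundant (but harmless) extra pair, e.g. print(("%s" % name)).
name = "SABnzbd"                              # illustrative value
print("Sorry, requires Python 2.7.")          # statement form is a SyntaxError in Python 3
print("Usage: %s [-f <configfile>]" % name)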
+ print("Sorry, requires Python 2.7.") sys.exit(1) # Make sure UTF-8 is default 8bit encoding if not hasattr(sys, "setdefaultencoding"): - reload(sys) + imp.reload(sys) try: sys.setdefaultencoding('utf-8') except: - print 'Sorry, you MUST add the SABnzbd folder to the PYTHONPATH environment variable' - print 'or find another way to force Python to use UTF-8 for text encoding.' + print('Sorry, you MUST add the SABnzbd folder to the PYTHONPATH environment variable') + print('or find another way to force Python to use UTF-8 for text encoding.') sys.exit(1) import logging @@ -47,10 +48,10 @@ try: if Cheetah.Version[0] != '2': raise ValueError except ValueError: - print "Sorry, requires Python module Cheetah 2.0rc7 or higher." + print("Sorry, requires Python module Cheetah 2.0rc7 or higher.") sys.exit(1) except: - print "The Python module Cheetah is required" + print("The Python module Cheetah is required") sys.exit(1) import cherrypy @@ -64,20 +65,20 @@ except: from pysqlite2.dbapi2 import version as sqlite3_version except: if os.name != 'nt': - print "Sorry, requires Python module sqlite3" - print "Try: apt-get install python-pysqlite2" + print("Sorry, requires Python module sqlite3") + print("Try: apt-get install python-pysqlite2") sys.exit(1) else: SQLITE_DLL = False import locale -import __builtin__ +import builtins try: locale.setlocale(locale.LC_ALL, "") - __builtin__.__dict__['codepage'] = locale.getlocale()[1] or 'cp1252' + builtins.__dict__['codepage'] = locale.getlocale()[1] or 'cp1252' except: # Work-around for Python-ports with bad "locale" support - __builtin__.__dict__['codepage'] = 'cp1252' + builtins.__dict__['codepage'] = 'cp1252' import sabnzbd import sabnzbd.lang @@ -117,7 +118,7 @@ except ImportError: class MailSlot: pass if sabnzbd.WIN32: - print "Sorry, requires Python module PyWin32." 
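# Illustrative Python 3 counterpart of the __builtin__ -> builtins rename used
# above for the global 'codepage' value; the module and attribute names follow
# the patch, the 'cp1252' fallback is the same default.
import locale
import builtins

try:
    locale.setlocale(locale.LC_ALL, "")
    builtins.codepage = locale.getlocale()[1] or 'cp1252'
except locale.Error:
    # Work-around for platforms with broken locale support
    builtins.codepage = 'cp1252'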
+ print("Sorry, requires Python module PyWin32.") sys.exit(1) @@ -170,44 +171,44 @@ class guiHandler(logging.Handler): def print_help(): - print - print "Usage: %s [-f ] " % sabnzbd.MY_NAME - print - print "Options marked [*] are stored in the config file" - print - print "Options:" - print " -f --config-file Location of config file" - print " -s --server Listen on server:port [*]" - print " -t --templates Template directory [*]" - print - print " -l --logging <0..2> Set logging level (-1=off, 0= least, 2= most) [*]" - print " -w --weblogging Enable cherrypy access logging" - print - print " -b --browser <0..1> Auto browser launch (0= off, 1= on) [*]" + print() + print(("Usage: %s [-f ] " % sabnzbd.MY_NAME)) + print() + print("Options marked [*] are stored in the config file") + print() + print("Options:") + print(" -f --config-file Location of config file") + print(" -s --server Listen on server:port [*]") + print(" -t --templates Template directory [*]") + print() + print(" -l --logging <0..2> Set logging level (-1=off, 0= least, 2= most) [*]") + print(" -w --weblogging Enable cherrypy access logging") + print() + print(" -b --browser <0..1> Auto browser launch (0= off, 1= on) [*]") if sabnzbd.WIN32: - print " -d --daemon Use when run as a service" + print(" -d --daemon Use when run as a service") else: - print " -d --daemon Fork daemon process" - print " --pid Create a PID file in the given folder (full path)" - print " --pidfile Create a PID file with the given name (full path)" - print - print " -h --help Print this message" - print " -v --version Print version information" - print " -c --clean Remove queue, cache and logs" - print " -p --pause Start in paused mode" - print " --repair Add orphaned jobs from the incomplete folder to the queue" - print " --repair-all Try to reconstruct the queue from the incomplete folder" - print " with full data reconstruction" - print " --https Port to use for HTTPS server" - print " --ipv6_hosting <0|1> Listen on IPv6 address [::1] [*]" - print " --no-login Start with username and password reset" - print " --log-all Log all article handling (for developers)" - print " --console Force console logging for OSX app" - print " --new Run a new instance of SABnzbd" + print(" -d --daemon Fork daemon process") + print(" --pid Create a PID file in the given folder (full path)") + print(" --pidfile Create a PID file with the given name (full path)") + print() + print(" -h --help Print this message") + print(" -v --version Print version information") + print(" -c --clean Remove queue, cache and logs") + print(" -p --pause Start in paused mode") + print(" --repair Add orphaned jobs from the incomplete folder to the queue") + print(" --repair-all Try to reconstruct the queue from the incomplete folder") + print(" with full data reconstruction") + print(" --https Port to use for HTTPS server") + print(" --ipv6_hosting <0|1> Listen on IPv6 address [::1] [*]") + print(" --no-login Start with username and password reset") + print(" --log-all Log all article handling (for developers)") + print(" --console Force console logging for OSX app") + print(" --new Run a new instance of SABnzbd") def print_version(): - print """ + print((""" %s-%s Copyright (C) 2008-2017, The SABnzbd-Team @@ -216,7 +217,7 @@ This is free software, and you are welcome to redistribute it under certain conditions. It is licensed under the GNU GENERAL PUBLIC LICENSE Version 2 or (at your option) any later version. 
-""" % (sabnzbd.MY_NAME, sabnzbd.__version__) +""" % (sabnzbd.MY_NAME, sabnzbd.__version__))) def daemonize(): @@ -225,7 +226,7 @@ def daemonize(): if pid > 0: sys.exit(0) except OSError: - print "fork() failed" + print("fork() failed") sys.exit(1) os.chdir(sabnzbd.DIR_PROG) @@ -239,7 +240,7 @@ def daemonize(): if pid > 0: sys.exit(0) except OSError: - print "fork() failed" + print("fork() failed") sys.exit(1) dev_null = file('/dev/null', 'r') @@ -484,7 +485,7 @@ def all_localhosts(): for item in info: item = item[4][0] # Avoid problems on strange Linux settings - if not isinstance(item, basestring): + if not isinstance(item, str): continue # Only return IPv6 when enabled if item not in ips and ('::1' not in item or sabnzbd.cfg.ipv6_hosting()): @@ -996,13 +997,13 @@ def main(): if enable_https and https_port: try: portend.free(cherryhost, https_port, timeout=0.05) - except IOError, error: + except IOError as error: Bail_Out(browserhost, cherryport) except: Bail_Out(browserhost, cherryport, '49') try: portend.free(cherryhost, cherryport, timeout=0.05) - except IOError, error: + except IOError as error: Bail_Out(browserhost, cherryport) except: Bail_Out(browserhost, cherryport, '49') @@ -1019,7 +1020,7 @@ def main(): port = https_port or cherryport try: portend.free(browserhost, port, timeout=0.05) - except IOError, error: + except IOError as error: if str(error) == 'Port not bound.': pass else: @@ -1048,7 +1049,7 @@ def main(): if not (enable_https and not https_port): try: portend.free(browserhost, cherryport, timeout=0.05) - except IOError, error: + except IOError as error: if str(error) == 'Port not bound.': pass else: @@ -1077,8 +1078,8 @@ def main(): logdir = sabnzbd.cfg.log_dir.get_path() if fork and not logdir: - print "Error:" - print "I refuse to fork without a log directory!" + print("Error:") + print("I refuse to fork without a log directory!") sys.exit(1) if clean_up: @@ -1108,8 +1109,8 @@ def main(): logger.setLevel(LOGLEVELS[logging_level + 1]) except IOError: - print "Error:" - print "Can't write to logfile" + print("Error:") + print("Can't write to logfile") exit_sab(2) if fork: @@ -1651,15 +1652,15 @@ def HandleCommandLine(allow_service=True): service, sab_opts, serv_opts, _upload_nzbs = commandline_handler() if service and not allow_service: # The other frozen apps don't support Services - print "For service support, use SABnzbd-service.exe" + print("For service support, use SABnzbd-service.exe") return True elif service: if service in ('install', 'update'): # In this case check for required parameters path = get_f_option(sab_opts) if not path: - print 'The -f parameter is required.\n' \ - 'Use: -f %s' % service + print(('The -f parameter is required.\n' \ + 'Use: -f %s' % service)) return True # First run the service installed, because this will @@ -1669,9 +1670,9 @@ def HandleCommandLine(allow_service=True): # Add our own parameter to the Registry sab_opts = prep_service_parms(sab_opts) if set_serv_parms(SABnzbd._svc_name_, sab_opts): - print SERVICE_MSG + print(SERVICE_MSG) else: - print 'Cannot set required Registry info.' 
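# A short sketch of the exception-syntax change seen in the port checks above:
# "except IOError, error" becomes "except IOError as error", and error details
# are read from attributes instead of tuple unpacking. The helper name is
# illustrative, not the portend API used in the patch.
import socket

def port_is_free(host, port):
    try:
        probe = socket.socket()
        probe.bind((host, port))
        probe.close()
        return True
    except IOError as error:    # "except IOError, error:" is a SyntaxError in Python 3
        # attributes such as error.errno replace tuple unpacking
        print("Port not free: errno %s" % error.errno)
        return False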
+ print('Cannot set required Registry info.') else: # Other service commands need no manipulation win32serviceutil.HandleCommandLine(SABnzbd) diff --git a/sabnzbd/__init__.py b/sabnzbd/__init__.py index 2bae911..9f6f505 100644 --- a/sabnzbd/__init__.py +++ b/sabnzbd/__init__.py @@ -22,7 +22,7 @@ import os import logging import datetime import tempfile -import cPickle +import pickle import pickle import gzip import subprocess @@ -638,7 +638,7 @@ def add_nzbfile(nzbfile, pp=None, script=None, cat=None, priority=NORMAL_PRIORIT if cat and cat.lower() == 'default': cat = None - if isinstance(nzbfile, basestring): + if isinstance(nzbfile, str): # File coming from queue repair filename = nzbfile keep = True @@ -669,7 +669,7 @@ def add_nzbfile(nzbfile, pp=None, script=None, cat=None, priority=NORMAL_PRIORIT logging.info('Adding %s', filename) - if isinstance(nzbfile, basestring): + if isinstance(nzbfile, str): path = nzbfile else: try: @@ -841,7 +841,7 @@ def keep_awake(): # set ES_SYSTEM_REQUIRED KERNEL32.SetThreadExecutionState(ctypes.c_int(0x00000001)) else: - sleepless.keep_awake(u'SABnzbd is busy downloading and/or post-processing') + sleepless.keep_awake('SABnzbd is busy downloading and/or post-processing') if not awake and sleepless: sleepless.allow_sleep() @@ -854,7 +854,7 @@ def get_new_id(prefix, folder, check_list=None): """ Return unique prefixed admin identifier within folder optionally making sure that id is not in the check_list. """ - for n in xrange(10000): + for n in range(10000): try: if not os.path.exists(folder): os.makedirs(folder) @@ -878,14 +878,14 @@ def save_data(data, _id, path, do_pickle=True, silent=False): path = os.path.join(path, _id) # We try 3 times, to avoid any dict or access problems - for t in xrange(3): + for t in range(3): try: with open(path, 'wb') as data_file: if do_pickle: if cfg.use_pickle(): pickle.dump(data, data_file) else: - cPickle.dump(data, data_file) + pickle.dump(data, data_file) else: data_file.write(data) break @@ -918,7 +918,7 @@ def load_data(_id, path, remove=True, do_pickle=True, silent=False): if cfg.use_pickle(): data = pickle.load(data_file) else: - data = cPickle.load(data_file) + data = pickle.load(data_file) else: data = data_file.read() @@ -948,13 +948,13 @@ def save_admin(data, _id): logging.debug("[%s] Saving data for %s in %s", misc.caller_name(), _id, path) # We try 3 times, to avoid any dict or access problems - for t in xrange(3): + for t in range(3): try: with open(path, 'wb') as data_file: if cfg.use_pickle(): data = pickle.dump(data, data_file) else: - data = cPickle.dump(data, data_file) + data = pickle.dump(data, data_file) break except: if t == 2: @@ -979,7 +979,7 @@ def load_admin(_id, remove=False, silent=False): if cfg.use_pickle(): data = pickle.load(data_file) else: - data = cPickle.load(data_file) + data = pickle.load(data_file) if remove: misc.remove_file(path) except: @@ -1183,5 +1183,5 @@ def history_updated(): """ To make sure we always have a fresh history """ sabnzbd.LAST_HISTORY_UPDATE += 1 # Never go over the limit - if sabnzbd.LAST_HISTORY_UPDATE+1 >= sys.maxint: + if sabnzbd.LAST_HISTORY_UPDATE+1 >= sys.maxsize: sabnzbd.LAST_HISTORY_UPDATE = 1 diff --git a/sabnzbd/api.py b/sabnzbd/api.py index f248268..14fb29d 100644 --- a/sabnzbd/api.py +++ b/sabnzbd/api.py @@ -317,10 +317,10 @@ def _api_translate(name, output, kwargs): def _api_addfile(name, output, kwargs): """ API: accepts name, output, pp, script, cat, priority, nzbname """ # When uploading via flash it will send the nzb in a kw arg called Filedata 
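# The cPickle module is gone in Python 3; the plain pickle module already uses
# the C accelerator (_pickle) when available, so both branches above now call
# the same implementation. A minimal dual-version import, if Python 2.7 still
# had to be supported, would look like this (illustrative only):
try:
    import cPickle as pickle     # Python 2: separate C implementation
except ImportError:
    import pickle                # Python 3: C accelerator is built in

data = pickle.loads(pickle.dumps({'queue': [1, 2, 3]}))
assert data == {'queue': [1, 2, 3]}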
- if name is None or isinstance(name, basestring): + if name is None or isinstance(name, str): name = kwargs.get('Filedata') # Normal upload will send the nzb in a kw arg called nzbfile - if name is None or isinstance(name, basestring): + if name is None or isinstance(name, str): name = kwargs.get('nzbfile') if hasattr(name, 'getvalue'): # Side effect of next line is that attribute .value is created @@ -354,10 +354,10 @@ def _api_retry(name, output, kwargs): """ API: accepts name, output, value(=nzo_id), nzbfile(=optional NZB), password (optional) """ value = kwargs.get('value') # When uploading via flash it will send the nzb in a kw arg called Filedata - if name is None or isinstance(name, basestring): + if name is None or isinstance(name, str): name = kwargs.get('Filedata') # Normal upload will send the nzb in a kw arg called nzbfile - if name is None or isinstance(name, basestring): + if name is None or isinstance(name, str): name = kwargs.get('nzbfile') password = kwargs.get('password') password = password[0] if isinstance(password, list) else password @@ -557,7 +557,7 @@ def _api_addurl(names, output, kwargs): nzbnames = [nzbnames] nzo_ids = [] - for n in xrange(len(names)): + for n in range(len(names)): name = names[n] if n < len(nzbnames): nzbname = nzbnames[n] @@ -734,9 +734,9 @@ def _api_test_email(name, output, kwargs): pack = {} pack['download'] = ['action 1', 'action 2'] pack['unpack'] = ['action 1', 'action 2'] - res = sabnzbd.emailer.endjob(u'I had a d\xe8ja vu', 'unknown', True, - os.path.normpath(os.path.join(cfg.complete_dir.get_path(), u'/unknown/I had a d\xe8ja vu')), - 123 * MEBI, None, pack, 'my_script', u'Line 1\nLine 2\nLine 3\nd\xe8ja vu\n', 0, + res = sabnzbd.emailer.endjob('I had a d\xe8ja vu', 'unknown', True, + os.path.normpath(os.path.join(cfg.complete_dir.get_path(), '/unknown/I had a d\xe8ja vu')), + 123 * MEBI, None, pack, 'my_script', 'Line 1\nLine 2\nLine 3\nd\xe8ja vu\n', 0, test=kwargs) if res == 'Email succeeded': res = None @@ -1060,8 +1060,8 @@ def report(output, error=None, keyword='value', data=None, callback=None, compat # Special handling for list/tuple (backward compatibility) data = [str(val) for val in data] data = ' '.join(data) - if isinstance(data, unicode): - response = u'%s\n' % data + if isinstance(data, str): + response = '%s\n' % data else: response = '%s\n' % str(data) @@ -1088,7 +1088,7 @@ class xml_factory(object): def _dict(self, keyw, lst): text = [] - for key in lst.keys(): + for key in list(lst.keys()): text.append(self.run(key, lst[key])) if keyw: return '<%s>%s\n' % (keyw, ''.join(text), keyw) @@ -1105,7 +1105,7 @@ class xml_factory(object): elif isinstance(cat, tuple): text.append(self._tuple(plural_to_single(keyw, 'tuple'), cat)) else: - if not isinstance(cat, basestring): + if not isinstance(cat, str): cat = str(cat) name = plural_to_single(keyw, 'item') text.append('<%s>%s\n' % (name, xml_name(cat, encoding='utf-8'), name)) @@ -1482,7 +1482,7 @@ def rss_qstatus(): percentage = "%s%%" % (int(((mb - mbleft) / mb) * 100)) filename = xml_name(filename) - name = u'%s (%s)' % (filename, percentage) + name = '%s (%s)' % (filename, percentage) item = Item() item.title = name @@ -1813,7 +1813,7 @@ def build_history(start=None, limit=None, verbose=False, verbose_list=None, sear for item in items: for key in item: value = item[key] - if isinstance(value, basestring): + if isinstance(value, str): item[key] = converter(value) if details_show_all: @@ -1973,7 +1973,7 @@ def list_cats(default=True): def remove_callable(dic): """ Remove all 
callable items from dictionary """ - for key, value in dic.items(): + for key, value in list(dic.items()): if callable(value): del dic[key] return dic diff --git a/sabnzbd/assembler.py b/sabnzbd/assembler.py index 890d5da..cafabb9 100644 --- a/sabnzbd/assembler.py +++ b/sabnzbd/assembler.py @@ -20,7 +20,7 @@ sabnzbd.assembler - threaded assembly/decoding of files """ import os -import Queue +import queue import logging import re from threading import Thread @@ -51,7 +51,7 @@ class Assembler(Thread): if queue: self.queue = queue else: - self.queue = Queue.Queue() + self.queue = queue.Queue() Assembler.do = self def stop(self): @@ -96,11 +96,11 @@ class Assembler(Thread): logging.info('Decoding %s %s', filepath, nzf.type) try: filepath = self.assemble(nzf, filepath) - except IOError, (errno, strerror): + except IOError as err: # If job was deleted or in active post-processing, ignore error if not nzo.is_gone() and not nzo.pp_active: # 28 == disk full => pause downloader - if errno == 28: + if err.errno == 28: logging.error(T('Disk full! Forcing Pause')) else: logging.error(T('Disk error on creating file %s'), clip_path(filepath)) @@ -217,7 +217,7 @@ def is_cloaked(nzo, path, names): for name in names: name = get_filename(name.lower()) name, ext = os.path.splitext(unicoder(name)) - if ext == u'.rar' and fname.startswith(name) and (len(fname) - len(name)) < 8 and len(names) < 3 and not RE_SUBS.search(fname): + if ext == '.rar' and fname.startswith(name) and (len(fname) - len(name)) < 8 and len(names) < 3 and not RE_SUBS.search(fname): # Only warn once if nzo.encrypted == 0: logging.warning(T('Job "%s" is probably encrypted due to RAR with same name inside this RAR'), nzo.final_name) diff --git a/sabnzbd/bpsmeter.py b/sabnzbd/bpsmeter.py index bd3fd4a..4421844 100644 --- a/sabnzbd/bpsmeter.py +++ b/sabnzbd/bpsmeter.py @@ -89,7 +89,7 @@ def fix_keys(data): """ Convert keys of each dictionary in tuple 'data' to unicode """ new_data = [] if isinstance(data, list): - for n in xrange(len(data)): + for n in range(len(data)): if isinstance(data[n], dict): new = {} for key in data[n]: @@ -128,7 +128,7 @@ class BPSMeter(object): self.q_period = 'm' # Daily/Weekly/Monthly quota = d/w/m self.quota = self.left = 0.0 # Quota and remaining quota self.have_quota = False # Flag for quota active - self.q_time = 0L # Next reset time for quota + self.q_time = 0 # Next reset time for quota self.q_hour = 0 # Quota reset hour self.q_minute = 0 # Quota reset minute self.quota_enabled = True # Scheduled quota enable/disable @@ -190,7 +190,7 @@ class BPSMeter(object): self.defaults() # Force update of counters and validate data try: - for server in self.grand_total.keys(): + for server in list(self.grand_total.keys()): self.update(server) except TypeError: self.defaults() @@ -219,25 +219,25 @@ class BPSMeter(object): if server: if server not in self.day_total: - self.day_total[server] = 0L + self.day_total[server] = 0 self.day_total[server] += amount if server not in self.week_total: - self.week_total[server] = 0L + self.week_total[server] = 0 self.week_total[server] += amount if server not in self.month_total: - self.month_total[server] = 0L + self.month_total[server] = 0 self.month_total[server] += amount if server not in self.grand_total: - self.grand_total[server] = 0L + self.grand_total[server] = 0 self.grand_total[server] += amount if server not in self.timeline_total: self.timeline_total[server] = {} if self.day_label not in self.timeline_total[server]: - self.timeline_total[server][self.day_label]= 0L + 
self.timeline_total[server][self.day_label]= 0 self.timeline_total[server][self.day_label] += amount # Quota check @@ -292,18 +292,18 @@ class BPSMeter(object): def get_sums(self): """ return tuple of grand, month, week, day totals """ - return (sum([v for v in self.grand_total.values()]), - sum([v for v in self.month_total.values()]), - sum([v for v in self.week_total.values()]), - sum([v for v in self.day_total.values()]) + return (sum([v for v in list(self.grand_total.values())]), + sum([v for v in list(self.month_total.values())]), + sum([v for v in list(self.week_total.values())]), + sum([v for v in list(self.day_total.values())]) ) def amounts(self, server): """ Return grand, month, week, day totals for specified server """ - return self.grand_total.get(server, 0L), \ - self.month_total.get(server, 0L), \ - self.week_total.get(server, 0L), \ - self.day_total.get(server, 0L), \ + return self.grand_total.get(server, 0), \ + self.month_total.get(server, 0), \ + self.week_total.get(server, 0), \ + self.day_total.get(server, 0), \ self.timeline_total.get(server, {}) def clear_server(self, server): @@ -422,7 +422,7 @@ class BPSMeter(object): self.left = quota - self.left self.quota = quota else: - self.quota = self.left = 0L + self.quota = self.left = 0 self.update(0) self.next_reset() if self.left > 0.5 and allow_resume: @@ -472,7 +472,7 @@ class BPSMeter(object): def midnight(self): """ Midnight action: dummy update for all servers """ - for server in self.day_total.keys(): + for server in list(self.day_total.keys()): self.update(server) diff --git a/sabnzbd/config.py b/sabnzbd/config.py index b4881d8..c012471 100644 --- a/sabnzbd/config.py +++ b/sabnzbd/config.py @@ -27,7 +27,7 @@ import shutil import time import random from hashlib import md5 -from urlparse import urlparse +from urllib.parse import urlparse import sabnzbd.misc from sabnzbd.constants import CONFIG_VERSION, NORMAL_PRIORITY, DEFAULT_PRIORITY, MAX_WIN_DFOLDER import configobj @@ -275,7 +275,7 @@ class OptionList(Option): def get_string(self): """ Return the list as a comma-separated string """ lst = self.get() - if isinstance(lst, basestring): + if isinstance(lst, str): return lst else: return ', '.join(lst) @@ -283,7 +283,7 @@ class OptionList(Option): def default_string(self): """ Return the default list as a comma-separated string """ lst = self.default() - if isinstance(lst, basestring): + if isinstance(lst, str): return lst else: return ', '.join(lst) @@ -308,7 +308,7 @@ class OptionStr(Option): def set(self, value): """ Set stripped value """ error = None - if isinstance(value, basestring) and self.__strip: + if isinstance(value, str) and self.__strip: value = value.strip() if self.__validation: error, val = self.__validation(value) @@ -411,7 +411,7 @@ class ConfigServer(object): value = values[kw] except KeyError: continue - exec 'self.%s.set(value)' % kw + exec('self.%s.set(value)' % kw) if not self.displayname(): self.displayname.set(self.__name) return True @@ -476,7 +476,7 @@ class ConfigCat(object): value = values[kw] except KeyError: continue - exec 'self.%s.set(value)' % kw + exec('self.%s.set(value)' % kw) return True def get_dict(self, safe=False): @@ -545,7 +545,7 @@ class OptionFilters(Option): def set_dict(self, values): """ Create filter list from dictionary with keys 'filter[0-9]+' """ filters = [] - for n in xrange(len(values)): + for n in range(len(values)): kw = 'filter%d' % n val = values.get(kw) if val is not None: @@ -589,7 +589,7 @@ class ConfigRSS(object): value = values[kw] except KeyError: 
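# Sketch of the dict-view change behind the many list(...) wrappers added in
# this patch: in Python 3, keys()/values()/items() return live views, so a
# snapshot is needed when the dict is modified while iterating (as in
# BPSMeter.midnight() or clear_server()); for a plain read-only loop the
# extra list() copy is not required.
day_total = {'srv1': 1024, 'srv2': 0}

for server in list(day_total.keys()):   # snapshot: safe to mutate below
    if not day_total[server]:
        del day_total[server]

total = sum(day_total.values())         # read-only: the view is enough
assert total == 1024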
continue - exec 'self.%s.set(value)' % kw + exec('self.%s.set(value)' % kw) self.filters.set_dict(values) return True @@ -623,7 +623,7 @@ def get_dconfig(section, keyword, nested=False): """ data = {} if not section: - for section in database.keys(): + for section in list(database.keys()): res, conf = get_dconfig(section, None, True) data.update(conf) @@ -634,12 +634,12 @@ def get_dconfig(section, keyword, nested=False): return False, {} if section in ('servers', 'categories', 'rss'): data[section] = [] - for keyword in sect.keys(): + for keyword in list(sect.keys()): res, conf = get_dconfig(section, keyword, True) data[section].append(conf) else: data[section] = {} - for keyword in sect.keys(): + for keyword in list(sect.keys()): res, conf = get_dconfig(section, keyword, True) data[section].update(conf) @@ -717,7 +717,7 @@ def _read_config(path, try_backup=False): # No file found, create default INI file try: if not sabnzbd.WIN32: - prev = os.umask(077) + prev = os.umask(0o77) fp = open(path, "w") fp.write("__version__=%s\n[misc]\n[logging]\n" % CONFIG_VERSION) fp.close() @@ -746,7 +746,7 @@ def _read_config(path, try_backup=False): # INI file is still in 8bit ASCII encoding, so try Latin-1 instead CFG = configobj.ConfigObj(lines, default_encoding='cp1252', encoding='cp1252') - except (IOError, configobj.ConfigObjError, UnicodeEncodeError), strerror: + except (IOError, configobj.ConfigObjError, UnicodeEncodeError) as strerror: if try_backup: if isinstance(strerror, UnicodeEncodeError): strerror = 'Character encoding of the file is inconsistent' @@ -764,8 +764,8 @@ def _read_config(path, try_backup=False): CFG.filename = path CFG.encoding = 'utf-8' - CFG['__encoding__'] = u'utf-8' - CFG['__version__'] = unicode(CONFIG_VERSION) + CFG['__encoding__'] = 'utf-8' + CFG['__version__'] = str(CONFIG_VERSION) # Use CFG data to set values for all static options for section in database: @@ -951,7 +951,7 @@ def get_ordered_categories(): # Transform to list and sort categories = [] - for cat in database_cats.keys(): + for cat in list(database_cats.keys()): if cat != '*': categories.append(database_cats[cat].get_dict()) diff --git a/sabnzbd/database.py b/sabnzbd/database.py index 6b6bbea..49c1f7e 100644 --- a/sabnzbd/database.py +++ b/sabnzbd/database.py @@ -119,7 +119,7 @@ class HistoryDB(object): def execute(self, command, args=(), save=False): ''' Wrapper for executing SQL commands ''' - for tries in xrange(5, 0, -1): + for tries in range(5, 0, -1): try: if args and isinstance(args, tuple): self.c.execute(command, args) @@ -129,7 +129,7 @@ class HistoryDB(object): self.con.commit() return True except: - error = str(sys.exc_value) + error = str(sys.exc_info()[1]) if tries >= 0 and 'is locked' in error: logging.debug('Database locked, wait and retry') time.sleep(0.5) @@ -479,7 +479,7 @@ def build_history_info(nzo, storage='', downpath='', postproc_time=0, script_out # Pack the dictionary up into a single string # Stage Name is separated by ::: stage lines by ; and stages by \r\n lines = [] - for key, results in stages.iteritems(): + for key, results in list(stages.items()): lines.append('%s:::%s' % (key, ';'.join(results))) stage_log = '\r\n'.join(lines) @@ -487,11 +487,11 @@ def build_history_info(nzo, storage='', downpath='', postproc_time=0, script_out report = 'future' if nzo.futuretype else '' # Analyze series info only when job is finished - series = u'' + series = '' if postproc_time: seriesname, season, episode, dummy = sabnzbd.newsunpack.analyse_show(nzo.final_name) if seriesname and season 
and episode: - series = u'%s/%s/%s' % (seriesname.lower(), season, episode) + series = '%s/%s/%s' % (seriesname.lower(), season, episode) return (completed, name, nzb_name, category, pp, script, report, url, status, nzo_id, storage, path, script_log, script_line, download_time, postproc_time, stage_log, downloaded, completeness, diff --git a/sabnzbd/decoder.py b/sabnzbd/decoder.py index 86bb306..1d0f12e 100644 --- a/sabnzbd/decoder.py +++ b/sabnzbd/decoder.py @@ -70,7 +70,7 @@ class BadYenc(Exception): Exception.__init__(self) -YDEC_TRANS = ''.join([chr((i + 256 - 42) % 256) for i in xrange(256)]) +YDEC_TRANS = ''.join([chr((i + 256 - 42) % 256) for i in range(256)]) class Decoder(Thread): @@ -125,7 +125,7 @@ class Decoder(Thread): nzf.article_count += 1 found = True - except IOError, e: + except IOError as e: logme = T('Decoding %s failed') % art_id logging.warning(logme) logging.info("Traceback: ", exc_info=True) @@ -135,7 +135,7 @@ class Decoder(Thread): sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(nzf, nzo) register = False - except MemoryError, e: + except MemoryError as e: logme = T('Decoder failure: Out of memory') logging.warning(logme) anfo = sabnzbd.articlecache.ArticleCache.do.cache_info() @@ -147,7 +147,7 @@ class Decoder(Thread): sabnzbd.nzbqueue.NzbQueue.do.reset_try_lists(nzf, nzo) register = False - except CrcError, e: + except CrcError as e: logme = 'CRC Error in %s' % art_id logging.info(logme) @@ -236,7 +236,7 @@ class Decoder(Thread): # Continue for _yenc or Python-yEnc # Filter out empty ones - data = filter(None, data) + data = [_f for _f in data if _f] # No point in continuing if we don't have any data left if data: nzf = article.nzf @@ -248,7 +248,7 @@ class Decoder(Thread): if not ybegin: found = False try: - for i in xrange(min(40, len(data))): + for i in range(min(40, len(data))): if data[i].startswith('begin '): nzf.type = 'uu' found = True @@ -277,7 +277,7 @@ class Decoder(Thread): # Decode data if HAVE_YENC: decoded_data, crc = _yenc.decode_string(''.join(data))[:2] - partcrc = '%08X' % ((crc ^ -1) & 2 ** 32L - 1) + partcrc = '%08X' % ((crc ^ -1) & 2 ** 32 - 1) else: data = ''.join(data) for i in (0, 9, 10, 13, 27, 32, 46, 61): @@ -285,7 +285,7 @@ class Decoder(Thread): data = data.replace(j, chr(i)) decoded_data = data.translate(YDEC_TRANS) crc = binascii.crc32(decoded_data) - partcrc = '%08X' % (crc & 2 ** 32L - 1) + partcrc = '%08X' % (crc & 2 ** 32 - 1) if ypart: crcname = 'pcrc32' @@ -350,7 +350,7 @@ def yCheck(data): yend = None # Check head - for i in xrange(min(40, len(data))): + for i in range(min(40, len(data))): try: if data[i].startswith('=ybegin '): splits = 3 @@ -372,7 +372,7 @@ def yCheck(data): break # Check tail - for i in xrange(-1, -11, -1): + for i in range(-1, -11, -1): try: if data[i].startswith('=yend '): yend = ySplit(data[i]) diff --git a/sabnzbd/dirscanner.py b/sabnzbd/dirscanner.py index 0d65c5c..df7d23b 100644 --- a/sabnzbd/dirscanner.py +++ b/sabnzbd/dirscanner.py @@ -380,7 +380,7 @@ class DirScanner(threading.Thread): # Wait until the attributes are stable for 1 second # but give up after 3 sec stable = False - for n in xrange(3): + for n in range(3): time.sleep(1.0) try: stat_tuple_tmp = os.stat(path) diff --git a/sabnzbd/downloader.py b/sabnzbd/downloader.py index 2793b01..65295c7 100644 --- a/sabnzbd/downloader.py +++ b/sabnzbd/downloader.py @@ -27,7 +27,7 @@ from nntplib import NNTPPermanentError import socket import random import sys -import Queue +import queue import sabnzbd from sabnzbd.decorators import synchronized, 
NzbQueueLocker, DOWNLOADER_CV @@ -198,7 +198,7 @@ class Downloader(Thread): for server in config.get_servers(): self.init_server(None, server) - self.decoder_queue = Queue.Queue() + self.decoder_queue = queue.Queue() # Initialize decoders, only 1 for non-SABYenc self.decoder_workers = [] @@ -236,7 +236,7 @@ class Downloader(Thread): create = True if oldserver: - for n in xrange(len(self.servers)): + for n in range(len(self.servers)): if self.servers[n].id == oldserver: # Server exists, do re-init later create = False @@ -363,7 +363,7 @@ class Downloader(Thread): return True def nzo_servers(self, nzo): - return filter(nzo.server_in_try_list, self.servers) + return list(filter(nzo.server_in_try_list, self.servers)) def maybe_block_server(self, server): if server.optional and server.active and (server.bad_cons / server.threads) > 3: @@ -521,8 +521,8 @@ class Downloader(Thread): self.force_disconnect = False # => Select - readkeys = self.read_fds.keys() - writekeys = self.write_fds.keys() + readkeys = list(self.read_fds.keys()) + writekeys = list(self.write_fds.keys()) if readkeys or writekeys: read, write, error = select.select(readkeys, writekeys, (), 1.0) @@ -618,7 +618,7 @@ class Downloader(Thread): if sabnzbd.LOG_ALL: logging.debug("%s@%s last message -> %s", nw.thrdnum, nw.server.id, nntp_to_msg(nw.data)) nw.clear_data() - except NNTPPermanentError, error: + except NNTPPermanentError as error: # Handle login problems block = False penalty = 0 @@ -824,7 +824,7 @@ class Downloader(Thread): fileno = nw.nntp.sock.fileno() if fileno not in self.read_fds: self.read_fds[fileno] = nw - except socket.error, err: + except socket.error as err: logging.info('Looks like server closed connection: %s', err) self.__reset_nw(nw, "server broke off connection", quit=False) except: @@ -879,7 +879,7 @@ class Downloader(Thread): break def unblock_all(self): - for server_id in self._timers.keys(): + for server_id in list(self._timers.keys()): self.unblock(server_id) @NzbQueueLocker @@ -889,7 +889,7 @@ class Downloader(Thread): # Clean expired timers now = time.time() kicked = [] - for server_id in self._timers.keys(): + for server_id in list(self._timers.keys()): if not [stamp for stamp in self._timers[server_id] if stamp >= now]: logging.debug('Forcing re-evaluation of server %s', server_id) del self._timers[server_id] diff --git a/sabnzbd/emailer.py b/sabnzbd/emailer.py index 13ad1d6..404376d 100644 --- a/sabnzbd/emailer.py +++ b/sabnzbd/emailer.py @@ -85,7 +85,7 @@ def send(message, email_to, test=None): logging.debug("Connected to server %s:%s", server, port) - except Exception, errorcode: + except Exception as errorcode: if errorcode[0]: # Non SSL mail server diff --git a/sabnzbd/encoding.py b/sabnzbd/encoding.py index d348f09..fd64b5e 100644 --- a/sabnzbd/encoding.py +++ b/sabnzbd/encoding.py @@ -78,7 +78,7 @@ def special_fixer(p): if p: # Remove \" constructions from incoming headers p = p.replace(r'\"', r'"') - if not p or isinstance(p, unicode): + if not p or isinstance(p, str): return p try: # First see if it isn't just UTF-8 @@ -95,7 +95,7 @@ def unicoder(p, force=False): """ Make sure a Unicode string is returned When `force` is True, ignore filesystem encoding """ - if isinstance(p, unicode): + if isinstance(p, str): return p if isinstance(p, str): if gUTF or force: @@ -105,13 +105,13 @@ def unicoder(p, force=False): return p.decode(codepage, 'replace') return p.decode(codepage, 'replace') else: - return unicode(str(p)) + return str(str(p)) def xml_name(p, keep_escape=False, encoding=None): """ 
Prepare name for use in HTML/XML contect """ - if isinstance(p, unicode): + if isinstance(p, str): pass elif isinstance(p, str): if sabnzbd.DARWIN or encoding == 'utf-8': @@ -133,9 +133,9 @@ class LatinFilter(Filter): """ Make sure Cheetah gets only Unicode strings """ def filter(self, val, str=str, **kw): - if isinstance(val, unicode): + if isinstance(val, str): return val - elif isinstance(val, basestring): + elif isinstance(val, str): try: if sabnzbd.WIN32: return val.decode(codepage) @@ -144,9 +144,9 @@ class LatinFilter(Filter): except: return val.decode(codepage, 'replace') elif val is None: - return u'' + return '' else: - return unicode(str(val)) + return str(str(val)) class EmailFilter(Filter): @@ -155,17 +155,17 @@ class EmailFilter(Filter): """ def filter(self, val, str=str, **kw): - if isinstance(val, unicode): + if isinstance(val, str): return val - elif isinstance(val, basestring): + elif isinstance(val, str): try: return val.decode('utf-8') except: return val.decode(codepage, 'replace') elif val is None: - return u'' + return '' else: - return unicode(str(val)) + return str(str(val)) ################################################################################ @@ -241,7 +241,7 @@ def fixup_ff4(p): if ch.isdigit(): num += ch elif ch == ';': - name.append(unichr(int(num)).encode('utf8')) + name.append(chr(int(num)).encode('utf8')) start = False else: name.append('&#%s%s' % (num, ch)) @@ -282,12 +282,12 @@ def deunicode(p): """ Return the correct 8bit ASCII encoding for the platform: Latin-1 for Windows/Posix-non-UTF and UTF-8 for OSX/Posix-UTF """ - if isinstance(p, unicode): + if isinstance(p, str): if gUTF: return p.encode('utf-8') else: return p.encode(codepage, 'replace') - elif isinstance(p, basestring): + elif isinstance(p, str): if gUTF: try: p.decode('utf-8') diff --git a/sabnzbd/getipaddress.py b/sabnzbd/getipaddress.py index 8148adc..447d691 100644 --- a/sabnzbd/getipaddress.py +++ b/sabnzbd/getipaddress.py @@ -72,7 +72,7 @@ def publicipv4(): # Because of dual IPv4/IPv6 clients, finding the public ipv4 needs special attention, # meaning forcing IPv4 connections, and not allowing IPv6 connections try: - import urllib2 + import urllib.request, urllib.error, urllib.parse ipv4_found = False # we only want IPv4 resolving, so socket.AF_INET: result = addresslookup4(sabnzbd.cfg.selftest_host()) @@ -85,13 +85,13 @@ def publicipv4(): selftest_ipv4 = item[4][0] # get next IPv4 address of sabnzbd.cfg.selftest_host() try: # put the selftest_host's IPv4 address into the URL - req = urllib2.Request("http://" + selftest_ipv4 + "/") + req = urllib.request.Request("http://" + selftest_ipv4 + "/") # specify the User-Agent, because certain sites refuse connections with "python urllib2" as User-Agent: req.add_header('User-Agent', 'SABnzbd+/%s' % sabnzbd.version.__version__ ) # specify the Host, because we only provide the IPv4 address in the URL: req.add_header('Host', sabnzbd.cfg.selftest_host()) # get the response - public_ipv4 = urllib2.urlopen(req, timeout=2).read() # timeout 2 seconds, in case the website is not accessible + public_ipv4 = urllib.request.urlopen(req, timeout=2).read() # timeout 2 seconds, in case the website is not accessible # ... 
check the response is indeed an IPv4 address: socket.inet_aton(public_ipv4) # if we got anything else than a plain IPv4 address, this will raise an exception # if we get here without exception, we're done: diff --git a/sabnzbd/interface.py b/sabnzbd/interface.py index e45b110..b649288 100644 --- a/sabnzbd/interface.py +++ b/sabnzbd/interface.py @@ -23,7 +23,7 @@ import os import time import cherrypy import logging -import urllib +import urllib.request, urllib.parse, urllib.error import json import re import hashlib @@ -124,7 +124,7 @@ def Raiser(root='', **kwargs): args[key] = val # Add extras if args: - root = '%s?%s' % (root, urllib.urlencode(args)) + root = '%s?%s' % (root, urllib.parse.urlencode(args)) # Optionally add the leading /sabnzbd/ (or what the user set) if not root.startswith(cfg.url_base()): root = cherrypy.request.script_name + root @@ -233,7 +233,7 @@ def set_auth(conf): def check_session(kwargs): """ Check session key """ if not check_access(): - return u'Access denied' + return 'Access denied' key = kwargs.get('session') if not key: key = kwargs.get('apikey') @@ -1606,7 +1606,7 @@ class ConfigServer(object): new = [] servers = config.get_servers() - server_names = sorted(servers.keys(), key=lambda svr: '%d%02d%s' % (int(not servers[svr].enable()), servers[svr].priority(), servers[svr].displayname().lower())) + server_names = sorted(list(servers.keys()), key=lambda svr: '%d%02d%s' % (int(not servers[svr].enable()), servers[svr].priority(), servers[svr].displayname().lower())) for svr in server_names: new.append(servers[svr].get_dict(safe=True)) t, m, w, d, timeline = BPSMeter.do.amounts(svr) @@ -1725,7 +1725,7 @@ def handle_server(kwargs, root=None, new_svr=False): server = unique_svr_name(server) for kw in ('ssl', 'send_group', 'enable', 'optional'): - if kw not in kwargs.keys(): + if kw not in list(kwargs.keys()): kwargs[kw] = None if svr and not new_svr: svr.set_dict(kwargs) @@ -1788,7 +1788,7 @@ class ConfigRss(object): rss[feed]['pick_cat'] = pick_cat rss[feed]['pick_script'] = pick_script - rss[feed]['link'] = urllib.quote_plus(feed.encode('utf-8')) + rss[feed]['link'] = urllib.parse.quote_plus(feed.encode('utf-8')) rss[feed]['baselink'] = [get_base_url(uri) for uri in rss[feed]['uri']] rss[feed]['uris'] = feeds[feed].uri.get_string() @@ -2688,7 +2688,7 @@ def GetRssLog(feed): return job - jobs = sabnzbd.rss.show_result(feed).values() + jobs = list(sabnzbd.rss.show_result(feed).values()) good, bad, done = ([], [], []) for job in jobs: if job['status'][0] == 'G': diff --git a/sabnzbd/lang.py b/sabnzbd/lang.py index b7bed6b..aaa1f47 100644 --- a/sabnzbd/lang.py +++ b/sabnzbd/lang.py @@ -35,7 +35,7 @@ sabnzbd.lang - Language support import gettext -import __builtin__ +import builtins import glob import os import operator @@ -67,11 +67,11 @@ def set_language(language=None): lng = gettext.translation(_DOMAIN, _LOCALEDIR, [language], fallback=True, codeset='latin-1') # The unicode flag will make _() return Unicode - lng.install(unicode=True, names=['lgettext']) - __builtin__.__dict__['T'] = __builtin__.__dict__['_'] # Unicode - __builtin__.__dict__['Ta'] = __builtin__.__dict__['_'] # Unicode (Used to Latin-1, compatibility support) - __builtin__.__dict__['Tx'] = __builtin__.__dict__['_'] # Dynamic translation (unicode) - __builtin__.__dict__['TT'] = lambda x: unicode(x) # Use in text tables + lng.install(str=True, names=['lgettext']) + builtins.__dict__['T'] = builtins.__dict__['_'] # Unicode + builtins.__dict__['Ta'] = builtins.__dict__['_'] # Unicode (Used to Latin-1, 
compatibility support) + builtins.__dict__['Tx'] = builtins.__dict__['_'] # Dynamic translation (unicode) + builtins.__dict__['TT'] = lambda x: str(x) # Use in text tables def list_languages(): diff --git a/sabnzbd/misc.py b/sabnzbd/misc.py index ab8c034..2e5a823 100644 --- a/sabnzbd/misc.py +++ b/sabnzbd/misc.py @@ -22,7 +22,7 @@ sabnzbd.misc - misc classes import os import sys import logging -import urllib +import urllib.request, urllib.parse, urllib.error import re import shutil import threading @@ -99,7 +99,7 @@ def calc_age(date, trans=False): def monthrange(start, finish): """ Calculate months between 2 dates, used in the Config template """ months = (finish.year - start.year) * 12 + finish.month + 1 - for i in xrange(start.month, months): + for i in range(start.month, months): year = (i - 1) / 12 + start.year month = (i - 1) % 12 + 1 yield datetime.date(year, month, 1) @@ -123,7 +123,7 @@ def safe_fnmatch(f, pattern): return False -def globber(path, pattern=u'*'): +def globber(path, pattern='*'): """ Return matching base file/folder names in folder `path` """ # Cannot use glob.glob() because it doesn't support Windows long name notation if os.path.exists(path): @@ -131,7 +131,7 @@ def globber(path, pattern=u'*'): return [] -def globber_full(path, pattern=u'*'): +def globber_full(path, pattern='*'): """ Return matching full file/folder names in folder `path` """ # Cannot use glob.glob() because it doesn't support Windows long name notation if os.path.exists(path): @@ -335,7 +335,7 @@ def sanitize_foldername(name, limit=True): uFL_ILLEGAL = FL_ILLEGAL.decode('cp1252') uFL_LEGAL = FL_LEGAL.decode('cp1252') - if isinstance(name, unicode): + if isinstance(name, str): illegal = uFL_ILLEGAL legal = uFL_LEGAL else: @@ -380,11 +380,11 @@ def sanitize_and_trim_path(path): path = path.strip() new_path = '' if sabnzbd.WIN32: - if path.startswith(u'\\\\?\\UNC\\'): - new_path = u'\\\\?\\UNC\\' + if path.startswith('\\\\?\\UNC\\'): + new_path = '\\\\?\\UNC\\' path = path[8:] - elif path.startswith(u'\\\\?\\'): - new_path = u'\\\\?\\' + elif path.startswith('\\\\?\\'): + new_path = '\\\\?\\' path = path[4:] path = path.replace('\\', '/') @@ -456,7 +456,7 @@ def create_all_dirs(path, umask=False): mask = cfg.umask() if mask: try: - os.chmod(path, int(mask, 8) | 0700) + os.chmod(path, int(mask, 8) | 0o700) except: pass return result @@ -544,7 +544,7 @@ def windows_variant(): """ from win32api import GetVersionEx from win32con import VER_PLATFORM_WIN32_NT - import _winreg + import winreg vista_plus = x64 = False maj, _minor, _buildno, plat, _csd = GetVersionEx() @@ -556,14 +556,14 @@ def windows_variant(): # This does *not* work: # return os.environ['PROCESSOR_ARCHITECTURE'] == 'AMD64' # because the Python runtime returns 'X86' even on an x64 system! 
- key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, + key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment") - for n in xrange(_winreg.QueryInfoKey(key)[1]): - name, value, _val_type = _winreg.EnumValue(key, n) + for n in range(winreg.QueryInfoKey(key)[1]): + name, value, _val_type = winreg.EnumValue(key, n) if name == 'PROCESSOR_ARCHITECTURE': - x64 = value.upper() == u'AMD64' + x64 = value.upper() == 'AMD64' break - _winreg.CloseKey(key) + winreg.CloseKey(key) return vista_plus, x64 @@ -574,35 +574,35 @@ _SERVICE_PARM = 'CommandLine' def get_serv_parms(service): """ Get the service command line parameters from Registry """ - import _winreg + import winreg value = [] try: - key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, _SERVICE_KEY + service) - for n in xrange(_winreg.QueryInfoKey(key)[1]): - name, value, _val_type = _winreg.EnumValue(key, n) + key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, _SERVICE_KEY + service) + for n in range(winreg.QueryInfoKey(key)[1]): + name, value, _val_type = winreg.EnumValue(key, n) if name == _SERVICE_PARM: break - _winreg.CloseKey(key) + winreg.CloseKey(key) except WindowsError: pass - for n in xrange(len(value)): + for n in range(len(value)): value[n] = value[n] return value def set_serv_parms(service, args): """ Set the service command line parameters in Registry """ - import _winreg + import winreg uargs = [] for arg in args: uargs.append(unicoder(arg)) try: - key = _winreg.CreateKey(_winreg.HKEY_LOCAL_MACHINE, _SERVICE_KEY + service) - _winreg.SetValueEx(key, _SERVICE_PARM, None, _winreg.REG_MULTI_SZ, uargs) - _winreg.CloseKey(key) + key = winreg.CreateKey(winreg.HKEY_LOCAL_MACHINE, _SERVICE_KEY + service) + winreg.SetValueEx(key, _SERVICE_PARM, None, winreg.REG_MULTI_SZ, uargs) + winreg.CloseKey(key) except WindowsError: return False return True @@ -663,7 +663,7 @@ def check_latest_version(): # to bad file content. try: - fn = urllib.urlretrieve('https://raw.githubusercontent.com/sabnzbd/sabnzbd.github.io/master/latest.txt')[0] + fn = urllib.request.urlretrieve('https://raw.githubusercontent.com/sabnzbd/sabnzbd.github.io/master/latest.txt')[0] f = open(fn, 'r') data = f.read() f.close() @@ -866,7 +866,7 @@ def check_mount(path): m = re.search(r'^(/(?:mnt|media)/[^/]+)/', path) if m: - for n in xrange(cfg.wait_ext_drive() or 1): + for n in range(cfg.wait_ext_drive() or 1): if os.path.exists(m.group(1)): return True logging.debug('Waiting for %s to come online', m.group(1)) @@ -1018,7 +1018,7 @@ def get_filepath(path, nzo, filename): # download_dir is equal to the complete_dir. dName = nzo.work_name if not nzo.created: - for n in xrange(200): + for n in range(200): dName = dirname if n: dName += '.' 
+ str(n) @@ -1074,7 +1074,7 @@ def renamer(old, new): try: shutil.move(old, new) return - except WindowsError, err: + except WindowsError as err: logging.debug('Error renaming "%s" to "%s" <%s>', old, new, err) if err[0] == 32: logging.debug('Retry rename %s to %s', old, new) @@ -1096,7 +1096,7 @@ def remove_dir(path): try: remove_dir(path) return - except WindowsError, err: + except WindowsError as err: if err[0] == 32: logging.debug('Retry delete %s', path) retries -= 1 @@ -1319,11 +1319,11 @@ else: try: s = os.statvfs(_dir) if s.f_blocks < 0: - disk_size = float(sys.maxint) * float(s.f_frsize) + disk_size = float(sys.maxsize) * float(s.f_frsize) else: disk_size = float(s.f_blocks) * float(s.f_frsize) if s.f_bavail < 0: - available = float(sys.maxint) * float(s.f_frsize) + available = float(sys.maxsize) * float(s.f_frsize) else: available = float(s.f_bavail) * float(s.f_frsize) return disk_size / GIGI, available / GIGI @@ -1383,7 +1383,7 @@ def create_https_certificates(ssl_cert, ssl_key): try: from sabnzbd.utils.certgen import generate_key, generate_local_cert private_key = generate_key(key_size=2048, output_file=ssl_key) - generate_local_cert(private_key, days_valid=3560, output_file=ssl_cert, LN=u'SABnzbd', ON=u'SABnzbd', CN=u'localhost') + generate_local_cert(private_key, days_valid=3560, output_file=ssl_cert, LN='SABnzbd', ON='SABnzbd', CN='localhost') logging.info('Self-signed certificates generated successfully') except: logging.error(T('Error creating SSL key and certificate')) @@ -1448,7 +1448,7 @@ def find_on_path(targets): else: paths = os.getenv('PATH').split(':') - if isinstance(targets, basestring): + if isinstance(targets, str): targets = (targets, ) for path in paths: @@ -1582,19 +1582,19 @@ def set_permissions(path, recursive=True): def clip_path(path): r""" Remove \\?\ or \\?\UNC\ prefix from Windows path """ if sabnzbd.WIN32 and path and '?' 
in path: - path = path.replace(u'\\\\?\\UNC\\', u'\\\\', 1).replace(u'\\\\?\\', u'', 1) + path = path.replace('\\\\?\\UNC\\', '\\\\', 1).replace('\\\\?\\', '', 1) return path def long_path(path): """ For Windows, convert to long style path; others, return same path """ - if sabnzbd.WIN32 and path and not path.startswith(u'\\\\?\\'): + if sabnzbd.WIN32 and path and not path.startswith('\\\\?\\'): if path.startswith('\\\\'): # Special form for UNC paths - path = path.replace(u'\\\\', u'\\\\?\\UNC\\', 1) + path = path.replace('\\\\', '\\\\?\\UNC\\', 1) else: # Normal form for local paths - path = u'\\\\?\\' + path + path = '\\\\?\\' + path return path diff --git a/sabnzbd/newsunpack.py b/sabnzbd/newsunpack.py index f108ea9..301daaf 100644 --- a/sabnzbd/newsunpack.py +++ b/sabnzbd/newsunpack.py @@ -707,7 +707,7 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction elif line.startswith('Cannot find volume') and not inrecovery: filename = os.path.basename(TRANS(line[19:])) nzo.fail_msg = T('Unpacking failed, unable to find %s') % unicoder(filename) - msg = (u'[%s] ' + T('Unpacking failed, unable to find %s')) % (setname, filename) + msg = ('[%s] ' + T('Unpacking failed, unable to find %s')) % (setname, filename) nzo.set_unpack_info('Unpack', unicoder(msg)) logging.warning(T('ERROR: unable to find "%s"'), filename) fail = 1 @@ -715,14 +715,14 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction elif line.endswith('- CRC failed'): filename = TRANS(line[:-12].strip()) nzo.fail_msg = T('Unpacking failed, CRC error') - msg = (u'[%s] ' + T('ERROR: CRC failed in "%s"')) % (setname, filename) + msg = ('[%s] ' + T('ERROR: CRC failed in "%s"')) % (setname, filename) nzo.set_unpack_info('Unpack', unicoder(msg)) logging.warning(T('ERROR: CRC failed in "%s"'), setname) fail = 2 # Older unrar versions report a wrong password as a CRC error elif line.startswith('File too large'): nzo.fail_msg = T('Unpacking failed, file too large for filesystem (FAT?)') - msg = (u'[%s] ' + T('Unpacking failed, file too large for filesystem (FAT?)')) % setname + msg = ('[%s] ' + T('Unpacking failed, file too large for filesystem (FAT?)')) % setname nzo.set_unpack_info('Unpack', unicoder(msg)) # ERROR: File too large for file system (bigfile-5000MB) logging.error(T('ERROR: File too large for filesystem (%s)'), setname) @@ -730,7 +730,7 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction elif line.startswith('Write error'): nzo.fail_msg = T('Unpacking failed, write error or disk is full?') - msg = (u'[%s] ' + T('Unpacking failed, write error or disk is full?')) % setname + msg = ('[%s] ' + T('Unpacking failed, write error or disk is full?')) % setname nzo.set_unpack_info('Unpack', unicoder(msg)) logging.error(T('ERROR: write error (%s)'), line[11:]) fail = 1 @@ -739,11 +739,11 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction line2 = proc.readline() if 'must not exceed 260' in line2: nzo.fail_msg = T('Unpacking failed, path is too long') - msg = u'[%s] %s: %s' % (T('Unpacking failed, path is too long'), setname, unicoder(line[13:])) + msg = '[%s] %s: %s' % (T('Unpacking failed, path is too long'), setname, unicoder(line[13:])) logging.error(T('ERROR: path too long (%s)'), unicoder(line[13:])) else: nzo.fail_msg = T('Unpacking failed, write error or disk is full?') - msg = u'[%s] %s: %s' % (T('Unpacking failed, write error or disk is full?'), setname, unicoder(line[13:])) + msg = '[%s] %s: %s' % 
(T('Unpacking failed, write error or disk is full?'), setname, unicoder(line[13:])) logging.error(T('ERROR: write error (%s)'), unicoder(line[13:])) nzo.set_unpack_info('Unpack', unicoder(msg)) fail = 1 @@ -753,7 +753,7 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction elif line.startswith('ERROR: '): nzo.fail_msg = T('Unpacking failed, see log') logging.warning(T('ERROR: %s'), (unicoder(line[7:]))) - msg = (u'[%s] ' + T('ERROR: %s')) % (setname, line[7:]) + msg = ('[%s] ' + T('ERROR: %s')) % (setname, line[7:]) nzo.set_unpack_info('Unpack', unicoder(msg)) fail = 1 @@ -772,7 +772,7 @@ def rar_extract_core(rarfile_path, numrars, one_folder, nzo, setname, extraction else: filename = os.path.split(rarfile_path)[1] nzo.fail_msg = T('Unpacking failed, archive requires a password') - msg = (u'[%s][%s] ' + T('Unpacking failed, archive requires a password')) % (setname, filename) + msg = ('[%s][%s] ' + T('Unpacking failed, archive requires a password')) % (setname, filename) nzo.set_unpack_info('Unpack', unicoder(msg)) fail = 2 @@ -1329,7 +1329,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False): else: msg = T('Invalid par2 files or invalid PAR2 parameters, cannot verify or repair') nzo.fail_msg = msg - msg = u'[%s] %s' % (unicoder(setname), msg) + msg = '[%s] %s' % (unicoder(setname), msg) nzo.set_unpack_info('Repair', msg) nzo.status = Status.FAILED @@ -1348,7 +1348,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False): # Failed msg = T('Repair failed, not enough repair blocks (%s short)') % str(needed_blocks) nzo.fail_msg = msg - msg = u'[%s] %s' % (unicoder(setname), msg) + msg = '[%s] %s' % (unicoder(setname), msg) nzo.set_unpack_info('Repair', msg) nzo.status = Status.FAILED @@ -1414,7 +1414,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False): elif ' cannot be renamed to ' in line: msg = unicoder(line.strip()) nzo.fail_msg = msg - msg = u'[%s] %s' % (unicoder(setname), msg) + msg = '[%s] %s' % (unicoder(setname), msg) nzo.set_unpack_info('Repair', msg) nzo.status = Status.FAILED @@ -1422,7 +1422,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False): # Oops, disk is full! 
msg = T('Repairing failed, %s') % T('Disk full') nzo.fail_msg = msg - msg = u'[%s] %s' % (unicoder(setname), msg) + msg = '[%s] %s' % (unicoder(setname), msg) nzo.set_unpack_info('Repair', msg) nzo.status = Status.FAILED @@ -1447,7 +1447,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False): elif 'No details available for recoverable file' in line: msg = unicoder(line.strip()) nzo.fail_msg = msg - msg = u'[%s] %s' % (unicoder(setname), msg) + msg = '[%s] %s' % (unicoder(setname), msg) nzo.set_unpack_info('Repair', msg) nzo.status = Status.FAILED @@ -1482,7 +1482,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False): verifytotal = int(m.group(1)) p.wait() - except WindowsError, err: + except WindowsError as err: raise WindowsError(err) logging.debug('PAR2 output was\n%s', '\n'.join(lines)) @@ -1626,14 +1626,14 @@ def MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False) else: msg = T('Invalid par2 files or invalid PAR2 parameters, cannot verify or repair') nzo.fail_msg = msg - msg = u'[%s] %s' % (unicoder(setname), msg) + msg = '[%s] %s' % (unicoder(setname), msg) nzo.set_unpack_info('Repair', msg) nzo.status = Status.FAILED elif line.startswith('There is not enough space on the disk'): msg = T('Repairing failed, %s') % T('Disk full') nzo.fail_msg = msg - msg = u'[%s] %s' % (unicoder(setname), msg) + msg = '[%s] %s' % (unicoder(setname), msg) nzo.set_unpack_info('Repair', msg) nzo.status = Status.FAILED @@ -1774,7 +1774,7 @@ def MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False) # Failed msg = T('Repair failed, not enough repair blocks (%s short)') % str(needed_blocks) nzo.fail_msg = msg - msg = u'[%s] %s' % (unicoder(setname), msg) + msg = '[%s] %s' % (unicoder(setname), msg) nzo.set_unpack_info('Repair', msg) nzo.status = Status.FAILED @@ -1849,7 +1849,7 @@ def MultiPar_Verify(parfile, parfile_nzf, nzo, setname, joinables, single=False) if renames: # If succes, we also remove the possibly previously renamed ones if finished: - reconstructed.extend(renames.values()) + reconstructed.extend(list(renames.values())) # Adding to the collection nzo.renamed_file(renames) @@ -2309,6 +2309,17 @@ def list2cmdline(lst): return ' '.join(nlst) +def get_from_url(url): + """ Retrieve URL and return content + `timeout` sets non-standard timeout + """ + import urllib.request, urllib.error, urllib.parse + try: + return urllib.request.urlopen(url).read() + except: + return None + + def is_sevenfile(path): """ Return True if path has proper extension and 7Zip is installed """ return SEVEN_COMMAND and os.path.splitext(path)[1].lower() == '.7z' diff --git a/sabnzbd/newswrapper.py b/sabnzbd/newswrapper.py index aeb11db..cc5676a 100644 --- a/sabnzbd/newswrapper.py +++ b/sabnzbd/newswrapper.py @@ -131,7 +131,7 @@ def con(sock, host, port, sslenabled, write_fds, nntp): except (ssl.SSLError, CertificateError) as e: nntp.error(e) - except socket.error, e: + except socket.error as e: try: # socket.error can either return a string or a tuple if isinstance(e, tuple): @@ -231,7 +231,7 @@ class NNTP(object): except (ssl.SSLError, CertificateError) as e: self.error(e) - except socket.error, e: + except socket.error as e: try: # socket.error can either return a string or a tuple if isinstance(e, tuple): @@ -460,7 +460,7 @@ class NewsWrapper(object): self.last_line = new_lines.pop() # Already remove the starting dots - for i in xrange(len(new_lines)): + for i in range(len(new_lines)): if new_lines[i][:2] == '..': 
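# The urllib2 calls converted above now live in urllib.request / urllib.error /
# urllib.parse. A hedged sketch of a fetch helper in the same spirit as the new
# get_from_url() (the function name and timeout value here are illustrative):
import urllib.request
import urllib.error

def fetch_url(url, timeout=10):
    try:
        with urllib.request.urlopen(url, timeout=timeout) as response:
            return response.read()
    except urllib.error.URLError:
        return None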
new_lines[i] = new_lines[i][1:] self.lines.extend(new_lines) diff --git a/sabnzbd/notifier.py b/sabnzbd/notifier.py index 545e65e..7b6eb2a 100644 --- a/sabnzbd/notifier.py +++ b/sabnzbd/notifier.py @@ -20,13 +20,14 @@ sabnzbd.notifier - Send notifications to any notification services """ -from __future__ import with_statement + import os.path import logging import socket -import urllib2 -import httplib -import urllib +import urllib.request, urllib.error, urllib.parse +import http.client +import urllib.request, urllib.parse, urllib.error +import time import subprocess import json from threading import Thread @@ -268,7 +269,7 @@ def send_growl(title, msg, gtype, test=None): _GROWL, error = register_growl(growl_server, growl_password) if _GROWL: _GROWL_REG = True - if isinstance(msg, unicode): + if isinstance(msg, str): msg = msg.decode('utf-8') elif not isinstance(msg, str): msg = str(msg) @@ -408,8 +409,8 @@ def send_prowl(title, msg, gtype, force=False, test=None): return T('Cannot send, missing required data') title = Tx(NOTIFICATION.get(gtype, 'other')) - title = urllib2.quote(title.encode('utf8')) - msg = urllib2.quote(msg.encode('utf8')) + title = urllib.parse.quote(title.encode('utf8')) + msg = urllib.parse.quote(msg.encode('utf8')) prio = get_prio(gtype, 'prowl') if force: @@ -419,7 +420,7 @@ def send_prowl(title, msg, gtype, force=False, test=None): url = 'https://api.prowlapp.com/publicapi/add?apikey=%s&application=SABnzbd' \ '&event=%s&description=%s&priority=%d' % (apikey, title, msg, prio) try: - urllib2.urlopen(url) + urllib.request.urlopen(url) return '' except: logging.warning(T('Failed to send Prowl message')) @@ -473,8 +474,8 @@ def send_pushover(title, msg, gtype, force=False, test=None): def do_send_pushover(body): try: - conn = httplib.HTTPSConnection("api.pushover.net:443") - conn.request("POST", "/1/messages.json", urllib.urlencode(body), + conn = http.client.HTTPSConnection("api.pushover.net:443") + conn.request("POST", "/1/messages.json", urllib.parse.urlencode(body), {"Content-type": "application/x-www-form-urlencoded"}) res = conn.getresponse() if res.status != 200: @@ -499,10 +500,10 @@ def send_pushbullet(title, msg, gtype, force=False, test=None): if not apikey: return T('Cannot send, missing required data') - title = u'SABnzbd: ' + Tx(NOTIFICATION.get(gtype, 'other')) + title = 'SABnzbd: ' + Tx(NOTIFICATION.get(gtype, 'other')) try: - conn = httplib.HTTPSConnection('api.pushbullet.com:443') + conn = http.client.HTTPSConnection('api.pushbullet.com:443') conn.request('POST', '/v2/pushes', json.dumps({ 'type': 'note', @@ -534,7 +535,7 @@ def send_nscript(title, msg, gtype, force=False, test=None): parameters = sabnzbd.cfg.nscript_parameters() if not script: return T('Cannot send, missing required data') - title = u'SABnzbd: ' + Tx(NOTIFICATION.get(gtype, 'other')) + title = 'SABnzbd: ' + Tx(NOTIFICATION.get(gtype, 'other')) if force or check_classes(gtype, 'nscript'): script_path = make_script_path(script) diff --git a/sabnzbd/nzbqueue.py b/sabnzbd/nzbqueue.py index 390ec09..9c8c77a 100644 --- a/sabnzbd/nzbqueue.py +++ b/sabnzbd/nzbqueue.py @@ -458,7 +458,7 @@ class NzbQueue(object): if search: search = search.lower() removed = [] - for nzo_id in self.__nzo_table.keys(): + for nzo_id in list(self.__nzo_table.keys()): if (not search) or search in self.__nzo_table[nzo_id].final_name_pw_clean.lower(): nzo = self.__nzo_table.pop(nzo_id) nzo.deleted = True @@ -562,7 +562,7 @@ class NzbQueue(object): nzo1.priority = nzo2_priority item_id_pos1 = -1 item_id_pos2 = -1 - 
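Reviewer note: in `send_growl()` above, `isinstance(msg, unicode)` became `isinstance(msg, str)` while the branch body still calls `msg.decode('utf-8')`; Python 3 str has no decode(), so that path would raise AttributeError. If the goal is simply 'always end up with text', the usual shape is a small normalizer like this hypothetical one:

    def to_text(msg, encoding='utf-8'):
        # illustrative helper, not part of the patch
        if isinstance(msg, bytes):
            return msg.decode(encoding, 'replace')
        return str(msg)

    print(to_text(b'job finished'))   # 'job finished'
    print(to_text(42))                # '42'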
for i in xrange(len(self.__nzo_list)): + for i in range(len(self.__nzo_list)): if item_id_1 == self.__nzo_list[i].nzo_id: item_id_pos1 = i elif item_id_2 == self.__nzo_list[i].nzo_id: @@ -614,7 +614,7 @@ class NzbQueue(object): self.__nzo_list = sort_queue_function(self.__nzo_list, _nzo_size_cmp, reverse) def sort_queue(self, field, reverse=None): - if isinstance(reverse, basestring): + if isinstance(reverse, str): if reverse.lower() == 'desc': reverse = True else: @@ -645,7 +645,7 @@ class NzbQueue(object): return # Get the current position in the queue - for i in xrange(len(self.__nzo_list)): + for i in range(len(self.__nzo_list)): if nzo_id == self.__nzo_list[i].nzo_id: nzo_id_pos1 = i break diff --git a/sabnzbd/nzbstuff.py b/sabnzbd/nzbstuff.py index ac5023f..025003b 100644 --- a/sabnzbd/nzbstuff.py +++ b/sabnzbd/nzbstuff.py @@ -33,9 +33,9 @@ import hashlib import difflib try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO # SABnzbd modules import sabnzbd @@ -494,7 +494,7 @@ class NzbParser(xml.sax.handler.ContentHandler): # Check if file was added with same name if cfg.reject_duplicate_files(): - nzo_matches = filter(lambda x: (x.filename == nzf.filename), self.nzo.files) + nzo_matches = [x for x in self.nzo.files if (x.filename == nzf.filename)] if nzo_matches: logging.info('File %s occured twice in NZB, discarding smaller file', nzf.filename) @@ -743,14 +743,14 @@ class NzbObject(TryList): inpsrc.setByteStream(StringIO(nzb)) try: parser.parse(inpsrc) - except xml.sax.SAXParseException, err: + except xml.sax.SAXParseException as err: self.incomplete = True if '' not in nzb: logging.warning(T('Incomplete NZB file %s'), filename) else: logging.warning(T('Invalid NZB file %s, skipping (reason=%s, line=%s)'), filename, err.getMessage(), err.getLineNumber()) - except Exception, err: + except Exception as err: self.incomplete = True logging.warning(T('Invalid NZB file %s, skipping (reason=%s, line=%s)'), filename, err, 0) @@ -1002,7 +1002,7 @@ class NzbObject(TryList): # If we couldn't parse it, we ignore it if pack: - if pack not in self.md5packs.values(): + if pack not in list(self.md5packs.values()): logging.debug('Got md5pack for set %s', nzf.setname) self.md5packs[setname] = pack # See if we need to postpone some pars @@ -1277,7 +1277,7 @@ class NzbObject(TryList): return self.final_name def set_final_name_pw(self, name, password=None): - if isinstance(name, basestring): + if isinstance(name, str): if password is not None: name = platform_encode(name) self.password = platform_encode(password) @@ -1348,7 +1348,7 @@ class NzbObject(TryList): if not nzf.is_par2: # We have to find the right par-set blocks_new = 0 - for parset in self.extrapars.keys(): + for parset in list(self.extrapars.keys()): if (parset in nzf.filename or parset in original_filename) and self.extrapars[parset]: for new_nzf in self.extrapars[parset]: self.add_parfile(new_nzf) @@ -1381,9 +1381,9 @@ class NzbObject(TryList): return True, 200 # Do the full check - need = 0L - pars = 0L - short = 0L + need = 0 + pars = 0 + short = 0 anypars = False for nzf_id in self.files_table: nzf = self.files_table[nzf_id] @@ -1419,7 +1419,7 @@ class NzbObject(TryList): complete_time = format_time_string(seconds, timecompleted.days) msg1 = T('Downloaded in %s at an average of %sB/s') % (complete_time, to_units(avg_bps * 1024, dec_limit=1)) - msg1 += u'
' + T('Age') + ': ' + calc_age(self.avg_date, True) + msg1 += '
' + T('Age') + ': ' + calc_age(self.avg_date, True) bad = self.nzo_info.get('bad_articles', 0) miss = self.nzo_info.get('missing_articles', 0) @@ -1427,14 +1427,14 @@ class NzbObject(TryList): dups = self.nzo_info.get('duplicate_articles', 0) msg2 = msg3 = msg4 = msg5 = '' if bad: - msg2 = (u'
' + T('%s articles were malformed')) % bad + msg2 = ('
' + T('%s articles were malformed')) % bad if miss: - msg3 = (u'
' + T('%s articles were missing')) % miss + msg3 = ('
' + T('%s articles were missing')) % miss if dups: - msg4 = (u'
' + T('%s articles had non-matching duplicates')) % dups + msg4 = ('
' + T('%s articles had non-matching duplicates')) % dups if killed: - msg5 = (u'
' + T('%s articles were removed')) % killed - msg = u''.join((msg1, msg2, msg3, msg4, msg5, )) + msg5 = ('
' + T('%s articles were removed')) % killed + msg = ''.join((msg1, msg2, msg3, msg4, msg5, )) self.set_unpack_info('Download', msg, unique=True) if self.url: self.set_unpack_info('Source', self.url, unique=True) @@ -1442,7 +1442,7 @@ class NzbObject(TryList): if len(self.servercount) > 0: # Sort the servers first servers = config.get_servers() - server_names = sorted(servers.keys(), key=lambda svr: '%d%02d%s' % (int(not servers[svr].enable()), servers[svr].priority(), servers[svr].displayname().lower())) + server_names = sorted(list(servers.keys()), key=lambda svr: '%d%02d%s' % (int(not servers[svr].enable()), servers[svr].priority(), servers[svr].displayname().lower())) msgs = ['%s=%sB' % (servers[server_name].displayname(), to_units(self.servercount[server_name])) for server_name in server_names if server_name in self.servercount] self.set_unpack_info('Servers', ', '.join(msgs), unique=True) @@ -1500,14 +1500,14 @@ class NzbObject(TryList): def move_top_bulk(self, nzf_ids): self.cleanup_nzf_ids(nzf_ids) if nzf_ids: - target = range(len(nzf_ids)) + target = list(range(len(nzf_ids))) while 1: self.move_up_bulk(nzf_ids, cleanup=False) pos_nzf_table = self.build_pos_nzf_table(nzf_ids) - keys = pos_nzf_table.keys() + keys = list(pos_nzf_table.keys()) keys.sort() if target == keys: @@ -1517,14 +1517,14 @@ class NzbObject(TryList): def move_bottom_bulk(self, nzf_ids): self.cleanup_nzf_ids(nzf_ids) if nzf_ids: - target = range(len(self.files) - len(nzf_ids), len(self.files)) + target = list(range(len(self.files) - len(nzf_ids), len(self.files))) while 1: self.move_down_bulk(nzf_ids, cleanup=False) pos_nzf_table = self.build_pos_nzf_table(nzf_ids) - keys = pos_nzf_table.keys() + keys = list(pos_nzf_table.keys()) keys.sort() if target == keys: @@ -1961,9 +1961,9 @@ def get_attrib_file(path, size): try: f = open(path, 'r') except: - return [None for unused in xrange(size)] + return [None for unused in range(size)] - for unused in xrange(size): + for unused in range(size): line = f.readline().strip('\r\n ') if line: if line.lower() == 'none': diff --git a/sabnzbd/osxmenu.py b/sabnzbd/osxmenu.py index f130c77..47f08ee 100644 --- a/sabnzbd/osxmenu.py +++ b/sabnzbd/osxmenu.py @@ -80,7 +80,7 @@ class SABnzbdDelegate(NSObject): # logging.info("building menu") status_bar = NSStatusBar.systemStatusBar() self.status_item = status_bar.statusItemWithLength_(NSVariableStatusItemLength) - for i in status_icons.keys(): + for i in list(status_icons.keys()): self.icons[i] = NSImage.alloc().initByReferencingFile_(status_icons[i]) if sabnzbd.DARWIN_VERSION > 9: # Support for Yosemite Dark Mode @@ -709,7 +709,7 @@ class SABnzbdDelegate(NSObject): def openFolderAction_(self, sender): folder2open = sender.representedObject() - if isinstance(folder2open, unicode): + if isinstance(folder2open, str): folder2open = folder2open.encode("utf-8") if debug == 1: NSLog("[osx] %@", folder2open) diff --git a/sabnzbd/panic.py b/sabnzbd/panic.py index 6c9794f..d1c5de7 100644 --- a/sabnzbd/panic.py +++ b/sabnzbd/panic.py @@ -225,7 +225,7 @@ def show_error_dialog(msg): """ if sabnzbd.WIN32: ctypes.windll.user32.MessageBoxW(0, unicoder(msg), T('Fatal error'), 0) - print msg + print(msg) def error_page_401(status, message, traceback, version): diff --git a/sabnzbd/postproc.py b/sabnzbd/postproc.py index f7a94a4..f5b1f34 100644 --- a/sabnzbd/postproc.py +++ b/sabnzbd/postproc.py @@ -20,7 +20,7 @@ sabnzbd.postproc - threaded post-processing of jobs """ import os -import Queue +import queue import logging import sabnzbd import 
xml.sax.saxutils @@ -67,7 +67,7 @@ class PostProcessor(Thread): if self.history_queue is None: self.history_queue = [] - self.queue = Queue.Queue() + self.queue = queue.Queue() for nzo in self.history_queue: self.process(nzo) self.__stop = False @@ -185,7 +185,7 @@ class PostProcessor(Thread): try: nzo = self.queue.get(timeout=1) - except Queue.Empty: + except queue.Empty: if check_eoq: check_eoq = False handle_empty_queue() @@ -479,11 +479,11 @@ def process_job(nzo): script_ret = '' if len(script_log.rstrip().split('\n')) > 1: nzo.set_unpack_info('Script', - u'%s%s (%s)' % (script_ret, script_line, + '%s%s (%s)' % (script_ret, script_line, xml.sax.saxutils.escape(script_output), T('More')), unique=True) else: # No '(more)' button needed - nzo.set_unpack_info('Script', u'%s%s ' % (script_ret, script_line), unique=True) + nzo.set_unpack_info('Script', '%s%s ' % (script_ret, script_line), unique=True) # Cleanup again, including NZB files if all_ok: @@ -497,7 +497,7 @@ def process_job(nzo): if nzo.encrypted > 0: Rating.do.update_auto_flag(nzo.nzo_id, Rating.FLAG_ENCRYPTED) if empty: - hosts = map(lambda s: s.host, sabnzbd.downloader.Downloader.do.nzo_servers(nzo)) + hosts = [s.host for s in sabnzbd.downloader.Downloader.do.nzo_servers(nzo)] if not hosts: hosts = [None] for host in hosts: @@ -631,7 +631,7 @@ def parring(nzo, workdir): # Get verification status of sets verified = sabnzbd.load_data(VERIFIED_FILE, nzo.workpath, remove=False) or {} - repair_sets = nzo.extrapars.keys() + repair_sets = list(nzo.extrapars.keys()) re_add = False par_error = False @@ -947,7 +947,7 @@ def del_marker(path): def remove_from_list(name, lst): if name: - for n in xrange(len(lst)): + for n in range(len(lst)): if lst[n].endswith(name): logging.debug('Popping %s', lst[n]) lst.pop(n) diff --git a/sabnzbd/powersup.py b/sabnzbd/powersup.py index 6bd7755..a8750ed 100644 --- a/sabnzbd/powersup.py +++ b/sabnzbd/powersup.py @@ -165,7 +165,7 @@ def _get_systemproxy(method): try: bus = dbus.SystemBus() return bus.get_object(name, path), interface, pinterface - except dbus.exceptions.DBusException, msg: + except dbus.exceptions.DBusException as msg: logging.info('DBus not reachable (%s)', msg) return None, None, None @@ -193,7 +193,7 @@ def linux_shutdown(): proxy.Stop(dbus_interface=interface) else: logging.info('DBus does not support Stop (shutdown)') - except dbus.exceptions.DBusException, msg: + except dbus.exceptions.DBusException as msg: logging.error('Received a DBus exception %s', msg) os._exit(0) @@ -223,7 +223,7 @@ def linux_hibernate(): else: logging.info('DBus does not support Hibernate') time.sleep(10) - except dbus.exceptions.DBusException, msg: + except dbus.exceptions.DBusException as msg: logging.error('Received a DBus exception %s', msg) @@ -252,5 +252,5 @@ def linux_standby(): else: logging.info('DBus does not support Suspend (standby)') time.sleep(10) - except dbus.exceptions.DBusException, msg: + except dbus.exceptions.DBusException as msg: logging.error('Received a DBus exception %s', msg) diff --git a/sabnzbd/rating.py b/sabnzbd/rating.py index e8fd272..eaa1836 100644 --- a/sabnzbd/rating.py +++ b/sabnzbd/rating.py @@ -19,14 +19,14 @@ sabnzbd.rating - Rating support functions """ -import httplib -import urllib -import urlparse +import http.client +import urllib.request, urllib.parse, urllib.error +import urllib.parse import time import logging import copy import socket -import Queue +import queue import collections from threading import RLock, Thread import sabnzbd @@ -34,7 +34,7 @@ from 
sabnzbd.decorators import synchronized import sabnzbd.cfg as cfg # A queue which ignores duplicates but maintains ordering -class OrderedSetQueue(Queue.Queue): +class OrderedSetQueue(queue.Queue): def _init(self, maxsize): self.maxsize = maxsize self.queue = collections.OrderedDict() @@ -122,7 +122,7 @@ class Rating(Thread): silent=not cfg.rating_enable()) if self.version == 1: ratings = {} - for k, v in self.ratings.iteritems(): + for k, v in list(self.ratings.items()): ratings[k] = NzbRatingV2().to_v2(v) self.ratings = ratings self.version = 2 @@ -276,7 +276,7 @@ class Rating(Thread): _headers = {'User-agent': 'SABnzbd+/%s' % sabnzbd.version.__version__, 'Content-type': 'application/x-www-form-urlencoded'} rating = self._get_rating_by_indexer(indexer_id) # Requesting info here ensures always have latest information even on retry if hasattr(rating, 'host') and rating.host: - host_parsed = urlparse.urlparse(rating.host) + host_parsed = urllib.parse.urlparse(rating.host) rating_host = host_parsed.netloc # Is it an URL or just a HOST? if host_parsed.path and host_parsed.path != '/': @@ -303,19 +303,19 @@ class Rating(Thread): requests.append(self._flag_request(rating.auto_flag.get('val'), rating.auto_flag.get('detail'), 1)) try: - conn = httplib.HTTPSConnection(rating_host) - for request in filter(lambda r: r is not None, requests): + conn = http.client.HTTPSConnection(rating_host) + for request in [r for r in requests if r is not None]: if api_key: request['apikey'] = api_key request['i'] = indexer_id - conn.request('POST', rating_url, urllib.urlencode(request), headers=_headers) + conn.request('POST', rating_url, urllib.parse.urlencode(request), headers=_headers) response = conn.getresponse() response.read() - if response.status == httplib.UNAUTHORIZED: + if response.status == http.client.UNAUTHORIZED: _warn('Ratings server unauthorized user') return False - elif response.status != httplib.OK: + elif response.status != http.client.OK: _warn('Ratings server failed to process request (%s, %s)' % (response.status, response.reason)) return False self.ratings[indexer_id].changed = self.ratings[indexer_id].changed & ~rating.changed diff --git a/sabnzbd/rss.py b/sabnzbd/rss.py index c996614..ec39a52 100644 --- a/sabnzbd/rss.py +++ b/sabnzbd/rss.py @@ -150,7 +150,7 @@ def remove_obsolete(jobs, new_jobs): """ now = time.time() limit = now - 259200 # 3days (3x24x3600) - olds = jobs.keys() + olds = list(jobs.keys()) for old in olds: tm = jobs[old]['time'] if old not in new_jobs: @@ -175,7 +175,7 @@ class RSSQueue(object): self.jobs = sabnzbd.load_admin(RSS_FILE_NAME) if self.jobs: for feed in self.jobs: - remove_obsolete(self.jobs[feed], self.jobs[feed].keys()) + remove_obsolete(self.jobs[feed], list(self.jobs[feed].keys())) except: logging.warning(T('Cannot read %s'), RSS_FILE_NAME) logging.info("Traceback: ", exc_info=True) @@ -321,7 +321,7 @@ class RSSQueue(object): if not entries: return unicoder(msg) else: - entries = jobs.keys() + entries = list(jobs.keys()) # Filter out valid new links for entry in entries: @@ -333,8 +333,8 @@ class RSSQueue(object): link, category, size, age, season, episode = _get_link(uri, entry) except (AttributeError, IndexError): link = None - category = u'' - size = 0L + category = '' + size = 0 age = None logging.info(T('Incompatible feed') + ' ' + uri) logging.info("Traceback: ", exc_info=True) @@ -344,7 +344,7 @@ class RSSQueue(object): # If there's multiple feeds, remove the duplicates based on title and size if len(uris) > 1: skip_job = False - for job_link, job in 
jobs.items(): + for job_link, job in list(jobs.items()): # Allow 5% size deviation because indexers might have small differences for same release if job.get('title') == title and link != job_link and (job.get('size')*0.95) < size < (job.get('size')*1.05): logging.info("Ignoring job %s from other feed", title) @@ -358,7 +358,7 @@ class RSSQueue(object): if category in ('', '*'): category = None title = jobs[link].get('title', '') - size = jobs[link].get('size', 0L) + size = jobs[link].get('size', 0) age = jobs[link].get('age') season = jobs[link].get('season', 0) episode = jobs[link].get('episode', 0) @@ -387,7 +387,7 @@ class RSSQueue(object): # Match against all filters until an positive or negative match logging.debug('Size %s', size) - for n in xrange(regcount): + for n in range(regcount): if reEnabled[n]: if category and reTypes[n] == 'C': found = re.search(regexes[n], category) @@ -504,14 +504,14 @@ class RSSQueue(object): if self.next_run < time.time(): self.next_run = time.time() + cfg.rss_rate.get() * 60 feeds = config.get_rss() - for feed in feeds.keys(): + for feed in list(feeds.keys()): try: if feeds[feed].enable.get(): logging.info('Starting scheduled RSS read-out for "%s"', feed) active = True self.run_feed(feed, download=True, ignoreFirst=True) # Wait 15 seconds, else sites may get irritated - for unused in xrange(15): + for unused in range(15): if self.shutdown: return else: @@ -633,7 +633,7 @@ def _get_link(uri, entry): """ link = None category = '' - size = 0L + size = 0 uri = uri.lower() age = datetime.datetime.now() @@ -648,7 +648,7 @@ def _get_link(uri, entry): except: pass - if size == 0L: + if size == 0: _RE_SIZE1 = re.compile(r'Size:\s*(\d+\.\d+\s*[KMG]{0,1})B\W*', re.I) _RE_SIZE2 = re.compile(r'\W*(\d+\.\d+\s*[KMG]{0,1})B\W*', re.I) # Try to find size in Description @@ -700,7 +700,7 @@ def _get_link(uri, entry): return link, category, size, age, season, episode else: logging.warning(T('Empty RSS entry found (%s)'), link) - return None, '', 0L, None, 0, 0 + return None, '', 0, None, 0, 0 def special_rss_site(url): diff --git a/sabnzbd/scheduler.py b/sabnzbd/scheduler.py index 06bf95a..df7f941 100644 --- a/sabnzbd/scheduler.py +++ b/sabnzbd/scheduler.py @@ -92,7 +92,7 @@ def init(): if d.isdigit(): d = [int(i) for i in d] else: - d = range(1, 8) + d = list(range(1, 8)) if action_name == 'resume': action = scheduled_resume @@ -196,16 +196,16 @@ def init(): action, hour, minute = sabnzbd.bpsmeter.BPSMeter.do.get_quota() if action: logging.info('Setting schedule for quota check daily at %s:%s', hour, minute) - __SCHED.add_daytime_task(action, 'quota_reset', range(1, 8), None, (hour, minute), + __SCHED.add_daytime_task(action, 'quota_reset', list(range(1, 8)), None, (hour, minute), kronos.method.sequential, [], None) if sabnzbd.misc.int_conv(cfg.history_retention()) > 0: logging.info('Setting schedule for midnight auto history-purge') - __SCHED.add_daytime_task(sabnzbd.database.midnight_history_purge, 'midnight_history_purge', range(1, 8), None, (0, 0), + __SCHED.add_daytime_task(sabnzbd.database.midnight_history_purge, 'midnight_history_purge', list(range(1, 8)), None, (0, 0), kronos.method.sequential, [], None) logging.info('Setting schedule for midnight BPS reset') - __SCHED.add_daytime_task(sabnzbd.bpsmeter.midnight_action, 'midnight_bps', range(1, 8), None, (0, 0), + __SCHED.add_daytime_task(sabnzbd.bpsmeter.midnight_action, 'midnight_bps', list(range(1, 8)), None, (0, 0), kronos.method.sequential, [], None) # Subscribe to special schedule changes @@ -458,7 +458,7 @@ 
def pause_int(): val = abs(val) else: sign = '' - min = int(val / 60L) + min = int(val / 60) sec = int(val - min * 60) return "%s%d:%02d" % (sign, min, sec) diff --git a/sabnzbd/tvsort.py b/sabnzbd/tvsort.py index 9cebc40..ed8199e 100644 --- a/sabnzbd/tvsort.py +++ b/sabnzbd/tvsort.py @@ -360,7 +360,7 @@ class SeriesSorter(object): # Replace elements path = path_subst(sorter, mapping) - for key, name in REPLACE_AFTER.iteritems(): + for key, name in list(REPLACE_AFTER.items()): path = path.replace(key, name) # Lowercase all characters wrapped in {} @@ -458,7 +458,7 @@ def check_for_sequence(regex, files): prefix = name[:match1.start()] # Don't do anything if only one or no files matched - if len(matches.keys()) < 2: + if len(list(matches.keys())) < 2: return {} key_prev = 0 @@ -609,7 +609,7 @@ class GenericSorter(object): path = path_subst(sorter, mapping) - for key, name in REPLACE_AFTER.iteritems(): + for key, name in list(REPLACE_AFTER.items()): path = path.replace(key, name) # Lowercase all characters wrapped in {} @@ -675,8 +675,8 @@ class GenericSorter(object): # rename files marked as in a set if matched_files: logging.debug("Renaming a series of generic files (%s)", matched_files) - renamed = matched_files.values() - for index, file in matched_files.iteritems(): + renamed = list(matched_files.values()) + for index, file in list(matched_files.items()): filepath = os.path.join(current_path, file) renamed.append(filepath) self.fname, ext = os.path.splitext(os.path.split(file)[1]) @@ -828,7 +828,7 @@ class DateSorter(object): path = path_subst(sorter, mapping) - for key, name in REPLACE_AFTER.iteritems(): + for key, name in list(REPLACE_AFTER.items()): path = path.replace(key, name) # Lowercase all characters wrapped in {} @@ -896,7 +896,7 @@ def path_subst(path, mapping): break newpath.append(result) n += 1 - return u''.join([unicoder(x) for x in newpath]) + return ''.join([unicoder(x) for x in newpath]) def get_titles(nzo, match, name, titleing=False): diff --git a/sabnzbd/urlgrabber.py b/sabnzbd/urlgrabber.py index d428a0a..2dd7315 100644 --- a/sabnzbd/urlgrabber.py +++ b/sabnzbd/urlgrabber.py @@ -24,9 +24,9 @@ import sys import time import re import logging -import Queue -import urllib2 -from httplib import IncompleteRead +import queue +import urllib.request, urllib.error, urllib.parse +from http.client import IncompleteRead from threading import Thread import sabnzbd @@ -50,7 +50,7 @@ class URLGrabber(Thread): def __init__(self): Thread.__init__(self) - self.queue = Queue.Queue() + self.queue = queue.Queue() for tup in NzbQueue.do.get_urls(): url, nzo = tup self.queue.put((url, nzo)) @@ -115,7 +115,7 @@ class URLGrabber(Thread): logging.info('Grabbing URL %s', url) try: fn = _build_request(url) - except Exception, e: + except Exception as e: # Cannot list exceptions here, because of unpredictability over platforms error0 = str(sys.exc_info()[0]).lower() error1 = str(sys.exc_info()[1]).lower() @@ -281,15 +281,15 @@ class URLGrabber(Thread): def _build_request(url): # Detect basic auth # Adapted from python-feedparser - urltype, rest = urllib2.splittype(url) - realhost, rest = urllib2.splithost(rest) + urltype, rest = urllib.parse.splittype(url) + realhost, rest = urllib.parse.splithost(rest) if realhost: - user_passwd, realhost = urllib2.splituser(realhost) + user_passwd, realhost = urllib.parse.splituser(realhost) if user_passwd: url = '%s://%s%s' % (urltype, realhost, rest) # Start request - req = urllib2.Request(url) + req = urllib.request.Request(url) # Add headers 
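Reviewer note: `urllib.parse.splittype()/splithost()/splituser()` used in `_build_request()` do exist on Python 3, but only as undocumented internals, and the `user_passwd.encode('base64')` a few lines further down in the same function will fail outright because the base64 text codec is gone from str.encode(). `urlsplit()` plus `base64.b64encode()` cover both cases; a sketch with an invented URL:

    import base64
    from urllib.parse import urlsplit

    parts = urlsplit('https://user:secret@indexer.example.org/getnzb?id=1234')
    print(parts.hostname, parts.username, parts.password)

    if parts.username:
        token = base64.b64encode(('%s:%s' % (parts.username, parts.password)).encode('utf-8'))
        print('Authorization: Basic ' + token.decode('ascii'))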
req.add_header('User-Agent', 'SABnzbd+/%s' % sabnzbd.version.__version__) @@ -297,7 +297,7 @@ def _build_request(url): req.add_header('Accept-encoding', 'gzip') if user_passwd: req.add_header('Authorization', 'Basic ' + user_passwd.encode('base64').strip()) - return urllib2.urlopen(req) + return urllib.request.urlopen(req) def _analyse(fn, url): diff --git a/sabnzbd/utils/certgen.py b/sabnzbd/utils/certgen.py index e4911bf..be9a767 100644 --- a/sabnzbd/utils/certgen.py +++ b/sabnzbd/utils/certgen.py @@ -51,7 +51,7 @@ def generate_key(key_size=2048, output_file='key.pem'): # Ported from cryptography docs/x509/tutorial.rst -def generate_local_cert(private_key, days_valid=3560, output_file='cert.cert', LN=u'SABnzbd', ON=u'SABnzbd', CN=u'localhost'): +def generate_local_cert(private_key, days_valid=3560, output_file='cert.cert', LN='SABnzbd', ON='SABnzbd', CN='localhost'): # Various details about who we are. For a self-signed certificate the # subject and issuer are always the same. subject = issuer = x509.Name([ @@ -62,13 +62,13 @@ def generate_local_cert(private_key, days_valid=3560, output_file='cert.cert', L # build SubjectAltName list since we are not using a common name san_list = [ - x509.DNSName(u"localhost"), - x509.DNSName(u"127.0.0.1"), + x509.DNSName("localhost"), + x509.DNSName("127.0.0.1"), ] # append local v4 ip (functions already has try/catch logic) mylocalipv4 = localipv4() if mylocalipv4: - san_list.append(x509.DNSName(u"" + mylocalipv4)) + san_list.append(x509.DNSName("" + mylocalipv4)) cert = x509.CertificateBuilder().subject_name( subject @@ -94,7 +94,7 @@ def generate_local_cert(private_key, days_valid=3560, output_file='cert.cert', L return cert if __name__ == '__main__': - print 'Making key' + print('Making key') private_key = generate_key() - print 'Making cert' + print('Making cert') cert = generate_local_cert(private_key) diff --git a/sabnzbd/utils/checkdir.py b/sabnzbd/utils/checkdir.py index f5d042e..3da40d3 100644 --- a/sabnzbd/utils/checkdir.py +++ b/sabnzbd/utils/checkdir.py @@ -37,19 +37,19 @@ def isFAT(dir): if thisline.find('/') == 0: # Starts with /, so a real, local device fstype = thisline.split()[1] - if debug: print "File system type:", fstype + if debug: print(("File system type:", fstype)) if fstype.lower().find('fat') >= 0: FAT = True - if debug: print "FAT found" + if debug: print("FAT found") break elif 'win32' in sys.platform: import win32api if '?' 
in dir: # Remove \\?\ or \\?\UNC\ prefix from Windows path - dir = dir.replace(u'\\\\?\\UNC\\', u'\\\\', 1).replace(u'\\\\?\\', u'', 1) + dir = dir.replace('\\\\?\\UNC\\', '\\\\', 1).replace('\\\\?\\', '', 1) try: result = win32api.GetVolumeInformation(os.path.splitdrive(dir)[0]) - if debug: print result + if debug: print(result) if(result[4].startswith("FAT")): FAT = True except: @@ -74,12 +74,12 @@ def isFAT(dir): device = '' for thisline in os.popen(dfcmd).readlines(): if thisline.find('/')==0: - if debug: print thisline + if debug: print(thisline) # Starts with /, so a real, local device device = thisline.split()[0] mountcmd = "mount | grep " + device mountoutput = os.popen(mountcmd).readline().strip() - if debug: print mountoutput + if debug: print(mountoutput) if 'msdos' in mountoutput.split('(')[1]: FAT = True break @@ -91,15 +91,15 @@ def isFAT(dir): if __name__ == "__main__": - if debug: print sys.platform + if debug: print((sys.platform)) try: dir = sys.argv[1] except: - print "Specify dir on the command line" + print("Specify dir on the command line") sys.exit(0) if isFAT(dir): - print dir, "is on FAT" + print((dir, "is on FAT")) else: - print dir, "is not on FAT" + print((dir, "is not on FAT")) diff --git a/sabnzbd/utils/diskspeed.py b/sabnzbd/utils/diskspeed.py index c18e7ad..baf9585 100644 --- a/sabnzbd/utils/diskspeed.py +++ b/sabnzbd/utils/diskspeed.py @@ -18,7 +18,7 @@ def writetofile(filename, mysizeMB): return False try: - for x in xrange(writeloops): + for x in range(writeloops): f.write(_DUMP_DATA) except: logging.debug('Cannot write to file %s', filename) @@ -61,27 +61,27 @@ def diskspeedmeasure(dirname): if __name__ == "__main__": - print "Let's go" + print("Let's go") if len(sys.argv) >= 2: dirname = sys.argv[1] if not os.path.isdir(dirname): - print "Specified argument is not a directory. Bailing out" + print("Specified argument is not a directory. Bailing out") sys.exit(1) else: # no argument, so use current working directory dirname = os.getcwd() - print "Using current working directory" + print("Using current working directory") try: speed = diskspeedmeasure(dirname) - print "Disk writing speed: %.2f Mbytes per second" % speed - except IOError, e: + print(("Disk writing speed: %.2f Mbytes per second" % speed)) + except IOError as e: # print "IOError:", e if e.errno == 13: - print "Could not create test file. Check that you have write rights to directory", dirname + print(("Could not create test file. Check that you have write rights to directory", dirname)) except: - print "Something else went wrong" + print("Something else went wrong") raise - print "Done" + print("Done") diff --git a/sabnzbd/utils/getperformance.py b/sabnzbd/utils/getperformance.py index 1dad2db..93358df 100644 --- a/sabnzbd/utils/getperformance.py +++ b/sabnzbd/utils/getperformance.py @@ -9,7 +9,7 @@ def getcpu(): try: if platform.system() == "Windows": - import _winreg as winreg # needed on Python 2 + import winreg as winreg # needed on Python 2 key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r"Hardware\Description\System\CentralProcessor\0") cputype = winreg.QueryValueEx(key, "ProcessorNameString")[0] winreg.CloseKey(key) @@ -42,7 +42,7 @@ def getpystone(): value = None for pystonemodule in ['test.pystone', 'pystone']: try: - exec "from " + pystonemodule + " import pystones" + exec("from " + pystonemodule + " import pystones") value = int(pystones(1000)[1]) break # import and calculation worked, so we're done. 
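Reviewer note: the double parentheses 2to3 wraps around multi-argument prints (here in checkdir.py, and the same pattern appears in diskspeed.py, kronos.py, systrayiconthread.py and Deobfuscate.py) make Python 3 print a tuple repr instead of the old space-separated text; dropping the inner parentheses restores the Python 2 output:

    fstype = 'vfat'
    print(("File system type:", fstype))   # ('File system type:', 'vfat')  <- tuple repr
    print("File system type:", fstype)     # File system type: vfat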
Get out of the for loop except: @@ -51,5 +51,5 @@ def getpystone(): if __name__ == '__main__': - print getpystone() - print getcpu() + print((getpystone())) + print((getcpu())) diff --git a/sabnzbd/utils/happyeyeballs.py b/sabnzbd/utils/happyeyeballs.py index 99354e7..02b45b7 100644 --- a/sabnzbd/utils/happyeyeballs.py +++ b/sabnzbd/utils/happyeyeballs.py @@ -21,7 +21,7 @@ print happyeyeballs('newszilla.xs4all.nl', port=119) import socket import ssl -import Queue +import queue import threading import time import logging @@ -115,7 +115,7 @@ def happyeyeballs(HOST, **kwargs): except: if DEBUG: logging.debug("No IPv6 address found for %s", HOST) - myqueue = Queue.Queue() # queue used for threads giving back the results + myqueue = queue.Queue() # queue used for threads giving back the results try: # Get all IP (IPv4 and IPv6) addresses: @@ -157,22 +157,22 @@ if __name__ == '__main__': if DEBUG: logger.setLevel(logging.DEBUG) # plain HTTP/HTTPS sites: - print happyeyeballs('www.google.com') - print happyeyeballs('www.google.com', port=443, ssl=True) - print happyeyeballs('www.nu.nl') + print((happyeyeballs('www.google.com'))) + print((happyeyeballs('www.google.com', port=443, ssl=True))) + print((happyeyeballs('www.nu.nl'))) # newsservers: - print happyeyeballs('newszilla6.xs4all.nl', port=119) - print happyeyeballs('newszilla.xs4all.nl', port=119) - print happyeyeballs('block.cheapnews.eu', port=119) - print happyeyeballs('block.cheapnews.eu', port=443, ssl=True) - print happyeyeballs('sslreader.eweka.nl', port=563, ssl=True) - print happyeyeballs('news.thundernews.com', port=119) - print happyeyeballs('news.thundernews.com', port=119, preferipv6=False) - print happyeyeballs('secure.eu.thundernews.com', port=563, ssl=True) + print((happyeyeballs('newszilla6.xs4all.nl', port=119))) + print((happyeyeballs('newszilla.xs4all.nl', port=119))) + print((happyeyeballs('block.cheapnews.eu', port=119))) + print((happyeyeballs('block.cheapnews.eu', port=443, ssl=True))) + print((happyeyeballs('sslreader.eweka.nl', port=563, ssl=True))) + print((happyeyeballs('news.thundernews.com', port=119))) + print((happyeyeballs('news.thundernews.com', port=119, preferipv6=False))) + print((happyeyeballs('secure.eu.thundernews.com', port=563, ssl=True))) # Strange cases - print happyeyeballs('does.not.resolve', port=443, ssl=True) - print happyeyeballs('www.google.com', port=119) - print happyeyeballs('216.58.211.164') + print((happyeyeballs('does.not.resolve', port=443, ssl=True))) + print((happyeyeballs('www.google.com', port=119))) + print((happyeyeballs('216.58.211.164'))) diff --git a/sabnzbd/utils/json.py b/sabnzbd/utils/json.py index 15be75a..765af1d 100644 --- a/sabnzbd/utils/json.py +++ b/sabnzbd/utils/json.py @@ -45,10 +45,10 @@ class JsonWriter(object): def _write(self, obj): global LATIN_COUNTER ty = type(obj) - if ty is types.DictType: + if ty is dict: n = len(obj) self._append("{") - for k, v in obj.items(): + for k, v in list(obj.items()): self._write(k) self._append(":") self._write(v) @@ -56,7 +56,7 @@ class JsonWriter(object): if n > 0: self._append(",") self._append("}") - elif ty is types.ListType or ty is types.TupleType: + elif ty is list or ty is tuple: n = len(obj) self._append("[") for item in obj: @@ -65,9 +65,9 @@ class JsonWriter(object): if n > 0: self._append(",") self._append("]") - elif ty is types.StringType or ty is types.UnicodeType: + elif ty is bytes or ty is str: self._append('"') - if ty is types.UnicodeType: + if ty is str: obj = obj.encode('utf-8', 'replace') else: try: @@ 
-88,9 +88,9 @@ class JsonWriter(object): obj = obj.replace('\t', r'\t') self._append(obj) self._append('"') - elif ty is types.IntType or ty is types.LongType: + elif ty is int or ty is int: self._append(str(obj)) - elif ty is types.FloatType: + elif ty is float: self._append("%f" % obj) elif obj is True: self._append("true") @@ -99,4 +99,4 @@ class JsonWriter(object): elif obj is None: self._append("null") else: - raise WriteException, "Cannot write in JSON: %s" % repr(obj) + raise WriteException("Cannot write in JSON: %s" % repr(obj)) diff --git a/sabnzbd/utils/kronos.py b/sabnzbd/utils/kronos.py index 238bcc0..3453028 100644 --- a/sabnzbd/utils/kronos.py +++ b/sabnzbd/utils/kronos.py @@ -282,7 +282,7 @@ class Scheduler: while self.running: try: self.sched.run() - except Exception,x: + except Exception as x: logging.error("ERROR DURING SCHEDULER EXECUTION %s" % str(x), exc_info=True) # queue is empty; sleep a short while before checking again if self.running: @@ -303,7 +303,7 @@ class Task: """Execute the task action in the scheduler's thread.""" try: self.execute() - except Exception,x: + except Exception as x: self.handle_exception(x) self.reschedule(schedulerref()) @@ -469,7 +469,7 @@ try: # do the execute() call and exception handling here. try: self.execute() - except Exception,x: + except Exception as x: self.handle_exception(x) class ThreadedIntervalTask(ThreadedTaskMixin, IntervalTask): @@ -536,7 +536,7 @@ if hasattr(os, "fork"): # we are the child try: self.execute() - except Exception,x: + except Exception as x: self.handle_exception(x) os._exit(0) else: @@ -564,18 +564,18 @@ if hasattr(os, "fork"): if __name__=="__main__": def testaction(arg): - print ">>>TASK",arg,"sleeping 3 seconds" + print((">>>TASK",arg,"sleeping 3 seconds")) time.sleep(3) - print "<<>sys.stderr, msg, - print >>sys.stderr, "usage: %s [number_of_loops]" % sys.argv[0] + print(msg, end=' ', file=sys.stderr) + print("usage: %s [number_of_loops]" % sys.argv[0], file=sys.stderr) sys.exit(100) nargs = len(sys.argv) - 1 if nargs > 1: @@ -267,4 +267,4 @@ if __name__ == '__main__': error("Invalid argument %r;" % sys.argv[1]) else: loops = LOOPS - main(loops) + main(loops) \ No newline at end of file diff --git a/sabnzbd/utils/rarfile.py b/sabnzbd/utils/rarfile.py index aade465..097bc43 100644 --- a/sabnzbd/utils/rarfile.py +++ b/sabnzbd/utils/rarfile.py @@ -61,7 +61,7 @@ For more details, refer to source. """ -from __future__ import division, print_function + ## ## Imports and compat - support both Python 2.x and 3.x @@ -163,7 +163,7 @@ else: # pragma: no cover """Return hex string.""" return hexlify(data).decode('ascii') rar_crc32 = crc32 - unicode = str + str = str _byte_code = int # noqa @@ -778,7 +778,7 @@ class RarFile(object): def printdir(self): """Print archive file list to stdout.""" for f in self.infolist(): - print(f.filename) + print((f.filename)) def extract(self, member, path=None, pwd=None): """Extract single file into current directory. 
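Reviewer note: in utils/json.py the mechanical mapping of `types.IntType`/`types.LongType` leaves the tautology `ty is int or ty is int`; Python 3 has a single integer type, so one comparison is enough, and the identity test still keeps booleans on their dedicated True/False branches:

    obj = 42
    print(type(obj) is int)     # True  - a single check suffices after int/long unification
    print(type(True) is int)    # False - bool keeps falling through to the True/False branches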
@@ -986,7 +986,7 @@ class CommonParser(object): self._fd = fd sig = fd.read(len(self._expect_sig)) if sig != self._expect_sig: - if isinstance(self._rarfile, (str, unicode)): + if isinstance(self._rarfile, str): raise NotRarFile("Not a Rar archive: {}".format(self._rarfile)) raise NotRarFile("Not a Rar archive") @@ -1596,7 +1596,7 @@ class RAR5Parser(CommonParser): if kdf_count > 24: raise BadRarFile('Too large kdf_count') psw = self._password - if isinstance(psw, unicode): + if isinstance(psw, str): psw = psw.encode('utf8') key = pbkdf2_sha256(psw, salt, 1 << kdf_count) self._last_aes256_key = (kdf_count, salt, key) @@ -2684,7 +2684,7 @@ def _parse_xtime(flag, data, pos, basetime=None): def is_filelike(obj): """Filename or file object? """ - if isinstance(obj, str) or isinstance(obj, unicode): + if isinstance(obj, str) or isinstance(obj, str): return False res = True for a in ('read', 'tell', 'seek'): @@ -2696,7 +2696,7 @@ def is_filelike(obj): def rar3_s2k(psw, salt): """String-to-key hash for RAR3. """ - if not isinstance(psw, unicode): + if not isinstance(psw, str): psw = psw.decode('utf8') seed = psw.encode('utf-16le') + salt iv = EMPTY diff --git a/sabnzbd/utils/rsslib.py b/sabnzbd/utils/rsslib.py index 8b07de8..2605211 100644 --- a/sabnzbd/utils/rsslib.py +++ b/sabnzbd/utils/rsslib.py @@ -208,7 +208,7 @@ class RSS: c+= "length=\"" + str(i.enclosure.length )+ "\" " c+= "type=\"" + i.enclosure.type + "\"/>\n" - for k in i.nsItems.keys(): + for k in list(i.nsItems.keys()): c += self.optionalWrite( k , i.nsItems[ k ] ) c += "\n\n" diff --git a/sabnzbd/utils/servertests.py b/sabnzbd/utils/servertests.py index b321352..d1b9603 100644 --- a/sabnzbd/utils/servertests.py +++ b/sabnzbd/utils/servertests.py @@ -90,20 +90,20 @@ def test_nntp_server(host, port, server=None, username=None, password=None, ssl= nw.recv_chunk(block=True) nw.finish_connect(nw.status_code) - except socket.timeout, e: + except socket.timeout as e: if port != 119 and not ssl: return False, T('Timed out: Try enabling SSL or connecting on a different port.') else: return False, T('Timed out') - except socket.error, e: + except socket.error as e: # Trying SSL on non-SSL port? 
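Reviewer note: with the compatibility shim reduced from `unicode = str` to `str = str`, checks such as `isinstance(obj, str) or isinstance(obj, str)` in rarfile's `is_filelike()` collapse into one test; if bytes paths should still be treated as filenames on Python 3, a tuple check is probably the intent (a sketch, not upstream rarfile code):

    def looks_like_filename(obj):
        # assumption: a path argument may be str or bytes on Py3
        return isinstance(obj, (str, bytes))

    print(looks_like_filename('archive.rar'))    # True
    print(looks_like_filename(b'archive.rar'))   # True
    print(looks_like_filename(42))               # False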
if 'unknown protocol' in str(e).lower(): return False, T('Unknown SSL protocol: Try disabling SSL or connecting on a different port.') - return False, unicode(e) + return False, str(e) - except TypeError, e: + except TypeError as e: return False, T('Invalid server address.') except IndexError: @@ -111,7 +111,7 @@ def test_nntp_server(host, port, server=None, username=None, password=None, ssl= return False, T('Server quit during login sequence.') except: - return False, unicode(sys.exc_info()[1]) + return False, str(sys.exc_info()[1]) if not username or not password: nw.nntp.sock.sendall('ARTICLE \r\n') @@ -119,7 +119,7 @@ def test_nntp_server(host, port, server=None, username=None, password=None, ssl= nw.clear_data() nw.recv_chunk(block=True) except: - return False, unicode(sys.exc_info()[1]) + return False, str(sys.exc_info()[1]) if nw.status_code == '480': return False, T('Server requires username and password.') diff --git a/sabnzbd/utils/systrayiconthread.py b/sabnzbd/utils/systrayiconthread.py index 13041fc..249ef0f 100644 --- a/sabnzbd/utils/systrayiconthread.py +++ b/sabnzbd/utils/systrayiconthread.py @@ -123,7 +123,7 @@ class SysTrayIconThread(Thread): self._add_ids_to_menu_options(option_action), self._next_action_id)) else: - print 'Unknown item', option_text, option_icon, option_action + print(('Unknown item', option_text, option_icon, option_action)) self._next_action_id += 1 return result @@ -143,7 +143,7 @@ class SysTrayIconThread(Thread): 0, icon_flags) else: - print "Can't find icon file - using default." + print("Can't find icon file - using default.") hicon = win32gui.LoadIcon(0, win32con.IDI_APPLICATION) self.icons[path] = hicon @@ -284,4 +284,4 @@ def non_string_iterable(obj): except TypeError: return False else: - return not isinstance(obj, basestring) + return not isinstance(obj, str) diff --git a/sabnzbd/utils/upload.py b/sabnzbd/utils/upload.py index 76dfb5b..538df49 100644 --- a/sabnzbd/utils/upload.py +++ b/sabnzbd/utils/upload.py @@ -19,7 +19,7 @@ sabnzbd.utils.upload - File association functions for adding nzb files to sabnzbd """ -import urllib +import urllib.request, urllib.parse, urllib.error import logging import os from sabnzbd.encoding import unicoder @@ -35,7 +35,7 @@ def upload_file(url, fp): """ Function for uploading nzbs to a running sabnzbd instance """ try: fp = unicoder(fp).encode('utf-8') - fp = urllib.quote_plus(fp) + fp = urllib.parse.quote_plus(fp) url = '%s&mode=addlocalfile&name=%s' % (url, fp) # Add local apikey if it wasn't already in the registered URL apikey = cfg.api_key() diff --git a/scripts/Deobfuscate.py b/scripts/Deobfuscate.py index b85151f..d9b567f 100644 --- a/scripts/Deobfuscate.py +++ b/scripts/Deobfuscate.py @@ -53,12 +53,12 @@ MIN_FILE_SIZE = 40*1024*1024 # Are we being called from SABnzbd? if not os.environ.get('SAB_VERSION'): - print "This script needs to be called from SABnzbd as post-processing script." + print("This script needs to be called from SABnzbd as post-processing script.") sys.exit(1) def print_splitter(): """ Simple helper function """ - print '\n------------------------\n' + print('\n------------------------\n') # Windows or others? 
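Reviewer note: one thing the mechanical pass cannot catch is `nw.nntp.sock.sendall('ARTICLE \r\n')` in servertests.py: Python 3 sockets accept bytes only, so the command needs an explicit encode. No socket is opened in this illustration:

    cmd = 'ARTICLE <test@example>\r\n'
    payload = cmd.encode('ascii')        # Py3 sockets only take bytes

    print(type(payload), payload)
    # sock.sendall(cmd)      -> TypeError: a bytes-like object is required, not 'str'
    # sock.sendall(payload)  -> what the server test needs on Python 3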
par2_command = os.environ['SAB_PAR2_COMMAND'] @@ -67,9 +67,9 @@ if os.environ['SAB_MULTIPAR_COMMAND']: # Diagnostic info print_splitter() -print 'SABnzbd version: ', os.environ['SAB_VERSION'] -print 'Job location: ', os.environ['SAB_COMPLETE_DIR'] -print 'Par2-command: ', par2_command +print(('SABnzbd version: ', os.environ['SAB_VERSION'])) +print(('Job location: ', os.environ['SAB_COMPLETE_DIR'])) +print(('Par2-command: ', par2_command)) print_splitter() # Search for par2 files @@ -77,12 +77,12 @@ matches = [] for root, dirnames, filenames in os.walk(os.environ['SAB_COMPLETE_DIR']): for filename in fnmatch.filter(filenames, '*.par2'): matches.append(os.path.join(root, filename)) - print 'Found file:', os.path.join(root, filename) + print(('Found file:', os.path.join(root, filename))) # Found any par2 files we can use? run_renamer = True if not matches: - print "No par2 files found to process." + print("No par2 files found to process.") # Run par2 from SABnzbd on them for par2_file in matches: @@ -92,7 +92,7 @@ for par2_file in matches: # Start command print_splitter() - print 'Starting command: ', repr(command) + print(('Starting command: ', repr(command))) try: result = subprocess.check_output(command) except subprocess.CalledProcessError as e: @@ -101,23 +101,23 @@ for par2_file in matches: # Show output print_splitter() - print result + print(result) print_splitter() # Last status-line for the History # Check if the magic words are there if 'Repaired successfully' in result or 'All files are correct' in result or \ 'Repair complete' in result or 'All Files Complete' in result or 'PAR File(s) Incomplete' in result: - print 'Recursive repair/verify finished.' + print('Recursive repair/verify finished.') run_renamer = False else: - print 'Recursive repair/verify did not complete!' + print('Recursive repair/verify did not complete!') # No matches? Then we try to rename the largest file to the job-name if run_renamer: print_splitter() - print 'Trying to see if there are large files to rename' + print('Trying to see if there are large files to rename') print_splitter() # If there are more larger files, we don't rename @@ -130,10 +130,10 @@ if run_renamer: if file_size > MIN_FILE_SIZE and os.path.splitext(filename)[1].lower() not in EXCLUDED_FILE_EXTS: # Did we already found one? if largest_file: - print 'Found:', largest_file - print 'Found:', full_path + print(('Found:', largest_file)) + print(('Found:', full_path)) print_splitter() - print 'Found multiple larger files, aborting.' + print('Found multiple larger files, aborting.') largest_file = None break largest_file = full_path @@ -143,18 +143,18 @@ if run_renamer: # We don't need to do any cleaning of dir-names # since SABnzbd already did that! new_name = '%s%s' % (os.path.join(os.environ['SAB_COMPLETE_DIR'], os.environ['SAB_FINAL_NAME']), os.path.splitext(largest_file)[1].lower()) - print 'Renaming %s to %s' % (largest_file, new_name) + print(('Renaming %s to %s' % (largest_file, new_name))) # With retries for Windows for r in range(3): try: os.rename(largest_file, new_name) - print 'Renaming done!' 
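Reviewer note: `result = subprocess.check_output(command)` above returns bytes on Python 3, so the later `'Repaired successfully' in result` checks would raise TypeError. Passing `universal_newlines=True` (or decoding the output) keeps `result` as text; a small self-contained demonstration:

    import subprocess
    import sys

    # universal_newlines=True makes check_output return str instead of bytes,
    # so substring tests against the par2 output keep working unchanged.
    result = subprocess.check_output(
        [sys.executable, '-c', 'print("All files are correct")'],
        universal_newlines=True)
    print('All files are correct' in result)   # True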
+ print('Renaming done!') break except: time.sleep(1) else: - print 'No par2 files or large files found' + print('No par2 files or large files found') # Always exit with succes-code sys.exit(0) diff --git a/tests/conftest.py b/tests/conftest.py index e12de00..bbe64ef 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -21,7 +21,7 @@ tests.conftest - Wrappers to start SABnzbd for testing import os import itertools -import urllib2 +import urllib.request, urllib.error, urllib.parse import pytest import shutil import time diff --git a/tests/testhelper.py b/tests/testhelper.py index 9a48099..7d3bbcf 100644 --- a/tests/testhelper.py +++ b/tests/testhelper.py @@ -19,7 +19,7 @@ tests.testhelper - Basic helper functions """ -import urllib2 +import urllib.request, urllib.error, urllib.parse import json import requests diff --git a/tools/extract_pot.py b/tools/extract_pot.py index 571b3e1..f94702a 100755 --- a/tools/extract_pot.py +++ b/tools/extract_pot.py @@ -141,11 +141,11 @@ else: cmd = '%s %s %s' % (TOOL, PARMS, FILES) -print 'Create POT file' +print('Create POT file') #print cmd os.system(cmd) -print 'Post-process the POT file' +print('Post-process the POT file') src = open('%s/%s.pot.tmp' % (PO_DIR, DOMAIN), 'r') dst = open('%s/%s.pot' % (PO_DIR, DOMAIN), 'wb') dst.write(HEADER.replace('__TYPE__', 'MAIN')) @@ -169,7 +169,7 @@ dst.close() os.remove('%s/%s.pot.tmp' % (PO_DIR, DOMAIN)) -print 'Create the email POT file' +print('Create the email POT file') if not os.path.exists(POE_DIR): os.makedirs(POE_DIR) dst = open(os.path.join(POE_DIR, DOMAIN_EMAIL + '.pot'), 'wb') @@ -185,7 +185,7 @@ NSIS = 'NSIS_Installer.nsi' RE_NSIS = re.compile(r'LangString\s+\w+\s+\$\{LANG_ENGLISH\}\s+(".*)', re.I) if os.path.exists(NSIS): - print 'Creating the NSIS POT file' + print('Creating the NSIS POT file') if not os.path.exists(PON_DIR): os.makedirs(PON_DIR) src = open(NSIS, 'r') diff --git a/tools/make_mo.py b/tools/make_mo.py index cdb377c..2d7824c 100755 --- a/tools/make_mo.py +++ b/tools/make_mo.py @@ -158,13 +158,13 @@ def process_po_folder(domain, folder, extra=''): # Create the MO file mo_file = os.path.join(mo_path, mo_name) - print 'Compile %s' % mo_file + print(('Compile %s' % mo_file)) ret, output = run('%s %s -o "%s" "%s"' % (TOOL, extra, mo_file, fname)) if ret != 0: - print '\nMissing %s. Please install this package first.' % TOOL + print(('\nMissing %s. Please install this package first.' 
% TOOL)) exit(1) if 'WARNING:' in output: - print output + print(output) result = False return result @@ -197,10 +197,10 @@ def make_templates(): for path in glob.glob(os.path.join(MO_DIR, '*')): lng = os.path.split(path)[1] if lng != 'en' and os.path.exists(os.path.join(POE_DIR,lng+'.po')): - print 'Create email template for %s' % lng + print(('Create email template for %s' % lng)) trans = gettext.translation(DOMAIN_E, MO_DIR, [lng], fallback=False, codeset='latin-1') # The unicode flag will make _() return Unicode - trans.install(unicode=True, names=['lgettext']) + trans.install(str=True, names=['lgettext']) translate_tmpl('email', lng) translate_tmpl('rss', lng) @@ -241,14 +241,14 @@ def patch_nsis(): else: trans = gettext.translation(DOMAIN_N, MO_DIR, [lcode], fallback=False, codeset='latin-1') # The unicode flag will make _() return Unicode - trans.install(unicode=True, names=['lgettext']) + trans.install(str=True, names=['lgettext']) trans = _(text).encode('utf-8') trans = trans.replace('\r', '').replace('\n', '\\r\\n') trans = trans.replace('\\', '$\\').replace('"', '$\\"') line = '%s%s%s%s} "%s"\n' % (leader, item, rest, lng, trans) new.append(line) elif lng is None: - print 'Warning: unsupported language %s (%s), add to table in this script' % (langname, lcode) + print(('Warning: unsupported language %s (%s), add to table in this script' % (langname, lcode))) else: new.append(line) src.close() @@ -271,29 +271,29 @@ if os.path.exists(tl): result = True if len(sys.argv) > 1 and sys.argv[1] == 'all': - print 'NSIS MO file' + print('NSIS MO file') result = result and process_po_folder(DOMAIN_N, PON_DIR) - print "Patch NSIS script" + print("Patch NSIS script") patch_nsis() -print 'Email MO files' +print('Email MO files') result = result and process_po_folder(DOMAIN_E, POE_DIR) -print "Create email templates from MO files" +print("Create email templates from MO files") make_templates() -print 'Main program MO files' +print('Main program MO files') # -n option added to remove all newlines from the translations result = result and process_po_folder(DOMAIN, PO_DIR, '-n') -print "Remove temporary templates" +print("Remove temporary templates") remove_mo_files() -print +print() if result: exit(0) else: - print 'WARNINGS present!' + print('WARNINGS present!') exit(1) diff --git a/tools/msgfmt.py b/tools/msgfmt.py index 1edf67c..1183397 100755 --- a/tools/msgfmt.py +++ b/tools/msgfmt.py @@ -1,8 +1,7 @@ -#! /usr/bin/env python -# -*- coding: iso-8859-1 -*- -# Written by Martin v. L�wis +#! /usr/bin/env python3 +# Written by Martin v. Löwis -r"""Generate binary message catalog from textual translation description. +"""Generate binary message catalog from textual translation description. This program converts a textual Uniforum-style message catalog (.po file) into a binary GNU catalog (.mo file). This is essentially the same function as the @@ -16,8 +15,6 @@ Options: Specify the output file to write to. If omitted, output will go to a file named filename.mo (based off the input file name). - -n Remove all newlines (\r\n) from translations - -h --help Print this message and exit. @@ -27,63 +24,51 @@ Options: Display version information and exit. 
""" -import sys import os +import sys +import ast import getopt import struct import array -import re +from email.parser import HeaderParser __version__ = "1.1" MESSAGES = {} -nonewlines = False - -# Detector for HTML elements -RE_HTML = re.compile('<[^>]+>') def usage(code, msg=''): - print >> sys.stderr, __doc__ + print(__doc__, file=sys.stderr) if msg: - print >> sys.stderr, msg + print(msg, file=sys.stderr) sys.exit(code) def add(id, str, fuzzy): - """ Add a non-fuzzy translation to the dictionary. """ - global MESSAGES, nonewlines, RE_HTML + "Add a non-fuzzy translation to the dictionary." + global MESSAGES if not fuzzy and str: - if id.count('%s') == str.count('%s'): - if nonewlines and id and ('\r' in str or '\n' in str) and RE_HTML.search(str): - MESSAGES[id] = str.replace('\n', '').replace('\r', '') - else: - MESSAGES[id] = str - else: - print 'WARNING: %s mismatch, skipping!' - print ' %s' % id - print ' %s' % str + MESSAGES[id] = str def generate(): - """ Return the generated output. """ + "Return the generated output." global MESSAGES - keys = MESSAGES.keys() # the keys are sorted in the .mo file - keys.sort() + keys = sorted(MESSAGES.keys()) offsets = [] - ids = strs = '' + ids = strs = b'' for id in keys: # For each string, we need size and file offset. Each string is NUL # terminated; the NUL does not count into the size. offsets.append((len(ids), len(id), len(strs), len(MESSAGES[id]))) - ids += id + '\0' - strs += MESSAGES[id] + '\0' + ids += id + b'\0' + strs += MESSAGES[id] + b'\0' output = '' # The header is 7 32-bit unsigned integers. We don't use hash tables, so # the keys start right after the index tables. # translated string. - keystart = 7 * 4 + 16 * len(keys) + keystart = 7*4+16*len(keys) # and the values start after the keys valuestart = keystart + len(ids) koffsets = [] @@ -91,15 +76,15 @@ def generate(): # The string table first has the list of keys, then the list of values. # Each entry has first the size of the string, then the file offset. 
for o1, l1, o2, l2 in offsets: - koffsets += [l1, o1 + keystart] - voffsets += [l2, o2 + valuestart] + koffsets += [l1, o1+keystart] + voffsets += [l2, o2+valuestart] offsets = koffsets + voffsets output = struct.pack("Iiiiiii", - 0x950412deL, # Magic + 0x950412de, # Magic 0, # Version len(keys), # # of entries - 7 * 4, # start of key index - 7 * 4 + len(keys) * 8, # start of value index + 7*4, # start of key index + 7*4+len(keys)*8, # start of value index 0, 0) # size and offset of hash table output += array.array("i", offsets).tostring() output += ids @@ -120,17 +105,22 @@ def make(filename, outfile): outfile = os.path.splitext(infile)[0] + '.mo' try: - lines = open(infile).readlines() - except IOError, msg: - print >> sys.stderr, msg + lines = open(infile, 'rb').readlines() + except IOError as msg: + print(msg, file=sys.stderr) sys.exit(1) section = None fuzzy = 0 + # Start off assuming Latin-1, so everything decodes without failure, + # until we know the exact encoding + encoding = 'latin-1' + # Parse the catalog lno = 0 for l in lines: + l = l.decode(encoding) lno += 1 # If we get a comment line after a msgstr, this is a new entry if l[0] == '#' and section == STR: @@ -144,30 +134,58 @@ def make(filename, outfile): if l[0] == '#': continue # Now we are in a msgid section, output previous section - if l.startswith('msgid'): + if l.startswith('msgid') and not l.startswith('msgid_plural'): if section == STR: add(msgid, msgstr, fuzzy) + if not msgid: + # See whether there is an encoding declaration + p = HeaderParser() + charset = p.parsestr(msgstr.decode(encoding)).get_content_charset() + if charset: + encoding = charset section = ID l = l[5:] - msgid = msgstr = '' + msgid = msgstr = b'' + is_plural = False + # This is a message with plural forms + elif l.startswith('msgid_plural'): + if section != ID: + print('msgid_plural not preceded by msgid on %s:%d' % (infile, lno), + file=sys.stderr) + sys.exit(1) + l = l[12:] + msgid += b'\0' # separator of singular and plural + is_plural = True # Now we are in a msgstr section elif l.startswith('msgstr'): section = STR - l = l[6:] + if l.startswith('msgstr['): + if not is_plural: + print('plural without msgid_plural on %s:%d' % (infile, lno), + file=sys.stderr) + sys.exit(1) + l = l.split(']', 1)[1] + if msgstr: + msgstr += b'\0' # Separator of the various plural forms + else: + if is_plural: + print('indexed msgstr required for plural on %s:%d' % (infile, lno), + file=sys.stderr) + sys.exit(1) + l = l[6:] # Skip empty lines l = l.strip() if not l: continue - # XXX: Does this always follow Python escape semantics? 
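Reviewer note: the rewritten `make()` above starts out decoding as Latin-1 and only switches once the empty msgid's header block names a charset; that lookup is just the email header parser at work:

    from email.parser import HeaderParser

    header = 'Content-Type: text/plain; charset=UTF-8\nContent-Transfer-Encoding: 8bit\n'
    print(HeaderParser().parsestr(header).get_content_charset())   # 'utf-8'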
- l = eval(l) + l = ast.literal_eval(l) if section == ID: - msgid += l + msgid += l.encode(encoding) elif section == STR: - msgstr += l + msgstr += l.encode(encoding) else: - print >> sys.stderr, 'Syntax error on %s:%d' % (infile, lno), \ - 'before:' - print >> sys.stderr, l + print('Syntax error on %s:%d' % (infile, lno), \ + 'before:', file=sys.stderr) + print(l, file=sys.stderr) sys.exit(1) # Add last entry if section == STR: @@ -177,17 +195,16 @@ def make(filename, outfile): output = generate() try: - open(outfile, "wb").write(output) - except IOError, msg: - print >> sys.stderr, msg + open(outfile,"wb").write(output) + except IOError as msg: + print(msg, file=sys.stderr) def main(): - global nonewlines try: - opts, args = getopt.getopt(sys.argv[1:], 'nhVo:', + opts, args = getopt.getopt(sys.argv[1:], 'hVo:', ['help', 'version', 'output-file=']) - except getopt.error, msg: + except getopt.error as msg: usage(1, msg) outfile = None @@ -196,16 +213,14 @@ def main(): if opt in ('-h', '--help'): usage(0) elif opt in ('-V', '--version'): - print >> sys.stderr, "msgfmt.py", __version__ + print("msgfmt.py", __version__) sys.exit(0) elif opt in ('-o', '--output-file'): outfile = arg - elif opt in ('-n', ): - nonewlines = True # do it if not args: - print >> sys.stderr, 'No input file given' - print >> sys.stderr, "Try `msgfmt --help' for more information." + print('No input file given', file=sys.stderr) + print("Try `msgfmt --help' for more information.", file=sys.stderr) return for filename in args: @@ -213,4 +228,4 @@ def main(): if __name__ == '__main__': - main() + main() \ No newline at end of file diff --git a/util/apireg.py b/util/apireg.py index b6176e3..bfa86cc 100644 --- a/util/apireg.py +++ b/util/apireg.py @@ -19,18 +19,18 @@ util.apireg - Registration of API connection info """ -import _winreg +import winreg def reg_info(user): """ Return the reg key for API """ if user: # Normally use the USER part of the registry - section = _winreg.HKEY_CURRENT_USER + section = winreg.HKEY_CURRENT_USER keypath = r"Software\SABnzbd" else: # A Windows Service will use the service key instead - section = _winreg.HKEY_LOCAL_MACHINE + section = winreg.HKEY_LOCAL_MACHINE keypath = r"SYSTEM\CurrentControlSet\Services\SABnzbd" return section, keypath @@ -43,18 +43,18 @@ def get_connection_info(user=True): url = None try: - hive = _winreg.ConnectRegistry(None, section) - key = _winreg.OpenKey(hive, keypath + r'\api') - for i in range(0, _winreg.QueryInfoKey(key)[1]): - name, value, val_type = _winreg.EnumValue(key, i) + hive = winreg.ConnectRegistry(None, section) + key = winreg.OpenKey(hive, keypath + r'\api') + for i in range(0, winreg.QueryInfoKey(key)[1]): + name, value, val_type = winreg.EnumValue(key, i) if name == 'url': url = value - _winreg.CloseKey(key) + winreg.CloseKey(key) except WindowsError: pass finally: - _winreg.CloseKey(hive) + winreg.CloseKey(hive) # Nothing in user's registry, try system registry if user and not url: @@ -67,60 +67,60 @@ def set_connection_info(url, user=True): """ Set API info in register """ section, keypath = reg_info(user) try: - hive = _winreg.ConnectRegistry(None, section) + hive = winreg.ConnectRegistry(None, section) try: - key = _winreg.CreateKey(hive, keypath) + key = winreg.CreateKey(hive, keypath) except: pass - key = _winreg.OpenKey(hive, keypath) - mykey = _winreg.CreateKey(key, 'api') - _winreg.SetValueEx(mykey, 'url', None, _winreg.REG_SZ, url) - _winreg.CloseKey(mykey) - _winreg.CloseKey(key) + key = winreg.OpenKey(hive, keypath) + mykey = 
winreg.CreateKey(key, 'api') + winreg.SetValueEx(mykey, 'url', None, winreg.REG_SZ, url) + winreg.CloseKey(mykey) + winreg.CloseKey(key) except WindowsError: if user: set_connection_info(url, user=False) pass finally: - _winreg.CloseKey(hive) + winreg.CloseKey(hive) def del_connection_info(user=True): """ Remove API info from register """ section, keypath = reg_info(user) try: - hive = _winreg.ConnectRegistry(None, section) - key = _winreg.OpenKey(hive, keypath) - _winreg.DeleteKey(key, 'api') - _winreg.CloseKey(key) + hive = winreg.ConnectRegistry(None, section) + key = winreg.OpenKey(hive, keypath) + winreg.DeleteKey(key, 'api') + winreg.CloseKey(key) except WindowsError: if user: del_connection_info(user=False) pass finally: - _winreg.CloseKey(hive) + winreg.CloseKey(hive) def get_install_lng(): """ Return language-code used by the installer """ lng = 0 try: - hive = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE) - key = _winreg.OpenKey(hive, r"Software\SABnzbd") - for i in range(0, _winreg.QueryInfoKey(key)[1]): - name, value, val_type = _winreg.EnumValue(key, i) + hive = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + key = winreg.OpenKey(hive, r"Software\SABnzbd") + for i in range(0, winreg.QueryInfoKey(key)[1]): + name, value, val_type = winreg.EnumValue(key, i) if name == 'Installer Language': lng = value - _winreg.CloseKey(key) + winreg.CloseKey(key) except WindowsError: pass finally: - _winreg.CloseKey(hive) + winreg.CloseKey(hive) return lng if __name__ == '__main__': - print 'URL = %s' % get_connection_info() - print 'Language = %s' % get_install_lng() + print(('URL = %s' % get_connection_info())) + print(('Language = %s' % get_install_lng())) # del_connection_info() # set_connection_info('localhost', '8080', 'blabla', user=False) diff --git a/util/mailslot.py b/util/mailslot.py index 9e3b276..fdb3d19 100644 --- a/util/mailslot.py +++ b/util/mailslot.py @@ -92,7 +92,7 @@ if __name__ == '__main__': from time import sleep if not __debug__: - print 'Run this test in non-optimized mode' + print('Run this test in non-optimized mode') exit(1) if len(sys.argv) > 1 and 'server' in sys.argv[1]: @@ -103,7 +103,7 @@ if __name__ == '__main__': while True: data = recv.receive() if data is not None: - print data + print(data) if data.startswith('stop'): break sleep(2.0) @@ -114,7 +114,7 @@ if __name__ == '__main__': send = MailSlot() ret = send.connect() assert ret, 'Failed to connect' - for n in xrange(5): + for n in range(5): ret = send.send('restart') assert ret, 'Failed to send' sleep(2.0) @@ -123,4 +123,4 @@ if __name__ == '__main__': send.disconnect() else: - print 'Usage: mailslot.py server|client' + print('Usage: mailslot.py server|client')
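Reviewer note: the `_winreg` -> `winreg` rename in util/apireg.py is mechanical; the Python 3 module also hands out key handles that work as context managers, which would let the repeated CloseKey/finally bookkeeping shrink. A Windows-only sketch using the same key path as `reg_info(user=True)` (helper name invented):

    import winreg   # Windows-only, like util/apireg.py itself

    def read_api_url():
        try:
            with winreg.OpenKey(winreg.HKEY_CURRENT_USER, r'Software\SABnzbd\api') as key:
                value, _ = winreg.QueryValueEx(key, 'url')
                return value
        except OSError:          # WindowsError is an alias of OSError on Python 3
            return None

    print(read_api_url())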