From 3c4b09af758d09e5e85a0839e47f46a9ed34e8cb Mon Sep 17 00:00:00 2001
From: shypike
Date: Fri, 25 Sep 2015 21:35:47 +0200
Subject: [PATCH] Make more error and warning strings translatable.

---
 SABnzbd.py            | 12 ++++++------
 sabnzbd/__init__.py   |  4 ++--
 sabnzbd/assembler.py  |  2 +-
 sabnzbd/downloader.py |  2 +-
 sabnzbd/growler.py    | 10 +++++-----
 sabnzbd/newsunpack.py |  4 ++--
 sabnzbd/nzbqueue.py   |  2 +-
 sabnzbd/rating.py     | 14 +++++++-------
 sabnzbd/tvsort.py     |  2 +-
 sabnzbd/urlgrabber.py |  2 +-
 10 files changed, 27 insertions(+), 27 deletions(-)

diff --git a/SABnzbd.py b/SABnzbd.py
index 493fbdb..efa0f7e 100755
--- a/SABnzbd.py
+++ b/SABnzbd.py
@@ -694,7 +694,7 @@ def get_webhost(cherryhost, cherryport, https_port):
     if cherryport == https_port and sabnzbd.cfg.enable_https():
         sabnzbd.cfg.enable_https.set(False)
         # TODO: Should have a translated message, but that's not available yet
-        logging.error('HTTP and HTTPS ports cannot be the same')
+        logging.error(T('HTTP and HTTPS ports cannot be the same'))

     return cherryhost, cherryport, browserhost, https_port

@@ -1315,7 +1315,7 @@ def main():

     init_ok = sabnzbd.initialize(pause, clean_up, evalSched=True, repair=repair)
     if not init_ok:
-        logging.error('Initializing %s-%s failed, aborting',
+        logging.error(T('Initializing %s-%s failed, aborting'),
                       sabnzbd.MY_NAME, sabnzbd.__version__)
         exit_sab(2)

@@ -1519,13 +1519,13 @@ def main():
                 sabnzbd.halt()
                 exit_sab(2)
             else:
-                logging.error("Failed to start web-interface: ", exc_info=True)
+                logging.error(T('Failed to start web-interface: '), exc_info=True)
                 Bail_Out(browserhost, cherryport, str(error))
     except socket.error, error:
-        logging.error("Failed to start web-interface: ", exc_info=True)
+        logging.error(T('Failed to start web-interface: '), exc_info=True)
         Bail_Out(browserhost, cherryport)
     except:
-        logging.error("Failed to start web-interface: ", exc_info=True)
+        logging.error(T('Failed to start web-interface: '), exc_info=True)
         Bail_Out(browserhost, cherryport)

     # Wait for server to become ready
@@ -1566,7 +1566,7 @@ def main():
                 logging.info('Connected to the SABHelper service')
                 mail.send('api %s' % api_url)
             else:
-                logging.error('Cannot reach the SABHelper service')
+                logging.error(T('Cannot reach the SABHelper service'))
                 mail = None
         else:
             # Write URL directly to registry
diff --git a/sabnzbd/__init__.py b/sabnzbd/__init__.py
index 9e272b8..54e513a 100644
--- a/sabnzbd/__init__.py
+++ b/sabnzbd/__init__.py
@@ -423,7 +423,7 @@ def halt():
     try:
         save_state(flag=True)
     except:
-        logging.error('Fatal error at saving state', exc_info=True)
+        logging.error(T('Fatal error at saving state'), exc_info=True)

     # The Scheduler cannot be stopped when the stop was scheduled.

@@ -573,7 +573,7 @@ def save_compressed(folder, filename, data):
         f.flush()
         f.close()
     except:
-        logging.error("Saving %s failed", os.path.join(folder, filename))
+        logging.error(T('Saving %s failed'), os.path.join(folder, filename))
         logging.info("Traceback: ", exc_info = True)
     os.chdir(here)
diff --git a/sabnzbd/assembler.py b/sabnzbd/assembler.py
index 0d9c67c..4a86f31 100644
--- a/sabnzbd/assembler.py
+++ b/sabnzbd/assembler.py
@@ -102,7 +102,7 @@ class Assembler(Thread):
                         # Pause without saving
                         sabnzbd.downloader.Downloader.do.pause(save=False)
                     except:
-                        logging.error('Fatal error in Assembler', exc_info=True)
+                        logging.error(T('Fatal error in Assembler'), exc_info=True)
                         break

                 nzf.remove_admin()
diff --git a/sabnzbd/downloader.py b/sabnzbd/downloader.py
index 69c9730..2a48eb6 100644
--- a/sabnzbd/downloader.py
+++ b/sabnzbd/downloader.py
@@ -750,7 +750,7 @@ class Downloader(Thread):
                     logging.info('Looks like server closed connection: %s', err)
                     self.__reset_nw(nw, "server broke off connection", quit=False)
                 except:
-                    logging.error('Suspect error in downloader')
+                    logging.error(T('Suspect error in downloader'))
                     logging.info("Traceback: ", exc_info = True)
                     self.__reset_nw(nw, "server broke off connection", quit=False)
diff --git a/sabnzbd/growler.py b/sabnzbd/growler.py
index c03c3de..c35f4ea 100644
--- a/sabnzbd/growler.py
+++ b/sabnzbd/growler.py
@@ -407,7 +407,7 @@ def send_prowl(title, msg, gtype, force=False, test=None):
         urllib2.urlopen(url)
         return ''
     except:
-        logging.warning('Failed to send Prowl message')
+        logging.warning(T('Failed to send Prowl message'))
         logging.info("Traceback: ", exc_info = True)
         return T('Failed to send Prowl message')
     return ''
@@ -456,10 +456,10 @@ def send_pushover(title, msg, gtype, force=False, test=None):
             }), { "Content-type": "application/x-www-form-urlencoded" })
         res = conn.getresponse()
         if res.status != 200:
-            logging.error("Bad response from Pushover (%s): %s", res.status, res.read())
+            logging.error(T('Bad response from Pushover (%s): %s'), res.status, res.read())

     except:
-        logging.warning('Failed to send pushover message')
+        logging.warning(T('Failed to send pushover message'))
         logging.info("Traceback: ", exc_info = True)
         return T('Failed to send pushover message')
     return ''
@@ -507,12 +507,12 @@ def send_pushbullet(title, msg, gtype, force=False, test=None):
                      'Content-type': 'application/json'})
         res = conn.getresponse()
         if res.status != 200:
-            logging.error('Bad response from Pushbullet (%s): %s', res.status, res.read())
+            logging.error(T('Bad response from Pushbullet (%s): %s'), res.status, res.read())
         else:
             logging.info('Successfully sent to Pushbullet')

     except:
-        logging.warning('Failed to send pushbullet message')
+        logging.warning(T('Failed to send pushbullet message'))
         logging.info('Traceback: ', exc_info = True)
         return T('Failed to send pushbullet message')
     return ''
diff --git a/sabnzbd/newsunpack.py b/sabnzbd/newsunpack.py
index 6a6d662..37c01d0 100644
--- a/sabnzbd/newsunpack.py
+++ b/sabnzbd/newsunpack.py
@@ -201,7 +201,7 @@ def unpack_magic(nzo, workdir, workdir_complete, dele, one_folder, joinables, zi
    """ Do a recursive unpack from all archives in 'workdir' to 'workdir_complete' """
    if depth > 5:
-        logging.warning('Unpack nesting too deep [%s]', nzo.final_name)
+        logging.warning(T('Unpack nesting too deep [%s]'), nzo.final_name)
         return False, []
     depth += 1

@@ -1073,7 +1073,7 @@ def par2_repair(parfile_nzf, nzo, workdir, setname, single):
             nzo.remove_parset(parfile_nzf.setname)
         else:
             if qc_result:
-                logging.warning('Par verify failed on %s, while QuickCheck succeeded!', parfile)
+                logging.warning(T('Par verify failed on %s, while QuickCheck succeeded!'), parfile)
             else:
                 logging.info('Par verify failed on %s!', parfile)
diff --git a/sabnzbd/nzbqueue.py b/sabnzbd/nzbqueue.py
index 415486d..ee3c5e6 100644
--- a/sabnzbd/nzbqueue.py
+++ b/sabnzbd/nzbqueue.py
@@ -247,7 +247,7 @@ class NzbQueue(TryList):
             del nzo
             return new_nzo
         except:
-            logging.error('Failed to restart NZB after pre-check (%s)', nzo.nzo_id)
+            logging.error(T('Failed to restart NZB after pre-check (%s)'), nzo.nzo_id)
             logging.info("Traceback: ", exc_info = True)
             return nzo
diff --git a/sabnzbd/rating.py b/sabnzbd/rating.py
index 3e930a3..b238edd 100644
--- a/sabnzbd/rating.py
+++ b/sabnzbd/rating.py
@@ -73,11 +73,11 @@ class NzbRatingV2(NzbRating):
         self.avg_spam_confirm = False
         self.avg_encrypted_cnt = 0
         self.avg_encrypted_confirm = False
-    
+

     def to_v2(self, rating):
         self.__dict__.update(rating.__dict__)
         return self
-    
+

 class Rating(Thread):
     VERSION = 2
@@ -119,7 +119,7 @@ class Rating(Thread):
         self.nzo_indexer_map = {}
         Thread.__init__(self)
         if not _HAVE_SSL:
-            logging.warning('Ratings server requires secure connection')
+            logging.warning(T('Ratings server requires secure connection'))
             self.stop()

     def stop(self):
@@ -164,7 +164,7 @@ class Rating(Thread):
             if fields['votedown']: rating.avg_vote_down = int(float(fields['votedown']))
             if fields['spam']: rating.avg_spam_cnt = int(float(fields['spam']))
             if fields['confirmed-spam']: rating.avg_spam_confirm = (fields['confirmed-spam'].lower() == 'yes')
-            if fields['passworded']: rating.avg_encrypted_cnt = int(float(fields['passworded'])) 
+            if fields['passworded']: rating.avg_encrypted_cnt = int(float(fields['passworded']))
             if fields['confirmed-passworded']: rating.avg_encrypted_confirm = (fields['confirmed-passworded'].lower() == 'yes')
             rating.host = host[0] if host and isinstance(host, list) else host
             self.ratings[indexer_id] = rating
@@ -176,7 +176,7 @@ class Rating(Thread):
     def update_user_rating(self, nzo_id, video, audio, vote, flag, flag_detail = None):
         logging.debug('Updating user rating (%s: %s, %s, %s, %s)', nzo_id, video, audio, vote, flag)
         if nzo_id not in self.nzo_indexer_map:
-            logging.warning('indexer id (%s) not found for ratings file', nzo_id)
+            logging.warning(T('Indexer id (%s) not found for ratings file'), nzo_id)
             return
         indexer_id = self.nzo_indexer_map[nzo_id]
         rating = self.ratings[indexer_id]
@@ -203,7 +203,7 @@ class Rating(Thread):
         # Update if already a vote
         if rating.user_vote and rating.user_vote == Rating.VOTE_UP:
             rating.avg_vote_up -= 1
-        
+

         rating.user_vote = int(vote)
         self.queue.put(indexer_id)
@@ -267,7 +267,7 @@ class Rating(Thread):
             requests.append(self._flag_request(rating.user_flag.get('val'), rating.user_flag.get('detail'), 0))
         if rating.changed & Rating.CHANGED_AUTO_FLAG:
             requests.append(self._flag_request(rating.auto_flag.get('val'), rating.auto_flag.get('detail'), 1))
-        
+

         try:
             conn = httplib.HTTPSConnection(rating_host)
             for request in filter(lambda r: r is not None, requests):
diff --git a/sabnzbd/tvsort.py b/sabnzbd/tvsort.py
index 5203f5b..5b9a75e 100644
--- a/sabnzbd/tvsort.py
+++ b/sabnzbd/tvsort.py
@@ -437,7 +437,7 @@ class SeriesSorter(object):
                     logging.debug("Rename: %s to %s", filepath, newpath)
                     renamer(filepath, newpath)
                 except:
-                    logging.error("Failed to rename: %s to %s", clip_path(current_path), clip_path(newpath))
+                    logging.error(T('Failed to rename: %s to %s'), clip_path(current_path), clip_path(newpath))
                     logging.info("Traceback: ", exc_info = True)
             rename_similar(current_path, self.ext, self.filename_set, ())
         else:
diff --git a/sabnzbd/urlgrabber.py b/sabnzbd/urlgrabber.py
index bd3c263..81cb21c 100644
--- a/sabnzbd/urlgrabber.py
+++ b/sabnzbd/urlgrabber.py
@@ -237,7 +237,7 @@ class URLGrabber(Thread):
                     logging.info('Unknown filetype when fetching NZB, retry after 30s %s', url)
                     self.add(url, future_nzo, 30)
             except:
-                logging.error('URLGRABBER CRASHED', exc_info=True)
+                logging.error(T('URLGRABBER CRASHED'), exc_info=True)
                 logging.debug("URLGRABBER Traceback: ", exc_info=True)
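
Note on the pattern this patch applies throughout: T() is SABnzbd's translation lookup, and every change wraps only the literal format string, leaving the %s arguments outside the call so logging interpolates them after the lookup. Below is a minimal sketch of that pattern, assuming a plain gettext catalog; the NullTranslations stand-in, the T() shown here, and the example message are illustrative and not SABnzbd's actual locale wiring.

    # Minimal sketch: a gettext-backed T() so log templates are translatable.
    import gettext
    import logging

    _catalog = gettext.NullTranslations()  # stand-in; a real app loads a .mo file here

    def T(msgid):
        """Return the translated form of msgid, falling back to msgid itself."""
        return _catalog.gettext(msgid)

    logging.basicConfig(level=logging.DEBUG)

    # Translatable: only the literal template is wrapped, so extraction tools
    # (e.g. xgettext --keyword=T) collect one stable msgid, and logging
    # substitutes the argument after translation.
    logging.error(T('Saving %s failed'), '/some/path')

    # Broken alternative: formatting before wrapping bakes the path into the
    # msgid, producing a new, untranslatable string for every file.
    logging.error(T('Saving %s failed' % '/some/path'))

Keeping the placeholder inside the translated template is what makes each message a single, stable catalog entry, which is why the patch changes only the first argument of each logging call and never touches the interpolated values.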