Browse Source

Keep retrying when nzbmatrix sends nonsense data.

tags/0.6.0
ShyPike 15 years ago
parent
commit
ae90471c80
  1. 8
      sabnzbd/urlgrabber.py

8
sabnzbd/urlgrabber.py

@@ -74,6 +74,7 @@ class URLGrabber(Thread):
if not url:
continue
try:
# If nzo entry deleted, give up
try:
deleted = future_nzo.deleted
@@ -159,6 +160,9 @@ class URLGrabber(Thread):
nzbqueue.remove_nzo(future_nzo.nzo_id, add_to_history=False, unload=True)
elif res == -2:
self.add(url, future_nzo)
elif matrix_id:
# Keep retrying NzbMatrix forever
self.add(url, future_nzo)
else:
misc.bad_fetch(future_nzo, url, retry=True, content=True)
# Check if a supported archive
@@ -172,6 +176,10 @@ class URLGrabber(Thread):
except:
pass
misc.bad_fetch(future_nzo, url, retry=True, content=True)
except:
logging.error('URLGRABBER CRASHED', exc_info=True)
logging.debug("URLGRABBER Traceback: ", exc_info=True)
# Don't pound the website!
time.sleep(5.0)

Loading…
Cancel
Save