Browse Source

Merge revisions 3376-3379 from 0.5.x to trunk.

tags/0.6.0
shypike 15 years ago
parent
commit
4bf60b459a
  1. 1
      main/CHANGELOG.txt
  2. 2
      main/SABnzbd.py
  3. 2
      main/sabnzbd/newsunpack.py
  4. 13
      main/sabnzbd/rss.py

1
main/CHANGELOG.txt

@@ -4,6 +4,7 @@
- TV Season sort now has "affected categories" too. Check your settings!! - TV Season sort now has "affected categories" too. Check your settings!!
- Fixed problems that could lead to an infinite unpacking loop - Fixed problems that could lead to an infinite unpacking loop
(when using a "sick" NZB and using +U instead of +D postprocessing) (when using a "sick" NZB and using +U instead of +D postprocessing)
- Duplicate title checking in RSS is now done across all feeds
- Fixed issues with accented characters in NZB names when using RSS - Fixed issues with accented characters in NZB names when using RSS
- Fixed issues with accented characters in category folder names when using RSS - Fixed issues with accented characters in category folder names when using RSS
- Plush: fix nzo range selection when clicking checkboxes - Plush: fix nzo range selection when clicking checkboxes

2
main/SABnzbd.py

@@ -451,7 +451,7 @@ def get_webhost(cherryhost, cherryport, https_port):
cherryhost = '0.0.0.0' cherryhost = '0.0.0.0'
info = socket.getaddrinfo(localhost, None) info = socket.getaddrinfo(localhost, None)
for item in info: for item in info:
ip = item[4][0] ip = str(item[4][0])
if ip.startswith('169.254.'): if ip.startswith('169.254.'):
pass # Is an APIPA pass # Is an APIPA
elif ':' in ip: elif ':' in ip:

2
main/sabnzbd/newsunpack.py

@@ -1016,7 +1016,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------
_RE_PYTHON = re.compile(r'^(#!.*/python)\s+(.*)$') _RE_PYTHON = re.compile(r'^#!(.*/python)\s+(.*)$')
def fix_python_script(command): def fix_python_script(command):
""" Implement a work-around for Python userscripts on OSX """ """ Implement a work-around for Python userscripts on OSX """

13
main/sabnzbd/rss.py

@@ -170,16 +170,15 @@ class RSSQueue:
""" Run the query for one URI and apply filters """ """ Run the query for one URI and apply filters """
self.shutdown = False self.shutdown = False
def dup_title(fd, title): def dup_title(title):
for f in self.jobs: title = title.lower()
if f == fd: for fd in self.jobs:
for lk in self.jobs[fd]: for lk in self.jobs[fd]:
item = self.jobs[fd][lk] item = self.jobs[fd][lk]
if item.get('status', ' ')[0] == 'D' and \ if item.get('status', ' ')[0] == 'D' and \
item.get('title', '').lower() == title.lower(): item.get('title', '').lower() == title:
return True return True
return False return False
return False
if not feed: if not feed:
@@ -277,7 +276,7 @@ class RSSQueue:
newlinks.append(link) newlinks.append(link)
if cfg.no_dupes() and dup_title(feed, title): if cfg.no_dupes() and dup_title(title):
logging.info("Ignoring duplicate job %s", atitle) logging.info("Ignoring duplicate job %s", atitle)
continue continue
@@ -421,7 +420,7 @@ class RSSQueue:
lst = self.jobs[feed] lst = self.jobs[feed]
for link in lst: for link in lst:
if lst[link].get('url', '') == id: if lst[link].get('url', '') == id:
lst[link]['time'] = 'D' lst[link]['status'] = 'D'
@synchronized(LOCK) @synchronized(LOCK)
def clear_feed(self, feed): def clear_feed(self, feed):

Loading…
Cancel
Save