diff --git a/main/CHANGELOG.txt b/main/CHANGELOG.txt
index caae32c..8623cd5 100644
--- a/main/CHANGELOG.txt
+++ b/main/CHANGELOG.txt
@@ -4,6 +4,7 @@
 - TV Season sort now has "affected categories" too. Check your settings!!
 - Fixed problems that could lead to an infinite unpacking loop (when using a
   "sick" NZB and using +U instead of +D postprocessing)
+- Duplicate title checking in RSS is now done across all feeds
 - Fixed issues with accented characters in NZB names when using RSS
 - Fixed issues with accented characters in category folder names when using RSS
 - Plush: fix nzo range selection when clicking checkboxes
diff --git a/main/SABnzbd.py b/main/SABnzbd.py
index a6b5469..469d2c3 100755
--- a/main/SABnzbd.py
+++ b/main/SABnzbd.py
@@ -451,7 +451,7 @@ def get_webhost(cherryhost, cherryport, https_port):
         cherryhost = '0.0.0.0'
         info = socket.getaddrinfo(localhost, None)
     for item in info:
-        ip = item[4][0]
+        ip = str(item[4][0])
         if ip.startswith('169.254.'):
             pass # Is an APIPA
         elif ':' in ip:
diff --git a/main/sabnzbd/newsunpack.py b/main/sabnzbd/newsunpack.py
index 04bd8ae..36c0da2 100644
--- a/main/sabnzbd/newsunpack.py
+++ b/main/sabnzbd/newsunpack.py
@@ -1016,7 +1016,7 @@ def PAR_Verify(parfile, parfile_nzf, nzo, setname, joinables, classic=False):
 
 
 #-------------------------------------------------------------------------------
-_RE_PYTHON = re.compile(r'^(#!.*/python)\s+(.*)$')
+_RE_PYTHON = re.compile(r'^#!(.*/python)\s+(.*)$')
 
 def fix_python_script(command):
     """ Implement a work-around for Python userscripts on OSX """
diff --git a/main/sabnzbd/rss.py b/main/sabnzbd/rss.py
index 9516eb1..e91f33f 100644
--- a/main/sabnzbd/rss.py
+++ b/main/sabnzbd/rss.py
@@ -170,15 +170,14 @@ class RSSQueue:
         """ Run the query for one URI and apply filters """
         self.shutdown = False
 
-        def dup_title(fd, title):
-            for f in self.jobs:
-                if f == fd:
-                    for lk in self.jobs[fd]:
-                        item = self.jobs[fd][lk]
-                        if item.get('status', ' ')[0] == 'D' and \
-                           item.get('title', '').lower() == title.lower():
-                            return True
-                    return False
+        def dup_title(title):
+            title = title.lower()
+            for fd in self.jobs:
+                for lk in self.jobs[fd]:
+                    item = self.jobs[fd][lk]
+                    if item.get('status', ' ')[0] == 'D' and \
+                       item.get('title', '').lower() == title:
+                        return True
             return False
 
 
@@ -277,7 +276,7 @@ class RSSQueue:
 
                 newlinks.append(link)
 
-                if cfg.no_dupes() and dup_title(feed, title):
+                if cfg.no_dupes() and dup_title(title):
                     logging.info("Ignoring duplicate job %s", atitle)
                     continue
 
@@ -421,7 +420,7 @@ class RSSQueue:
         lst = self.jobs[feed]
         for link in lst:
             if lst[link].get('url', '') == id:
-                lst[link]['time'] = 'D'
+                lst[link]['status'] = 'D'
 
     @synchronized(LOCK)
     def clear_feed(self, feed):