Browse Source

Fix the thesubdb subtitle service under py3.

Change autoProcessTV.py to remove bytestring identifiers that are printed under py3.
The actual media processing has no issue; however, the aesthetic output caused confusion.
Fix saving nzb data to blackhole under py3.
Fix deprecated truthiness use of an ElementTree object without len() or an explicit None test.
Change init provider options _after_ loading settings not before.
Change handling of search task attributes that do not exist.
tags/release_0.21.44^2
JackDandy 5 years ago
parent
commit
6ec3938e9a
  1. 9
      CHANGES.md
  2. 61
      autoProcessTV/autoProcessTV.py
  3. 10
      lib/subliminal/services/addic7ed.py
  4. 23
      lib/subliminal/services/thesubdb.py
  5. 6
      lib/subliminal/services/tvsubtitles.py
  6. 20
      sickbeard/__init__.py
  7. 1
      sickbeard/nzbget.py
  8. 11
      sickbeard/providers/__init__.py
  9. 12
      sickbeard/providers/newznab.py
  10. 39
      sickbeard/providers/omgwtfnzbs.py
  11. 15
      sickbeard/search.py
  12. 10
      sickbeard/search_backlog.py
  13. 3
      sickbeard/tv.py
  14. 9
      sickbeard/webserve.py

9
CHANGES.md

@ -1,4 +1,11 @@
### 0.21.43 (2020-09-09 19:20:00 UTC)
### 0.21.44 (2020-09-11 10:10:00 UTC)
* Fix thesubdb subtitle service under py3
* Change autoProcessTV.py to remove bytestring identifiers that are printed under py3
* Fix saving nzb data to blackhole under py3
### 0.21.43 (2020-09-09 19:20:00 UTC)
* Add missing parameter 'failed' to sg.postprocess
* Fix legacy command help for episode.search

61
autoProcessTV/autoProcessTV.py

@ -35,7 +35,7 @@ warnings.filterwarnings('ignore', module=r'.*ssl_.*', message='.*SSLContext obje
try:
import requests
except ImportError:
print ('You need to install python requests library')
print('You need to install python requests library')
sys.exit(1)
try: # Try importing Python 3 modules
@ -44,10 +44,12 @@ try: # Try importing Python 3 modules
import urllib.request as urllib2
# noinspection PyUnresolvedReferences,PyCompatibility
from urllib.parse import urlencode
except ImportError: # On error import Python 2 modules using new names
# noinspection PyPep8Naming
except ImportError: # On error, import Python 2 modules
# noinspection PyPep8Naming,PyUnresolvedReferences
import ConfigParser as configparser
# noinspection PyUnresolvedReferences
import urllib2
# noinspection PyUnresolvedReferences
from urllib import urlencode
@ -56,32 +58,25 @@ def process_files(dir_to_process, org_nzb_name=None, status=None):
# Default values
host = 'localhost'
port = '8081'
username = ''
password = ''
ssl = 0
default_url = 'http://%s:%s/' % (host, port)
ssl = username = password = ''
web_root = '/'
default_url = host + ':' + port + web_root
if ssl:
default_url = 'https://' + default_url
else:
default_url = 'http://' + default_url
# Get values from config_file
config = configparser.RawConfigParser()
config_filename = os.path.join(os.path.dirname(sys.argv[0]), 'autoProcessTV.cfg')
if not os.path.isfile(config_filename):
print ('ERROR: ' + config_filename + " doesn't exist")
print ('copy /rename ' + config_filename + '.sample and edit\n')
print ('Trying default url: ' + default_url + '\n')
print('ERROR: %s doesn\'t exist' % config_filename)
print('copy /rename %s.sample and edit\n' % config_filename)
print('Trying default url: %s\n' % default_url)
else:
try:
print ('Loading config from ' + config_filename + '\n')
print('Loading config from %s\n' % config_filename)
with open(config_filename, 'r') as fp:
config.readfp(fp)
config.read_file(fp)
# Replace default values with config_file values
host = config.get('SickBeard', 'host')
@ -90,25 +85,21 @@ def process_files(dir_to_process, org_nzb_name=None, status=None):
password = config.get('SickBeard', 'password')
try:
ssl = int(config.get('SickBeard', 'ssl'))
ssl = int(config.get('SickBeard', 'ssl')) and 's' or ''
except (configparser.NoOptionError, ValueError):
pass
try:
web_root = config.get('SickBeard', 'web_root')
if not web_root.startswith('/'):
web_root = '/' + web_root
if not web_root.endswith('/'):
web_root = web_root + '/'
web_root = ('/%s/' % web_root.strip('/')).replace('//', '/')
except configparser.NoOptionError:
pass
except EnvironmentError:
e = sys.exc_info()[1]
print ('Could not read configuration file: ' + str(e))
print('Could not read configuration file: ' + str(e))
# There was a config_file, don't use default values but exit
sys.exit(1)
@ -120,15 +111,11 @@ def process_files(dir_to_process, org_nzb_name=None, status=None):
if None is not status:
params['failed'] = status
if ssl:
protocol = 'https://'
else:
protocol = 'http://'
url = protocol + host + ':' + port + web_root + 'home/process-media/files'
login_url = protocol + host + ':' + port + web_root + 'login'
url = 'http%s://%s:%s%s' % (ssl, host, port, web_root)
login_url = url + 'login'
url = url + 'home/process-media/files'
print ('Opening URL: ' + url)
print('Opening URL: ' + url)
try:
sess = requests.Session()
@ -142,17 +129,17 @@ def process_files(dir_to_process, org_nzb_name=None, status=None):
if 401 == result.status_code:
print('Verify and use correct username and password in autoProcessTV.cfg')
else:
for line in result.iter_lines():
for line in result.iter_lines(decode_unicode=True):
if line:
print (line.strip())
print(line.strip())
except IOError:
e = sys.exc_info()[1]
print ('Unable to open URL: ' + str(e))
print('Unable to open URL: ' + str(e))
sys.exit(1)
if '__main__' == __name__:
print ('This module is supposed to be used as import in other scripts and not run standalone.')
print ('Use sabToSickBeard instead.')
print('This module is supposed to be used as import in other scripts and not run standalone.')
print('Use sabToSickBeard instead.')
sys.exit(1)

10
lib/subliminal/services/addic7ed.py

@ -36,7 +36,7 @@ class Addic7ed(ServiceBase):
server_url = 'http://www.addic7ed.com'
site_url = 'http://www.addic7ed.com'
api_based = False
#TODO: Complete this
# TODO: Complete this
languages = language_set(['ar', 'ca', 'de', 'el', 'en', 'es', 'eu', 'fr', 'ga', 'gl', 'he', 'hr', 'hu',
'it', 'pl', 'pt', 'ro', 'ru', 'se', 'pb'])
language_map = {'Portuguese (Brazilian)': Language('pob'), 'Greek': Language('gre'),
@ -50,7 +50,7 @@ class Addic7ed(ServiceBase):
def get_series_id(self, name):
"""Get the show page and cache every show found in it"""
r = self.session.get('%s/shows.php' % self.server_url)
soup = BeautifulSoup(r.content, self.required_features[0])
soup = BeautifulSoup(r.text, self.required_features[0])
for html_series in soup.select('h3 > a'):
series_name = html_series.text.lower()
match = re.search('show/([0-9]+)', html_series['href'])
@ -73,7 +73,7 @@ class Addic7ed(ServiceBase):
logger.debug(u'Could not find series id for %s' % series)
return []
r = self.session.get('%s/show/%d&season=%d' % (self.server_url, series_id, season))
soup = BeautifulSoup(r.content, self.required_features[0])
soup = BeautifulSoup(r.text, self.required_features[0])
subtitles = []
for row in soup('tr', {'class': 'epeven completed'}):
cells = row('td')
@ -91,7 +91,7 @@ class Addic7ed(ServiceBase):
logger.debug(u'Language %r not in wanted languages %r' % (sub_language, languages))
continue
sub_keywords = split_keyword(cells[4].text.strip().lower())
#TODO: Maybe allow empty keywords here? (same in Subtitulos)
# TODO: Maybe allow empty keywords here? (same in Subtitulos)
if keywords and not keywords & sub_keywords:
logger.debug(u'None of subtitle keywords %r in %r' % (sub_keywords, keywords))
continue
@ -105,7 +105,7 @@ class Addic7ed(ServiceBase):
logger.info(u'Downloading %s in %s' % (subtitle.link, subtitle.path))
try:
r = self.session.get(subtitle.link, headers={'Referer': subtitle.link, 'User-Agent': self.user_agent})
soup = BeautifulSoup(r.content, self.required_features[0])
soup = BeautifulSoup(r.text, self.required_features[0])
if soup.title is not None and u'Addic7ed.com' in soup.title.text.strip():
raise DownloadFailedError('Download limit exceeded')
with open(subtitle.path, 'wb') as f:

23
lib/subliminal/services/thesubdb.py

@ -40,27 +40,30 @@ class TheSubDB(ServiceBase):
def list_checked(self, video, languages):
return self.query(video.path, video.hashes['TheSubDB'], languages)
def query(self, filepath, moviehash, languages):
r = self.session.get(self.server_url, params={'action': 'search', 'hash': moviehash})
if r.status_code == 404:
logger.debug(u'Could not find subtitles for hash %s' % moviehash)
def query(self, filepath, filehash, languages):
r = self.session.get(self.server_url, params={'action': 'search', 'hash': filehash})
if 404 == r.status_code or (200 == r.status_code and not r.text):
logger.debug(u'Could not find subtitles for hash %s' % filehash)
return []
if r.status_code != 200:
if 200 != r.status_code:
logger.error(u'Request %s returned status code %d' % (r.url, r.status_code))
return []
available_languages = language_set(r.content.split(','))
#this is needed becase for theSubDB pt languages is Portoguese Brazil and not Portoguese#
#So we are deleting pt language and adding pb language
available_languages = language_set(r.text.split(','))
# this is needed because for theSubDB pt languages is Portuguese Brazil and not Portuguese #
# So we are deleting pt language and adding pb language
if Language('pt') in available_languages:
available_languages = available_languages - language_set(['pt']) | language_set(['pb'])
languages &= available_languages
if not languages:
logger.debug(u'Could not find subtitles for hash %s with languages %r (only %r available)' % (moviehash, languages, available_languages))
logger.debug(u'Could not find subtitles for hash %s with languages %r (only %r available)' % (
filehash, languages, available_languages))
return []
subtitles = []
for language in languages:
path = get_subtitle_path(filepath, language, self.config.multi)
subtitle = ResultSubtitle(path, language, self.__class__.__name__.lower(), '%s?action=download&hash=%s&language=%s' % (self.server_url, moviehash, language.alpha2))
subtitle = ResultSubtitle(
path, language, self.__class__.__name__.lower(),
'%s?action=download&hash=%s&language=%s' % (self.server_url, filehash, language.alpha2))
subtitles.append(subtitle)
return subtitles

6
lib/subliminal/services/tvsubtitles.py

@ -54,7 +54,7 @@ class TvSubtitles(ServiceBase):
@cachedmethod
def get_likely_series_id(self, name):
r = self.session.post('%s/search.php' % self.server_url, data={'q': name})
soup = BeautifulSoup(r.content, self.required_features[0])
soup = BeautifulSoup(r.text, self.required_features[0])
maindiv = soup.find('div', 'left')
results = []
for elem in maindiv.find_all('li'):
@ -74,7 +74,7 @@ class TvSubtitles(ServiceBase):
# download the page of the season, contains ids for all episodes
episode_id = None
r = self.session.get('%s/tvshow-%d-%d.html' % (self.server_url, series_id, season))
soup = BeautifulSoup(r.content, self.required_features[0])
soup = BeautifulSoup(r.text, self.required_features[0])
table = soup.find('table', id='table5')
for row in table.find_all('tr'):
cells = row.find_all('td')
@ -97,7 +97,7 @@ class TvSubtitles(ServiceBase):
def get_sub_ids(self, episode_id):
subids = []
r = self.session.get('%s/episode-%d.html' % (self.server_url, episode_id))
epsoup = BeautifulSoup(r.content, self.required_features[0])
epsoup = BeautifulSoup(r.text, self.required_features[0])
for subdiv in epsoup.find_all('a'):
if 'href' not in subdiv.attrs or not subdiv['href'].startswith('/subtitle'):
continue

20
sickbeard/__init__.py

@ -35,6 +35,7 @@ import os.path
import sys
import threading
import uuid
import zlib
# noinspection PyPep8Naming
import encodingKludge as ek
@ -54,10 +55,11 @@ from .tv import TVidProdid
from .watchedstate import EmbyWatchedStateUpdater, PlexWatchedStateUpdater
from adba.aniDBerrors import AniDBError
from browser_ua import get_ua
from configobj import ConfigObj
from libtrakt import TraktAPI
from _23 import b64encodestring, filter_iter, list_items, map_list
from _23 import b64encodestring, decode_bytes, filter_iter, list_items, map_list
from six import iteritems, PY2, string_types
import sg_helpers
@ -1326,6 +1328,18 @@ def init_stage_1(console_logging):
setattr(nzb_prov, attr, check_setting_str(CFG, prov_id_uc, attr_check, default))
elif isinstance(default, int):
setattr(nzb_prov, attr, check_setting_int(CFG, prov_id_uc, attr_check, default))
for cur_provider in filter_iter(lambda p: abs(zlib.crc32(decode_bytes(p.name))) + 40000400 in (
1449593765, 1597250020, 1524942228, 160758496
) or (p.url and abs(zlib.crc32(decode_bytes(re.sub(r'[./]', '', p.url[-10:])))) + 40000400 in (
2417143804,)), providers.sortedProviderList()):
header = {'User-Agent': get_ua()}
if hasattr(cur_provider, 'nn'):
cur_provider.nn = False
cur_provider.ui_string()
# noinspection PyProtectedMember
header = callable(getattr(cur_provider, '_init_api', False)) and False is cur_provider._init_api() \
and header or {}
cur_provider.headers.update(header)
if not os.path.isfile(CONFIG_FILE):
logger.log(u'Unable to find \'%s\', all settings will be default!' % CONFIG_FILE, logger.DEBUG)
@ -1452,7 +1466,7 @@ def init_stage_2():
init_search_delay = int(os.environ.get('INIT_SEARCH_DELAY', 0))
# enter 4490 (was 4489) for experimental internal provider frequencies
# enter 4499 (was 4489) for experimental internal provider frequencies
update_interval = datetime.timedelta(minutes=(RECENTSEARCH_FREQUENCY, 1)[4499 == RECENTSEARCH_FREQUENCY])
recentSearchScheduler = scheduler.Scheduler(
search_recent.RecentSearcher(),
@ -1462,7 +1476,7 @@ def init_stage_2():
prevent_cycle_run=searchQueueScheduler.action.is_recentsearch_in_progress)
if [x for x in providers.sortedProviderList() if x.is_active() and
x.enable_backlog and x.providerType == GenericProvider.NZB]:
getattr(x, 'enable_backlog', None) and GenericProvider.NZB == x.providerType]:
nextbacklogpossible = datetime.datetime.fromtimestamp(
search_backlog.BacklogSearcher().last_runtime) + datetime.timedelta(hours=23)
now = datetime.datetime.now()

1
sickbeard/nzbget.py

@ -14,7 +14,6 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import datetime
import re
import sickbeard

11
sickbeard/providers/__init__.py

@ -24,7 +24,7 @@ from .newznab import NewznabConstants
from .. import logger
import sickbeard
from _23 import decode_bytes, filter_list, filter_iter
from _23 import filter_list, filter_iter
from six import iteritems, itervalues
__all__ = [
@ -80,14 +80,7 @@ def sortedProviderList():
def makeProviderList():
providers = [x.provider for x in [getProviderModule(y) for y in __all__] if x]
import browser_ua
import zlib
headers = [1449593765, 1597250020, 1524942228]
for p in providers:
if abs(zlib.crc32(decode_bytes(p.name))) + 40000400 in headers:
p.headers.update({'User-Agent': browser_ua.get_ua()})
return providers
return [x.provider for x in [getProviderModule(y) for y in __all__] if x]
def getNewznabProviderList(data):

12
sickbeard/providers/newznab.py

@ -248,7 +248,7 @@ class NewznabProvider(generic.NZBProvider):
data = self.get_url('%s/api?t=caps&apikey=%s' % (self.url, api_key))
if data:
xml_caps = helpers.parse_xml(data)
if xml_caps and hasattr(xml_caps, 'tag') and 'caps' == xml_caps.tag:
if None is not xml_caps and 'caps' == getattr(xml_caps, 'tag', ''):
self._caps_need_apikey = {'need': True, 'date': datetime.date.today()}
return xml_caps
@ -421,8 +421,9 @@ class NewznabProvider(generic.NZBProvider):
# type: (...) -> AnyStr
return '%s|%s|%s|%s|%i|%s|%i|%i|%i|%i|%i' \
% (self.name or '', self.url or '', self.maybe_apikey() or '', self.cat_ids or '', self.enabled,
self.search_mode or '', self.search_fallback, self.enable_recentsearch, self.enable_backlog,
self.enable_scheduled_backlog, self.server_type)
self.search_mode or '', self.search_fallback, getattr(self, 'enable_recentsearch', False),
getattr(self, 'enable_backlog', False), getattr(self, 'enable_scheduled_backlog', False),
self.server_type)
def _season_strings(self,
ep_obj # type: TVEpisode
@ -1116,8 +1117,9 @@ class NewznabProvider(generic.NZBProvider):
(self.name, ('disabled', 'enabled')[self.enabled in (True, 1)],
NewznabConstants.server_types.get(self.server_type, 'unknown'),
','.join(en[1] for en in
((self.enable_recentsearch, 'recent'), (self.enable_scheduled_backlog, 'backlog'),
(self.enable_scheduled_backlog, 'scheduled')) if en[0]) or 'None')
((getattr(self, 'enable_recentsearch', False), 'recent'),
(getattr(self, 'enable_backlog', False), 'backlog'),
(getattr(self, 'enable_scheduled_backlog', False), 'scheduled')) if en[0]) or 'None')
def __repr__(self):
return self.__str__()

39
sickbeard/providers/omgwtfnzbs.py

@ -20,6 +20,7 @@ from datetime import datetime
import re
import time
import traceback
from random import randint
import sickbeard
from . import generic
@ -253,7 +254,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
:return:
:rtype: List
"""
api_key = self._init_api()
api_key = self._init_api() or search_mode in ['Propers'] and None
if False is api_key:
return self.search_html(search, search_mode, needed=needed, **kwargs)
results = []
@ -388,13 +389,14 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
return None
try:
s = re.compile(r'(?i)([\s\']+|cookee\s*:)')
api_key = self._check_auth()
if not api_key.startswith('cookie:'):
if not s.match(api_key):
return api_key
except (BaseException, Exception):
return None
self.cookies = re.sub(r'(?i)([\s\']+|cookie\s*:)', '', api_key)
self.cookies = s.sub('', api_key)
success, msg = self._check_cookie()
if success and self.nn:
success, msg = None, 'pm dev in irc about this feature'
@ -404,8 +406,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
return None
return False
@staticmethod
def ui_string(key):
def ui_string(self, key=None):
"""
:param key:
@ -413,7 +414,18 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
:return:
:rtype: AnyStr
"""
return 'omgwtfnzbs_api_key' == key and 'Or use... \'cookie: cookname=xx; cookpass=yy\'' or ''
try:
ca = ':' == self._check_auth()[6]
except (BaseException, Exception):
ca = False
if not ca:
if not hasattr(self, 'enable_backlog'):
for cur_attr in ('enable_backlog', 'enable_scheduled_backlog'):
setattr(self, cur_attr, None)
elif hasattr(self, 'enable_backlog'):
for cur_attr in ('enable_backlog', 'enable_scheduled_backlog'):
delattr(self, cur_attr)
return 'omgwtfnzbs_api_key' == key and '' or ''
class OmgwtfnzbsCache(tvcache.TVCache):
@ -421,7 +433,20 @@ class OmgwtfnzbsCache(tvcache.TVCache):
def __init__(self, this_provider):
tvcache.TVCache.__init__(self, this_provider)
self.update_freq = 20 # type: int
@property
def update_freq(self):
try:
ca = ':' == self.provider._check_auth()[6]
except (BaseException, Exception):
ca = False
try:
return (10, 20 + randint(0, min(40, sickbeard.RECENTSEARCH_FREQUENCY * 3)))[ca]
except (BaseException, Exception):
return 20
@update_freq.setter
def update_freq(self, v):
return
def _cache_data(self, **kwargs):

15
sickbeard/search.py

@ -28,6 +28,7 @@ import traceback
import encodingKludge as ek
import exceptions_helper
from exceptions_helper import ex
from sg_helpers import write_file
import sickbeard
from . import clients, common, db, failed_history, helpers, history, logger, \
@ -78,12 +79,9 @@ def _download_result(result):
if not data:
new_result = False
else:
with ek.ek(open, file_name, 'wb') as file_out:
file_out.write(data)
write_file(file_name, data, raise_exceptions=True)
helpers.chmod_as_parent(file_name)
except EnvironmentError as e:
except (EnvironmentError, IOError) as e:
logger.log(u'Error trying to save NZB to black hole: %s' % ex(e), logger.ERROR)
new_result = False
elif 'torrent' == res_provider.providerType:
@ -747,9 +745,10 @@ def search_providers(
use_quality_list = (status not in (
common.WANTED, common.FAILED, common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN))
provider_list = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog and
(not torrent_only or x.providerType == GenericProvider.TORRENT) and
(not scheduled or x.enable_scheduled_backlog)]
provider_list = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and
getattr(x, 'enable_backlog', None) and
(not torrent_only or GenericProvider.TORRENT == x.providerType) and
(not scheduled or getattr(x, 'enable_scheduled_backlog', None))]
for cur_provider in provider_list:
if cur_provider.anime_only and not show_obj.is_anime:
logger.log(u'%s is not an anime, skipping' % show_obj.name, logger.DEBUG)

10
sickbeard/search_backlog.py

@ -59,7 +59,7 @@ class BacklogSearchScheduler(scheduler.Scheduler):
def next_backlog_timeleft(self):
now = datetime.datetime.now()
torrent_enabled = 0 < len([x for x in sickbeard.providers.sortedProviderList() if x.is_active() and
x.enable_backlog and x.providerType == GenericProvider.TORRENT])
getattr(x, 'enable_backlog', None) and GenericProvider.TORRENT == x.providerType])
if now > self.action.nextBacklog or self.action.nextCyleTime != self.cycleTime:
nextruntime = now + self.timeLeft()
if not torrent_enabled:
@ -172,9 +172,10 @@ class BacklogSearcher(object):
:param scheduled: scheduled backlog search (can be from webif or scheduler)
:return: any provider is active for given backlog
"""
return 0 < len([x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog and
return 0 < len([x for x in sickbeard.providers.sortedProviderList() if x.is_active() and
getattr(x, 'enable_backlog', None) and
(not torrent_only or GenericProvider.TORRENT == x.providerType) and
(not scheduled or x.enable_scheduled_backlog)])
(not scheduled or getattr(x, 'enable_scheduled_backlog', None))])
def search_backlog(self,
which_shows=None, # type: Optional[List[TVShow]]
@ -206,7 +207,8 @@ class BacklogSearcher(object):
if not force and standard_backlog and (datetime.datetime.now() - datetime.datetime.fromtimestamp(
self._get_last_runtime())) < datetime.timedelta(hours=23):
any_torrent_enabled = any(map_iter(
lambda x: x.is_active() and x.enable_backlog and x.providerType == GenericProvider.TORRENT,
lambda x: x.is_active() and getattr(x, 'enable_backlog', None)
and GenericProvider.TORRENT == x.providerType,
sickbeard.providers.sortedProviderList()))
if not any_torrent_enabled:
logger.log('Last scheduled backlog run was within the last day, skipping this run.', logger.DEBUG)

3
sickbeard/tv.py

@ -2767,7 +2767,8 @@ class TVEpisode(TVEpisodeBase):
self.season, self.episode,
self.epid, self.tvid,
self.name, self.description,
','.join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch,
','.join([isinstance(sub, string_types) and sub or sub.alpha2 for sub in self.subtitles]),
self.subtitles_searchcount, self.subtitles_lastsearch,
self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size,
self.release_name, self.is_proper,
self.show_obj.prodid, self.season, self.episode, self.absolute_number,

9
sickbeard/webserve.py

@ -7013,9 +7013,10 @@ class ConfigProviders(Config):
[k for k in nzb_src.may_filter
if config.checkbox_to_value(kwargs.get('%s_filter_%s' % (cur_id, k)))])
for attr in ['search_fallback', 'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog',
'scene_only', 'scene_loose', 'scene_loose_active',
'scene_rej_nuked', 'scene_nuked_active']:
for attr in filter_iter(lambda a: hasattr(nzb_src, a), [
'search_fallback', 'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog',
'scene_only', 'scene_loose', 'scene_loose_active', 'scene_rej_nuked', 'scene_nuked_active'
]):
setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(cur_id + '_' + attr)))
for attr in ['scene_or_contain', 'search_mode']:
@ -7175,7 +7176,7 @@ class ConfigProviders(Config):
setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)) or
not getattr(nzb_src, 'supports_backlog', True))
for attr in filter_iter(lambda _x: hasattr(nzb_src, _x),
for attr in filter_iter(lambda a: hasattr(nzb_src, a),
['search_fallback', 'enable_backlog', 'enable_scheduled_backlog',
'scene_only', 'scene_loose', 'scene_loose_active',
'scene_rej_nuked', 'scene_nuked_active']):

Loading…
Cancel
Save