Browse Source

Fix thesubdb under py3.

Change autoProcessTV.py to remove bytestring identifiers that are printed under py3.
The actual media processing has no issue; however, the aesthetic output caused confusion.
Fix saving nzb data to blackhole under py3.
Fix deprecated use of elementtree obj without len or explicit None test.
Change init provider options _after_ loading settings, not before.
Change handling of search task attributes that do not exist.
tags/release_0.21.44^2
JackDandy 5 years ago
parent
commit
6ec3938e9a
  1. 9
      CHANGES.md
  2. 61
      autoProcessTV/autoProcessTV.py
  3. 10
      lib/subliminal/services/addic7ed.py
  4. 23
      lib/subliminal/services/thesubdb.py
  5. 6
      lib/subliminal/services/tvsubtitles.py
  6. 20
      sickbeard/__init__.py
  7. 1
      sickbeard/nzbget.py
  8. 11
      sickbeard/providers/__init__.py
  9. 12
      sickbeard/providers/newznab.py
  10. 39
      sickbeard/providers/omgwtfnzbs.py
  11. 15
      sickbeard/search.py
  12. 10
      sickbeard/search_backlog.py
  13. 9
      sickbeard/tv.py
  14. 9
      sickbeard/webserve.py

9
CHANGES.md

@ -1,4 +1,11 @@
### 0.21.43 (2020-09-09 19:20:00 UTC) ### 0.21.44 (2020-09-11 10:10:00 UTC)
* Fix thesubdb subtitle service under py3
* Change autoProcessTV.py to remove bytestring identifiers that are printed under py3
* Fix saving nzb data to blackhole under py3
### 0.21.43 (2020-09-09 19:20:00 UTC)
* Add missing parameter 'failed' to sg.postprocess * Add missing parameter 'failed' to sg.postprocess
* Fix legacy command help for episode.search * Fix legacy command help for episode.search

61
autoProcessTV/autoProcessTV.py

@ -35,7 +35,7 @@ warnings.filterwarnings('ignore', module=r'.*ssl_.*', message='.*SSLContext obje
try: try:
import requests import requests
except ImportError: except ImportError:
print ('You need to install python requests library') print('You need to install python requests library')
sys.exit(1) sys.exit(1)
try: # Try importing Python 3 modules try: # Try importing Python 3 modules
@ -44,10 +44,12 @@ try: # Try importing Python 3 modules
import urllib.request as urllib2 import urllib.request as urllib2
# noinspection PyUnresolvedReferences,PyCompatibility # noinspection PyUnresolvedReferences,PyCompatibility
from urllib.parse import urlencode from urllib.parse import urlencode
except ImportError: # On error import Python 2 modules using new names except ImportError: # On error, import Python 2 modules
# noinspection PyPep8Naming # noinspection PyPep8Naming,PyUnresolvedReferences
import ConfigParser as configparser import ConfigParser as configparser
# noinspection PyUnresolvedReferences
import urllib2 import urllib2
# noinspection PyUnresolvedReferences
from urllib import urlencode from urllib import urlencode
@ -56,32 +58,25 @@ def process_files(dir_to_process, org_nzb_name=None, status=None):
# Default values # Default values
host = 'localhost' host = 'localhost'
port = '8081' port = '8081'
username = '' default_url = 'http://%s:%s/' % (host, port)
password = '' ssl = username = password = ''
ssl = 0
web_root = '/' web_root = '/'
default_url = host + ':' + port + web_root
if ssl:
default_url = 'https://' + default_url
else:
default_url = 'http://' + default_url
# Get values from config_file # Get values from config_file
config = configparser.RawConfigParser() config = configparser.RawConfigParser()
config_filename = os.path.join(os.path.dirname(sys.argv[0]), 'autoProcessTV.cfg') config_filename = os.path.join(os.path.dirname(sys.argv[0]), 'autoProcessTV.cfg')
if not os.path.isfile(config_filename): if not os.path.isfile(config_filename):
print ('ERROR: ' + config_filename + " doesn't exist") print('ERROR: %s doesn\'t exist' % config_filename)
print ('copy /rename ' + config_filename + '.sample and edit\n') print('copy /rename %s.sample and edit\n' % config_filename)
print ('Trying default url: ' + default_url + '\n') print('Trying default url: %s\n' % default_url)
else: else:
try: try:
print ('Loading config from ' + config_filename + '\n') print('Loading config from %s\n' % config_filename)
with open(config_filename, 'r') as fp: with open(config_filename, 'r') as fp:
config.readfp(fp) config.read_file(fp)
# Replace default values with config_file values # Replace default values with config_file values
host = config.get('SickBeard', 'host') host = config.get('SickBeard', 'host')
@ -90,25 +85,21 @@ def process_files(dir_to_process, org_nzb_name=None, status=None):
password = config.get('SickBeard', 'password') password = config.get('SickBeard', 'password')
try: try:
ssl = int(config.get('SickBeard', 'ssl')) ssl = int(config.get('SickBeard', 'ssl')) and 's' or ''
except (configparser.NoOptionError, ValueError): except (configparser.NoOptionError, ValueError):
pass pass
try: try:
web_root = config.get('SickBeard', 'web_root') web_root = config.get('SickBeard', 'web_root')
if not web_root.startswith('/'): web_root = ('/%s/' % web_root.strip('/')).replace('//', '/')
web_root = '/' + web_root
if not web_root.endswith('/'):
web_root = web_root + '/'
except configparser.NoOptionError: except configparser.NoOptionError:
pass pass
except EnvironmentError: except EnvironmentError:
e = sys.exc_info()[1] e = sys.exc_info()[1]
print ('Could not read configuration file: ' + str(e)) print('Could not read configuration file: ' + str(e))
# There was a config_file, don't use default values but exit # There was a config_file, don't use default values but exit
sys.exit(1) sys.exit(1)
@ -120,15 +111,11 @@ def process_files(dir_to_process, org_nzb_name=None, status=None):
if None is not status: if None is not status:
params['failed'] = status params['failed'] = status
if ssl: url = 'http%s://%s:%s%s' % (ssl, host, port, web_root)
protocol = 'https://' login_url = url + 'login'
else: url = url + 'home/process-media/files'
protocol = 'http://'
url = protocol + host + ':' + port + web_root + 'home/process-media/files'
login_url = protocol + host + ':' + port + web_root + 'login'
print ('Opening URL: ' + url) print('Opening URL: ' + url)
try: try:
sess = requests.Session() sess = requests.Session()
@ -142,17 +129,17 @@ def process_files(dir_to_process, org_nzb_name=None, status=None):
if 401 == result.status_code: if 401 == result.status_code:
print('Verify and use correct username and password in autoProcessTV.cfg') print('Verify and use correct username and password in autoProcessTV.cfg')
else: else:
for line in result.iter_lines(): for line in result.iter_lines(decode_unicode=True):
if line: if line:
print (line.strip()) print(line.strip())
except IOError: except IOError:
e = sys.exc_info()[1] e = sys.exc_info()[1]
print ('Unable to open URL: ' + str(e)) print('Unable to open URL: ' + str(e))
sys.exit(1) sys.exit(1)
if '__main__' == __name__: if '__main__' == __name__:
print ('This module is supposed to be used as import in other scripts and not run standalone.') print('This module is supposed to be used as import in other scripts and not run standalone.')
print ('Use sabToSickBeard instead.') print('Use sabToSickBeard instead.')
sys.exit(1) sys.exit(1)

10
lib/subliminal/services/addic7ed.py

@ -36,7 +36,7 @@ class Addic7ed(ServiceBase):
server_url = 'http://www.addic7ed.com' server_url = 'http://www.addic7ed.com'
site_url = 'http://www.addic7ed.com' site_url = 'http://www.addic7ed.com'
api_based = False api_based = False
#TODO: Complete this # TODO: Complete this
languages = language_set(['ar', 'ca', 'de', 'el', 'en', 'es', 'eu', 'fr', 'ga', 'gl', 'he', 'hr', 'hu', languages = language_set(['ar', 'ca', 'de', 'el', 'en', 'es', 'eu', 'fr', 'ga', 'gl', 'he', 'hr', 'hu',
'it', 'pl', 'pt', 'ro', 'ru', 'se', 'pb']) 'it', 'pl', 'pt', 'ro', 'ru', 'se', 'pb'])
language_map = {'Portuguese (Brazilian)': Language('pob'), 'Greek': Language('gre'), language_map = {'Portuguese (Brazilian)': Language('pob'), 'Greek': Language('gre'),
@ -50,7 +50,7 @@ class Addic7ed(ServiceBase):
def get_series_id(self, name): def get_series_id(self, name):
"""Get the show page and cache every show found in it""" """Get the show page and cache every show found in it"""
r = self.session.get('%s/shows.php' % self.server_url) r = self.session.get('%s/shows.php' % self.server_url)
soup = BeautifulSoup(r.content, self.required_features[0]) soup = BeautifulSoup(r.text, self.required_features[0])
for html_series in soup.select('h3 > a'): for html_series in soup.select('h3 > a'):
series_name = html_series.text.lower() series_name = html_series.text.lower()
match = re.search('show/([0-9]+)', html_series['href']) match = re.search('show/([0-9]+)', html_series['href'])
@ -73,7 +73,7 @@ class Addic7ed(ServiceBase):
logger.debug(u'Could not find series id for %s' % series) logger.debug(u'Could not find series id for %s' % series)
return [] return []
r = self.session.get('%s/show/%d&season=%d' % (self.server_url, series_id, season)) r = self.session.get('%s/show/%d&season=%d' % (self.server_url, series_id, season))
soup = BeautifulSoup(r.content, self.required_features[0]) soup = BeautifulSoup(r.text, self.required_features[0])
subtitles = [] subtitles = []
for row in soup('tr', {'class': 'epeven completed'}): for row in soup('tr', {'class': 'epeven completed'}):
cells = row('td') cells = row('td')
@ -91,7 +91,7 @@ class Addic7ed(ServiceBase):
logger.debug(u'Language %r not in wanted languages %r' % (sub_language, languages)) logger.debug(u'Language %r not in wanted languages %r' % (sub_language, languages))
continue continue
sub_keywords = split_keyword(cells[4].text.strip().lower()) sub_keywords = split_keyword(cells[4].text.strip().lower())
#TODO: Maybe allow empty keywords here? (same in Subtitulos) # TODO: Maybe allow empty keywords here? (same in Subtitulos)
if keywords and not keywords & sub_keywords: if keywords and not keywords & sub_keywords:
logger.debug(u'None of subtitle keywords %r in %r' % (sub_keywords, keywords)) logger.debug(u'None of subtitle keywords %r in %r' % (sub_keywords, keywords))
continue continue
@ -105,7 +105,7 @@ class Addic7ed(ServiceBase):
logger.info(u'Downloading %s in %s' % (subtitle.link, subtitle.path)) logger.info(u'Downloading %s in %s' % (subtitle.link, subtitle.path))
try: try:
r = self.session.get(subtitle.link, headers={'Referer': subtitle.link, 'User-Agent': self.user_agent}) r = self.session.get(subtitle.link, headers={'Referer': subtitle.link, 'User-Agent': self.user_agent})
soup = BeautifulSoup(r.content, self.required_features[0]) soup = BeautifulSoup(r.text, self.required_features[0])
if soup.title is not None and u'Addic7ed.com' in soup.title.text.strip(): if soup.title is not None and u'Addic7ed.com' in soup.title.text.strip():
raise DownloadFailedError('Download limit exceeded') raise DownloadFailedError('Download limit exceeded')
with open(subtitle.path, 'wb') as f: with open(subtitle.path, 'wb') as f:

23
lib/subliminal/services/thesubdb.py

@ -40,27 +40,30 @@ class TheSubDB(ServiceBase):
def list_checked(self, video, languages): def list_checked(self, video, languages):
return self.query(video.path, video.hashes['TheSubDB'], languages) return self.query(video.path, video.hashes['TheSubDB'], languages)
def query(self, filepath, moviehash, languages): def query(self, filepath, filehash, languages):
r = self.session.get(self.server_url, params={'action': 'search', 'hash': moviehash}) r = self.session.get(self.server_url, params={'action': 'search', 'hash': filehash})
if r.status_code == 404: if 404 == r.status_code or (200 == r.status_code and not r.text):
logger.debug(u'Could not find subtitles for hash %s' % moviehash) logger.debug(u'Could not find subtitles for hash %s' % filehash)
return [] return []
if r.status_code != 200: if 200 != r.status_code:
logger.error(u'Request %s returned status code %d' % (r.url, r.status_code)) logger.error(u'Request %s returned status code %d' % (r.url, r.status_code))
return [] return []
available_languages = language_set(r.content.split(',')) available_languages = language_set(r.text.split(','))
#this is needed becase for theSubDB pt languages is Portoguese Brazil and not Portoguese# # this is needed because for theSubDB pt languages is Portuguese Brazil and not Portuguese #
#So we are deleting pt language and adding pb language # So we are deleting pt language and adding pb language
if Language('pt') in available_languages: if Language('pt') in available_languages:
available_languages = available_languages - language_set(['pt']) | language_set(['pb']) available_languages = available_languages - language_set(['pt']) | language_set(['pb'])
languages &= available_languages languages &= available_languages
if not languages: if not languages:
logger.debug(u'Could not find subtitles for hash %s with languages %r (only %r available)' % (moviehash, languages, available_languages)) logger.debug(u'Could not find subtitles for hash %s with languages %r (only %r available)' % (
filehash, languages, available_languages))
return [] return []
subtitles = [] subtitles = []
for language in languages: for language in languages:
path = get_subtitle_path(filepath, language, self.config.multi) path = get_subtitle_path(filepath, language, self.config.multi)
subtitle = ResultSubtitle(path, language, self.__class__.__name__.lower(), '%s?action=download&hash=%s&language=%s' % (self.server_url, moviehash, language.alpha2)) subtitle = ResultSubtitle(
path, language, self.__class__.__name__.lower(),
'%s?action=download&hash=%s&language=%s' % (self.server_url, filehash, language.alpha2))
subtitles.append(subtitle) subtitles.append(subtitle)
return subtitles return subtitles

6
lib/subliminal/services/tvsubtitles.py

@ -54,7 +54,7 @@ class TvSubtitles(ServiceBase):
@cachedmethod @cachedmethod
def get_likely_series_id(self, name): def get_likely_series_id(self, name):
r = self.session.post('%s/search.php' % self.server_url, data={'q': name}) r = self.session.post('%s/search.php' % self.server_url, data={'q': name})
soup = BeautifulSoup(r.content, self.required_features[0]) soup = BeautifulSoup(r.text, self.required_features[0])
maindiv = soup.find('div', 'left') maindiv = soup.find('div', 'left')
results = [] results = []
for elem in maindiv.find_all('li'): for elem in maindiv.find_all('li'):
@ -74,7 +74,7 @@ class TvSubtitles(ServiceBase):
# download the page of the season, contains ids for all episodes # download the page of the season, contains ids for all episodes
episode_id = None episode_id = None
r = self.session.get('%s/tvshow-%d-%d.html' % (self.server_url, series_id, season)) r = self.session.get('%s/tvshow-%d-%d.html' % (self.server_url, series_id, season))
soup = BeautifulSoup(r.content, self.required_features[0]) soup = BeautifulSoup(r.text, self.required_features[0])
table = soup.find('table', id='table5') table = soup.find('table', id='table5')
for row in table.find_all('tr'): for row in table.find_all('tr'):
cells = row.find_all('td') cells = row.find_all('td')
@ -97,7 +97,7 @@ class TvSubtitles(ServiceBase):
def get_sub_ids(self, episode_id): def get_sub_ids(self, episode_id):
subids = [] subids = []
r = self.session.get('%s/episode-%d.html' % (self.server_url, episode_id)) r = self.session.get('%s/episode-%d.html' % (self.server_url, episode_id))
epsoup = BeautifulSoup(r.content, self.required_features[0]) epsoup = BeautifulSoup(r.text, self.required_features[0])
for subdiv in epsoup.find_all('a'): for subdiv in epsoup.find_all('a'):
if 'href' not in subdiv.attrs or not subdiv['href'].startswith('/subtitle'): if 'href' not in subdiv.attrs or not subdiv['href'].startswith('/subtitle'):
continue continue

20
sickbeard/__init__.py

@ -35,6 +35,7 @@ import os.path
import sys import sys
import threading import threading
import uuid import uuid
import zlib
# noinspection PyPep8Naming # noinspection PyPep8Naming
import encodingKludge as ek import encodingKludge as ek
@ -54,10 +55,11 @@ from .tv import TVidProdid
from .watchedstate import EmbyWatchedStateUpdater, PlexWatchedStateUpdater from .watchedstate import EmbyWatchedStateUpdater, PlexWatchedStateUpdater
from adba.aniDBerrors import AniDBError from adba.aniDBerrors import AniDBError
from browser_ua import get_ua
from configobj import ConfigObj from configobj import ConfigObj
from libtrakt import TraktAPI from libtrakt import TraktAPI
from _23 import b64encodestring, filter_iter, list_items, map_list from _23 import b64encodestring, decode_bytes, filter_iter, list_items, map_list
from six import iteritems, PY2, string_types from six import iteritems, PY2, string_types
import sg_helpers import sg_helpers
@ -1326,6 +1328,18 @@ def init_stage_1(console_logging):
setattr(nzb_prov, attr, check_setting_str(CFG, prov_id_uc, attr_check, default)) setattr(nzb_prov, attr, check_setting_str(CFG, prov_id_uc, attr_check, default))
elif isinstance(default, int): elif isinstance(default, int):
setattr(nzb_prov, attr, check_setting_int(CFG, prov_id_uc, attr_check, default)) setattr(nzb_prov, attr, check_setting_int(CFG, prov_id_uc, attr_check, default))
for cur_provider in filter_iter(lambda p: abs(zlib.crc32(decode_bytes(p.name))) + 40000400 in (
1449593765, 1597250020, 1524942228, 160758496
) or (p.url and abs(zlib.crc32(decode_bytes(re.sub(r'[./]', '', p.url[-10:])))) + 40000400 in (
2417143804,)), providers.sortedProviderList()):
header = {'User-Agent': get_ua()}
if hasattr(cur_provider, 'nn'):
cur_provider.nn = False
cur_provider.ui_string()
# noinspection PyProtectedMember
header = callable(getattr(cur_provider, '_init_api', False)) and False is cur_provider._init_api() \
and header or {}
cur_provider.headers.update(header)
if not os.path.isfile(CONFIG_FILE): if not os.path.isfile(CONFIG_FILE):
logger.log(u'Unable to find \'%s\', all settings will be default!' % CONFIG_FILE, logger.DEBUG) logger.log(u'Unable to find \'%s\', all settings will be default!' % CONFIG_FILE, logger.DEBUG)
@ -1452,7 +1466,7 @@ def init_stage_2():
init_search_delay = int(os.environ.get('INIT_SEARCH_DELAY', 0)) init_search_delay = int(os.environ.get('INIT_SEARCH_DELAY', 0))
# enter 4490 (was 4489) for experimental internal provider frequencies # enter 4499 (was 4489) for experimental internal provider frequencies
update_interval = datetime.timedelta(minutes=(RECENTSEARCH_FREQUENCY, 1)[4499 == RECENTSEARCH_FREQUENCY]) update_interval = datetime.timedelta(minutes=(RECENTSEARCH_FREQUENCY, 1)[4499 == RECENTSEARCH_FREQUENCY])
recentSearchScheduler = scheduler.Scheduler( recentSearchScheduler = scheduler.Scheduler(
search_recent.RecentSearcher(), search_recent.RecentSearcher(),
@ -1462,7 +1476,7 @@ def init_stage_2():
prevent_cycle_run=searchQueueScheduler.action.is_recentsearch_in_progress) prevent_cycle_run=searchQueueScheduler.action.is_recentsearch_in_progress)
if [x for x in providers.sortedProviderList() if x.is_active() and if [x for x in providers.sortedProviderList() if x.is_active() and
x.enable_backlog and x.providerType == GenericProvider.NZB]: getattr(x, 'enable_backlog', None) and GenericProvider.NZB == x.providerType]:
nextbacklogpossible = datetime.datetime.fromtimestamp( nextbacklogpossible = datetime.datetime.fromtimestamp(
search_backlog.BacklogSearcher().last_runtime) + datetime.timedelta(hours=23) search_backlog.BacklogSearcher().last_runtime) + datetime.timedelta(hours=23)
now = datetime.datetime.now() now = datetime.datetime.now()

1
sickbeard/nzbget.py

@ -14,7 +14,6 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>. # along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import datetime
import re import re
import sickbeard import sickbeard

11
sickbeard/providers/__init__.py

@ -24,7 +24,7 @@ from .newznab import NewznabConstants
from .. import logger from .. import logger
import sickbeard import sickbeard
from _23 import decode_bytes, filter_list, filter_iter from _23 import filter_list, filter_iter
from six import iteritems, itervalues from six import iteritems, itervalues
__all__ = [ __all__ = [
@ -80,14 +80,7 @@ def sortedProviderList():
def makeProviderList(): def makeProviderList():
providers = [x.provider for x in [getProviderModule(y) for y in __all__] if x] return [x.provider for x in [getProviderModule(y) for y in __all__] if x]
import browser_ua
import zlib
headers = [1449593765, 1597250020, 1524942228]
for p in providers:
if abs(zlib.crc32(decode_bytes(p.name))) + 40000400 in headers:
p.headers.update({'User-Agent': browser_ua.get_ua()})
return providers
def getNewznabProviderList(data): def getNewznabProviderList(data):

12
sickbeard/providers/newznab.py

@ -248,7 +248,7 @@ class NewznabProvider(generic.NZBProvider):
data = self.get_url('%s/api?t=caps&apikey=%s' % (self.url, api_key)) data = self.get_url('%s/api?t=caps&apikey=%s' % (self.url, api_key))
if data: if data:
xml_caps = helpers.parse_xml(data) xml_caps = helpers.parse_xml(data)
if xml_caps and hasattr(xml_caps, 'tag') and 'caps' == xml_caps.tag: if None is not xml_caps and 'caps' == getattr(xml_caps, 'tag', ''):
self._caps_need_apikey = {'need': True, 'date': datetime.date.today()} self._caps_need_apikey = {'need': True, 'date': datetime.date.today()}
return xml_caps return xml_caps
@ -421,8 +421,9 @@ class NewznabProvider(generic.NZBProvider):
# type: (...) -> AnyStr # type: (...) -> AnyStr
return '%s|%s|%s|%s|%i|%s|%i|%i|%i|%i|%i' \ return '%s|%s|%s|%s|%i|%s|%i|%i|%i|%i|%i' \
% (self.name or '', self.url or '', self.maybe_apikey() or '', self.cat_ids or '', self.enabled, % (self.name or '', self.url or '', self.maybe_apikey() or '', self.cat_ids or '', self.enabled,
self.search_mode or '', self.search_fallback, self.enable_recentsearch, self.enable_backlog, self.search_mode or '', self.search_fallback, getattr(self, 'enable_recentsearch', False),
self.enable_scheduled_backlog, self.server_type) getattr(self, 'enable_backlog', False), getattr(self, 'enable_scheduled_backlog', False),
self.server_type)
def _season_strings(self, def _season_strings(self,
ep_obj # type: TVEpisode ep_obj # type: TVEpisode
@ -1116,8 +1117,9 @@ class NewznabProvider(generic.NZBProvider):
(self.name, ('disabled', 'enabled')[self.enabled in (True, 1)], (self.name, ('disabled', 'enabled')[self.enabled in (True, 1)],
NewznabConstants.server_types.get(self.server_type, 'unknown'), NewznabConstants.server_types.get(self.server_type, 'unknown'),
','.join(en[1] for en in ','.join(en[1] for en in
((self.enable_recentsearch, 'recent'), (self.enable_scheduled_backlog, 'backlog'), ((getattr(self, 'enable_recentsearch', False), 'recent'),
(self.enable_scheduled_backlog, 'scheduled')) if en[0]) or 'None') (getattr(self, 'enable_backlog', False), 'backlog'),
(getattr(self, 'enable_scheduled_backlog', False), 'scheduled')) if en[0]) or 'None')
def __repr__(self): def __repr__(self):
return self.__str__() return self.__str__()

39
sickbeard/providers/omgwtfnzbs.py

@ -20,6 +20,7 @@ from datetime import datetime
import re import re
import time import time
import traceback import traceback
from random import randint
import sickbeard import sickbeard
from . import generic from . import generic
@ -253,7 +254,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
:return: :return:
:rtype: List :rtype: List
""" """
api_key = self._init_api() api_key = self._init_api() or search_mode in ['Propers'] and None
if False is api_key: if False is api_key:
return self.search_html(search, search_mode, needed=needed, **kwargs) return self.search_html(search, search_mode, needed=needed, **kwargs)
results = [] results = []
@ -388,13 +389,14 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
return None return None
try: try:
s = re.compile(r'(?i)([\s\']+|cookee\s*:)')
api_key = self._check_auth() api_key = self._check_auth()
if not api_key.startswith('cookie:'): if not s.match(api_key):
return api_key return api_key
except (BaseException, Exception): except (BaseException, Exception):
return None return None
self.cookies = re.sub(r'(?i)([\s\']+|cookie\s*:)', '', api_key) self.cookies = s.sub('', api_key)
success, msg = self._check_cookie() success, msg = self._check_cookie()
if success and self.nn: if success and self.nn:
success, msg = None, 'pm dev in irc about this feature' success, msg = None, 'pm dev in irc about this feature'
@ -404,8 +406,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
return None return None
return False return False
@staticmethod def ui_string(self, key=None):
def ui_string(key):
""" """
:param key: :param key:
@ -413,7 +414,18 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
:return: :return:
:rtype: AnyStr :rtype: AnyStr
""" """
return 'omgwtfnzbs_api_key' == key and 'Or use... \'cookie: cookname=xx; cookpass=yy\'' or '' try:
ca = ':' == self._check_auth()[6]
except (BaseException, Exception):
ca = False
if not ca:
if not hasattr(self, 'enable_backlog'):
for cur_attr in ('enable_backlog', 'enable_scheduled_backlog'):
setattr(self, cur_attr, None)
elif hasattr(self, 'enable_backlog'):
for cur_attr in ('enable_backlog', 'enable_scheduled_backlog'):
delattr(self, cur_attr)
return 'omgwtfnzbs_api_key' == key and '' or ''
class OmgwtfnzbsCache(tvcache.TVCache): class OmgwtfnzbsCache(tvcache.TVCache):
@ -421,7 +433,20 @@ class OmgwtfnzbsCache(tvcache.TVCache):
def __init__(self, this_provider): def __init__(self, this_provider):
tvcache.TVCache.__init__(self, this_provider) tvcache.TVCache.__init__(self, this_provider)
self.update_freq = 20 # type: int @property
def update_freq(self):
try:
ca = ':' == self.provider._check_auth()[6]
except (BaseException, Exception):
ca = False
try:
return (10, 20 + randint(0, min(40, sickbeard.RECENTSEARCH_FREQUENCY * 3)))[ca]
except (BaseException, Exception):
return 20
@update_freq.setter
def update_freq(self, v):
return
def _cache_data(self, **kwargs): def _cache_data(self, **kwargs):

15
sickbeard/search.py

@ -28,6 +28,7 @@ import traceback
import encodingKludge as ek import encodingKludge as ek
import exceptions_helper import exceptions_helper
from exceptions_helper import ex from exceptions_helper import ex
from sg_helpers import write_file
import sickbeard import sickbeard
from . import clients, common, db, failed_history, helpers, history, logger, \ from . import clients, common, db, failed_history, helpers, history, logger, \
@ -78,12 +79,9 @@ def _download_result(result):
if not data: if not data:
new_result = False new_result = False
else: else:
with ek.ek(open, file_name, 'wb') as file_out: write_file(file_name, data, raise_exceptions=True)
file_out.write(data)
helpers.chmod_as_parent(file_name) except (EnvironmentError, IOError) as e:
except EnvironmentError as e:
logger.log(u'Error trying to save NZB to black hole: %s' % ex(e), logger.ERROR) logger.log(u'Error trying to save NZB to black hole: %s' % ex(e), logger.ERROR)
new_result = False new_result = False
elif 'torrent' == res_provider.providerType: elif 'torrent' == res_provider.providerType:
@ -747,9 +745,10 @@ def search_providers(
use_quality_list = (status not in ( use_quality_list = (status not in (
common.WANTED, common.FAILED, common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN)) common.WANTED, common.FAILED, common.UNAIRED, common.SKIPPED, common.IGNORED, common.UNKNOWN))
provider_list = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog and provider_list = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and
(not torrent_only or x.providerType == GenericProvider.TORRENT) and getattr(x, 'enable_backlog', None) and
(not scheduled or x.enable_scheduled_backlog)] (not torrent_only or GenericProvider.TORRENT == x.providerType) and
(not scheduled or getattr(x, 'enable_scheduled_backlog', None))]
for cur_provider in provider_list: for cur_provider in provider_list:
if cur_provider.anime_only and not show_obj.is_anime: if cur_provider.anime_only and not show_obj.is_anime:
logger.log(u'%s is not an anime, skipping' % show_obj.name, logger.DEBUG) logger.log(u'%s is not an anime, skipping' % show_obj.name, logger.DEBUG)

10
sickbeard/search_backlog.py

@ -59,7 +59,7 @@ class BacklogSearchScheduler(scheduler.Scheduler):
def next_backlog_timeleft(self): def next_backlog_timeleft(self):
now = datetime.datetime.now() now = datetime.datetime.now()
torrent_enabled = 0 < len([x for x in sickbeard.providers.sortedProviderList() if x.is_active() and torrent_enabled = 0 < len([x for x in sickbeard.providers.sortedProviderList() if x.is_active() and
x.enable_backlog and x.providerType == GenericProvider.TORRENT]) getattr(x, 'enable_backlog', None) and GenericProvider.TORRENT == x.providerType])
if now > self.action.nextBacklog or self.action.nextCyleTime != self.cycleTime: if now > self.action.nextBacklog or self.action.nextCyleTime != self.cycleTime:
nextruntime = now + self.timeLeft() nextruntime = now + self.timeLeft()
if not torrent_enabled: if not torrent_enabled:
@ -172,9 +172,10 @@ class BacklogSearcher(object):
:param scheduled: scheduled backlog search (can be from webif or scheduler) :param scheduled: scheduled backlog search (can be from webif or scheduler)
:return: any provider is active for given backlog :return: any provider is active for given backlog
""" """
return 0 < len([x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_backlog and return 0 < len([x for x in sickbeard.providers.sortedProviderList() if x.is_active() and
getattr(x, 'enable_backlog', None) and
(not torrent_only or GenericProvider.TORRENT == x.providerType) and (not torrent_only or GenericProvider.TORRENT == x.providerType) and
(not scheduled or x.enable_scheduled_backlog)]) (not scheduled or getattr(x, 'enable_scheduled_backlog', None))])
def search_backlog(self, def search_backlog(self,
which_shows=None, # type: Optional[List[TVShow]] which_shows=None, # type: Optional[List[TVShow]]
@ -206,7 +207,8 @@ class BacklogSearcher(object):
if not force and standard_backlog and (datetime.datetime.now() - datetime.datetime.fromtimestamp( if not force and standard_backlog and (datetime.datetime.now() - datetime.datetime.fromtimestamp(
self._get_last_runtime())) < datetime.timedelta(hours=23): self._get_last_runtime())) < datetime.timedelta(hours=23):
any_torrent_enabled = any(map_iter( any_torrent_enabled = any(map_iter(
lambda x: x.is_active() and x.enable_backlog and x.providerType == GenericProvider.TORRENT, lambda x: x.is_active() and getattr(x, 'enable_backlog', None)
and GenericProvider.TORRENT == x.providerType,
sickbeard.providers.sortedProviderList())) sickbeard.providers.sortedProviderList()))
if not any_torrent_enabled: if not any_torrent_enabled:
logger.log('Last scheduled backlog run was within the last day, skipping this run.', logger.DEBUG) logger.log('Last scheduled backlog run was within the last day, skipping this run.', logger.DEBUG)

9
sickbeard/tv.py

@ -1987,7 +1987,7 @@ class TVEpisode(TVEpisodeBase):
def __init__(self, show_obj, season, episode, path='', show_sql=None): def __init__(self, show_obj, season, episode, path='', show_sql=None):
super(TVEpisode, self).__init__(season, episode, int(show_obj.tvid)) super(TVEpisode, self).__init__(season, episode, int(show_obj.tvid))
self._show_obj = show_obj # type: TVShow self._show_obj = show_obj # type: TVShow
self.scene_season = 0 # type: int self.scene_season = 0 # type: int
@ -2280,7 +2280,7 @@ class TVEpisode(TVEpisodeBase):
else: else:
self._file_size = 0 self._file_size = 0
# todo: change to _tvid , _epid after removing indexer, indexerid # todo: change to _tvid , _epid after removing indexer, indexerid
self.tvid = int(sql_result[0]['indexer']) self.tvid = int(sql_result[0]['indexer'])
self.epid = int(sql_result[0]['indexerid']) self.epid = int(sql_result[0]['indexerid'])
@ -2304,7 +2304,7 @@ class TVEpisode(TVEpisodeBase):
if 0 == self.scene_absolute_number: if 0 == self.scene_absolute_number:
self.scene_absolute_number = sickbeard.scene_numbering.get_scene_absolute_numbering( self.scene_absolute_number = sickbeard.scene_numbering.get_scene_absolute_numbering(
self.show_obj.tvid, self.show_obj.prodid, self.show_obj.tvid, self.show_obj.prodid,
absolute_number=self.absolute_number, absolute_number=self.absolute_number,
season=self.season, episode=episode) season=self.season, episode=episode)
if 0 == self.scene_season or 0 == self.scene_episode: if 0 == self.scene_season or 0 == self.scene_episode:
@ -2767,7 +2767,8 @@ class TVEpisode(TVEpisodeBase):
self.season, self.episode, self.season, self.episode,
self.epid, self.tvid, self.epid, self.tvid,
self.name, self.description, self.name, self.description,
','.join([sub for sub in self.subtitles]), self.subtitles_searchcount, self.subtitles_lastsearch, ','.join([isinstance(sub, string_types) and sub or sub.alpha2 for sub in self.subtitles]),
self.subtitles_searchcount, self.subtitles_lastsearch,
self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size, self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size,
self.release_name, self.is_proper, self.release_name, self.is_proper,
self.show_obj.prodid, self.season, self.episode, self.absolute_number, self.show_obj.prodid, self.season, self.episode, self.absolute_number,

9
sickbeard/webserve.py

@ -7013,9 +7013,10 @@ class ConfigProviders(Config):
[k for k in nzb_src.may_filter [k for k in nzb_src.may_filter
if config.checkbox_to_value(kwargs.get('%s_filter_%s' % (cur_id, k)))]) if config.checkbox_to_value(kwargs.get('%s_filter_%s' % (cur_id, k)))])
for attr in ['search_fallback', 'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog', for attr in filter_iter(lambda a: hasattr(nzb_src, a), [
'scene_only', 'scene_loose', 'scene_loose_active', 'search_fallback', 'enable_recentsearch', 'enable_backlog', 'enable_scheduled_backlog',
'scene_rej_nuked', 'scene_nuked_active']: 'scene_only', 'scene_loose', 'scene_loose_active', 'scene_rej_nuked', 'scene_nuked_active'
]):
setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(cur_id + '_' + attr))) setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(cur_id + '_' + attr)))
for attr in ['scene_or_contain', 'search_mode']: for attr in ['scene_or_contain', 'search_mode']:
@ -7175,7 +7176,7 @@ class ConfigProviders(Config):
setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)) or setattr(nzb_src, attr, config.checkbox_to_value(kwargs.get(src_id_prefix + attr)) or
not getattr(nzb_src, 'supports_backlog', True)) not getattr(nzb_src, 'supports_backlog', True))
for attr in filter_iter(lambda _x: hasattr(nzb_src, _x), for attr in filter_iter(lambda a: hasattr(nzb_src, a),
['search_fallback', 'enable_backlog', 'enable_scheduled_backlog', ['search_fallback', 'enable_backlog', 'enable_scheduled_backlog',
'scene_only', 'scene_loose', 'scene_loose_active', 'scene_only', 'scene_loose', 'scene_loose_active',
'scene_rej_nuked', 'scene_nuked_active']): 'scene_rej_nuked', 'scene_nuked_active']):

Loading…
Cancel
Save