Browse Source

Fix TL provider - replace user/pass with digest auth method.

Change improve TL and IPT provider recent search performance to process new items since the previous cycle.
Change log a tip for TL and IPT users who have not improved on the default site setting "Torrents per page".
Add recommended.txt file with recommended libs that can be installed via: python -m pip install -r recommended.txt
Fix saving .nfo metadata where the file name contains unicode on certain Linux OS configurations.
tags/release_0.20.13^2
JackDandy 5 years ago
parent
commit
7738a3143e
  1. 11
      CHANGES.md
  2. 4
      recommended.txt
  3. 8
      sickbeard/helpers.py
  4. 16
      sickbeard/providers/generic.py
  5. 57
      sickbeard/providers/iptorrents.py
  6. 11
      sickbeard/providers/torrentday.py
  7. 77
      sickbeard/providers/torrentleech.py

11
CHANGES.md

@@ -1,4 +1,13 @@
### 0.20.12 (2019-12-09 16:30:00 UTC) ### 0.20.13 (2019-12-16 04:00:00 UTC)
* Fix TL provider - replace user/pass with digest auth method
* Change improve TL and IPT provider recent search performance to process new items since the previous cycle
* Change log a tip for TL and IPT users who have not improved on the default site setting "Torrents per page"
* Add recommended.txt file with recommended libs that can be installed via: python -m pip install -r recommended.txt
* Fix saving .nfo metadata where the file name contains unicode on certain Linux OS configurations
### 0.20.12 (2019-12-09 16:30:00 UTC)
* Fix using multiple hostnames with config General/Interface/"Allowed browser hostnames" * Fix using multiple hostnames with config General/Interface/"Allowed browser hostnames"
* Add config General/Interface/"Allow IP use for connections" * Add config General/Interface/"Allow IP use for connections"

4
recommended.txt

@@ -0,0 +1,4 @@
lxml>=4.4.2
regex>=2019.11.1
python-Levenshtein>=0.12.0
scandir>=1.10.0; python_version < '3.0'

8
sickbeard/helpers.py

@@ -1754,7 +1754,7 @@ def write_file(filepath, data, raw=False, xmltree=False, utf8=False, raise_excep
if make_dirs(ek.ek(os.path.dirname, filepath), False): if make_dirs(ek.ek(os.path.dirname, filepath), False):
try: try:
if raw: if raw:
with io.FileIO(filepath, 'wb') as fh: with ek.ek(io.FileIO, filepath, 'wb') as fh:
for chunk in data.iter_content(chunk_size=1024): for chunk in data.iter_content(chunk_size=1024):
if chunk: if chunk:
fh.write(chunk) fh.write(chunk)
@@ -1764,17 +1764,17 @@ def write_file(filepath, data, raw=False, xmltree=False, utf8=False, raise_excep
w_mode = 'w' w_mode = 'w'
if utf8: if utf8:
w_mode = 'a' w_mode = 'a'
with io.FileIO(filepath, 'wb') as fh: with ek.ek(io.FileIO, filepath, 'wb') as fh:
fh.write(codecs.BOM_UTF8) fh.write(codecs.BOM_UTF8)
if xmltree: if xmltree:
with io.FileIO(filepath, w_mode) as fh: with ek.ek(io.FileIO, filepath, w_mode) as fh:
if utf8: if utf8:
data.write(fh, encoding='utf-8') data.write(fh, encoding='utf-8')
else: else:
data.write(fh) data.write(fh)
else: else:
with io.FileIO(filepath, w_mode) as fh: with ek.ek(io.FileIO, filepath, w_mode) as fh:
fh.write(data) fh.write(data)
chmodAsParent(filepath) chmodAsParent(filepath)

16
sickbeard/providers/generic.py

@@ -29,7 +29,7 @@ import time
import urlparse import urlparse
import threading import threading
import socket import socket
from urllib import quote_plus from urllib import quote, quote_plus
import zlib import zlib
from base64 import b16encode, b32decode, b64decode from base64 import b16encode, b32decode, b64decode
@@ -805,7 +805,14 @@ class GenericProvider(object):
def _link(self, url, url_tmpl=None): def _link(self, url, url_tmpl=None):
url = url and str(url).strip().replace('&amp;', '&') or '' if url:
try:
url = url.encode('utf-8')
except (BaseException, Exception):
pass
url = quote(url).strip().replace('&amp;', '&')
if not url:
url = ''
return url if re.match('(?i)(https?://|magnet:)', url) \ return url if re.match('(?i)(https?://|magnet:)', url) \
else (url_tmpl or self.urls.get('get', (getattr(self, 'url', '') or else (url_tmpl or self.urls.get('get', (getattr(self, 'url', '') or
getattr(self, 'url_base')) + '%s')) % url.lstrip('/') getattr(self, 'url_base')) + '%s')) % url.lstrip('/')
@@ -1124,7 +1131,7 @@ class GenericProvider(object):
""" """
return '' return ''
def _log_search(self, mode='Cache', count=0, url='url missing'): def _log_search(self, mode='Cache', count=0, url='url missing', log_setting_hint=False):
""" """
Simple function to log the result of a search types except propers Simple function to log the result of a search types except propers
:param count: count of successfully processed items :param count: count of successfully processed items
@@ -1133,6 +1140,9 @@
if 'Propers' != mode: if 'Propers' != mode:
self.log_result(mode, count, url) self.log_result(mode, count, url)
if log_setting_hint:
logger.log('Perfomance tip: change "Torrents per Page" to 100 at the site/Settings page')
def log_result(self, mode='Cache', count=0, url='url missing'): def log_result(self, mode='Cache', count=0, url='url missing'):
""" """
Simple function to log the result of any search Simple function to log the result of any search

57
sickbeard/providers/iptorrents.py

@@ -39,7 +39,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
'RqHEa', 'LvEoDc0', 'Zvex2', 'LuF2', 'NXdu Vn', 'XZwQxeWY1', 'Yu42bzJ', 'tgG92']], 'RqHEa', 'LvEoDc0', 'Zvex2', 'LuF2', 'NXdu Vn', 'XZwQxeWY1', 'Yu42bzJ', 'tgG92']],
]]]) ]]])
self.url_vars = {'login': 't', 'search': 't?%s;q=%s;qf=ti%s%s#torrents'} self.url_vars = {'login': 't', 'search': 't?%s;q=%s;qf=ti%s%s;p=%s#torrents'}
self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s', self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',
'search': '%(home)s%(vars)s'} 'search': '%(home)s%(vars)s'}
@@ -54,7 +54,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
return super(IPTorrentsProvider, self)._authorised( return super(IPTorrentsProvider, self)._authorised(
logged_in=(lambda y='': all( logged_in=(lambda y='': all(
['IPTorrents' in y, 'type="password"' not in y[0:2048], self.has_all_cookies()] + ['IPTorrents' in y, 'type="password"' not in y[0:2048], self.has_all_cookies()] +
[(self.session.cookies.get(x) or 'sg!no!pw') in self.digest for x in 'uid', 'pass'])), [(self.session.cookies.get(x, domain='') or 'sg!no!pw') in self.digest for x in ('uid', 'pass')])),
failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
@staticmethod @staticmethod
@@ -67,22 +67,43 @@ class IPTorrentsProvider(generic.TorrentProvider):
if not self._authorised(): if not self._authorised():
return results return results
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []} last_recent_search = self.last_recent_search
last_recent_search = '' if not last_recent_search else last_recent_search.replace('id-', '')
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'info': 'detail', 'get': 'download'}.items())
for mode in search_params.keys(): for mode in search_params.keys():
urls = []
for search_string in search_params[mode]: for search_string in search_params[mode]:
urls += [[]]
search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
# URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile for page in range((3, 5)['Cache' == mode])[1:]:
search_url = self.urls['search'] % ( # URL with 50 tv-show results, or max 150 if adjusted in IPTorrents profile
self._categories_string(mode, '%s', ';'), search_string, urls[-1] += [self.urls['search'] % (
(';free', '')[not self.freeleech], (';o=seeders', '')['Cache' == mode]) self._categories_string(mode, '%s', ';'), search_string,
(';free', '')[not self.freeleech], (';o=seeders', '')['Cache' == mode], page)]
results += self._search_urls(mode, last_recent_search, urls)
last_recent_search = ''
return results
def _search_urls(self, mode, last_recent_search, urls):
results = []
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in dict(
info='detail', get='download', id=r'download.*?/([\d]+)').items())
lrs_found = False
lrs_new = True
for search_urls in urls: # this intentionally iterates once to preserve indentation
for search_url in search_urls:
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip(): if self.should_skip():
return results return results
cnt = len(items[mode]) cnt = len(items[mode])
cnt_search = 0
log_settings_hint = False
try: try:
if not html or self._has_no_results(html): if not html or self._has_no_results(html):
raise generic.HaltParseException raise generic.HaltParseException
@@ -94,23 +115,33 @@ class IPTorrentsProvider(generic.TorrentProvider):
if 2 > len(tbl_rows): if 2 > len(tbl_rows):
raise generic.HaltParseException raise generic.HaltParseException
if 'Cache' == mode and 100 > len(tbl_rows):
log_settings_hint = True
head = None head = None
for tr in tbl_rows[1:]: for tr in tbl_rows[1:]:
cells = tr.find_all('td') cells = tr.find_all('td')
if 5 > len(cells): if 5 > len(cells):
continue continue
cnt_search += 1
try: try:
head = head if None is not head else self._header_row( head = head if None is not head else self._header_row(
tr, header_strip='(?i)(?:leechers|seeders|size);') tr, header_strip='(?i)(?:leechers|seeders|size);')
seeders, leechers = [tryInt(tr.find('td', class_='t_' + x).get_text().strip()) seeders, leechers = [tryInt(tr.find('td', class_='t_' + x).get_text().strip())
for x in 'seeders', 'leechers'] for x in ('seeders', 'leechers')]
if self._reject_item(seeders, leechers): if self._reject_item(seeders, leechers):
continue continue
dl = tr.find('a', href=rc['get'])['href']
dl_id = rc['id'].findall(dl)[0]
lrs_found = dl_id == last_recent_search
if lrs_found:
break
info = tr.find('a', href=rc['info']) info = tr.find('a', href=rc['info'])
title = (info.attrs.get('title') or info.get_text()).strip() title = (info.attrs.get('title') or info.get_text()).strip()
size = cells[head['size']].get_text().strip() size = cells[head['size']].get_text().strip()
download_url = self._link(tr.find('a', href=rc['get'])['href']) download_url = self._link(dl)
except (AttributeError, TypeError, ValueError): except (AttributeError, TypeError, ValueError):
continue continue
@@ -121,7 +152,11 @@
pass pass
except (BaseException, Exception): except (BaseException, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
self._log_search(mode, len(items[mode]) - cnt, search_url) self._log_search(mode, len(items[mode]) - cnt, search_url, log_settings_hint)
if self.is_search_finished(mode, items, cnt_search, rc['id'], last_recent_search, lrs_new, lrs_found):
break
lrs_new = False
results = self._sort_seeding(mode, results + items[mode]) results = self._sort_seeding(mode, results + items[mode])

11
sickbeard/providers/torrentday.py

@@ -22,7 +22,6 @@ import time
from . import generic from . import generic
from sickbeard.bs4_parser import BS4Parser from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt, anon_url from sickbeard.helpers import tryInt, anon_url
from sickbeard import logger
class TorrentDayProvider(generic.TorrentProvider): class TorrentDayProvider(generic.TorrentProvider):
@@ -54,7 +53,7 @@ class TorrentDayProvider(generic.TorrentProvider):
return super(TorrentDayProvider, self)._authorised( return super(TorrentDayProvider, self)._authorised(
logged_in=(lambda y='': all( logged_in=(lambda y='': all(
['RSS URL' in y, self.has_all_cookies()] + ['RSS URL' in y, self.has_all_cookies()] +
[(self.session.cookies.get(x) or 'sg!no!pw') in self.digest for x in 'uid', 'pass'])), [(self.session.cookies.get(x, domain='') or 'sg!no!pw') in self.digest for x in ('uid', 'pass')])),
failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings')) failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
@staticmethod @staticmethod
@@ -88,7 +87,7 @@ class TorrentDayProvider(generic.TorrentProvider):
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []} items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'get': 'download', 'id': r'download.*?/([\d]+)'}.items()) rc = dict((k, re.compile('(?i)' + v)) for (k, v) in dict(get='download', id=r'download.*?/([\d]+)').items())
lrs_found = False lrs_found = False
lrs_new = True lrs_new = True
for search_urls in urls: # this intentionally iterates once to preserve indentation for search_urls in urls: # this intentionally iterates once to preserve indentation
@@ -124,7 +123,7 @@
head = head if None is not head else self._header_row( head = head if None is not head else self._header_row(
tr, header_strip='(?i)(?:leechers|seeders|size);') tr, header_strip='(?i)(?:leechers|seeders|size);')
seeders, leechers, size = [tryInt(n, n) for n in [ seeders, leechers, size = [tryInt(n, n) for n in [
cells[head[x]].get_text().strip() for x in 'seed', 'leech', 'size']] cells[head[x]].get_text().strip() for x in ('seed', 'leech', 'size')]]
if self._reject_item(seeders, leechers): if self._reject_item(seeders, leechers):
continue continue
@@ -146,9 +145,7 @@
except (BaseException, Exception): except (BaseException, Exception):
time.sleep(1.1) time.sleep(1.1)
self._log_search(mode, len(items[mode]) - cnt, search_url) self._log_search(mode, len(items[mode]) - cnt, search_url, log_settings_hint)
if log_settings_hint:
logger.log('Perfomance tip: change "Torrents per Page" to 100 at the TD site/Settings page')
if self.is_search_finished(mode, items, cnt_search, rc['id'], last_recent_search, lrs_new, lrs_found): if self.is_search_finished(mode, items, cnt_search, rc['id'], last_recent_search, lrs_new, lrs_found):
break break

77
sickbeard/providers/torrentleech.py

@@ -27,24 +27,29 @@ from lib.unidecode import unidecode
class TorrentLeechProvider(generic.TorrentProvider): class TorrentLeechProvider(generic.TorrentProvider):
def __init__(self): def __init__(self):
generic.TorrentProvider.__init__(self, 'TorrentLeech', cache_update_freq=15) generic.TorrentProvider.__init__(self, 'TorrentLeech')
self.url_base = 'https://v4.torrentleech.org/' self.url_base = 'https://v4.torrentleech.org/'
self.urls = {'config_provider_home_uri': self.url_base, self.urls = {'config_provider_home_uri': self.url_base,
'login_action': self.url_base, 'login': self.url_base,
'browse': self.url_base + 'torrents/browse/index/categories/%(cats)s', 'browse': self.url_base + 'torrents/browse/index/categories/%(cats)s/%(x)s',
'search': self.url_base + 'torrents/browse/index/query/%(query)s/categories/%(cats)s'} 'search': self.url_base + 'torrents/browse/index/query/%(query)s/categories/%(cats)s/%(x)s'}
self.categories = {'shows': [2, 26, 27, 32], 'anime': [7, 34, 35]} self.categories = {'shows': [2, 26, 27, 32], 'anime': [7, 34, 35]}
self.url = self.urls['config_provider_home_uri'] self.url = self.urls['config_provider_home_uri']
self.digest, self.minseed, self.minleech, self.freeleech = 4 * [None]
self.username, self.password, self.minseed, self.minleech = 4 * [None]
def _authorised(self, **kwargs): def _authorised(self, **kwargs):
return super(TorrentLeechProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies(pre='tl')), return super(TorrentLeechProvider, self)._authorised(
post_params={'remember_me': 'on', 'form_tmpl': True}) logged_in=(lambda y='': all(
['TorrentLeech' in y, 'type="password"' not in y[0:4096], self.has_all_cookies(pre='tl')])),
failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
@staticmethod
def _has_signature(data=None):
return generic.TorrentProvider._has_signature(data) or (data and re.search(r'(?i)<title[^<]+?leech', data))
def _search_provider(self, search_params, **kwargs): def _search_provider(self, search_params, **kwargs):
@@ -52,20 +57,41 @@ class TorrentLeechProvider(generic.TorrentProvider):
if not self._authorised(): if not self._authorised():
return results return results
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []} last_recent_search = self.last_recent_search
last_recent_search = '' if not last_recent_search else last_recent_search.replace('id-', '')
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'get': 'download'}.items())
for mode in search_params.keys(): for mode in search_params.keys():
urls = []
for search_string in search_params[mode]: for search_string in search_params[mode]:
search_url = self.urls[('search', 'browse')['Cache' == mode]] % { urls += [[]]
'cats': self._categories_string(mode, '', ','), for page in range((3, 5)['Cache' == mode])[1:]:
'query': isinstance(search_string, unicode) and unidecode(search_string) or search_string} urls[-1] += [self.urls[('search', 'browse')['Cache' == mode]] % {
'cats': self._categories_string(mode, '', ','),
'query': isinstance(search_string, unicode) and unidecode(search_string) or search_string,
'x': '%spage/%s' % (('facets/tags:FREELEECH/', '')[not self.freeleech], page)
}]
results += self._search_urls(mode, last_recent_search, urls)
last_recent_search = ''
return results
def _search_urls(self, mode, last_recent_search, urls):
results = []
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
rc = dict((k, re.compile('(?i)' + v)) for (k, v) in dict(get='download', id=r'download.*?/([\d]+)').items())
lrs_found = False
lrs_new = True
for search_urls in urls: # this intentionally iterates once to preserve indentation
for search_url in search_urls:
html = self.get_url(search_url) html = self.get_url(search_url)
if self.should_skip(): if self.should_skip():
return results return results
cnt = len(items[mode]) cnt = len(items[mode])
cnt_search = 0
log_settings_hint = False
try: try:
if not html or self._has_no_results(html): if not html or self._has_no_results(html):
raise generic.HaltParseException raise generic.HaltParseException
@@ -77,22 +103,32 @@ class TorrentLeechProvider(generic.TorrentProvider):
if 2 > len(tbl_rows): if 2 > len(tbl_rows):
raise generic.HaltParseException raise generic.HaltParseException
if 'Cache' == mode and 100 > len(tbl_rows):
log_settings_hint = True
head = None head = None
for tr in tbl_rows[1:]: for tr in tbl_rows[1:]:
cells = tr.find_all('td') cells = tr.find_all('td')
if 6 > len(cells): if 6 > len(cells):
continue continue
cnt_search += 1
try: try:
head = head if None is not head else self._header_row(tr) head = head if None is not head else self._header_row(tr)
seeders, leechers = [tryInt(n) for n in [ seeders, leechers = [tryInt(n) for n in [
tr.find('td', class_=x).get_text().strip() for x in 'seeders', 'leechers']] tr.find('td', class_=x).get_text().strip() for x in ('seeders', 'leechers')]]
if self._reject_item(seeders, leechers): if self._reject_item(seeders, leechers):
continue continue
dl = tr.find('a', href=rc['get'])['href']
dl_id = rc['id'].findall(dl)[0]
lrs_found = dl_id == last_recent_search
if lrs_found:
break
info = tr.find('td', class_='name').a info = tr.find('td', class_='name').a
title = (info.attrs.get('title') or info.get_text()).strip() title = (info.attrs.get('title') or info.get_text()).strip()
size = cells[head['size']].get_text().strip() size = cells[head['size']].get_text().strip()
download_url = self._link(tr.find('a', href=rc['get'])['href']) download_url = self._link(dl)
except (AttributeError, TypeError, ValueError): except (AttributeError, TypeError, ValueError):
continue continue
@@ -103,7 +139,11 @@
pass pass
except (BaseException, Exception): except (BaseException, Exception):
logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
self._log_search(mode, len(items[mode]) - cnt, search_url) self._log_search(mode, len(items[mode]) - cnt, search_url, log_settings_hint)
if self.is_search_finished(mode, items, cnt_search, rc['id'], last_recent_search, lrs_new, lrs_found):
break
lrs_new = False
results = self._sort_seeding(mode, results + items[mode]) results = self._sort_seeding(mode, results + items[mode])
@@ -113,5 +153,8 @@
return super(TorrentLeechProvider, self)._episode_strings(ep_obj, sep_date='|', **kwargs) return super(TorrentLeechProvider, self)._episode_strings(ep_obj, sep_date='|', **kwargs)
def ui_string(self, key):
return 'torrentleech_digest' == key and self._valid_home() and 'use... \'tluid=xx; tlpass=yy\'' or ''
provider = TorrentLeechProvider() provider = TorrentLeechProvider()

Loading…
Cancel
Save