diff --git a/CHANGES.md b/CHANGES.md index b3c3c3b..f8fdb06 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,5 +1,6 @@ ### 0.25.0 (2021-xx-xx xx:xx:xx UTC) +* Add provider TorrentDB [develop changelog] diff --git a/gui/slick/images/providers/torrentdb.png b/gui/slick/images/providers/torrentdb.png new file mode 100644 index 0000000..fc945de Binary files /dev/null and b/gui/slick/images/providers/torrentdb.png differ diff --git a/sickbeard/providers/__init__.py b/sickbeard/providers/__init__.py index 989acc1..f4028db 100755 --- a/sickbeard/providers/__init__.py +++ b/sickbeard/providers/__init__.py @@ -43,7 +43,7 @@ __all__ = [ 'immortalseed', 'iptorrents', 'limetorrents', 'magnetdl', 'milkie', 'morethan', 'nebulance', 'ncore', 'nyaa', 'pretome', 'privatehd', 'ptf', 'rarbg', 'revtt', 'scenehd', 'scenetime', 'shazbat', 'showrss', 'snowfl', 'speedapp', 'speedcd', - 'thepiratebay', 'torlock', 'torrentday', 'torrenting', 'torrentleech', 'tvchaosuk', + 'thepiratebay', 'torlock', 'torrentday', 'torrentdb', 'torrenting', 'torrentleech', 'tvchaosuk', 'xspeeds', 'zooqle', # anime 'tokyotoshokan', diff --git a/sickbeard/providers/torrentdb.py b/sickbeard/providers/torrentdb.py new file mode 100644 index 0000000..7a3fa5c --- /dev/null +++ b/sickbeard/providers/torrentdb.py @@ -0,0 +1,196 @@ +# coding=utf-8 +# +# This file is part of SickGear. +# +# SickGear is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# SickGear is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
+ +import random +import re +import time +import traceback + +from . import generic +from .. import logger +from ..helpers import try_int +from bs4_parser import BS4Parser + +from _23 import unidecode +from six import iteritems + + +class TorrentDBProvider(generic.TorrentProvider): + + def __init__(self): + generic.TorrentProvider.__init__(self, 'TorrentDB') + + self.url_base = 'https://torrentdb.net/' + self.urls = {'config_provider_home_uri': self.url_base, + 'login_action': self.url_base + 'login', + 'logout': self.url_base + 'logout', + 'passkey': self.url_base + 'settings', + 'search': self.url_base + 'filter/torrents?%s' % '&'.join( + ['_token=%s', '%s', 'tags=', 'sorting=created_at', 'direction=desc', 'qty=100', 'search=%s'])} + + self.categories = {'Season': [21, 52, 61, 62, 63, 64, 65], 'anime': [53]} + self.categories['Cache'] = self.categories['Episode'] = self.categories['Season'] + + self.url = self.urls['config_provider_home_uri'] + + self.username, self.password, self._token, self._passkey, \ + self.freeleech, self.minseed, self.minleech = 7 * [None] + + def _authorised(self, **kwargs): + + return super(TorrentDBProvider, self)._authorised( + logged_in=self.logged_in, post_params=dict(remember='on'), + failed_msg=(lambda y=None: u'Failed to parse (or authenticate) a response from %s')) + + def logged_in(self, resp=None): + + result = True + if not self._token: + try: + result = 'Username' not in resp and 'Logout' in resp + if not self.session.cookies.get('laravel_session', domain='torrentdb.net'): + raise ValueError('Cookie invalid') + input_tag = re.findall(r'(<input[^>]+?"(?:hidden|_token)"[^>]+?"(?:hidden|_token)"[^>]+?>)', resp)[0] + csrf = re.findall(r'<meta[^>]+csrf-token[^>]+content[^"]+"\s*([^\s"]+)', resp)[0] + token = re.findall(r'value\s*=\s*["\']\s*([^"\'\s]+)', input_tag)[0] + self._token = csrf == token and token + + html = self.get_url(self.urls['passkey'], skip_auth=True) + with BS4Parser(html, parse_only=dict(form={'action': (lambda at: at and
'change_pid' in at)})) as form: + self._passkey = re.sub('(?mi)^.*?passkey.*?$', '', form.get_text()).strip() + except (BaseException, Exception): + result = False + return result + + def _search_provider(self, search_params, **kwargs): + + results = [] + + vals = [i for i in range(5, 16)] + random.SystemRandom().shuffle(vals) + attempts = None + action = 'Failed to authorise' + for attempts, s in enumerate((0, vals[0], vals[5], vals[10])): + time.sleep(s) + if self._authorised(): + action = 'Authorised' + break + if attempts: + logger.log('%s after %s attempts' % (action, attempts + 1)) + if 'Auth' not in action: + return results + + items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []} + + rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'info': r'/torrent/'})]) + + for mode in search_params: + for search_string in search_params[mode]: + search_string = unidecode(search_string) + search_url = self.urls['search'] % ( + self._token, self._categories_string(mode, 'types[]=%s'), search_string) + + vals = [i for i in range(5, 16)] + random.SystemRandom().shuffle(vals) + attempts = html = soup = tbl = None + action = 'failed fetch' + for attempts, s in enumerate((0, vals[0], vals[5], vals[10])): + time.sleep(s) + html = self.get_url(search_url) + if self.should_skip(): + logger.log('%s %s after %s attempts' % (mode, action, attempts + 1)) + return results + if html: + try: + soup = BS4Parser(html).soup + tbl = soup.find('table', class_=(lambda at: at and 'table' in at)) + if tbl: + action = 'data fetched' + break + except (BaseException, Exception): + pass + # after auth multi-attempted, force re-auth due to strange server responses + self.get_url(self.urls['logout'], skip_auth=True) + self._token = None + + if attempts: + logger.log('%s %s after %s attempts' % (mode, action, attempts+1)) + + if self.should_skip(): + return results + + cnt = len(items[mode]) + try: + if not html or self._has_no_results(html) or not tbl: + raise 
generic.HaltParseException + + with BS4Parser(html, parse_only=dict(table={'class': (lambda at: at and 'table' in at)})) as tbl: + tbl_rows = [] if not tbl else tbl.find_all('tr') + + if 2 > len(tbl_rows): + raise generic.HaltParseException + + head = None + for tr in tbl_rows[1:]: + cells = tr.find_all('td') + if 5 > len(cells): + continue + try: + head = head if None is not head else self._header_row(tr, {'info': r'(?:name)'}) + + if self.freeleech: + marked = ','.join([x.get_text(strip=True).lower() + for x in cells[head['info']].find_all( + 'span', attrs={'class': (lambda at: at and 'align-middle' in at)})]) + if 'free' not in marked: + continue + + seeders, leechers, size = [try_int(n, n) for n in [ + cells[head[x]].get_text().strip() for x in ('seed', 'leech', 'size')]] + if self._reject_item(seeders, leechers): + continue + + info = cells[head['info']].find('a', href=rc['info']) + title = info.get_text(strip=True) + download_url = '%s/%s' % ( + info['href'].replace('/torrent/', '/torrent/download/irssi/', 1), self._passkey) + except (AttributeError, TypeError, ValueError, IndexError): + continue + + if title and download_url: + items[mode].append((title, download_url, seeders, self._bytesizer(size))) + + except generic.HaltParseException: + pass + except (BaseException, Exception): + logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR) + + if soup: + soup.clear(True) + del soup + + self._log_search(mode, len(items[mode]) - cnt, search_url) + + if mode in 'Season' and len(items[mode]): + break + + results = self._sort_seeding(mode, results + items[mode]) + + return results + + +provider = TorrentDBProvider()