4 changed files with 198 additions and 1 deletion
@@ -0,0 +1,196 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear.  If not, see <http://www.gnu.org/licenses/>.

import random
import re
import time
import traceback

from . import generic
from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser

from _23 import unidecode
from six import iteritems


class TorrentDBProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'TorrentDB')

        self.url_base = 'https://torrentdb.net/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'login_action': self.url_base + 'login',
                     'logout': self.url_base + 'logout',
                     'passkey': self.url_base + 'settings',
                     'search': self.url_base + 'filter/torrents?%s' % '&'.join(
                         ['_token=%s', '%s', 'tags=', 'sorting=created_at', 'direction=desc', 'qty=100', 'search=%s'])}

        self.categories = {'Season': [21, 52, 61, 62, 63, 64, 65], 'anime': [53]}
        self.categories['Cache'] = self.categories['Episode'] = self.categories['Season']

        self.url = self.urls['config_provider_home_uri']

        self.username, self.password, self._token, self._passkey, \
            self.freeleech, self.minseed, self.minleech = 7 * [None]

    def _authorised(self, **kwargs):

        return super(TorrentDBProvider, self)._authorised(
            logged_in=self.logged_in, post_params=dict(remember='on'),
            failed_msg=(lambda y=None: u'Failed to parse (or authenticate) a response from %s'))
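
    # logged_in() treats the session as valid when the response shows a Logout
    # link, the laravel_session cookie is present, and the CSRF token from the
    # page <meta> tag matches the hidden _token form field; the account passkey
    # is then scraped from the settings page for use in download links.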
    def logged_in(self, resp=None):

        result = True
        if not self._token:
            try:
                result = 'Username' not in resp and 'Logout' in resp
                if not self.session.cookies.get('laravel_session', domain='torrentdb.net'):
                    raise ValueError('Cookie invalid')
                input_tag = re.findall(r'(<input[^>]+?"(?:hidden|_token)"[^>]+?"(?:hidden|_token)"[^>]+?>)', resp)[0]
                csrf = re.findall(r'<meta[^>]+csrf-token[^>]+content[^"]+"\s*([^\s"]+)', resp)[0]
                token = re.findall(r'value\s*=\s*["\']\s*([^"\'\s]+)', input_tag)[0]
                self._token = csrf == token and token

                html = self.get_url(self.urls['passkey'], skip_auth=True)
                with BS4Parser(html, parse_only=dict(form={'action': (lambda at: at and 'change_pid' in at)})) as form:
                    self._passkey = re.sub('(?mi)^.*?passkey.*?$', '', form.get_text()).strip()
            except (BaseException, Exception):
                result = False
        return result

    def _search_provider(self, search_params, **kwargs):

        results = []
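
        # authorise with a randomised back-off: up to four attempts, sleeping a
        # random 5-15 seconds between retries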
        vals = [i for i in range(5, 16)]
        random.SystemRandom().shuffle(vals)
        attempts = None
        action = 'Failed to authorise'
        for attempts, s in enumerate((0, vals[0], vals[5], vals[10])):
            time.sleep(s)
            if self._authorised():
                action = 'Authorised'
                break
        if attempts:
            logger.log('%s after %s attempts' % (action, attempts + 1))
        if 'Auth' not in action:
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
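
        # case-insensitive pattern used to pick out torrent detail links from result rows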
        rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'info': r'/torrent/'})])

        for mode in search_params:
            for search_string in search_params[mode]:
                search_string = unidecode(search_string)
                search_url = self.urls['search'] % (
                    self._token, self._categories_string(mode, 'types[]=%s'), search_string)
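
                # fetch results with the same randomised back-off; after a failed
                # attempt, log out and drop the token so the next pass re-authorises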
                vals = [i for i in range(5, 16)]
                random.SystemRandom().shuffle(vals)
                attempts = html = soup = tbl = None
                action = 'failed fetch'
                for attempts, s in enumerate((0, vals[0], vals[5], vals[10])):
                    time.sleep(s)
                    html = self.get_url(search_url)
                    if self.should_skip():
                        logger.log('%s %s after %s attempts' % (mode, action, attempts + 1))
                        return results
                    if html:
                        try:
                            soup = BS4Parser(html).soup
                            tbl = soup.find('table', class_=(lambda at: at and 'table' in at))
                            if tbl:
                                action = 'data fetched'
                                break
                        except (BaseException, Exception):
                            pass
                    # after auth multi-attempted, force re-auth due to strange server responses
                    self.get_url(self.urls['logout'], skip_auth=True)
                    self._token = None

                if attempts:
                    logger.log('%s %s after %s attempts' % (mode, action, attempts + 1))

                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html) or not tbl:
                        raise generic.HaltParseException

                    with BS4Parser(html, parse_only=dict(table={'class': (lambda at: at and 'table' in at)})) as tbl:
                        tbl_rows = [] if not tbl else tbl.find_all('tr')

                        if 2 > len(tbl_rows):
                            raise generic.HaltParseException

                        head = None
                        for tr in tbl_rows[1:]:
                            cells = tr.find_all('td')
                            if 5 > len(cells):
                                continue
                            try:
                                head = head if None is not head else self._header_row(tr, {'info': r'(?:name)'})
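
                                # freeleech-only mode: skip rows whose title cell is not badged as free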
                                if self.freeleech:
                                    marked = ','.join([x.get_text(strip=True).lower()
                                                       for x in cells[head['info']].find_all(
                                        'span', attrs={'class': (lambda at: at and 'align-middle' in at)})])
                                    if 'free' not in marked:
                                        continue

                                seeders, leechers, size = [try_int(n, n) for n in [
                                    cells[head[x]].get_text().strip() for x in ('seed', 'leech', 'size')]]
                                if self._reject_item(seeders, leechers):
                                    continue

                                info = cells[head['info']].find('a', href=rc['info'])
                                title = info.get_text(strip=True)
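                                # direct download link: switch the detail url to the irssi download
                                # path and append the account passkey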
                                download_url = '%s/%s' % (
                                    info['href'].replace('/torrent/', '/torrent/download/irssi/', 1), self._passkey)
                            except (AttributeError, TypeError, ValueError, IndexError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    pass
                except (BaseException, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                if soup:
                    soup.clear(True)
                    del soup

                self._log_search(mode, len(items[mode]) - cnt, search_url)

                if mode in 'Season' and len(items[mode]):
                    break

            results = self._sort_seeding(mode, results + items[mode])

        return results


provider = TorrentDBProvider()