7 changed files with 5 additions and 296 deletions
[Two deleted provider images: 237 B and 785 B; previews omitted]
@@ -1,152 +0,0 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import re
import traceback

from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode


class PotUKProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'PotUK')

        self.url_base = 'http://www.potuk.com/newforum/'
        self.urls = {'config_provider_home_uri': self.url_base,
                     'login': self.url_base + 'search.php',
                     'browse': self.url_base + 'search.php?do=getdaily&exclude=%s',
                     'get_data': self.url_base + 'misc.php?do=showattachments&t=%s'}

        self.url = self.urls['config_provider_home_uri']

        self.digest, self.resp = 2 * [None]

    def logged_in(self, resp):
        try:
            self.resp = re.findall('(?sim)<form .*?search.php.*?</form>', resp)[0]
        except (IndexError, TypeError):
            return False
        return self.has_all_cookies('bbsessionhash')

    def _authorised(self, **kwargs):

        return super(PotUKProvider, self)._authorised(
            logged_in=(lambda y=None: self.logged_in(y)),
            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))

    def _search_provider(self, search_params, **kwargs):

        results = []
        if not self._authorised():
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        opts = re.findall(r'(?sim)forumchoice\[\][^<]+(.*?)</select>', self.resp)[0]
        cat_opts = re.findall(r'(?mis)<option[^>]*?value=[\'"](\d+)[^>]*>(.*?)</option>', opts)
        include = []
        tv = False
        for c in cat_opts:
            if not tv and 'TV Shows' in c[1]:
                tv = True
            elif tv:
                if 3 > len(re.findall('&nbsp;', c[1])):
                    break
                elif not filter(lambda v: v in c[1], ('Requests', 'Offer', 'Discussion')):
                    include += [c[0]]
        exclude = ','.join(list(filter(lambda v: v not in include, map(lambda x: x[0], cat_opts))))

        for mode in search_params.keys():
            for search_string in search_params[mode]:
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string

                params = {}
                if 'Cache' == mode:
                    search_url = self.urls['browse'] % exclude
                else:
                    search_url = self._link(re.findall('(?i)action="([^"]+?)"', self.resp)[0])
                    params = {'query': search_string, 'showposts': 0, 'titleonly': 1, 'prefixchoice': '',
                              'replyless': 0, 'searchdate': 0, 'beforeafter': 'after', 'sortby': 'threadstart',
                              'order': 'descending', 'starteronly': 0, 'forumchoice': include}
                    tags = re.findall(r'(?is)(<input[^>]*?name=[\'"][^\'"]+[^>]*)', self.resp)
                    attrs = [[(re.findall(r'(?is)%s=[\'"]([^\'"]+)' % attr, c) or [''])[0]
                              for attr in ['type', 'name', 'value']] for c in tags]
                    for itype, name, value in attrs:
                        params.setdefault(name, value)
                    del params['doprefs']
                html = self.get_url(search_url, post_data=params)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException

                    with BS4Parser(html, parse_only=dict(table={'id': 'threadslist'})) as tbl:
                        tbl_rows = [] if not tbl else tbl.find_all('tr')

                        if 2 > len(tbl_rows):
                            raise generic.HaltParseException

                        for tr in tbl_rows[1:]:
                            if 6 > len(tr.find_all('td')) or not tr.select('img[alt*="ttach"]'):
                                continue
                            try:
                                link = tr.select('td[id^="td_threadtitle"]')[0].select('a[id*="title"]')[0]
                                title = link.get_text().strip()
                                download_url = self.urls['get_data'] % re.findall(r't=(\d+)', link['href'])[0]
                            except (AttributeError, TypeError, ValueError, IndexError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, '', ''))

                except generic.HaltParseException:
                    pass
                except (BaseException, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                self._log_search(
                    mode, len(items[mode]) - cnt, ('search_param: ' + search_string, search_url)['Cache' == mode])

            results = self._sort_seeding(mode, results + items[mode])

        return results

    def get_data(self, url):
        result = None
        html = self.get_url(url, timeout=90)
        if self.should_skip():
            return result

        try:
            result = self._link(re.findall(r'(?i)"(attachment\.php[^"]+?)"', html)[0])
        except IndexError:
            logger.log('Failed no torrent in response', logger.DEBUG)
        return result

    def ui_string(self, key):
        return ('%s_digest' % self.get_id()) == key and 'use... \'bbuserid=xx; bbpassword=yy\'' or ''


provider = PotUKProvider()
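The deleted PotUK provider drives a vBulletin search form by scraping the form's input fields and replaying them as POST data (see _search_provider above). Below is a minimal standalone sketch of that harvesting step; the sample form HTML is illustrative only, not taken from a real potuk.com response.

import re

SAMPLE_FORM = '''
<form action="search.php?do=process" method="post">
    <input type="hidden" name="securitytoken" value="abc123" />
    <input type="hidden" name="do" value="process" />
    <input type="checkbox" name="doprefs" value="1" />
</form>
'''

# grab every <input> tag that carries a name attribute, as the provider does
tags = re.findall(r'(?is)(<input[^>]*?name=[\'"][^\'"]+[^>]*)', SAMPLE_FORM)

# pull type/name/value out of each tag, defaulting to '' when an attribute is absent
attrs = [[(re.findall(r'(?is)%s=[\'"]([^\'"]+)' % attr, c) or [''])[0]
          for attr in ['type', 'name', 'value']] for c in tags]

params = {}
for itype, name, value in attrs:
    params.setdefault(name, value)
del params['doprefs']  # the provider strips this field before posting

print(params)  # -> {'securitytoken': 'abc123', 'do': 'process'}

Replaying hidden fields this way keeps the POST valid even when the forum rotates its security token between sessions.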
@@ -1,141 +0,0 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import re
import traceback

from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode


class WOPProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'WOP')

        self.url_home = ['https://worldofp2p.net/']

        self.url_vars = {'login': 'getrss.php', 'search': 'browse.php?%s' % '&'.join(
            ['search=%s', 'searchin=title', 'incldead=0', 'sort=4', 'type=desc', '%s'])}
        self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',
                         'search': '%(home)s%(vars)s'}
        self.url_drop = ['https://www']

        self.categories = {'Season': [41], 'Episode': [35, 5, 58, 42, 36, 55, 39, 37, 54, 38]}
        self.categories['Cache'] = self.categories['Season'] + self.categories['Episode']

        self.digest, self.freeleech, self.minseed, self.minleech = 4 * [None]

    def _authorised(self, **kwargs):

        return super(WOPProvider, self)._authorised(
            logged_in=(lambda y=None: all(
                [(None is y or re.search(r'(?i)rss\slink', y)), self.has_all_cookies()] +
                [(self.session.cookies.get(x) or 'sg!no!pw') in self.digest for x in ['hashv']])),
            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))

    @staticmethod
    def _has_signature(data=None):
        return generic.TorrentProvider._has_signature(data) or (data and re.search(r'(?sim)<title[^<]+WOP', data))

    def _search_provider(self, search_params, **kwargs):

        results = []
        if not self._authorised():
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
            'info': 'detail', 'get': 'download', 'filter': 'fa-(?:heart|star)'}.items())
        for mode in search_params.keys():
            for search_string in search_params[mode]:
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                search_url = self.urls['search'] % (search_string, self._categories_string(mode, 'cats2[]=%s'))

                html = self.get_url(search_url, timeout=90)
                if self.should_skip():
                    return results

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException

                    parse_only = dict(table={'class': (lambda at: at and 'yenitorrenttable' in at)})
                    with BS4Parser(html, tag='table', attr='yenitorrenttable', parse_only=parse_only) as tbl:
                        tbl_rows = [] if not tbl else tbl.find_all('tr')

                        if 2 > len(tbl_rows):
                            raise generic.HaltParseException

                        head = None
                        for tr in tbl_rows[1:]:
                            cells = tr.find_all('td')
                            if 5 > len(cells):
                                continue
                            try:
                                head = head if None is not head else self._header_row(
                                    tr, custom_tags=[('span', 'data-original-title')])
                                seeders, leechers, size = [n for n in [
                                    cells[head[x]].get_text().strip() for x in 'seed', 'leech', 'size']]
                                if self._reject_item(seeders, leechers, self.freeleech and (
                                        not tr.find('i', class_=rc['filter']))):
                                    continue

                                title = tr.find('a', href=rc['info']).get_text().strip()
                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                            except (AttributeError, TypeError, ValueError, KeyError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    pass
                except (BaseException, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

                self._log_search(mode, len(items[mode]) - cnt, search_url)

            results = self._sort_seeding(mode, results + items[mode])

        return results

    def _season_strings(self, ep_obj, **kwargs):

        return self._search_params(super(WOPProvider, self)._season_strings(ep_obj, **kwargs))

    def _episode_strings(self, ep_obj, **kwargs):

        return self._search_params(super(WOPProvider, self)._episode_strings(ep_obj, **kwargs))

    @staticmethod
    def _search_params(search_params):

        return [dict((k, ['*%s*' % re.sub(r'[.\s]', '*', v) for v in v]) for k, v in d.items()) for d in search_params]

    @staticmethod
    def ui_string(key):

        return 'wop_digest' == key and 'use... \'uid=xx; pass=yy; hashv=zz\'' or ''


provider = WOPProvider()
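WOPProvider._search_params (above) rewrites each generated season/episode search string into a wildcard form before querying the tracker, so that 'Show Name S01E02' and 'Show.Name.S01E02' match the same torrent titles. A standalone sketch of that rewrite, with illustrative input data:

import re

def to_wildcards(search_params):
    # wrap each term in '*' and turn dots/whitespace into '*',
    # mirroring the list-of-dicts shape the provider passes around
    return [dict((k, ['*%s*' % re.sub(r'[.\s]', '*', v) for v in v])
                 for k, v in d.items()) for d in search_params]

print(to_wildcards([{'Episode': ['Show Name S01E02', 'Show.Name 1x02']}]))
# -> [{'Episode': ['*Show*Name*S01E02*', '*Show*Name*1x02*']}]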