Browse Source

Update UnRar for Windows 5.91 to 6.00 x64.

Fix providers BitHDTV, Blutopia, HDTorrents, Pretome, PrivateHD, PTFiles, SceneHD, TVChaosUK.
Change handle redirects from POST requests.
Change Kodi Addon 1.0.8
tags/release_0.23.6^2
JackDandy 4 years ago
parent
commit
e084543844
  1. 10
      CHANGES.md
  2. 13
      gui/slick/interfaces/default/history.tmpl
  3. BIN
      lib/rarfile/UnRAR.exe
  4. 30
      lib/sg_helpers.py
  5. 2
      sickbeard/__init__.py
  6. 4
      sickbeard/clients/kodi/service.sickgear.watchedstate.updater/addon.xml
  7. 2
      sickbeard/clients/kodi/service.sickgear.watchedstate.updater/changelog.txt
  8. 96
      sickbeard/clients/kodi/service.sickgear.watchedstate.updater/service.py
  9. 16
      sickbeard/providers/bithdtv.py
  10. 52
      sickbeard/providers/blutopia.py
  11. 4
      sickbeard/providers/generic.py
  12. 4
      sickbeard/providers/hdtorrents.py
  13. 80
      sickbeard/providers/pretome.py
  14. 15
      sickbeard/providers/privatehd.py
  15. 5
      sickbeard/providers/ptf.py
  16. 14
      sickbeard/providers/scenehd.py
  17. 2
      sickbeard/providers/speedcd.py
  18. 107
      sickbeard/providers/tvchaosuk.py
  19. 2
      sickbeard/webserveInit.py

10
CHANGES.md

@@ -1,4 +1,12 @@
### 0.23.5 (2020-12-05 13:45:00 UTC)
### 0.23.6 (2020-12-11 01:50:00 UTC)
* Update UnRar for Windows 5.91 to 6.00 x64
* Fix providers BitHDTV, Blutopia, HDTorrents, Pretome, PrivateHD, PTFiles, SceneHD, TVChaosUK
* Change handle redirects from POST requests
* Change Kodi Addon 1.0.8
### 0.23.5 (2020-12-05 13:45:00 UTC)
* Change improve dark theme text legibility with green/gold background under "Downloads" in view-shows/simple layout

13
gui/slick/interfaces/default/history.tmpl

@@ -456,22 +456,23 @@
<tbody>
<tr class="$row_class()">
<td><img height="16px" src="$sbRoot/images/notifiers/kodi.png"><span class="vmid">Kodi</span>
<p><em class="grey-text">Isengard, Jarvis, Krypton</em><br>
Episodes marked watched or unwatched are pushed in real-time and shown above.</p>
<em class="grey-text">Leia, Krypton, Jarvis, and Isengard builds</em>
<p>Episodes marked watched or unwatched are pushed in real-time and shown above.</p>
</td>
<td>
<p>Make the following changes at Kodi;</p>
<ol>
<li>Install the SickGear repo to access its Kodi Add-on
<ul>
<li>in <b class="boldest">Filemanager</b>, add a source with <span class="grey-text">&lt;ip&gt;:&lt;port&gt;/kodi/</span> (e.g. <span class="grey-text">http://192.168.0.10:$sg_port/kodi/</span>)<br>
and name it for example, <span class="grey-text">SickGear</span>. <em>You will need to allow <span class="highlight-text">Unknown Sources</span> if not already</em> </li>
<ul style="padding-left:20px">
<li>in <b class="boldest">Filemanager</b>, add a source with <span class="grey-text">&lt;ip&gt;:&lt;port&gt;/kodi/</span> (e.g. <span class="grey-text">http://192.168.0.10:$sg_port/kodi/</span> or<br>
for SSL <span class="grey-text">https://192.168.0.10:$sg_port/kodi/</span> ...<a href="https://github.com/SickGear/SickGear/wiki/Install-SickGear-%5B81%5D-Media-Apps">see SSL guide</a>) and name it for example, <span class="grey-text">SickGear</span>.<br>
<em>You must allow <span class="highlight-text">Unknown Sources</span> in Kodi settings if not already</em> </li>
<li>in <b class="boldest">System/Add-ons</b>, "<span class="grey-text">Install from zip file</span>", in the folder list, select the <span class="grey-text">SickGear</span> source</li>
<li>select the <span class="grey-text">repository.sickgear</span> in the folder listing, and install the repository zip<br>
<em>Kodi will connect to the SickGear app to download and install its Add-on repository</em></li>
</ul>
<li>Install the SickGear Add-on from the repo</li>
<ul>
<ul style="padding-left:20px">
<li>in <b class="boldest">System/Add-ons</b>, "<span class="grey-text">Install from zip repository</span>", select "<span class="grey-text">SickGear Add-on repository</span>" / "<span class="grey-text">Services</span>"<br>
<li>select Add-on "<span class="grey-text">SickGear Watched State Updater</span>"</li>
<li>configure Add-on and restart Kodi after install or after switching profiles for the first time</li>

BIN
lib/rarfile/UnRAR.exe

Binary file not shown.

30
lib/sg_helpers.py

@@ -844,21 +844,23 @@ def get_url(url, # type: AnyStr
if post_json:
kwargs.setdefault('json', post_json)
response = session.post(url, timeout=timeout, **kwargs)
method = session.post
else:
for r in range(0, 5):
response = session.get(url, timeout=timeout, **kwargs)
if response.ok and not response.content:
if 'url=' in response.headers.get('Refresh', '').lower():
url = response.headers.get('Refresh').lower().split('url=')[1].strip('/')
if not url.startswith('http'):
parsed[2] = '/%s' % url
url = urlunparse(parsed)
response = session.get(url, timeout=timeout, **kwargs)
elif 'github' in url:
time.sleep(2)
continue
break
method = session.get
for r in range(0, 5):
response = method(url, timeout=timeout, **kwargs)
if response.ok and not response.content:
if 'url=' in response.headers.get('Refresh', '').lower():
url = response.headers.get('Refresh').lower().split('url=')[1].strip('/')
if not url.startswith('http'):
parsed[2] = '/%s' % url
url = urlunparse(parsed)
response = session.get(url, timeout=timeout, **kwargs)
elif 'github' in url:
time.sleep(2)
continue
break
# if encoding is not in header try to use best guess
# ignore downloads with savename

2
sickbeard/__init__.py

@@ -1407,7 +1407,7 @@ def init_stage_1(console_logging):
elif isinstance(default, int):
setattr(nzb_prov, attr, check_setting_int(CFG, prov_id_uc, attr_check, default))
for cur_provider in filter_iter(lambda p: abs(zlib.crc32(decode_bytes(p.name))) + 40000400 in (
1449593765, 1597250020, 1524942228, 160758496
1449593765, 1597250020, 1524942228, 160758496, 2925374331
) or (p.url and abs(zlib.crc32(decode_bytes(re.sub(r'[./]', '', p.url[-10:])))) + 40000400 in (
2417143804,)), providers.sortedProviderList()):
header = {'User-Agent': get_ua()}

4
sickbeard/clients/kodi/service.sickgear.watchedstate.updater/addon.xml

@@ -24,7 +24,9 @@
<assets>
<icon>icon.png</icon>
</assets>
<news>[B]1.0.7[/B] (2020-01-21)
<news>[B]1.0.8[/B] (2020-12-08)
- Auto-negotiate http/s when connecting
[B]1.0.7[/B] (2020-01-21)
- Public release
[B]1.0.6[/B] (2020-01-18)
- Public test release

2
sickbeard/clients/kodi/service.sickgear.watchedstate.updater/changelog.txt

@@ -1,3 +1,5 @@
[B]1.0.8[/B] (2020-12-08)
- Auto-negotiate http/s when connecting
[B]1.0.7[/B] (2020-01-21)
- Public release
[B]1.0.6[/B] (2020-01-18)

96
sickbeard/clients/kodi/service.sickgear.watchedstate.updater/service.py

@@ -22,6 +22,8 @@ except (BaseException, Exception):
from os import path, sep
import datetime
import socket
# noinspection PyUnresolvedReferences,PyProtectedMember
from ssl import _create_unverified_context
import sys
import time
import traceback
@@ -36,7 +38,10 @@ import xbmcgui
# noinspection PyUnresolvedReferences
import xbmcvfs
ADDON_VERSION = '1.0.7'
ADDON_VERSION = '1.0.8'
# try to locate /temp at parent location
PATH_TEMP = path.join(path.dirname(path.dirname(path.realpath(__file__))), 'temp')
PY2 = 2 == sys.version_info[0]
@@ -289,8 +294,12 @@ class SickGearWatchedStateUpdater(object):
self.notify('Update sent to SickGear')
url = 'http://%s:%s/update-watched-state-kodi/' % (
self.addon.getSetting('sickgear_ip'), self.addon.getSetting('sickgear_port'))
file_name = 'sickgear_extra.txt'
data_extra = self.load_json(file_name)
scheme = data_extra.get('scheme', 'http')
url = '%s://%s:%s/update-watched-state-kodi/' % (
scheme, self.addon.getSetting('sickgear_ip'), self.addon.getSetting('sickgear_port'))
self.log('Notify state to %s with path_file=%s' % (url, path_file))
msg_bad = 'Failed to contact SickGear on port %s at %s' % (
@@ -299,12 +308,36 @@ class SickGearWatchedStateUpdater(object):
payload_json = self.payload_prep(dict(media_id=media_id, path_file=path_file, played=play_count, label=profile))
if payload_json:
payload = urlencode(dict(payload=payload_json, version=ADDON_VERSION))
r = None
change_scheme = False
try:
rq = Request(url, data=decode_bytes(payload))
r = urlopen(rq)
param = ({'context': _create_unverified_context()}, {})[url.startswith('http:')]
r = urlopen(rq, **param)
except (BaseException, Exception):
change_scheme = True
try:
if change_scheme:
old_scheme, scheme = 'http', 'https'
if url.startswith('https'):
old_scheme, scheme = 'https', 'http'
url = url.replace(old_scheme, scheme)
self.log('Change scheme, notify state to %s' % url)
rq = Request(url, data=decode_bytes(payload))
param = ({'context': _create_unverified_context()}, {})[url.startswith('http:')]
r = urlopen(rq, **param)
response = json.load(r)
r.close()
if 'OK' == r.msg:
if change_scheme:
data_extra['scheme'] = scheme
output = json.dumps(data_extra)
self.save_json(file_name, output)
self.payload_prep(response)
if not all(itervalues(response)):
msg = 'Success, watched state updated'
@@ -317,37 +350,47 @@ class SickGearWatchedStateUpdater(object):
msg_bad = 'Failed to update watched state'
self.log(msg_bad)
self.notify(msg_bad, error=True)
except (URLError, IOError) as e:
self.log(u'Couldn\'t contact SickGear %s' % self.ex(e), error=True)
self.notify(msg_bad, error=True, period=15)
except (BaseException, Exception) as e:
self.log(u'Couldn\'t contact SickGear %s' % self.ex(e), error=True)
self.notify(msg_bad, error=True, period=15)
@staticmethod
def payload_prep(payload):
# type: (dict) -> str
def load_json(file_name):
result = {}
file_path = path.join(PATH_TEMP, file_name)
if xbmcvfs.exists(file_path):
fh = None
try:
fh = xbmcvfs.File(file_path)
result = json.load(fh)
except (BaseException, Exception):
pass
fh and fh.close()
return result
name = 'sickgear_buffer.txt'
# try to locate /temp at parent location
path_temp = path.join(path.dirname(path.dirname(path.realpath(__file__))), 'temp')
path_data = path.join(path_temp, name)
@staticmethod
def save_json(file_name, data):
temp_ok = xbmcvfs.exists(PATH_TEMP) or xbmcvfs.exists(path.join(PATH_TEMP, sep))
if not temp_ok:
temp_ok = xbmcvfs.mkdirs(PATH_TEMP)
data_pool = {}
if xbmcvfs.exists(path_data):
if temp_ok:
fh = None
try:
fh = xbmcvfs.File(path_data)
data_pool = json.load(fh)
fh = xbmcvfs.File(path.join(PATH_TEMP, file_name), 'w')
fh.write(data)
except (BaseException, Exception):
pass
fh and fh.close()
temp_ok = True
if not any([data_pool]):
temp_ok = xbmcvfs.exists(path_temp) or xbmcvfs.exists(path.join(path_temp, sep))
if not temp_ok:
temp_ok = xbmcvfs.mkdirs(path_temp)
def payload_prep(self, payload):
# type: (dict) -> str
file_name = 'sickgear_buffer.txt'
data_pool = self.load_json(file_name)
response_data = False
for k, v in iteritems(payload):
@@ -373,14 +416,7 @@ class SickGearWatchedStateUpdater(object):
data_pool.update({ts_now: payload})
output = json.dumps(data_pool)
if temp_ok:
fh = None
try:
fh = xbmcvfs.File(path_data, 'w')
fh.write(output)
except (BaseException, Exception):
pass
fh and fh.close()
self.save_json(file_name, output)
return output

16
sickbeard/providers/bithdtv.py

@@ -34,12 +34,11 @@ class BitHDTVProvider(generic.TorrentProvider):
self.url_home = ['https://www.bit-hdtv.com/']
self.url_vars = {'login': 'getrss.php', 'search': 'torrents.php?search=%s&%s'}
self.url_vars = {'login': 'getrss.php', 'search': 'torrents.php?search=%s&cat=%s'}
self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',
'search': '%(home)s%(vars)s'}
self.categories = {'Season': [12], 'Episode': [4, 5, 10], 'anime': [1]}
self.categories['Cache'] = self.categories['Season'] + self.categories['Episode']
self.categories = dict(shows=[10, 12])
self.digest, self.freeleech, self.minseed, self.minleech = 4 * [None]
@@ -54,8 +53,7 @@ class BitHDTVProvider(generic.TorrentProvider):
@staticmethod
def _has_signature(data=None):
return generic.TorrentProvider._has_signature(data) or \
(data and re.search(r'(?sim)(<title[^<]+BIT-HDTV|<style)', data[0:500]))
return generic.TorrentProvider._has_signature(data) or (data and re.search(r'(?sim)bit-hdtv', data))
def _search_provider(self, search_params, **kwargs):
@@ -65,12 +63,12 @@ class BitHDTVProvider(generic.TorrentProvider):
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'info': 'detail', 'get': r'download\.',
'fl': r'\[\W*F\W?L\W*\]'})])
rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'info': 'detail', 'get': r'download\.'})])
for mode in search_params:
for search_string in search_params[mode]:
search_string = unidecode(search_string)
search_url = self.urls['search'] % (search_string, self._categories_string(mode))
search_url = self.urls['search'] % (search_string, self._categories_string(mode, '%s', ','))
html = self.get_url(search_url, timeout=90)
if self.should_skip():
@@ -99,7 +97,7 @@ class BitHDTVProvider(generic.TorrentProvider):
seeders, leechers, size = [try_int(n, n) for n in [
cells[head[x]].get_text().strip() for x in ('seed', 'leech', 'size')]]
if self._reject_item(seeders, leechers, self.freeleech and (
not tr.attrs.get('bgcolor').endswith('FF99'))):
not tr.attrs.get('bgcolor', '').upper().endswith('FF99'))):
continue
info = tr.find('a', href=rc['info'])

52
sickbeard/providers/blutopia.py

@@ -40,8 +40,8 @@ class BlutopiaProvider(generic.TorrentProvider):
self.url_base = 'https://blutopia.xyz/'
self.urls = {'config_provider_home_uri': self.url_base,
'login': self.url_base + 'torrents',
'search': self.url_base + 'filterTorrents?%s' % '&'.join(
'login': self.url_base + 'pages/1',
'search': self.url_base + 'torrents/filter?%s' % '&'.join(
['_token=%s', 'search=%s', 'categories[]=%s', 'freeleech=%s', 'doubleupload=%s', 'featured=%s',
'username=', 'imdb=', 'tvdb=', 'tmdb=', 'mal=', 'view=list', 'sorting=created_at', 'qty=50',
'direction=desc'])}
@@ -53,23 +53,27 @@
self.filter = []
self.may_filter = OrderedDict([
('f0', ('not marked', False)), ('free', ('free', True)),
('double', ('2x up', True)), ('feat', ('featured', True))])
self.digest, self.token, self.resp, self.minseed, self.minleech = 5 * [None]
def logged_in(self, resp):
try:
self.token = re.findall(r'csrf[^=]*=\s*"([^"]+)', resp)[0]
resp = re.findall('(?sim)(<table.*?Result.*?</table>)', resp)
if resp:
self.resp = resp[0]
except (IndexError, TypeError):
return False
return self.has_all_cookies('XSRF-TOKEN')
('double', ('double up', True)), ('feat', ('featured', True))])
self.digest, self._token, self.resp, self.minseed, self.minleech = 5 * [None]
def _authorised(self, **kwargs):
return super(BlutopiaProvider, self)._authorised(
logged_in=lambda y=None: self.logged_in(y))
logged_in=self.logged_in, failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
def logged_in(self, resp=None):
result = True
if not self._token:
try:
result = 'Username' not in resp and 'Logout' in resp
input_tag = re.findall(r'(<input[^>]+?"(?:hidden|_token)"[^>]+?"(?:hidden|_token)"[^>]+?>)', resp)[0]
token = re.findall(r'value\s*=\s*["\']\s*([^"\'\s]+)', input_tag)[0]
csrf = re.findall(r'<meta[^>]+csrf-token[^>]+content[^"]+"\s*([^\s"]+)', resp)[0]
self._token = csrf == token and token
except (BaseException, Exception):
result = False
return result
def _search_provider(self, search_params, **kwargs):
@@ -109,27 +113,18 @@ class BlutopiaProvider(generic.TorrentProvider):
for search_string in search_params[mode]:
search_string = unidecode(search_string)
search_url = self.urls['search'] % (
self.token, search_string.replace('.', ' '), self._categories_string(template=''), '', '', '')
self._token, search_string.replace('.', ' '), self._categories_string(template=''), '', '', '')
resp = self.get_url(search_url)
if self.should_skip():
return results
resp_json = None
if None is not self.resp:
try:
resp_json = json.loads(resp)
except (BaseException, Exception):
pass
cnt = len(items[mode])
try:
if not resp or (resp_json and not resp_json.get('rows')):
if not resp:
raise generic.HaltParseException
html = '<html><body>%s</body></html>' % \
(resp if None is self.resp else
self.resp.replace('</tbody>', '%s</tbody>' % ''.join(resp_json.get('result', []))))
html = '<html><body>%s</body></html>' % resp
with BS4Parser(html, parse_only=dict(table={'class': (lambda at: at and 'table' in at)})) as tbl:
tbl_rows = [] if not tbl else tbl.find_all('tr')
@@ -158,8 +153,7 @@ class BlutopiaProvider(generic.TorrentProvider):
if self._reject_item(seeders, leechers):
continue
title = tr.find('a', href=rc['info'])
title = title.get_text().strip() if None is self.resp else title['data-original-title']
title = tr.find('a', href=rc['info']).get_text().strip()
download_url = self._link(''.join(rc['get'].findall(
tr.find('a', href=rc['get'])['href'])[0]))
except (AttributeError, TypeError, ValueError, IndexError):

4
sickbeard/providers/generic.py

@@ -1923,8 +1923,8 @@ class TorrentProvider(GenericProvider):
def _authorised(self, logged_in=None, post_params=None, failed_msg=None, url=None, timeout=30, **kwargs):
maxed_out = (lambda y: re.search(r'(?i)[1-3]((<[^>]+>)|\W)*' +
r'(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)', y))
maxed_out = (lambda y: re.search(
r'(?i)([1-3]((<[^>]+>)|\W)*(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)|last[^<]+?attempt)', y))
logged_in, failed_msg = [None is not a and a or b for (a, b) in (
(logged_in, (lambda y=None: self.has_all_cookies())),
(failed_msg, (lambda y='': maxed_out(y) and u'Urgent abort, running low on login attempts. ' +

4
sickbeard/providers/hdtorrents.py

@@ -41,7 +41,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login_action': '%(home)s%(vars)s',
'search': '%(home)s%(vars)s'}
self.categories = {'Episode': [59, 60, 30, 38, 65], 'anime': ['Animation']}
self.categories = {'Episode': [59, 60, 30, 38, 65], 'anime': [4489]}
self.categories['Season'] = self.categories['Cache'] = self.categories['Episode']
self.filter = []
@@ -84,7 +84,7 @@ class HDTorrentsProvider(generic.TorrentProvider):
search_url = self.urls['search'] % (
search_string,
self._categories_string(mode, template='category[]=%s')
.replace('&category[]=Animation', ('&genre[]=Animation', '')[mode in ['Cache', 'Propers']]))
.replace('&category[]=4489', ('&genre[]=Animation', '')[mode in ['Cache', 'Propers']]))
html = self.get_url(search_url)
if self.should_skip():
return results

80
sickbeard/providers/pretome.py

@@ -15,9 +15,16 @@
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
import re
import traceback
from . import generic
from .. import logger
from ..helpers import try_int
from bs4_parser import BS4Parser
from _23 import unidecode
from six import iteritems
class PreToMeProvider(generic.TorrentProvider):
@@ -28,46 +35,87 @@ class PreToMeProvider(generic.TorrentProvider):
self.url_base = 'https://pretome.info/'
self.urls = {'config_provider_home_uri': self.url_base,
'browse': self.url_base + 'rss.php?cat[]=7&sort=0&type=d&key=%s',
'search': '&st=1&tf=all&search=%s'}
'login': self.url_base + 'takelogin.php',
'search': self.url_base + 'browse.php?search=%s&tags=&st=1&tf=all&cat[]=7'}
self.url = self.urls['config_provider_home_uri']
self.passkey = None
self.api_key, self.username, self.password, self.minseed, self.minleech = 5 * [None]
def _authorised(self, **kwargs):
return self._check_auth()
return super(PreToMeProvider, self)._authorised(post_params=dict(
login='Login', returnto='%2F', login_pin=self.api_key, username=self.username, password=self.password))
def _search_provider(self, search_params, **kwargs):
self._authorised()
results = []
if not self._authorised():
return results
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
url = self.urls['browse'] % self.passkey
rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({'info': 'details', 'get': 'download'})])
for mode in search_params:
for search_string in search_params[mode]:
search_string = unidecode(search_string)
search_url = url + (self.urls['search'] % search_string, '')['Cache' == mode]
search_url = self.urls['search'] % search_string
xml_data = self.cache.get_rss(search_url)
html = self.get_url(search_url)
if self.should_skip():
return results
cnt = len(items[mode])
if xml_data and 'entries' in xml_data:
for entry in xml_data['entries']:
try:
if entry['title'] and 'download' in entry['link']:
items[mode].append((entry['title'], entry['link'], None, None))
except KeyError:
continue
try:
if not html or self._has_no_results(html):
raise generic.HaltParseException
with BS4Parser(html, attr='cellpadding="2"') as soup:
tbl_rows = [] if not soup else soup.find_all('tr')
if 2 > len(tbl_rows):
raise generic.HaltParseException
head = None
for tr in tbl_rows[1:]:
cells = tr.find_all('td')
if 10 > len(cells):
continue
try:
head = head if None is not head else self._header_row(
tr, {'seed': r'up', 'leech': r'down'})
seeders, leechers, size = [try_int(n, n) for n in [
cells[head[x]].get_text().strip() for x in ('seed', 'leech', 'size')]]
if self._reject_item(seeders, leechers):
continue
info = tr.find('a', href=rc['info'])
title = (info.get('title') or info.get_text()).strip()
download_url = self._link(tr.find('a', href=rc['get'])['href'])
except (AttributeError, TypeError, ValueError, IndexError):
continue
if title and download_url:
items[mode].append((title, download_url, seeders, self._bytesizer(size)))
except generic.HaltParseException:
pass
except (BaseException, Exception):
logger.error(u'Failed to parse. Traceback: %s' % traceback.format_exc())
self._log_search(mode, len(items[mode]) - cnt, search_url)
results = list(set(results + items[mode]))
results = self._sort_seeding(mode, results + items[mode])
return results
def ui_string(self, key):
return ('%s_api_key' % self.get_id()) == key and 'Pin' or \
('%s_api_key_tip' % self.get_id()) == key and \
'\'Pin=\' used in the <a href="%s">search input</a> at %s' % \
(self.url_base, self.name) or ''
provider = PreToMeProvider()

15
sickbeard/providers/privatehd.py

@@ -36,7 +36,7 @@ class PrivateHDProvider(generic.TorrentProvider):
self.url_base = 'https://privatehd.to/'
self.urls = {'config_provider_home_uri': self.url_base,
'login_action': self.url_base + 'auth/login',
'login': self.url_base + 'rules',
'search': self.url_base + 'torrents?%s' % '&'.join(
['in=1', 'tags=', 'type=2', 'language=0', 'subtitle=0', 'rip_type=0',
'video_quality=0', 'uploader=', 'search=%s', 'tv_type[]=%s'])}
@@ -48,15 +48,16 @@ class PrivateHDProvider(generic.TorrentProvider):
self.filter = []
self.may_filter = OrderedDict([
('f0', ('not marked', False)), ('free', ('free', True)),
('half', ('50% down', True)), ('double', ('2x up', True))])
self.username, self.password, self.minseed, self.minleech = 4 * [None]
('half', ('half down', True)), ('double', ('double up', True))])
self.digest, self.minseed, self.minleech = 3 * [None]
self.confirmed = False
def _authorised(self, **kwargs):
return super(PrivateHDProvider, self)._authorised(
logged_in=(lambda y=None: self.has_all_cookies('love')),
post_params={'email_username': self.username, 'form_tmpl': True})
logged_in=(lambda y='': 'English' in y and 'auth/login' not in y and all(
[(self.session.cookies.get('privatehdx_session', domain='') or 'sg!no!pw') in self.digest])),
failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
def _search_provider(self, search_params, **kwargs):
@@ -150,5 +151,9 @@ class PrivateHDProvider(generic.TorrentProvider):
return results
@staticmethod
def ui_string(key):
return 'privatehd_digest' == key and 'use... \'privatehdx_session=xx\'' or ''
provider = PrivateHDProvider()

5
sickbeard/providers/ptf.py

@@ -125,14 +125,15 @@ class PTFProvider(generic.TorrentProvider):
continue
try:
head = head if None is not head else self._header_row(tr)
seeders, leechers = 2 * [cells[head['seed']].get_text().strip()]
seeders, leechers = 2 * [cells[head['seed'] or head['leech']].get_text().strip()]
seeders, leechers = [try_int(n) for n in [
rc['seeders'].findall(seeders)[0], rc['leechers'].findall(leechers)[0]]]
if not rc['cats'].findall(tr.find('td').get('onclick', ''))[0] or self._reject_item(
seeders, leechers):
continue
title = tr.find('a', href=rc['info']).get_text().strip()
info = tr.find('a', href=rc['info'])
title = (info.get('title') or info.get_text()).strip()
snatches = tr.find('a', href=rc['snatch']).get_text().strip()
size = cells[head['size']].get_text().strip().replace(snatches, '')
download_url = self._link(tr.find('a', href=rc['get'])['href'])

14
sickbeard/providers/scenehd.py

@@ -34,18 +34,21 @@ class SceneHDProvider(generic.TorrentProvider):
self.url_home = ['https://scenehd.org/']
self.url_vars = {'login_action': 'login.php', 'search': 'browse.php?search=%s&cat=%s&sort=5'}
self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login_action': '%(home)s%(vars)s',
self.url_vars = {'login': 'getrss.php', 'search': 'browse.php?search=%s&cat=%s&sort=5'}
self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',
'search': '%(home)s%(vars)s'}
self.categories = {'shows': [5, 6, 7]}
self.username, self.password, self.freeleech, self.minseed, self.minleech = 5 * [None]
self.digest, self.freeleech, self.minseed, self.minleech = 4 * [None]
self.confirmed = False
def _authorised(self, **kwargs):
return super(SceneHDProvider, self)._authorised(post_params={'form_tmpl': True})
return super(SceneHDProvider, self)._authorised(
logged_in=(lambda y='': ['RSS links' in y] and all(
[(self.session.cookies.get(c, domain='') or 'sg!no!pw') in self.digest for c in ('uid', 'pass')])),
failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
def _search_provider(self, search_params, **kwargs):
@@ -118,7 +121,8 @@ class SceneHDProvider(generic.TorrentProvider):
@staticmethod
def ui_string(key):
return 'scenehd_confirm' == key and 'not marked as bad/nuked' or ''
return 'scenehd_confirm' == key and 'not marked as bad/nuked' or \
'scenehd_digest' == key and 'use... \'uid=xx; pass=yy\'' or ''
provider = SceneHDProvider()

2
sickbeard/providers/speedcd.py

@@ -49,7 +49,7 @@ class SpeedCDProvider(generic.TorrentProvider):
def _authorised(self, **kwargs):
result = False
if self.digest and 'None' not in self.digest:
if self.digest and 'None' not in self.digest and 'login_chk' in self.urls:
digest = [x[::-1] for x in self.digest[::-1].rpartition('=')]
self.digest = digest[2] + digest[1] + quote(unquote(digest[0]))
self.session.cookies = cookiejar_from_dict(dict({digest[2]: quote(unquote(digest[0]))}))

107
sickbeard/providers/tvchaosuk.py

@@ -36,20 +36,37 @@ class TVChaosUKProvider(generic.TorrentProvider):
def __init__(self):
generic.TorrentProvider.__init__(self, 'TVChaosUK')
self.url_base = 'https://www.tvchaosuk.com/'
self.url_base = 'https://tvchaosuk.com/'
self.urls = {'config_provider_home_uri': self.url_base,
'login_action': self.url_base + 'login.php',
'search': self.url_base + 'browse.php'}
'login_action': self.url_base + 'login',
'search': self.url_base + 'torrents/filter?%s' % '&'.join(
['search=%s', 'page=0', 'tmdb=', 'imdb=', 'tvdb=', 'description=', 'uploader=', 'view=list',
'start_year=', 'end_year=', 'sorting=created_at', 'direction=desc', 'qty=100', '_token=%s',
'types[]=SD', 'types[]=HD720p', 'types[]=HD1080p',
'types[]=SD Pack', 'types[]=HD720p Pack', 'types[]=HD1080p Pack'])}
self.url = self.urls['config_provider_home_uri']
self.username, self.password, self.freeleech, self.minseed, self.minleech, self.use_after_get_data = 6 * [None]
self.search_fallback = True
self.username, self.password, self._token, \
self.freeleech, self.minseed, self.minleech, self.use_after_get_data = 7 * [None]
def _authorised(self, **kwargs):
return super(TVChaosUKProvider, self)._authorised(
logged_in=(lambda y=None: self.has_all_cookies(pre='c_secure_')))
return super(TVChaosUKProvider, self)._authorised(logged_in=self.logged_in, post_params={'remember': '1'})
def logged_in(self, resp=None):
result = True
if not self._token:
try:
result = 'Username' not in resp and 'Logout' in resp
input_tag = re.findall(r'(<input[^>]+?"(?:hidden|_token)"[^>]+?"(?:hidden|_token)"[^>]+?>)', resp)[0]
token = re.findall(r'value\s*=\s*["\']\s*([^"\'\s]+)', input_tag)[0]
csrf = re.findall(r'<meta[^>]+csrf-token[^>]+content[^"]+"\s*([^\s"]+)', resp)[0]
self._token = csrf == token and token
except (BaseException, Exception):
result = False
return result
def _search_provider(self, search_params, **kwargs):
@@ -59,16 +76,11 @@ class TVChaosUKProvider(generic.TorrentProvider):
items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in
iteritems({'info': 'detail', 'get': 'download', 'fl': 'free'})])
rc = dict([(k, re.compile('(?i)' + v)) for (k, v) in iteritems({
'info': r'/torrents?/(?P<tid>(?P<tid_num>\d{2,})[^"]*)', 'get': 'download'})])
for mode in search_params:
for search_string in search_params[mode]:
search_string = unidecode(search_string)
search_string = re.sub(r'(?i)[^a-z0-9\s]', '%', unquote_plus(search_string))
kwargs = dict(post_data={'keywords': search_string, 'do': 'quick_sort', 'page': '0',
'category': '0', 'search_type': 't_name', 'sort': 'added',
'order': 'desc', 'daysprune': '-1'})
search_string = unidecode(unquote_plus(search_string))
vals = [i for i in range(5, 16)]
random.SystemRandom().shuffle(vals)
@@ -76,15 +88,15 @@ class TVChaosUKProvider(generic.TorrentProvider):
fetch = 'failed fetch'
for attempts, s in enumerate((0, vals[0], vals[5], vals[10])):
time.sleep(s)
html = self.get_url(self.urls['search'], **kwargs)
html = self.get_url(self.urls['search'] % (search_string, self._token))
if self.should_skip():
return results
if html:
try:
soup = BS4Parser(html).soup
tbl = soup.find('table', id='sortabletable')
tbl = soup.find('table', class_='table')
if tbl:
fetch = 'data fetched'
fetch = 'data fetched'
break
except (BaseException, Exception):
pass
@@ -97,7 +109,6 @@ class TVChaosUKProvider(generic.TorrentProvider):
raise generic.HaltParseException
tbl_rows = tbl.find_all('tr')
get_detail = True
if 2 > len(tbl_rows):
raise generic.HaltParseException
@@ -112,29 +123,14 @@ class TVChaosUKProvider(generic.TorrentProvider):
seeders, leechers, size = [try_int(n, n) for n in [
cells[head[x]].get_text().strip() for x in ('seed', 'leech', 'size')]]
if self._reject_item(seeders, leechers, self.freeleech and (
None is cells[1].find('img', title=rc['fl']))):
None is tr.find('i', class_='fa-star'))):
continue
info = tr.find('a', href=rc['info'])
title = (tr.find('div', class_='tooltip-content').get_text() or info.get_text()).strip()
title = re.findall('(?m)(^[^\r\n]+)', title)[0]
title = tr.find('a', href=rc['info']).get_text().strip()
download_url = self._link(tr.find('a', href=rc['get'])['href'])
except (BaseException, Exception):
continue
if get_detail and title.endswith('...'):
try:
with BS4Parser(self.get_url('%s%s' % (
self.urls['config_provider_home_uri'], info['href'].lstrip('/').replace(
self.urls['config_provider_home_uri'], '')))) as soup_detail:
title = soup_detail.find(
'td', class_='thead', attrs={'colspan': '3'}).get_text().strip()
title = re.findall('(?m)(^[^\r\n]+)', title)[0]
except IndexError:
continue
except (BaseException, Exception):
get_detail = False
try:
titles = self.regulate_title(title, mode, search_string)
if download_url and titles:
@@ -169,7 +165,7 @@ class TVChaosUKProvider(generic.TorrentProvider):
title = re.sub(r'((?:19|20)\d\d)/20(\d\d)?', r'\1', title)
# s<x> ep<y> -> s<x>e<y>
title = re.sub(r'(?i)s(\d\d+)[\W]*?e+(?:p|pisode)*(\d\d+)', r'S\1E\2', title)
has_series = re.findall(r'(?i)(.*?series[^\d]*?\d+)(.*)', title)
if has_series:
rc_xtras = re.compile(r'(?i)([. _-]|^)(special|extra)s?\w*([. _-]|$)')
@@ -245,7 +241,7 @@ class TVChaosUKProvider(generic.TorrentProvider):
for r in [(r'(?i)(?:\W(?:Series|Season))?\W(Repack)\W', r'`\1`'),
('(?i)%s(Proper)%s' % (bl, br), r'`\1`'), (r'%s\s*%s' % (bl, br), '`')]:
title = re.sub(r[0], r[1], title)
title = re.sub(r'[][]', '', title)
title = '%s%s-nogrp' % (('', t[0])[1 < len(t)], title)
for r in [(r'\s+[-]?\s+|\s+`|`\s+', '`'), ('`+', ' ')]:
@@ -307,44 +303,29 @@ class TVChaosUKProvider(generic.TorrentProvider):
def after_get_data(self, result):
if self.use_after_get_data:
tid = None
try:
tid = re.findall(r'id=(\d+)$', result.url)[0]
self.get_url(self.url_base + 'thanks/%s' % re.findall(r'download/(\d+)', result.url)[0])
except IndexError:
pass
if tid:
response = self.get_url(self.url_base + 'takethanks.php', post_data={'torrentid': tid})
if not self.should_skip():
msg = '' if not response else ' err=%s' % re.sub('</?error>', '', response)
if not re.search('(?i)remove[^>]+?thank', msg):
logger.log('Failed to "Say thanks!" to uploader of id=%s%s' % (tid, msg), logger.DEBUG)
def _season_strings(self, ep_obj, **kwargs):
return self.show_name_wildcard(
return \
generic.TorrentProvider._season_strings(
self, ep_obj, scene=False, prefix='%', sp_detail=(
lambda e: [(('', 'Series %(seasonnumber)d%%')[1 < try_int(e.get('seasonnumber'))]
+ '%(episodenumber)dof') % e, 'Series %(seasonnumber)d' % e])))
self, ep_obj, scene=False, sp_detail=(
lambda e: [(('', 'Series %(seasonnumber)d ')[1 < try_int(e.get('seasonnumber'))]
+ '%(episodenumber)d of') % e, 'Series %(seasonnumber)d' % e]))
def _episode_strings(self, ep_obj, **kwargs):
return self.show_name_wildcard(
return \
super(TVChaosUKProvider, self)._episode_strings(
ep_obj, scene=False, prefix='%', date_detail=(
lambda date: ['%s %s%% %s'.lstrip('0') % x for x in
ep_obj, scene=False, date_detail=(
lambda date: ['%s %s %s'.lstrip('0') % x for x in
[((d[-1], '%s' % m, y), (d, m, y)) + (((d, mf, y),), ())[m == mf]
for (d, m, mf, y) in [(date.strftime(x) for x in ('%d', '%b', '%B', '%Y'))]][0]]),
ep_detail=(lambda e: [naming_ep_type[2] % e] + (
[], ['%(episodenumber)dof' % e])[1 == try_int(e.get('seasonnumber'))]), **kwargs))
@staticmethod
def show_name_wildcard(search_items):
for d in search_items:
for k, v in d.items():
for i, val in enumerate(v):
v[i] = v[i].replace(' %', '% %', 1)
return search_items
ep_detail=(lambda e: [naming_ep_type[2] % e] + (
[], ['%(episodenumber)d of' % e])[1 == try_int(e.get('seasonnumber'))]), **kwargs)
@staticmethod
def ui_string(key):

2
sickbeard/webserveInit.py

@@ -184,7 +184,7 @@ class WebServer(threading.Thread):
(r'%s/js/(.*)' % self.options['web_root'], webserve.BaseStaticFileHandler,
{'path': os.path.join(self.options['data_root'], 'js')}),
(r'%s/kodi/(.*)' % self.options['web_root'], webserve.RepoHandler,
(r'%s/kodi/((?:(?![|]verifypeer=false).)*)' % self.options['web_root'], webserve.RepoHandler,
{'path': os.path.join(sickbeard.CACHE_DIR, 'clients', 'kodi'),
'default_filename': 'index.html'}),
])

Loading…
Cancel
Save