
Add indicator for public access search providers.

Change improve probability selecting most seeded release.
Change add the TorrentDay x265 category to search.
Change torrent provider code PEP8 and refactoring.
Add BTScene torrent provider.
Add Extratorrent provider.
Add Limetorrents provider.
Add nCore torrent provider.
Remove Usenet Crawler provider.
pull/764/head
JackDandy committed 0d50a4b345, 9 years ago
56 changed files:

  1. CHANGES.md (8)
  2. gui/slick/images/providers/btscene.png (BIN)
  3. gui/slick/images/providers/extratorrent.png (BIN)
  4. gui/slick/images/providers/limetorrents.png (BIN)
  5. gui/slick/images/providers/ncore.png (BIN)
  6. gui/slick/interfaces/default/config_providers.tmpl (17)
  7. sickbeard/properFinder.py (4)
  8. sickbeard/providers/__init__.py (12)
  9. sickbeard/providers/alpharatio.py (20)
  10. sickbeard/providers/beyondhd.py (8)
  11. sickbeard/providers/bithdtv.py (16)
  12. sickbeard/providers/bitmetv.py (16)
  13. sickbeard/providers/btn.py (53)
  14. sickbeard/providers/btscene.py (117)
  15. sickbeard/providers/dh.py (16)
  16. sickbeard/providers/extratorrent.py (108)
  17. sickbeard/providers/fano.py (14)
  18. sickbeard/providers/filelist.py (12)
  19. sickbeard/providers/freshontv.py (23)
  20. sickbeard/providers/funfile.py (23)
  21. sickbeard/providers/generic.py (105)
  22. sickbeard/providers/gftracker.py (13)
  23. sickbeard/providers/grabtheinfo.py (26)
  24. sickbeard/providers/hd4free.py (12)
  25. sickbeard/providers/hdbits.py (15)
  26. sickbeard/providers/hdspace.py (28)
  27. sickbeard/providers/ilt.py (14)
  28. sickbeard/providers/iptorrents.py (26)
  29. sickbeard/providers/limetorrents.py (109)
  30. sickbeard/providers/morethan.py (22)
  31. sickbeard/providers/ncore.py (112)
  32. sickbeard/providers/newznab.py (2)
  33. sickbeard/providers/nyaatorrents.py (77)
  34. sickbeard/providers/omgwtfnzbs.py (6)
  35. sickbeard/providers/pisexy.py (18)
  36. sickbeard/providers/pretome.py (6)
  37. sickbeard/providers/privatehd.py (18)
  38. sickbeard/providers/ptf.py (14)
  39. sickbeard/providers/rarbg.py (26)
  40. sickbeard/providers/revtt.py (14)
  41. sickbeard/providers/rsstorrent.py (2)
  42. sickbeard/providers/scc.py (25)
  43. sickbeard/providers/scenetime.py (17)
  44. sickbeard/providers/shazbat.py (14)
  45. sickbeard/providers/speedcd.py (16)
  46. sickbeard/providers/thepiratebay.py (26)
  47. sickbeard/providers/tokyotoshokan.py (84)
  48. sickbeard/providers/torrentbytes.py (21)
  49. sickbeard/providers/torrentday.py (22)
  50. sickbeard/providers/torrenting.py (20)
  51. sickbeard/providers/torrentleech.py (27)
  52. sickbeard/providers/torrentshack.py (30)
  53. sickbeard/providers/transmithe_net.py (10)
  54. sickbeard/providers/tvchaosuk.py (56)
  55. sickbeard/providers/womble.py (8)
  56. sickbeard/providers/zooqle.py (8)

CHANGES.md (8)

@@ -89,6 +89,11 @@
 * Add PTF torrent provider
 * Add ILT torrent provider
 * Add Fano torrent provider
+* Add BTScene torrent provider
+* Add Extratorrent provider
+* Add Limetorrents provider
+* Add nCore torrent provider
+* Remove Usenet-Crawler provider
 * Change CPU throttling on General Config/Advanced to "Disabled" by default for new installs
 * Change provider OMGWTFNZBS api url and auto reject nuked releases
 * Change Search Provider page to load torrent settings only when Search torrents is enabled in Search Settings
@@ -120,6 +125,9 @@
 * Change post process to join incrementally named (i.e. file.001 to file.nnn) split files
 * Change replace unrar2 lib with rarfile 3.0 and UnRAR.exe 5.40 freeware
 * Change post process "Copy" to delete redundant files after use
+* Add indicator for public access search providers
+* Change improve probability selecting most seeded release
+* Change add the TorrentDay x265 category to search
 [develop changelog]
 * Change send nzb data to NZBGet for Anizb instead of url

gui/slick/images/providers/btscene.png (BIN, new file, 548 B)

gui/slick/images/providers/extratorrent.png (BIN, new file, 497 B)

gui/slick/images/providers/limetorrents.png (BIN, new file, 682 B)

gui/slick/images/providers/ncore.png (BIN, new file, 482 B)

gui/slick/interfaces/default/config_providers.tmpl (17)

@@ -87,7 +87,7 @@
 <div class="component-group-desc">
 <h3>Provider Priorities</h3>
 <p>Check off and drag the providers into the order you want them to be used.</p>
-<p>At least one provider is required but two are recommended.</p>
+<p>At least one provider is required, two are recommended.</p>
 #if $methods_notused
 <blockquote style="margin:20px 0"><%= '/'.join(x for x in methods_notused) %> providers can be enabled in <a href="$sbRoot/config/search/">Search Settings</a></blockquote>
@@ -109,7 +109,10 @@
 <input type="checkbox" id="enable_$cur_name" class="provider_enabler" <%= html_checked if cur_provider.is_enabled() else '' %>/>
 <a href="<%= anon_url(cur_url) %>" class="imgLink" rel="noreferrer" onclick="window.open(this.href,'_blank');return false;"><img src="$sbRoot/images/providers/$cur_provider.image_name()" alt="$tip" title="$tip" width="16" height="16" style="vertical-align:middle" /></a>
 <span style="vertical-align:middle">$cur_provider.name$state</span>
-<%= '*' if not cur_provider.supports_backlog else '' %>
+#if $cur_provider.is_public_access()#
+<span style="font-size:10px;vertical-align:top;font-weight:normal">(PA)</span>
+#end if#
+#if not $cur_provider.supports_backlog#*#end if#
 <span class="ui-icon ui-icon-arrowthick-2-n-s pull-right" style="margin-top:3px"></span>
 </li>
 #end for
@@ -117,10 +120,12 @@
 <div id="provider_key">
-<h4 class="note">*</h4><p class="note">Provider does not support backlog searches at this time</p>
-#if $sickbeard.USE_TORRENTS
-<h4 class="note">**</h4><p class="note">Provider supports <b>limited</b> backlog searches, some episodes/qualities may not be available</p>
-#end if
+<span style="float:left;font-size:10px;vertical-align:top;font-weight:normal">(PA)</span><p class="note">Public access, no account required</p>
+<h4 class="note"></h4><p class="note">Searches current and past releases</p>
+<h4 class="note">*</h4><p class="note">Searches current but not past releases</p>
+## #if $sickbeard.USE_TORRENTS
+## <h4 class="note">**</h4><p class="note">Supports <b>limited</b> backlog searches, some episodes/qualities may not be available</p>
+## #end if
 ##<h4 class="note">!</h4><p class="note">Provider is <b>NOT WORKING</b></p>
 </div>

sickbeard/properFinder.py (4)

@@ -78,6 +78,7 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
     # for each provider get a list of the
     orig_thread_name = threading.currentThread().name
     providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active()]
+    np = NameParser(False, try_scene_exceptions=True)
     for cur_provider in providers:
         if not recent_anime and cur_provider.anime_only:
             continue
@@ -99,7 +100,6 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
     # if they haven't been added by a different provider than add the proper to the list
     count = 0
-    np = NameParser(False, try_scene_exceptions=True)
     for x in found_propers:
         name = _generic_name(x.name)
         if name not in propers:
@@ -125,6 +125,8 @@ def _get_proper_list(aired_since_shows, recent_shows, recent_anime):
     for cur_proper in sorted_propers:
+        parse_result = np.parse(cur_proper.name)
         # set the indexerid in the db to the show's indexerid
         cur_proper.indexerid = parse_result.show.indexerid

sickbeard/providers/__init__.py (12)

@@ -26,8 +26,10 @@ from sickbeard import logger, encodingKludge as ek
 # usenet
 from . import newznab, omgwtfnzbs, womble
 # torrent
-from . import alpharatio, beyondhd, bithdtv, bitmetv, btn, dh, fano, filelist, freshontv, funfile, gftracker, grabtheinfo, \
-    hd4free, hdbits, hdspace, ilt, iptorrents, morethan, pisexy, pretome, privatehd, ptf, rarbg, revtt, scc, scenetime, shazbat, speedcd, \
+from . import alpharatio, beyondhd, bithdtv, bitmetv, btn, btscene, dh, extratorrent, \
+    fano, filelist, freshontv, funfile, gftracker, grabtheinfo, hd4free, hdbits, hdspace, \
+    ilt, iptorrents, limetorrents, morethan, ncore, pisexy, pretome, privatehd, ptf, \
+    rarbg, revtt, scc, scenetime, shazbat, speedcd, \
     thepiratebay, torrentbytes, torrentday, torrenting, torrentleech, torrentshack, transmithe_net, tvchaosuk, zooqle
 # anime
 from . import anizb, nyaatorrents, tokyotoshokan
@@ -45,8 +47,10 @@ __all__ = ['omgwtfnzbs',
            'bithdtv',
            'bitmetv',
            'btn',
+           'btscene',
            'custom01',
            'dh',
+           'extratorrent',
            'fano',
            'filelist',
            'freshontv',
@@ -58,7 +62,9 @@ __all__ = ['omgwtfnzbs',
            'hdspace',
            'ilt',
            'iptorrents',
+           'limetorrents',
            'morethan',
+           'ncore',
            'pisexy',
            'pretome',
            'privatehd',
@@ -227,7 +233,7 @@ def getDefaultNewznabProviders():
     return '!!!'.join(['Sick Beard Index|http://lolo.sickbeard.com/|0|5030,5040|0|eponly|0|0|0',
                        'NZBgeek|https://api.nzbgeek.info/||5030,5040|0|eponly|0|0|0',
                        'NZBs.org|https://nzbs.org/||5030,5040|0|eponly|0|0|0',
-                       'Usenet-Crawler|https://www.usenet-crawler.com/||5030,5040|0|eponly|0|0|0'])
+                       ])

 def getProviderModule(name):
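New providers hook in twice here: the module import at the top and the module name in `__all__`; `getProviderModule(name)` then resolves a provider's name back to its module. A minimal sketch of that lookup, assuming the `sickbeard.providers.` prefix (the helper name and details are illustrative, not SickGear's exact code):

    import importlib

    def get_provider_module(name, prefix='sickbeard.providers.'):
        # hypothetical stand-in for getProviderModule(): map a provider name
        # such as 'btscene' to its module via the __all__ registry
        name = name.lower()
        if name in __all__:
            return importlib.import_module(prefix + name)
        raise Exception('Cannot find %s%s in provider list' % (prefix, name))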

sickbeard/providers/alpharatio.py (20)

@@ -35,7 +35,7 @@ class AlphaRatioProvider(generic.TorrentProvider):
         self.url_base = 'https://alpharatio.cc/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'login': self.url_base + 'login.php',
+                     'login_action': self.url_base + 'login.php',
                      'search': self.url_base + 'torrents.php?searchstr=%s%s&' + '&'.join(
                          ['tags_type=1', 'order_by=time', 'order_way=desc'] +
                          ['filter_cat[%s]=1' % c for c in 1, 2, 3, 4, 5] +
@@ -48,8 +48,8 @@ class AlphaRatioProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

-        return super(AlphaRatioProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies('session')),
-                                                           post_params={'keeplogged': '1', 'login': 'Login'})
+        return super(AlphaRatioProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies('session')),
+                                                           post_params={'keeplogged': '1', 'form_tmpl': True})

     def _search_provider(self, search_params, **kwargs):
@@ -73,7 +73,7 @@ class AlphaRatioProvider(generic.TorrentProvider):
                         raise generic.HaltParseException

                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                        torrent_table = soup.find('table', attrs={'id': 'torrent_table'})
+                        torrent_table = soup.find(id='torrent_table')
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                         if 2 > len(torrent_rows):
@@ -82,14 +82,12 @@ class AlphaRatioProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue

                                 title = tr.find('a', title=rc['info']).get_text().strip()
-                                link = str(tr.find('a', title=rc['get'])['href']).replace('&amp;', '&').lstrip('/')
-                                download_url = self.urls['get'] % link
+                                download_url = self._link(tr.find('a', title=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue
@@ -98,13 +96,11 @@ class AlphaRatioProvider(generic.TorrentProvider):
                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, search_url)

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results
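A recurring style change in this commit drops the parentheses around index tuples, e.g. `for x in (-2, -1, -4)` becomes `for x in -2, -1, -4`. In this Python 2 codebase both spellings are equivalent inside a comprehension's for clause (Python 3 rejects the bare form); a quick check:

    # Python 2: a bare tuple is legal after 'for ... in' within a comprehension
    row = ['Show.S01E01', '1.2 GB', '15', '3']
    assert [row[x] for x in -2, -1, -4] == [row[x] for x in (-2, -1, -4)]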

sickbeard/providers/beyondhd.py (8)

@@ -71,7 +71,7 @@ class BeyondHDProvider(generic.TorrentProvider):
                 search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                 search_url = self.urls['browse'] % (self.passkey, self.categories[mode_cats])
                 if 'Cache' != mode:
-                    search_url += self.urls['search'] % re.sub('[\.\s]+', ' ', search_string)
+                    search_url += self.urls['search'] % re.sub('[.\s]+', ' ', search_string)

                 data_json = self.get_url(search_url, json=True)
@@ -82,16 +82,14 @@ class BeyondHDProvider(generic.TorrentProvider):
                         seeders, leechers = item.get('seeders', 0), item.get('leechers', 0)
                         if self._peers_fail(mode, seeders, leechers):
                             continue
-                        title, download_url = item.get('file'), item.get('get')
+                        title, download_url = item.get('file'), self._link(item.get('get'))
                         if title and download_url:
                             items[mode].append((title, download_url, seeders, self._bytesizer(item.get('size'))))

                 time.sleep(1.1)
                 self._log_search(mode, len(items[mode]) - cnt, search_url)

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results

sickbeard/providers/bithdtv.py (16)

@@ -44,7 +44,7 @@ class BitHDTVProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

         return super(BitHDTVProvider, self)._authorised(
-            logged_in=(lambda x=None: self.has_all_cookies(['h_sl', 'h_sp', 'h_su']))) and 'search' in self.urls
+            logged_in=(lambda y=None: self.has_all_cookies(['h_sl', 'h_sp', 'h_su']))) and 'search' in self.urls

     @staticmethod
     def _has_signature(data=None):
@@ -82,15 +82,15 @@ class BitHDTVProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    tr.find_all('td')[x].get_text().strip() for x in (-3, -2, -5)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -3, -2, -5]]
                                 if self.freeleech and not tr.attrs.get('bgcolor').endswith('FF99') or \
                                         self._peers_fail(mode, seeders, leechers):
                                     continue

                                 info = tr.find('a', href=rc['info'])
-                                title = (info.attrs.get('title') or info.contents[0].get_text()).strip()
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-                            except (AttributeError, TypeError, ValueError):
+                                title = (info.attrs.get('title') or info.get_text()).strip()
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
+                            except (AttributeError, TypeError, ValueError, KeyError):
                                 continue

                             if title and download_url:
@@ -98,14 +98,12 @@ class BitHDTVProvider(generic.TorrentProvider):
                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, search_url)

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results

sickbeard/providers/bitmetv.py (16)

@@ -46,9 +46,9 @@ class BitmetvProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

         return super(BitmetvProvider, self)._authorised(
-            logged_in=(lambda x=None: (None is x or 'Other Links' in x) and self.has_all_cookies() and
-                       self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
-            failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings'))
+            logged_in=(lambda y=None: (None is y or 'Other Links' in y) and self.has_all_cookies() and
+                       self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
+            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))

     def _search_provider(self, search_params, **kwargs):
@@ -81,13 +81,13 @@ class BitmetvProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -5)]]
+                                    (tr.find_all('td')[x].get_text().strip()) for x in -3, -2, -5]]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue

                                 info = tr.find('a', href=rc['info'])
-                                title = info.attrs.get('title') or info.get_text().strip()
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                                title = (info.attrs.get('title') or info.get_text()).strip()
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue
@@ -96,14 +96,12 @@ class BitmetvProvider(generic.TorrentProvider):
                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, search_url)

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results

sickbeard/providers/btn.py (53)

@@ -75,15 +75,18 @@ class BTNProvider(generic.TorrentProvider):
         try:
             response = helpers.getURL(self.url_api, post_data=json_rpc(params), session=self.session, json=True)
             error_text = response['error']['message']
-            logger.log(('Call Limit' in error_text and u'Action aborted because the %(prov)s 150 calls/hr limit was reached' or
-                        u'Action prematurely ended. %(prov)s server error response = %(desc)s') % {'prov': self.name, 'desc': error_text}, logger.WARNING)
+            logger.log(
+                ('Call Limit' in error_text
+                 and u'Action aborted because the %(prov)s 150 calls/hr limit was reached'
+                 or u'Action prematurely ended. %(prov)s server error response = %(desc)s') %
+                {'prov': self.name, 'desc': error_text}, logger.WARNING)
             return results
-        except:
+        except (KeyError, Exception):
             data_json = response and 'result' in response and response['result'] or {}

         if data_json:

-            found_torrents = {} if 'torrents' not in data_json else data_json['torrents']
+            found_torrents = 'torrents' in data_json and data_json['torrents'] or {}

             # We got something, we know the API sends max 1000 results at a time.
             # See if there are more than 1000 results for our query, if not we
@@ -101,37 +104,45 @@ class BTNProvider(generic.TorrentProvider):
             for page in range(1, pages_needed + 1):
                 try:
-                    response = helpers.getURL(self.url_api, json=True, session=self.session,
-                                              post_data=json_rpc(params, results_per_page, page * results_per_page))
+                    response = helpers.getURL(
+                        self.url_api, json=True, session=self.session,
+                        post_data=json_rpc(params, results_per_page, page * results_per_page))
                     error_text = response['error']['message']
-                    logger.log(('Call Limit' in error_text and u'Action prematurely ended because the %(prov)s 150 calls/hr limit was reached' or
-                                u'Action prematurely ended. %(prov)s server error response = %(desc)s') % {'prov': self.name, 'desc': error_text}, logger.WARNING)
+                    logger.log(
+                        ('Call Limit' in error_text
+                         and u'Action prematurely ended because the %(prov)s 150 calls/hr limit was reached'
+                         or u'Action prematurely ended. %(prov)s server error response = %(desc)s') %
+                        {'prov': self.name, 'desc': error_text}, logger.WARNING)
                     return results
-                except:
+                except (KeyError, Exception):
                     data_json = response and 'result' in response and response['result'] or {}

-                # Note that this these are individual requests and might time out individually. This would result in 'gaps'
-                # in the results. There is no way to fix this though.
+                # Note that this these are individual requests and might time out individually.
+                # This would result in 'gaps' in the results. There is no way to fix this though.
                 if 'torrents' in data_json:
                     found_torrents.update(data_json['torrents'])

         cnt = len(results)
         for torrentid, torrent_info in found_torrents.iteritems():
-            seeders, leechers = [tryInt(n) for n in torrent_info.get('Seeders'), torrent_info.get('Leechers')]
+            seeders, leechers, size = (tryInt(n, n) for n in [torrent_info.get(x) for x in
+                                                              'Seeders', 'Leechers', 'Size'])
             if self._peers_fail(mode, seeders, leechers) or \
                     self.reject_m2ts and re.match(r'(?i)m2?ts', torrent_info.get('Container', '')):
                 continue

-            title, url = self._title_and_url(torrent_info)
+            title, url = self._get_title_and_url(torrent_info)
             if title and url:
-                results.append(torrent_info)
+                results.append((title, url, seeders, self._bytesizer(size)))

         self._log_search(mode, len(results) - cnt,
                          ('search_param: ' + str(search_param), self.name)['Cache' == mode])

+        results = self._sort_seeding(mode, results)
         return results

-    def _title_and_url(self, data_json):
+    @staticmethod
+    def _get_title_and_url(data_json):

         # The BTN API gives a lot of information in response,
         # however SickGear is built mostly around Scene or
@@ -189,7 +200,7 @@ class BTNProvider(generic.TorrentProvider):
                 series_param.update(base_params)
                 search_params.append(series_param)

-        return [dict({'Season': search_params})]
+        return [dict(Season=search_params)]

     def _episode_strings(self, ep_obj, **kwargs):
@@ -231,7 +242,7 @@ class BTNProvider(generic.TorrentProvider):
                 series_param.update(base_params)
                 search_params.append(series_param)

-        return [dict({'Episode': search_params})]
+        return [dict(Episode=search_params)]

     def cache_data(self, **kwargs):
@@ -246,11 +257,11 @@ class BTNProvider(generic.TorrentProvider):
         # Set maximum to 24 hours (24 * 60 * 60 = 86400 seconds) of "RSS" data search,
         # older items will be done through backlog
         if 86400 < seconds_since_last_update:
-            logger.log(u'Only trying to fetch the last 24 hours even though the last known successful update on %s was over 24 hours'
-                       % self.name, logger.WARNING)
+            logger.log(u'Only trying to fetch the last 24 hours even though the last known successful update on ' +
+                       '%s was over 24 hours' % self.name, logger.WARNING)
             seconds_since_last_update = 86400

-        return self._search_provider(dict({'Cache': ['']}), age=seconds_since_last_update)
+        return self._search_provider(dict(Cache=['']), age=seconds_since_last_update)

 class BTNCache(tvcache.TVCache):
@@ -258,7 +269,7 @@ class BTNCache(tvcache.TVCache):
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)

-        self.update_freq = 15  # cache update frequency
+        self.update_freq = 15

     def _cache_data(self):
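The extraction above leans on `sickbeard.helpers.tryInt(n, n)`: coerce to int, else return the supplied default, so seeder/leecher counts become ints while a 'Size' string passes through unchanged to `_bytesizer`. A minimal equivalent, for illustration only:

    def try_int(s, s_default=0):
        # sketch of helpers.tryInt: int if possible, otherwise the default;
        # tryInt(n, n) therefore means 'int if possible, otherwise unchanged'
        try:
            return int(s)
        except (StandardError, Exception):
            return s_default

    assert 42 == try_int('42')
    assert '1.2 GB' == try_int('1.2 GB', '1.2 GB')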

sickbeard/providers/btscene.py (117, new file)

@@ -0,0 +1,117 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import re
import traceback
import urllib

from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode


class BTSceneProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'BTScene')

        self.url_home = ['http://www.btstorrent.cc/', 'http://bittorrentstart.com/',
                         'http://diriri.xyz/', 'http://mytorrentz.tv/']

        self.url_vars = {'search': 'results.php?q=%s&category=series&order=1', 'browse': 'lastdaycat/type/Series/',
                         'get': 'torrentdownload.php?id=%s'}
        self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(home)s%(vars)s',
                         'browse': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}

        self.minseed, self.minleech = 2 * [None]
        self.confirmed = False

    @staticmethod
    def _has_signature(data=None):
        return data and re.search(r'(?i)(?:btscene|bts[-]official|full\sindex)', data)

    def _search_provider(self, search_params, **kwargs):

        results = []
        if not self.url:
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
            'info': '\w+?(\d+)[.]html', 'verified': 'Verified'}.iteritems())
        for mode in search_params.keys():
            for search_string in search_params[mode]:
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string

                search_url = self.urls['browse'] if 'Cache' == mode \
                    else self.urls['search'] % (urllib.quote_plus(search_string))

                html = self.get_url(search_url)

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException

                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_rows = soup.select('tr[class$="_tr"]')

                        if not len(torrent_rows):
                            raise generic.HaltParseException

                        for tr in torrent_rows:
                            try:
                                seeders, leechers, size = [tryInt(n, n) for n in [
                                    tr.find_all('td')[x].get_text().strip() for x in -4, -3, -5]]
                                if self._peers_fail(mode, seeders, leechers) or \
                                        self.confirmed and not (tr.find('img', src=rc['verified'])
                                                                or tr.find('img', title=rc['verified'])):
                                    continue

                                info = tr.find('a', href=rc['info'])
                                title = info and info.get_text().strip()
                                tid_href = info and rc['info'].findall(info['href'])
                                tid_href = tid_href and tryInt(tid_href[0], 0) or 0
                                tid_tr = tryInt(tr['id'].strip('_'), 0)
                                tid = (tid_tr, tid_href)[tid_href > tid_tr]

                                download_url = info and (self.urls['get'] % tid)
                            except (AttributeError, TypeError, ValueError, IndexError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    pass
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                self._log_search(mode, len(items[mode]) - cnt, search_url)

            results = self._sort_seeding(mode, results + items[mode])

        return results

    def _episode_strings(self, ep_obj, **kwargs):

        return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='.', **kwargs)


provider = BTSceneProvider()
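The `url_home`/`url_vars`/`url_tmpl` trio supports mirror failover: each entry of `url_tmpl` is presumably expanded with a reachable `url_home` mirror plus the matching `url_vars` fragment to build `self.urls`. A rough sketch of that expansion (the helper below is illustrative, not SickGear's actual builder):

    import urllib

    def build_urls(home, url_vars, url_tmpl):
        # expand each template with the chosen mirror and its path fragment
        return dict((k, url_tmpl[k] % {'home': home, 'vars': v})
                    for (k, v) in url_vars.items() if k in url_tmpl)

    urls = build_urls('http://www.btstorrent.cc/',
                      {'search': 'results.php?q=%s&category=series&order=1'},
                      {'search': '%(home)s%(vars)s'})
    # urls['search'] % urllib.quote_plus('show name') -> a complete search URL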

sickbeard/providers/dh.py (16)

@@ -46,9 +46,9 @@ class DHProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

         return super(DHProvider, self)._authorised(
-            logged_in=(lambda x=None: (None is x or re.search('(?i)rss\slink', x)) and self.has_all_cookies() and
-                       self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
-            failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings'))
+            logged_in=(lambda y=None: (None is y or re.search('(?i)rss\slink', y)) and self.has_all_cookies() and
+                       self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
+            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))

     def _search_provider(self, search_params, **kwargs):
@@ -82,14 +82,12 @@ class DHProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -5)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -3, -2, -5]]
                                 if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']):
                                     continue

                                 title = tr.find('a', href=rc['info']).get_text().strip()
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError, IndexError):
                                 continue
@@ -98,14 +96,12 @@ class DHProvider(generic.TorrentProvider):
                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url'))

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results

sickbeard/providers/extratorrent.py (108, new file)

@@ -0,0 +1,108 @@
# coding=utf-8
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.

import re
import traceback
import urllib

from . import generic
from sickbeard import logger
from sickbeard.bs4_parser import BS4Parser
from sickbeard.helpers import tryInt
from lib.unidecode import unidecode


class ExtraTorrentProvider(generic.TorrentProvider):

    def __init__(self):
        generic.TorrentProvider.__init__(self, 'ExtraTorrent')

        self.url_home = ['https://www.extratorrent%s/' % u for u in '.works', 'live.com', 'online.com', '.cc'] + \
                        ['https://etmirror.com/', 'https://etproxy.com/', 'https://extratorrent.usbypass.xyz/']

        self.url_vars = {'search': 'search/?new=1&search=%s&s_cat=8', 'browse': 'view/today/TV.html',
                         'get': '%s'}
        self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(home)s%(vars)s',
                         'browse': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}

        self.minseed, self.minleech = 2 * [None]

    @staticmethod
    def _has_signature(data=None):
        return data and re.search(r'(?i)ExtraTorrent', data[33:1024:])

    def _search_provider(self, search_params, **kwargs):

        results = []
        if not self.url:
            return results

        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}

        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
            'get': 'download', 'title': '(?:^download|torrent$)', 'get_url': '^/(torrent_)?'}.iteritems())
        for mode in search_params.keys():
            for search_string in search_params[mode]:
                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string

                search_url = self.urls['browse'] if 'Cache' == mode \
                    else self.urls['search'] % (urllib.quote_plus(search_string))

                html = self.get_url(search_url)

                cnt = len(items[mode])
                try:
                    if not html or self._has_no_results(html):
                        raise generic.HaltParseException

                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
                        torrent_table = soup.find('table', class_='tl')
                        torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                        if 2 > len(torrent_rows):
                            raise generic.HaltParseException

                        for tr in torrent_rows[1:]:
                            try:
                                seeders, leechers, size = [tryInt(n.replace('---', '0'), n) for n in [
                                    tr.find_all('td')[x].get_text().strip() for x in -3, -2, -4]]
                                if self._peers_fail(mode, seeders, leechers):
                                    continue

                                info = tr.find('a', title=rc['get']) or {}
                                title = rc['title'].sub('', info.get('title') or '').strip()
                                download_url = self._link(rc['get_url'].sub('', info['href']))
                            except (AttributeError, TypeError, ValueError, IndexError):
                                continue

                            if title and download_url:
                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

                except generic.HaltParseException:
                    pass
                except (StandardError, Exception):
                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                self._log_search(mode, len(items[mode]) - cnt, search_url)

            results = self._sort_seeding(mode, results + items[mode])

        return results


provider = ExtraTorrentProvider()
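`_has_signature` tests only `data[33:1024:]`, skipping roughly the doctype prefix and scanning just the head of the page, which is presumably enough to recognise a live mirror cheaply. For example:

    import re

    html = '<!DOCTYPE html><html><head><title>ExtraTorrent.cc</title>'
    # only an early slice of the page is inspected for the site signature
    assert re.search(r'(?i)ExtraTorrent', html[33:1024:])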

sickbeard/providers/fano.py (14)

@@ -45,7 +45,7 @@ class FanoProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

-        return super(FanoProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['uid', 'pass']))
+        return super(FanoProvider, self)._authorised()

     def _search_provider(self, search_params, **kwargs):
@@ -82,14 +82,12 @@ class FanoProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
                                 if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']):
                                     continue

                                 title = tr.find('a', href=rc['info']).get_text().strip()
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError, IndexError):
                                 continue
@@ -98,14 +96,12 @@ class FanoProvider(generic.TorrentProvider):
                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, search_url)

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results

sickbeard/providers/filelist.py (12)

@@ -78,14 +78,12 @@ class FLProvider(generic.TorrentProvider):
                         for tr in torrent_rows:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    (tr.select('span[style*="cell"]')[x].get_text().strip()) for x in (-3, -2, -5)]]
+                                    tr.select('span[style*="cell"]')[x].get_text().strip() for x in -3, -2, -5]]
                                 if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']):
                                     continue

                                 title = tr.find('a', href=rc['info']).get_text().strip()
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError, IndexError):
                                 continue
@@ -94,14 +92,12 @@ class FLProvider(generic.TorrentProvider):
                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url'))

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results

sickbeard/providers/freshontv.py (23)

@@ -32,7 +32,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
         self.url_base = 'https://freshon.tv/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'login': self.url_base + 'login.php?action=makelogin',
+                     'login_action': self.url_base + 'login.php',
                      'search': self.url_base + 'browse.php?incldead=%s&words=0&%s&search=%s',
                      'get': self.url_base + '%s'}
@@ -45,8 +45,8 @@ class FreshOnTVProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

         return super(FreshOnTVProvider, self)._authorised(
-            post_params={'login': 'Do it!'},
-            failed_msg=(lambda x=None: 'DDoS protection by CloudFlare' in x and
+            post_params={'form_tmpl': True},
+            failed_msg=(lambda y=None: 'DDoS protection by CloudFlare' in y and
                         u'Unable to login to %s due to CloudFlare DDoS javascript check' or
                         'Username does not exist' in x and
                         u'Invalid username or password for %s. Check settings' or
@@ -80,7 +80,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
                         raise generic.HaltParseException

                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                        torrent_table = soup.find('table', attrs={'class': 'frame'})
+                        torrent_table = soup.find('table', class_='frame')
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                         if 2 > len(torrent_rows):
@@ -92,14 +92,13 @@ class FreshOnTVProvider(generic.TorrentProvider):
                                     continue

                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue

-                                info = tr.find('a', href=rc['info'], attrs={'class': rc['name']})
-                                title = info.attrs.get('title') or info.get_text().strip()
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                                info = tr.find('a', href=rc['info'], class_=rc['name'])
+                                title = (info.attrs.get('title') or info.get_text()).strip()
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue
@@ -108,13 +107,11 @@ class FreshOnTVProvider(generic.TorrentProvider):
                 except generic.HaltParseException:
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, search_url)

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results

sickbeard/providers/funfile.py (23)

@@ -32,7 +32,7 @@ class FunFileProvider(generic.TorrentProvider):
         self.url_base = 'https://www.funfile.org/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'login': self.url_base + 'takelogin.php',
+                     'login_action': self.url_base + 'login.php',
                      'search': self.url_base + 'browse.php?%s&search=%s&incldead=0&showspam=1&',
                      'get': self.url_base + '%s'}
@@ -45,9 +45,9 @@ class FunFileProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

         return super(FunFileProvider, self)._authorised(
-            logged_in=(lambda x=None: None is not self.session.cookies.get('uid', domain='.funfile.org') and
-                       None is not self.session.cookies.get('pass', domain='.funfile.org')),
-            post_params={'login': 'Login', 'returnto': '/'}, timeout=self.url_timeout)
+            logged_in=(lambda y=None: all(
+                [None is not self.session.cookies.get(x, domain='.funfile.org') for x in 'uid', 'pass'])),
+            post_params={'form_tmpl': True}, timeout=self.url_timeout)

     def _search_provider(self, search_params, **kwargs):
@@ -72,7 +72,7 @@ class FunFileProvider(generic.TorrentProvider):
                         raise generic.HaltParseException

                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                        torrent_table = soup.find('td', attrs={'class': 'colhead'}).find_parent('table')
+                        torrent_table = soup.find('td', class_='colhead').find_parent('table')
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                         if 2 > len(torrent_rows):
@@ -85,13 +85,12 @@ class FunFileProvider(generic.TorrentProvider):
                                     continue

                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
                                 if None is tr.find('a', href=rc['cats']) or self._peers_fail(mode, seeders, leechers):
                                     continue

-                                title = info.attrs.get('title') or info.get_text().strip()
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                                title = (info.attrs.get('title') or info.get_text()).strip()
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue
@@ -100,14 +99,12 @@ class FunFileProvider(generic.TorrentProvider):
                 except (generic.HaltParseException, AttributeError):
                     pass
-                except Exception:
+                except (StandardError, Exception):
                     logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
                 self._log_search(mode, len(items[mode]) - cnt, search_url)

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results

sickbeard/providers/generic.py (105)

@@ -33,6 +33,7 @@ import sickbeard
 import requests
 import requests.cookies
 from hachoir_parser import guessParser
+from hachoir_core.error import HachoirError
 from hachoir_core.stream import FileInputStream
 from sickbeard import helpers, classes, logger, db, tvcache, encodingKludge as ek
@@ -77,7 +78,8 @@ class GenericProvider:
         self.headers = {
             # Using USER_AGENT instead of Mozilla to keep same user agent along authentication and download phases,
             # otherwise session might be broken and download fail, asking again for authentication
-            # 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36'}
+            # 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' +
+            #               'Chrome/32.0.1700.107 Safari/537.36'}
             'User-Agent': USER_AGENT}

     def get_id(self):
@@ -99,9 +101,17 @@ class GenericProvider:
     def _authorised(self):
         return True

-    def _check_auth(self):
+    def _check_auth(self, is_required=None):
         return True

+    def is_public_access(self):
+        try:
+            return bool(re.search('(?i)rarbg|sick|womble|anizb', self.name)) \
+                   or False is bool(('_authorised' in self.__class__.__dict__ or hasattr(self, 'digest')
+                                     or self._check_auth(is_required=True)))
+        except AuthException:
+            return False
+
     def is_active(self):
         if GenericProvider.NZB == self.providerType and sickbeard.USE_NZBS:
             return self.is_enabled()
@@ -176,7 +186,7 @@ class GenericProvider:
                 urls = ['http%s://%s/torrent/%s.torrent' % (u + (torrent_hash,))
                         for u in (('s', 'itorrents.org'), ('s', 'torra.pro'), ('s', 'torra.click'),
                                   ('s', 'torrentproject.se'), ('', 'thetorrent.org'))]
-            except:
+            except (StandardError, Exception):
                 link_type = 'torrent'
                 urls = [result.url]
@@ -204,7 +214,7 @@ class GenericProvider:
                     try:
                         helpers.moveFile(cache_file, final_file)
                         msg = 'moved'
-                    except:
+                    except (OSError, Exception):
                         msg = 'copied cached file'
                     logger.log(u'Saved %s link and %s to %s' % (link_type, msg, final_file))
                     saved = True
@@ -234,13 +244,13 @@ class GenericProvider:
         try:
             stream = FileInputStream(file_name)
             parser = guessParser(stream)
-        except:
+        except (HachoirError, Exception):
             pass
         result = parser and 'application/x-bittorrent' == parser.mime_type
         try:
             stream._input.close()
-        except:
+        except (HachoirError, Exception):
             pass

         return result
@@ -282,7 +292,7 @@ class GenericProvider:
         try:
             title, url = isinstance(item, tuple) and (item[0], item[1]) or \
                 (item.get('title', None), item.get('link', None))
-        except Exception:
+        except (StandardError, Exception):
             pass

         title = title and re.sub(r'\s+', '.', u'%s' % title)
@@ -290,6 +300,15 @@ class GenericProvider:
         return title, url

+    def _link(self, url, url_tmpl=None):
+
+        url = url and str(url).strip().replace('&amp;', '&') or ''
+        try:
+            url_tmpl = url_tmpl or self.urls['get']
+        except (StandardError, Exception):
+            url_tmpl = '%s'
+        return url if re.match('(?i)https?://', url) else (url_tmpl % url.lstrip('/'))
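The new `_link` helper centralises what providers previously hand-rolled per site: unescape `&amp;`, strip a leading slash, and join relative hrefs through the provider's `urls['get']` template while passing absolute URLs through untouched. A standalone sketch of the same logic (the stub class and URL are illustrative):

    import re

    class LinkDemo(object):
        urls = {'get': 'https://example.tracker/%s'}  # stand-in 'get' template

        def _link(self, url, url_tmpl=None):
            url = url and str(url).strip().replace('&amp;', '&') or ''
            url_tmpl = url_tmpl or self.urls['get']
            return url if re.match('(?i)https?://', url) else (url_tmpl % url.lstrip('/'))

    demo = LinkDemo()
    assert 'https://example.tracker/download.php?id=1&f=x' == demo._link('/download.php?id=1&amp;f=x')
    assert 'http://other.site/t.torrent' == demo._link('http://other.site/t.torrent')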
def find_search_results(self, show, episodes, search_mode, manual_search=False): def find_search_results(self, show, episodes, search_mode, manual_search=False):
self._check_auth() self._check_auth()
@ -391,8 +410,9 @@ class GenericProvider:
logger.log(u'The result ' + title + u' doesn\'t seem to be a valid season that we are trying' + logger.log(u'The result ' + title + u' doesn\'t seem to be a valid season that we are trying' +
u' to snatch, ignoring', logger.DEBUG) u' to snatch, ignoring', logger.DEBUG)
add_cache_entry = True add_cache_entry = True
elif len(parse_result.episode_numbers) and not [ep for ep in episodes if elif len(parse_result.episode_numbers) and not [
ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]: ep for ep in episodes if ep.season == parse_result.season_number and
ep.episode in parse_result.episode_numbers]:
logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying' + logger.log(u'The result ' + title + ' doesn\'t seem to be a valid episode that we are trying' +
u' to snatch, ignoring', logger.DEBUG) u' to snatch, ignoring', logger.DEBUG)
add_cache_entry = True add_cache_entry = True
@ -409,8 +429,8 @@ class GenericProvider:
else: else:
airdate = parse_result.air_date.toordinal() airdate = parse_result.air_date.toordinal()
my_db = db.DBConnection() my_db = db.DBConnection()
sql_results = my_db.select('SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?', sql_results = my_db.select('SELECT season, episode FROM tv_episodes ' +
[show_obj.indexerid, airdate]) 'WHERE showid = ? AND airdate = ?', [show_obj.indexerid, airdate])
if 1 != len(sql_results): if 1 != len(sql_results):
logger.log(u'Tried to look up the date for the episode ' + title + ' but the database didn\'t' + logger.log(u'Tried to look up the date for the episode ' + title + ' but the database didn\'t' +
@ -507,6 +527,7 @@ class GenericProvider:
def log_result(self, mode='Cache', count=0, url='url missing'): def log_result(self, mode='Cache', count=0, url='url missing'):
""" """
Simple function to log the result of any search Simple function to log the result of any search
:param mode: string that this log relates to
:param count: count of successfully processed items :param count: count of successfully processed items
:param url: source url of item(s) :param url: source url of item(s)
""" """
@ -541,8 +562,8 @@ class GenericProvider:
def has_all_cookies(self, cookies=None, pre=''): def has_all_cookies(self, cookies=None, pre=''):
cookies = cookies or ['uid', 'pass'] cookies = cookies and ([cookies], cookies)[isinstance(cookies, list)] or ['uid', 'pass']
return False not in ['%s%s' % (pre, item) in self.session.cookies for item in ([cookies], cookies)[isinstance(cookies, list)]] return all(['%s%s' % (pre, item) in self.session.cookies for item in cookies])
def _categories_string(self, mode='Cache', template='c%s=1', delimiter='&'): def _categories_string(self, mode='Cache', template='c%s=1', delimiter='&'):
@@ -558,7 +579,7 @@ class GenericProvider:
     def _bytesizer(size_dim=''):

         try:
-            value = float('.'.join(re.findall('(?i)(\d+)(?:[\.,](\d+))?', size_dim)[0]))
+            value = float('.'.join(re.findall('(?i)(\d+)(?:[.,](\d+))?', size_dim)[0]))
         except TypeError:
             return size_dim
         except IndexError:
@@ -587,7 +608,7 @@ class NZBProvider(object, GenericProvider):
             return (getattr(self, 'key', '') and self.key) or (getattr(self, 'api_key', '') and self.api_key) or None
         return False

-    def _check_auth(self):
+    def _check_auth(self, is_required=None):

         has_key = self.maybe_apikey()
         if has_key:
@@ -703,9 +724,16 @@ class TorrentProvider(object, GenericProvider):

     @staticmethod
     def _sort_seeders(mode, items):
+        """ legacy function used by a custom provider, do not remove """
         mode in ['Season', 'Episode'] and items[mode].sort(key=lambda tup: tup[2], reverse=True)

+    @staticmethod
+    def _sort_seeding(mode, items):
+
+        if mode in ['Season', 'Episode']:
+            return sorted(set(items), key=lambda tup: tup[2], reverse=True)
+        return items
+
     def _peers_fail(self, mode, seeders=0, leechers=0):

         return 'Cache' != mode and (seeders < getattr(self, 'minseed', 0) or leechers < getattr(self, 'minleech', 0))
@@ -744,7 +772,7 @@ class TorrentProvider(object, GenericProvider):
         ep_dict = self._ep_dict(ep_obj)
         sp_detail = (show.air_by_date or show.is_sports) and str(ep_obj.airdate).split('-')[0] or \
                     (show.is_anime and ep_obj.scene_absolute_number or
-                     'S%(seasonnumber)02d' % ep_dict if 'sp_detail' not in kwargs.keys() else kwargs['sp_detail'](ep_dict))
+                     ('sp_detail' in kwargs.keys() and kwargs['sp_detail'](ep_dict)) or 'S%(seasonnumber)02d' % ep_dict)
         sp_detail = ([sp_detail], sp_detail)[isinstance(sp_detail, list)]
         detail = ({}, {'Season_only': sp_detail})[detail_only and not self.show.is_sports and not self.show.is_anime]
         return [dict({'Season': self._build_search_strings(sp_detail, scene, prefix)}.items() + detail.items())]
@@ -792,7 +820,7 @@ class TorrentProvider(object, GenericProvider):
         prefix = ([prefix], prefix)[isinstance(prefix, list)]

         search_params = []
-        crop = re.compile(r'([\.\s])(?:\1)+')
+        crop = re.compile(r'([.\s])(?:\1)+')
         for name in set(allPossibleShowNames(self.show)):
             if process_name:
                 name = helpers.sanitizeSceneName(name)
@@ -861,11 +889,14 @@ class TorrentProvider(object, GenericProvider):
     def _authorised(self, logged_in=None, post_params=None, failed_msg=None, url=None, timeout=30):

-        maxed_out = (lambda x: re.search(r'(?i)[1-3]((<[^>]+>)|\W)*(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)', x))
+        maxed_out = (lambda y: re.search(r'(?i)[1-3]((<[^>]+>)|\W)*' +
+                                         '(attempts|tries|remain)[\W\w]{,40}?(remain|left|attempt)', y))
         logged_in, failed_msg = [None is not a and a or b for (a, b) in (
-            (logged_in, (lambda x=None: self.has_all_cookies())),
-            (failed_msg, (lambda x='': maxed_out(x) and u'Urgent abort, running low on login attempts. Password flushed to prevent service disruption to %s.' or
-                          (re.search(r'(?i)(username|password)((<[^>]+>)|\W)*(or|and|/|\s)((<[^>]+>)|\W)*(password|incorrect)', x) and
+            (logged_in, (lambda y=None: self.has_all_cookies())),
+            (failed_msg, (lambda y='': maxed_out(y) and u'Urgent abort, running low on login attempts. ' +
+                          u'Password flushed to prevent service disruption to %s.' or
+                          (re.search(r'(?i)(username|password)((<[^>]+>)|\W)*' +
+                                     '(or|and|/|\s)((<[^>]+>)|\W)*(password|incorrect)', y) and
                            u'Invalid username or password for %s. Check settings' or
                            u'Failed to authenticate or parse a response from %s, abort provider')))
         )]
@@ -896,17 +927,25 @@ class TorrentProvider(object, GenericProvider):
             if url:
                 response = helpers.getURL(url, session=self.session)
                 try:
-                    action = re.findall('[<]form[\w\W]+?action=[\'\"]([^\'\"]+)', response)[0]
+                    post_params = isinstance(post_params, type({})) and post_params or {}
+                    form = 'form_tmpl' in post_params and post_params.pop('form_tmpl')
+                    if form:
+                        form = re.findall(
+                            '(?is)(<form[^>]+%s.*?</form>)' % (True is form and 'login' or form), response)
+                        response = form and form[0] or response
+
+                    action = re.findall('<form[^>]+action=[\'"]([^\'"]*)', response)[0]
                     url = action if action.startswith('http') else \
+                        url if not action else \
+                        (url + action) if action.startswith('?') else \
                         (self.urls.get('login_base') or self.urls['config_provider_home_uri']) + action.lstrip('/')

-                    tags = re.findall(r'(?is)(<input.*?name=[\'\"][^\'\"]+[\'\"].*?>)', response)
+                    tags = re.findall(r'(?is)(<input.*?name=[\'"][^\'"]+[^>]*)', response)
                     nv = [(tup[0]) for tup in [
-                        re.findall(r'(?is)name=[\'\"]([^\'\"]+)[\'\"](?:.*?value=[\'\"]([^\'\"]+)[\'\"])?', x)
+                        re.findall(r'(?is)name=[\'"]([^\'"]+)(?:[^>]*?value=[\'"]([^\'"]+))?', x)
                         for x in tags]]
                     for name, value in nv:
                         if name not in ('username', 'password'):
-                            post_params = isinstance(post_params, type({})) and post_params or {}
                             post_params.setdefault(name, value)
                 except KeyError:
                     return super(TorrentProvider, self)._authorised()
@@ -936,7 +975,7 @@ class TorrentProvider(object, GenericProvider):
         return False

-    def _check_auth(self):
+    def _check_auth(self, is_required=False):

         if hasattr(self, 'username') and hasattr(self, 'password'):
             if self.username and self.password:
@@ -963,7 +1002,7 @@ class TorrentProvider(object, GenericProvider):
                 return True
             setting = 'Passkey'
         else:
-            return GenericProvider._check_auth(self)
+            return not is_required and GenericProvider._check_auth(self)

         raise AuthException('%s for %s is empty in config provider options' % (setting, self.name))
@@ -982,7 +1021,7 @@ class TorrentProvider(object, GenericProvider):
         items = self._search_provider({'Propers': search_terms})

-        clean_term = re.compile(r'(?i)[^a-z1-9\|\.]+')
+        clean_term = re.compile(r'(?i)[^a-z1-9|.]+')
         for proper_term in search_terms:

             proper_check = re.compile(r'(?i)(?:%s)' % clean_term.sub('', proper_term))
@@ -995,10 +1034,10 @@ class TorrentProvider(object, GenericProvider):
     @staticmethod
     def _has_no_results(*html):
-        return re.search(r'(?i)<(?:b|div|h\d|p|span|strong)[^>]*>(?:' +
-                         'your\ssearch\sdid\snot\smatch|' +
-                         'nothing\sfound|' +
-                         '(sorry,\s)?no\storrents\s(found|match)|' +
+        return re.search(r'(?i)<(?:b|div|h\d|p|span|strong)[^>]*>\s*(?:' +
+                         'your\ssearch.*?did\snot\smatch|' +
+                         '(?:nothing|0</b>\s+torrents)\sfound|' +
+                         '(sorry,\s)?no\s(?:results|torrents)\s(found|match)|' +
                          '.*?there\sare\sno\sresults|' +
                          '.*?no\shits\.\sTry\sadding' +
                          ')', html[0])
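
Note on the _sort_seeders/_sort_seeding change above: the old helper sorted items[mode] in place and left de-duplication to a later list(set(...)), which destroyed the ordering; the new helper de-duplicates and sorts in one pass, so the most seeded release really does come first (the "improve probability selecting most seeded release" change). A minimal standalone sketch of the same idea, with illustrative data:

    # Sketch only: de-duplicate result tuples and order them so the highest
    # seed count comes first; tup[2] is the seeders field of the
    # (title, url, seeders, size) tuples that each provider builds.
    def sort_seeding(mode, items):
        if mode in ['Season', 'Episode']:
            return sorted(set(items), key=lambda tup: tup[2], reverse=True)
        return items

    results = sort_seeding('Episode', [
        ('Show.S01E01.720p', 'http://x/1', 5, 100),
        ('Show.S01E01.720p', 'http://x/1', 5, 100),    # exact duplicate collapses via set()
        ('Show.S01E01.1080p', 'http://x/2', 42, 200),
    ])
    assert 42 == results[0][2]  # most seeded release is now first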

13  sickbeard/providers/gftracker.py

@@ -47,7 +47,7 @@ class GFTrackerProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

-        return super(GFTrackerProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies(pre='gft_')),
+        return super(GFTrackerProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies(pre='gft_')),
                                                           url=[self.urls['login_init']])

     def _search_provider(self, search_params, **kwargs):

@@ -90,10 +90,9 @@ class GFTrackerProvider(generic.TorrentProvider):
                                 continue

                             info = tr.find('a', href=rc['info'])
-                            title = ('title' in info.attrs and info['title']) or info.get_text().strip()
+                            title = (info.attrs.get('title') or info.get_text()).strip()
                             size = tr.find_all('td')[-2].get_text().strip()
-
-                            download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                            download_url = self._link(tr.find('a', href=rc['get'])['href'])
                         except (AttributeError, TypeError, ValueError):
                             continue

@@ -102,13 +101,11 @@ class GFTrackerProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

             self._log_search(mode, len(items[mode]) - cnt, search_url)

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results
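
This provider is the first of many in this commit switched from hand-building self.urls['get'] % ... to the new GenericProvider._link() helper. A rough standalone equivalent of its behaviour (the tracker URL template here is illustrative):

    import re

    # Sketch of the _link() idea: absolute URLs pass through untouched; a
    # relative href has '&amp;' unescaped and a leading '/' stripped before
    # being applied to the provider's 'get' template ('%s' is the fallback).
    def link(url, url_tmpl='https://tracker.example/%s'):  # illustrative template
        url = url and str(url).strip().replace('&amp;', '&') or ''
        return url if re.match('(?i)https?://', url) else (url_tmpl % url.lstrip('/'))

    assert 'https://tracker.example/download.php?id=1&k=2' == link('/download.php?id=1&amp;k=2')
    assert 'https://cdn.example/file.torrent' == link('https://cdn.example/file.torrent')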

26  sickbeard/providers/grabtheinfo.py

@@ -81,35 +81,27 @@ class GrabTheInfoProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1 + shows_found:]:
                             try:
-                                info = tr.find('a', href=rc['info'])
-                                if None is info:
-                                    continue
-                                title = (('title' in info.attrs.keys() and info['title']) or info.get_text()).strip()
-
-                                download_url = tr.find('a', href=rc['get'])
-                                if None is download_url:
-                                    continue
-
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -3)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -3]]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue
+
+                                info = tr.find('a', href=rc['info'])
+                                title = (info.attrs.get('title') or info.get_text()).strip()
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError, KeyError):
                                 continue

-                            if title:
-                                items[mode].append((title, self.urls['get'] % str(download_url['href'].lstrip('/')),
-                                                    seeders, self._bytesizer(size)))
+                            if title and download_url:
+                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))

             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

             self._log_search(mode, len(items[mode]) - cnt, search_url)

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results
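
The stats cells are now read with bare negative indices counted from the right of the row (for x in -2, -1, -3), which keeps working when a tracker adds columns on the left. A sketch of the pattern using bs4 directly, with an illustrative try_int standing in for sickbeard.helpers.tryInt:

    from bs4 import BeautifulSoup

    # Illustrative stand-in for tryInt: parsed int, or the default on failure.
    def try_int(value, default=None):
        try:
            return int(value)
        except (TypeError, ValueError):
            return default

    row = BeautifulSoup(
        '<table><tr><td>Name</td><td>700 MB</td><td>12</td><td>3</td></tr></table>',
        'html.parser').tr

    # Count cells from the right: -2 seeders, -1 leechers, -3 size.
    seeders, leechers, size = [try_int(n, n) for n in [
        row.find_all('td')[x].get_text().strip() for x in (-2, -1, -3)]]
    assert (seeders, leechers, size) == (12, 3, '700 MB')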

12  sickbeard/providers/hd4free.py

@@ -52,10 +52,10 @@ class HD4FreeProvider(generic.TorrentProvider):
         for mode in search_params.keys():
             for search_string in search_params[mode]:
                 params['search'] = '+'.join(search_string.split())
-                data_json = self.get_url(self.urls['search'], params=params, json=True)
+                json_resp = self.get_url(self.urls['search'], params=params, json=True)

                 cnt = len(items[mode])
-                for k, item in data_json.items():
+                for k, item in json_resp.items():
                     if 'error' == k or not item.get('total_results'):
                         break
                     seeders, leechers, size = [tryInt(n, n) for n in [
@@ -63,17 +63,15 @@ class HD4FreeProvider(generic.TorrentProvider):
                     if self._peers_fail(mode, seeders, leechers):
                         continue
                     title = item.get('release_name')
-                    download_url = (self.urls['get'] % (item.get('torrentid'), item.get('torrentpass')), None)[
-                        not (item.get('torrentid') and item.get('torrentpass'))]
+                    tid, tpass = [item.get('torrent' + x) for x in 'id', 'pass']
+                    download_url = all([tid, tpass]) and (self.urls['get'] % (tid, tpass))
                     if title and download_url:
                         items[mode].append((title, download_url, seeders, self._bytesizer('%smb' % size)))

                 self._log_search(mode, len(items[mode]) - cnt, self.session.response['url'])
                 time.sleep(1.1)

-                self._sort_seeders(mode, items)
-                results = list(set(results + items[mode]))
+                results = self._sort_seeding(mode, results + items[mode])

         return results
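
The rewritten download_url is deliberately falsy unless both JSON fields are present, so the existing `if title and download_url` guard silently drops incomplete items. A reduced sketch of the pattern (the URL template is illustrative; field names are the ones this provider reads):

    # Sketch: build a URL only when every required JSON field is present;
    # otherwise download_url is False and the item is skipped downstream.
    get_url_tmpl = 'https://tracker.example/download.php?torrent=%s&torrent_pass=%s'

    item = {'release_name': 'Show.S01E01.720p', 'torrentid': 123, 'torrentpass': 'abc'}
    tid, tpass = [item.get('torrent' + x) for x in ('id', 'pass')]
    download_url = all([tid, tpass]) and (get_url_tmpl % (tid, tpass))

    assert 'https://tracker.example/download.php?torrent=123&torrent_pass=abc' == download_url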

15  sickbeard/providers/hdbits.py

@@ -51,7 +51,7 @@ class HDBitsProvider(generic.TorrentProvider):
     def check_auth_from_data(self, parsed_json):

         if 'status' in parsed_json and 5 == parsed_json.get('status') and 'message' in parsed_json:
-            logger.log(u'Incorrect username or password for %s : %s' % (self.name, parsed_json['message']), logger.DEBUG)
+            logger.log(u'Incorrect username or password for %s: %s' % (self.name, parsed_json['message']), logger.DEBUG)
             raise AuthException('Your username or password for %s is incorrect, check your config.' % self.name)
         return True

@@ -120,13 +120,14 @@ class HDBitsProvider(generic.TorrentProvider):
             cnt = len(items[mode])
             for item in json_resp['data']:
                 try:
-                    seeders, leechers, size = [tryInt(n, n) for n in [item.get(x) for x in 'seeders', 'leechers', 'size']]
+                    seeders, leechers, size = [tryInt(n, n) for n in [item.get(x) for x in
+                                                                      'seeders', 'leechers', 'size']]
                     if self._peers_fail(mode, seeders, leechers)\
                             or self.freeleech and re.search('(?i)no', item.get('freeleech', 'no')):
                         continue
                     title = item['name']
                     download_url = self.urls['get'] % urllib.urlencode({'id': item['id'], 'passkey': self.passkey})
                 except (AttributeError, TypeError, ValueError):
                     continue

@@ -136,12 +137,10 @@ class HDBitsProvider(generic.TorrentProvider):
             self._log_search(mode, len(items[mode]) - cnt,
                              ('search_param: ' + str(search_param), self.name)['Cache' == mode])

-            self._sort_seeders(mode, items)
-            if id_search and len(items[mode]):
-                return items[mode]
-
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
+            if id_search and len(results):
+                return results

         return results

28  sickbeard/providers/hdspace.py

@@ -21,6 +21,7 @@ import traceback
 from . import generic
 from sickbeard import logger
 from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
 from lib.unidecode import unidecode

@@ -31,8 +32,9 @@ class HDSpaceProvider(generic.TorrentProvider):
         self.url_base = 'https://hd-space.org/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'login': self.url_base + 'index.php?page=login',
-                     'browse': self.url_base + 'index.php?page=torrents&' + '&'.join(['options=0', 'active=1', 'category=']),
+                     'login_action': self.url_base + 'index.php?page=login',
+                     'browse': self.url_base + 'index.php?page=torrents&' + '&'.join(
+                         ['options=0', 'active=1', 'category=']),
                      'search': '&search=%s',
                      'get': self.url_base + '%s'}

@@ -44,7 +46,8 @@ class HDSpaceProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

-        return super(HDSpaceProvider, self)._authorised(post_params={'uid': self.username, 'pwd': self.password})
+        return super(HDSpaceProvider, self)._authorised(
+            post_params={'uid': self.username, 'pwd': self.password, 'form_tmpl': 'name=[\'"]login[\'"]'})

     def _search_provider(self, search_params, **kwargs):

@@ -71,8 +74,9 @@ class HDSpaceProvider(generic.TorrentProvider):
                     if not html or self._has_no_results(html):
                         raise generic.HaltParseException

-                    with BS4Parser(html, features=['html5lib', 'permissive'], attr='width="100%"\Wclass="lista"') as soup:
-                        torrent_table = soup.find_all('table', attrs={'class': 'lista'})[-1]
+                    with BS4Parser(html, features=['html5lib', 'permissive'],
+                                   attr='width="100%"\Wclass="lista"') as soup:
+                        torrent_table = soup.find_all('table', class_='lista')[-1]
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                         if 2 > len(torrent_rows):
@@ -85,16 +89,16 @@ class HDSpaceProvider(generic.TorrentProvider):
                             if None is downlink:
                                 continue
                             try:
-                                seeders, leechers = [int(x.get_text().strip()) for x in tr.find_all('a', href=rc['peers'])]
+                                seeders, leechers = [tryInt(x.get_text().strip())
+                                                     for x in tr.find_all('a', href=rc['peers'])]
                                 if self._peers_fail(mode, seeders, leechers)\
                                         or self.freeleech and None is tr.find('img', title=rc['fl']):
                                     continue

                                 info = tr.find('a', href=rc['info'])
-                                title = ('title' in info.attrs and info['title']) or info.get_text().strip()
+                                title = (info.attrs.get('title') or info.get_text()).strip()
                                 size = tr.find_all('td')[-5].get_text().strip()
-
-                                download_url = self.urls['get'] % str(downlink['href']).lstrip('/')
+                                download_url = self._link(downlink['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue

@@ -103,13 +107,11 @@ class HDSpaceProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

             self._log_search(mode, len(items[mode]) - cnt, search_url)

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results
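
'form_tmpl' in post_params is the new generic.py mechanism shown earlier: _authorised() pops it, narrows the login page to the matching <form>, and inherits that form's action and hidden inputs, so providers no longer hard-code every field. A condensed sketch of that extraction, reusing the generic.py regexes against illustrative HTML:

    import re

    response = '''
    <form name="login" action="/takelogin.php" method="post">
      <input type="hidden" name="returnto" value="/" />
      <input name="uid" /><input name="pwd" type="password" />
    </form>'''

    # Narrow the response to the named form, as a form_tmpl match would.
    form = re.findall('(?is)(<form[^>]+%s.*?</form>)' % 'name="login"', response)
    response = form and form[0] or response

    # Harvest the form action plus every input's name/value pair.
    action = re.findall('<form[^>]+action=[\'"]([^\'"]*)', response)[0]
    tags = re.findall(r'(?is)(<input.*?name=[\'"][^\'"]+[^>]*)', response)
    nv = [tup[0] for tup in [
        re.findall(r'(?is)name=[\'"]([^\'"]+)(?:[^>]*?value=[\'"]([^\'"]+))?', x) for x in tags]]

    post_params = {'uid': 'user', 'pwd': 'secret'}
    for name, value in nv:
        if name not in ('username', 'password'):
            post_params.setdefault(name, value)   # hidden fields ride along

    assert '/takelogin.php' == action and '/' == post_params['returnto']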

14  sickbeard/providers/ilt.py

@@ -45,7 +45,7 @@ class ILTProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

-        return super(ILTProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['uid', 'pass']))
+        return super(ILTProvider, self)._authorised()

     def _search_provider(self, search_params, **kwargs):

@@ -79,14 +79,12 @@ class ILTProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -5)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -3, -2, -5]]
                                 if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']):
                                     continue

                                 title = tr.find('a', href=rc['info']).get_text().strip()
-
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError, IndexError):
                                 continue

@@ -95,14 +93,12 @@ class ILTProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

             self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url'))

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results

26  sickbeard/providers/iptorrents.py

@@ -21,6 +21,7 @@ import traceback
 from . import generic
 from sickbeard import logger
 from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
 from lib.unidecode import unidecode

@@ -45,9 +46,10 @@ class IPTorrentsProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

         return super(IPTorrentsProvider, self)._authorised(
-            logged_in=(lambda x='': ('RSS Link' in x) and self.has_all_cookies() and
-                       self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
-            failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings'))
+            logged_in=(lambda y='': all(
+                ['RSS Link' in y, self.has_all_cookies()] +
+                [(self.session.cookies.get(x) or 'sg!no!pw') in self.digest for x in 'uid', 'pass'])),
+            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))

     @staticmethod
     def _has_signature(data=None):

@@ -78,8 +80,7 @@ class IPTorrentsProvider(generic.TorrentProvider):
                         raise generic.HaltParseException

                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                        torrent_table = soup.find('table', attrs={'id': 'torrents'}) or \
-                            soup.find('table', attrs={'class': 'torrents'})
+                        torrent_table = soup.find(id='torrents') or soup.find('table', class_='torrents')
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                         if 2 > len(torrent_rows):
@@ -87,16 +88,15 @@ class IPTorrentsProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
-                                seeders, leechers = [int(tr.find('td', attrs={'class': x}).get_text().strip())
-                                                     for x in ('t_seeders', 't_leechers')]
+                                seeders, leechers = [tryInt(tr.find('td', class_='t_' + x).get_text().strip())
+                                                     for x in 'seeders', 'leechers']
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue

                                 info = tr.find('a', href=rc['info'])
-                                title = ('title' in info.attrs and info['title']) or info.get_text().strip()
+                                title = (info.attrs.get('title') or info.get_text()).strip()
                                 size = tr.find_all('td')[-4].get_text().strip()
-
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue

@@ -105,13 +105,11 @@ class IPTorrentsProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

             self._log_search(mode, len(items[mode]) - cnt, search_url)

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results
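
The reworked logged_in lambda folds page-content, cookie-presence, and cookie-digest checks into one all(); the 'sg!no!pw' sentinel ensures a missing cookie can never match the configured digest. Reduced sketch with illustrative values:

    # Sketch: a missing cookie yields the 'sg!no!pw' sentinel, which cannot
    # appear in the user's configured digest string, so all() fails cleanly
    # instead of raising a KeyError.
    cookies = {'uid': '1234'}                      # 'pass' cookie absent
    digest = 'uid=1234; pass=deadbeef'

    logged_in = all(
        [(cookies.get(x) or 'sg!no!pw') in digest for x in ('uid', 'pass')])
    assert logged_in is False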

109  sickbeard/providers/limetorrents.py

@@ -0,0 +1,109 @@
+# coding=utf-8
+#
+# This file is part of SickGear.
+#
+# SickGear is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickGear is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
+
+import re
+import traceback
+import urllib
+
+from . import generic
+from sickbeard import logger
+from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
+from lib.unidecode import unidecode
+
+
+class LimeTorrentsProvider(generic.TorrentProvider):
+
+    def __init__(self):
+        generic.TorrentProvider.__init__(self, 'LimeTorrents')
+
+        self.url_home = ['https://www.limetorrents.cc/', 'https://limetorrents.usbypass.xyz/']
+
+        self.url_vars = {'search': 'search/tv/%s/', 'browse': 'browse-torrents/TV-shows/'}
+        self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(home)s%(vars)s',
+                         'browse': '%(home)s%(vars)s'}
+
+        self.minseed, self.minleech = 2 * [None]
+
+    @staticmethod
+    def _has_signature(data=None):
+        return data and re.search(r'(?i)LimeTorrents', data[33:1024:])
+
+    def _search_provider(self, search_params, **kwargs):
+
+        results = []
+        if not self.url:
+            return results
+
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
+
+        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'get': 'dl'}.iteritems())
+        for mode in search_params.keys():
+            for search_string in search_params[mode]:
+                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
+
+                search_url = self.urls['browse'] if 'Cache' == mode \
+                    else self.urls['search'] % (urllib.quote_plus(search_string))
+
+                html = self.get_url(search_url)
+
+                cnt = len(items[mode])
+                try:
+                    if not html or self._has_no_results(html):
+                        raise generic.HaltParseException
+
+                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
+                        torrent_table = soup.find_all('table', class_='table2')
+                        torrent_rows = [] if not torrent_table else [
+                            t.select('tr[bgcolor]') for t in torrent_table if
+                            all([x in ' '.join(x.get_text() for x in t.find_all('th')).lower() for x in
+                                 ['torrent', 'size']])]
+
+                        if not len(torrent_rows):
+                            raise generic.HaltParseException
+
+                        for tr in torrent_rows[0]:  # 0 = all rows
+                            try:
+                                seeders, leechers, size = [tryInt(n.replace(',', ''), n) for n in [
+                                    tr.find_all('td')[x].get_text().strip() for x in -3, -2, -4]]
+                                if self._peers_fail(mode, seeders, leechers):
+                                    continue
+
+                                anchors = tr.td.find_all('a')
+                                stats = anchors and [len(a.get_text()) for a in anchors]
+                                title = stats and anchors[stats.index(max(stats))].get_text().strip()
+                                download_url = self._link((tr.td.find('a', class_=rc['get']) or {}).get('href'))
+                            except (AttributeError, TypeError, ValueError, IndexError):
+                                continue
+
+                            if title and download_url:
+                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))
+
+                except generic.HaltParseException:
+                    pass
+                except (StandardError, Exception):
+                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+
+                self._log_search(mode, len(items[mode]) - cnt, search_url)
+
+                results = self._sort_seeding(mode, results + items[mode])
+
+        return results
+
+
+provider = LimeTorrentsProvider()
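
Name cells on this site hold several anchors (download icon, title, comment count), so the parser keeps the anchor with the longest text as the release title. Standalone sketch with illustrative HTML:

    from bs4 import BeautifulSoup

    cell = BeautifulSoup(
        '<td><a href="/dl">DL</a>'
        '<a href="/t/1">Show.Name.S01E01.720p.HDTV.x264</a>'
        '<a href="/c/1">3</a></td>', 'html.parser').td

    anchors = cell.find_all('a')
    stats = anchors and [len(a.get_text()) for a in anchors]
    # the index of the longest text picks out the real title anchor
    title = stats and anchors[stats.index(max(stats))].get_text().strip()
    assert 'Show.Name.S01E01.720p.HDTV.x264' == title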

22  sickbeard/providers/morethan.py

@@ -34,7 +34,7 @@ class MoreThanProvider(generic.TorrentProvider):
         self.url_base = 'https://www.morethan.tv/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'login': self.url_base + 'login.php',
+                     'login_action': self.url_base + 'login.php',
                      'search': self.url_base + 'torrents.php?searchstr=%s&' + '&'.join([
                          'tags_type=1', 'order_by=time', 'order_way=desc',
                          'filter_cat[2]=1', 'action=basic', 'searchsubmit=1']),

@@ -46,8 +46,8 @@ class MoreThanProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

-        return super(MoreThanProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies('session')),
-                                                         post_params={'keeplogged': '1', 'login': 'Log in'})
+        return super(MoreThanProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies('session')),
+                                                         post_params={'keeplogged': '1', 'form_tmpl': True})

     def _search_provider(self, search_params, **kwargs):

@@ -72,7 +72,7 @@ class MoreThanProvider(generic.TorrentProvider):
                         raise generic.HaltParseException

                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                        torrent_table = soup.find('table', attrs={'class': 'torrent_table'})
+                        torrent_table = soup.find('table', class_='torrent_table')
                         torrent_rows = []
                         if torrent_table:
                             torrent_rows = torrent_table.find_all('tr')

@@ -86,17 +86,15 @@ class MoreThanProvider(generic.TorrentProvider):
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue

                                 title = tr.find('a', title=rc['info']).get_text().strip()
                                 if title.lower().startswith('season '):
-                                    title = '%s %s' % (tr.find('div', attrs={'class': rc['name']}).get_text().strip(),
-                                                       title)
+                                    title = '%s %s' % (tr.find('div', class_=rc['name']).get_text().strip(), title)

-                                link = str(tr.find('a', href=rc['get'])['href']).replace('&amp;', '&').lstrip('/')
-                                download_url = self.urls['get'] % link
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue

@@ -105,14 +103,12 @@ class MoreThanProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

             self._log_search(mode, len(items[mode]) - cnt, search_url)

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results

112  sickbeard/providers/ncore.py

@@ -0,0 +1,112 @@
+# coding=utf-8
+#
+# Author: SickGear
+#
+# This file is part of SickGear.
+#
+# SickGear is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# SickGear is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
+
+import re
+import traceback
+
+from . import generic
+from sickbeard import logger
+from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
+from lib.unidecode import unidecode
+
+
+class NcoreProvider(generic.TorrentProvider):
+
+    def __init__(self):
+        generic.TorrentProvider.__init__(self, 'nCore')
+
+        self.url_base = 'https://ncore.cc/'
+        self.urls = {'config_provider_home_uri': self.url_base,
+                     'login_action': self.url_base + 'login.php',
+                     'search': self.url_base + 'torrents.php?mire=%s&' + '&'.join([
+                         'miszerint=fid', 'hogyan=DESC', 'tipus=kivalasztottak_kozott',
+                         'kivalasztott_tipus=xvidser,dvdser,hdser', 'miben=name']),
+                     'get': self.url_base + '%s'}
+
+        self.url = self.urls['config_provider_home_uri']
+
+        self.username, self.password, self.minseed, self.minleech = 4 * [None]
+        self.chk_td = True
+
+    def _authorised(self, **kwargs):
+
+        return super(NcoreProvider, self)._authorised(
+            logged_in=(lambda y='': all([bool(y), 'action="login' not in y, self.has_all_cookies('PHPSESSID')])),
+            post_params={'nev': self.username, 'pass': self.password, 'form_tmpl': 'name=[\'"]login[\'"]'})
+
+    def _search_provider(self, search_params, **kwargs):
+
+        results = []
+        if not self._authorised():
+            return results
+
+        items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
+
+        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'list': '.*?torrent_all', 'info': 'details'}.iteritems())
+        for mode in search_params.keys():
+            for search_string in search_params[mode]:
+                search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
+
+                search_url = self.urls['search'] % search_string
+
+                # fetches 15 results by default, and up to 100 if allowed in user profile
+                html = self.get_url(search_url)
+
+                cnt = len(items[mode])
+                try:
+                    if not html or self._has_no_results(html):
+                        raise generic.HaltParseException
+
+                    with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
+                        torrent_table = soup.find('div', class_=rc['list'])
+                        torrent_rows = [] if not torrent_table else torrent_table.find_all('div', class_='box_torrent')
+
+                        if not len(torrent_rows):
+                            raise generic.HaltParseException
+
+                        for tr in torrent_rows:
+                            try:
+                                seeders, leechers, size = [tryInt(n, n) for n in [
+                                    tr.find('div', class_=x).get_text().strip()
+                                    for x in 'box_s2', 'box_l2', 'box_meret2']]
+                                if self._peers_fail(mode, seeders, leechers):
+                                    continue
+
+                                anchor = tr.find('a', href=rc['info'])
+                                title = (anchor.get('title') or anchor.get_text()).strip()
+                                download_url = self._link(anchor.get('href').replace('details', 'download'))
+                            except (AttributeError, TypeError, ValueError):
+                                continue
+
+                            if title and download_url:
+                                items[mode].append((title, download_url, seeders, self._bytesizer(size)))
+
+                except generic.HaltParseException:
+                    pass
+                except (StandardError, Exception):
+                    logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+
+                self._log_search(mode, len(items[mode]) - cnt, search_url)
+
+                results = self._sort_seeding(mode, results + items[mode])
+
+        return results
+
+
+provider = NcoreProvider()
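
nCore lists results as div.box_torrent blocks rather than table rows, so the stats live in class-named child divs. A bs4 sketch of the same scraping technique against illustrative markup:

    from bs4 import BeautifulSoup

    row = BeautifulSoup(
        '<div class="box_torrent">'
        '<a href="details.php?id=9" title="Show.S01E01">Show.S01E01</a>'
        '<div class="box_meret2">700 MB</div>'
        '<div class="box_s2">12</div><div class="box_l2">3</div></div>',
        'html.parser').div

    # Stat values live in class-named divs rather than table cells.
    seeders, leechers, size = [row.find('div', class_=x).get_text().strip()
                               for x in ('box_s2', 'box_l2', 'box_meret2')]
    anchor = row.find('a', href=lambda h: h and 'details' in h)
    title = (anchor.get('title') or anchor.get_text()).strip()
    download_url = anchor.get('href').replace('details', 'download')
    assert ('12', '3', '700 MB') == (seeders, leechers, size)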

2  sickbeard/providers/newznab.py

@@ -359,7 +359,7 @@ class NewznabCache(tvcache.TVCache):
     def __init__(self, provider):

         tvcache.TVCache.__init__(self, provider)

-        self.update_freq = 5  # cache update frequency
+        self.update_freq = 5

     def updateCache(self):

77  sickbeard/providers/nyaatorrents.py

@@ -1,5 +1,3 @@
-# Author: Mr_Orange
-# URL: http://code.google.com/p/sickbeard/
 #
 # This file is part of SickGear.
 #
@@ -16,10 +14,12 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear. If not, see <http://www.gnu.org/licenses/>.

+import re
 import urllib

 from . import generic
 from sickbeard import logger, show_name_helpers, tvcache
+from sickbeard.helpers import tryInt


 class NyaaProvider(generic.TorrentProvider):
@@ -27,43 +27,55 @@ class NyaaProvider(generic.TorrentProvider):
     def __init__(self):
         generic.TorrentProvider.__init__(self, 'NyaaTorrents', anime_only=True)

-        self.url_base = self.url = 'http://www.nyaa.se/'
+        self.url_base = self.url = 'https://www.nyaa.se/'
+
+        self.minseed, self.minleech = 2 * [None]

         self.cache = NyaaCache(self)

-    def _search_provider(self, search_string, **kwargs):
+    def _search_provider(self, search_string, search_mode='eponly', **kwargs):

-        results = []
         if self.show and not self.show.is_anime:
-            return results
+            return []

-        params = {'term': search_string.encode('utf-8'),
-                  'cats': '1_37',  # Limit to English-translated Anime (for now)
-                  # 'sort': '2',  # Sort Descending By Seeders
-                  }
+        params = urllib.urlencode({'term': search_string.encode('utf-8'),
+                                   'cats': '1_37',  # Limit to English-translated Anime (for now)
+                                   # 'sort': '2',  # Sort Descending By Seeders
+                                   })

-        search_url = self.url + '?page=rss&' + urllib.urlencode(params)
-        logger.log(u'Search string: ' + search_url, logger.DEBUG)
+        return self.get_data(getrss_func=self.cache.getRSSFeed,
+                             search_url='%s?page=rss&%s' % (self.url, params),
+                             mode=('Episode', 'Season')['sponly' == search_mode])

-        data = self.cache.getRSSFeed(search_url)
+    def get_data(self, getrss_func, search_url, mode='cache'):
+
+        data = getrss_func(search_url)
+        results = []
         if data and 'entries' in data:
-            items = data.entries
-            for curItem in items:
-                title, url = self._title_and_url(curItem)
-
-                if title and url:
-                    results.append(curItem)
-                else:
-                    logger.log(u'The data returned from ' + self.name + ' is incomplete, this result is unusable',
-                               logger.DEBUG)
-
-        return results
+            rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
+                'stats': '(\d+)\W+seed[^\d]+(\d+)\W+leech[^\d]+\d+\W+down[^\d]+([\d.,]+\s\w+)'}.iteritems())
+            for cur_item in data.get('entries', []):
+                try:
+                    seeders, leechers, size = 0, 0, 0
+                    stats = rc['stats'].findall(cur_item.get('summary_detail', {'value': ''}).get('value', ''))
+                    if len(stats):
+                        seeders, leechers, size = (tryInt(n, n) for n in stats[0])
+                    if self._peers_fail(mode, seeders, leechers):
+                        continue
+                    title, download_url = self._title_and_url(cur_item)
+                    download_url = self._link(download_url)
+                except (AttributeError, TypeError, ValueError, IndexError):
+                    continue

-    def find_search_results(self, show, episodes, search_mode, manual_search=False):
-        return generic.TorrentProvider.find_search_results(self, show, episodes, search_mode, manual_search)
+                if title and download_url:
+                    results.append((title, download_url, seeders, self._bytesizer(size)))
+
+        self._log_search(mode, len(results), search_url)
+
+        return self._sort_seeding(mode, results)

     def _season_strings(self, ep_obj, **kwargs):
@@ -79,20 +91,17 @@ class NyaaCache(tvcache.TVCache):
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)

-        self.update_freq = 15  # cache update frequency
+        self.update_freq = 15

     def _cache_data(self):

-        params = {'page': 'rss',  # Use RSS page
-                  'order': '1',  # Sort Descending By Date
-                  'cats': '1_37'}  # Limit to English-translated Anime (for now)
-
-        url = self.provider.url + '?' + urllib.urlencode(params)
-        logger.log(u'NyaaTorrents cache update URL: ' + url, logger.DEBUG)
+        params = urllib.urlencode({'page': 'rss',  # Use RSS page
+                                   'order': '1',  # Sort Descending By Date
+                                   'cats': '1_37'  # Limit to English-translated Anime (for now)
+                                   })

-        data = self.getRSSFeed(url)
-        if data and 'entries' in data:
-            return data.entries
-        return []
+        return self.provider.get_data(getrss_func=self.getRSSFeed,
+                                      search_url='%s?%s' % (self.provider.url, params))


 provider = NyaaProvider()
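
Rather than returning raw feed entries, the provider now mines seeders/leechers/size out of each entry's summary text with the 'stats' regex, so RSS results pass through the same peer-count checks and seeding sort as HTML results. The regex as used above, run against an illustrative summary string:

    import re

    summary = '12 seeder(s), 3 leecher(s), 407 download(s) - 233.5 MiB - Trusted'
    stats_re = re.compile(
        r'(?i)(\d+)\W+seed[^\d]+(\d+)\W+leech[^\d]+\d+\W+down[^\d]+([\d.,]+\s\w+)')

    stats = stats_re.findall(summary)
    if len(stats):
        seeders, leechers, size = stats[0]
    assert ('12', '3', '233.5 MiB') == (seeders, leechers, size)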

6  sickbeard/providers/omgwtfnzbs.py

@@ -203,7 +203,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
             except generic.HaltParseException:
                 time.sleep(1.1)
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

         mode = (mode, search_mode)['Propers' == search_mode]
@@ -222,7 +222,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
             title, url = self._title_and_url(item)
             try:
                 result_date = datetime.fromtimestamp(int(item['usenetage']))
-            except:
+            except (StandardError, Exception):
                 result_date = None

             if result_date:
@@ -236,7 +236,7 @@ class OmgwtfnzbsProvider(generic.NZBProvider):
             api_key = self._check_auth()
             if not api_key.startswith('cookie:'):
                 return api_key
-        except Exception:
+        except (StandardError, Exception):
             return None

         self.cookies = re.sub(r'(?i)([\s\']+|cookie\s*:)', '', api_key)

18  sickbeard/providers/pisexy.py

@@ -40,7 +40,8 @@ class PiSexyProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

-        return super(PiSexyProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['uid', 'pass', 'pcode', 'pisexy']))
+        return super(PiSexyProvider, self)._authorised(
+            logged_in=(lambda y=None: self.has_all_cookies(['uid', 'pass', 'pcode', 'pisexy'])))

     def _search_provider(self, search_params, **kwargs):

@@ -81,13 +82,10 @@ class PiSexyProvider(generic.TorrentProvider):
                                 continue

                             info = tr.find('a', href=rc['info'])
-                            title = 'title' in info.attrs and rc['title'].sub('', info.attrs['title'])\
-                                or info.get_text().strip()
+                            title = (rc['title'].sub('', info.attrs.get('title', '')) or info.get_text()).strip()
                             size = tr.find_all('td')[3].get_text().strip()
-
-                            download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-                        except (AttributeError, TypeError, ValueError, IndexError):
+                            download_url = self._link(tr.find('a', href=rc['get'])['href'])
+                        except (AttributeError, TypeError, ValueError, KeyError, IndexError):
                             continue

                         if title and download_url:
@@ -95,14 +93,12 @@ class PiSexyProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

             self._log_search(mode, len(items[mode]) - cnt, search_url)

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results

6  sickbeard/providers/pretome.py

@@ -52,11 +52,11 @@ class PreToMeProvider(generic.TorrentProvider):
                 search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
                 search_url = url + (self.urls['search'] % search_string, '')['Cache' == mode]

-                data = RSSFeeds(self).get_feed(search_url)
+                xml_data = RSSFeeds(self).get_feed(search_url)

                 cnt = len(items[mode])
-                if data and 'entries' in data:
-                    for entry in data['entries']:
+                if xml_data and 'entries' in xml_data:
+                    for entry in xml_data['entries']:
                         try:
                             if entry['title'] and 'download' in entry['link']:
                                 items[mode].append((entry['title'], entry['link'], None, None))

18  sickbeard/providers/privatehd.py

@@ -46,8 +46,8 @@ class PrivateHDProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

         return super(PrivateHDProvider, self)._authorised(
-            logged_in=lambda x=None: self.has_all_cookies(['love']),
-            post_params={'email_username': self.username})
+            logged_in=(lambda y=None: self.has_all_cookies('love')),
+            post_params={'email_username': self.username, 'form_tmpl': True})

     def _search_provider(self, search_params, **kwargs):

@@ -80,7 +80,7 @@ class PrivateHDProvider(generic.TorrentProvider):
                         raise generic.HaltParseException

                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                        torrent_table = soup.find('table', attrs={'class': 'table'})
+                        torrent_table = soup.find('table', class_='table')
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')

                         if 2 > len(torrent_rows):
@@ -89,14 +89,12 @@ class PrivateHDProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    (tr.find_all('td')[x].get_text().strip()) for x in (-3, -2, -4)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -3, -2, -4]]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue

                                 title = rc['info'].sub('', tr.find('a', attrs={'title': rc['info']})['title'])
-
-                                download_url = tr.find('a', href=rc['get'])['href']
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError, IndexError):
                                 continue

@@ -105,14 +103,12 @@ class PrivateHDProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

             self._log_search(mode, len(items[mode]) - cnt, search_url)

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results

14  sickbeard/providers/ptf.py

@@ -47,8 +47,8 @@ class PTFProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):

-        return super(PTFProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['session_key']),
-                                                    post_params={'force_ssl': 'on', 'ssl': ''})
+        return super(PTFProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies('session_key')),
+                                                    post_params={'force_ssl': 'on', 'ssl': '', 'form_tmpl': True})

     def _search_provider(self, search_params, **kwargs):

@@ -98,9 +98,7 @@ class PTFProvider(generic.TorrentProvider):
                                 title = tr.find('a', href=rc['info']).get_text().strip()
                                 snatches = tr.find('a', href=rc['snatch']).get_text().strip()
                                 size = tr.find_all('td')[-3].get_text().strip().replace(snatches, '')
-
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError, IndexError):
                                 continue

@@ -109,14 +107,12 @@ class PTFProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)

             self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url'))

-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])

         return results

26  sickbeard/providers/rarbg.py

@@ -38,7 +38,7 @@ class RarbgProvider(generic.TorrentProvider):
                      'api_list': self.url_api + 'mode=list',
                      'api_search': self.url_api + 'mode=search'}

-        self.params = {'defaults': '&format=json_extended&category=18;41&limit=100&sort=last&ranked=%(ranked)s&token=%(token)s',
+        self.params = {'defaults': '&format=json_extended&category=18;41&limit=100&sort=last&ranked=%(r)s&token=%(t)s',
                        'param_iid': '&search_imdb=%(sid)s',
                        'param_tid': '&search_tvdb=%(sid)s',
                        'param_str': '&search_string=%(str)s',
@@ -90,7 +90,8 @@ class RarbgProvider(generic.TorrentProvider):
             id_search = self.params[search_with] % {'sid': sid}

         dedupe = []
-        search_types = sorted([x for x in search_params.items()], key=lambda tup: tup[0], reverse=True)  # sort type "_only" as first to process
+        # sort type "_only" as first to process
+        search_types = sorted([x for x in search_params.items()], key=lambda tup: tup[0], reverse=True)
         for mode_params in search_types:
             mode_search = mode_params[0]
             mode = mode_search.replace('_only', '')
@@ -121,41 +122,40 @@ class RarbgProvider(generic.TorrentProvider):
                         time_out += 1
                         time.sleep(1)

-                searched_url = search_url % {'ranked': int(self.confirmed), 'token': self.token}
+                searched_url = search_url % {'r': int(self.confirmed), 't': self.token}

-                data = self.get_url(searched_url, json=True)
+                data_json = self.get_url(searched_url, json=True)

                 self.token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14)
                 self.request_throttle = datetime.datetime.now() + datetime.timedelta(seconds=3)
-                if not data:
+                if not data_json:
                     continue

-                if 'error' in data:
-                    if 5 == data['error_code']:  # Too many requests per second.
+                if 'error' in data_json:
+                    if 5 == data_json['error_code']:  # Too many requests per second.
                         continue

-                    elif 2 == data['error_code']:  # Invalid token set
+                    elif 2 == data_json['error_code']:  # Invalid token set
                         if self._authorised(reset=True):
                             continue
                         self.log_result(mode, len(items[mode]) - cnt, searched_url)
                         return items[mode]
                     break

-                if 'error' not in data:
-                    for item in data['torrent_results']:
+                if 'error' not in data_json:
+                    for item in data_json['torrent_results']:
                         title, download_magnet, seeders, size = [
                             item.get(x) for x in 'title', 'download', 'seeders', 'size']
                         title = None is title and item.get('filename') or title
                         if not (title and download_magnet) or download_magnet in dedupe:
                             continue
                         dedupe += [download_magnet]
                         items[mode].append((title, download_magnet, seeders, self._bytesizer(size)))

                 self._log_search(mode, len(items[mode]) - cnt, searched_url)

-                self._sort_seeders(mode, items)
-                results = list(set(results + items[mode]))
+                results = self._sort_seeding(mode, results + items[mode])

                 if '_only' in mode_search and len(results):
                     break
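
For reference, the API loop above distinguishes rarbg's two interesting error codes: 5 (rate limited) simply retries after the throttle window, while 2 (stale token) re-authorises and retries. A compressed sketch of that control flow, with hypothetical fetch/reauthorise stand-ins for the provider plumbing:

    import datetime

    # Hypothetical stand-ins for the provider plumbing.
    def fetch(url):
        return {'torrent_results': []}          # pretend the API succeeded

    def reauthorise():
        return True                             # pretend a new token was issued

    for attempt in range(3):
        data_json = fetch('api?mode=search&token=...')
        # bookkeeping: tokens last ~15 min, calls throttled to one per 3s
        token_expiry = datetime.datetime.now() + datetime.timedelta(minutes=14)
        request_throttle = datetime.datetime.now() + datetime.timedelta(seconds=3)
        if not data_json:
            continue
        if 'error' in data_json:
            if 5 == data_json['error_code']:    # too many requests: retry
                continue
            elif 2 == data_json['error_code'] and reauthorise():
                continue                        # invalid token: refresh, retry
            break
        break                                   # usable response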

14  sickbeard/providers/revtt.py

@@ -45,7 +45,7 @@ class RevTTProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):
-        return super(RevTTProvider, self)._authorised(logged_in=lambda x=None: self.has_all_cookies(['uid', 'pass']))
+        return super(RevTTProvider, self)._authorised()
     def _search_provider(self, search_params, **kwargs):
@@ -80,15 +80,13 @@ class RevTTProvider(generic.TorrentProvider):
                     for tr in torrent_rows[1:]:
                         try:
                             seeders, leechers, size = [tryInt(n, n) for n in [
-                                (tr.find_all('td')[x].get_text().strip()) for x in (-2, -1, -4)]]
+                                tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
                             if self._peers_fail(mode, seeders, leechers) or not tr.find('a', href=rc['cats']):
                                 continue
                             title = tr.find('a', href=rc['info']).get_text().strip()
                             size = rc['size'].sub(r'\1', size)
-                            download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                            download_url = self._link(tr.find('a', href=rc['get'])['href'])
                         except (AttributeError, TypeError, ValueError, IndexError):
                             continue
@@ -97,14 +95,12 @@ class RevTTProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
             self._log_search(mode, len(items[mode]) - cnt, self.session.response.get('url'))
-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
         return results
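This is the first of many hunks in this commit that fold the old two-step finish, _sort_seeders(mode, items) followed by results = list(set(results + items[mode])), into a single _sort_seeding call. The helper itself lives in generic.py (changed earlier in this commit, not shown here); a plausible sketch of a combined dedupe-and-sort, assuming (title, url, seeders, size) result tuples, which matches the changelog note about improving the odds of selecting the most seeded release:

    def sort_seeding(mode, results):
        """Dedupe (title, url, seeders, size) tuples; outside cache mode, best seeded first."""
        results = list(set(results))  # drop exact duplicate result tuples
        if 'Cache' != mode:
            results.sort(key=lambda result: result[2], reverse=True)  # seeders descending
        return results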

2
sickbeard/providers/rsstorrent.py

@@ -100,7 +100,7 @@ class TorrentRssProvider(generic.TorrentProvider):
             try:
                 bdecode(torrent_file)
                 break
-            except Exception:
+            except (StandardError, Exception):
                 pass
         else:
             return False, '%s fetched RSS feed data: %s' % \
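The probe here treats the fetched data as a valid torrent file exactly when it bdecodes. A minimal sketch of that check, with bdecode passed in as an assumed stand-in for the bencode decoder the provider imports:

    def looks_like_torrent(raw, bdecode):
        """True when raw bytes parse as bencoded data, i.e. a plausible .torrent file."""
        try:
            bdecode(raw)
            return True
        except (StandardError, Exception):  # same Py2-wide catch as the hunk above
            return False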

25
sickbeard/providers/scc.py

@@ -34,17 +34,17 @@ class SCCProvider(generic.TorrentProvider):
         self.url_home = ['https://sceneaccess.%s/' % u for u in 'eu', 'org']
         self.url_vars = {
-            'login': 'login', 'search': 'browse?search=%s&method=1&c27=27&c17=17&c11=11', 'get': '%s',
+            'login_action': 'login', 'search': 'browse?search=%s&method=1&c27=27&c17=17&c11=11', 'get': '%s',
             'nonscene': 'nonscene?search=%s&method=1&c44=44&c45=44', 'archive': 'archive?search=%s&method=1&c26=26'}
         self.url_tmpl = {
-            'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s', 'search': '%(home)s%(vars)s',
+            'config_provider_home_uri': '%(home)s', 'login_action': '%(home)s%(vars)s', 'search': '%(home)s%(vars)s',
             'get': '%(home)s%(vars)s', 'nonscene': '%(home)s%(vars)s', 'archive': '%(home)s%(vars)s'}
         self.username, self.password, self.minseed, self.minleech = 4 * [None]
     def _authorised(self, **kwargs):
-        return super(SCCProvider, self)._authorised(post_params={'submit': 'come+on+in'})
+        return super(SCCProvider, self)._authorised(post_params={'form_tmpl': 'method'})
     def _search_provider(self, search_params, **kwargs):
@@ -76,7 +76,7 @@ class SCCProvider(generic.TorrentProvider):
                         raise generic.HaltParseException
                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                        torrent_table = soup.find('table', attrs={'id': 'torrents-table'})
+                        torrent_table = soup.find(id='torrents-table')
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
                         if 2 > len(torrent_rows):
@@ -85,17 +85,14 @@ class SCCProvider(generic.TorrentProvider):
                         for tr in torrent_table.find_all('tr')[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    tr.find('td', attrs={'class': x}).get_text().strip()
-                                    for x in ('ttr_seeders', 'ttr_leechers', 'ttr_size')]]
+                                    tr.find('td', class_='ttr_' + x).get_text().strip()
+                                    for x in 'seeders', 'leechers', 'size']]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue
                                 info = tr.find('a', href=rc['info'])
-                                title = ('title' in info.attrs and info['title']) or info.get_text().strip()
-                                link = str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-                                download_url = self.urls['get'] % link
+                                title = (info.attrs.get('title') or info.get_text()).strip()
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue
@@ -104,13 +101,11 @@ class SCCProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 time.sleep(1.1)
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
             self._log_search(mode, len(items[mode]) - cnt, search_url)
-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
         return results
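As in several other hunks, the hand-rolled href cleanup (str(...).lstrip('/') formatted into self.urls['get']) gives way to one _link() call. That helper lives in generic.py and is not shown in this commit view; a plausible sketch of what such a normaliser does, assuming a '%s'-style 'get' template:

    def link(get_tmpl, href):
        """Normalise a scraped href and join it onto a '%s'-style 'get' template."""
        href = str(href).replace('&amp;', '&').lstrip('/')
        return href if href.startswith('http') else (get_tmpl % href)

    # e.g. link('https://sceneaccess.eu/%s', '/download/12345/x.torrent')
    #      -> 'https://sceneaccess.eu/download/12345/x.torrent'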

17
sickbeard/providers/scenetime.py

@@ -33,7 +33,7 @@ class SceneTimeProvider(generic.TorrentProvider):
         self.url_base = 'https://www.scenetime.com/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'login': self.url_base + 'takelogin.php',
+                     'login_action': self.url_base + 'login.php',
                      'browse': self.url_base + 'browse_API.php',
                      'params': {'sec': 'jax', 'cata': 'yes'},
                      'get': self.url_base + 'download.php/%(id)s/%(title)s.torrent'}
@@ -46,7 +46,7 @@ class SceneTimeProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):
-        return super(SceneTimeProvider, self)._authorised(post_params={'submit': 'Log in'})
+        return super(SceneTimeProvider, self)._authorised(post_params={'form_tmpl': True})
     def _search_provider(self, search_params, **kwargs):
@@ -91,19 +91,18 @@ class SceneTimeProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -3)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -3]]
                                 if None is tr.find('a', href=rc['cats'])\
                                         or self.freeleech and None is rc['fl'].search(tr.find_all('td')[1].get_text())\
                                         or self._peers_fail(mode, seeders, leechers):
                                     continue
                                 info = tr.find('a', href=rc['info'])
-                                title = info.attrs.get('title') or info.get_text().strip()
+                                title = (info.attrs.get('title') or info.get_text()).strip()
                                 download_url = self.urls['get'] % {
                                     'id': re.sub(rc['get'], r'\1', str(info.attrs['href'])),
                                     'title': str(title).replace(' ', '.')}
-                            except (AttributeError, TypeError, ValueError):
+                            except (AttributeError, TypeError, ValueError, KeyError):
                                 continue
                             if title and download_url:
@@ -111,15 +110,13 @@ class SceneTimeProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
             self._log_search(mode, len(items[mode]) - cnt,
                              ('search string: ' + search_string, self.name)['Cache' == mode])
-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
         return results

14
sickbeard/providers/shazbat.py

@@ -50,10 +50,9 @@ class ShazbatProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):
         return super(ShazbatProvider, self)._authorised(
-            logged_in=(lambda x=None: '<input type="password"' not in helpers.getURL(
+            logged_in=(lambda y=None: '<input type="password"' not in helpers.getURL(
                 self.urls['feeds'], session=self.session)),
-            post_params={'tv_login': self.username, 'tv_password': self.password,
-                         'referer': 'login', 'query': '', 'email': ''})
+            post_params={'tv_login': self.username, 'tv_password': self.password, 'form_tmpl': True})
     def _search_provider(self, search_params, **kwargs):
@@ -116,8 +115,7 @@ class ShazbatProvider(generic.TorrentProvider):
                                     title = unicode(element).strip()
                                     break
-                                link = str(tr.find('a', href=rc['get'])['href']).replace('&amp;', '&').lstrip('/')
-                                download_url = self.urls['get'] % link
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue
@@ -126,13 +124,11 @@ class ShazbatProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
             self._log_search(mode, len(items[mode]) - cnt, search_url)
-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
         return results
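Shazbat's hard-coded login fields give way to a 'form_tmpl' key, the same pattern applied to SCC, SceneTime, TorrentBytes, TorrentLeech, TorrentShack and Transmithe_net in this commit; this suggests the generic _authorised now derives the post body from the site's own login form rather than from per-provider constants. A rough sketch of that idea, with every name assumed and not taken from this diff:

    import re

    # All names here are assumptions; the real logic lives in generic.py's _authorised.
    def build_login_post(login_html, creds):
        """Scrape input fields from a site's own login form, then overlay credentials."""
        post = dict(re.findall(r'<input[^>]*name="([^"]+)"[^>]*value="([^"]*)"', login_html))
        post.update(creds)  # e.g. {'username': ..., 'password': ...}
        return post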

16
sickbeard/providers/speedcd.py

@@ -44,7 +44,7 @@ class SpeedCDProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):
         return super(SpeedCDProvider, self)._authorised(
-            logged_in=(lambda x=None: self.has_all_cookies('inSpeed_speedian')))
+            logged_in=(lambda y=None: self.has_all_cookies('inSpeed_speedian')))
     def _search_provider(self, search_params, **kwargs):
@@ -81,32 +81,28 @@ class SpeedCDProvider(generic.TorrentProvider):
                     for tr in torrent_rows[1:]:
                         try:
                             seeders, leechers, size = [tryInt(n, n) for n in [
-                                tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -3)]]
+                                tr.find_all('td')[x].get_text().strip() for x in -2, -1, -3]]
                             if None is tr.find('a', href=rc['cats']) \
                                     or self.freeleech and None is rc['fl'].search(tr.find_all('td')[1].get_text()) \
                                     or self._peers_fail(mode, seeders, leechers):
                                 continue
                             info = tr.find('a', 'torrent')
-                            title = info.attrs.get('title') or info.get_text().strip()
-                            download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                            title = (info.attrs.get('title') or info.get_text()).strip()
+                            download_url = self._link(tr.find('a', href=rc['get'])['href'])
                         except (AttributeError, TypeError, ValueError):
                             continue
                         if title and download_url:
                             items[mode].append((title, download_url, seeders, self._bytesizer(size)))
-            except Exception:
+            except (StandardError, Exception):
                 time.sleep(1.1)
             self._log_search(mode, len(items[mode]) - cnt,
                              ('search string: ' + search_string, self.name)['Cache' == mode])
-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
         return results

26
sickbeard/providers/thepiratebay.py

@@ -26,6 +26,7 @@ from . import generic
 from sickbeard import config, logger, show_name_helpers
 from sickbeard.bs4_parser import BS4Parser
 from sickbeard.common import Quality, mediaExtensions
+from sickbeard.helpers import tryInt
 from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
 from lib.unidecode import unidecode
@@ -35,7 +36,8 @@ class ThePirateBayProvider(generic.TorrentProvider):
     def __init__(self):
         generic.TorrentProvider.__init__(self, 'The Pirate Bay', cache_update_freq=20)
-        self.url_home = ['https://thepiratebay.%s/' % u for u in 'se', 'org']
+        self.url_home = ['https://thepiratebay.%s/' % u for u in 'se', 'org'] + \
+                        ['piratebay.usbypass.xyz/']
         self.url_vars = {'search': 'search/%s/0/7/200', 'browse': 'tv/latest/'}
         self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'search': '%(home)s%(vars)s',
@@ -135,9 +137,9 @@ class ThePirateBayProvider(generic.TorrentProvider):
         items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
-        rc = dict((k, re.compile('(?i)' + v))
-                  for (k, v) in {'info': 'detail', 'get': 'download[^"]+magnet', 'tid': r'.*/(\d{5,}).*',
-                                 'verify': '(?:helper|moderator|trusted|vip)'}.items())
+        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
+            'info': 'detail', 'get': 'download[^"]+magnet', 'tid': r'.*/(\d{5,}).*',
+            'verify': '(?:helper|moderator|trusted|vip)', 'size': 'size[^\d]+(\d+(?:[.,]\d+)?\W*[bkmgt]\w+)'}.items())
         for mode in search_params.keys():
             for search_string in search_params[mode]:
@@ -153,7 +155,7 @@ class ThePirateBayProvider(generic.TorrentProvider):
                         raise generic.HaltParseException
                     with BS4Parser(html, features=['html5lib', 'permissive'], attr='id="searchResult"') as soup:
-                        torrent_table = soup.find('table', attrs={'id': 'searchResult'})
+                        torrent_table = soup.find(id='searchResult')
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
                         if 2 > len(torrent_rows):
@@ -161,14 +163,13 @@ class ThePirateBayProvider(generic.TorrentProvider):
                         for tr in torrent_table.find_all('tr')[1:]:
                             try:
-                                seeders, leechers = [int(tr.find_all('td')[x].get_text().strip()) for x in (-2, -1)]
+                                seeders, leechers = [tryInt(tr.find_all('td')[x].get_text().strip()) for x in -2, -1]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue
                                 info = tr.find('a', title=rc['info'])
                                 title = info.get_text().strip().replace('_', '.')
                                 tid = rc['tid'].sub(r'\1', str(info['href']))
                                 download_magnet = tr.find('a', title=rc['get'])['href']
                             except (AttributeError, TypeError, ValueError):
                                 continue
@@ -186,22 +187,19 @@ class ThePirateBayProvider(generic.TorrentProvider):
                             if title and download_magnet:
                                 size = None
                                 try:
-                                    size = re.findall('(?i)size[^\d]+(\d+(?:[\.,]\d+)?\W*[bkmgt]\w+)',
-                                                      tr.find_all(class_='detDesc')[0].get_text())[0]
-                                except Exception:
+                                    size = rc['size'].findall(tr.find_all(class_='detDesc')[0].get_text())[0]
+                                except (StandardError, Exception):
                                     pass
                                 items[mode].append((title, download_magnet, seeders, self._bytesizer(size)))
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
             self._log_search(mode, len(items[mode]) - cnt, search_url)
-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
         return results
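The size scrape moves from an inline re.findall into the precompiled rc['size'] pattern built once per search. A standalone check of that pattern against typical 'detDesc' cell text:

    import re

    # Same pattern the diff adds to rc (case-insensitive via the '(?i)' prefix).
    size_re = re.compile('(?i)size[^\d]+(\d+(?:[.,]\d+)?\W*[bkmgt]\w+)')

    desc = 'Uploaded 03-17 2016, Size 1.4 GiB, ULed by someone'
    print(size_re.findall(desc))  # ['1.4 GiB'] -> later fed to _bytesizer for a byte count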

84
sickbeard/providers/tokyotoshokan.py

@@ -1,5 +1,3 @@
-# Author: Mr_Orange
-# URL: http://code.google.com/p/sickbeard/
 #
 # This file is part of SickGear.
 #
@@ -16,11 +14,13 @@
 # You should have received a copy of the GNU General Public License
 # along with SickGear.  If not, see <http://www.gnu.org/licenses/>.
+import re
 import traceback
 import urllib
 from . import generic
 from sickbeard import logger, show_name_helpers, tvcache
+from sickbeard.helpers import tryInt
 from sickbeard.bs4_parser import BS4Parser
@@ -29,7 +29,7 @@ class TokyoToshokanProvider(generic.TorrentProvider):
     def __init__(self):
         generic.TorrentProvider.__init__(self, 'TokyoToshokan', anime_only=True)
-        self.url_base = self.url = 'http://tokyotosho.info/'
+        self.url_base = self.url = 'https://tokyotosho.info/'
         self.cache = TokyoToshokanCache(self)
@@ -39,36 +39,49 @@ class TokyoToshokanProvider(generic.TorrentProvider):
         if self.show and not self.show.is_anime:
             return results
-        params = {'terms': search_string.encode('utf-8'),
-                  'type': 1}  # get anime types
-        search_url = self.url + 'search.php?' + urllib.urlencode(params)
-        logger.log(u'Search string: ' + search_url, logger.DEBUG)
+        params = urllib.urlencode({'terms': search_string.encode('utf-8'),
+                                   'type': 1})  # get anime types
+        search_url = '%ssearch.php?%s' % (self.url, params)
+        mode = ('Episode', 'Season')['sponly' == search_mode]
+        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
+            'stats': 'S:\s*?(\d)+\s*L:\s*(\d+)', 'size': 'size:\s*(\d+[.,]\d+\w+)'}.iteritems())
         html = self.get_url(search_url)
         if html:
             try:
                 with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                    torrent_table = soup.find('table', attrs={'class': 'listing'})
-                    torrent_rows = torrent_table.find_all('tr') if torrent_table else []
+                    torrent_table = soup.find('table', class_='listing')
+                    torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
                     if torrent_rows:
-                        a = (0, 1)[None is not torrent_rows[0].find('td', attrs={'class': 'centertext'})]
-                        for top, bottom in zip(torrent_rows[a::2], torrent_rows[a::2]):
-                            title = top.find('td', attrs={'class': 'desc-top'}).text
-                            url = top.find('td', attrs={'class': 'desc-top'}).find('a')['href']
-                            if title and url:
-                                results.append((title.lstrip(), url))
-            except Exception:
-                logger.log(u'Failed to parsing ' + self.name + ' Traceback: ' + traceback.format_exc(), logger.ERROR)
-        return results
-    def find_search_results(self, show, episodes, search_mode, manual_search=False):
-        return generic.TorrentProvider.find_search_results(self, show, episodes, search_mode, manual_search)
+                        a = (0, 1)[None is not torrent_rows[0].find('td', class_='centertext')]
+                        for top, bottom in zip(torrent_rows[a::2], torrent_rows[a+1::2]):
+                            try:
+                                bottom_text = bottom.get_text() or ''
+                                stats = rc['stats'].findall(bottom_text)
+                                seeders, leechers = (0, 0) if not stats else [tryInt(n) for n in stats[0]]
+                                size = rc['size'].findall(bottom_text)
+                                size = size and size[0] or -1
+                                info = top.find('td', class_='desc-top')
+                                title = info and re.sub(r'[ .]{2,}', '.', info.get_text().strip())
+                                urls = info and sorted([x.get('href') for x in info.find_all('a') or []])
+                                download_url = urls and urls[0].startswith('http') and urls[0] or urls[1]
+                            except (AttributeError, TypeError, ValueError, IndexError):
+                                continue
+                            if title and download_url:
+                                results.append((title, download_url, seeders, self._bytesizer(size)))
+            except (StandardError, Exception):
+                logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
+        self._log_search(mode, len(results), search_url)
+        return self._sort_seeding(mode, results)
     def _season_strings(self, ep_obj, **kwargs):
@@ -84,18 +97,35 @@ class TokyoToshokanCache(tvcache.TVCache):
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)
-        self.update_freq = 15  # cache update frequency
+        self.update_freq = 15
     def _cache_data(self):
-        params = {'filter': '1'}
-        url = self.provider.url + 'rss.php?' + urllib.urlencode(params)
-        logger.log(u'TokyoToshokan cache update URL: ' + url, logger.DEBUG)
-        data = self.getRSSFeed(url)
+        mode = 'Cache'
+        search_url = '%srss.php?%s' % (self.provider.url, urllib.urlencode({'filter': '1'}))
+        data = self.getRSSFeed(search_url)
+        results = []
         if data and 'entries' in data:
-            return data.entries
-        return []
+            rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {'size': 'size:\s*(\d+[.,]\d+\w+)'}.iteritems())
+            for cur_item in data.get('entries', []):
+                try:
+                    title, download_url = self._title_and_url(cur_item)
+                    size = rc['size'].findall(cur_item.get('summary_detail', {'value': ''}).get('value', ''))
+                    size = size and size[0] or -1
+                except (AttributeError, TypeError, ValueError):
+                    continue
+                if title and download_url:
+                    # feed does not carry seed, leech counts
+                    results.append((title, download_url, 0, self.provider._bytesizer(size)))
+        self.provider._log_search(mode, len(results), search_url)
+        return results
 provider = TokyoToshokanProvider()
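The key fix in this rewrite is the row pairing: the old zip(torrent_rows[a::2], torrent_rows[a::2]) zipped the slice of description rows against itself, so bottom was always a copy of top and the stats row carrying seeders, leechers and size was never read. Offsetting the second slice by one pairs each description row with its stats row, as a quick check shows:

    rows = ['desc1', 'stats1', 'desc2', 'stats2']
    a = 0  # 1 when the table starts with a header row
    print(list(zip(rows[a::2], rows[a::2])))      # [('desc1', 'desc1'), ('desc2', 'desc2')] - the old bug
    print(list(zip(rows[a::2], rows[a + 1::2])))  # [('desc1', 'stats1'), ('desc2', 'stats2')]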

21
sickbeard/providers/torrentbytes.py

@@ -32,8 +32,8 @@ class TorrentBytesProvider(generic.TorrentProvider):
         self.url_home = ['https://www.torrentbytes.net/']
-        self.url_vars = {'login': 'takelogin.php', 'search': 'browse.php?search=%s&%s', 'get': '%s'}
-        self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',
+        self.url_vars = {'login_action': 'login.php', 'search': 'browse.php?search=%s&%s', 'get': '%s'}
+        self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login_action': '%(home)s%(vars)s',
                          'search': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}
         self.categories = {'Season': [41, 32], 'Episode': [33, 37, 38]}
@@ -43,7 +43,7 @@ class TorrentBytesProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):
-        return super(TorrentBytesProvider, self)._authorised(post_params={'login': 'Log in!'})
+        return super(TorrentBytesProvider, self)._authorised(post_params={'form_tmpl': True})
     def _search_provider(self, search_params, **kwargs):
@@ -78,15 +78,14 @@ class TorrentBytesProvider(generic.TorrentProvider):
                             try:
                                 info = tr.find('a', href=rc['info'])
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
                                 if self.freeleech and (len(info.contents) < 2 or not rc['fl'].search(
                                         info.contents[1].string.strip())) or self._peers_fail(mode, seeders, leechers):
                                     continue
-                                title = info.attrs.get('title') or info.contents[0]
-                                title = (isinstance(title, list) and title[0] or title).strip()
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
-                            except (AttributeError, TypeError, ValueError):
+                                title = (info.attrs.get('title') or info.get_text()).strip()
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
+                            except (AttributeError, TypeError, ValueError, KeyError):
                                 continue
                             if title and download_url:
@@ -94,14 +93,12 @@ class TorrentBytesProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
             self._log_search(mode, len(items[mode]) - cnt, search_url)
-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
         return results

22
sickbeard/providers/torrentday.py

@@ -35,7 +35,7 @@ class TorrentDayProvider(generic.TorrentProvider):
         self.url_tmpl = {'config_provider_home_uri': '%(home)s', 'login': '%(home)s%(vars)s',
                          'search': '%(home)s%(vars)s', 'get': '%(home)s%(vars)s'}
-        self.categories = {'Season': [31, 33, 14], 'Episode': [24, 32, 26, 7, 2], 'Anime': [29]}
+        self.categories = {'Season': [31, 33, 14], 'Episode': [24, 32, 26, 7, 34, 2], 'Anime': [29]}
         self.categories['Cache'] = self.categories['Season'] + self.categories['Episode']
         self.proper_search_terms = None
@@ -45,9 +45,10 @@ class TorrentDayProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):
         return super(TorrentDayProvider, self)._authorised(
-            logged_in=(lambda x='': ('RSS URL' in x) and self.has_all_cookies() and
-                       self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
-            failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings'))
+            logged_in=(lambda y='': all(
+                ['RSS URL' in y, self.has_all_cookies()] +
+                [(self.session.cookies.get(x) or 'sg!no!pw') in self.digest for x in 'uid', 'pass'])),
+            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
     @staticmethod
     def _has_signature(data=None):
@@ -87,15 +88,14 @@ class TorrentDayProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
-                                seeders, leechers = [tryInt(tr.find('td', attrs={'class': x}).get_text().strip())
-                                                     for x in ('seedersInfo', 'leechersInfo')]
+                                seeders, leechers = [tryInt(tr.find('td', class_=x + 'ersInfo').get_text().strip())
+                                                     for x in 'seed', 'leech']
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue
                                 title = tr.find('a', href=rc['info']).get_text().strip()
                                 size = tr.find_all('td')[-3].get_text().strip()
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue
@@ -104,14 +104,12 @@ class TorrentDayProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 time.sleep(1.1)
             self._log_search(mode, len(items[mode]) - cnt, search_url)
-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
         return results
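TorrentDay (and Torrenting below) now validate cookie logins with a single all(...) over three conditions: the page marker, a complete cookie jar, and both cookie values appearing in the user-entered digest; the 'sg!no!pw' sentinel stops a missing cookie from vacuously matching. The same check as a standalone sketch, with digest assumed to be the stored uid/pass cookie setting:

    def logged_in(page_text, cookies, digest, marker='RSS URL'):
        """Authenticated only if the page marker shows and both cookies appear in digest."""
        return all(
            [marker in page_text, all(cookies.get(x) for x in ('uid', 'pass'))] +
            [(cookies.get(x) or 'sg!no!pw') in digest for x in ('uid', 'pass')])

    # e.g. logged_in('... RSS URL ...', {'uid': '123', 'pass': 'abc'}, 'uid=123; pass=abc') -> True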

20
sickbeard/providers/torrenting.py

@@ -43,9 +43,10 @@ class TorrentingProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):
         return super(TorrentingProvider, self)._authorised(
-            logged_in=(lambda x='': ('RSS link' in x) and self.has_all_cookies() and
-                       self.session.cookies['uid'] in self.digest and self.session.cookies['pass'] in self.digest),
-            failed_msg=(lambda x=None: u'Invalid cookie details for %s. Check settings'))
+            logged_in=(lambda y='': all(
+                ['RSS link' in y, self.has_all_cookies()] +
+                [(self.session.cookies.get(x) or 'sg!no!pw') in self.digest for x in 'uid', 'pass'])),
+            failed_msg=(lambda y=None: u'Invalid cookie details for %s. Check settings'))
     @staticmethod
     def _has_signature(data=None):
@@ -84,14 +85,13 @@ class TorrentingProvider(generic.TorrentProvider):
                        for tr in torrent_rows[1:]:
                            try:
                                seeders, leechers, size = [tryInt(n, n) for n in [
-                                    tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -3)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -3]]
                                if None is tr.find('a', href=rc['cats']) or self._peers_fail(mode, seeders, leechers):
                                    continue
                                info = tr.find('a', href=rc['info'])
-                                title = info.attrs.get('title') or info.get_text().strip()
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                                title = (info.attrs.get('title') or info.get_text()).strip()
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                            except (AttributeError, TypeError, ValueError):
                                continue
@@ -100,14 +100,12 @@ class TorrentingProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
             self._log_search(mode, len(items[mode]) - cnt, search_url)
-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
         return results

27
sickbeard/providers/torrentleech.py

@@ -21,6 +21,7 @@ import traceback
 from . import generic
 from sickbeard import logger
 from sickbeard.bs4_parser import BS4Parser
+from sickbeard.helpers import tryInt
 from lib.unidecode import unidecode
@@ -30,7 +31,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
         self.url_base = 'https://torrentleech.org/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'login': self.url_base + 'user/account/login/',
+                     'login_action': self.url_base,
                      'browse': self.url_base + 'torrents/browse/index/categories/%(cats)s',
                      'search': self.url_base + 'torrents/browse/index/query/%(query)s/categories/%(cats)s',
                      'get': self.url_base + '%s'}
@@ -43,8 +44,8 @@ class TorrentLeechProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):
-        return super(TorrentLeechProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies(pre='tl')),
-                                                             post_params={'remember_me': 'on', 'login': 'submit'})
+        return super(TorrentLeechProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies(pre='tl')),
+                                                             post_params={'remember_me': 'on', 'form_tmpl': True})
     def _search_provider(self, search_params, **kwargs):
@@ -69,7 +70,7 @@ class TorrentLeechProvider(generic.TorrentProvider):
                         raise generic.HaltParseException
                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                        torrent_table = soup.find('table', attrs={'id': 'torrenttable'})
+                        torrent_table = soup.find(id='torrenttable')
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
                         if 2 > len(torrent_rows):
@@ -77,16 +78,15 @@ class TorrentLeechProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
-                                seeders, leechers = [int(tr.find('td', attrs={'class': x}).get_text().strip())
-                                                     for x in ('seeders', 'leechers')]
+                                seeders, leechers = [tryInt(n) for n in [
+                                    tr.find('td', class_=x).get_text().strip() for x in 'seeders', 'leechers']]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue
-                                info = tr.find('td', {'class': 'name'}).a
-                                title = ('title' in info.attrs and info['title']) or info.get_text().strip()
+                                info = tr.find('td', class_='name').a
+                                title = (info.attrs.get('title') or info.get_text()).strip()
                                 size = tr.find_all('td')[-5].get_text().strip()
-                                download_url = self.urls['get'] % str(tr.find('a', href=rc['get'])['href']).lstrip('/')
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
                             except (AttributeError, TypeError, ValueError):
                                 continue
@@ -95,13 +95,11 @@ class TorrentLeechProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
             self._log_search(mode, len(items[mode]) - cnt, search_url)
-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
         return results
@@ -110,5 +108,4 @@ class TorrentLeechProvider(generic.TorrentProvider):
         return generic.TorrentProvider._episode_strings(self, ep_obj, sep_date='|', **kwargs)
 provider = TorrentLeechProvider()
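This parser also swaps bare int(...) for tryInt(...) so a blank or malformed cell yields a default instead of raising out of the row. The real helper is imported from sickbeard.helpers; a plausible sketch of its behaviour:

    def try_int(s, s_default=0):
        """Coerce s to int, returning s_default when conversion fails."""
        try:
            return int(s)
        except (StandardError, Exception):
            return s_default

    # tryInt(n, n) in the hunks above keeps the original string (e.g. a size) on failure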

30
sickbeard/providers/torrentshack.py

@@ -34,7 +34,7 @@ class TorrentShackProvider(generic.TorrentProvider):
         self.url_base = 'https://torrentshack.me/'
         self.urls = {'config_provider_home_uri': self.url_base,
-                     'login': self.url_base + 'login.php?lang=',
+                     'login_action': self.url_base + 'login.php',
                      'search': self.url_base + 'torrents.php?searchstr=%s&%s&' + '&'.join(
                          ['release_type=both', 'searchtags=', 'tags_type=0',
                           'order_by=s3', 'order_way=desc', 'torrent_preset=all']),
@@ -48,8 +48,8 @@ class TorrentShackProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):
-        return super(TorrentShackProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies('session')),
-                                                             post_params={'keeplogged': '1', 'login': 'Login'})
+        return super(TorrentShackProvider, self)._authorised(logged_in=(lambda y=None: self.has_all_cookies('session')),
+                                                             post_params={'keeplogged': '1', 'form_tmpl': True})
     def _search_provider(self, search_params, **kwargs):
@@ -59,8 +59,8 @@ class TorrentShackProvider(generic.TorrentProvider):
         items = {'Cache': [], 'Season': [], 'Episode': [], 'Propers': []}
-        rc = dict((k, re.compile('(?i)' + v))
-                  for (k, v) in {'info': 'view', 'get': 'download', 'title': 'view\s+torrent\s+'}.items())
+        rc = dict((k, re.compile('(?i)' + v)) for (k, v) in {
+            'info': 'view', 'get': 'download', 'title': 'view\s+torrent\s+', 'size': '\s{2,}.*'}.iteritems())
         for mode in search_params.keys():
             for search_string in search_params[mode]:
                 search_string = isinstance(search_string, unicode) and unidecode(search_string) or search_string
@@ -75,7 +75,7 @@ class TorrentShackProvider(generic.TorrentProvider):
                         raise generic.HaltParseException
                     with BS4Parser(html, features=['html5lib', 'permissive']) as soup:
-                        torrent_table = soup.find('table', attrs={'class': 'torrent_table'})
+                        torrent_table = soup.find('table', class_='torrent_table')
                         torrent_rows = [] if not torrent_table else torrent_table.find_all('tr')
                         if 2 > len(torrent_rows):
@@ -84,17 +84,15 @@ class TorrentShackProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    tr.find_all('td')[x].get_text().strip() for x in (-2, -1, -4)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -2, -1, -4]]
                                 if self._peers_fail(mode, seeders, leechers):
                                     continue
+                                size = rc['size'].sub('', size)
                                 info = tr.find('a', title=rc['info'])
-                                title = 'title' in info.attrs and rc['title'].sub('', info.attrs['title']) \
-                                    or info.get_text().strip()
-                                link = str(tr.find('a', title=rc['get'])['href']).replace('&amp;', '&').lstrip('/')
-                                download_url = self.urls['get'] % link
-                            except (AttributeError, TypeError, ValueError):
+                                title = (rc['title'].sub('', info.attrs.get('title', '')) or info.get_text()).strip()
+                                download_url = self._link(tr.find('a', title=rc['get'])['href'])
+                            except (AttributeError, TypeError, ValueError, KeyError):
                                 continue
                             if title and download_url:
@@ -102,13 +100,11 @@ class TorrentShackProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
             self._log_search(mode, len(items[mode]) - cnt, search_url)
-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
         return results

10
sickbeard/providers/transmithe_net.py

@@ -47,8 +47,8 @@ class TransmithenetProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):
         if not super(TransmithenetProvider, self)._authorised(
-                logged_in=(lambda x=None: self.has_all_cookies('session')),
-                post_params={'keeplogged': '1', 'login': 'Login'}):
+                logged_in=(lambda y=None: self.has_all_cookies('session')),
+                post_params={'keeplogged': '1', 'form_tmpl': True}):
             return False
         if not self.user_authkey:
             response = helpers.getURL(self.urls['user'], session=self.session, json=True)
@@ -102,13 +102,11 @@ class TransmithenetProvider(generic.TorrentProvider):
                         if title and download_url:
                             items[mode].append((title, download_url, seeders, self._bytesizer(size)))
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
             self._log_search(mode, len(items[mode]) - cnt, search_url)
-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
         return results

56
sickbeard/providers/tvchaosuk.py

@@ -45,7 +45,8 @@ class TVChaosUKProvider(generic.TorrentProvider):
     def _authorised(self, **kwargs):
-        return super(TVChaosUKProvider, self)._authorised(logged_in=(lambda x=None: self.has_all_cookies(pre='c_secure_')))
+        return super(TVChaosUKProvider, self)._authorised(
+            logged_in=(lambda y=None: self.has_all_cookies(pre='c_secure_')))
     def _search_provider(self, search_params, **kwargs):
@@ -83,29 +84,30 @@ class TVChaosUKProvider(generic.TorrentProvider):
                         for tr in torrent_rows[1:]:
                             try:
                                 seeders, leechers, size = [tryInt(n, n) for n in [
-                                    tr.find_all('td')[x].get_text().strip() for x in (-3, -2, -5)]]
+                                    tr.find_all('td')[x].get_text().strip() for x in -3, -2, -5]]
                                 if self._peers_fail(mode, seeders, leechers) \
                                         or self.freeleech and None is tr.find_all('td')[1].find('img', title=rc['fl']):
                                     continue
                                 info = tr.find('a', href=rc['info'])
-                                title = (tr.find('div', attrs={'class': 'tooltip-content'}).get_text() or info.get_text()).strip()
+                                title = (tr.find('div', class_='tooltip-content').get_text() or info.get_text()).strip()
                                 title = re.findall('(?m)(^[^\r\n]+)', title)[0]
-                                download_url = str(tr.find('a', href=rc['get'])['href'])
-                                if not download_url.startswith('http'):
-                                    download_url = self.urls['get'] % download_url.lstrip('/')
-                            except Exception:
+                                download_url = self._link(tr.find('a', href=rc['get'])['href'])
+                            except (StandardError, Exception):
                                 continue
                             if get_detail and title.endswith('...'):
                                 try:
-                                    with BS4Parser(self.get_url('%s%s' % (self.urls['config_provider_home_uri'], info['href'].lstrip(
-                                            '/').replace(self.urls['config_provider_home_uri'], ''))), 'html.parser') as soup_detail:
-                                        title = soup_detail.find('td', attrs={'colspan': '3', 'class': 'thead'}).get_text().strip()
+                                    with BS4Parser(self.get_url('%s%s' % (
+                                            self.urls['config_provider_home_uri'], info['href'].lstrip('/').replace(
+                                                self.urls['config_provider_home_uri'], ''))),
+                                            'html.parser') as soup_detail:
+                                        title = soup_detail.find(
+                                            'td', class_='thead', attrs={'colspan': '3'}).get_text().strip()
                                         title = re.findall('(?m)(^[^\r\n]+)', title)[0]
                                 except IndexError:
                                     continue
-                                except Exception:
+                                except (StandardError, Exception):
                                     get_detail = False
                             try:
@@ -114,11 +116,13 @@ class TVChaosUKProvider(generic.TorrentProvider):
                                 rc_xtras = re.compile('(?i)([. _-]|^)(special|extra)s?\w*([. _-]|$)')
                                 has_special = rc_xtras.findall(has_series[0][1])
                                 if has_special:
-                                    title = has_series[0][0] + rc_xtras.sub(list(
-                                        set(list(has_special[0][0]) + list(has_special[0][2])))[0], has_series[0][1])
+                                    title = has_series[0][0] + rc_xtras.sub(list(set(
+                                        list(has_special[0][0]) + list(has_special[0][2])))[0], has_series[0][1])
                                 title = re.sub('(?i)series', r'Season', title)
-                                title_parts = re.findall('(?im)^(.*?)(?:Season[^\d]*?(\d+).*?)?(?:(?:pack|part|pt)\W*?)?(\d+)[^\d]*?of[^\d]*?(?:\d+)(.*?)$', title)
+                                title_parts = re.findall(
+                                    '(?im)^(.*?)(?:Season[^\d]*?(\d+).*?)?' +
+                                    '(?:(?:pack|part|pt)\W*?)?(\d+)[^\d]*?of[^\d]*?(?:\d+)(.*?)$', title)
                                 if len(title_parts):
                                     new_parts = [tryInt(part, part.strip()) for part in title_parts[0]]
                                     if not new_parts[1]:
@@ -126,24 +130,26 @@ class TVChaosUKProvider(generic.TorrentProvider):
                                     new_parts[2] = ('E%02d', ' Pack %d')[mode in 'Season'] % new_parts[2]
                                     title = '%s.S%02d%s.%s' % tuple(new_parts)
-                                dated = re.findall('(?i)([\(\s]*)((?:\d\d\s)?[adfjmnos]\w{2,}\s+(?:19|20)\d\d)([\)\s]*)', title)
+                                dated = re.findall(
+                                    '(?i)([(\s]*)((?:\d\d\s)?[adfjmnos]\w{2,}\s+(?:19|20)\d\d)([)\s]*)', title)
                                 if dated:
                                     title = title.replace(''.join(dated[0]), '%s%s%s' % (
                                         ('', ' ')[1 < len(dated[0][0])], parse(dated[0][1]).strftime('%Y-%m-%d'),
                                         ('', ' ')[1 < len(dated[0][2])]))
-                                    add_pad = re.findall('((?:19|20)\d\d\-\d\d\-\d\d)([\w\W])', title)
+                                    add_pad = re.findall('((?:19|20)\d\d[-]\d\d[-]\d\d)([\w\W])', title)
                                     if len(add_pad) and add_pad[0][1] not in [' ', '.']:
-                                        title = title.replace(''.join(add_pad[0]), '%s %s' % (add_pad[0][0], add_pad[0][1]))
+                                        title = title.replace(''.join(
+                                            add_pad[0]), '%s %s' % (add_pad[0][0], add_pad[0][1]))
                                     title = re.sub(r'(?sim)(.*?)(?:Episode|Season).\d+.(.*)', r'\1\2', title)
                                 if title and download_url:
                                     items[mode].append((title, download_url, seeders, self._bytesizer(size)))
-                            except Exception:
+                            except (StandardError, Exception):
                                 pass
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
             self._log_search(mode, len(items[mode]) - cnt,
@@ -152,17 +158,16 @@ class TVChaosUKProvider(generic.TorrentProvider):
             if mode in 'Season' and len(items[mode]):
                 break
-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
         return results
     def _season_strings(self, ep_obj, **kwargs):
         return generic.TorrentProvider._season_strings(self, ep_obj, scene=False, prefix='%', sp_detail=(
-            lambda e: [(('', 'Series %(seasonnumber)d%%')[1 < tryInt(e.get('seasonnumber'))] + '%(episodenumber)dof') % e,
-                       'Series %(seasonnumber)d' % e]))
+            lambda e: [
+                (('', 'Series %(seasonnumber)d%%')[1 < tryInt(e.get('seasonnumber'))] + '%(episodenumber)dof') % e,
+                'Series %(seasonnumber)d' % e]))
     def _episode_strings(self, ep_obj, **kwargs):
@@ -174,7 +179,8 @@ class TVChaosUKProvider(generic.TorrentProvider):
     @staticmethod
     def ui_string(key):
-        return 'tvchaosuk_tip' == key and 'has missing quality data so you must add quality Custom/Unknown to any wanted show' or ''
+        return ('tvchaosuk_tip' == key
+                and 'has missing quality data so you must add quality Custom/Unknown to any wanted show' or '')
 provider = TVChaosUKProvider()
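TVChaosUK titles often carry natural-language air dates, which the hunk above rewrites into scene-style ISO dates via dateutil's parse before the later padding fix-ups. A standalone run of the new substitution on a sample title:

    import re
    from dateutil.parser import parse  # the provider already depends on dateutil

    title = 'Some Show (12 Jan 2016) HDTV'
    dated = re.findall('(?i)([(\s]*)((?:\d\d\s)?[adfjmnos]\w{2,}\s+(?:19|20)\d\d)([)\s]*)', title)
    if dated:
        title = title.replace(''.join(dated[0]), '%s%s%s' % (
            ('', ' ')[1 < len(dated[0][0])], parse(dated[0][1]).strftime('%Y-%m-%d'),
            ('', ' ')[1 < len(dated[0][2])]))
    print(title)  # Some Show 2016-01-12 HDTV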

8
sickbeard/providers/womble.py

@@ -35,19 +35,19 @@ class WombleCache(tvcache.TVCache):
     def __init__(self, this_provider):
         tvcache.TVCache.__init__(self, this_provider)
-        self.update_freq = 6  # cache update frequency
+        self.update_freq = 6
     def _cache_data(self):
         result = []
         for section in ['sd', 'hd', 'x264', 'dvd']:
             url = '%srss/?sec=tv-%s&fr=false' % (self.provider.url, section)
-            data = self.getRSSFeed(url)
+            xml_data = self.getRSSFeed(url)
             time.sleep(1.1)
             cnt = len(result)
-            for entry in (data and data.get('entries', []) or []):
+            for entry in (xml_data and xml_data.get('entries', []) or []):
                 if entry.get('title') and entry.get('link', '').startswith('http'):
-                    result.append((entry['title'], entry['link'], None, None))
+                    result.append((entry.get('title'), entry.get('link'), None, None))
             self.provider.log_result(count=len(result) - cnt, url=url)

8
sickbeard/providers/zooqle.py

@@ -82,9 +82,7 @@ class ZooqleProvider(generic.TorrentProvider):
                                 info = td[1].find('a', href=rc['info'])
                                 title = info and info.get_text().strip()
                                 size = td[-3].get_text().strip()
                                 download_url = info and (self.urls['get'] % rc['info'].findall(info['href'])[0])
                             except (AttributeError, TypeError, ValueError, IndexError):
                                 continue
@@ -93,14 +91,12 @@ class ZooqleProvider(generic.TorrentProvider):
             except generic.HaltParseException:
                 pass
-            except Exception:
+            except (StandardError, Exception):
                 logger.log(u'Failed to parse. Traceback: %s' % traceback.format_exc(), logger.ERROR)
             self._log_search(mode, len(items[mode]) - cnt, search_url)
-            self._sort_seeders(mode, items)
-            results = list(set(results + items[mode]))
+            results = self._sort_seeding(mode, results + items[mode])
         return results
