
Merge pull request #219 from JackDandy/feature/ChangeFreshOnTV

Change FreshOnTv provider secure URLs, add Cloudflare logging, prevent vacant cookie tracebacks
JackDandy 10 years ago
commit d6dc97160c
  1. CHANGES.md (1 line changed)
  2. sickbeard/providers/freshontv.py (103 lines changed)

CHANGES.md

@@ -74,6 +74,7 @@
 * Add 404 error page
 * Change SCC URLs to remove redirection overhead
 * Change TorrentBytes login parameter in line with site change
+* Change FreshOnTv login parameter and use secure URLs, add logging of Cloudflare blocking and prevent vacant cookie tracebacks
 [develop changelog]
 * Change uT params from unicode to str.format as magnet URLs worked but sending files in POST bodies failed

sickbeard/providers/freshontv.py

@@ -19,10 +19,9 @@
 import re
 import traceback
 import datetime
-import urlparse
 import sickbeard
 import generic
-from sickbeard.common import Quality, cpu_presets
+from sickbeard.common import Quality
 from sickbeard import logger
 from sickbeard import tvcache
 from sickbeard import db
@@ -30,7 +29,6 @@ from sickbeard import classes
 from sickbeard import helpers
 from sickbeard import show_name_helpers
 from sickbeard.exceptions import ex, AuthException
-from sickbeard import clients
 from lib import requests
 from lib.requests import exceptions
 from sickbeard.bs4_parser import BS4Parser
@@ -39,16 +37,15 @@ from sickbeard.helpers import sanitizeSceneName
 class FreshOnTVProvider(generic.TorrentProvider):

-    urls = {'base_url': 'http://freshon.tv/',
-            'login': 'http://freshon.tv/login.php?action=makelogin',
-            'detail': 'http://freshon.tv/details.php?id=%s',
-            'search': 'http://freshon.tv/browse.php?incldead=%s&words=0&cat=0&search=%s',
-            'download': 'http://freshon.tv/download.php?id=%s&type=torrent',
-            }
+    urls = {'base_url': 'https://freshon.tv/',
+            'login': 'https://freshon.tv/login.php?action=makelogin',
+            'detail': 'https://freshon.tv/details.php?id=%s',
+            'search': 'https://freshon.tv/browse.php?incldead=%s&words=0&cat=0&search=%s',
+            'download': 'https://freshon.tv/download.php?id=%s&type=torrent'}

     def __init__(self):
-        generic.TorrentProvider.__init__(self, "FreshOnTV")
+        generic.TorrentProvider.__init__(self, 'FreshOnTV')

         self.supportsBacklog = True
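
Aside (not part of the diff): a minimal sketch of how the two %s placeholders in the new secure 'search' URL are filled further down in _doSearch; the freeleech flag and query string here are illustrative values only.

# Illustrative values; mirrors the interpolation shown later in this diff.
urls = {'search': 'https://freshon.tv/browse.php?incldead=%s&words=0&cat=0&search=%s'}
freeleech = '3'                      # '3' = freeleech results only, '0' = all results
search_string = 'Some.Show.S01E02'   # hypothetical query string
searchURL = urls['search'] % (freeleech, search_string)
# -> 'https://freshon.tv/browse.php?incldead=3&words=0&cat=0&search=Some.Show.S01E02'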
@@ -81,7 +78,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
     def _checkAuth(self):

         if not self.username or not self.password:
-            raise AuthException("Your authentication credentials for " + self.name + " are missing, check your config.")
+            raise AuthException('Your authentication credentials for %s are missing, check your config.' % self.name)

         return True
@@ -91,13 +88,12 @@ class FreshOnTVProvider(generic.TorrentProvider):
         if self._uid and self._hash:
             requests.utils.add_dict_to_cookiejar(self.session.cookies, self.cookies)
         else:
             login_params = {'username': self.username,
                             'password': self.password,
-                            'login': 'submit'
-                            }
+                            'login': 'Do it!'}

             if not self.session:
                 self.session = requests.Session()
@@ -105,24 +101,30 @@ class FreshOnTVProvider(generic.TorrentProvider):
             try:
                 response = self.session.post(self.urls['login'], data=login_params, timeout=30, verify=False)
             except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError), e:
-                logger.log(u'Unable to connect to ' + self.name + ' provider: ' + ex(e), logger.ERROR)
+                logger.log(u'Unable to connect to %s provider: %s' % (self.name, ex(e)), logger.ERROR)
                 return False

             if re.search('Username does not exist in the userbase or the account is not confirmed yet.', response.text):
-                logger.log(u'Your authentication credentials for ' + self.name + ' are incorrect, check your config.', logger.ERROR)
+                logger.log(u'Invalid username or password for %s, check your config.' % self.name, logger.ERROR)
                 return False

+            if re.search('DDoS protection by CloudFlare', response.text):
+                logger.log(u'Unable to login to %s due to CloudFlare DDoS javascript check.' % self.name, logger.ERROR)
+                return False
+
-            if requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] and requests.utils.dict_from_cookiejar(self.session.cookies)['pass']:
-                self._uid = requests.utils.dict_from_cookiejar(self.session.cookies)['uid']
-                self._hash = requests.utils.dict_from_cookiejar(self.session.cookies)['pass']
-
-                self.cookies = {'uid': self._uid,
-                                'pass': self._hash
-                                }
-                return True
-            else:
-                logger.log(u'Unable to obtain cookie for FreshOnTV', logger.ERROR)
-                return False
+            try:
+                if requests.utils.dict_from_cookiejar(self.session.cookies)['uid'] and requests.utils.dict_from_cookiejar(self.session.cookies)['pass']:
+                    self._uid = requests.utils.dict_from_cookiejar(self.session.cookies)['uid']
+                    self._hash = requests.utils.dict_from_cookiejar(self.session.cookies)['pass']
+
+                    self.cookies = {'uid': self._uid,
+                                    'pass': self._hash}
+                    return True
+            except:
+                pass
+
+            logger.log(u'Unable to obtain cookie for FreshOnTV', logger.ERROR)
+            return False

     def _get_season_search_strings(self, ep_obj):
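
Aside (not part of the diff): a minimal standalone sketch of the "vacant cookie" failure this hunk guards against. When the login fails or Cloudflare intercepts the request, the 'uid' and 'pass' cookies are never set, so a bare dict lookup raises KeyError; wrapping the lookup lets the code fall through to the logged error instead of an unhandled traceback.

import requests

session = requests.Session()
cookies = requests.utils.dict_from_cookiejar(session.cookies)  # empty dict if login never completed

# cookies['uid'] here would raise KeyError -- the traceback being prevented.
try:
    uid, pass_hash = cookies['uid'], cookies['pass']
except KeyError:
    uid = pass_hash = None  # fall through to the 'Unable to obtain cookie' log and return False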
@@ -131,7 +133,7 @@ class FreshOnTVProvider(generic.TorrentProvider):
             if ep_obj.show.air_by_date or ep_obj.show.sports:
                 ep_string = show_name + '.' + str(ep_obj.airdate).split('-')[0]
             elif ep_obj.show.anime:
-                ep_string = show_name + '.' + "%d" % ep_obj.scene_absolute_number
+                ep_string = show_name + '.' + '%d' % ep_obj.scene_absolute_number
             else:
                 ep_string = show_name + '.S%02d' % int(ep_obj.scene_season)  #1) showName SXX
@@ -149,18 +151,18 @@ class FreshOnTVProvider(generic.TorrentProvider):
         if self.show.air_by_date:
             for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
                 ep_string = sanitizeSceneName(show_name) + ' ' + \
                             str(ep_obj.airdate).replace('-', '|')
                 search_string['Episode'].append(ep_string)
         elif self.show.sports:
             for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
                 ep_string = sanitizeSceneName(show_name) + ' ' + \
                             str(ep_obj.airdate).replace('-', '|') + '|' + \
                             ep_obj.airdate.strftime('%b')
                 search_string['Episode'].append(ep_string)
         elif self.show.anime:
             for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
                 ep_string = sanitizeSceneName(show_name) + ' ' + \
-                            "%i" % int(ep_obj.scene_absolute_number)
+                            '%i' % int(ep_obj.scene_absolute_number)
                 search_string['Episode'].append(ep_string)
         else:
             for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
@@ -180,18 +182,18 @@ class FreshOnTVProvider(generic.TorrentProvider):
         freeleech = '3' if self.freeleech else '0'

         if not self._doLogin():
-            return []
+            return results

         for mode in search_params.keys():
             for search_string in search_params[mode]:
+                search_string, url = self._get_title_and_url([search_string, self.urls['search'], '', '', ''])
                 if isinstance(search_string, unicode):
                     search_string = unidecode(search_string)

                 searchURL = self.urls['search'] % (freeleech, search_string)
-                logger.log(u"Search string: " + searchURL, logger.DEBUG)
+                logger.log(u'Search string: ' + searchURL, logger.DEBUG)

                 # returns top 15 results by default, expandable in user profile to 100
                 data = self.getURL(searchURL)
@@ -199,13 +201,15 @@ class FreshOnTVProvider(generic.TorrentProvider):
                     continue

                 try:
-                    with BS4Parser(data, features=["html5lib", "permissive"]) as html:
+                    with BS4Parser(data, features=['html5lib', 'permissive']) as html:
                         torrent_table = html.find('table', attrs={'class': 'frame'})
-                        torrent_rows = torrent_table.findChildren('tr') if torrent_table else []
+                        torrent_rows = []
+                        if torrent_table:
+                            torrent_rows = torrent_table.findChildren('tr')

-                        #Continue only if one Release is found
-                        if len(torrent_rows) < 2:
-                            logger.log(u"The data returned from " + self.name + " does not contain any torrents",
+                        # Continue only if one Release is found
+                        if 2 > len(torrent_rows):
+                            logger.log(u'The data returned from %s does not contain any torrents' % self.name,
                                        logger.DEBUG)
                             continue
@@ -213,14 +217,13 @@ class FreshOnTVProvider(generic.TorrentProvider):
                         for result in torrent_rows[1:]:
                             cells = result.findChildren('td')

-                            link = cells[1].find('a', attrs = {'class': 'torrent_name_link'})
+                            link = cells[1].find('a', attrs={'class': 'torrent_name_link'})

-                            #skip if torrent has been nuked due to poor quality
-                            if cells[1].find('img', alt='Nuked') != None:
+                            # skip if torrent has been nuked due to poor quality
+                            if None is not cells[1].find('img', alt='Nuked'):
                                 continue

                             torrent_id = link['href'].replace('/details.php?id=', '')

                             try:
                                 if link.has_key('title'):
                                     title = cells[1].find('a', {'class': 'torrent_name_link'})['title']
@@ -234,22 +237,22 @@ class FreshOnTVProvider(generic.TorrentProvider):
                             except (AttributeError, TypeError):
                                 continue

-                            #Filter unseeded torrent
-                            if mode != 'RSS' and (seeders < self.minseed or leechers < self.minleech):
+                            # Filter unseeded torrent
+                            if 'RSS' != mode and (self.minseed > seeders or self.minleech > leechers):
                                 continue

                             if not title or not download_url:
                                 continue

                             item = title, download_url, id, seeders, leechers
-                            logger.log(u"Found result: " + title + "(" + searchURL + ")", logger.DEBUG)
+                            logger.log(u'Found result: %s (%s)' % (title, searchURL), logger.DEBUG)

                             items[mode].append(item)

                 except Exception, e:
-                    logger.log(u"Failed parsing " + self.name + " Traceback: " + traceback.format_exc(), logger.ERROR)
+                    logger.log(u'Failed parsing %s Traceback: %s' % (self.name, traceback.format_exc()), logger.ERROR)

-            #For each search mode sort all the items by seeders
+            # For each search mode sort all the items by seeders
             items[mode].sort(key=lambda tup: tup[3], reverse=True)

             results += items[mode]
@@ -260,10 +263,14 @@ class FreshOnTVProvider(generic.TorrentProvider):
         title, url, id, seeders, leechers = item

+        if title:
+            title += u''
+            title = re.sub(r'\s+', '.', title)
+
         if url:
             url = str(url).replace('&amp;', '&')

-        return (title, url)
+        return title, url

     def findPropers(self, search_date=datetime.datetime.today()):
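
Aside (not part of the diff): the new lines in _get_title_and_url collapse runs of whitespace in a scraped title into dots before the result is handed on; a small sketch with an illustrative title value.

import re

title = 'Some Show S01E02 720p HDTV'  # hypothetical scraped title
if title:
    title = re.sub(r'\s+', '.', title)
# -> 'Some.Show.S01E02.720p.HDTV'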
@@ -282,9 +289,9 @@ class FreshOnTVProvider(generic.TorrentProvider):
             return []

         for sqlshow in sqlResults:
-            self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow["showid"]))
+            self.show = helpers.findCertainShow(sickbeard.showList, int(sqlshow['showid']))
             if self.show:
-                curEp = self.show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))
+                curEp = self.show.getEpisode(int(sqlshow['season']), int(sqlshow['episode']))

                 searchString = self._get_episode_search_strings(curEp, add_string='PROPER|REPACK')
