From b7de52229ea5fb14575b820d08146a6dafa544fe Mon Sep 17 00:00:00 2001 From: Janez Troha Date: Wed, 4 Jul 2012 21:01:39 +0200 Subject: [PATCH] Add info for selected TPB domain on start of request --- .../core/providers/torrent/thepiratebay/main.py | 97 ++++++++++------------ 1 file changed, 42 insertions(+), 55 deletions(-) diff --git a/couchpotato/core/providers/torrent/thepiratebay/main.py b/couchpotato/core/providers/torrent/thepiratebay/main.py index 6dd779f..0fbc25d 100644 --- a/couchpotato/core/providers/torrent/thepiratebay/main.py +++ b/couchpotato/core/providers/torrent/thepiratebay/main.py @@ -15,22 +15,18 @@ import time class TPBProxy(object): - """ TPBProxy deals with failed or blocked TPB proxys - it works as Round-robin balancer if user seleced + """ TPBProxy deals with failed or blocked TPB proxies. + It works as round-robin balancer, if user selected or default domain becomes unavailable. """ list = [ ('(Sweden) thepiratebay.se', 'http://thepiratebay.se'), - ('(Sweden) tpb.ipredator.se (ssl)', 'https://tpb.ipredator.se' - ), + ('(Sweden) tpb.ipredator.se (ssl)', 'https://tpb.ipredator.se'), ('(Germany) depiraatbaai.be', 'http://depiraatbaai.be'), - ('(UK) piratereverse.info (ssl)', 'https://piratereverse.info' - ), - ('(UK) tpb.pirateparty.org.uk (ssl)', - 'https://tpb.pirateparty.org.uk'), - ('(Netherlands) argumentomteemigreren.nl', - 'http://argumentomteemigreren.nl'), + ('(UK) piratereverse.info (ssl)', 'https://piratereverse.info'), + ('(UK) tpb.pirateparty.org.uk (ssl)', 'https://tpb.pirateparty.org.uk'), + ('(Netherlands) argumentomteemigreren.nl', 'http://argumentomteemigreren.nl'), ('(direct) 194.71.107.80', 'http://194.71.107.80'), ('(direct) 194.71.107.81', 'http://194.71.107.81'), ('(direct) 194.71.107.82', 'http://194.71.107.82'), @@ -42,8 +38,8 @@ class TPBProxy(object): # compare lists and user/default value, exclude filter - unused = [item for item in TPBProxy.list if item - not in http_failed_disabled and current not in item] + 
unused = [item for item in TPBProxy.list if item not in http_failed_disabled and current + not in item] if len(unused) > 0: @@ -71,29 +67,35 @@ class ThePirateBay(TorrentProvider): ]), ([202], ['dvdr'])] cat_backup_id = 200 + disable_provider = False def __init__(self): super(ThePirateBay, self).__init__() - self.urls = {'test': self.api_domain(), - 'detail': '%s/torrent/%s', + self.urls = {'test': self.api_domain(), 'detail': '%s/torrent/%s', 'search': '%s/search/%s/0/7/%d'} def api_domain(self, url=''): # default domain - domain = ('http://thepiratebay.se', self.conf('domain_for_tpb' - ))[self.conf('domain_for_tpb') != None] - + domain = self.conf('domain_for_tpb', default='http://thepiratebay.se') + self.log.info('Selected domain for this request: %s', domain) host = urlparse(domain).hostname # Clear disabled list for default or user selected host if time expired if self.http_failed_disabled.get(host, 0) > 0: if self.http_failed_disabled[host] > time.time() - 900: + # get new random domain - domain = TPBProxy.get_proxy(self.http_failed_disabled, domain) + + try: + domain = TPBProxy.get_proxy(self.http_failed_disabled, domain) + except Exception, err: + self.disable_provider = True + self.log.error(err) else: + del self.http_failed_request[host] del self.http_failed_disabled[host] @@ -102,15 +104,15 @@ class ThePirateBay(TorrentProvider): def search(self, movie, quality): results = [] - if self.isDisabled(): + if self.isDisabled() or self.disable_provider: return results - cache_key = 'thepiratebay.%s.%s' % (movie['library' - ]['identifier'], quality.get('identifier')) + cache_key = 'thepiratebay.%s.%s' % (movie['library']['identifier'], quality.get('identifier' + )) search_url = self.urls['search'] % (self.api_domain(), - self.for_search(getTitle(movie['library']) + ' ' - + quality['identifier']), - self.getCatId(quality['identifier'])[0]) + self.for_search(getTitle(movie['library']) + ' ' + + quality['identifier']), + self.getCatId(quality['identifier'])[0]) 
self.log.info('searchUrl: %s', search_url) data = self.getCache(cache_key, search_url) @@ -122,8 +124,7 @@ class ThePirateBay(TorrentProvider): try: soup = BeautifulSoup(data) - results_table = soup.find('table', - attrs={'id': 'searchResult'}) + results_table = soup.find('table', attrs={'id': 'searchResult'}) entries = results_table.find_all('tr') for result in entries[1:]: link = result.find(href=re.compile('torrent\/\d+\/')) @@ -131,8 +132,7 @@ class ThePirateBay(TorrentProvider): # Uploaded 06-28 02:27, Size 1.37 GiB, - size = re.search('Size (?P.+),', - unicode(result.select('font.detDesc' + size = re.search('Size (?P.+),', unicode(result.select('font.detDesc' )[0])).group('size') if link and download: new = { @@ -142,42 +142,29 @@ class ThePirateBay(TorrentProvider): 'provider': self.getName(), } - trusted = (0, 10)[result.find('img', - alt=re.compile('Trusted')) != None] - vip = (0, 20)[result.find('img', - alt=re.compile('VIP')) != None] - confirmed = (0, 30)[result.find('img', - alt=re.compile('Helpers')) != None] - moderated = (0, 50)[result.find('img', - alt=re.compile('Moderator')) != None] - is_imdb = \ - self.imdb_match(self.api_domain(link['href']), - movie['library']['identifier']) + trusted = (0, 10)[result.find('img', alt=re.compile('Trusted')) != None] + vip = (0, 20)[result.find('img', alt=re.compile('VIP')) != None] + confirmed = (0, 30)[result.find('img', alt=re.compile('Helpers')) != None] + moderated = (0, 50)[result.find('img', alt=re.compile('Moderator')) != None] + is_imdb = self.imdb_match(self.api_domain(link['href']), movie['library' + ]['identifier']) self.log.info('Name: %s', link.string) - self.log.info('Seeders: %s', result.find_all('td' - )[2].string) - self.log.info('Leechers: %s', result.find_all('td' - )[3].string) + self.log.info('Seeders: %s', result.find_all('td')[2].string) + self.log.info('Leechers: %s', result.find_all('td')[3].string) self.log.info('Size: %s', size) - self.log.info('Score(trusted + vip + moderated): %d' 
- , confirmed + trusted + vip + self.log.info('Score(trusted + vip + moderated): %d', confirmed + trusted + vip + moderated) new['name'] = link.string - new['id'] = re.search('/(?P\d+)/', link['href' - ]).group('id') + new['id'] = re.search('/(?P\d+)/', link['href']).group('id') new['url'] = self.api_domain(link['href']) new['magnet'] = download['href'] new['size'] = self.parseSize(size) - new['seeders'] = int(result.find_all('td' - )[2].string) - new['leechers'] = int(result.find_all('td' - )[3].string) - new['extra_score'] = lambda x: confirmed + trusted \ - + vip + moderated - new['score'] = fireEvent('score.calculate', new, - movie, single=True) + new['seeders'] = int(result.find_all('td')[2].string) + new['leechers'] = int(result.find_all('td')[3].string) + new['extra_score'] = lambda x: confirmed + trusted + vip + moderated + new['score'] = fireEvent('score.calculate', new, movie, single=True) is_correct_movie = fireEvent( 'searcher.correct_movie',