diff --git a/.gitignore b/.gitignore index 1d21d2e..78fda6d 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,5 @@ nosetests.xml # Visual Studio /.vs + +.DS_Store diff --git a/couchpotato/core/downloaders/transmission.py b/couchpotato/core/downloaders/transmission.py index f40b955..4bb7dbe 100644 --- a/couchpotato/core/downloaders/transmission.py +++ b/couchpotato/core/downloaders/transmission.py @@ -143,12 +143,21 @@ class Transmission(DownloaderBase): log.debug('name=%s / id=%s / downloadDir=%s / hashString=%s / percentDone=%s / status=%s / isStalled=%s / eta=%s / uploadRatio=%s / isFinished=%s / incomplete-dir-enabled=%s / incomplete-dir=%s', (torrent['name'], torrent['id'], torrent['downloadDir'], torrent['hashString'], torrent['percentDone'], torrent['status'], torrent.get('isStalled', 'N/A'), torrent['eta'], torrent['uploadRatio'], torrent['isFinished'], session['incomplete-dir-enabled'], session['incomplete-dir'])) + """ + https://trac.transmissionbt.com/browser/branches/2.8x/libtransmission/transmission.h#L1853 + 0 = Torrent is stopped + 1 = Queued to check files + 2 = Checking files + 3 = Queued to download + 4 = Downloading + 5 = Queued to seed + 6 = Seeding + """ + status = 'busy' if torrent.get('isStalled') and not torrent['percentDone'] == 1 and self.conf('stalled_as_failed'): status = 'failed' - elif torrent['status'] == 0 and torrent['percentDone'] == 1: - status = 'completed' - elif torrent['status'] == 16 and torrent['percentDone'] == 1: + elif torrent['status'] == 0 and torrent['percentDone'] == 1 and torrent['isFinished']: status = 'completed' elif torrent['status'] in [5, 6]: status = 'seeding' diff --git a/couchpotato/core/media/_base/providers/torrent/bithdtv.py b/couchpotato/core/media/_base/providers/torrent/bithdtv.py index 149d7c1..a3eb1d9 100644 --- a/couchpotato/core/media/_base/providers/torrent/bithdtv.py +++ b/couchpotato/core/media/_base/providers/torrent/bithdtv.py @@ -13,9 +13,6 @@ log = CPLog(__name__) class Base(TorrentProvider): urls = { - 'test': 'https://www.bit-hdtv.com/', - 'login': 'https://www.bit-hdtv.com/takelogin.php', - 'login_check': 'https://www.bit-hdtv.com/messages.php', 'detail': 'https://www.bit-hdtv.com/details.php?id=%s', 'search': 'https://www.bit-hdtv.com/torrents.php?', 'download': 'https://www.bit-hdtv.com/download.php?id=%s', @@ -31,7 +28,7 @@ class Base(TorrentProvider): url = "%s&%s" % (self.urls['search'], query) - data = self.getHTMLData(url) + data = self.getHTMLData(url, headers = self.getRequestHeaders()) if data: # Remove BiT-HDTV's output garbage so outdated BS4 versions successfully parse the HTML @@ -42,11 +39,12 @@ class Base(TorrentProvider): html = BeautifulSoup(data, 'html.parser') try: - result_tables = html.find_all('table', attrs = {'width': '750', 'class': ''}) + result_tables = html.find_all('table', attrs = {'width': '800', 'class': ''}) if result_tables is None: return - result_table = result_tables[1] + # Take first result + result_table = result_tables[0] if result_table is None: return @@ -72,10 +70,10 @@ class Base(TorrentProvider): except: log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc())) - def getLoginParams(self): + def getRequestHeaders(self): + cookies = 'h_sl={};h_sp={};h_su={}'.format(self.conf('cookiesettingsl') or '', self.conf('cookiesettingsp') or '', self.conf('cookiesettingsu') or '') return { - 'username': self.conf('username'), - 'password': self.conf('password'), + 'Cookie': cookies } def getMoreInfo(self, item): @@ -87,11 +85,13 @@ class 
Base(TorrentProvider): item['description'] = description return item - def loginSuccess(self, output): - return 'logout.php' in output.lower() - - loginCheckSuccess = loginSuccess + def download(self, url = '', nzb_id = ''): + try: + return self.urlopen(url, headers=self.getRequestHeaders()) + except: + log.error('Failed getting release from %s: %s', (self.getName(), traceback.format_exc())) + return 'try_next' config = [{ 'name': 'bithdtv', @@ -110,13 +110,22 @@ config = [{ 'default': False, }, { - 'name': 'username', + 'name': 'cookiesettingsl', + 'label': 'Cookies (h_sl)', + 'default': '', + 'description': 'Cookie h_sl from session', + }, + { + 'name': 'cookiesettingsp', + 'label': 'Cookies (h_sp)', 'default': '', + 'description': 'Cookie h_sp from session', }, { - 'name': 'password', + 'name': 'cookiesettingsu', + 'label': 'Cookies (h_su)', 'default': '', - 'type': 'password', + 'description': 'Cookie h_su from session', }, { 'name': 'seed_ratio', diff --git a/couchpotato/core/media/_base/providers/torrent/passthepopcorn.py b/couchpotato/core/media/_base/providers/torrent/passthepopcorn.py index c96afd1..6a13ff4 100644 --- a/couchpotato/core/media/_base/providers/torrent/passthepopcorn.py +++ b/couchpotato/core/media/_base/providers/torrent/passthepopcorn.py @@ -73,6 +73,8 @@ class Base(TorrentProvider): torrentdesc += ' Scene' if self.conf('prefer_scene'): torrentscore += 2000 + if self.conf('no_scene'): + torrentscore -= 2000 if 'RemasterTitle' in torrent and torrent['RemasterTitle']: torrentdesc += self.htmlToASCII(' %s' % torrent['RemasterTitle']) @@ -258,6 +260,14 @@ config = [{ 'description': 'Favors scene-releases over non-scene releases.' }, { + 'name': 'no_scene', + 'advanced': True, + 'type': 'bool', + 'label': 'Reject scene', + 'default': 0, + 'description': 'Reject scene-releases over non-scene releases.' 
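
Reviewer note on the BiT-HDTV change above: the username/password login is replaced by three session cookies (h_sl, h_sp, h_su) that get sent as a `Cookie` header on every search and download request. A rough standalone sketch of that header construction, outside the plugin API (the function name and the `requests` usage are illustrative, not part of the PR):

```python
# Standalone sketch of the new BiT-HDTV cookie handling (hypothetical helper,
# not the plugin itself): three session cookie values are joined into a single
# Cookie header and attached to every search/download request.
import requests


def build_bithdtv_headers(h_sl, h_sp, h_su):
    # Mirrors getRequestHeaders() in the diff: missing values fall back to ''.
    cookies = 'h_sl={};h_sp={};h_su={}'.format(h_sl or '', h_sp or '', h_su or '')
    return {'Cookie': cookies}


if __name__ == '__main__':
    headers = build_bithdtv_headers('abc123', 'def456', 'ghi789')
    # Every provider call now passes these headers instead of posting a login form.
    resp = requests.get('https://www.bit-hdtv.com/torrents.php?search=example',
                        headers=headers, timeout=30)
    print(resp.status_code)
```
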
+ }, + { 'name': 'require_approval', 'advanced': True, 'type': 'bool', diff --git a/couchpotato/core/media/_base/providers/torrent/torrentz.py b/couchpotato/core/media/_base/providers/torrent/torrentz.py index 8412a8d..96e8025 100644 --- a/couchpotato/core/media/_base/providers/torrent/torrentz.py +++ b/couchpotato/core/media/_base/providers/torrent/torrentz.py @@ -15,25 +15,19 @@ log = CPLog(__name__) class Base(TorrentMagnetProvider, RSS): urls = { - 'detail': 'https://torrentz.eu/%s', - 'search': 'https://torrentz.eu/feed?q=%s', - 'verified_search': 'https://torrentz.eu/feed_verified?q=%s' + 'detail': 'https://torrentz2.eu/%s', + 'search': 'https://torrentz2.eu/feed?f=%s' } http_time_between_calls = 0 def _searchOnTitle(self, title, media, quality, results): - search_url = self.urls['verified_search'] if self.conf('verified_only') else self.urls['search'] + search_url = self.urls['search'] # Create search parameters search_params = self.buildUrl(title, media, quality) - smin = quality.get('size_min') - smax = quality.get('size_max') - if smin and smax: - search_params += ' size %sm - %sm' % (smin, smax) - min_seeds = tryInt(self.conf('minimal_seeds')) if min_seeds: search_params += ' seed > %s' % (min_seeds - 1) @@ -52,17 +46,24 @@ class Base(TorrentMagnetProvider, RSS): magnet = splitString(detail_url, '/')[-1] magnet_url = 'magnet:?xt=urn:btih:%s&dn=%s&tr=%s' % (magnet.upper(), tryUrlencode(name), tryUrlencode('udp://tracker.openbittorrent.com/announce')) - reg = re.search('Size: (?P\d+) MB Seeds: (?P[\d,]+) Peers: (?P[\d,]+)', six.text_type(description)) + reg = re.search('Size: (?P\d+) (?P[KMG]B) Seeds: (?P[\d,]+) Peers: (?P[\d,]+)', six.text_type(description)) size = reg.group('size') + unit = reg.group('unit') seeds = reg.group('seeds').replace(',', '') peers = reg.group('peers').replace(',', '') + multiplier = 1 + if unit == 'GB': + multiplier = 1000 + elif unit == 'KB': + multiplier = 0 + results.append({ 'id': magnet, 'name': six.text_type(name), 'url': magnet_url, 'detail_url': detail_url, - 'size': tryInt(size), + 'size': tryInt(size)*multiplier, 'seeders': tryInt(seeds), 'leechers': tryInt(peers), }) @@ -78,7 +79,7 @@ config = [{ 'tab': 'searcher', 'list': 'torrent_providers', 'name': 'Torrentz', - 'description': 'Torrentz is a free, fast and powerful meta-search engine. Torrentz', + 'description': 'Torrentz.eu was a free, fast and powerful meta-search engine combining results from dozens of search engines, Torrentz2.eu is trying to replace it. Torrentz2', 'wizard': True, 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAQklEQVQ4y2NgAALjtJn/ycEMlGiGG0IVAxiwAKzOxaKGARcgxgC8YNSAwWoAzuRMjgsIugqfAUR5CZcBRIcHsWEAADSA96Ig020yAAAAAElFTkSuQmCC', 'options': [ @@ -88,13 +89,6 @@ config = [{ 'default': True }, { - 'name': 'verified_only', - 'type': 'bool', - 'default': True, - 'advanced': True, - 'description': 'Only search verified releases', - }, - { 'name': 'minimal_seeds', 'type': 'int', 'default': 1, diff --git a/couchpotato/core/media/_base/providers/torrent/yts.py b/couchpotato/core/media/_base/providers/torrent/yts.py index 188f9e5..674adc3 100644 --- a/couchpotato/core/media/_base/providers/torrent/yts.py +++ b/couchpotato/core/media/_base/providers/torrent/yts.py @@ -11,8 +11,8 @@ class Base(TorrentMagnetProvider): # Only qualities allowed: 720p/1080p/3D - the rest will fail. 
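
For the Torrentz2 change above: the new feed reports sizes in KB/MB/GB while CouchPotato expects megabytes, hence the unit multiplier. A minimal sketch of that normalisation, assuming the feed description keeps the `Size: ... Seeds: ... Peers: ...` shape used by the regex in the diff:

```python
# Sketch of the Torrentz2 feed size normalisation: the RSS description reports
# size in KB/MB/GB and the result list needs megabytes.
import re

SIZE_RE = re.compile(r'Size: (?P<size>\d+) (?P<unit>[KMG]B) '
                     r'Seeds: (?P<seeds>[\d,]+) Peers: (?P<peers>[\d,]+)')


def parse_feed_description(description):
    match = SIZE_RE.search(description)
    if not match:
        return None
    size = int(match.group('size'))
    unit = match.group('unit')
    # Same policy as the diff: GB -> x1000, KB -> effectively 0 MB, MB -> as-is.
    multiplier = {'GB': 1000, 'MB': 1, 'KB': 0}[unit]
    return {
        'size': size * multiplier,                      # megabytes
        'seeders': int(match.group('seeds').replace(',', '')),
        'leechers': int(match.group('peers').replace(',', '')),
    }


print(parse_feed_description('Size: 2 GB Seeds: 1,204 Peers: 317'))
# -> {'size': 2000, 'seeders': 1204, 'leechers': 317}
```
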
# All YTS.ag torrents are verified urls = { - 'detail': 'https://yts.ag/api#list_movies', - 'search': 'https://yts.ag/api/v2/list_movies.json?query_term=%s&limit=%s&page=%s' + 'detail': 'https://yts.am/api#list_movies', + 'search': 'https://yts.am/api/v2/list_movies.json?query_term=%s&limit=%s&page=%s' } def _search(self, movie, quality, results): diff --git a/couchpotato/core/media/movie/_base/static/details.js b/couchpotato/core/media/movie/_base/static/details.js index 127e260..dd156ba 100644 --- a/couchpotato/core/media/movie/_base/static/details.js +++ b/couchpotato/core/media/movie/_base/static/details.js @@ -77,7 +77,6 @@ var MovieDetails = new Class({ 'class': parent.get('title') == t ? 'icon-ok' : '' })); }); - }, addSection: function(name, section_el){ @@ -101,7 +100,7 @@ var MovieDetails = new Class({ var self = this; self.el.addClass('show'); - + document.onkeyup = self.keyup.bind(self); //if(!App.mobile_screen){ // $(self.content).getElements('> .head, > .section').each(function(section, nr){ // dynamics.css(section, { @@ -130,12 +129,19 @@ var MovieDetails = new Class({ }, + keyup: function(e) { + if (e.keyCode == 27 /* Esc */) { + this.close(); + } + }, + close: function(){ var self = this; var ended = function() { self.el.dispose(); self.overlay.removeEventListener('transitionend', ended); + document.onkeyup = null; }; self.overlay.addEventListener('transitionend', ended, false); @@ -165,5 +171,4 @@ var MovieDetails = new Class({ App.removeEvent('history.push', self.outer_click); } - }); diff --git a/couchpotato/core/media/movie/providers/automation/letterboxd.py b/couchpotato/core/media/movie/providers/automation/letterboxd.py index e1fcddb..072c416 100644 --- a/couchpotato/core/media/movie/providers/automation/letterboxd.py +++ b/couchpotato/core/media/movie/providers/automation/letterboxd.py @@ -13,7 +13,7 @@ autoload = 'Letterboxd' class Letterboxd(Automation): - url = 'http://letterboxd.com/%s/watchlist/' + url = 'http://letterboxd.com/%s/watchlist/page/%d/' pattern = re.compile(r'(.*)\((\d*)\)') interval = 1800 @@ -46,18 +46,30 @@ class Letterboxd(Automation): if not enablers[index]: continue - soup = BeautifulSoup(self.getHTMLData(self.url % username)) + soup = BeautifulSoup(self.getHTMLData(self.url % (username, 1))) - for movie in soup.find_all('li', attrs = {'class': 'poster-container'}): - img = movie.find('img', movie) - title = img.get('alt') + pagination = soup.find_all('li', attrs={'class': 'paginate-page'}) + number_of_pages = tryInt(pagination[-1].find('a').get_text()) if pagination else 1 + pages = range(1, number_of_pages) - movies.append({ - 'title': title - }) + for page in pages: + soup = BeautifulSoup(self.getHTMLData(self.url % (username, page))) + movies += self.getMoviesFromHTML(soup) return movies + def getMoviesFromHTML(self, html): + movies = [] + + for movie in html.find_all('li', attrs={'class': 'poster-container'}): + img = movie.find('img') + title = img.get('alt') + + movies.append({ + 'title': title + }) + + return movies config = [{ 'name': 'letterboxd', diff --git a/couchpotato/core/media/movie/providers/metadata/xbmc.py b/couchpotato/core/media/movie/providers/metadata/xbmc.py index 3031403..33febf6 100644 --- a/couchpotato/core/media/movie/providers/metadata/xbmc.py +++ b/couchpotato/core/media/movie/providers/metadata/xbmc.py @@ -3,6 +3,7 @@ import os import re import traceback import xml.dom.minidom +import time from couchpotato.core.media.movie.providers.metadata.base import MovieMetaData from couchpotato.core.helpers.encoding 
import toUnicode @@ -92,7 +93,7 @@ class XBMC(MovieMetaData): pass # Other values - types = ['year', 'originaltitle:original_title', 'outline', 'plot', 'tagline', 'premiered:released'] + types = ['year', 'originaltitle:original_title', 'outline', 'plot', 'tagline'] for type in types: if ':' in type: @@ -107,6 +108,14 @@ class XBMC(MovieMetaData): except: pass + # Release date + try: + if movie_info.get('released'): + el = SubElement(nfoxml, 'premiered') + el.text = time.strftime('%Y:%m:%d', time.strptime(movie_info.get('released'), '%d %b %Y')) + except: + log.debug('Failed to parse release date %s: %s', movie_info.get('released'), traceback.format_exc()) + # Rating for rating_type in ['imdb', 'rotten', 'tmdb']: try: diff --git a/couchpotato/core/media/movie/providers/nzb/binsearch.py b/couchpotato/core/media/movie/providers/nzb/binsearch.py index d6f4852..b3e59c0 100644 --- a/couchpotato/core/media/movie/providers/nzb/binsearch.py +++ b/couchpotato/core/media/movie/providers/nzb/binsearch.py @@ -21,7 +21,7 @@ class BinSearch(MovieProvider, Base): 'adv_sort': 'date', 'adv_col': 'on', 'adv_nfo': 'on', - 'minsize': quality.get('size_min'), - 'maxsize': quality.get('size_max'), + 'xminsize': quality.get('size_min'), + 'xmaxsize': quality.get('size_max'), }) return query diff --git a/couchpotato/core/media/movie/providers/torrent/alpharatio.py b/couchpotato/core/media/movie/providers/torrent/alpharatio.py index e7f39c1..762ef47 100644 --- a/couchpotato/core/media/movie/providers/torrent/alpharatio.py +++ b/couchpotato/core/media/movie/providers/torrent/alpharatio.py @@ -19,7 +19,7 @@ class AlphaRatio(MovieProvider, Base): cat_ids = [ ([7, 9], ['bd50']), - ([7, 9], ['720p', '1080p']), + ([7, 9], ['720p', '1080p', '2160p']), ([6, 8], ['dvdr']), ([6, 8], ['brrip', 'dvdrip']), ] diff --git a/couchpotato/core/media/movie/providers/torrent/torrentleech.py b/couchpotato/core/media/movie/providers/torrent/torrentleech.py index eea74f8..bfa5cd1 100644 --- a/couchpotato/core/media/movie/providers/torrent/torrentleech.py +++ b/couchpotato/core/media/movie/providers/torrent/torrentleech.py @@ -11,12 +11,14 @@ autoload = 'TorrentLeech' class TorrentLeech(MovieProvider, Base): cat_ids = [ - ([13], ['720p', '1080p', 'bd50']), + ([41, 47], ['2160p']), + ([13, 14, 37, 43], ['720p', '1080p']), + ([13], ['bd50']), ([8], ['cam']), ([9], ['ts', 'tc']), - ([10], ['r5', 'scr']), + ([10, 11, 37], ['r5', 'scr']), ([11], ['dvdrip']), - ([13, 14], ['brrip']), + ([13, 14, 37, 43], ['brrip']), ([12], ['dvdr']), ] diff --git a/couchpotato/core/media/movie/providers/torrent/torrentz.py b/couchpotato/core/media/movie/providers/torrent/torrentz.py index 011ec43..d1294e6 100644 --- a/couchpotato/core/media/movie/providers/torrent/torrentz.py +++ b/couchpotato/core/media/movie/providers/torrent/torrentz.py @@ -11,4 +11,4 @@ autoload = 'Torrentz' class Torrentz(MovieProvider, Base): def buildUrl(self, title, media, quality): - return tryUrlencode('"%s %s"' % (title, media['info']['year'])) \ No newline at end of file + return tryUrlencode('%s %s' % (title, media['info']['year'])) diff --git a/couchpotato/core/media/movie/providers/userscript/filmstarts.py b/couchpotato/core/media/movie/providers/userscript/filmstarts.py index 4e61f29..5201ce0 100644 --- a/couchpotato/core/media/movie/providers/userscript/filmstarts.py +++ b/couchpotato/core/media/movie/providers/userscript/filmstarts.py @@ -1,5 +1,6 @@ from bs4 import BeautifulSoup from couchpotato.core.media._base.providers.userscript.base import UserscriptBase +import re 
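
For the Letterboxd change above: the watchlist is now fetched page by page, with the page count read from the pagination markup. A sketch of that flow follows; note that `range(1, number_of_pages)` as written in the diff stops one page short (and yields nothing for a single-page watchlist), so the inclusive bound below is what the pagination implies. The `requests` usage stands in for `getHTMLData()`:

```python
# Sketch of the paginated Letterboxd watchlist scrape.
from bs4 import BeautifulSoup
import requests

URL = 'http://letterboxd.com/%s/watchlist/page/%d/'


def get_watchlist(username):
    movies = []
    first = BeautifulSoup(requests.get(URL % (username, 1)).text, 'html.parser')

    pagination = first.find_all('li', attrs={'class': 'paginate-page'})
    number_of_pages = int(pagination[-1].find('a').get_text()) if pagination else 1

    # Inclusive upper bound so the last page is not skipped.
    for page in range(1, number_of_pages + 1):
        soup = first if page == 1 else BeautifulSoup(
            requests.get(URL % (username, page)).text, 'html.parser')
        for poster in soup.find_all('li', attrs={'class': 'poster-container'}):
            movies.append({'title': poster.find('img').get('alt')})
    return movies
```
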
autoload = 'Filmstarts' @@ -15,16 +16,16 @@ class Filmstarts(UserscriptBase): return html = BeautifulSoup(data) - table = html.find("table", attrs={"class": "table table-standard thead-standard table-striped_2 fs11"}) + table = html.find("section", attrs={"class": "section ovw ovw-synopsis", "id": "synopsis-details"}) - if table.find(text='Originaltitel'): + if table.find(text=re.compile('Originaltitel')): #some trailing whitespaces on some pages # Get original film title from the table specified above - name = table.find("div", text="Originaltitel").parent.parent.parent.td.text + name = name = table.find("span", text=re.compile("Originaltitel")).findNext('h2').text else: # If none is available get the title from the meta data name = html.find("meta", {"property":"og:title"})['content'] # Year of production is not available in the meta data, so get it from the table - year = table.find(text="Produktionsjahr").parent.parent.next_sibling.text + year = table.find("span", text=re.compile("Produktionsjahr")).findNext('span').text return self.search(name, year) diff --git a/couchpotato/core/plugins/log/static/log.js b/couchpotato/core/plugins/log/static/log.js index a0b1f7c..997ec87 100644 --- a/couchpotato/core/plugins/log/static/log.js +++ b/couchpotato/core/plugins/log/static/log.js @@ -250,7 +250,7 @@ Page.Log = new Class({ new Element('a.button', { 'target': '_blank', 'text': 'the contributing guide', - 'href': 'https://github.com/CouchPotato/CouchPotatoServer/blob/develop/contributing.md' + 'href': 'https://github.com/CouchPotato/CouchPotatoServer/wiki/Developer-branch' }), new Element('span', { 'html': ' before posting, then copy the text below and FILL IN the dots.' diff --git a/couchpotato/core/plugins/subtitle.py b/couchpotato/core/plugins/subtitle.py index ef4a806..110fe11 100644 --- a/couchpotato/core/plugins/subtitle.py +++ b/couchpotato/core/plugins/subtitle.py @@ -16,7 +16,7 @@ autoload = 'Subtitle' class Subtitle(Plugin): - services = ['opensubtitles', 'thesubdb', 'subswiki', 'subscenter', 'thewiz'] + services = ['opensubtitles', 'thesubdb', 'subswiki', 'subscenter', 'wizdom'] def __init__(self): addEvent('renamer.before', self.searchSingle) diff --git a/couchpotato/static/images/icons/dark/safari.svg b/couchpotato/static/images/icons/dark/safari.svg new file mode 100644 index 0000000..89b5092 --- /dev/null +++ b/couchpotato/static/images/icons/dark/safari.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/couchpotato/static/images/icons/safari.svg b/couchpotato/static/images/icons/safari.svg new file mode 100644 index 0000000..89b5092 --- /dev/null +++ b/couchpotato/static/images/icons/safari.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/couchpotato/static/scripts/combined.plugins.min.js b/couchpotato/static/scripts/combined.plugins.min.js index 475c053..1862ca9 100644 --- a/couchpotato/static/scripts/combined.plugins.min.js +++ b/couchpotato/static/scripts/combined.plugins.min.js @@ -382,16 +382,23 @@ var MovieDetails = new Class({ open: function() { var self = this; self.el.addClass("show"); + document.onkeyup = self.keyup.bind(self); self.outer_click = function() { self.close(); }; App.addEvent("history.push", self.outer_click); }, + keyup: function(e) { + if (e.keyCode == 27) { + this.close(); + } + }, close: function() { var self = this; var ended = function() { self.el.dispose(); self.overlay.removeEventListener("transitionend", ended); + document.onkeyup = null; }; self.overlay.addEventListener("transitionend", ended, false); 
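
For the Filmstarts userscript change above: the original title and production year now come from the `synopsis-details` section, and labels are matched with `re.compile()` because some pages carry trailing whitespace. A self-contained sketch against trimmed stand-in HTML (the markup below is illustrative, not copied from filmstarts.de):

```python
# Sketch of the new Filmstarts scraping logic.
import re
from bs4 import BeautifulSoup

html = BeautifulSoup("""
<section class="section ovw ovw-synopsis" id="synopsis-details">
  <span>Originaltitel </span><h2>The Original Title</h2>
  <span>Produktionsjahr </span><span>2016</span>
</section>
""", 'html.parser')

section = html.find('section', attrs={'id': 'synopsis-details'})

if section.find(text=re.compile('Originaltitel')):
    name = section.find('span', text=re.compile('Originaltitel')).findNext('h2').text
else:
    name = None  # fall back to the og:title meta tag, as the provider does

year = section.find('span', text=re.compile('Produktionsjahr')).findNext('span').text
print(name, year)
```
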
self.el.removeClass("show"); @@ -3093,7 +3100,7 @@ Page.Log = new Class({ }), new Element("a.button", { target: "_blank", text: "the contributing guide", - href: "https://github.com/CouchPotato/CouchPotatoServer/blob/develop/contributing.md" + href: "https://github.com/CouchPotato/CouchPotatoServer/wiki/Developer-branch" }), new Element("span", { html: " before posting, then copy the text below and FILL IN the dots." })), textarea = new Element("textarea", { @@ -3416,7 +3423,7 @@ var QualityBase = new Class({ try { return this.qualities.filter(function(q) { return q.identifier == identifier; - }).pick(); + }).pick() || {}; } catch (e) {} return {}; }, diff --git a/couchpotato/templates/index.html b/couchpotato/templates/index.html index 927b2d0..4020773 100644 --- a/couchpotato/templates/index.html +++ b/couchpotato/templates/index.html @@ -15,6 +15,9 @@ + + + diff --git a/libs/rtorrent/__init__.py b/libs/rtorrent/__init__.py index 0d64e81..f6980bd 100644 --- a/libs/rtorrent/__init__.py +++ b/libs/rtorrent/__init__.py @@ -74,7 +74,9 @@ class RTorrent: if m.is_retriever() and m.is_available(self)] m = rtorrent.rpc.Multicall(self) - m.add("d.multicall", view, "d.get_hash=", + # multicall2 wants .. something .. as its first argument. It accepts a blank string, so let's go with that. + MCFirstArg = "" + m.add("d.multicall2", MCFirstArg, view, "d.hash=", *[method.rpc_call + "=" for method in retriever_methods]) results = m.call()[0] # only sent one call, only need first result @@ -116,7 +118,7 @@ class RTorrent: elif verbose: func_name = "load.verbose" else: - func_name = "load" + func_name = "load.normal" elif file_type in ["file", "raw"]: if start and verbose: func_name = "load.raw_start_verbose" @@ -137,31 +139,49 @@ class RTorrent: func_name = self._get_load_function("url", start, verbose) + # rtorrent > 0.9.6 requires first parameter @target + target = "" # load magnet - getattr(p, func_name)(magneturl) + getattr(p, func_name)(target, magneturl) if verify_load: + magnet = False i = 0 while i < verify_retries: - for torrent in self.get_torrents(): - if torrent.info_hash != info_hash: - continue + for m in self.get_torrents(): + # This block finds the magnet that was just added, starts it, breaks + # out of the for loop, and then out of the while loop. + # If it can't find the magnet, magnet won't get defined. + if m.info_hash == info_hash: + magnet = m + magnet.start() + i += 999 + break + + # If torrent hasn't been defined, sleep for a second and check again. + if not magnet: time.sleep(1) i += 1 - # Resolve magnet to torrent - torrent.start() + # This bit waits for the magnet to be resolved into an actual + # torrent, and then starts it. + torrent = False + i = 0 + while i < verify_retries: + for t in self.get_torrents(): + if t.info_hash == info_hash: + if str(info_hash) not in str(t.name): + torrent = t + torrent.start() + i += 999 + break + if not torrent: + time.sleep(1) + i += 1 assert info_hash in [t.info_hash for t in self.torrents],\ "Adding magnet was unsuccessful." 
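
The rtorrent `load_magnet` rewrite above verifies in two phases: find the freshly added magnet entry and start it, then wait for it to resolve into a real torrent (rTorrent names an unresolved magnet after its info hash). A sketch of that logic decoupled from the library; `client` is a hypothetical object exposing `get_torrents()` with `.info_hash`, `.name` and `.start()`:

```python
# Two-phase magnet verification, roughly as introduced in this PR.
import time


def wait_for_magnet(client, info_hash, retries=10, delay=1):
    # Phase 1: find the freshly added magnet and start it.
    magnet = None
    for _ in range(retries):
        magnet = next((t for t in client.get_torrents()
                       if t.info_hash == info_hash), None)
        if magnet:
            magnet.start()
            break
        time.sleep(delay)

    # Phase 2: wait until metadata is fetched and the entry becomes a torrent
    # (its name no longer contains the bare info hash), then start it.
    for _ in range(retries):
        torrent = next((t for t in client.get_torrents()
                        if t.info_hash == info_hash
                        and str(info_hash) not in str(t.name)), None)
        if torrent:
            torrent.start()
            return torrent
        time.sleep(delay)
    return None
```
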
- i = 0 - while i < verify_retries: - for torrent in self.get_torrents(): - if torrent.info_hash == info_hash: - if str(info_hash) not in str(torrent.name): - time.sleep(1) - i += 1 return(torrent) diff --git a/libs/rtorrent/file.py b/libs/rtorrent/file.py index a3db35c..758ed76 100755 --- a/libs/rtorrent/file.py +++ b/libs/rtorrent/file.py @@ -59,29 +59,29 @@ class File: methods = [ # RETRIEVERS - Method(File, 'get_last_touched', 'f.get_last_touched'), - Method(File, 'get_range_second', 'f.get_range_second'), - Method(File, 'get_size_bytes', 'f.get_size_bytes'), - Method(File, 'get_priority', 'f.get_priority'), - Method(File, 'get_match_depth_next', 'f.get_match_depth_next'), + Method(File, 'get_last_touched', 'f.last_touched'), + Method(File, 'get_range_second', 'f.range_second'), + Method(File, 'get_size_bytes', 'f.size_bytes'), + Method(File, 'get_priority', 'f.priority'), + Method(File, 'get_match_depth_next', 'f.match_depth_next'), Method(File, 'is_resize_queued', 'f.is_resize_queued', boolean=True, ), - Method(File, 'get_range_first', 'f.get_range_first'), - Method(File, 'get_match_depth_prev', 'f.get_match_depth_prev'), - Method(File, 'get_path', 'f.get_path'), - Method(File, 'get_completed_chunks', 'f.get_completed_chunks'), - Method(File, 'get_path_components', 'f.get_path_components'), + Method(File, 'get_range_first', 'f.range_first'), + Method(File, 'get_match_depth_prev', 'f.match_depth_prev'), + Method(File, 'get_path', 'f.path'), + Method(File, 'get_completed_chunks', 'f.completed_chunks'), + Method(File, 'get_path_components', 'f.path_components'), Method(File, 'is_created', 'f.is_created', boolean=True, ), Method(File, 'is_open', 'f.is_open', boolean=True, ), - Method(File, 'get_size_chunks', 'f.get_size_chunks'), - Method(File, 'get_offset', 'f.get_offset'), - Method(File, 'get_frozen_path', 'f.get_frozen_path'), - Method(File, 'get_path_depth', 'f.get_path_depth'), + Method(File, 'get_size_chunks', 'f.size_chunks'), + Method(File, 'get_offset', 'f.offset'), + Method(File, 'get_frozen_path', 'f.frozen_path'), + Method(File, 'get_path_depth', 'f.path_depth'), Method(File, 'is_create_queued', 'f.is_create_queued', boolean=True, ), diff --git a/libs/rtorrent/peer.py b/libs/rtorrent/peer.py index 61ca094..f998812 100755 --- a/libs/rtorrent/peer.py +++ b/libs/rtorrent/peer.py @@ -60,39 +60,39 @@ methods = [ Method(Peer, 'is_preferred', 'p.is_preferred', boolean=True, ), - Method(Peer, 'get_down_rate', 'p.get_down_rate'), + Method(Peer, 'get_down_rate', 'p.down_rate'), Method(Peer, 'is_unwanted', 'p.is_unwanted', boolean=True, ), - Method(Peer, 'get_peer_total', 'p.get_peer_total'), - Method(Peer, 'get_peer_rate', 'p.get_peer_rate'), - Method(Peer, 'get_port', 'p.get_port'), + Method(Peer, 'get_peer_total', 'p.peer_total'), + Method(Peer, 'get_peer_rate', 'p.peer_rate'), + Method(Peer, 'get_port', 'p.port'), Method(Peer, 'is_snubbed', 'p.is_snubbed', boolean=True, ), - Method(Peer, 'get_id_html', 'p.get_id_html'), - Method(Peer, 'get_up_rate', 'p.get_up_rate'), + Method(Peer, 'get_id_html', 'p.id_html'), + Method(Peer, 'get_up_rate', 'p.up_rate'), Method(Peer, 'is_banned', 'p.banned', boolean=True, ), - Method(Peer, 'get_completed_percent', 'p.get_completed_percent'), + Method(Peer, 'get_completed_percent', 'p.completed_percent'), Method(Peer, 'completed_percent', 'p.completed_percent'), - Method(Peer, 'get_id', 'p.get_id'), + Method(Peer, 'get_id', 'p.id'), Method(Peer, 'is_obfuscated', 'p.is_obfuscated', boolean=True, ), - Method(Peer, 'get_down_total', 'p.get_down_total'), 
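
The file.py, peer.py, torrent.py and tracker.py hunks in this PR are one mechanical rename: rTorrent 0.9.x dropped the old `get_*`/`set_*` XML-RPC names in favour of dotted ones. A rough, non-exhaustive sketch of the convention, using pairs taken from the hunks themselves:

```python
# Old-style -> new-style rTorrent XML-RPC names (sample, not exhaustive).
OLD_TO_NEW = {
    'd.get_down_rate': 'd.down.rate',     # getters lose the get_ prefix
    'd.get_name': 'd.name',
    'p.get_up_total': 'p.up_total',
    'f.get_size_bytes': 'f.size_bytes',
    't.get_url': 't.url',
    'd.set_priority': 'd.priority.set',   # setters move set to a .set suffix
    'd.set_custom1': 'd.custom1.set',
    'd.multicall': 'd.multicall2',        # multicall2 takes an extra leading argument
}


def translate(rpc_call):
    """Best-effort translation of an old-style call name to the new style."""
    return OLD_TO_NEW.get(rpc_call, rpc_call)


print(translate('d.get_down_rate'))  # -> d.down.rate
```
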
- Method(Peer, 'get_client_version', 'p.get_client_version'), - Method(Peer, 'get_address', 'p.get_address'), + Method(Peer, 'get.down.total', 'p.down_total'), + Method(Peer, 'get_client_version', 'p.client_version'), + Method(Peer, 'get_address', 'p.address'), Method(Peer, 'is_incoming', 'p.is_incoming', boolean=True, ), Method(Peer, 'is_encrypted', 'p.is_encrypted', boolean=True, ), - Method(Peer, 'get_options_str', 'p.get_options_str'), + Method(Peer, 'get_options_str', 'p.options_str'), Method(Peer, 'get_client_version', 'p.client_version'), - Method(Peer, 'get_up_total', 'p.get_up_total'), + Method(Peer, 'get_up_total', 'p.up_total'), # MODIFIERS ] diff --git a/libs/rtorrent/rpc/__init__.py b/libs/rtorrent/rpc/__init__.py index 607d409..e294a72 100755 --- a/libs/rtorrent/rpc/__init__.py +++ b/libs/rtorrent/rpc/__init__.py @@ -38,13 +38,13 @@ def get_varname(rpc_call): r = re.search( "([ptdf]\.|system\.|get\_|is\_|set\_)+([^=]*)", rpc_call, re.I) if r: - return(r.groups()[-1]) + return(r.groups()[-1].replace(".","_")) else: return(None) def _handle_unavailable_rpc_method(method, rt_obj): - msg = "Method isn't available." + msg = "Method " + str(method) + " isn't available." if rt_obj.connection._get_client_version_tuple() < method.min_version: msg = "This method is only available in " \ "RTorrent version v{0} or later".format( @@ -91,7 +91,7 @@ class Method: def _get_method_type(self): """Determine whether method is a modifier or a retriever""" - if self.method_name[:4] == "set_": return('m') # modifier + if self.method_name[:4] == "set_" or self.method_name[-4:] == ".set": return('m') # modifier else: return('r') # retriever diff --git a/libs/rtorrent/torrent.py b/libs/rtorrent/torrent.py index bd6bb68..f4f1406 100755 --- a/libs/rtorrent/torrent.py +++ b/libs/rtorrent/torrent.py @@ -139,7 +139,7 @@ class Torrent: results = m.call()[0] # only sent one call, only need first result offset_method_index = retriever_methods.index( - rtorrent.rpc.find_method("f.get_offset")) + rtorrent.rpc.find_method("f.offset")) # make a list of the offsets of all the files, sort appropriately offset_list = sorted([r[offset_method_index] for r in results]) @@ -168,7 +168,7 @@ class Torrent: """ m = rtorrent.rpc.Multicall(self) self.multicall_add(m, "d.try_stop") - self.multicall_add(m, "d.set_directory", d) + self.multicall_add(m, "d.directory.set", d) self.directory = m.call()[-1] @@ -181,7 +181,7 @@ class Torrent: """ m = rtorrent.rpc.Multicall(self) self.multicall_add(m, "d.try_stop") - self.multicall_add(m, "d.set_directory_base", d) + self.multicall_add(m, "d.directory_base.set", d) def start(self): """Start the torrent""" @@ -304,7 +304,7 @@ class Torrent: m = rtorrent.rpc.Multicall(self) field = "custom{0}".format(key) - self.multicall_add(m, "d.get_{0}".format(field)) + self.multicall_add(m, "d.{0}".format(field)) setattr(self, field, m.call()[-1]) return (getattr(self, field)) @@ -326,7 +326,7 @@ class Torrent: self._assert_custom_key_valid(key) m = rtorrent.rpc.Multicall(self) - self.multicall_add(m, "d.set_custom{0}".format(key), value) + self.multicall_add(m, "d.custom{0}.set".format(key), value) return(m.call()[-1]) @@ -355,7 +355,7 @@ class Torrent: @note: Variable where the result for this method is stored Torrent.hash_checking_queued""" m = rtorrent.rpc.Multicall(self) - self.multicall_add(m, "d.get_hashing") + self.multicall_add(m, "d.hashing") self.multicall_add(m, "d.is_hash_checking") results = m.call() @@ -397,86 +397,86 @@ methods = [ Method(Torrent, 'is_hash_checking', 
'd.is_hash_checking', boolean=True, ), - Method(Torrent, 'get_peers_max', 'd.get_peers_max'), - Method(Torrent, 'get_tracker_focus', 'd.get_tracker_focus'), - Method(Torrent, 'get_skip_total', 'd.get_skip_total'), - Method(Torrent, 'get_state', 'd.get_state'), - Method(Torrent, 'get_peer_exchange', 'd.get_peer_exchange'), - Method(Torrent, 'get_down_rate', 'd.get_down_rate'), - Method(Torrent, 'get_connection_seed', 'd.get_connection_seed'), - Method(Torrent, 'get_uploads_max', 'd.get_uploads_max'), - Method(Torrent, 'get_priority_str', 'd.get_priority_str'), + Method(Torrent, 'get_peers_max', 'd.peers_max'), + Method(Torrent, 'get_tracker_focus', 'd.tracker_focus'), + Method(Torrent, 'get_skip_total', 'd.skip.total'), + Method(Torrent, 'get_state', 'd.state'), + Method(Torrent, 'get_peer_exchange', 'd.peer_exchange'), + Method(Torrent, 'get_down_rate', 'd.down.rate'), + Method(Torrent, 'get_connection_seed', 'd.connection_seed'), + Method(Torrent, 'get_uploads_max', 'd.uploads_max'), + Method(Torrent, 'get_priority_str', 'd.priority_str'), Method(Torrent, 'is_open', 'd.is_open', boolean=True, ), - Method(Torrent, 'get_peers_min', 'd.get_peers_min'), - Method(Torrent, 'get_peers_complete', 'd.get_peers_complete'), - Method(Torrent, 'get_tracker_numwant', 'd.get_tracker_numwant'), - Method(Torrent, 'get_connection_current', 'd.get_connection_current'), - Method(Torrent, 'is_complete', 'd.get_complete', + Method(Torrent, 'get_peers_min', 'd.peers_min'), + Method(Torrent, 'get_peers_complete', 'd.peers_complete'), + Method(Torrent, 'get_tracker_numwant', 'd.tracker_numwant'), + Method(Torrent, 'get_connection_current', 'd.connection_current'), + Method(Torrent, 'is_complete', 'd.complete', boolean=True, ), - Method(Torrent, 'get_peers_connected', 'd.get_peers_connected'), - Method(Torrent, 'get_chunk_size', 'd.get_chunk_size'), - Method(Torrent, 'get_state_counter', 'd.get_state_counter'), - Method(Torrent, 'get_base_filename', 'd.get_base_filename'), - Method(Torrent, 'get_state_changed', 'd.get_state_changed'), - Method(Torrent, 'get_peers_not_connected', 'd.get_peers_not_connected'), - Method(Torrent, 'get_directory', 'd.get_directory'), + Method(Torrent, 'get_peers_connected', 'd.peers_connected'), + Method(Torrent, 'get_chunk_size', 'd.chunk_size'), + Method(Torrent, 'get_state_counter', 'd.state_counter'), + Method(Torrent, 'get_base_filename', 'd.base_filename'), + Method(Torrent, 'get_state_changed', 'd.state_changed'), + Method(Torrent, 'get_peers_not_connected', 'd.peers_not_connected'), + Method(Torrent, 'get_directory', 'd.directory'), Method(Torrent, 'is_incomplete', 'd.incomplete', boolean=True, ), - Method(Torrent, 'get_tracker_size', 'd.get_tracker_size'), + Method(Torrent, 'get_tracker_size', 'd.tracker_size'), Method(Torrent, 'is_multi_file', 'd.is_multi_file', boolean=True, ), - Method(Torrent, 'get_local_id', 'd.get_local_id'), - Method(Torrent, 'get_ratio', 'd.get_ratio', + Method(Torrent, 'get_local_id', 'd.local_id'), + Method(Torrent, 'get_ratio', 'd.ratio', post_process_func=lambda x: x / 1000.0, ), - Method(Torrent, 'get_loaded_file', 'd.get_loaded_file'), - Method(Torrent, 'get_max_file_size', 'd.get_max_file_size'), - Method(Torrent, 'get_size_chunks', 'd.get_size_chunks'), + Method(Torrent, 'get_loaded_file', 'd.loaded_file'), + Method(Torrent, 'get_max_file_size', 'd.max_file_size'), + Method(Torrent, 'get_size_chunks', 'd.size_chunks'), Method(Torrent, 'is_pex_active', 'd.is_pex_active', boolean=True, ), - Method(Torrent, 'get_hashing', 'd.get_hashing'), - 
Method(Torrent, 'get_bitfield', 'd.get_bitfield'), - Method(Torrent, 'get_local_id_html', 'd.get_local_id_html'), - Method(Torrent, 'get_connection_leech', 'd.get_connection_leech'), - Method(Torrent, 'get_peers_accounted', 'd.get_peers_accounted'), - Method(Torrent, 'get_message', 'd.get_message'), + Method(Torrent, 'get_hashing', 'd.hashing'), + Method(Torrent, 'get_bitfield', 'd.bitfield'), + Method(Torrent, 'get_local_id_html', 'd.local_id_html'), + Method(Torrent, 'get_connection_leech', 'd.connection_leech'), + Method(Torrent, 'get_peers_accounted', 'd.peers_accounted'), + Method(Torrent, 'get_message', 'd.message'), Method(Torrent, 'is_active', 'd.is_active', boolean=True, ), - Method(Torrent, 'get_size_bytes', 'd.get_size_bytes'), - Method(Torrent, 'get_ignore_commands', 'd.get_ignore_commands'), - Method(Torrent, 'get_creation_date', 'd.get_creation_date'), - Method(Torrent, 'get_base_path', 'd.get_base_path'), - Method(Torrent, 'get_left_bytes', 'd.get_left_bytes'), - Method(Torrent, 'get_size_files', 'd.get_size_files'), - Method(Torrent, 'get_size_pex', 'd.get_size_pex'), + Method(Torrent, 'get_size_bytes', 'd.size_bytes'), + Method(Torrent, 'get_ignore_commands', 'd.ignore_commands'), + Method(Torrent, 'get_creation_date', 'd.creation_date'), + Method(Torrent, 'get_base_path', 'd.base_path'), + Method(Torrent, 'get_left_bytes', 'd.left_bytes'), + Method(Torrent, 'get_size_files', 'd.size_files'), + Method(Torrent, 'get_size_pex', 'd.size_pex'), Method(Torrent, 'is_private', 'd.is_private', boolean=True, ), - Method(Torrent, 'get_max_size_pex', 'd.get_max_size_pex'), - Method(Torrent, 'get_num_chunks_hashed', 'd.get_chunks_hashed', + Method(Torrent, 'get_max_size_pex', 'd.max_size_pex'), + Method(Torrent, 'get_num_chunks_hashed', 'd.chunks_hashed', aliases=("get_chunks_hashed",)), Method(Torrent, 'get_num_chunks_wanted', 'd.wanted_chunks'), - Method(Torrent, 'get_priority', 'd.get_priority'), - Method(Torrent, 'get_skip_rate', 'd.get_skip_rate'), - Method(Torrent, 'get_completed_bytes', 'd.get_completed_bytes'), - Method(Torrent, 'get_name', 'd.get_name'), - Method(Torrent, 'get_completed_chunks', 'd.get_completed_chunks'), - Method(Torrent, 'get_throttle_name', 'd.get_throttle_name'), - Method(Torrent, 'get_free_diskspace', 'd.get_free_diskspace'), - Method(Torrent, 'get_directory_base', 'd.get_directory_base'), - Method(Torrent, 'get_hashing_failed', 'd.get_hashing_failed'), - Method(Torrent, 'get_tied_to_file', 'd.get_tied_to_file'), - Method(Torrent, 'get_down_total', 'd.get_down_total'), - Method(Torrent, 'get_bytes_done', 'd.get_bytes_done'), - Method(Torrent, 'get_up_rate', 'd.get_up_rate'), - Method(Torrent, 'get_up_total', 'd.get_up_total'), + Method(Torrent, 'get_priority', 'd.priority'), + Method(Torrent, 'get_skip_rate', 'd.skip.rate'), + Method(Torrent, 'get_completed_bytes', 'd.completed_bytes'), + Method(Torrent, 'get_name', 'd.name'), + Method(Torrent, 'get_completed_chunks', 'd.completed_chunks'), + Method(Torrent, 'get_throttle_name', 'd.throttle_name'), + Method(Torrent, 'get_free_diskspace', 'd.free_diskspace'), + Method(Torrent, 'get_directory_base', 'd.directory_base'), + Method(Torrent, 'get_hashing_failed', 'd.hashing_failed'), + Method(Torrent, 'get_tied_to_file', 'd.tied_to_file'), + Method(Torrent, 'get_down_total', 'd.down.total'), + Method(Torrent, 'get_bytes_done', 'd.bytes_done'), + Method(Torrent, 'get_up_rate', 'd.up.rate'), + Method(Torrent, 'get_up_total', 'd.up.total'), Method(Torrent, 'is_accepting_seeders', 'd.accepting_seeders', 
boolean=True, ), @@ -490,28 +490,28 @@ methods = [ boolean=True, ), Method(Torrent, "get_time_started", "d.timestamp.started"), - Method(Torrent, "get_custom1", "d.get_custom1"), - Method(Torrent, "get_custom2", "d.get_custom2"), - Method(Torrent, "get_custom3", "d.get_custom3"), - Method(Torrent, "get_custom4", "d.get_custom4"), - Method(Torrent, "get_custom5", "d.get_custom5"), + Method(Torrent, "get_custom1", "d.custom1"), + Method(Torrent, "get_custom2", "d.custom2"), + Method(Torrent, "get_custom3", "d.custom3"), + Method(Torrent, "get_custom4", "d.custom4"), + Method(Torrent, "get_custom5", "d.custom5"), # MODIFIERS - Method(Torrent, 'set_uploads_max', 'd.set_uploads_max'), - Method(Torrent, 'set_tied_to_file', 'd.set_tied_to_file'), - Method(Torrent, 'set_tracker_numwant', 'd.set_tracker_numwant'), - Method(Torrent, 'set_priority', 'd.set_priority'), - Method(Torrent, 'set_peers_max', 'd.set_peers_max'), - Method(Torrent, 'set_hashing_failed', 'd.set_hashing_failed'), - Method(Torrent, 'set_message', 'd.set_message'), - Method(Torrent, 'set_throttle_name', 'd.set_throttle_name'), - Method(Torrent, 'set_peers_min', 'd.set_peers_min'), - Method(Torrent, 'set_ignore_commands', 'd.set_ignore_commands'), - Method(Torrent, 'set_max_file_size', 'd.set_max_file_size'), - Method(Torrent, 'set_custom5', 'd.set_custom5'), - Method(Torrent, 'set_custom4', 'd.set_custom4'), - Method(Torrent, 'set_custom2', 'd.set_custom2'), - Method(Torrent, 'set_custom1', 'd.set_custom1'), - Method(Torrent, 'set_custom3', 'd.set_custom3'), - Method(Torrent, 'set_connection_current', 'd.set_connection_current'), + Method(Torrent, 'set_uploads_max', 'd.uploads_max.set'), + Method(Torrent, 'set_tied_to_file', 'd.tied_to_file.set'), + Method(Torrent, 'set_tracker_numwant', 'd.tracker_numwant.set'), + Method(Torrent, 'set_priority', 'd.priority.set'), + Method(Torrent, 'set_peers_max', 'd.peers_max.set'), + Method(Torrent, 'set_hashing_failed', 'd.hashing_failed.set'), + Method(Torrent, 'set_message', 'd.message.set'), + Method(Torrent, 'set_throttle_name', 'd.throttle_name.set'), + Method(Torrent, 'set_peers_min', 'd.peers_min.set'), + Method(Torrent, 'set_ignore_commands', 'd.ignore_commands.set'), + Method(Torrent, 'set_max_file_size', 'd.max_file_size.set'), + Method(Torrent, 'set_custom5', 'd.custom5.set'), + Method(Torrent, 'set_custom4', 'd.custom4.set'), + Method(Torrent, 'set_custom2', 'd.custom2.set'), + Method(Torrent, 'set_custom1', 'd.custom1.set'), + Method(Torrent, 'set_custom3', 'd.custom3.set'), + Method(Torrent, 'set_connection_current', 'd.connection_current.set'), ] diff --git a/libs/rtorrent/tracker.py b/libs/rtorrent/tracker.py index 81af2e4..03cec4b 100755 --- a/libs/rtorrent/tracker.py +++ b/libs/rtorrent/tracker.py @@ -70,17 +70,17 @@ class Tracker: methods = [ # RETRIEVERS Method(Tracker, 'is_enabled', 't.is_enabled', boolean=True), - Method(Tracker, 'get_id', 't.get_id'), - Method(Tracker, 'get_scrape_incomplete', 't.get_scrape_incomplete'), + Method(Tracker, 'get_id', 't.id'), + Method(Tracker, 'get_scrape_incomplete', 't.scrape_incomplete'), Method(Tracker, 'is_open', 't.is_open', boolean=True), - Method(Tracker, 'get_min_interval', 't.get_min_interval'), - Method(Tracker, 'get_scrape_downloaded', 't.get_scrape_downloaded'), - Method(Tracker, 'get_group', 't.get_group'), - Method(Tracker, 'get_scrape_time_last', 't.get_scrape_time_last'), - Method(Tracker, 'get_type', 't.get_type'), - Method(Tracker, 'get_normal_interval', 't.get_normal_interval'), - Method(Tracker, 'get_url', 
't.get_url'), - Method(Tracker, 'get_scrape_complete', 't.get_scrape_complete', + Method(Tracker, 'get_min_interval', 't.min_interval'), + Method(Tracker, 'get_scrape_downloaded', 't.scrape_downloaded'), + Method(Tracker, 'get_group', 't.group'), + Method(Tracker, 'get_scrape_time_last', 't.scrape_time_last'), + Method(Tracker, 'get_type', 't.type'), + Method(Tracker, 'get_normal_interval', 't.normal_interval'), + Method(Tracker, 'get_url', 't.url'), + Method(Tracker, 'get_scrape_complete', 't.scrape_complete', min_version=(0, 8, 9), ), Method(Tracker, 'get_activity_time_last', 't.activity_time_last', @@ -134,5 +134,5 @@ methods = [ ), # MODIFIERS - Method(Tracker, 'set_enabled', 't.set_enabled'), + Method(Tracker, 'set_enabled', 't.is_enabled.set'), ] diff --git a/libs/subliminal/core.py b/libs/subliminal/core.py index fcecd06..5cb4a96 100755 --- a/libs/subliminal/core.py +++ b/libs/subliminal/core.py @@ -33,7 +33,7 @@ __all__ = ['SERVICES', 'LANGUAGE_INDEX', 'SERVICE_INDEX', 'SERVICE_CONFIDENCE', 'key_subtitles', 'group_by_video'] logger = logging.getLogger(__name__) SERVICES = ['opensubtitles', 'bierdopje', 'subswiki', 'subtitulos', 'thesubdb', 'addic7ed', 'tvsubtitles', - 'subscenter', 'thewiz'] + 'subscenter', 'wizdom'] LANGUAGE_INDEX, SERVICE_INDEX, SERVICE_CONFIDENCE, MATCHING_CONFIDENCE = range(4) diff --git a/libs/subliminal/services/__init__.py b/libs/subliminal/services/__init__.py index b82b309..b169aaf 100755 --- a/libs/subliminal/services/__init__.py +++ b/libs/subliminal/services/__init__.py @@ -183,16 +183,21 @@ class ServiceBase(object): return False return True - def download_file(self, url, filepath): + def download_file(self, url, filepath, data=None): """Attempt to download a file and remove it in case of failure :param string url: URL to download :param string filepath: destination path + :param string data: data to add to the post request """ logger.info(u'Downloading %s in %s' % (url, filepath)) try: - r = self.session.get(url, timeout = 10, headers = {'Referer': url, 'User-Agent': self.user_agent}) + headers = {'Referer': url, 'User-Agent': self.user_agent} + if data: + r = self.session.post(url, data=data, timeout=10, headers=headers) + else: + r = self.session.get(url, timeout=10, headers=headers) with open(filepath, 'wb') as f: f.write(r.content) except Exception as e: @@ -202,18 +207,23 @@ class ServiceBase(object): raise DownloadFailedError(str(e)) logger.debug(u'Download finished') - def download_zip_file(self, url, filepath): + def download_zip_file(self, url, filepath, data=None): """Attempt to download a zip file and extract any subtitle file from it, if any. This cleans up after itself if anything fails. 
:param string url: URL of the zip file to download :param string filepath: destination path for the subtitle + :param string data: data to add to the post request """ logger.info(u'Downloading %s in %s' % (url, filepath)) try: zippath = filepath + '.zip' - r = self.session.get(url, timeout = 10, headers = {'Referer': url, 'User-Agent': self.user_agent}) + headers = {'Referer': url, 'User-Agent': self.user_agent} + if data: + r = self.session.post(url, data=data, timeout=10, headers=headers) + else: + r = self.session.get(url, timeout=10, headers=headers) with open(zippath, 'wb') as f: f.write(r.content) if not zipfile.is_zipfile(zippath): diff --git a/libs/subliminal/services/subscenter.py b/libs/subliminal/services/subscenter.py index 253d925..7125f92 100644 --- a/libs/subliminal/services/subscenter.py +++ b/libs/subliminal/services/subscenter.py @@ -16,124 +16,147 @@ # You should have received a copy of the GNU Lesser General Public License # along with subliminal. If not, see . from . import ServiceBase -from ..exceptions import DownloadFailedError, ServiceError +from ..exceptions import ServiceError from ..language import language_set from ..subtitles import get_subtitle_path, ResultSubtitle from ..videos import Episode, Movie -from ..utils import to_unicode, get_keywords -from bs4 import BeautifulSoup +from ..utils import to_unicode + import bisect -import json import logging +from urllib import urlencode + logger = logging.getLogger(__name__) class Subscenter(ServiceBase): - server = 'http://www.subscenter.org/he/' - api_based = False + server = 'http://www.cinemast.org/he/cinemast/api/' + api_based = True languages = language_set(['he']) videos = [Episode, Movie] require_video = False - def _search_url_title(self, title, kind): - """Search the URL title for the given `title`. - - :param str title: title to search for. - :param str kind: kind of the title, ``movie`` or ``series``. - :return: the URL version of the title. 
- :rtype: str or None - """ - # make the search - logger.info('Searching title name for %r', title) - r = self.session.get(self.server + 'subtitle/search/', params={'q': title}, allow_redirects=False, timeout=10) - r.raise_for_status() + default_username = 'subliminal@gmail.com' + default_password = 'subliminal' - # if redirected, get the url title from the Location header - if r.is_redirect: - parts = r.headers['Location'].split('/') + def __init__(self, config=None): + super(Subscenter, self).__init__(config) + self.token = None + self.user_id = None - # check kind - if parts[-3] == kind: - return parts[-2] + def init(self): + super(Subscenter, self).init() + logger.debug('Logging in') + url = self.server_url + 'login/' - return None + # actual login + data = {'username': self.default_username, 'password': self.default_password} + r = self.session.post(url, data=urlencode(data), allow_redirects=False, timeout=10) - # otherwise, get the first valid suggestion - soup = BeautifulSoup(r.content, ['lxml', 'html.parser']) - suggestions = soup.select('#processes div.generalWindowTop a') - logger.debug('Found %d suggestions', len(suggestions)) - for suggestion in suggestions: - parts = suggestion.attrs['href'].split('/') + if r.status_code != 200: + raise ServiceError('Login failed') + + try: + result = r.json() + if 'token' not in result: + raise ServiceError('Login failed') - # check kind - if parts[-3] == kind: - return parts[-2] + logger.info('Logged in') + self.user_id = r.json().get('user') + self.token = r.json().get('token') + except ValueError: + raise ServiceError('Login failed') + + def terminate(self): + super(Subscenter, self).terminate() + if self.token or self.user_id: + logger.info('Logged out') + self.token = None + self.user_id = None def list_checked(self, video, languages): series = None season = None episode = None title = video.title + year = video.year if isinstance(video, Episode): series = video.series season = video.season episode = video.episode - return self.query(video.path or video.release, languages, get_keywords(video.guess), series, season, - episode, title) + return self.query(video.path or video.release, languages, series, season, episode, title, year) - def query(self, filepath, languages=None, keywords=None, series=None, season=None, episode=None, title=None): + def query(self, filepath, languages=None, series=None, season=None, episode=None, title=None, year=None): logger.debug(u'Getting subtitles for {0} season {1} episode {2} with languages {3}'.format( series, season, episode, languages)) - # Set the correct parameters depending on the kind. 
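
The rewritten Subscenter service in this diff talks to the Cinemast API: log in once to obtain a user id and token, then pass both with every urlencoded search query. A condensed sketch of that flow, following the Python 2 idiom of the surrounding code; the URL, field names and default credentials come from the diff, while the `requests` session is a stand-in for `self.session`:

```python
# Sketch of the Cinemast-backed Subscenter flow: login -> token -> search.
from urllib import urlencode   # Python 2, as in the surrounding code
import requests

SERVER = 'http://www.cinemast.org/he/cinemast/api/'


def login(session, username, password):
    r = session.post(SERVER + 'login/',
                     data=urlencode({'username': username, 'password': password}),
                     allow_redirects=False, timeout=10)
    result = r.json()
    return result['user'], result['token']


def search_movie(session, user_id, token, title, year):
    query = {'user': user_id, 'token': token, 'q': title, 'type': 'movies',
             'year_start': year - 1, 'year_end': year}
    r = session.post(SERVER + 'search/', data=urlencode(query))
    r.raise_for_status()
    return r.json()


session = requests.Session()
user_id, token = login(session, 'subliminal@gmail.com', 'subliminal')
print(search_movie(session, user_id, token, 'Example Movie', 2016))
```
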
- if series and season and episode: - url_series = self._search_url_title(series, 'series') - url = self.server + 'cst/data/series/sb/{}/{}/{}/'.format(url_series, season, episode) + + query = { + 'user': self.user_id, + 'token': self.token + } + + # episode + if season and episode: + query['q'] = series + query['type'] = 'series' + query['season'] = season + query['episode'] = episode elif title: - url_title = self._search_url_title(title, 'movie') - url = self.server + 'cst/data/movie/sb/{}/'.format(url_title) + query['q'] = title + query['type'] = 'movies' + if year: + query['year_start'] = year - 1 + query['year_end'] = year else: raise ServiceError('One or more parameters are missing') - logger.debug('Searching subtitles for title {0}, season {1}, episode {2}'.format(title, season, episode)) - response = self.session.get(url) - if response.status_code != 200: - raise ServiceError('Request failed with status code {0}'.format(response.status_code)) - # Loop over results. - subtitles = dict() - response_json = json.loads(response.content) - for language_code, language_data in response_json.items(): - language_object = self.get_language(language_code) - if language_object in self.languages and language_object in languages: - for quality_data in language_data.values(): - for quality, subtitles_data in quality_data.items(): - for subtitle_item in subtitles_data.values(): - # Read the item. - subtitle_id = subtitle_item['id'] - subtitle_key = subtitle_item['key'] - subtitle_version = subtitle_item['h_version'] - release = subtitle_item['subtitle_version'] - subtitle_path = get_subtitle_path(filepath, language_object, self.config.multi) - download_link = self.server_url + 'subtitle/download/{0}/{1}/?v={2}&key={3}'.format( - language_code, subtitle_id, subtitle_version, subtitle_key) - # Add the release and increment downloaded count if we already have the subtitle. - if subtitle_id in subtitles: - logger.debug('Found additional release {0} for subtitle {1}'.format( - release, subtitle_id)) - bisect.insort_left(subtitles[subtitle_id].release, release) # Deterministic order. - continue - # Otherwise create it. - subtitle = ResultSubtitle(subtitle_path, language_object, self.__class__.__name__.lower(), - download_link, release=to_unicode(release)) - logger.debug('Found subtitle %r', subtitle) - subtitles[subtitle_id] = subtitle + + # get the list of subtitles + logger.debug('Getting the list of subtitles') + url = self.server_url + 'search/' + r = self.session.post(url, data=urlencode(query)) + r.raise_for_status() + + try: + results = r.json() + except ValueError: + return {} + + # loop over results + subtitles = {} + for group_data in results.get('data', []): + for language_code, subtitles_data in group_data.get('subtitles', {}).items(): + language_object = self.get_language(language_code) + + for subtitle_item in subtitles_data: + # read the item + subtitle_id = subtitle_item['id'] + subtitle_key = subtitle_item['key'] + release = subtitle_item['version'] + + subtitle_path = get_subtitle_path(filepath, language_object, self.config.multi) + download_link = self.server_url + 'subtitle/download/{0}/?v={1}&key={2}&sub_id={3}'.format( + language_code, release, subtitle_key, subtitle_id) + # Add the release and increment downloaded count if we already have the subtitle. + if subtitle_id in subtitles: + logger.debug('Found additional release {0} for subtitle {1}'.format( + release, subtitle_id)) + bisect.insort_left(subtitles[subtitle_id].release, release) # Deterministic order. 
+ continue + # Otherwise create it. + subtitle = ResultSubtitle(subtitle_path, language_object, self.__class__.__name__.lower(), + download_link, release=to_unicode(release)) + logger.debug('Found subtitle %r', subtitle) + subtitles[subtitle_id] = subtitle + return subtitles.values() def download(self, subtitle): - try: - self.download_zip_file(subtitle.link, subtitle.path) - except DownloadFailedError: - # If no zip file was retrieved, daily downloads limit has exceeded. - raise ServiceError('Daily limit exceeded') + data = { + 'user': self.user_id, + 'token': self.token + } + self.download_zip_file(subtitle.link, subtitle.path, data=urlencode(data)) return subtitle diff --git a/libs/subliminal/services/thewiz.py b/libs/subliminal/services/wizdom.py similarity index 68% rename from libs/subliminal/services/thewiz.py rename to libs/subliminal/services/wizdom.py index da351a8..675dccf 100644 --- a/libs/subliminal/services/thewiz.py +++ b/libs/subliminal/services/wizdom.py @@ -23,52 +23,46 @@ from ..videos import Episode, Movie from ..utils import to_unicode import bisect import logging -import os logger = logging.getLogger(__name__) -class TheWiz(ServiceBase): - server = 'http://subs.thewiz.info/' +class Wizdom(ServiceBase): + server = 'http://wizdom.xyz' api_based = True languages = language_set(['he']) videos = [Episode, Movie] require_video = False - _tmdb_api_key = 'f7f51775877e0bb6703520952b3c7840' + _tmdb_api_key = 'a51ee051bcd762543373903de296e0a3' def _search_imdb_id(self, title, year, is_movie): """Search the IMDB ID for the given `title` and `year`. :param str title: title to search for. :param int year: year to search for (or 0 if not relevant). - :param bool is_movie: If True, IMDB ID will be searched for in TMDB instead of TheWiz. + :param bool is_movie: If True, IMDB ID will be searched for in TMDB instead of Wizdom. :return: the IMDB ID for the given title and year (or None if not found). 
:rtype: str """ # make the search logger.info('Searching IMDB ID for %r%r', title, '' if not year else ' ({})'.format(year)) + category = 'movie' if is_movie else 'tv' title = title.replace('\'', '') - if is_movie: - # get TMDB ID first - r = self.session.get('http://api.tmdb.org/3/search/movie?api_key={}&query={}{}&language=en'.format( - self._tmdb_api_key, title, '' if not year else '&year={}'.format(year))) - r.raise_for_status() - tmdb_results = r.json().get('results') - if tmdb_results: - tmdb_id = tmdb_results[0].get('id') - if tmdb_id: - # get actual IMDB ID from TMDB - r = self.session.get('http://api.tmdb.org/3/movie/{}?api_key={}&language=en'.format( - tmdb_id, self._tmdb_api_key)) - r.raise_for_status() - return str(r.json().get('imdb_id', '')) or None - return None - - # handle TV series - r = self.session.get(self.server_url + 'search.tv.php', params={'name': title}, timeout=10) + # get TMDB ID first + r = self.session.get('http://api.tmdb.org/3/search/{}?api_key={}&query={}{}&language=en'.format( + category, self._tmdb_api_key, title, '' if not year else '&year={}'.format(year))) r.raise_for_status() - return r.text or None + tmdb_results = r.json().get('results') + if tmdb_results: + tmdb_id = tmdb_results[0].get('id') + if tmdb_id: + # get actual IMDB ID from TMDB + r = self.session.get('http://api.tmdb.org/3/{}/{}{}?api_key={}&language=en'.format( + category, tmdb_id, '' if is_movie else '/external_ids', self._tmdb_api_key)) + r.raise_for_status() + return str(r.json().get('imdb_id', '')) or None + return None def list_checked(self, video, languages): series = None @@ -84,40 +78,44 @@ class TheWiz(ServiceBase): return self.query(video.path or video.release, languages, series, season, episode, title, imdb_id, year) - def query(self, filepath, languages=None, series=None, season=None, episode=None, title=None, imdbid=None, year=None): + def query(self, filepath, languages=None, series=None, season=None, episode=None, title=None, imdbid=None, + year=None): logger.debug(u'Getting subtitles for {0} season {1} episode {2} with languages {3}'.format( series, season, episode, languages)) # search for the IMDB ID if needed is_movie = not (series and season and episode) if is_movie and not title: raise ServiceError('One or more parameters are missing') - # for tv series, we need the series IMDB ID, and not the specific episode ID - imdb_id = (is_movie and imdbid) or self._search_imdb_id(title, year, is_movie) - # get search parameters - season = season or 0 - episode = episode or 0 - version = os.path.splitext(os.path.basename(filepath))[0] if filepath else 0 + # for TV series, we need the series IMDB ID, and not the specific episode ID + imdb_id = imdbid or self._search_imdb_id(title, year, is_movie) # search logger.debug(u'Using IMDB ID {0}'.format(imdb_id)) - url = 'http://subs.thewiz.info/search.id.php?imdb={}&season={}&episode={}&version={}'.format( - imdb_id, season, episode, version) + url = 'http://json.{}/{}.json'.format(self.server_url, imdb_id) # get the list of subtitles - logger.debug(u'Getting the list of subtitles') + logger.debug('Getting the list of subtitles') r = self.session.get(url) r.raise_for_status() - results = r.json() + try: + results = r.json() + except ValueError: + return {} - # loop over results + # filter irrelevant results + if not is_movie: + results = results.get('subs', {}).get(str(season), {}).get(str(episode), []) + else: + results = results.get('subs', []) + # loop over results subtitles = dict() for result in results: language_object = 
self.get_language('heb') subtitle_id = result['id'] - release = result['versioname'] + release = result['version'] subtitle_path = get_subtitle_path(filepath, language_object, self.config.multi) - download_link = self.server_url + 'zip/{0}.zip'.format(subtitle_id) + download_link = 'http://zip.{}/{}.zip'.format(self.server_url, subtitle_id) # add the release and increment downloaded count if we already have the subtitle if subtitle_id in subtitles: logger.debug(u'Found additional release {0} for subtitle {1}'.format(release, subtitle_id)) @@ -137,4 +135,4 @@ class TheWiz(ServiceBase): return subtitle -Service = TheWiz +Service = Wizdom diff --git a/libs/xmpp/transports.py b/libs/xmpp/transports.py index f3d1316..bf59266 100644 --- a/libs/xmpp/transports.py +++ b/libs/xmpp/transports.py @@ -27,7 +27,7 @@ Transports are stackable so you - f.e. TLS use HTPPROXYsocket or TCPsocket as mo Also exception 'error' is defined to allow capture of this module specific exceptions. """ -import socket, select, base64, dispatcher, sys +import socket, ssl, select, base64, dispatcher, sys from simplexml import ustr from client import PlugIn from protocol import * @@ -312,9 +312,9 @@ class TLS(PlugIn): """ Immidiatedly switch socket to TLS mode. Used internally.""" """ Here we should switch pending_data to hint mode.""" tcpsock = self._owner.Connection - tcpsock._sslObj = socket.ssl(tcpsock._sock, None, None) - tcpsock._sslIssuer = tcpsock._sslObj.issuer() - tcpsock._sslServer = tcpsock._sslObj.server() + tcpsock._sslObj = ssl.wrap_socket(tcpsock._sock, None, None) + tcpsock._sslIssuer = tcpsock._sslObj.getpeercert().get('issuer') + tcpsock._sslServer = tcpsock._sslObj.getpeercert().get('server') tcpsock._recv = tcpsock._sslObj.read tcpsock._send = tcpsock._sslObj.write
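
The transports.py change at the end of this diff swaps the long-removed `socket.ssl()` API for the `ssl` module. A minimal sketch of the same pattern in isolation; note that `getpeercert()` only returns certificate fields when validation is requested, so the issuer lookup needs `cert_reqs`/`ca_certs` to yield anything useful (the CA bundle path below is a typical Debian location and is an assumption):

```python
# Wrapping an existing client socket with TLS and reading the peer certificate,
# in the Python 2 style used by the xmpp transport.
import socket
import ssl

sock = socket.create_connection(('example.org', 443), timeout=10)
tls = ssl.wrap_socket(sock, cert_reqs=ssl.CERT_REQUIRED,
                      ca_certs='/etc/ssl/certs/ca-certificates.crt')

cert = tls.getpeercert()
print(cert.get('issuer'))

# The XMPP transport then routes its I/O through the wrapped socket, e.g.
# tcpsock._recv = tls.read and tcpsock._send = tls.write.
tls.close()
```
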