From 91d72802c82710bf9e54e8de93b14272d26153bb Mon Sep 17 00:00:00 2001 From: Fernando Date: Tue, 6 Dec 2016 12:12:13 -0200 Subject: [PATCH 01/20] Fix Transmission not reading correct status --- couchpotato/core/downloaders/transmission.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/couchpotato/core/downloaders/transmission.py b/couchpotato/core/downloaders/transmission.py index f40b955..41f5c23 100644 --- a/couchpotato/core/downloaders/transmission.py +++ b/couchpotato/core/downloaders/transmission.py @@ -143,12 +143,23 @@ class Transmission(DownloaderBase): log.debug('name=%s / id=%s / downloadDir=%s / hashString=%s / percentDone=%s / status=%s / isStalled=%s / eta=%s / uploadRatio=%s / isFinished=%s / incomplete-dir-enabled=%s / incomplete-dir=%s', (torrent['name'], torrent['id'], torrent['downloadDir'], torrent['hashString'], torrent['percentDone'], torrent['status'], torrent.get('isStalled', 'N/A'), torrent['eta'], torrent['uploadRatio'], torrent['isFinished'], session['incomplete-dir-enabled'], session['incomplete-dir'])) + """ + https://trac.transmissionbt.com/browser/branches/2.8x/libtransmission/transmission.h#L1853 + 0 = Torrent is stopped + 1 = Queued to check files + 2 = Checking files + 3 = Queued to download + 4 = Downloading + 5 = Queued to seed + 6 = Seeding + """ + status = 'busy' if torrent.get('isStalled') and not torrent['percentDone'] == 1 and self.conf('stalled_as_failed'): status = 'failed' elif torrent['status'] == 0 and torrent['percentDone'] == 1: status = 'completed' - elif torrent['status'] == 16 and torrent['percentDone'] == 1: + elif torrent['status'] == 6 and torrent['percentDone'] == 1: status = 'completed' elif torrent['status'] in [5, 6]: status = 'seeding' From c73a37b7962a42efc522d125afa2458c86f8b8b3 Mon Sep 17 00:00:00 2001 From: Fernando Date: Fri, 20 Jan 2017 14:51:08 -0300 Subject: [PATCH 02/20] Read param isFinished because otherwise torrent can be paused but seed is not finished --- couchpotato/core/downloaders/transmission.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/couchpotato/core/downloaders/transmission.py b/couchpotato/core/downloaders/transmission.py index 41f5c23..4bb7dbe 100644 --- a/couchpotato/core/downloaders/transmission.py +++ b/couchpotato/core/downloaders/transmission.py @@ -157,9 +157,7 @@ class Transmission(DownloaderBase): status = 'busy' if torrent.get('isStalled') and not torrent['percentDone'] == 1 and self.conf('stalled_as_failed'): status = 'failed' - elif torrent['status'] == 0 and torrent['percentDone'] == 1: - status = 'completed' - elif torrent['status'] == 6 and torrent['percentDone'] == 1: + elif torrent['status'] == 0 and torrent['percentDone'] == 1 and torrent['isFinished']: status = 'completed' elif torrent['status'] in [5, 6]: status = 'seeding' From 068634305298b8563922c6ba05a032103ba29908 Mon Sep 17 00:00:00 2001 From: Hans van Luttikhuizen Date: Thu, 4 May 2017 16:04:37 +0200 Subject: [PATCH 03/20] Fix letterboxd integration --- couchpotato/core/media/movie/providers/automation/letterboxd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/couchpotato/core/media/movie/providers/automation/letterboxd.py b/couchpotato/core/media/movie/providers/automation/letterboxd.py index e1fcddb..41fbd88 100644 --- a/couchpotato/core/media/movie/providers/automation/letterboxd.py +++ b/couchpotato/core/media/movie/providers/automation/letterboxd.py @@ -49,7 +49,7 @@ class Letterboxd(Automation): soup = BeautifulSoup(self.getHTMLData(self.url % 
username)) for movie in soup.find_all('li', attrs = {'class': 'poster-container'}): - img = movie.find('img', movie) + img = movie.find('img') title = img.get('alt') movies.append({ From 5ba6901d0001fb62530e3daf9f987ba5399c112e Mon Sep 17 00:00:00 2001 From: Hans van Luttikhuizen Date: Thu, 4 May 2017 17:36:41 +0200 Subject: [PATCH 04/20] Support multiple watchlist pages for Letterboxd --- .../media/movie/providers/automation/letterboxd.py | 27 +++++++++++++++------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/couchpotato/core/media/movie/providers/automation/letterboxd.py b/couchpotato/core/media/movie/providers/automation/letterboxd.py index 41fbd88..adfc828 100644 --- a/couchpotato/core/media/movie/providers/automation/letterboxd.py +++ b/couchpotato/core/media/movie/providers/automation/letterboxd.py @@ -13,7 +13,7 @@ autoload = 'Letterboxd' class Letterboxd(Automation): - url = 'http://letterboxd.com/%s/watchlist/' + url = 'http://letterboxd.com/%s/watchlist/page/%d/' pattern = re.compile(r'(.*)\((\d*)\)') interval = 1800 @@ -46,18 +46,29 @@ class Letterboxd(Automation): if not enablers[index]: continue - soup = BeautifulSoup(self.getHTMLData(self.url % username)) + soup = BeautifulSoup(self.getHTMLData(self.url % (username, 1))) - for movie in soup.find_all('li', attrs = {'class': 'poster-container'}): - img = movie.find('img') - title = img.get('alt') + pagination_items = soup.find_all('li', attrs={'class': 'paginate-page'}) + pages = range(1, len(pagination_items) + 1) if pagination_items else [1] - movies.append({ - 'title': title - }) + for page in pages: + soup = BeautifulSoup(self.getHTMLData(self.url % (username, page))) + movies += self.getMoviesFromHTML(soup) return movies + def getMoviesFromHTML(self, html): + movies = [] + + for movie in html.find_all('li', attrs={'class': 'poster-container'}): + img = movie.find('img') + title = img.get('alt') + + movies.append({ + 'title': title + }) + + return movies config = [{ 'name': 'letterboxd', From 32fdf53b623d4f022aea19ccf8e08d47f5345981 Mon Sep 17 00:00:00 2001 From: Hans van Luttikhuizen Date: Thu, 4 May 2017 20:20:36 +0200 Subject: [PATCH 05/20] Support watchlist with more than 5 pages --- couchpotato/core/media/movie/providers/automation/letterboxd.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/couchpotato/core/media/movie/providers/automation/letterboxd.py b/couchpotato/core/media/movie/providers/automation/letterboxd.py index adfc828..072c416 100644 --- a/couchpotato/core/media/movie/providers/automation/letterboxd.py +++ b/couchpotato/core/media/movie/providers/automation/letterboxd.py @@ -48,8 +48,9 @@ class Letterboxd(Automation): soup = BeautifulSoup(self.getHTMLData(self.url % (username, 1))) - pagination_items = soup.find_all('li', attrs={'class': 'paginate-page'}) - pages = range(1, len(pagination_items) + 1) if pagination_items else [1] + pagination = soup.find_all('li', attrs={'class': 'paginate-page'}) + number_of_pages = tryInt(pagination[-1].find('a').get_text()) if pagination else 1 + pages = range(1, number_of_pages) for page in pages: soup = BeautifulSoup(self.getHTMLData(self.url % (username, page))) From 2ed4154672bb4542c6f2595e10931a8ceba03ba3 Mon Sep 17 00:00:00 2001 From: Safihre Date: Sun, 30 Jul 2017 23:58:22 +0200 Subject: [PATCH 06/20] Switch from qstatus (deprecated) to queue API call for SABnzbd Will be deprecated very soon. 
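For reference, a minimal standalone sketch of the call this patch switches to, using SABnzbd's JSON output mode. The host, port and API key below are placeholders, and the error shape shown is only what SABnzbd typically returns for a bad key; inside CouchPotato the call() helper used in the diff wraps these details.

    import json
    import urllib2

    SAB_URL = 'http://localhost:8080/api'   # placeholder host/port
    SAB_APIKEY = 'changeme'                 # placeholder API key

    def sab_queue():
        # mode=queue replaces the deprecated mode=qstatus that this patch retires
        url = '%s?mode=queue&output=json&apikey=%s' % (SAB_URL, SAB_APIKEY)
        data = json.loads(urllib2.urlopen(url, timeout = 10).read())
        # a bad key typically comes back as an error payload instead of a queue object
        if data.get('error'):
            raise Exception(data['error'])
        return data['queue']
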
--- couchpotato/core/downloaders/sabnzbd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/couchpotato/core/downloaders/sabnzbd.py b/couchpotato/core/downloaders/sabnzbd.py index 0f82618..47c94ad 100644 --- a/couchpotato/core/downloaders/sabnzbd.py +++ b/couchpotato/core/downloaders/sabnzbd.py @@ -100,7 +100,7 @@ class Sabnzbd(DownloaderBase): # the version check will work even with wrong api key, so we need the next check as well sab_data = self.call({ - 'mode': 'qstatus', + 'mode': 'queue', }) if not sab_data: return False From e4b7f2dfd27e01b8aaba775a077a9b87a3a5a410 Mon Sep 17 00:00:00 2001 From: libussa Date: Sun, 13 Aug 2017 00:18:33 +0200 Subject: [PATCH 07/20] Add an option to reject scene releases when using PTP as a searcher --- .../core/media/_base/providers/torrent/passthepopcorn.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/couchpotato/core/media/_base/providers/torrent/passthepopcorn.py b/couchpotato/core/media/_base/providers/torrent/passthepopcorn.py index c96afd1..6a13ff4 100644 --- a/couchpotato/core/media/_base/providers/torrent/passthepopcorn.py +++ b/couchpotato/core/media/_base/providers/torrent/passthepopcorn.py @@ -73,6 +73,8 @@ class Base(TorrentProvider): torrentdesc += ' Scene' if self.conf('prefer_scene'): torrentscore += 2000 + if self.conf('no_scene'): + torrentscore -= 2000 if 'RemasterTitle' in torrent and torrent['RemasterTitle']: torrentdesc += self.htmlToASCII(' %s' % torrent['RemasterTitle']) @@ -258,6 +260,14 @@ config = [{ 'description': 'Favors scene-releases over non-scene releases.' }, { + 'name': 'no_scene', + 'advanced': True, + 'type': 'bool', + 'label': 'Reject scene', + 'default': 0, + 'description': 'Reject scene-releases over non-scene releases.' + }, + { 'name': 'require_approval', 'advanced': True, 'type': 'bool', From 131005036fbe33133891570018c9881cd8ec2f53 Mon Sep 17 00:00:00 2001 From: Filip Andre Larsen Tomren Date: Sat, 19 Aug 2017 17:07:44 +0200 Subject: [PATCH 08/20] Fixed BitHDTV login failure. 
Now based on cookie from a logged in session --- .../core/media/_base/providers/torrent/bithdtv.py | 41 +++++++++++++--------- 1 file changed, 25 insertions(+), 16 deletions(-) diff --git a/couchpotato/core/media/_base/providers/torrent/bithdtv.py b/couchpotato/core/media/_base/providers/torrent/bithdtv.py index 149d7c1..a3eb1d9 100644 --- a/couchpotato/core/media/_base/providers/torrent/bithdtv.py +++ b/couchpotato/core/media/_base/providers/torrent/bithdtv.py @@ -13,9 +13,6 @@ log = CPLog(__name__) class Base(TorrentProvider): urls = { - 'test': 'https://www.bit-hdtv.com/', - 'login': 'https://www.bit-hdtv.com/takelogin.php', - 'login_check': 'https://www.bit-hdtv.com/messages.php', 'detail': 'https://www.bit-hdtv.com/details.php?id=%s', 'search': 'https://www.bit-hdtv.com/torrents.php?', 'download': 'https://www.bit-hdtv.com/download.php?id=%s', @@ -31,7 +28,7 @@ class Base(TorrentProvider): url = "%s&%s" % (self.urls['search'], query) - data = self.getHTMLData(url) + data = self.getHTMLData(url, headers = self.getRequestHeaders()) if data: # Remove BiT-HDTV's output garbage so outdated BS4 versions successfully parse the HTML @@ -42,11 +39,12 @@ class Base(TorrentProvider): html = BeautifulSoup(data, 'html.parser') try: - result_tables = html.find_all('table', attrs = {'width': '750', 'class': ''}) + result_tables = html.find_all('table', attrs = {'width': '800', 'class': ''}) if result_tables is None: return - result_table = result_tables[1] + # Take first result + result_table = result_tables[0] if result_table is None: return @@ -72,10 +70,10 @@ class Base(TorrentProvider): except: log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc())) - def getLoginParams(self): + def getRequestHeaders(self): + cookies = 'h_sl={};h_sp={};h_su={}'.format(self.conf('cookiesettingsl') or '', self.conf('cookiesettingsp') or '', self.conf('cookiesettingsu') or '') return { - 'username': self.conf('username'), - 'password': self.conf('password'), + 'Cookie': cookies } def getMoreInfo(self, item): @@ -87,11 +85,13 @@ class Base(TorrentProvider): item['description'] = description return item - def loginSuccess(self, output): - return 'logout.php' in output.lower() - - loginCheckSuccess = loginSuccess + def download(self, url = '', nzb_id = ''): + try: + return self.urlopen(url, headers=self.getRequestHeaders()) + except: + log.error('Failed getting release from %s: %s', (self.getName(), traceback.format_exc())) + return 'try_next' config = [{ 'name': 'bithdtv', @@ -110,13 +110,22 @@ config = [{ 'default': False, }, { - 'name': 'username', + 'name': 'cookiesettingsl', + 'label': 'Cookies (h_sl)', + 'default': '', + 'description': 'Cookie h_sl from session', + }, + { + 'name': 'cookiesettingsp', + 'label': 'Cookies (h_sp)', 'default': '', + 'description': 'Cookie h_sp from session', }, { - 'name': 'password', + 'name': 'cookiesettingsu', + 'label': 'Cookies (h_su)', 'default': '', - 'type': 'password', + 'description': 'Cookie h_su from session', }, { 'name': 'seed_ratio', From f4489e9565325237c206cda82355c35037f5b907 Mon Sep 17 00:00:00 2001 From: ylde pm Date: Wed, 23 Aug 2017 03:43:50 +0100 Subject: [PATCH 09/20] Replacing torrentz provider with torrentz2. 
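The new feed reports sizes as 'N MB' or 'N GB' (KB is added by a follow-up patch further down), while CouchPotato filters releases in megabytes, so the provider normalises the unit before storing the size. A standalone sketch of that normalisation, folding both changes together and using a made-up description string; the group names are the ones the provider's regex captures.

    import re

    description = 'Size: 2 GB Seeds: 1,234 Peers: 56'   # made-up feed text in the expected shape

    reg = re.search(r'Size: (?P<size>\d+) (?P<unit>[KMG]B) '
                    r'Seeds: (?P<seeds>[\d,]+) Peers: (?P<peers>[\d,]+)', description)

    multiplier = {'KB': 0, 'MB': 1, 'GB': 1000}[reg.group('unit')]   # KB-sized results round down to 0 MB
    size_mb = int(reg.group('size')) * multiplier                    # 2000
    seeders = int(reg.group('seeds').replace(',', ''))               # 1234
    leechers = int(reg.group('peers').replace(',', ''))              # 56
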
--- .../core/media/_base/providers/torrent/torrentz.py | 30 ++++++++-------------- .../core/media/movie/providers/torrent/torrentz.py | 2 +- 2 files changed, 12 insertions(+), 20 deletions(-) diff --git a/couchpotato/core/media/_base/providers/torrent/torrentz.py b/couchpotato/core/media/_base/providers/torrent/torrentz.py index 8412a8d..db27dcc 100644 --- a/couchpotato/core/media/_base/providers/torrent/torrentz.py +++ b/couchpotato/core/media/_base/providers/torrent/torrentz.py @@ -15,25 +15,19 @@ log = CPLog(__name__) class Base(TorrentMagnetProvider, RSS): urls = { - 'detail': 'https://torrentz.eu/%s', - 'search': 'https://torrentz.eu/feed?q=%s', - 'verified_search': 'https://torrentz.eu/feed_verified?q=%s' + 'detail': 'https://torrentz2.eu/%s', + 'search': 'https://torrentz2.eu/feed?f=%s' } http_time_between_calls = 0 def _searchOnTitle(self, title, media, quality, results): - search_url = self.urls['verified_search'] if self.conf('verified_only') else self.urls['search'] + search_url = self.urls['search'] # Create search parameters search_params = self.buildUrl(title, media, quality) - smin = quality.get('size_min') - smax = quality.get('size_max') - if smin and smax: - search_params += ' size %sm - %sm' % (smin, smax) - min_seeds = tryInt(self.conf('minimal_seeds')) if min_seeds: search_params += ' seed > %s' % (min_seeds - 1) @@ -52,17 +46,22 @@ class Base(TorrentMagnetProvider, RSS): magnet = splitString(detail_url, '/')[-1] magnet_url = 'magnet:?xt=urn:btih:%s&dn=%s&tr=%s' % (magnet.upper(), tryUrlencode(name), tryUrlencode('udp://tracker.openbittorrent.com/announce')) - reg = re.search('Size: (?P\d+) MB Seeds: (?P[\d,]+) Peers: (?P[\d,]+)', six.text_type(description)) + reg = re.search('Size: (?P\d+) (?P[MG]B) Seeds: (?P[\d,]+) Peers: (?P[\d,]+)', six.text_type(description)) size = reg.group('size') + unit = reg.group('unit') seeds = reg.group('seeds').replace(',', '') peers = reg.group('peers').replace(',', '') + multiplier = 1 + if unit == 'GB': + multiplier = 1000 + results.append({ 'id': magnet, 'name': six.text_type(name), 'url': magnet_url, 'detail_url': detail_url, - 'size': tryInt(size), + 'size': tryInt(size)*multiplier, 'seeders': tryInt(seeds), 'leechers': tryInt(peers), }) @@ -78,7 +77,7 @@ config = [{ 'tab': 'searcher', 'list': 'torrent_providers', 'name': 'Torrentz', - 'description': 'Torrentz is a free, fast and powerful meta-search engine. Torrentz', + 'description': 'Torrentz.eu was a free, fast and powerful meta-search engine combining results from dozens of search engines, Torrentz2.eu is trying to replace it. 
Torrentz2', 'wizard': True, 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAQklEQVQ4y2NgAALjtJn/ycEMlGiGG0IVAxiwAKzOxaKGARcgxgC8YNSAwWoAzuRMjgsIugqfAUR5CZcBRIcHsWEAADSA96Ig020yAAAAAElFTkSuQmCC', 'options': [ @@ -88,13 +87,6 @@ config = [{ 'default': True }, { - 'name': 'verified_only', - 'type': 'bool', - 'default': True, - 'advanced': True, - 'description': 'Only search verified releases', - }, - { 'name': 'minimal_seeds', 'type': 'int', 'default': 1, diff --git a/couchpotato/core/media/movie/providers/torrent/torrentz.py b/couchpotato/core/media/movie/providers/torrent/torrentz.py index 011ec43..d1294e6 100644 --- a/couchpotato/core/media/movie/providers/torrent/torrentz.py +++ b/couchpotato/core/media/movie/providers/torrent/torrentz.py @@ -11,4 +11,4 @@ autoload = 'Torrentz' class Torrentz(MovieProvider, Base): def buildUrl(self, title, media, quality): - return tryUrlencode('"%s %s"' % (title, media['info']['year'])) \ No newline at end of file + return tryUrlencode('%s %s' % (title, media['info']['year'])) From eea8d7b0e252354e034ab897b5e10fc5fb3e7925 Mon Sep 17 00:00:00 2001 From: ylde pm Date: Wed, 23 Aug 2017 04:27:33 +0100 Subject: [PATCH 10/20] cater for KB size files --- couchpotato/core/media/_base/providers/torrent/torrentz.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/couchpotato/core/media/_base/providers/torrent/torrentz.py b/couchpotato/core/media/_base/providers/torrent/torrentz.py index db27dcc..96e8025 100644 --- a/couchpotato/core/media/_base/providers/torrent/torrentz.py +++ b/couchpotato/core/media/_base/providers/torrent/torrentz.py @@ -46,7 +46,7 @@ class Base(TorrentMagnetProvider, RSS): magnet = splitString(detail_url, '/')[-1] magnet_url = 'magnet:?xt=urn:btih:%s&dn=%s&tr=%s' % (magnet.upper(), tryUrlencode(name), tryUrlencode('udp://tracker.openbittorrent.com/announce')) - reg = re.search('Size: (?P\d+) (?P[MG]B) Seeds: (?P[\d,]+) Peers: (?P[\d,]+)', six.text_type(description)) + reg = re.search('Size: (?P\d+) (?P[KMG]B) Seeds: (?P[\d,]+) Peers: (?P[\d,]+)', six.text_type(description)) size = reg.group('size') unit = reg.group('unit') seeds = reg.group('seeds').replace(',', '') @@ -55,6 +55,8 @@ class Base(TorrentMagnetProvider, RSS): multiplier = 1 if unit == 'GB': multiplier = 1000 + elif unit == 'KB': + multiplier = 0 results.append({ 'id': magnet, From 51cc60c495520bdb35679b23fa1fe1cce3e30dd9 Mon Sep 17 00:00:00 2001 From: Martin Warnaar Date: Wed, 30 Aug 2017 00:47:04 +0200 Subject: [PATCH 11/20] Dismiss details overlay on pressing escape --- couchpotato/core/media/movie/_base/static/details.js | 11 ++++++++--- couchpotato/static/scripts/combined.plugins.min.js | 9 ++++++++- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/couchpotato/core/media/movie/_base/static/details.js b/couchpotato/core/media/movie/_base/static/details.js index 127e260..dd156ba 100644 --- a/couchpotato/core/media/movie/_base/static/details.js +++ b/couchpotato/core/media/movie/_base/static/details.js @@ -77,7 +77,6 @@ var MovieDetails = new Class({ 'class': parent.get('title') == t ? 
'icon-ok' : '' })); }); - }, addSection: function(name, section_el){ @@ -101,7 +100,7 @@ var MovieDetails = new Class({ var self = this; self.el.addClass('show'); - + document.onkeyup = self.keyup.bind(self); //if(!App.mobile_screen){ // $(self.content).getElements('> .head, > .section').each(function(section, nr){ // dynamics.css(section, { @@ -130,12 +129,19 @@ var MovieDetails = new Class({ }, + keyup: function(e) { + if (e.keyCode == 27 /* Esc */) { + this.close(); + } + }, + close: function(){ var self = this; var ended = function() { self.el.dispose(); self.overlay.removeEventListener('transitionend', ended); + document.onkeyup = null; }; self.overlay.addEventListener('transitionend', ended, false); @@ -165,5 +171,4 @@ var MovieDetails = new Class({ App.removeEvent('history.push', self.outer_click); } - }); diff --git a/couchpotato/static/scripts/combined.plugins.min.js b/couchpotato/static/scripts/combined.plugins.min.js index b93d282..1862ca9 100644 --- a/couchpotato/static/scripts/combined.plugins.min.js +++ b/couchpotato/static/scripts/combined.plugins.min.js @@ -382,16 +382,23 @@ var MovieDetails = new Class({ open: function() { var self = this; self.el.addClass("show"); + document.onkeyup = self.keyup.bind(self); self.outer_click = function() { self.close(); }; App.addEvent("history.push", self.outer_click); }, + keyup: function(e) { + if (e.keyCode == 27) { + this.close(); + } + }, close: function() { var self = this; var ended = function() { self.el.dispose(); self.overlay.removeEventListener("transitionend", ended); + document.onkeyup = null; }; self.overlay.addEventListener("transitionend", ended, false); self.el.removeClass("show"); @@ -3416,7 +3423,7 @@ var QualityBase = new Class({ try { return this.qualities.filter(function(q) { return q.identifier == identifier; - }).pick(); + }).pick() || {}; } catch (e) {} return {}; }, From 0f31ac81bfa75c8c1680f3377288785ada3b614e Mon Sep 17 00:00:00 2001 From: Jon Whitter Date: Wed, 13 Sep 2017 11:48:12 +0100 Subject: [PATCH 12/20] Reworked the categories for TorrentLeech to catch WEB-DL, WEBRip, HDRip and 2160p --- couchpotato/core/media/movie/providers/torrent/torrentleech.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/couchpotato/core/media/movie/providers/torrent/torrentleech.py b/couchpotato/core/media/movie/providers/torrent/torrentleech.py index eea74f8..91093e3 100644 --- a/couchpotato/core/media/movie/providers/torrent/torrentleech.py +++ b/couchpotato/core/media/movie/providers/torrent/torrentleech.py @@ -11,12 +11,14 @@ autoload = 'TorrentLeech' class TorrentLeech(MovieProvider, Base): cat_ids = [ - ([13], ['720p', '1080p', 'bd50']), + ([41], ['2160p']), + ([13, 14, 37, 43], ['720p', '1080p']), + ([13], ['bd50']), ([8], ['cam']), ([9], ['ts', 'tc']), - ([10], ['r5', 'scr']), + ([10, 11, 37], ['r5', 'scr']), ([11], ['dvdrip']), - ([13, 14], ['brrip']), + ([13, 14, 37, 43], ['brrip']), ([12], ['dvdr']), ] From 1a9e7e4b0bd7c67e09d594d1b30963243aaa9eac Mon Sep 17 00:00:00 2001 From: nenladar <31939341+nenladar@users.noreply.github.com> Date: Thu, 28 Sep 2017 15:49:13 +0200 Subject: [PATCH 13/20] Add support for 2160p quality to AR When using AlphaRatio with a 2160p quality request, this would previously default to using the the cat_backup_id category, which is MoviesSD, rather than using MoviesHD. 
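cat_ids maps lists of tracker category ids to the CouchPotato quality identifiers they serve, and a quality that matches no row falls back to the provider's cat_backup_id. A rough sketch of that lookup with a simplified table; the numeric backup id here is hypothetical and the real helper in the provider base class differs in detail.

    # Simplified illustration of the cat_ids lookup this patch extends.
    cat_ids = [
        ([7, 9], ['bd50']),
        ([7, 9], ['720p', '1080p', '2160p']),   # 2160p now resolves to the HD categories
        ([6, 8], ['dvdr']),
        ([6, 8], ['brrip', 'dvdrip']),
    ]
    cat_backup_id = 6   # hypothetical value, standing in for the MoviesSD fallback

    def get_cat_ids(quality_identifier):
        for ids, qualities in cat_ids:
            if quality_identifier in qualities:
                return ids
        return [cat_backup_id]

    # Before the patch, get_cat_ids('2160p') fell through to the backup (SD) category;
    # afterwards it returns the HD category ids [7, 9].
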
--- couchpotato/core/media/movie/providers/torrent/alpharatio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/couchpotato/core/media/movie/providers/torrent/alpharatio.py b/couchpotato/core/media/movie/providers/torrent/alpharatio.py index e7f39c1..762ef47 100644 --- a/couchpotato/core/media/movie/providers/torrent/alpharatio.py +++ b/couchpotato/core/media/movie/providers/torrent/alpharatio.py @@ -19,7 +19,7 @@ class AlphaRatio(MovieProvider, Base): cat_ids = [ ([7, 9], ['bd50']), - ([7, 9], ['720p', '1080p']), + ([7, 9], ['720p', '1080p', '2160p']), ([6, 8], ['dvdr']), ([6, 8], ['brrip', 'dvdrip']), ] From fc59c6505db6946503b5e4d9a5ff9dd4edba026a Mon Sep 17 00:00:00 2001 From: whitter Date: Thu, 28 Sep 2017 15:00:07 +0100 Subject: [PATCH 14/20] Added new TorrentLeech category 47 Added new category for 'Real 4K UltraHD HDR' to the 2160p quality --- couchpotato/core/media/movie/providers/torrent/torrentleech.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/couchpotato/core/media/movie/providers/torrent/torrentleech.py b/couchpotato/core/media/movie/providers/torrent/torrentleech.py index 91093e3..bfa5cd1 100644 --- a/couchpotato/core/media/movie/providers/torrent/torrentleech.py +++ b/couchpotato/core/media/movie/providers/torrent/torrentleech.py @@ -11,7 +11,7 @@ autoload = 'TorrentLeech' class TorrentLeech(MovieProvider, Base): cat_ids = [ - ([41], ['2160p']), + ([41, 47], ['2160p']), ([13, 14, 37, 43], ['720p', '1080p']), ([13], ['bd50']), ([8], ['cam']), From 367fe5987d63fe4383f4a49c9e85d907f6cf69f4 Mon Sep 17 00:00:00 2001 From: roeffus Date: Thu, 28 Sep 2017 20:42:45 +0200 Subject: [PATCH 15/20] BinSearch API update fixes BinSearch API update. Fixes ERROR [hpotato.core.plugins.base] Failed opening url in BinSearch. --- couchpotato/core/media/movie/providers/nzb/binsearch.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/couchpotato/core/media/movie/providers/nzb/binsearch.py b/couchpotato/core/media/movie/providers/nzb/binsearch.py index d6f4852..b3e59c0 100644 --- a/couchpotato/core/media/movie/providers/nzb/binsearch.py +++ b/couchpotato/core/media/movie/providers/nzb/binsearch.py @@ -21,7 +21,7 @@ class BinSearch(MovieProvider, Base): 'adv_sort': 'date', 'adv_col': 'on', 'adv_nfo': 'on', - 'minsize': quality.get('size_min'), - 'maxsize': quality.get('size_max'), + 'xminsize': quality.get('size_min'), + 'xmaxsize': quality.get('size_max'), }) return query From 6106332baae183893602ec03839daa2f0dac93f7 Mon Sep 17 00:00:00 2001 From: ofir123 Date: Tue, 3 Oct 2017 22:42:46 +0300 Subject: [PATCH 16/20] Added new SubsCenter provider. 
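The rewritten service talks to a JSON API instead of scraping HTML: it logs in once for a user id and a token, then sends both as form-encoded POST data with every search and download. A condensed sketch of that flow using the requests library; the endpoints, field names and default credentials mirror the patch below, while the title and years are only an example.

    import requests
    from urllib import urlencode   # Python 2, as in the patch

    SERVER = 'http://www.cinemast.org/he/cinemast/api/'
    session = requests.Session()

    # 1. Log in once; the API answers with a user id and a token.
    login = session.post(SERVER + 'login/', data = urlencode({
        'username': 'subliminal@gmail.com',   # built-in defaults shipped with the patch
        'password': 'subliminal',
    }), timeout = 10).json()
    user_id, token = login['user'], login['token']

    # 2. Every later call repeats the pair, e.g. a movie search:
    query = {
        'user': user_id, 'token': token,
        'q': 'The Matrix', 'type': 'movies',   # illustrative title and year range
        'year_start': 1998, 'year_end': 1999,
    }
    subtitles = session.post(SERVER + 'search/', data = urlencode(query), timeout = 10).json()
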
--- libs/subliminal/services/__init__.py | 18 +++- libs/subliminal/services/subscenter.py | 183 +++++++++++++++++++-------------- 2 files changed, 117 insertions(+), 84 deletions(-) diff --git a/libs/subliminal/services/__init__.py b/libs/subliminal/services/__init__.py index b82b309..b169aaf 100755 --- a/libs/subliminal/services/__init__.py +++ b/libs/subliminal/services/__init__.py @@ -183,16 +183,21 @@ class ServiceBase(object): return False return True - def download_file(self, url, filepath): + def download_file(self, url, filepath, data=None): """Attempt to download a file and remove it in case of failure :param string url: URL to download :param string filepath: destination path + :param string data: data to add to the post request """ logger.info(u'Downloading %s in %s' % (url, filepath)) try: - r = self.session.get(url, timeout = 10, headers = {'Referer': url, 'User-Agent': self.user_agent}) + headers = {'Referer': url, 'User-Agent': self.user_agent} + if data: + r = self.session.post(url, data=data, timeout=10, headers=headers) + else: + r = self.session.get(url, timeout=10, headers=headers) with open(filepath, 'wb') as f: f.write(r.content) except Exception as e: @@ -202,18 +207,23 @@ class ServiceBase(object): raise DownloadFailedError(str(e)) logger.debug(u'Download finished') - def download_zip_file(self, url, filepath): + def download_zip_file(self, url, filepath, data=None): """Attempt to download a zip file and extract any subtitle file from it, if any. This cleans up after itself if anything fails. :param string url: URL of the zip file to download :param string filepath: destination path for the subtitle + :param string data: data to add to the post request """ logger.info(u'Downloading %s in %s' % (url, filepath)) try: zippath = filepath + '.zip' - r = self.session.get(url, timeout = 10, headers = {'Referer': url, 'User-Agent': self.user_agent}) + headers = {'Referer': url, 'User-Agent': self.user_agent} + if data: + r = self.session.post(url, data=data, timeout=10, headers=headers) + else: + r = self.session.get(url, timeout=10, headers=headers) with open(zippath, 'wb') as f: f.write(r.content) if not zipfile.is_zipfile(zippath): diff --git a/libs/subliminal/services/subscenter.py b/libs/subliminal/services/subscenter.py index 258edad..7125f92 100644 --- a/libs/subliminal/services/subscenter.py +++ b/libs/subliminal/services/subscenter.py @@ -16,124 +16,147 @@ # You should have received a copy of the GNU Lesser General Public License # along with subliminal. If not, see . from . import ServiceBase -from ..exceptions import DownloadFailedError, ServiceError +from ..exceptions import ServiceError from ..language import language_set from ..subtitles import get_subtitle_path, ResultSubtitle from ..videos import Episode, Movie -from ..utils import to_unicode, get_keywords -from bs4 import BeautifulSoup +from ..utils import to_unicode + import bisect -import json import logging +from urllib import urlencode + logger = logging.getLogger(__name__) class Subscenter(ServiceBase): - server = 'http://www.subscenter.info/he/' - api_based = False + server = 'http://www.cinemast.org/he/cinemast/api/' + api_based = True languages = language_set(['he']) videos = [Episode, Movie] require_video = False - def _search_url_title(self, title, kind): - """Search the URL title for the given `title`. - - :param str title: title to search for. - :param str kind: kind of the title, ``movie`` or ``series``. - :return: the URL version of the title. 
- :rtype: str or None - """ - # make the search - logger.info('Searching title name for %r', title) - r = self.session.get(self.server + 'subtitle/search/', params={'q': title}, allow_redirects=False, timeout=10) - r.raise_for_status() + default_username = 'subliminal@gmail.com' + default_password = 'subliminal' - # if redirected, get the url title from the Location header - if r.is_redirect: - parts = r.headers['Location'].split('/') + def __init__(self, config=None): + super(Subscenter, self).__init__(config) + self.token = None + self.user_id = None - # check kind - if parts[-3] == kind: - return parts[-2] + def init(self): + super(Subscenter, self).init() + logger.debug('Logging in') + url = self.server_url + 'login/' - return None + # actual login + data = {'username': self.default_username, 'password': self.default_password} + r = self.session.post(url, data=urlencode(data), allow_redirects=False, timeout=10) - # otherwise, get the first valid suggestion - soup = BeautifulSoup(r.content, ['lxml', 'html.parser']) - suggestions = soup.select('#processes div.generalWindowTop a') - logger.debug('Found %d suggestions', len(suggestions)) - for suggestion in suggestions: - parts = suggestion.attrs['href'].split('/') + if r.status_code != 200: + raise ServiceError('Login failed') + + try: + result = r.json() + if 'token' not in result: + raise ServiceError('Login failed') - # check kind - if parts[-3] == kind: - return parts[-2] + logger.info('Logged in') + self.user_id = r.json().get('user') + self.token = r.json().get('token') + except ValueError: + raise ServiceError('Login failed') + + def terminate(self): + super(Subscenter, self).terminate() + if self.token or self.user_id: + logger.info('Logged out') + self.token = None + self.user_id = None def list_checked(self, video, languages): series = None season = None episode = None title = video.title + year = video.year if isinstance(video, Episode): series = video.series season = video.season episode = video.episode - return self.query(video.path or video.release, languages, get_keywords(video.guess), series, season, - episode, title) + return self.query(video.path or video.release, languages, series, season, episode, title, year) - def query(self, filepath, languages=None, keywords=None, series=None, season=None, episode=None, title=None): + def query(self, filepath, languages=None, series=None, season=None, episode=None, title=None, year=None): logger.debug(u'Getting subtitles for {0} season {1} episode {2} with languages {3}'.format( series, season, episode, languages)) - # Set the correct parameters depending on the kind. 
- if series and season and episode: - url_series = self._search_url_title(series, 'series') - url = self.server + 'cst/data/series/sb/{}/{}/{}/'.format(url_series, season, episode) + + query = { + 'user': self.user_id, + 'token': self.token + } + + # episode + if season and episode: + query['q'] = series + query['type'] = 'series' + query['season'] = season + query['episode'] = episode elif title: - url_title = self._search_url_title(title, 'movie') - url = self.server + 'cst/data/movie/sb/{}/'.format(url_title) + query['q'] = title + query['type'] = 'movies' + if year: + query['year_start'] = year - 1 + query['year_end'] = year else: raise ServiceError('One or more parameters are missing') - logger.debug('Searching subtitles for title {0}, season {1}, episode {2}'.format(title, season, episode)) - response = self.session.get(url) - if response.status_code != 200: - raise ServiceError('Request failed with status code {0}'.format(response.status_code)) - # Loop over results. - subtitles = dict() - response_json = json.loads(response.content) - for language_code, language_data in response_json.items(): - language_object = self.get_language(language_code) - if language_object in self.languages and language_object in languages: - for quality_data in language_data.values(): - for quality, subtitles_data in quality_data.items(): - for subtitle_item in subtitles_data.values(): - # Read the item. - subtitle_id = subtitle_item['id'] - subtitle_key = subtitle_item['key'] - subtitle_version = subtitle_item['h_version'] - release = subtitle_item['subtitle_version'] - subtitle_path = get_subtitle_path(filepath, language_object, self.config.multi) - download_link = self.server_url + 'subtitle/download/{0}/{1}/?v={2}&key={3}'.format( - language_code, subtitle_id, subtitle_version, subtitle_key) - # Add the release and increment downloaded count if we already have the subtitle. - if subtitle_id in subtitles: - logger.debug('Found additional release {0} for subtitle {1}'.format( - release, subtitle_id)) - bisect.insort_left(subtitles[subtitle_id].release, release) # Deterministic order. - continue - # Otherwise create it. - subtitle = ResultSubtitle(subtitle_path, language_object, self.__class__.__name__.lower(), - download_link, release=to_unicode(release)) - logger.debug('Found subtitle %r', subtitle) - subtitles[subtitle_id] = subtitle + + # get the list of subtitles + logger.debug('Getting the list of subtitles') + url = self.server_url + 'search/' + r = self.session.post(url, data=urlencode(query)) + r.raise_for_status() + + try: + results = r.json() + except ValueError: + return {} + + # loop over results + subtitles = {} + for group_data in results.get('data', []): + for language_code, subtitles_data in group_data.get('subtitles', {}).items(): + language_object = self.get_language(language_code) + + for subtitle_item in subtitles_data: + # read the item + subtitle_id = subtitle_item['id'] + subtitle_key = subtitle_item['key'] + release = subtitle_item['version'] + + subtitle_path = get_subtitle_path(filepath, language_object, self.config.multi) + download_link = self.server_url + 'subtitle/download/{0}/?v={1}&key={2}&sub_id={3}'.format( + language_code, release, subtitle_key, subtitle_id) + # Add the release and increment downloaded count if we already have the subtitle. + if subtitle_id in subtitles: + logger.debug('Found additional release {0} for subtitle {1}'.format( + release, subtitle_id)) + bisect.insort_left(subtitles[subtitle_id].release, release) # Deterministic order. 
+ continue + # Otherwise create it. + subtitle = ResultSubtitle(subtitle_path, language_object, self.__class__.__name__.lower(), + download_link, release=to_unicode(release)) + logger.debug('Found subtitle %r', subtitle) + subtitles[subtitle_id] = subtitle + return subtitles.values() def download(self, subtitle): - try: - self.download_zip_file(subtitle.link, subtitle.path) - except DownloadFailedError: - # If no zip file was retrieved, daily downloads limit has exceeded. - raise ServiceError('Daily limit exceeded') + data = { + 'user': self.user_id, + 'token': self.token + } + self.download_zip_file(subtitle.link, subtitle.path, data=urlencode(data)) return subtitle From 4bdde2b6541f1f9eaa2f79cca3bd957e0e118386 Mon Sep 17 00:00:00 2001 From: andofrjando Date: Sun, 8 Oct 2017 11:04:57 +0800 Subject: [PATCH 17/20] New Feature: Safari pinned tab icon See https://developer.apple.com/library/content/documentation/AppleApplications/Reference/SafariWebContent/pinnedTabs/pinnedTabs.html for documentation --- .gitignore | 2 ++ couchpotato/templates/index.html | 3 +++ 2 files changed, 5 insertions(+) diff --git a/.gitignore b/.gitignore index 1d21d2e..78fda6d 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,5 @@ nosetests.xml # Visual Studio /.vs + +.DS_Store diff --git a/couchpotato/templates/index.html b/couchpotato/templates/index.html index 927b2d0..4020773 100644 --- a/couchpotato/templates/index.html +++ b/couchpotato/templates/index.html @@ -15,6 +15,9 @@ + + + From 8b1659caa0ee2654115abd3ea9adce1839d71eb4 Mon Sep 17 00:00:00 2001 From: andofrjando Date: Sun, 8 Oct 2017 11:05:48 +0800 Subject: [PATCH 18/20] Commit icons for pinned tab --- couchpotato/static/images/icons/dark/safari.svg | 1 + couchpotato/static/images/icons/safari.svg | 1 + 2 files changed, 2 insertions(+) create mode 100644 couchpotato/static/images/icons/dark/safari.svg create mode 100644 couchpotato/static/images/icons/safari.svg diff --git a/couchpotato/static/images/icons/dark/safari.svg b/couchpotato/static/images/icons/dark/safari.svg new file mode 100644 index 0000000..89b5092 --- /dev/null +++ b/couchpotato/static/images/icons/dark/safari.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/couchpotato/static/images/icons/safari.svg b/couchpotato/static/images/icons/safari.svg new file mode 100644 index 0000000..89b5092 --- /dev/null +++ b/couchpotato/static/images/icons/safari.svg @@ -0,0 +1 @@ + \ No newline at end of file From e1f9aa88c530130054fda316bc5d34b73a17dfbe Mon Sep 17 00:00:00 2001 From: Ruud Burger Date: Sun, 5 Nov 2017 15:12:35 +0100 Subject: [PATCH 19/20] Update xbmc.py --- couchpotato/core/media/movie/providers/metadata/xbmc.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/couchpotato/core/media/movie/providers/metadata/xbmc.py b/couchpotato/core/media/movie/providers/metadata/xbmc.py index f561618..33febf6 100644 --- a/couchpotato/core/media/movie/providers/metadata/xbmc.py +++ b/couchpotato/core/media/movie/providers/metadata/xbmc.py @@ -3,6 +3,7 @@ import os import re import traceback import xml.dom.minidom +import time from couchpotato.core.media.movie.providers.metadata.base import MovieMetaData from couchpotato.core.helpers.encoding import toUnicode @@ -111,7 +112,7 @@ class XBMC(MovieMetaData): try: if movie_info.get('released'): el = SubElement(nfoxml, 'premiered') - el.text = time.strftime('%Y:%m:%d', time.strptime(movie_info.get('released'), '%d %b %Y') + el.text = time.strftime('%Y:%m:%d', time.strptime(movie_info.get('released'), '%d %b %Y')) except: 
             log.debug('Failed to parse release date %s: %s', movie_info.get('released'), traceback.format_exc())

From e8438f178691982b344ce3a84ee485a14f3020cb Mon Sep 17 00:00:00 2001
From: kainem
Date: Wed, 6 Dec 2017 13:39:34 +1100
Subject: [PATCH 20/20] Update yts.py

Updated to new yts domain
---
 couchpotato/core/media/_base/providers/torrent/yts.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/couchpotato/core/media/_base/providers/torrent/yts.py b/couchpotato/core/media/_base/providers/torrent/yts.py
index 188f9e5..674adc3 100644
--- a/couchpotato/core/media/_base/providers/torrent/yts.py
+++ b/couchpotato/core/media/_base/providers/torrent/yts.py
@@ -11,8 +11,8 @@ class Base(TorrentMagnetProvider):
     # Only qualities allowed: 720p/1080p/3D - the rest will fail.
     # All YTS.ag torrents are verified
     urls = {
-        'detail': 'https://yts.ag/api#list_movies',
-        'search': 'https://yts.ag/api/v2/list_movies.json?query_term=%s&limit=%s&page=%s'
+        'detail': 'https://yts.am/api#list_movies',
+        'search': 'https://yts.am/api/v2/list_movies.json?query_term=%s&limit=%s&page=%s'
     }

     def _search(self, movie, quality, results):
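
For reference, a minimal standalone query against the relocated search endpoint, mirroring the 'search' URL in the hunk above. The response layout (data, movies, torrents with quality and hash) follows the public YTS v2 API, and the query term, limit and page values are only examples.

    import json
    import urllib2
    from urllib import quote_plus

    search = 'https://yts.am/api/v2/list_movies.json?query_term=%s&limit=%s&page=%s'
    url = search % (quote_plus('Big Buck Bunny'), 20, 1)   # example query term, limit and page

    data = json.loads(urllib2.urlopen(url, timeout = 10).read())
    for movie in data.get('data', {}).get('movies') or []:
        for torrent in movie.get('torrents', []):
            # the hash and quality fields are what a magnet-link provider needs
            print movie['title'], movie['year'], torrent.get('quality'), torrent.get('hash')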