Browse Source
Conflicts: couchpotato/core/media/_base/searcher/main.py, couchpotato/core/plugins/renamer/main.py (branch: pull/2352/head)
42 changed files with 830 additions and 216 deletions
@ -0,0 +1,6 @@ |
|||
from .main import Custom |
|||
|
|||
def start(): |
|||
return Custom() |
|||
|
|||
config = [] |
@ -0,0 +1,21 @@ |
|||
from couchpotato.core.event import addEvent |
|||
from couchpotato.core.logger import CPLog |
|||
from couchpotato.core.plugins.base import Plugin |
|||
from couchpotato.environment import Env |
|||
import os |
|||
|
|||
log = CPLog(__name__) |
|||
|
|||
|
|||
class Custom(Plugin): |
|||
|
|||
def __init__(self): |
|||
addEvent('app.load', self.createStructure) |
|||
|
|||
def createStructure(self): |
|||
|
|||
custom_dir = os.path.join(Env.get('data_dir'), 'custom_plugins') |
|||
|
|||
if not os.path.isdir(custom_dir): |
|||
self.makeDir(custom_dir) |
|||
self.createFile(os.path.join(custom_dir, '__init__.py'), '# Don\'t remove this file') |
@ -0,0 +1,34 @@ |
|||
from .main import Flixster |
|||
|
|||
def start(): |
|||
return Flixster() |
|||
|
|||
config = [{ |
|||
'name': 'flixster', |
|||
'groups': [ |
|||
{ |
|||
'tab': 'automation', |
|||
'list': 'watchlist_providers', |
|||
'name': 'flixster_automation', |
|||
'label': 'Flixster', |
|||
'description': 'Import movies from any public <a href="http://www.flixster.com/">Flixster</a> watchlist', |
|||
'options': [ |
|||
{ |
|||
'name': 'automation_enabled', |
|||
'default': False, |
|||
'type': 'enabler', |
|||
}, |
|||
{ |
|||
'name': 'automation_ids_use', |
|||
'label': 'Use', |
|||
}, |
|||
{ |
|||
'name': 'automation_ids', |
|||
'label': 'User ID', |
|||
'type': 'combined', |
|||
'combine': ['automation_ids_use', 'automation_ids'], |
|||
}, |
|||
], |
|||
}, |
|||
], |
|||
}] |
@ -0,0 +1,48 @@ |
|||
from couchpotato.core.helpers.variable import tryInt, splitString |
|||
from couchpotato.core.logger import CPLog |
|||
from couchpotato.core.providers.automation.base import Automation |
|||
import json |
|||
|
|||
log = CPLog(__name__) |
|||
|
|||
|
|||
class Flixster(Automation): |
|||
|
|||
url = 'http://www.flixster.com/api/users/%s/movies/ratings?scoreTypes=wts' |
|||
|
|||
interval = 60 |
|||
|
|||
def getIMDBids(self): |
|||
|
|||
ids = splitString(self.conf('automation_ids')) |
|||
|
|||
if len(ids) == 0: |
|||
return [] |
|||
|
|||
movies = [] |
|||
|
|||
for movie in self.getWatchlist(): |
|||
imdb_id = self.search(movie.get('title'), movie.get('year'), imdb_only = True) |
|||
movies.append(imdb_id) |
|||
|
|||
return movies |
|||
|
|||
def getWatchlist(self): |
|||
|
|||
enablers = [tryInt(x) for x in splitString(self.conf('automation_ids_use'))] |
|||
ids = splitString(self.conf('automation_ids')) |
|||
|
|||
index = -1 |
|||
movies = [] |
|||
for user_id in ids: |
|||
|
|||
index += 1 |
|||
if not enablers[index]: |
|||
continue |
|||
|
|||
data = json.loads(self.getHTMLData(self.url % user_id)) |
|||
|
|||
for movie in data: |
|||
movies.append({'title': movie['movie']['title'], 'year': movie['movie']['year'] }) |
|||
|
|||
return movies |
@ -0,0 +1,60 @@ |
|||
from main import ILoveTorrents |
|||
|
|||
def start(): |
|||
return ILoveTorrents() |
|||
|
|||
config = [{ |
|||
'name': 'ilovetorrents', |
|||
'groups': [ |
|||
{ |
|||
'tab': 'searcher', |
|||
'list': 'torrent_providers', |
|||
'name': 'ILoveTorrents', |
|||
'description': 'Where the Love of Torrents is Born', |
|||
'wizard': True, |
|||
'options': [ |
|||
{ |
|||
'name': 'enabled', |
|||
'type': 'enabler', |
|||
'default': False |
|||
}, |
|||
{ |
|||
'name': 'username', |
|||
'label': 'Username', |
|||
'type': 'string', |
|||
'default': '', |
|||
'description': 'The user name for your ILT account', |
|||
}, |
|||
{ |
|||
'name': 'password', |
|||
'label': 'Password', |
|||
'type': 'password', |
|||
'default': '', |
|||
'description': 'The password for your ILT account.', |
|||
}, |
|||
{ |
|||
'name': 'seed_ratio', |
|||
'label': 'Seed ratio', |
|||
'type': 'float', |
|||
'default': 1, |
|||
'description': 'Will not be (re)moved until this seed ratio is met.', |
|||
}, |
|||
{ |
|||
'name': 'seed_time', |
|||
'label': 'Seed time', |
|||
'type': 'int', |
|||
'default': 40, |
|||
'description': 'Will not be (re)moved until this seed time (in hours) is met.', |
|||
}, |
|||
{ |
|||
'name': 'extra_score', |
|||
'advanced': True, |
|||
'label': 'Extra Score', |
|||
'type': 'int', |
|||
'default': 0, |
|||
'description': 'Starting score for each release found via this provider.', |
|||
} |
|||
], |
|||
} |
|||
] |
|||
}] |
@ -0,0 +1,128 @@ |
|||
from bs4 import BeautifulSoup |
|||
from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode |
|||
from couchpotato.core.helpers.variable import tryInt |
|||
from couchpotato.core.logger import CPLog |
|||
from couchpotato.core.providers.torrent.base import TorrentProvider |
|||
import re |
|||
import traceback |
|||
|
|||
log = CPLog(__name__) |
|||
|
|||
|
|||
class ILoveTorrents(TorrentProvider): |
|||
|
|||
urls = { |
|||
'download': 'http://www.ilovetorrents.me/%s', |
|||
'detail': 'http://www.ilovetorrents.me/%s', |
|||
'search': 'http://www.ilovetorrents.me/browse.php?search=%s&page=%s&cat=%s', |
|||
'test' : 'http://www.ilovetorrents.me/', |
|||
'login' : 'http://www.ilovetorrents.me/takelogin.php', |
|||
'login_check' : 'http://www.ilovetorrents.me' |
|||
} |
|||
|
|||
cat_ids = [ |
|||
(['41'], ['720p', '1080p', 'brrip']), |
|||
(['19'], ['cam', 'ts', 'dvdrip', 'tc', 'r5', 'scr']), |
|||
(['20'], ['dvdr']) |
|||
] |
|||
|
|||
cat_backup_id = 200 |
|||
disable_provider = False |
|||
http_time_between_calls = 1 |
|||
|
|||
def _searchOnTitle(self, title, movie, quality, results): |
|||
|
|||
page = 0 |
|||
total_pages = 1 |
|||
cats = self.getCatId(quality['identifier']) |
|||
|
|||
while page < total_pages: |
|||
|
|||
movieTitle = tryUrlencode('"%s" %s' % (title, movie['library']['year'])) |
|||
search_url = self.urls['search'] % (movieTitle, page, cats[0]) |
|||
page += 1 |
|||
|
|||
data = self.getHTMLData(search_url, opener = self.login_opener) |
|||
if data: |
|||
try: |
|||
soup = BeautifulSoup(data) |
|||
|
|||
results_table = soup.find('table', attrs = {'class': 'koptekst'}) |
|||
if not results_table: |
|||
return |
|||
|
|||
try: |
|||
pagelinks = soup.findAll(href = re.compile('page')) |
|||
pageNumbers = [int(re.search('page=(?P<pageNumber>.+'')', i['href']).group('pageNumber')) for i in pagelinks] |
|||
total_pages = max(pageNumbers) |
|||
|
|||
except: |
|||
pass |
|||
|
|||
entries = results_table.find_all('tr') |
|||
|
|||
for result in entries[1:]: |
|||
prelink = result.find(href = re.compile('details.php')) |
|||
link = prelink['href'] |
|||
download = result.find('a', href = re.compile('download.php'))['href'] |
|||
|
|||
if link and download: |
|||
|
|||
def extra_score(item): |
|||
trusted = (0, 10)[result.find('img', alt = re.compile('Trusted')) is not None] |
|||
vip = (0, 20)[result.find('img', alt = re.compile('VIP')) is not None] |
|||
confirmed = (0, 30)[result.find('img', alt = re.compile('Helpers')) is not None] |
|||
moderated = (0, 50)[result.find('img', alt = re.compile('Moderator')) is not None] |
|||
|
|||
return confirmed + trusted + vip + moderated |
|||
|
|||
id = re.search('id=(?P<id>\d+)&', link).group('id') |
|||
url = self.urls['download'] % (download) |
|||
|
|||
fileSize = self.parseSize(result.select('td.rowhead')[5].text) |
|||
results.append({ |
|||
'id': id, |
|||
'name': toUnicode(prelink.find('b').text), |
|||
'url': url, |
|||
'detail_url': self.urls['detail'] % link, |
|||
'size': fileSize, |
|||
'seeders': tryInt(result.find_all('td')[2].string), |
|||
'leechers': tryInt(result.find_all('td')[3].string), |
|||
'extra_score': extra_score, |
|||
'get_more_info': self.getMoreInfo |
|||
}) |
|||
|
|||
except: |
|||
log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc())) |
|||
|
|||
def getLoginParams(self): |
|||
return tryUrlencode({ |
|||
'username': self.conf('username'), |
|||
'password': self.conf('password'), |
|||
'submit': 'Welcome to ILT', |
|||
}) |
|||
|
|||
def getMoreInfo(self, item): |
|||
cache_key = 'ilt.%s' % item['id'] |
|||
description = self.getCache(cache_key) |
|||
|
|||
if not description: |
|||
|
|||
try: |
|||
full_description = self.getHTMLData(item['detail_url'], opener = self.login_opener) |
|||
html = BeautifulSoup(full_description) |
|||
nfo_pre = html.find('td', attrs = {'class':'main'}).findAll('table')[1] |
|||
description = toUnicode(nfo_pre.text) if nfo_pre else '' |
|||
except: |
|||
log.error('Failed getting more info for %s', item['name']) |
|||
description = '' |
|||
|
|||
self.setCache(cache_key, description, timeout = 25920000) |
|||
|
|||
item['description'] = description |
|||
return item |
|||
|
|||
def loginSuccess(self, output): |
|||
return 'logout.php' in output.lower() |
|||
|
|||
loginCheckSuccess = loginSuccess |
@ -0,0 +1,38 @@ |
|||
"""Backport of importlib.import_module from 3.x.""" |
|||
# While not critical (and in no way guaranteed!), it would be nice to keep this |
|||
# code compatible with Python 2.3. |
|||
import sys |
|||
|
|||
def _resolve_name(name, package, level): |
|||
"""Return the absolute name of the module to be imported.""" |
|||
if not hasattr(package, 'rindex'): |
|||
raise ValueError("'package' not set to a string") |
|||
dot = len(package) |
|||
for x in xrange(level, 1, -1): |
|||
try: |
|||
dot = package.rindex('.', 0, dot) |
|||
except ValueError: |
|||
raise ValueError("attempted relative import beyond top-level " |
|||
"package") |
|||
return "%s.%s" % (package[:dot], name) |
|||
|
|||
|
|||
def import_module(name, package=None): |
|||
"""Import a module. |
|||
|
|||
The 'package' argument is required when performing a relative import. It |
|||
specifies the package to use as the anchor point from which to resolve the |
|||
relative import to an absolute import. |
|||
|
|||
""" |
|||
if name.startswith('.'): |
|||
if not package: |
|||
raise TypeError("relative imports require the 'package' argument") |
|||
level = 0 |
|||
for character in name: |
|||
if character != '.': |
|||
break |
|||
level += 1 |
|||
name = _resolve_name(name[level:], package, level) |
|||
__import__(name) |
|||
return sys.modules[name] |
Loading…
Reference in new issue