Compare commits
141 Commits
master
...
tv_redesig
Author | SHA1 | Date |
---|---|---|
|
096267376b | 10 years ago |
|
911e254298 | 10 years ago |
|
c8504d0ae2 | 10 years ago |
|
2bca2863ae | 10 years ago |
|
b5a401d9e5 | 10 years ago |
|
6e373a0f19 | 10 years ago |
|
0f57e9369e | 10 years ago |
|
c89f4e5393 | 10 years ago |
|
d5b737fc77 | 10 years ago |
|
cb98b06fbd | 10 years ago |
|
d01fc73081 | 10 years ago |
|
83cf2db1e9 | 10 years ago |
|
b74d6a4eb7 | 10 years ago |
|
cf82f5f422 | 10 years ago |
|
a85f35c33b | 10 years ago |
|
c9faf31ee8 | 10 years ago |
|
f390624382 | 10 years ago |
|
756d0451a2 | 10 years ago |
|
1bcb9af4ef | 10 years ago |
|
1cbbbfc38d | 10 years ago |
|
f8b90905d0 | 10 years ago |
|
ad85b2ff95 | 10 years ago |
|
728e96a44d | 10 years ago |
|
de5fb285a1 | 10 years ago |
|
bc0262389c | 10 years ago |
|
40784f3c4e | 10 years ago |
|
147e565249 | 11 years ago |
|
4e568ff515 | 11 years ago |
|
6c586f8b19 | 11 years ago |
|
bb609e073b | 11 years ago |
|
02571d0f5d | 11 years ago |
|
106c5c2d7f | 11 years ago |
|
1b0df8fe45 | 11 years ago |
|
a11aa2c14e | 11 years ago |
|
f070b18d0f | 11 years ago |
|
8c51b8548c | 11 years ago |
|
2ec81b3de6 | 11 years ago |
|
aa02f4d977 | 11 years ago |
|
1c6026d0a2 | 11 years ago |
|
6fbd5e0f3a | 11 years ago |
|
a0b0e3055e | 11 years ago |
|
2f48dc2bca | 11 years ago |
|
058d241c73 | 11 years ago |
|
8836cf7684 | 11 years ago |
|
f4a3b2eccc | 11 years ago |
|
5c62144403 | 11 years ago |
|
c18e284aa6 | 11 years ago |
|
60e8c3ad9b | 11 years ago |
|
894f46a741 | 11 years ago |
|
7d5efad20c | 11 years ago |
|
ba14c95e82 | 11 years ago |
|
2ad249b195 | 11 years ago |
|
deb7943203 | 11 years ago |
|
4e78b0cac1 | 11 years ago |
|
c8f0cdc90f | 11 years ago |
|
ce80ac5a33 | 11 years ago |
|
5e438e5343 | 11 years ago |
|
12dd9c6b14 | 11 years ago |
|
478dc0f242 | 11 years ago |
|
5d886ccf1f | 11 years ago |
|
7f466f9c08 | 11 years ago |
|
7fbd89a317 | 11 years ago |
|
6f620f451b | 11 years ago |
|
dea5bbbf1c | 11 years ago |
|
68bde6086d | 11 years ago |
|
34bb8c7993 | 11 years ago |
|
74c7cf4381 | 11 years ago |
|
efe0a4af53 | 11 years ago |
|
b9c6d983e1 | 11 years ago |
|
3d6ce1c2e2 | 11 years ago |
|
a06bfcb3bf | 11 years ago |
|
fe2e508e4c | 11 years ago |
|
72cb53bcc0 | 11 years ago |
|
90be6ec38b | 11 years ago |
|
212d5c5432 | 11 years ago |
|
b10e25ab8c | 11 years ago |
|
5c4f8186df | 11 years ago |
|
02d4a7625b | 11 years ago |
|
8018ef979f | 11 years ago |
|
482f5f82e6 | 11 years ago |
|
88f8cd708b | 11 years ago |
|
aa92d76eb4 | 11 years ago |
|
3e05bc8d78 | 11 years ago |
|
4de9879927 | 11 years ago |
|
479e20d8f3 | 11 years ago |
|
f7ed5d4b2f | 11 years ago |
|
bda44848a1 | 11 years ago |
|
f3ae8a05cc | 11 years ago |
|
43275297e9 | 11 years ago |
|
d79556f36f | 11 years ago |
|
8fe3d6f58f | 11 years ago |
|
a1ca367037 | 11 years ago |
|
bfdf565a0d | 11 years ago |
|
c77eaabbff | 11 years ago |
|
44063dfcc5 | 11 years ago |
|
c2c98f644b | 11 years ago |
|
74caecbe89 | 11 years ago |
|
a721a40d5e | 11 years ago |
|
338e645579 | 11 years ago |
|
5f2dd0aac3 | 11 years ago |
|
0f434afd33 | 11 years ago |
|
364527b0b2 | 11 years ago |
|
ac857301ac | 11 years ago |
|
c038c66dc9 | 11 years ago |
|
c81891683c | 11 years ago |
|
d787cb0cdb | 11 years ago |
|
2d5a3e7564 | 11 years ago |
|
7ae178e2a6 | 11 years ago |
|
e885ade131 | 11 years ago |
|
0925dd08bc | 11 years ago |
|
050d8ccfda | 11 years ago |
|
4efdca91d5 | 11 years ago |
|
0d128a3525 | 11 years ago |
|
0f97e57307 | 11 years ago |
|
6833e78546 | 11 years ago |
|
30c56f29d0 | 11 years ago |
|
7ed0c6f099 | 11 years ago |
|
af64961502 | 11 years ago |
|
342e61da48 | 11 years ago |
|
8ce30f0aad | 11 years ago |
|
63b8e3ff1a | 11 years ago |
|
91c3df7c46 | 11 years ago |
|
ae3d9c0a0a | 11 years ago |
|
090eb6f14d | 11 years ago |
|
44de06f518 | 11 years ago |
|
b23db7541d | 11 years ago |
|
7410288781 | 11 years ago |
|
bb4252363d | 11 years ago |
|
0a0a1704be | 11 years ago |
|
b13b32952f | 11 years ago |
|
0978ac33bc | 11 years ago |
|
6e8b7d25e5 | 11 years ago |
|
0f555dbb85 | 11 years ago |
|
43e4ed6e2d | 11 years ago |
|
2e50eb487c | 11 years ago |
|
70e5f1a6d8 | 11 years ago |
|
9cfa7fa2a3 | 11 years ago |
|
cfc9f524a7 | 11 years ago |
|
8281fdc08b | 11 years ago |
|
949f76cd50 | 11 years ago |
|
9631be1ee4 | 11 years ago |
117 changed files with 12669 additions and 1279 deletions
@ -0,0 +1,143 @@ |
|||||
|
import math |
||||
|
import re |
||||
|
import traceback |
||||
|
|
||||
|
from bs4 import BeautifulSoup |
||||
|
from couchpotato.core.event import fireEvent |
||||
|
from couchpotato.core.helpers.variable import tryInt, tryFloat |
||||
|
from couchpotato.core.helpers.encoding import tryUrlencode |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider |
||||
|
|
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
class Base(TorrentProvider):
    """Torrent provider that scrapes ExtraTorrent advanced-search HTML
    result pages into release dicts (url, id, name, detail_url, size,
    seeders, leechers) appended to a shared results list.
    """

    # 0-based positions of the interesting <td> cells in a result row.
    field_link = 0
    field_name = 2
    field_size = 3
    field_seeders = 4
    field_leechers = 5

    # Hard cap on how many result pages are fetched per search.
    max_pages = 2

    category = 0  # any category

    # URL templates; 'url' and 'detail' prepend the active domain to a
    # relative href taken from the page.
    urls = {
        'url': '%s%s',
        'detail': '%s%s',
        'search': '%s/advanced_search/?page=%d&with=%s&s_cat=%d&seeds_from=1'
    }

    http_time_between_calls = 1  # Seconds

    # Known mirror domains, resolved through getDomain().
    proxy_list = [
        'http://extratorrent.cc'
    ]

    def buildUrl(self, *args, **kwargs):
        """Build the search URL for a title/media on a given result page.

        Keyword args: media, title, page (defaults to 1). When no title is
        given it is resolved from *media* via the 'library.query' event.
        Returns the URL string, or False when no title can be determined.
        """
        media = kwargs.get('media', None)
        title = kwargs.get('title', None)
        page = kwargs.get('page', 1)
        if not title and media:
            title = fireEvent('library.query', media, single = True)
        if not title:
            return False
        # NOTE(review): assert is stripped under `python -O`; an explicit
        # raise would be safer for input validation.
        assert isinstance(page, (int, long))

        return self.urls['search'] % (self.getDomain(), page, tryUrlencode(title), self.category)

    def _searchOnTitle(self, title, media, quality, results):
        """Scrape up to max_pages of search results for *title*, appending
        fully-parsed release dicts to *results* (mutated in place).
        Any parsing failure logs the traceback and stops paging.
        """
        page = 1
        pages = self.max_pages
        while page <= pages:
            url = self.buildUrl(title=title, media=media, page=page)
            data = self.getHTMLData(url)
            try:
                html = BeautifulSoup(data)
                if page == 1:
                    # Derive the real page count on the first page from the
                    # "total <b>N</b> torrents found" banner and the
                    # currently-selected per-page option, clamped to max_pages.
                    matches = re.search('total .b.([0-9]+)..b. torrents found', data, re.MULTILINE)
                    torrents_total = tryFloat(matches.group(1))
                    option = html.find('select', attrs={'name': 'torr_cat'}).find('option', attrs={'selected': 'selected'})
                    torrents_per_page = tryFloat(option.text)
                    pages = math.ceil(torrents_total / torrents_per_page)
                    if self.max_pages < pages:
                        pages = self.max_pages

                # Each result row carries class 'tlr' or 'tlz'; walk its cells
                # by position, collecting one release dict per row.
                for tr in html.find_all('tr', attrs={'class': ['tlr', 'tlz']}):
                    result = { }
                    field = self.field_link
                    for td in tr.find_all('td'):
                        if field == self.field_link:
                            # Download link: anchor whose title starts with "download".
                            a = td.find('a', title=re.compile('^download ', re.IGNORECASE))
                            result['url'] = self.urls['url'] % (self.getDomain(), a.get('href'))
                        elif field == self.field_name:
                            # Take the LAST "view ..." anchor in the cell.
                            a = None
                            for a in td.find_all('a', title=re.compile('^view ', re.IGNORECASE)): pass
                            if a:
                                result['id'] = re.search('/torrent/(?P<id>\d+)/', a.get('href')).group('id')
                                result['name'] = a.text
                                result['detail_url'] = self.urls['detail'] % (self.getDomain(), a.get('href'))
                        elif field == self.field_size:
                            result['size'] = self.parseSize(td.text)
                        elif field == self.field_seeders:
                            result['seeders'] = tryInt(td.text)
                        elif field == self.field_leechers:
                            result['leechers'] = tryInt(td.text)

                        field += 1
                    # /for

                    # Only keep rows where every expected cell parsed.
                    if all(key in result for key in ('url', 'id', 'name', 'detail_url', 'size', 'seeders', 'leechers')):
                        results.append(result)
                # /for
            except:
                log.error('Failed parsing results from ExtraTorrent: %s', traceback.format_exc())
                break

            page += 1
        # /while
|
|
||||
|
# Settings definition for the ExtraTorrent provider, rendered in the
# "searcher" tab under the torrent-providers list.
config = [{
    'name': 'extratorrent',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'ExtraTorrent',
            'wizard': True,
            'description': '<a href="http://extratorrent.cc/">ExtraTorrent</a>',
            'icon': 'AAABAAEAEBAAAAEAIABoBAAAFgAAACgAAAAQAAAAIAAAAAEAIAAAAAAAQAQAAAAAAAAAAAAAAAAAAAAAAADIvb7/xry9/8e8vf/Ivb7/yL2+/8i9vv/Ivb7/yL2+/8i9vv/Ivb7/yL2+/8i9vv/Ivb7/x7y9/8a8vf/Ivb7/xry9//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7//v7+//7+/v/+/v7/xry9/8e8vf/8/Pz//Pz8/5OTkv9qSRj/akkY/2tKGf9rShn/a0oZ/2tKGP9qSRj/Tzoc/4iEff/8/Pz//Pz8/8e8vf/HvL3/+/v7//v7+/+Hf3P/4bd0/+G3dP/ht3T/4bd0/+G3dP/ht3T/4bd0/3hgOv9xbmr/+/v7//v7+//HvL3/x7y9//f4+P/3+Pj/hHxx/+zAfP/swHz/7MB8/3xzaP9yal3/cmpd/3JqXf98c2b/xcPB//f4+P/3+Pj/x7y9/8e8vf/29vb/9vb2/4V9cf/txYX/7cWF/2RSNv/Nz8//zs/R/87P0P/Mzs7/29va//f39//29vb/9vb2/8e8vf/HvL3/8/Pz//Pz8/+IgHP/78yU/+/MlP94YDr/8vLy//Ly8v/y8vL/8vLy//Ly8v/09PT/8/T0//Lz8//HvL3/x7y9//Ly8v/y8vL/h4F5/+/Pnf/vz53/kHdP/3hgOv94YDr/eWE7/3hgOv9USDf/5eLd//Ly8v/y8vL/x7y9/8e8vf/w8PD/8PDw/4eBef/t0Kf/7dCn/+3Qp//t0Kf/7dCn/+3Qp//t0Kf/VEg3/9vb2v/w7+//8PDw/8e8vf/HvL3/7u3u/+7t7v+Gg33/7dm8/+3ZvP/Txa7/cW5r/3Fua/9xbmv/d3Rv/62rqf/o5+f/7u3u/+7t7v/HvL3/x7y9/+vr6//r6+v/h4F4/+/l0P/v5dD/XFVM/8XHx//Fx8f/xcfH/8TGxv/r7Ov/6+vr/+vr6//r6+v/x7y9/8e8vf/p6un/6erp/4eAeP/58+b/+fPm/4iEff93dG7/fXl0/356df99eXT/bGlj/5OTkf/p6un/6erp/8e8vf/HvL3/6Ofn/+fn5/+GgHj/5uPd/+bj3f/m493/5uPd/+bj3f/m493/5uPd/3Z2df9vb2//6Ofn/+fn5//HvL3/x7y9/+fn5//n5+f/raqo/3Z2df94eHj/d3d2/3h4ef95eXn/eHh4/3h4eP94eHj/ramo/+fn5//n5+f/x7y9/8a8vf/l5eX/5eXl/+Xl5f/l5eX/5eXl/+Xl5f/l5eX/5eXl/+Xl5f/l5eX/5eXl/+Xl5f/l5eX/5eXl/8a8vf/Ivb7/xry9/8e8vf/Ivb7/yL2+/8i9vv/Ivb7/yL2+/8i9vv/Ivb7/yL2+/8i9vv/Ivb7/x7y9/8a8vf/Ivb7/AAD//wAA//8AAP//AAD//wAA//8AAP//AAD//wAA//8AAP//AAD//wAA//8AAP//AAD//wAA//8AAP//AAD//w==',
            'options': [
                # Master on/off switch for the provider.
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                # Seeding requirements before a finished download may be removed.
                {
                    'name': 'seed_ratio',
                    'type': 'float',
                    'label': 'Seed ratio',
                    'default': 1,
                    'description': 'Will not be (re)moved until this seed ratio is met.',
                },
                {
                    'name': 'seed_time',
                    'type': 'int',
                    'label': 'Seed time',
                    'default': 40,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met.',
                },
                # Score bonus applied to every release from this provider.
                {
                    'name': 'extra_score',
                    'type': 'int',
                    'advanced': True,
                    'label': 'Extra Score',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
@ -0,0 +1,7 @@ |
|||||
|
from .main import Quality |
||||
|
|
||||
|
|
||||
|
def autoload():
    """Plugin-loader hook: build and return the Quality plugin instance."""
    plugin = Quality()
    return plugin
||||
|
|
||||
|
config = [] |
@ -0,0 +1,185 @@ |
|||||
|
import traceback |
||||
|
|
||||
|
from CodernityDB.database import RecordNotFound |
||||
|
from couchpotato import get_db |
||||
|
from couchpotato.core.event import addEvent, fireEvent |
||||
|
from couchpotato.core.helpers.encoding import toUnicode, ss |
||||
|
from couchpotato.core.helpers.variable import mergeDicts, getExt, tryInt, splitString |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.plugins.base import Plugin |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
|
||||
|
class QualityBase(Plugin):
    """Shared quality bookkeeping for one media type: exposes the static
    quality definitions through 'quality.*' events and seeds them (plus a
    default profile per quality) into the database on app start.
    """
    # Media type this plugin serves; set by the subclass.
    type = None

    # Extra per-key property definitions, keyed by quality-dict key (see expand()).
    properties = {}
    # Static quality definitions (list of dicts); set by the subclass.
    qualities = []

    # Identifiers considered pre-release quality.
    pre_releases = ['cam', 'ts', 'tc', 'r5', 'scr']
    # Tag aliases used to recognise the different 3D release flavours.
    threed_tags = {
        'sbs': [('half', 'sbs'), 'hsbs', ('full', 'sbs'), 'fsbs'],
        'ou': [('half', 'ou'), 'hou', ('full', 'ou'), 'fou'],
        '3d': ['2d3d', '3d2d', '3d'],
    }

    # Cache of qualities merged with their DB documents (filled by all()).
    cached_qualities = None
    cached_order = None

    def __init__(self):
        addEvent('quality.pre_releases', self.preReleases)

        addEvent('quality.get', self.get)
        addEvent('quality.all', self.all)
        addEvent('quality.reset_cache', self.resetCache)

        addEvent('quality.fill', self.fill)
        addEvent('quality.isfinish', self.isFinish)
        addEvent('quality.ishigher', self.isHigher)

        # Seed the quality/profile documents once the app is up.
        addEvent('app.initialize', self.fill, priority = 10)

        # Quality identifiers in definition order.
        self.order = []

        for q in self.qualities:
            self.order.append(q.get('identifier'))

    def preReleases(self, types = None):
        """Return the pre-release identifiers; None when *types* excludes this media type."""
        if types and self.type not in types:
            return

        return self.pre_releases

    def get(self, identifier, types = None):
        """Return the static quality dict matching *identifier*, if any."""
        if types and self.type not in types:
            return

        for q in self.qualities:
            if identifier == q.get('identifier'):
                return q

    def all(self, types = None):
        """Return every quality merged with its DB document (cached after
        the first complete read)."""
        if types and self.type not in types:
            return

        if self.cached_qualities:
            return self.cached_qualities

        db = get_db()

        temp = []
        for quality in self.qualities:
            quality_doc = db.get('quality', quality.get('identifier'), with_doc = True)['doc']
            q = mergeDicts(quality, quality_doc)
            temp.append(q)

        # Only cache a complete result set.
        if len(temp) == len(self.qualities):
            self.cached_qualities = temp

        return temp

    def expand(self, quality):
        """Replace property identifiers inside *quality* with their full
        property dicts (mutates and returns *quality*)."""
        for key, options in self.properties.items():
            if key not in quality:
                continue

            quality[key] = [self.getProperty(key, identifier) for identifier in quality[key]]

        return quality

    def getProperty(self, key, identifier):
        """Look up one property definition by property key + identifier."""
        if key not in self.properties:
            return

        for item in self.properties[key]:
            if item.get('identifier') == identifier:
                return item

    def resetCache(self):
        # Drop the merged-qualities cache so all() re-reads the database.
        self.cached_qualities = None

    def fill(self):
        """Insert missing quality documents — and one default profile per
        quality — into the DB. Returns True on success, False on failure."""

        try:
            db = get_db()

            order = 0
            for q in self.qualities:

                existing = None
                try:
                    existing = db.get('quality', q.get('identifier'))
                except RecordNotFound:
                    pass

                if not existing:
                    db.insert({
                        '_t': 'quality',
                        'order': order,
                        'identifier': q.get('identifier'),
                        'size_min': tryInt(q.get('size')[0]),
                        'size_max': tryInt(q.get('size')[1]),
                    })

                    log.info('Creating profile: %s', q.get('label'))
                    db.insert({
                        '_t': 'profile',
                        'order': order + 20,  # Make sure it goes behind other profiles
                        'core': True,
                        'qualities': [q.get('identifier')],
                        'label': toUnicode(q.get('label')),
                        'finish': [True],
                        'wait_for': [0],
                    })

                order += 1

            return True
        except:
            log.error('Failed: %s', traceback.format_exc())

        return False

    def isFinish(self, quality, profile, release_age = 0):
        """Return True when a download of *quality* satisfies *profile*
        (first wanted quality, or a 'finish' quality past its stop_after age)."""
        if not isinstance(profile, dict) or not profile.get('qualities'):
            # No profile so anything (scanned) is good enough
            return True

        try:
            # Position of this quality (with matching 3D flag) in the profile.
            index = [i for i, identifier in enumerate(profile['qualities']) if identifier == quality['identifier'] and bool(profile['3d'][i] if profile.get('3d') else False) == bool(quality.get('is_3d', False))][0]

            if index == 0 or (profile['finish'][index] and int(release_age) >= int(profile.get('stop_after', [0])[0])):
                return True

            return False
        except:
            # Quality not present in the profile (or malformed profile): not finished.
            return False

    def isHigher(self, quality, compare_with, profile = None):
        """Rank *quality* against *compare_with* inside *profile*;
        returns 'higher', 'equal' or 'lower'."""
        if not isinstance(profile, dict) or not profile.get('qualities'):
            profile = fireEvent('profile.default', single = True)

        # Try to find quality in profile, if not found: a quality we do not want is lower than anything else
        try:
            quality_order = [i for i, identifier in enumerate(profile['qualities']) if identifier == quality['identifier'] and bool(profile['3d'][i] if profile.get('3d') else 0) == bool(quality.get('is_3d', 0))][0]
        except:
            log.debug('Quality %s not found in profile identifiers %s', (quality['identifier'] + (' 3D' if quality.get('is_3d', 0) else ''), \
                [identifier + (' 3D' if (profile['3d'][i] if profile.get('3d') else 0) else '') for i, identifier in enumerate(profile['qualities'])]))
            return 'lower'

        # Try to find compare quality in profile, if not found: anything is higher than a not wanted quality
        try:
            compare_order = [i for i, identifier in enumerate(profile['qualities']) if identifier == compare_with['identifier'] and bool(profile['3d'][i] if profile.get('3d') else 0) == bool(compare_with.get('is_3d', 0))][0]
        except:
            log.debug('Compare quality %s not found in profile identifiers %s', (compare_with['identifier'] + (' 3D' if compare_with.get('is_3d', 0) else ''), \
                [identifier + (' 3D' if (profile['3d'][i] if profile.get('3d') else 0) else '') for i, identifier in enumerate(profile['qualities'])]))
            return 'higher'

        # Note to self: a lower number means higher quality
        if quality_order > compare_order:
            return 'lower'
        elif quality_order == compare_order:
            return 'equal'
        else:
            return 'higher'
@ -0,0 +1,82 @@ |
|||||
|
import traceback |
||||
|
|
||||
|
from couchpotato import fireEvent, get_db, tryInt, CPLog |
||||
|
from couchpotato.api import addApiView |
||||
|
from couchpotato.core.event import addEvent |
||||
|
from couchpotato.core.helpers.variable import splitString, mergeDicts |
||||
|
from couchpotato.core.media._base.quality.index import QualityIndex |
||||
|
from couchpotato.core.plugins.base import Plugin |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
|
||||
|
class Quality(Plugin):
    """API/event front-end for the quality documents stored in the database."""

    # Register the custom 'quality' index with the database layer.
    _database = {
        'quality': QualityIndex
    }

    def __init__(self):
        addEvent('quality.single', self.single)

        addApiView('quality.list', self.allView, docs = {
            'desc': 'List all available qualities',
            'params': {
                'type': {'type': 'string', 'desc': 'Media type to filter on.'},
            },
            'return': {'type': 'object', 'example': """{
    'success': True,
    'list': array, qualities
}"""}
        })

        addApiView('quality.size.save', self.saveSize)

    def single(self, identifier = '', types = None):
        """Return one quality: its DB document merged with the static
        definition from the 'quality.get' event; {} when the doc is empty.

        NOTE(review): db.get raises for an unknown identifier rather than
        falling through to the {} return — confirm callers always pass a
        seeded identifier.
        """
        db = get_db()
        quality = db.get('quality', identifier, with_doc = True)['doc']

        if quality:
            return mergeDicts(
                fireEvent(
                    'quality.get',
                    quality['identifier'],
                    types = types,
                    single = True
                ),
                quality
            )

        return {}

    def allView(self, **kwargs):
        """API handler: list every quality, optionally filtered by media type."""

        return {
            'success': True,
            'list': fireEvent(
                'quality.all',
                types = splitString(kwargs.get('type')),
                merge = True
            )
        }

    def saveSize(self, **kwargs):
        """API handler: store a size bound (kwargs: identifier, value_type,
        value) on a quality document and invalidate the quality cache."""

        try:
            db = get_db()
            quality = db.get('quality', kwargs.get('identifier'), with_doc = True)

            if quality:
                quality['doc'][kwargs.get('value_type')] = tryInt(kwargs.get('value'))
                db.update(quality['doc'])

            fireEvent('quality.reset_cache')

            return {
                'success': True
            }
        except:
            log.error('Failed: %s', traceback.format_exc())

        return {
            'success': False
        }
@ -0,0 +1,12 @@ |
|||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media._base.providers.torrent.extratorrent import Base |
||||
|
from couchpotato.core.media.movie.providers.base import MovieProvider |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'ExtraTorrent' |
||||
|
|
||||
|
|
||||
|
class ExtraTorrent(MovieProvider, Base):
    """Movie-flavoured ExtraTorrent provider; all scraping logic lives in Base."""

    # Restrict searches to site category 4 (Base defaults to 0 = any category);
    # presumably ExtraTorrent's Movies category — TODO confirm against the site.
    category = 4
@ -0,0 +1,55 @@ |
|||||
|
from couchpotato.core.event import addEvent, fireEvent |
||||
|
from couchpotato.core.media import MediaBase |
||||
|
|
||||
|
autoload = 'ShowToggler' |
||||
|
|
||||
|
|
||||
|
class ShowToggler(MediaBase):
    """
    TV Show support is EXPERIMENTAL and off by default, so the "Shows"
    item should only be visible when the user enabled it. This class
    pushes a notification to the frontend whenever the shows.enabled
    configuration item is saved, letting the UI react live.

    FIXME: remove after TV Show support is considered stable.
    """
    def __init__(self):
        # Run after the shows.enabled setting has been persisted.
        addEvent('setting.save.shows.enabled.after', self.toggleTab)

    def toggleTab(self):
        is_enabled = self.conf('enabled', section = 'shows')
        fireEvent('notify.frontend', type = 'shows.enabled', data = is_enabled)
||||
|
|
||||
|
|
||||
|
class ShowTypeBase(MediaBase):
    """Common base for show-related media handlers; derives the dotted
    media type from an optional class-level 'type' attribute."""

    _type = 'show'

    def getType(self):
        """Return the media type: '<base>' or '<base>.<subtype>'."""
        # No subtype defined on this class: plain base type.
        if not hasattr(self, 'type'):
            return self._type
        # A subtype equal to the base type adds no information.
        if self.type == self._type:
            return self._type
        return '%s.%s' % (self._type, self.type)
||||
|
|
||||
|
# Settings definition for the EXPERIMENTAL TV Show support toggle,
# rendered in the "general" tab.
config = [{
    'name': 'shows',
    'groups': [
        {
            'tab': 'general',
            'name': 'Shows',
            'label': 'Shows',
            'description': 'Enable EXPERIMENTAL TV Show support',
            'options': [
                # Master on/off switch for the whole feature.
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                # Search preference: single episodes vs full-season packs.
                {
                    'name': 'prefer_episode_releases',
                    'type': 'bool',
                    'default': False,
                    'label': 'Episode releases',
                    'description': 'Prefer episode releases over season packs',
                },
            ],
        },
    ],
}]
@ -0,0 +1,4 @@ |
|||||
|
from .main import ShowBase |
||||
|
|
||||
|
def autoload():
    """Plugin-loader hook: build and return the ShowBase media handler."""
    handler = ShowBase()
    return handler
@ -0,0 +1,111 @@ |
|||||
|
from couchpotato import get_db |
||||
|
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.helpers.variable import tryInt |
||||
|
from couchpotato.core.media import MediaBase |
||||
|
|
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'Episode' |
||||
|
|
||||
|
|
||||
|
class Episode(MediaBase):
    """Media handler for single show episodes (media type 'show.episode')."""

    _type = 'show.episode'

    def __init__(self):
        # Wire the episode CRUD events to this handler.
        addEvent('show.episode.add', self.add)
        addEvent('show.episode.update', self.update)
        addEvent('show.episode.update_extras', self.updateExtras)

    def add(self, parent_id, info = None, update_after = True, status = None):
        """Insert or refresh an episode document under season *parent_id*.

        info: provider info dict; must contain an 'identifiers' entry
        (popped from *info* before storing).
        update_after: True / False / 'async' — whether, and how, to fetch
        extras after storing.
        Returns the stored document, or None when identifiers are missing.
        """
        if not info: info = {}

        identifiers = info.pop('identifiers', None)

        if not identifiers:
            log.warning('Unable to add episode, missing identifiers (info provider mismatch?)')
            return

        # Build the episode media document
        episode_info = {
            '_t': 'media',
            'type': 'show.episode',
            'identifiers': identifiers,
            'status': status if status else 'active',
            'parent_id': parent_id,
            'info': info,  # Returned dict by providers
        }

        # Check if the episode already exists
        existing_episode = fireEvent('media.with_identifiers', identifiers, with_doc = True, types = [self._type], single = True)

        db = get_db()

        if existing_episode:
            s = existing_episode['doc']
            s.update(episode_info)

            episode = db.update(s)
        else:
            episode = db.insert(episode_info)

        # Update library info
        if update_after is not False:
            # BUGFIX: was `update_after is 'async'` — identity comparison with
            # a string literal relies on interning; compare by value instead.
            handle = fireEventAsync if update_after == 'async' else fireEvent
            handle('show.episode.update_extras', episode, info, store = True, single = True)

        return episode

    def update(self, media_id = None, identifiers = None, info = None):
        """Refresh an episode's info, asking the providers when *info* is empty.

        Returns the updated document, or None when shutting down.
        """
        if not info: info = {}

        # Don't start new work while the app is going down.
        if self.shuttingDown():
            return

        db = get_db()

        episode = db.get('id', media_id)

        # Get new info from the providers when none was supplied
        if not info:
            season = db.get('id', episode['parent_id'])
            show = db.get('id', season['parent_id'])

            info = fireEvent(
                'episode.info', show.get('identifiers'), {
                    'season_identifiers': season.get('identifiers'),
                    'season_number': season.get('info', {}).get('number'),

                    'episode_identifiers': episode.get('identifiers'),
                    'episode_number': episode.get('info', {}).get('number'),

                    'absolute_number': episode.get('info', {}).get('absolute_number')
                },
                merge = True
            )

            info['season_number'] = season.get('info', {}).get('number')

        identifiers = info.pop('identifiers', None) or identifiers

        # Update/create media
        episode['identifiers'].update(identifiers)
        episode.update({'info': info})

        self.updateExtras(episode, info)

        db.update(episode)
        return episode

    def updateExtras(self, episode, info, store=False):
        """Fetch poster images for *episode*; persist the doc when *store* is True."""
        db = get_db()

        # Get images
        image_urls = info.get('images', [])
        existing_files = episode.get('files', {})
        self.getPoster(image_urls, existing_files)

        if store:
            db.update(episode)
@ -0,0 +1,289 @@ |
|||||
|
import time |
||||
|
import traceback |
||||
|
|
||||
|
from couchpotato import get_db |
||||
|
from couchpotato.api import addApiView |
||||
|
from couchpotato.core.event import fireEvent, fireEventAsync, addEvent |
||||
|
from couchpotato.core.helpers.variable import getTitle, find |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media import MediaBase |
||||
|
|
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
|
||||
|
class ShowBase(MediaBase): |
||||
|
|
||||
|
_type = 'show' |
||||
|
|
||||
|
    def __init__(self):
        # Register the media type, then expose the add API and wire the
        # show CRUD events to this handler.
        super(ShowBase, self).__init__()
        self.initType()

        addApiView('show.add', self.addView, docs = {
            'desc': 'Add new show to the wanted list',
            'params': {
                'identifier': {'desc': 'IMDB id of the show your want to add.'},
                'profile_id': {'desc': 'ID of quality profile you want the add the show in. If empty will use the default profile.'},
                'category_id': {'desc': 'ID of category you want the add the show in.'},
                'title': {'desc': 'Title of the show to use for search and renaming'},
            }
        })

        addEvent('show.add', self.add)
        addEvent('show.update', self.update)
        addEvent('show.update_extras', self.updateExtras)
||||
|
|
||||
|
def addView(self, **kwargs): |
||||
|
add_dict = self.add(params = kwargs) |
||||
|
|
||||
|
return { |
||||
|
'success': True if add_dict else False, |
||||
|
'show': add_dict, |
||||
|
} |
||||
|
|
||||
|
    def add(self, params = None, force_readd = True, search_after = True, update_after = True, notify_after = True, status = None):
        """Add a show to the wanted list from request-style *params*.

        Requires params['identifiers']; fetches provider info when params
        carry none. Returns the stored media dict, False when identifiers
        are missing, or None on an internal failure (logged).
        """
        if not params: params = {}

        # Identifiers are mandatory — bail out and tell the frontend.
        if not params.get('identifiers'):
            msg = 'Can\'t add show without at least 1 identifier.'
            log.error(msg)
            fireEvent('notify.frontend', type = 'show.no_identifier', message = msg)
            return False

        # Fetch provider info unless usable titles were already supplied.
        info = params.get('info')
        if not info or (info and len(info.get('titles', [])) == 0):
            info = fireEvent('show.info', merge = True, identifiers = params.get('identifiers'))

        # Add Show
        try:
            m, added = self.create(info, params, force_readd, search_after, update_after)

            result = fireEvent('media.get', m['_id'], single = True)

            # Pick the friendliest available title for the notification.
            if added and notify_after:
                if params.get('title'):
                    message = 'Successfully added "%s" to your wanted list.' % params.get('title', '')
                else:
                    title = getTitle(m)
                    if title:
                        message = 'Successfully added "%s" to your wanted list.' % title
                    else:
                        message = 'Successfully added to your wanted list.'

                fireEvent('notify.frontend', type = 'show.added', data = result, message = message)

            return result
        except:
            log.error('Failed adding media: %s', traceback.format_exc())
|
|
||||
|
def create(self, info, params = None, force_readd = True, search_after = True, update_after = True, notify_after = True, status = None): |
||||
|
# Set default title |
||||
|
def_title = self.getDefaultTitle(info) |
||||
|
|
||||
|
# Default profile and category |
||||
|
default_profile = {} |
||||
|
if not params.get('profile_id'): |
||||
|
default_profile = fireEvent('profile.default', single = True) |
||||
|
|
||||
|
cat_id = params.get('category_id') |
||||
|
|
||||
|
media = { |
||||
|
'_t': 'media', |
||||
|
'type': 'show', |
||||
|
'title': def_title, |
||||
|
'identifiers': info.get('identifiers'), |
||||
|
'status': status if status else 'active', |
||||
|
'profile_id': params.get('profile_id', default_profile.get('_id')), |
||||
|
'category_id': cat_id if cat_id is not None and len(cat_id) > 0 and cat_id != '-1' else None |
||||
|
} |
||||
|
|
||||
|
identifiers = info.pop('identifiers', {}) |
||||
|
seasons = info.pop('seasons', {}) |
||||
|
|
||||
|
# Update media with info |
||||
|
self.updateInfo(media, info) |
||||
|
|
||||
|
existing_show = fireEvent('media.with_identifiers', params.get('identifiers'), with_doc = True, types = [self._type], single = True) |
||||
|
|
||||
|
db = get_db() |
||||
|
|
||||
|
if existing_show: |
||||
|
s = existing_show['doc'] |
||||
|
s.update(media) |
||||
|
|
||||
|
show = db.update(s) |
||||
|
else: |
||||
|
show = db.insert(media) |
||||
|
|
||||
|
# Update dict to be usable |
||||
|
show.update(media) |
||||
|
|
||||
|
added = True |
||||
|
do_search = False |
||||
|
search_after = search_after and self.conf('search_on_add', section = 'showsearcher') |
||||
|
onComplete = None |
||||
|
|
||||
|
if existing_show: |
||||
|
if search_after: |
||||
|
onComplete = self.createOnComplete(show['_id']) |
||||
|
|
||||
|
search_after = False |
||||
|
elif force_readd: |
||||
|
# Clean snatched history |
||||
|
for release in fireEvent('release.for_media', show['_id'], single = True): |
||||
|
if release.get('status') in ['downloaded', 'snatched', 'done']: |
||||
|
if params.get('ignore_previous', False): |
||||
|
release['status'] = 'ignored' |
||||
|
db.update(release) |
||||
|
else: |
||||
|
fireEvent('release.delete', release['_id'], single = True) |
||||
|
|
||||
|
show['profile_id'] = params.get('profile_id', default_profile.get('id')) |
||||
|
show['category_id'] = media.get('category_id') |
||||
|
show['last_edit'] = int(time.time()) |
||||
|
|
||||
|
do_search = True |
||||
|
db.update(show) |
||||
|
else: |
||||
|
params.pop('info', None) |
||||
|
log.debug('Show already exists, not updating: %s', params) |
||||
|
added = False |
||||
|
|
||||
|
# Create episodes |
||||
|
self.createEpisodes(show, seasons) |
||||
|
|
||||
|
# Trigger update info |
||||
|
if added and update_after: |
||||
|
# Do full update to get images etc |
||||
|
fireEventAsync('show.update_extras', show.copy(), info, store = True, on_complete = onComplete) |
||||
|
|
||||
|
# Remove releases |
||||
|
for rel in fireEvent('release.for_media', show['_id'], single = True): |
||||
|
if rel['status'] is 'available': |
||||
|
db.delete(rel) |
||||
|
|
||||
|
if do_search and search_after: |
||||
|
onComplete = self.createOnComplete(show['_id']) |
||||
|
onComplete() |
||||
|
|
||||
|
return show, added |
||||
|
|
||||
|
def createEpisodes(self, m, seasons_info):
    """Create season and episode media items for show ``m``.

    :param m: show media document (dict with '_id')
    :param seasons_info: mapping season number -> season info dict, each
                         optionally carrying an 'episodes' mapping
    """
    for season_nr, season_info in seasons_info.items():
        # Capture the episode map up front: 'show.season.add' pops the
        # 'episodes' key from the info dict it receives.
        episodes = season_info.get('episodes', {})

        # Create (or refresh) the season under this show
        season = fireEvent('show.season.add', m.get('_id'), season_info, update_after = False, single = True)

        # Create every episode of the season, tagged with its season number
        for episode_nr, episode_info in episodes.items():
            episode_info['season_number'] = season_nr
            fireEvent('show.episode.add', season.get('_id'), episode_info, update_after = False, single = True)
|
def update(self, media_id = None, media = None, identifiers = None, info = None):
    """
    Update show information inside media['doc']['info']

    @param media_id: document id (looked up when `media` is not given)
    @param media: media document to update directly
    @param identifiers: identifiers from multiple providers
        {
            'thetvdb': 123,
            'imdb': 'tt123123',
            ..
        }
    @param info: pre-fetched info dict; fetched from providers when empty
    @return: dict, with media ({} on failure)
    """
    if not info: info = {}
    if not identifiers: identifiers = {}

    db = get_db()

    if self.shuttingDown():
        return

    # Accept either a ready media document or an id to look it up with.
    # (Previously a provided `media` without `media_id` was rejected.)
    if media is None:
        if not media_id:
            log.error('missing "media" and "media_id" parameters, unable to update')
            return
        media = db.get('id', media_id)

    if not info:
        info = fireEvent('show.info', identifiers = media.get('identifiers'), merge = True)

    try:
        # Identifiers and seasons are stored separately from the info blob
        identifiers = info.pop('identifiers', {})
        seasons = info.pop('seasons', {})

        self.updateInfo(media, info)
        self.updateEpisodes(media, seasons)
        self.updateExtras(media, info)

        db.update(media)
        return media
    except Exception:
        # Was a bare `except:`; Exception still logs every real failure
        # without swallowing SystemExit/KeyboardInterrupt
        log.error('Failed update media: %s', traceback.format_exc())

    return {}
|
def updateInfo(self, media, info):
    """Store provider info on the media document.

    Strips list-state flags ('in_wanted'/'in_library') from `info` before
    saving, since those are computed at query time, then replaces
    media['info'] wholesale.

    Returns False (and logs an error) when nothing is left to store.
    """
    # These flags are UI/query state, not provider metadata
    info.pop('in_wanted', None)
    info.pop('in_library', None)

    # `not info` already covers the empty dict; the old extra
    # `len(info) == 0` check was redundant
    if not info:
        log.error('Could not update, no show info to work with: %s', media.get('identifier'))
        return False

    # Update basic info
    media['info'] = info
|
def updateEpisodes(self, media, seasons):
    """Sync provider season/episode info into the show's library tree.

    Seasons missing from the tree are skipped with a warning; episodes
    missing from a season are created on the fly.
    """
    # Current season/episode tree for this show
    show_tree = fireEvent('library.tree', media_id = media['_id'], single = True)

    for season_num, season_info in seasons.items():
        episodes = season_info.get('episodes', {})

        # Locate the season document with this number
        season = find(lambda s: s.get('info', {}).get('number', 0) == season_num, show_tree.get('seasons', []))
        if not season:
            log.warning('Unable to find season "%s"', season_num)
            continue

        fireEvent('show.season.update', season['_id'], info = season_info, single = True)

        for episode_num, episode_info in episodes.items():
            episode_info['season_number'] = season_num

            # Locate the episode document with this number
            episode = find(lambda e: e.get('info', {}).get('number', 0) == episode_num, season.get('episodes', []))
            if episode:
                fireEvent('show.episode.update', episode['_id'], info = episode_info, single = True)
            else:
                log.debug('Creating new episode %s in season %s', (episode_num, season_num))
                fireEvent('show.episode.add', season.get('_id'), episode_info, update_after = False, single = True)
|
def updateExtras(self, media, info, store=False):
    """Fetch extra artwork for `media`; optionally persist the document.

    :param store: when True, write the (possibly modified) doc back to db
    """
    db = get_db()

    # Download/refresh the poster from the provider image urls
    self.getPoster(media, info.get('images', []))

    if store:
        db.update(media)
@ -0,0 +1,96 @@ |
|||||
|
from couchpotato import get_db |
||||
|
from couchpotato.core.event import addEvent, fireEvent, fireEventAsync |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.helpers.variable import tryInt |
||||
|
from couchpotato.core.media import MediaBase |
||||
|
|
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'Season' |
||||
|
|
||||
|
|
||||
|
class Season(MediaBase):
    """Media handler for a single show season (media type 'show.season')."""

    _type = 'show.season'

    def __init__(self):
        addEvent('show.season.add', self.add)
        addEvent('show.season.update', self.update)
        addEvent('show.season.update_extras', self.updateExtras)

    def add(self, parent_id, info = None, update_after = True, status = None):
        """Create (or refresh) a season document under show `parent_id`.

        :param update_after: False to skip fetching extras, 'async' to
                             fetch them in the background, anything else
                             fetches them inline
        :param status: initial status, defaults to 'active'
        :return: stored season document
        """
        if not info: info = {}

        identifiers = info.pop('identifiers', None)
        info.pop('episodes', None)  # episodes are stored as separate media docs

        # Add Season
        season_info = {
            '_t': 'media',
            'type': 'show.season',
            'identifiers': identifiers,
            'status': status if status else 'active',
            'parent_id': parent_id,
            'info': info,  # Returned dict by providers
        }

        # Check if season already exists
        existing_season = fireEvent('media.with_identifiers', identifiers, with_doc = True, types = [self._type], single = True)

        db = get_db()

        if existing_season:
            s = existing_season['doc']
            s.update(season_info)

            season = db.update(s)
        else:
            season = db.insert(season_info)

        # Update library info
        if update_after is not False:
            # FIX: was `update_after is 'async'` — identity comparison on a
            # string literal is implementation-dependent; compare by value
            handle = fireEventAsync if update_after == 'async' else fireEvent
            handle('show.season.update_extras', season, info, store = True, single = True)

        return season

    def update(self, media_id = None, identifiers = None, info = None):
        """Refresh info of an existing season document.

        Fetches fresh provider info via 'season.info' when none is given.
        :return: updated season document (None when shutting down)
        """
        if not info: info = {}

        if self.shuttingDown():
            return

        db = get_db()

        season = db.get('id', media_id)
        show = db.get('id', season['parent_id'])

        # Get new info
        if not info:
            info = fireEvent('season.info', show.get('identifiers'), {
                'season_number': season.get('info', {}).get('number', 0)
            }, merge = True)

        identifiers = info.pop('identifiers', None) or identifiers
        info.pop('episodes', None)

        # Update/create media
        # FIX: both the popped value and the argument may be None —
        # dict.update(None) would raise TypeError
        if identifiers:
            season['identifiers'].update(identifiers)
        season.update({'info': info})

        self.updateExtras(season, info)

        db.update(season)
        return season

    def updateExtras(self, season, info, store=False):
        """Fetch season artwork; optionally persist the document."""
        db = get_db()

        # Get images
        image_urls = info.get('images', [])
        existing_files = season.get('files', {})
        self.getPoster(image_urls, existing_files)

        if store:
            db.update(season)
@ -0,0 +1,128 @@ |
|||||
|
// Single episode row inside a show's season/episode panel.
var Episode = new Class({

	Extends: BlockBase,

	// Instantiated item actions, keyed by lower-cased action name
	action: {},

	initialize: function(show, options, data){
		var self = this;
		self.setOptions(options);

		self.show = show;
		self.options = options;
		self.data = data; // episode media document from the API

		// Quality profile is shared with the parent show
		self.profile = self.show.profile;

		self.el = new Element('div.item.episode').adopt(
			self.detail = new Element('div.item.data')
		);

		self.create();
	},

	// Build the row: number, title, air date, quality badges and actions
	create: function(){
		var self = this;

		self.detail.set('id', 'episode_'+self.data._id);

		self.detail.adopt(
			new Element('span.episode', {'text': (self.data.info.number || 0)}),
			new Element('span.name', {'text': self.getTitle()}),
			new Element('span.firstaired', {'text': self.data.info.firstaired}),

			// Clicking the quality badges forwards to the releases toggle
			self.quality = new Element('span.quality', {
				'events': {
					'click': function(e){
						var releases = self.detail.getElement('.item-actions .releases');

						if(releases.isVisible())
							releases.fireEvent('click', [e])
					}
				}
			}),
			self.actions = new Element('div.item-actions')
		);

		// Add profile
		if(self.profile.data) {
			self.profile.getTypes().each(function(type){
				var q = self.addQuality(type.get('quality'), type.get('3d'));

				// Mark qualities that stop the search once found
				if((type.finish == true || type.get('finish')) && !q.hasClass('finish')){
					q.addClass('finish');
					q.set('title', q.get('title') + ' Will finish searching for this movie if this quality is found.')
				}
			});
		}

		// Add releases
		self.updateReleases();

		// Instantiate configured item actions; render those that have UI
		Object.each(self.options.actions, function(action, key){
			self.action[key.toLowerCase()] = action = new self.options.actions[key](self);
			if(action.el)
				self.actions.adopt(action)
		});
	},

	// Reflect known release statuses on the quality badges
	updateReleases: function(){
		var self = this;
		if(!self.data.releases || self.data.releases.length == 0) return;

		self.data.releases.each(function(release){

			var q = self.quality.getElement('.q_'+ release.quality+(release.is_3d ? '.is_3d' : ':not(.is_3d)')),
				status = release.status;

			// Add a badge for out-of-profile qualities that were still grabbed
			if(!q && (status == 'snatched' || status == 'seeding' || status == 'done'))
				q = self.addQuality(release.quality, release.is_3d || false);

			if (q && !q.hasClass(status)){
				q.addClass(status);
				q.set('title', (q.get('title') ? q.get('title') : '') + ' status: '+ status)
			}

		});
	},

	// Append a quality badge element to the quality span and return it
	addQuality: function(quality, is_3d){
		var self = this,
			q = Quality.getQuality(quality);

		return new Element('span', {
			'text': q.label + (is_3d ? ' 3D' : ''),
			'class': 'q_'+q.identifier + (is_3d ? ' is_3d' : ''),
			'title': ''
		}).inject(self.quality);
	},

	// Display title: first known title, or 'Episode N' as fallback
	getTitle: function(){
		var self = this;

		var title = '';

		if(self.data.info.titles && self.data.info.titles.length > 0) {
			title = self.data.info.titles[0];
		} else {
			title = 'Episode ' + self.data.info.number;
		}

		return title;
	},

	// IMDB id, from nested identifiers or a flat attribute
	getIdentifier: function(){
		var self = this;

		try {
			return self.get('identifiers').imdb;
		}
		catch (e){ }

		return self.get('imdb');
	},

	// Attribute lookup: document root first, then its info dict
	get: function(attr){
		return this.data[attr] || this.data.info[attr]
	}
});
@ -0,0 +1,8 @@ |
|||||
|
// Show listing: reuses the movie list widget wholesale, only swapping
// the media type and the key the API returns results under.
var ShowList = new Class({

	Extends: MovieList,

	media_type: 'show',
	list_key: 'shows'

});
@ -0,0 +1,56 @@ |
|||||
|
// Top-level "Shows" page: hosts sub pages (currently only 'Wanted')
// and a shared navigation bar.
Page.Shows = new Class({

	Extends: PageBase,

	name: 'shows',
	icon: 'show',
	sub_pages: ['Wanted'], // sub page names; presumably resolved to objects by PageBase — TODO confirm
	default_page: 'Wanted',
	current_page: null,

	initialize: function(parent, options){
		var self = this;
		self.parent(parent, options);

		// Shared navigation bar at the top of the page content
		self.navigation = new BlockNavigation();
		$(self.navigation).inject(self.content, 'top');

		// Toggle show-support styling when the backend en/disables shows
		App.on('shows.enabled', self.toggleShows.bind(self));

	},

	// Route dispatcher: hide the current sub page, resolve the requested
	// one from the action string, then open and show it.
	defaultAction: function(action, params){
		var self = this;

		if(self.current_page){
			self.current_page.hide();

			if(self.current_page.list && self.current_page.list.navigation)
				self.current_page.list.navigation.dispose();
		}

		var route = new Route();
		route.parse(action);

		var page_name = route.getPage() != 'index' ? route.getPage().capitalize() : self.default_page;

		// NOTE(review): assumes sub_pages entries carry `name` and `class`
		// by the time this runs — confirm against PageBase.
		var page = self.sub_pages.filter(function(page){
			return page.name == page_name;
		}).pick()['class'];

		page.open(route.getAction() || 'index', params);
		page.show();

		// Move the sub page's list navigation into the shared bar
		if(page.list && page.list.navigation)
			page.list.navigation.inject(self.navigation);

		self.current_page = page;
		self.navigation.activate(page_name.toLowerCase());

	},

	// Add/remove the body class that reveals show-related UI
	toggleShows: function(notification) {
		document.body[notification.data === true ? 'addClass' : 'removeClass']('show_support');
	}

});
@ -0,0 +1,7 @@ |
|||||
|
// Search-result row for shows, reusing the movie result row.
var BlockSearchShowItem = new Class({

	Extends: BlockSearchMovieItem,

	// NOTE(review): this says 'movie' although the class is for shows —
	// presumably it should be 'show'; confirm before changing.
	media_type: 'movie'

});
@ -0,0 +1,127 @@ |
|||||
|
// Season header row inside a show's season/episode panel.
// Nearly identical to the Episode class; only the rendered columns
// and the title logic differ.
var Season = new Class({

	Extends: BlockBase,

	// Instantiated item actions, keyed by lower-cased action name
	action: {},

	initialize: function(show, options, data){
		var self = this;
		self.setOptions(options);

		self.show = show;
		self.options = options;
		self.data = data; // season media document from the API

		// Quality profile is shared with the parent show
		self.profile = self.show.profile;

		self.el = new Element('div.item.season').adopt(
			self.detail = new Element('div.item.data')
		);

		self.create();
	},

	// Build the row: title, quality badges and actions
	create: function(){
		var self = this;

		self.detail.set('id', 'season_'+self.data._id);

		self.detail.adopt(
			new Element('span.name', {'text': self.getTitle()}),

			// Clicking the quality badges forwards to the releases toggle
			self.quality = new Element('span.quality', {
				'events': {
					'click': function(e){
						var releases = self.detail.getElement('.item-actions .releases');

						if(releases.isVisible())
							releases.fireEvent('click', [e])
					}
				}
			}),
			self.actions = new Element('div.item-actions')
		);

		// Add profile
		if(self.profile.data) {
			self.profile.getTypes().each(function(type){
				var q = self.addQuality(type.get('quality'), type.get('3d'));

				// Mark qualities that stop the search once found
				if((type.finish == true || type.get('finish')) && !q.hasClass('finish')){
					q.addClass('finish');
					q.set('title', q.get('title') + ' Will finish searching for this movie if this quality is found.')
				}
			});
		}

		// Add releases
		self.updateReleases();

		// Instantiate configured item actions; render those that have UI
		Object.each(self.options.actions, function(action, key){
			self.action[key.toLowerCase()] = action = new self.options.actions[key](self);
			if(action.el)
				self.actions.adopt(action)
		});
	},

	// Reflect known release statuses on the quality badges
	updateReleases: function(){
		var self = this;
		if(!self.data.releases || self.data.releases.length == 0) return;

		self.data.releases.each(function(release){

			var q = self.quality.getElement('.q_'+ release.quality+(release.is_3d ? '.is_3d' : ':not(.is_3d)')),
				status = release.status;

			// Add a badge for out-of-profile qualities that were still grabbed
			if(!q && (status == 'snatched' || status == 'seeding' || status == 'done'))
				q = self.addQuality(release.quality, release.is_3d || false);

			if (q && !q.hasClass(status)){
				q.addClass(status);
				q.set('title', (q.get('title') ? q.get('title') : '') + ' status: '+ status)
			}

		});
	},

	// Append a quality badge element to the quality span and return it
	addQuality: function(quality, is_3d){
		var self = this,
			q = Quality.getQuality(quality);

		return new Element('span', {
			'text': q.label + (is_3d ? ' 3D' : ''),
			'class': 'q_'+q.identifier + (is_3d ? ' is_3d' : ''),
			'title': ''
		}).inject(self.quality);
	},

	// Display title: 'Season N', or 'Specials' for season 0
	getTitle: function(){
		var self = this;

		var title = '';

		if(self.data.info.number) {
			title = 'Season ' + self.data.info.number;
		} else {
			// Season 0 / Specials
			title = 'Specials';
		}

		return title;
	},

	// IMDB id, from nested identifiers or a flat attribute
	getIdentifier: function(){
		var self = this;

		try {
			return self.get('identifiers').imdb;
		}
		catch (e){ }

		return self.get('imdb');
	},

	// Attribute lookup: document root first, then its info dict
	get: function(attr){
		return this.data[attr] || this.data.info[attr]
	}
});
@ -0,0 +1,92 @@ |
|||||
|
// Slide-in panel listing all seasons and episodes of a show.
var Episodes = new Class({

	initialize: function(show, options) {
		var self = this;

		self.show = show;
		self.options = options;
	},

	// Build the panel once, fetch the tree from 'library.tree', then slide open
	open: function(){
		var self = this;

		if(!self.container){
			self.container = new Element('div.options').grab(
				self.episodes_container = new Element('div.episodes.table')
			);

			self.container.inject(self.show, 'top');

			Api.request('library.tree', {
				'data': {
					'media_id': self.show.data._id
				},
				'onComplete': function(json){
					self.data = json.result;

					self.createEpisodes();
				}
			});
		}

		self.show.slide('in', self.container, true);
	},

	// Render each season header followed by its sorted episodes
	createEpisodes: function() {
		var self = this;

		self.data.seasons.sort(self.sortSeasons);
		self.data.seasons.each(function(season) {
			self.createSeason(season);

			season.episodes.sort(self.sortEpisodes);
			season.episodes.each(function(episode) {
				self.createEpisode(episode);
			});
		});
	},

	createSeason: function(season) {
		var self = this,
			s = new Season(self.show, self.options, season);

		$(s).inject(self.episodes_container);
	},

	createEpisode: function(episode){
		var self = this,
			e = new Episode(self.show, self.options, episode);

		$(e).inject(self.episodes_container);
	},

	// Comparator: ascending by season number
	sortSeasons: function(a, b) {
		// Move "Specials" (no season number) to the bottom of the list
		if(!a.info.number) {
			return 1;
		}

		if(!b.info.number) {
			return -1;
		}

		// Order seasons ascending (original comment said "descending",
		// but a < b returning -1 sorts ascending)
		if(a.info.number < b.info.number)
			return -1;

		if(a.info.number > b.info.number)
			return 1;

		return 0;
	},

	// Comparator: ascending by episode number
	sortEpisodes: function(a, b) {
		if(a.info.number < b.info.number)
			return -1;

		if(a.info.number > b.info.number)
			return 1;

		return 0;
	}
});
@ -0,0 +1,5 @@ |
|||||
|
// A show item behaves exactly like a movie item for now;
// all rendering and actions are inherited unchanged.
var Show = new Class({

	Extends: Movie

});
File diff suppressed because it is too large
@ -0,0 +1,28 @@ |
|||||
|
// "Wanted" sub page of the Shows section.
var ShowsWanted = new Class({
	Extends: PageBase,

	name: 'wanted',
	title: 'List of TV Shows subscribed to',
	folder_browser: null,
	has_tab: false,

	// Lazily build the wanted-shows list the first time the page opens
	indexAction: function(){
		var self = this;

		if(!self.wanted){

			// Wanted shows list (reuses the movie list widget via ShowList)
			self.wanted = new ShowList({
				'identifier': 'wanted',
				'status': 'active',
				'type': 'show',
				'actions': [MA.IMDB, MA.Release, MA.Refresh, MA.Delete],
				'add_new': true,
				'on_empty_element': App.createUserscriptButtons().addClass('empty_wanted')
			});
			$(self.wanted).inject(self.content);
		}

	}

});
@ -0,0 +1,71 @@ |
|||||
|
from couchpotato.core.event import addEvent, fireEvent |
||||
|
from couchpotato.core.helpers.variable import tryInt |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media._base.library.base import LibraryBase |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'EpisodeLibraryPlugin' |
||||
|
|
||||
|
|
||||
|
class EpisodeLibraryPlugin(LibraryBase):
    """Builds search queries and identifiers for 'show.episode' media."""

    def __init__(self):
        addEvent('library.query', self.query)
        addEvent('library.identifier', self.identifier)

    def query(self, media, first = True, condense = True, include_identifier = True, **kwargs):
        """Return search title(s) for an episode, e.g. 'Show S01E02'.

        @param first: return only the first title instead of the full list
        @param condense: pass-through to the season/show title query
        @param include_identifier: append the SxxEyy identifier to titles
        """
        if media.get('type') != 'show.episode':
            return

        related = fireEvent('library.related', media, single = True)

        # Get season titles (they already carry the 'Sxx' part when requested)
        titles = fireEvent(
            'library.query', related['season'],

            first = False,
            include_identifier = include_identifier,
            condense = condense,

            single = True
        )

        # Add episode identifier to titles
        if include_identifier:
            identifier = fireEvent('library.identifier', media, single = True)

            # FIX: explicit None check so episode number 0 isn't skipped
            # (consistent with the season plugin's `is not None` check)
            if identifier and identifier.get('episode') is not None:
                titles = [title + ('E%02d' % identifier['episode']) for title in titles]

        if first:
            return titles[0] if titles else None

        return titles

    def identifier(self, media):
        """Return {'season': int|None, 'episode': int|None} for an episode."""
        if media.get('type') != 'show.episode':
            return

        identifier = {
            'season': None,
            'episode': None
        }

        # TODO identifier mapping
        # scene_map = media['info'].get('map_episode', {}).get('scene')

        # if scene_map:
        #     # Use scene mappings if they are available
        #     identifier['season'] = scene_map.get('season_nr')
        #     identifier['episode'] = scene_map.get('episode_nr')
        # else:
        # Fallback to normal season/episode numbers
        identifier['season'] = media['info'].get('season_number')
        identifier['episode'] = media['info'].get('number')

        # Cast identifiers to integers
        # TODO this will need changing to support identifiers with trailing 'a', 'b' characters
        identifier['season'] = tryInt(identifier['season'], None)
        identifier['episode'] = tryInt(identifier['episode'], None)

        return identifier
@ -0,0 +1,52 @@ |
|||||
|
from couchpotato.core.event import addEvent, fireEvent |
||||
|
from couchpotato.core.helpers.variable import tryInt |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media._base.library.base import LibraryBase |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'SeasonLibraryPlugin' |
||||
|
|
||||
|
|
||||
|
class SeasonLibraryPlugin(LibraryBase):
    """Builds search queries and identifiers for 'show.season' media."""

    def __init__(self):
        addEvent('library.query', self.query)
        addEvent('library.identifier', self.identifier)

    def query(self, media, first = True, condense = True, include_identifier = True, **kwargs):
        """Return search title(s) for a season, e.g. 'Show S01'."""
        if media.get('type') != 'show.season':
            return

        related = fireEvent('library.related', media, single = True)

        # Build on the parent show's titles
        titles = fireEvent(
            'library.query', related['show'],

            first = False,
            condense = condense,

            single = True
        )

        # TODO map_names

        # Append the ' Sxx' season identifier when requested
        if include_identifier:
            season_identifier = fireEvent('library.identifier', media, single = True)

            if season_identifier and season_identifier.get('season') is not None:
                suffix = ' S%02d' % season_identifier['season']
                titles = [title + suffix for title in titles]

        return (titles[0] if titles else None) if first else titles

    def identifier(self, media):
        """Return {'season': int} for a season media item."""
        if media.get('type') != 'show.season':
            return

        return {
            'season': tryInt(media['info']['number'], None)
        }
@ -0,0 +1,38 @@ |
|||||
|
from couchpotato.core.event import addEvent |
||||
|
from couchpotato.core.helpers.encoding import simplifyString |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media._base.library.base import LibraryBase |
||||
|
from qcond import QueryCondenser |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'ShowLibraryPlugin' |
||||
|
|
||||
|
|
||||
|
class ShowLibraryPlugin(LibraryBase):
    """Builds search queries for 'show' media items."""

    # Shared across instances: condenses alternative titles into a
    # minimal set of distinct search queries
    query_condenser = QueryCondenser()

    def __init__(self):
        addEvent('library.query', self.query)

    def query(self, media, first = True, condense = True, include_identifier = True, **kwargs):
        """Return search title(s) for a show."""
        if media.get('type') != 'show':
            return

        titles = media['info']['titles']

        if condense:
            # Prefer condensed titles; fall back to simplified originals
            # when condensing produced nothing usable
            distinct = self.query_condenser.distinct(titles)
            titles = distinct if distinct else [simplifyString(title) for title in titles]

        if first:
            return titles[0] if titles else None

        return titles
@ -0,0 +1,7 @@ |
|||||
|
from .main import ShowMatcher |
||||
|
|
||||
|
|
||||
|
def autoload():
    # Plugin entry point: instantiate the multi-provider show matcher
    return ShowMatcher()


# No user-configurable settings for this plugin
config = []
@ -0,0 +1,61 @@ |
|||||
|
from couchpotato import fireEvent, CPLog, tryInt |
||||
|
from couchpotato.core.event import addEvent |
||||
|
from couchpotato.core.media._base.matcher.base import MatcherBase |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
|
||||
|
class Base(MatcherBase):
    """Shared release-matching logic for show season/episode types.

    Subclasses set `type` and implement correctIdentifier().
    """

    def __init__(self):
        super(Base, self).__init__()

        # Expose identifier checking as an event, namespaced per media type
        addEvent('%s.matcher.correct_identifier' % self.type, self.correctIdentifier)

    def correct(self, chain, release, media, quality):
        """Return True when the parsed release `chain` matches `media`:
        both its identifier and its title must check out."""
        log.info("Checking if '%s' is valid", release['name'])
        log.info2('Release parsed as: %s', chain.info)

        if not fireEvent('%s.matcher.correct_identifier' % self.type, chain, media):
            log.info('Wrong: %s, identifier does not match', release['name'])
            return False

        if not fireEvent('matcher.correct_title', chain, media):
            log.info("Wrong: '%s', undetermined naming.", (' '.join(chain.info['show_name'])))
            return False

        return True

    def correctIdentifier(self, chain, media):
        # Implemented by the Season/Episode subclasses
        raise NotImplementedError()

    def getChainIdentifier(self, chain):
        """Flatten the chain's parsed 'identifier' info, casting values to int.

        Returns None when no identifier was parsed, or when a list value
        contains conflicting season/episode numbers.
        """
        if 'identifier' not in chain.info:
            return None

        identifier = self.flattenInfo(chain.info['identifier'])

        # Try cast values to integers
        for key, value in identifier.items():
            if isinstance(value, list):
                # NOTE(review): an empty list here would raise IndexError on
                # value[0] — confirm upstream guarantees non-empty lists.
                if len(value) <= 1:
                    value = value[0]
                else:
                    # It might contain multiple season or episode values, but
                    # there's a chance that it contains the same identifier
                    # multiple times.
                    # NOTE(review): the `x is None` test in the elif below is
                    # always false at that point (the if-branch just handled
                    # it) — harmless, but confirm the intended handling of
                    # non-numeric first entries.
                    x, y = None, None
                    for y in value:
                        y = tryInt(y, None)
                        if x is None:
                            x = y
                        elif x is None or y is None or x != y:
                            break
                    if x is not None and y is not None and x == y:
                        # All entries agreed on the same number
                        value = value[0]
                    else:
                        log.warning('Wrong: identifier contains multiple season or episode values, unsupported: %s' % repr(value))
                        return None

            identifier[key] = tryInt(value, value)

        return identifier
@ -0,0 +1,30 @@ |
|||||
|
from couchpotato import fireEvent, CPLog |
||||
|
from couchpotato.core.media.show.matcher.base import Base |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
|
||||
|
class Episode(Base):
    """Matcher for single-episode releases."""

    type = 'show.episode'

    def correctIdentifier(self, chain, media):
        """Return True when the release chain's parsed identifier equals
        the episode identifier the library expects for `media`."""
        parsed = self.getChainIdentifier(chain)
        if not parsed:
            log.info2('Wrong: release identifier is not valid (unsupported or missing identifier)')
            return False

        # TODO - Parse episode ranges from identifier to determine if they are multi-part episodes
        has_range = any([key in parsed for key in ['episode_from', 'episode_to']])
        if has_range:
            log.info2('Wrong: releases with identifier ranges are not supported yet')
            return False

        required = fireEvent('library.identifier', media, single = True)

        # TODO - Support air by date episodes
        # TODO - Support episode parts

        if parsed != required:
            log.info2('Wrong: required identifier (%s) does not match release identifier (%s)', (required, parsed))
            return False

        return True
@ -0,0 +1,9 @@ |
|||||
|
from couchpotato.core.media._base.providers.base import MultiProvider |
||||
|
from couchpotato.core.media.show.matcher.episode import Episode |
||||
|
from couchpotato.core.media.show.matcher.season import Season |
||||
|
|
||||
|
|
||||
|
class ShowMatcher(MultiProvider):
    """Aggregates the season and episode matchers as one provider."""

    def getTypes(self):
        # Concrete matcher classes instantiated by MultiProvider
        return [Season, Episode]
@ -0,0 +1,27 @@ |
|||||
|
from couchpotato import fireEvent, CPLog |
||||
|
from couchpotato.core.media.show.matcher.base import Base |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
|
||||
|
class Season(Base):
    """Matcher for full-season releases."""

    type = 'show.season'

    def correctIdentifier(self, chain, media):
        """Return True when the release chain's parsed identifier equals
        the season identifier the library expects for `media`."""
        identifier = self.getChainIdentifier(chain)
        if not identifier:
            log.info2('Wrong: release identifier is not valid (unsupported or missing identifier)')
            return False

        # TODO - Parse episode ranges from identifier to determine if they are season packs
        if any([x in identifier for x in ['episode_from', 'episode_to']]):
            log.info2('Wrong: releases with identifier ranges are not supported yet')
            return False

        # Identifier the library expects for this media (e.g. {'season': 1})
        required = fireEvent('library.identifier', media, single = True)

        if identifier != required:
            log.info2('Wrong: required identifier (%s) does not match release identifier (%s)', (required, identifier))
            return False

        return True
@ -0,0 +1,13 @@ |
|||||
|
from couchpotato.core.media._base.providers.info.base import BaseInfoProvider |
||||
|
|
||||
|
|
||||
|
class ShowProvider(BaseInfoProvider):
    # Base for info providers handling whole-show metadata
    type = 'show'


class SeasonProvider(BaseInfoProvider):
    # Base for info providers handling per-season metadata
    type = 'show.season'


class EpisodeProvider(BaseInfoProvider):
    # Base for info providers handling per-episode metadata
    type = 'show.episode'
@ -0,0 +1,376 @@ |
|||||
|
from datetime import datetime |
||||
|
import os |
||||
|
import traceback |
||||
|
|
||||
|
from couchpotato import Env |
||||
|
|
||||
|
from couchpotato.core.event import addEvent |
||||
|
from couchpotato.core.helpers.encoding import simplifyString, toUnicode |
||||
|
from couchpotato.core.helpers.variable import splitString, tryInt, tryFloat |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media.show.providers.base import ShowProvider |
||||
|
from tvdb_api import tvdb_exceptions |
||||
|
from tvdb_api.tvdb_api import Tvdb, Show |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'TheTVDb' |
||||
|
|
||||
|
|
||||
|
class TheTVDb(ShowProvider):
    """Show/season/episode info provider backed by thetvdb.com (tvdb_api).

    Registers handlers for the 'show.info', 'season.info' and 'episode.info'
    events (priority 1) and answers them via the tvdb_api client, caching
    parsed results through the plugin cache.
    """

    # TODO: Consider grabbing zips to put less strain on tvdb
    # TODO: Unicode stuff (check)
    # TODO: Notify frontend on error (tvdb down at the moment)
    # TODO: Expose apikey in setting so it can be changed by user

    def __init__(self):
        addEvent('show.info', self.getShowInfo, priority = 1)
        addEvent('season.info', self.getSeasonInfo, priority = 1)
        addEvent('episode.info', self.getEpisodeInfo, priority = 1)

        # Parameters handed to the Tvdb() constructor in _setup().
        self.tvdb_api_parms = {
            'apikey': self.conf('api_key'),
            'banners': True,
            'language': 'en',
            'cache': os.path.join(Env.get('cache_dir'), 'thetvdb_api'),
        }
        self._setup()

    def _setup(self):
        """Create the tvdb_api client from self.tvdb_api_parms."""
        self.tvdb = Tvdb(**self.tvdb_api_parms)
        self.valid_languages = self.tvdb.config['valid_languages']

    def getShow(self, identifier = None):
        """Return the tvdb_api Show object for `identifier`, or None on error."""
        show = None
        try:
            log.debug('Getting show: %s', identifier)
            show = self.tvdb[int(identifier)]
        except (tvdb_exceptions.tvdb_error, IOError):
            log.error('Failed to getShowInfo for show id "%s": %s', (identifier, traceback.format_exc()))
            return None

        return show

    def getShowInfo(self, identifiers = None):
        """
        @param identifiers: dict with identifiers per provider
        @return: Full show info including season and episode info
        """

        if not identifiers or not identifiers.get('thetvdb'):
            return None

        identifier = tryInt(identifiers.get('thetvdb'))

        cache_key = 'thetvdb.cache.show.%s' % identifier
        # NOTE(review): the cache read is deliberately disabled here, so show
        # info is always refreshed from tvdb; re-enable via self.getCache(cache_key).
        result = None  #self.getCache(cache_key)
        if result:
            return result

        show = self.getShow(identifier = identifier)
        if show:
            result = self._parseShow(show)
            self.setCache(cache_key, result)

        return result or {}

    def getSeasonInfo(self, identifiers = None, params = None):
        """Either return a list of all seasons or a single season by number.

        identifier is the show 'id'
        """
        params = params or {}  # avoid shared mutable default argument

        if not identifiers or not identifiers.get('thetvdb'):
            return None

        season_number = params.get('season_number', None)
        identifier = tryInt(identifiers.get('thetvdb'))

        cache_key = 'thetvdb.cache.%s.%s' % (identifier, season_number)
        log.debug('Getting SeasonInfo: %s', cache_key)
        result = self.getCache(cache_key) or {}
        if result:
            return result

        try:
            show = self.tvdb[int(identifier)]
        except (tvdb_exceptions.tvdb_error, IOError):
            # Don't reference `show` in this message - it is unbound when the
            # lookup itself fails.
            log.error('Failed parsing TheTVDB SeasonInfo for id "%s": %s', (identifier, traceback.format_exc()))
            return False

        result = []
        for number, season in show.items():
            if season_number is None:
                result.append(self._parseSeason(show, number, season))
            elif number == season_number:
                # A specific season was requested: return just that season.
                # (Previously every non-matching season was appended too.)
                result = self._parseSeason(show, number, season)
                break

        self.setCache(cache_key, result)
        return result

    def getEpisodeInfo(self, identifier = None, params = None):
        """Either return a list of all episodes or a single episode.

        If episode_identifier contains an episode number to search for
        """
        params = params or {}  # avoid shared mutable default argument

        season_number = self.getIdentifier(params.get('season_number', None))
        episode_identifier = self.getIdentifier(params.get('episode_identifiers', None))
        identifier = self.getIdentifier(identifier)

        if not identifier and season_number is None:
            return False

        # season_identifier must contain the 'show id : season number' since there is no tvdb id
        # for season and we need a reference to both the show id and season number
        if not identifier and season_number:
            try:
                identifier, season_number = season_number.split(':')
                season_number = int(season_number)
            except:
                return None

        identifier = tryInt(identifier)
        cache_key = 'thetvdb.cache.%s.%s.%s' % (identifier, episode_identifier, season_number)
        log.debug('Getting EpisodeInfo: %s', cache_key)
        result = self.getCache(cache_key) or {}
        if result:
            return result

        try:
            show = self.tvdb[identifier]
        except (tvdb_exceptions.tvdb_error, IOError):
            # Don't reference `show` in this message - it is unbound when the
            # lookup itself fails.
            log.error('Failed parsing TheTVDB EpisodeInfo for id "%s": %s', (identifier, traceback.format_exc()))
            return False

        result = []
        for number, season in show.items():
            if season_number is not None and number != season_number:
                continue

            for episode in season.values():
                if episode_identifier is not None and episode['id'] == toUnicode(episode_identifier):
                    result = self._parseEpisode(episode)
                    self.setCache(cache_key, result)
                    return result
                else:
                    # NOTE(review): when episode_identifier is set but never
                    # matches, this still returns all episodes - confirm that
                    # fallback is intended before tightening it.
                    result.append(self._parseEpisode(episode))

        self.setCache(cache_key, result)
        return result

    def getIdentifier(self, value):
        """Accept either a raw identifier or an identifiers dict; return the thetvdb id."""
        if type(value) is dict:
            return value.get('thetvdb')

        return value

    def _parseShow(self, show):
        """Convert a tvdb_api show (or search-result dict) into CP's show dict."""

        #
        # NOTE: show object only allows direct access via
        # show['id'], not show.get('id')
        #
        def get(name):
            return show.get(name) if not hasattr(show, 'search') else show[name]

        ## Images
        poster = get('poster')
        backdrop = get('fanart')

        genres = splitString(get('genre'), '|')
        if get('firstaired') is not None:
            try: year = datetime.strptime(get('firstaired'), '%Y-%m-%d').year
            except: year = None
        else:
            year = None

        show_data = {
            'identifiers': {
                'thetvdb': tryInt(get('id')),
                'imdb': get('imdb_id'),
                'zap2it': get('zap2it_id'),
            },
            'type': 'show',
            'titles': [get('seriesname')],
            'images': {
                'poster': [poster] if poster else [],
                'backdrop': [backdrop] if backdrop else [],
                'poster_original': [],
                'backdrop_original': [],
            },
            'year': year,
            'genres': genres,
            'network': get('network'),
            'plot': get('overview'),
            'networkid': get('networkid'),
            'air_day': (get('airs_dayofweek') or '').lower(),
            'air_time': self.parseTime(get('airs_time')),
            'firstaired': get('firstaired'),
            'runtime': tryInt(get('runtime')),
            'contentrating': get('contentrating'),
            'rating': {},
            'actors': splitString(get('actors'), '|'),
            'status': get('status'),
            'language': get('language'),
        }

        if tryFloat(get('rating')):
            # Fixed: a stray trailing comma previously wrapped this value in a
            # one-element tuple instead of storing the [rating, votes] list.
            show_data['rating']['thetvdb'] = [tryFloat(get('rating')), tryInt(get('ratingcount'))]

        # Drop empty values.
        show_data = dict((k, v) for k, v in show_data.iteritems() if v)

        # Only load season info when available
        if type(show) == Show:

            # Parse season and episode data
            show_data['seasons'] = {}

            for season_nr in show:
                season = self._parseSeason(show, season_nr, show[season_nr])
                season['episodes'] = {}

                for episode_nr in show[season_nr]:
                    season['episodes'][episode_nr] = self._parseEpisode(show[season_nr][episode_nr])

                show_data['seasons'][season_nr] = season

        # Add alternative titles
        # try:
        #     raw = self.tvdb.search(show['seriesname'])
        #     if raw:
        #         for show_info in raw:
        #             if show_info['id'] == show_data['id'] and show_info.get('aliasnames', None):
        #                 for alt_name in show_info['aliasnames'].split('|'):
        #                     show_data['titles'].append(toUnicode(alt_name))
        # except (tvdb_exceptions.tvdb_error, IOError):
        #     log.error('Failed searching TheTVDB for "%s": %s', (show['seriesname'], traceback.format_exc()))

        return show_data

    def _parseSeason(self, show, number, season):
        """Build CP's season dict for season `number` of `show`.

        The tvdb_api Season object itself carries no data, so everything is
        derived from the show's banner data and ids.
        """

        poster = []
        try:
            # Pick the highest-scored (rating * votes) season banner in our language.
            candidates = {}
            for banner_id, data in show.data['_banners']['season']['season'].items():
                if data.get('season') == str(number) and data.get('language') == self.tvdb_api_parms['language']:
                    candidates[tryFloat(data.get('rating')) * tryInt(data.get('ratingcount'))] = data.get('_bannerpath')
            poster.append(candidates[sorted(candidates, reverse = True)[0]])
        except:
            pass

        identifier = tryInt(
            show['id'] if show.get('id') else show[number][1]['seasonid'])

        season_data = {
            'identifiers': {
                'thetvdb': identifier
            },
            'number': tryInt(number),
            'images': {
                'poster': poster,
            },
        }

        # Drop empty values.
        season_data = dict((k, v) for k, v in season_data.iteritems() if v)
        return season_data

    def _parseEpisode(self, episode):
        """Convert a tvdb_api episode record into CP's episode dict.

        `episode` is a dict-like tvdb_api Episode exposing keys such as
        'episodenumber', 'absolute_number', 'id', 'episodename', 'overview',
        'firstaired', 'imdb_id', 'language' and 'filename' (thumbnail url).
        """

        def get(name, default = None):
            return episode.get(name, default)

        poster = get('filename', [])

        episode_data = {
            'number': tryInt(get('episodenumber')),
            'absolute_number': tryInt(get('absolute_number')),
            'identifiers': {
                'thetvdb': tryInt(episode['id'])
            },
            'type': 'episode',
            'titles': [get('episodename')] if get('episodename') else [],
            'images': {
                'poster': [poster] if poster else [],
            },
            'released': get('firstaired'),
            'plot': get('overview'),
            'firstaired': get('firstaired'),
            'language': get('language'),
        }

        if get('imdb_id'):
            episode_data['identifiers']['imdb'] = get('imdb_id')

        # Drop empty values.
        episode_data = dict((k, v) for k, v in episode_data.iteritems() if v)
        return episode_data

    def parseTime(self, time):
        # TODO: normalize air-time strings; currently passed through untouched.
        return time

    def isDisabled(self):
        """Provider is disabled when no API key is configured."""
        if self.conf('api_key') == '':
            log.error('No API key provided.')
            return True
        else:
            return False
||||
|
|
||||
|
|
||||
|
# Settings registration for this provider. The group is hidden: only the
# API key is stored, pre-filled with the project's default key.
config = [{
    'name': 'thetvdb',
    'groups': [
        {
            'tab': 'providers',
            # Fixed: group name was 'tmdb', copy-pasted from the TMDB provider.
            'name': 'thetvdb',
            'label': 'TheTVDB',
            'hidden': True,
            'description': 'Used for all calls to TheTVDB.',
            'options': [
                {
                    'name': 'api_key',
                    'default': '7966C02F860586D2',
                    'label': 'Api Key',
                },
            ],
        },
    ],
}]
@ -0,0 +1,64 @@ |
|||||
|
from couchpotato.core.event import addEvent |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media.movie.providers.automation.trakt.main import TraktBase |
||||
|
from couchpotato.core.media.show.providers.base import ShowProvider |
||||
|
|
||||
|
# Module-level logger for this provider.
log = CPLog(__name__)

# Name under which CouchPotato's plugin loader registers this module.
autoload = 'Trakt'
||||
|
|
||||
|
|
||||
|
class Trakt(ShowProvider, TraktBase):
    """Show search provider backed by the Trakt API (via TraktBase.call)."""

    def __init__(self):
        addEvent('info.search', self.search, priority = 1)
        addEvent('show.search', self.search, priority = 1)

    def search(self, q, limit = 12):
        """Search Trakt for shows matching `q` and return parsed show dicts.

        `limit` is accepted for event-signature compatibility but is not yet
        forwarded to the Trakt call.
        Returns False when the provider or show support is disabled.
        """
        if self.isDisabled() or not self.conf('enabled', section='shows'):
            log.debug('Not searching for show: %s', q)
            return False

        # Search
        log.debug('Searching for show: "%s"', q)

        # NOTE(review): q is not url-encoded here; queries containing spaces
        # or special characters may break - confirm TraktBase.call handles it.
        response = self.call('search?type=show&query=%s' % (q))

        results = []
        # Guard: self.call may return None/False when the API request fails.
        for show in response or []:
            info = show.get('show')
            # Guard against malformed result entries without a nested 'show'.
            if info:
                results.append(self._parseShow(info))

        for result in results:
            if 'year' in result:
                log.info('Found: %s', result['titles'][0] + ' (' + str(result.get('year', 0)) + ')')
            else:
                log.info('Found: %s', result['titles'][0])

        return results

    def _parseShow(self, show):
        """Convert a Trakt show dict into CP's show dict (empty values dropped)."""
        # Images
        images = show.get('images', {})

        poster = images.get('poster', {}).get('thumb')
        backdrop = images.get('fanart', {}).get('thumb')

        # Build show dict
        show_data = {
            'identifiers': {
                'thetvdb': show.get('ids', {}).get('tvdb'),
                'imdb': show.get('ids', {}).get('imdb'),
                'tvrage': show.get('ids', {}).get('tvrage'),
            },
            'type': 'show',
            'titles': [show.get('title')],
            'images': {
                'poster': [poster] if poster else [],
                'backdrop': [backdrop] if backdrop else [],
                'poster_original': [],
                'backdrop_original': [],
            },
            'year': show.get('year'),
        }

        return dict((k, v) for k, v in show_data.iteritems() if v)
@ -0,0 +1,285 @@ |
|||||
|
from datetime import datetime |
||||
|
import os |
||||
|
import traceback |
||||
|
|
||||
|
from couchpotato import Env |
||||
|
|
||||
|
from couchpotato.core.event import addEvent |
||||
|
from couchpotato.core.helpers.encoding import simplifyString, toUnicode |
||||
|
from couchpotato.core.helpers.variable import splitString, tryInt, tryFloat |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media.show.providers.base import ShowProvider |
||||
|
from tvrage_api import tvrage_api |
||||
|
from tvrage_api import tvrage_exceptions |
||||
|
from tvrage_api.tvrage_api import Show |
||||
|
|
||||
|
# Module-level logger for this provider.
log = CPLog(__name__)

# Name under which CouchPotato's plugin loader registers this module.
autoload = 'TVRage'
||||
|
|
||||
|
|
||||
|
class TVRage(ShowProvider):
    """Show/season/episode info provider backed by tvrage.com (tvrage_api).

    Registered at priority 3 so it acts as a fallback behind TheTVDb.
    """

    def __init__(self):
        # Search is handled by Trakt exclusively as search functionality has
        # been removed from TheTVDB provider as well.
        addEvent('show.info', self.getShowInfo, priority = 3)
        addEvent('season.info', self.getSeasonInfo, priority = 3)
        addEvent('episode.info', self.getEpisodeInfo, priority = 3)

        # Parameters handed to the TVRage() constructor in _setup().
        self.tvrage_api_parms = {
            'apikey': self.conf('api_key'),
            'language': 'en',
            'cache': os.path.join(Env.get('cache_dir'), 'tvrage_api')
        }
        self._setup()

    def _setup(self):
        """Create the tvrage_api client from self.tvrage_api_parms."""
        self.tvrage = tvrage_api.TVRage(**self.tvrage_api_parms)
        self.valid_languages = self.tvrage.config['valid_languages']

    def getShow(self, identifier):
        """Return the tvrage_api Show object for `identifier`, or None on error."""
        show = None
        try:
            log.debug('Getting show: %s', identifier)
            show = self.tvrage[int(identifier)]
        except (tvrage_exceptions.tvrage_error, IOError):
            log.error('Failed to getShowInfo for show id "%s": %s', (identifier, traceback.format_exc()))

        return show

    def getShowInfo(self, identifiers = None):
        """Return cached or freshly-parsed show info for the tvrage identifier."""

        if not identifiers:
            # Raise exception instead? Invocation is clearly wrong!
            return None
        if 'tvrage' not in identifiers:
            # TVRage identifier unavailable, but invocation was valid.
            return None

        identifier = tryInt(identifiers['tvrage'], None)
        if identifier is None:
            # Raise exception instead? Invocation is clearly wrong!
            return None

        cache_key = 'tvrage.cache.show.%s' % identifier
        result = self.getCache(cache_key) or []
        if not result:
            show = self.getShow(identifier)
            if show is not None:
                result = self._parseShow(show)
                self.setCache(cache_key, result)

        return result

    def getSeasonInfo(self, identifiers = None, params = None):
        """Either return a list of all seasons or a single season by number.

        identifier is the show 'id'
        """
        params = params or {}  # avoid shared mutable default argument

        if not identifiers:
            # Raise exception instead? Invocation is clearly wrong!
            return None
        if 'tvrage' not in identifiers:
            # TVRage identifier unavailable, but invocation was valid.
            return None

        season_number = params.get('season_number', None)
        identifier = tryInt(identifiers['tvrage'], None)
        if identifier is None:
            # Raise exception instead? Invocation is clearly wrong!
            return None

        cache_key = 'tvrage.cache.%s.%s' % (identifier, season_number)
        log.debug('Getting TVRage SeasonInfo: %s', cache_key)
        result = self.getCache(cache_key) or {}
        if result:
            return result

        try:
            show = self.tvrage[int(identifier)]
        except (tvrage_exceptions.tvrage_error, IOError):
            # Don't reference `show` in this message - it is unbound when the
            # lookup itself fails.
            log.error('Failed parsing TVRage SeasonInfo for id "%s": %s', (identifier, traceback.format_exc()))
            return False

        result = []
        for number, season in show.items():
            if season_number is None:
                result.append(self._parseSeason(show, number, season))
            elif number == season_number:
                result = self._parseSeason(show, number, season)
                break

        self.setCache(cache_key, result)
        return result

    def getEpisodeInfo(self, identifiers = None, params = None):
        """Either return a list of all episodes or a single episode.

        If episode_identifier contains an episode number to search for
        """
        params = params or {}  # avoid shared mutable default argument

        if not identifiers:
            # Raise exception instead? Invocation is clearly wrong!
            return None
        if 'tvrage' not in identifiers:
            # TVRage identifier unavailable, but invocation was valid.
            return None

        season_number = params.get('season_number', None)
        episode_identifiers = params.get('episode_identifiers', None)
        identifier = tryInt(identifiers['tvrage'], None)
        if season_number is None:
            # Raise exception instead? Invocation is clearly wrong!
            return False
        if identifier is None:
            # season_identifier might contain the 'show id : season number'
            # since there is no tvrage id for season and we need a reference to
            # both the show id and season number.
            try:
                identifier, season_number = season_number.split(':')
                season_number = int(season_number)
                identifier = tryInt(identifier, None)
            except:
                pass

        if identifier is None:
            # Raise exception instead? Invocation is clearly wrong!
            return None

        episode_identifier = None
        if episode_identifiers:
            if 'tvrage' in episode_identifiers:
                episode_identifier = tryInt(episode_identifiers['tvrage'], None)
            if episode_identifier is None:
                return None

        cache_key = 'tvrage.cache.%s.%s.%s' % (identifier, episode_identifier, season_number)
        log.debug('Getting TVRage EpisodeInfo: %s', cache_key)
        result = self.getCache(cache_key) or {}
        if result:
            return result

        try:
            show = self.tvrage[int(identifier)]
        except (tvrage_exceptions.tvrage_error, IOError):
            # Don't reference `show` in this message - it is unbound when the
            # lookup itself fails.
            log.error('Failed parsing TVRage EpisodeInfo for id "%s": %s', (identifier, traceback.format_exc()))
            return False

        result = []
        for number, season in show.items():
            if season_number is not None and number != season_number:
                continue

            for episode in season.values():
                if episode_identifier is not None and episode['id'] == toUnicode(episode_identifier):
                    result = self._parseEpisode(episode)
                    self.setCache(cache_key, result)
                    return result
                else:
                    # NOTE(review): when episode_identifier is set but never
                    # matches, this still returns all episodes - confirm that
                    # fallback is intended before tightening it.
                    result.append(self._parseEpisode(episode))

        self.setCache(cache_key, result)
        return result

    def _parseShow(self, show):
        """Convert a tvrage_api show into CP's show dict."""
        #
        # NOTE: tvrage_api mimics tvdb_api, but some information is unavailable
        #

        #
        # NOTE: show object only allows direct access via
        # show['id'], not show.get('id')
        #
        def get(name):
            return show.get(name) if not hasattr(show, 'search') else show[name]

        genres = splitString(get('genre'), '|')
        classification = get('classification') or ''
        if classification == 'Talk Shows':
            # "Talk Show" is a genre on TheTVDB.com, as these types of shows,
            # e.g. "The Tonight Show Starring Jimmy Fallon", often use
            # different naming schemes, it might be useful to the searcher if
            # it is added here.
            genres.append('Talk Show')
        if get('firstaired') is not None:
            try: year = datetime.strptime(get('firstaired'), '%Y-%m-%d').year
            except: year = None
        else:
            year = None

        show_data = {
            'identifiers': {
                'tvrage': tryInt(get('id')),
            },
            'type': 'show',
            'titles': [get('seriesname')],
            'images': {
                'poster': [],
                'backdrop': [],
                'poster_original': [],
                'backdrop_original': [],
            },
            'year': year,
            'genres': genres,
            'network': get('network'),
            'air_day': (get('airs_dayofweek') or '').lower(),
            'air_time': self.parseTime(get('airs_time')),
            'firstaired': get('firstaired'),
            'runtime': tryInt(get('runtime')),
            'status': get('status'),
        }

        # Drop empty values.
        show_data = dict((k, v) for k, v in show_data.iteritems() if v)

        # Only load season info when available
        if type(show) == Show:

            # Parse season and episode data
            show_data['seasons'] = {}

            for season_nr in show:
                season = self._parseSeason(show, season_nr, show[season_nr])
                season['episodes'] = {}

                for episode_nr in show[season_nr]:
                    season['episodes'][episode_nr] = self._parseEpisode(show[season_nr][episode_nr])

                show_data['seasons'][season_nr] = season

        return show_data

    def _parseSeason(self, show, number, season):
        """Build CP's season dict; TVRage exposes no per-season metadata beyond the number."""

        season_data = {
            'number': tryInt(number),
        }

        season_data = dict((k, v) for k, v in season_data.iteritems() if v)
        return season_data

    def _parseEpisode(self, episode):
        """Convert a tvrage_api episode record into CP's episode dict."""

        def get(name, default = None):
            return episode.get(name, default)

        poster = get('filename', [])

        episode_data = {
            'number': tryInt(get('episodenumber')),
            'absolute_number': tryInt(get('absolute_number')),
            'identifiers': {
                'tvrage': tryInt(episode['id'])
            },
            'type': 'episode',
            'titles': [get('episodename')] if get('episodename') else [],
            'images': {
                'poster': [poster] if poster else [],
            },
            'released': get('firstaired'),
            'firstaired': get('firstaired'),
            'language': get('language'),
        }

        # Drop empty values.
        episode_data = dict((k, v) for k, v in episode_data.iteritems() if v)
        return episode_data

    def parseTime(self, time):
        # TODO: normalize air-time strings; currently passed through untouched.
        return time
@ -0,0 +1,216 @@ |
|||||
|
from couchpotato.core.event import addEvent |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.helpers.encoding import toUnicode, tryUrlencode |
||||
|
from couchpotato.core.media.show.providers.base import ShowProvider |
||||
|
|
||||
|
# Module-level logger for this provider.
log = CPLog(__name__)

# Name under which CouchPotato's plugin loader registers this module.
autoload = 'Xem'
||||
|
|
||||
|
|
||||
|
class Xem(ShowProvider): |
||||
|
''' |
||||
|
Mapping Information |
||||
|
=================== |
||||
|
|
||||
|
Single |
||||
|
------ |
||||
|
You will need the id / identifier of the show e.g. tvdb-id for American Dad! is 73141 |
||||
|
the origin is the name of the site/entity the episode, season (and/or absolute) numbers are based on |
||||
|
|
||||
|
http://thexem.de/map/single?id=&origin=&episode=&season=&absolute= |
||||
|
|
||||
|
episode, season and absolute are all optional but it wont work if you don't provide either episode and season OR absolute in |
||||
|
addition you can provide destination as the name of the wished destination, if not provided it will output all available |
||||
|
|
||||
|
When a destination has two or more addresses another entry will be added as _ ... for now the second address gets the index "2" |
||||
|
(the first index is omitted) and so on |
||||
|
|
||||
|
http://thexem.de/map/single?id=7529&origin=anidb&season=1&episode=2&destination=trakt |
||||
|
{ |
||||
|
"result":"success", |
||||
|
"data":{ |
||||
|
"trakt": {"season":1,"episode":3,"absolute":3}, |
||||
|
"trakt_2":{"season":1,"episode":4,"absolute":4} |
||||
|
}, |
||||
|
"message":"single mapping for 7529 on anidb." |
||||
|
} |
||||
|
|
||||
|
All |
||||
|
--- |
||||
|
Basically same as "single" just a little easier |
||||
|
The origin address is added into the output too!! |
||||
|
|
||||
|
http://thexem.de/map/all?id=7529&origin=anidb |
||||
|
|
||||
|
All Names |
||||
|
--------- |
||||
|
Get all names xem has to offer |
||||
|
non optional params: origin(an entity string like 'tvdb') |
||||
|
optional params: season, language |
||||
|
- season: a season number or a list like: 1,3,5 or a compare operator like ne,gt,ge,lt,le,eq and a season number. default would |
||||
|
return all |
||||
|
- language: a language string like 'us' or 'jp' default is all |
||||
|
- defaultNames: 1(yes) or 0(no) should the default names be added to the list ? default is 0(no) |
||||
|
|
||||
|
http://thexem.de/map/allNames?origin=tvdb&season=le1 |
||||
|
|
||||
|
{ |
||||
|
"result": "success", |
||||
|
"data": { |
||||
|
"248812": ["Dont Trust the Bitch in Apartment 23", "Don't Trust the Bitch in Apartment 23"], |
||||
|
"257571": ["Nazo no Kanojo X"], |
||||
|
"257875": ["Lupin III - Mine Fujiko to Iu Onna", "Lupin III Fujiko to Iu Onna", "Lupin the Third - Mine Fujiko to Iu Onna"] |
||||
|
}, |
||||
|
"message": "" |
||||
|
} |
||||
|
''' |
||||
|
|
||||
|
def __init__(self): |
||||
|
addEvent('show.info', self.getShowInfo, priority = 5) |
||||
|
addEvent('episode.info', self.getEpisodeInfo, priority = 5) |
||||
|
|
||||
|
self.config = {} |
||||
|
self.config['base_url'] = "http://thexem.de" |
||||
|
self.config['url_single'] = u"%(base_url)s/map/single?" % self.config |
||||
|
self.config['url_all'] = u"%(base_url)s/map/all?" % self.config |
||||
|
self.config['url_names'] = u"%(base_url)s/map/names?" % self.config |
||||
|
self.config['url_all_names'] = u"%(base_url)s/map/allNames?" % self.config |
||||
|
|
||||
|
def getShowInfo(self, identifiers = None): |
||||
|
if self.isDisabled(): |
||||
|
return {} |
||||
|
|
||||
|
identifier = identifiers.get('thetvdb') |
||||
|
|
||||
|
if not identifier: |
||||
|
return {} |
||||
|
|
||||
|
cache_key = 'xem.cache.%s' % identifier |
||||
|
log.debug('Getting showInfo: %s', cache_key) |
||||
|
result = self.getCache(cache_key) or {} |
||||
|
if result: |
||||
|
return result |
||||
|
|
||||
|
result['seasons'] = {} |
||||
|
|
||||
|
# Create season/episode and absolute mappings |
||||
|
url = self.config['url_all'] + "id=%s&origin=tvdb" % tryUrlencode(identifier) |
||||
|
response = self.getJsonData(url) |
||||
|
|
||||
|
if response and response.get('result') == 'success': |
||||
|
data = response.get('data', None) |
||||
|
self.parseMaps(result, data) |
||||
|
|
||||
|
# Create name alias mappings |
||||
|
url = self.config['url_names'] + "id=%s&origin=tvdb" % tryUrlencode(identifier) |
||||
|
response = self.getJsonData(url) |
||||
|
|
||||
|
if response and response.get('result') == 'success': |
||||
|
data = response.get('data', None) |
||||
|
self.parseNames(result, data) |
||||
|
|
||||
|
self.setCache(cache_key, result) |
||||
|
return result |
||||
|
|
||||
|
def getEpisodeInfo(self, identifiers = None, params = {}): |
||||
|
episode_num = params.get('episode_number', None) |
||||
|
if episode_num is None: |
||||
|
return False |
||||
|
|
||||
|
season_num = params.get('season_number', None) |
||||
|
if season_num is None: |
||||
|
return False |
||||
|
|
||||
|
result = self.getShowInfo(identifiers) |
||||
|
|
||||
|
if not result: |
||||
|
return False |
||||
|
|
||||
|
# Find season |
||||
|
if season_num not in result['seasons']: |
||||
|
return False |
||||
|
|
||||
|
season = result['seasons'][season_num] |
||||
|
|
||||
|
# Find episode |
||||
|
if episode_num not in season['episodes']: |
||||
|
return False |
||||
|
|
||||
|
return season['episodes'][episode_num] |
||||
|
|
||||
|
def parseMaps(self, result, data, master = 'tvdb'): |
||||
|
'''parses xem map and returns a custom formatted dict map |
||||
|
|
||||
|
To retreive map for scene: |
||||
|
if 'scene' in map['map_episode'][1][1]: |
||||
|
print map['map_episode'][1][1]['scene']['season'] |
||||
|
''' |
||||
|
if not isinstance(data, list): |
||||
|
return |
||||
|
|
||||
|
for episode_map in data: |
||||
|
origin = episode_map.pop(master, None) |
||||
|
if origin is None: |
||||
|
continue # No master origin to map to |
||||
|
|
||||
|
o_season = origin['season'] |
||||
|
o_episode = origin['episode'] |
||||
|
|
||||
|
# Create season info |
||||
|
if o_season not in result['seasons']: |
||||
|
result['seasons'][o_season] = {} |
||||
|
|
||||
|
season = result['seasons'][o_season] |
||||
|
|
||||
|
if 'episodes' not in season: |
||||
|
season['episodes'] = {} |
||||
|
|
||||
|
# Create episode info |
||||
|
if o_episode not in season['episodes']: |
||||
|
season['episodes'][o_episode] = {} |
||||
|
|
||||
|
episode = season['episodes'][o_episode] |
||||
|
episode['episode_map'] = episode_map |
||||
|
|
||||
|
def parseNames(self, result, data):
    """Attach xem title maps: the 'all' entry becomes the show-wide
    title_map, every remaining key is a season number whose titles are
    stored on that season entry (created on demand)."""
    result['title_map'] = data.pop('all', None)

    for season_no, title_map in data.items():
        # Season keys arrive as strings; normalise to int.
        season = result['seasons'].setdefault(int(season_no), {})
        season['title_map'] = title_map
||||
|
|
||||
|
def isDisabled(self):
    """Return True when this provider should be skipped.

    The provider is never disabled when the module is executed directly
    (handy for manual testing); otherwise the 'enabled' setting decides.
    """
    if __name__ == '__main__':
        return False

    # Simplified from an if/else pair that returned boolean literals.
    return not self.conf('enabled')
||||
|
|
||||
|
|
||||
|
# Settings definition consumed by the plugin framework: registers a single
# hidden 'enabled' toggle for TheXem under the "providers" tab, which backs
# the self.conf('enabled') lookup used by isDisabled().
config = [{
    'name': 'xem',
    'groups': [
        {
            'tab': 'providers',
            'name': 'xem',
            'label': 'TheXem',
            # Hidden: there is no user-facing UI for this provider.
            'hidden': True,
            'description': 'Used for all calls to TheXem.',
            'options': [
                {
                    'name': 'enabled',
                    'default': True,
                    'label': 'Enabled',
                },
            ],
        },
    ],
}]
@ -0,0 +1,51 @@ |
|||||
|
from couchpotato.core.helpers.encoding import tryUrlencode |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.event import fireEvent |
||||
|
from couchpotato.core.media._base.providers.base import MultiProvider |
||||
|
from couchpotato.core.media._base.providers.nzb.binsearch import Base |
||||
|
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider |
||||
|
from couchpotato.environment import Env |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'BinSearch' |
||||
|
|
||||
|
|
||||
|
class BinSearch(MultiProvider):
    """Registers the BinSearch show searchers (season packs and episodes)."""

    def getTypes(self):
        # Concrete searcher classes this MultiProvider dispatches to.
        return [Season, Episode]
||||
|
|
||||
|
|
||||
|
class Season(SeasonProvider, Base):
    """Season-pack search on BinSearch (advanced-search parameters)."""

    def buildUrl(self, media, quality):
        # Collect all advanced-search options first, encode in one go.
        params = {
            'q': fireEvent('media.search_query', media, single = True),
            'm': 'n',
            'max': 400,
            'adv_age': Env.setting('retention', 'nzb'),
            'adv_sort': 'date',
            'adv_col': 'on',
            'adv_nfo': 'on',
            'minsize': quality.get('size_min'),
            'maxsize': quality.get('size_max'),
        }
        return tryUrlencode(params)
||||
|
|
||||
|
|
||||
|
class Episode(EpisodeProvider, Base):
    """Single-episode search on BinSearch (same query shape as Season)."""

    def buildUrl(self, media, quality):
        # Collect all advanced-search options first, encode in one go.
        params = {
            'q': fireEvent('media.search_query', media, single = True),
            'm': 'n',
            'max': 400,
            'adv_age': Env.setting('retention', 'nzb'),
            'adv_sort': 'date',
            'adv_col': 'on',
            'adv_nfo': 'on',
            'minsize': quality.get('size_min'),
            'maxsize': quality.get('size_max'),
        }
        return tryUrlencode(params)
@ -0,0 +1,49 @@ |
|||||
|
from couchpotato.core.helpers.encoding import tryUrlencode |
||||
|
from couchpotato.core.event import fireEvent |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media._base.providers.base import MultiProvider |
||||
|
from couchpotato.core.media._base.providers.nzb.newznab import Base |
||||
|
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'Newznab' |
||||
|
|
||||
|
|
||||
|
class Newznab(MultiProvider):
    """Registers the Newznab show searchers (season packs and episodes)."""

    def getTypes(self):
        # Concrete searcher classes this MultiProvider dispatches to.
        return [Season, Episode]
||||
|
|
||||
|
|
||||
|
class Season(SeasonProvider, Base):
    """Newznab tvsearch query for a complete season."""

    def buildUrl(self, media, host):
        # Resolve the show title and the season number via library events.
        related = fireEvent('library.related', media, single = True)
        identifier = fireEvent('library.identifier', media, single = True)

        params = {
            't': 'tvsearch',
            'apikey': host['api_key'],
            'q': related['show']['title'],
            'season': identifier['season'],
            'extended': 1
        }
        return tryUrlencode(params)
||||
|
|
||||
|
|
||||
|
class Episode(EpisodeProvider, Base):
    """Newznab tvsearch query for a single episode."""

    def buildUrl(self, media, host):
        # Resolve the show title plus season/episode numbers via library events.
        related = fireEvent('library.related', media, single = True)
        identifier = fireEvent('library.identifier', media, single = True)

        params = {
            't': 'tvsearch',
            'apikey': host['api_key'],
            'q': related['show']['title'],
            'season': identifier['season'],
            'ep': identifier['episode'],
            'extended': 1
        }
        return tryUrlencode(params)
@ -0,0 +1,52 @@ |
|||||
|
from couchpotato.core.helpers.encoding import tryUrlencode |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.event import fireEvent |
||||
|
from couchpotato.core.media._base.providers.base import MultiProvider |
||||
|
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider |
||||
|
from couchpotato.core.media._base.providers.nzb.nzbclub import Base |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'NZBClub' |
||||
|
|
||||
|
|
||||
|
class NZBClub(MultiProvider):
    """Registers the NZBClub show searchers (season packs and episodes)."""

    def getTypes(self):
        # Concrete searcher classes this MultiProvider dispatches to.
        return [Season, Episode]
||||
|
|
||||
|
|
||||
|
class Season(SeasonProvider, Base):
    """NZBClub season-pack search."""

    def buildUrl(self, media):
        # The search phrase and the fixed result options are urlencoded
        # separately and joined, matching the site's expected querystring.
        search = tryUrlencode({
            'q': fireEvent('media.search_query', media, single = True),
        })

        options = tryUrlencode({
            'ig': 1,
            'rpp': 200,
            'st': 5,
            'sp': 1,
            'ns': 1,
        })
        return '%s&%s' % (search, options)
||||
|
|
||||
|
|
||||
|
class Episode(EpisodeProvider, Base):
    """NZBClub single-episode search (same query shape as Season)."""

    def buildUrl(self, media):
        # The search phrase and the fixed result options are urlencoded
        # separately and joined, matching the site's expected querystring.
        search = tryUrlencode({
            'q': fireEvent('media.search_query', media, single = True),
        })

        options = tryUrlencode({
            'ig': 1,
            'rpp': 200,
            'st': 5,
            'sp': 1,
            'ns': 1,
        })
        return '%s&%s' % (search, options)
@ -0,0 +1,36 @@ |
|||||
|
from couchpotato.core.helpers.encoding import tryUrlencode |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.event import fireEvent |
||||
|
from couchpotato.core.media._base.providers.base import MultiProvider |
||||
|
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider |
||||
|
from couchpotato.core.media._base.providers.torrent.bithdtv import Base |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'BiTHDTV' |
||||
|
|
||||
|
|
||||
|
class BiTHDTV(MultiProvider):
    """Registers the BiT-HDTV show searchers (season packs and episodes)."""

    def getTypes(self):
        # Concrete searcher classes this MultiProvider dispatches to.
        return [Season, Episode]
||||
|
|
||||
|
|
||||
|
class Season(SeasonProvider, Base):
    """BiT-HDTV season-pack search."""

    def buildUrl(self, media):
        params = {
            'search': fireEvent('media.search_query', media, single = True),
            'cat': 12  # Season cat
        }
        return tryUrlencode(params)
||||
|
|
||||
|
|
||||
|
class Episode(EpisodeProvider, Base):
    """BiT-HDTV single-episode search."""

    def buildUrl(self, media):
        params = {
            'search': fireEvent('media.search_query', media, single = True),
            'cat': 10  # Episode cat
        }
        return tryUrlencode(params)
@ -0,0 +1,41 @@ |
|||||
|
from couchpotato.core.helpers.encoding import tryUrlencode |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.event import fireEvent |
||||
|
from couchpotato.core.media._base.providers.base import MultiProvider |
||||
|
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider |
||||
|
from couchpotato.core.media._base.providers.torrent.bitsoup import Base |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'Bitsoup' |
||||
|
|
||||
|
|
||||
|
class Bitsoup(MultiProvider):
    """Registers the Bitsoup show searchers (season packs and episodes)."""

    def getTypes(self):
        # Concrete searcher classes this MultiProvider dispatches to.
        return [Season, Episode]
||||
|
|
||||
|
|
||||
|
class Season(SeasonProvider, Base):
    """Bitsoup season-pack search."""

    # For season bundles, bitsoup currently only has one category
    def buildUrl(self, media, quality):
        params = {
            'search': fireEvent('media.search_query', media, single = True),
            'cat': 45  # TV-Packs Category
        }
        return tryUrlencode(params)
||||
|
|
||||
|
|
||||
|
class Episode(EpisodeProvider, Base):
    """Bitsoup single-episode search with quality-to-category mapping."""

    # (category ids, quality identifiers served by that category)
    cat_ids = [
        ([42], ['hdtv_720p', 'webdl_720p', 'webdl_1080p', 'bdrip_1080p', 'bdrip_720p', 'brrip_1080p', 'brrip_720p']),
        ([49], ['hdtv_sd', 'webdl_480p'])
    ]
    cat_backup_id = 0

    def buildUrl(self, media, quality):
        # Look the category up first, then build the querystring.
        category = self.getCatId(quality['identifier'])[0]
        return tryUrlencode({
            'search': fireEvent('media.search_query', media, single = True),
            'cat': category,
        })
@ -0,0 +1,24 @@ |
|||||
|
from couchpotato.core.logger import CPLog |
||||
|
|
||||
|
from couchpotato.core.media._base.providers.base import MultiProvider |
||||
|
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider |
||||
|
from couchpotato.core.media._base.providers.torrent.extratorrent import Base |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'ExtraTorrent' |
||||
|
|
||||
|
|
||||
|
class ExtraTorrent(MultiProvider):
    """Registers the ExtraTorrent show searchers (season packs and episodes)."""

    def getTypes(self):
        # Concrete searcher classes this MultiProvider dispatches to.
        return [Season, Episode]
||||
|
|
||||
|
class Season(SeasonProvider, Base):
    """ExtraTorrent season searcher; search logic lives in Base."""

    # NOTE(review): category 8 is presumably ExtraTorrent's TV category and
    # is shared with Episode below — confirm there is no dedicated pack cat.
    category = 8
||||
|
|
||||
|
|
||||
|
class Episode(EpisodeProvider, Base):
    """ExtraTorrent episode searcher; search logic lives in Base."""

    # NOTE(review): same TV category as Season — confirm against the site.
    category = 8
@ -0,0 +1,28 @@ |
|||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media._base.providers.base import MultiProvider |
||||
|
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider |
||||
|
from couchpotato.core.media._base.providers.torrent.iptorrents import Base |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'IPTorrents' |
||||
|
|
||||
|
|
||||
|
class IPTorrents(MultiProvider):
    """Registers the IPTorrents show searchers (season packs and episodes)."""

    def getTypes(self):
        # Concrete searcher classes this MultiProvider dispatches to.
        return [Season, Episode]
||||
|
|
||||
|
|
||||
|
class Season(SeasonProvider, Base):
    """IPTorrents season-pack searcher; search logic lives in Base."""

    # Single category, no quality restrictions (empty dict).
    # NOTE(review): 65 is presumably the TV/Packs category — confirm.
    cat_ids = [
        ([65], {}),
    ]
||||
|
|
||||
|
|
||||
|
class Episode(EpisodeProvider, Base):
    """IPTorrents episode searcher; categories keyed by codec/resolution/source."""

    # (category ids, property constraints that category serves)
    cat_ids = [
        ([4], {'codec': ['mp4-asp'], 'resolution': ['sd'], 'source': ['hdtv', 'web']}),
        ([5], {'codec': ['mp4-avc'], 'resolution': ['720p', '1080p'], 'source': ['hdtv', 'web']}),
        ([78], {'codec': ['mp4-avc'], 'resolution': ['480p'], 'source': ['hdtv', 'web']}),
        ([79], {'codec': ['mp4-avc'], 'resolution': ['sd'], 'source': ['hdtv', 'web']})
    ]
@ -0,0 +1,34 @@ |
|||||
|
from couchpotato.core.logger import CPLog |
||||
|
|
||||
|
from couchpotato.core.media._base.providers.base import MultiProvider |
||||
|
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider |
||||
|
from couchpotato.core.media._base.providers.torrent.kickasstorrents import Base |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'KickAssTorrents' |
||||
|
|
||||
|
|
||||
|
class KickAssTorrents(MultiProvider):
    """Registers the KickassTorrents show searchers (season packs and episodes)."""

    def getTypes(self):
        # Concrete searcher classes this MultiProvider dispatches to.
        return [Season, Episode]
||||
|
|
||||
|
class Season(SeasonProvider, Base):
    """KickassTorrents season searcher; only overrides the URL templates."""

    # Templates filled in by Base; the literal '%%s' survives the first
    # interpolation so the detail URL keeps a '%s' slot for later use.
    urls = {
        'detail': '%s/%%s',
        'search': '%s/usearch/%s category:tv/%d/',
    }

    # buildUrl does not need an override
||||
|
|
||||
|
|
||||
|
class Episode(EpisodeProvider, Base):
    """KickassTorrents episode searcher; only overrides the URL templates."""

    # Same templates as Season; Base performs the interpolation.
    urls = {
        'detail': '%s/%%s',
        'search': '%s/usearch/%s category:tv/%d/',
    }

    # buildUrl does not need an override
@ -0,0 +1,60 @@ |
|||||
|
from couchpotato.core.helpers.encoding import tryUrlencode |
||||
|
from couchpotato.core.event import fireEvent |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media._base.providers.base import MultiProvider |
||||
|
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider |
||||
|
from couchpotato.core.media._base.providers.torrent.sceneaccess import Base |
||||
|
|
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'SceneAccess' |
||||
|
|
||||
|
|
||||
|
class SceneAccess(MultiProvider):
    """Registers the SceneAccess show searchers (season packs and episodes)."""

    def getTypes(self):
        # Concrete searcher classes this MultiProvider dispatches to.
        return [Season, Episode]
||||
|
|
||||
|
|
||||
|
class Season(SeasonProvider, Base):
    """SceneAccess archive search for season packs."""

    cat_ids = [
        ([26], ['hdtv_sd', 'hdtv_720p', 'webdl_720p', 'webdl_1080p']),
    ]

    def buildUrl(self, media, quality):
        # The archive URL template embeds the category id twice.
        category = self.getCatId(quality['identifier'])[0]
        base_url = self.urls['archive'] % (category, category)

        params = tryUrlencode({
            'search': fireEvent('media.search_query', media, single = True),
            'method': 3,
        })

        return "%s&%s" % (base_url, params)
||||
|
|
||||
|
|
||||
|
class Episode(EpisodeProvider, Base):
    """SceneAccess search for single episodes."""

    cat_ids = [
        ([27], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
        ([17, 11], ['hdtv_sd'])
    ]

    def buildUrl(self, media, quality):
        # The search URL template embeds the category id twice.
        category = self.getCatId(quality['identifier'])[0]
        base_url = self.urls['search'] % (category, category)

        params = tryUrlencode({
            'search': fireEvent('media.search_query', media, single = True),
            'method': 3,
        })

        return "%s&%s" % (base_url, params)
@ -0,0 +1,46 @@ |
|||||
|
from couchpotato.core.helpers.encoding import tryUrlencode |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.event import fireEvent |
||||
|
from couchpotato.core.media._base.providers.base import MultiProvider |
||||
|
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider |
||||
|
from couchpotato.core.media._base.providers.torrent.thepiratebay import Base |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'ThePirateBay' |
||||
|
|
||||
|
|
||||
|
class ThePirateBay(MultiProvider):
    """Registers the Pirate Bay show searchers (season packs and episodes)."""

    def getTypes(self):
        # Concrete searcher classes this MultiProvider dispatches to.
        return [Season, Episode]
||||
|
|
||||
|
|
||||
|
class Season(SeasonProvider, Base):
    """Pirate Bay season searcher."""

    cat_ids = [
        ([208], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
        ([205], ['hdtv_sd'])
    ]

    def buildUrl(self, media, page, cats):
        # (quoted query, page, comma-joined category ids) tuple that Base
        # substitutes into its search URL template.
        quoted = tryUrlencode('"%s"' % fireEvent('library.query', media, single = True))
        categories = ','.join(str(cat) for cat in cats)
        return (quoted, page, categories)
||||
|
|
||||
|
|
||||
|
class Episode(EpisodeProvider, Base):
    """Pirate Bay episode searcher (same categories as Season)."""

    cat_ids = [
        ([208], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
        ([205], ['hdtv_sd'])
    ]

    def buildUrl(self, media, page, cats):
        # (quoted query, page, comma-joined category ids) tuple that Base
        # substitutes into its search URL template.
        quoted = tryUrlencode('"%s"' % fireEvent('library.query', media, single = True))
        categories = ','.join(str(cat) for cat in cats)
        return (quoted, page, categories)
@ -0,0 +1,34 @@ |
|||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.event import fireEvent |
||||
|
from couchpotato.core.media._base.providers.base import MultiProvider |
||||
|
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider |
||||
|
from couchpotato.core.media._base.providers.torrent.torrentday import Base |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'TorrentDay' |
||||
|
|
||||
|
|
||||
|
class TorrentDay(MultiProvider):
    """Registers the TorrentDay show searchers (season packs and episodes)."""

    def getTypes(self):
        # Concrete searcher classes this MultiProvider dispatches to.
        return [Season, Episode]
||||
|
|
||||
|
|
||||
|
class Season(SeasonProvider, Base):
    """TorrentDay season searcher."""

    cat_ids = [
        ([14], ['hdtv_sd', 'hdtv_720p', 'webdl_720p', 'webdl_1080p']),
    ]

    def buildUrl(self, media):
        # TorrentDay takes the raw search phrase; Base does the rest.
        return fireEvent('media.search_query', media, single = True)
||||
|
|
||||
|
|
||||
|
class Episode(EpisodeProvider, Base):
    """TorrentDay episode searcher."""

    # FIX: the SD entry was written as ([2], [24], [26], ['hdtv_sd']) — a
    # 4-tuple that does not match the (category ids, quality identifiers)
    # pair shape getCatId consumes everywhere else (cf. SceneAccess's
    # ([17, 11], ['hdtv_sd'])). Merge the ids into a single list.
    cat_ids = [
        ([7], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
        ([2, 24, 26], ['hdtv_sd'])
    ]

    def buildUrl(self, media):
        # TorrentDay takes the raw search phrase; Base does the rest.
        return fireEvent('media.search_query', media, single = True)
||||
|
|
@ -0,0 +1,42 @@ |
|||||
|
from couchpotato import fireEvent |
||||
|
from couchpotato.core.helpers.encoding import tryUrlencode |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media._base.providers.base import MultiProvider |
||||
|
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider |
||||
|
from couchpotato.core.media._base.providers.torrent.torrentleech import Base |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'TorrentLeech' |
||||
|
|
||||
|
|
||||
|
class TorrentLeech(MultiProvider):
    """Registers the TorrentLeech show searchers (season packs and episodes)."""

    def getTypes(self):
        # Concrete searcher classes this MultiProvider dispatches to.
        return [Season, Episode]
||||
|
|
||||
|
|
||||
|
class Season(SeasonProvider, Base):
    """TorrentLeech season searcher."""

    cat_ids = [
        ([27], ['hdtv_sd', 'hdtv_720p', 'webdl_720p', 'webdl_1080p']),
    ]

    def buildUrl(self, media, quality):
        # (urlencoded query, category id) tuple consumed by Base's template.
        search = tryUrlencode(fireEvent('media.search_query', media, single = True))
        category = self.getCatId(quality['identifier'])[0]
        return (search, category)
||||
|
|
||||
|
class Episode(EpisodeProvider, Base):
    """TorrentLeech episode searcher."""

    cat_ids = [
        ([32], ['hdtv_720p', 'webdl_720p', 'webdl_1080p']),
        ([26], ['hdtv_sd'])
    ]

    def buildUrl(self, media, quality):
        # (urlencoded query, category id) tuple consumed by Base's template.
        search = tryUrlencode(fireEvent('media.search_query', media, single = True))
        category = self.getCatId(quality['identifier'])[0]
        return (search, category)
@ -0,0 +1,38 @@ |
|||||
|
from couchpotato.core.helpers.encoding import tryUrlencode |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.event import fireEvent |
||||
|
from couchpotato.core.media._base.providers.base import MultiProvider |
||||
|
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider |
||||
|
from couchpotato.core.media._base.providers.torrent.torrentpotato import Base |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'TorrentPotato' |
||||
|
|
||||
|
|
||||
|
class TorrentPotato(MultiProvider):
    """Registers the TorrentPotato show searchers (season packs and episodes)."""

    def getTypes(self):
        # Concrete searcher classes this MultiProvider dispatches to.
        return [Season, Episode]
||||
|
|
||||
|
|
||||
|
class Season(SeasonProvider, Base):
    """TorrentPotato season searcher (per-host credentials in the query)."""

    def buildUrl(self, media, host):
        # Credentials come from the per-host configuration entry.
        params = tryUrlencode({
            'user': host['name'],
            'passkey': host['pass_key'],
            'search': fireEvent('media.search_query', media, single = True)
        })
        return '%s?%s' % (host['host'], params)
||||
|
|
||||
|
|
||||
|
class Episode(EpisodeProvider, Base):
    """TorrentPotato episode searcher (per-host credentials in the query)."""

    def buildUrl(self, media, host):
        # Credentials come from the per-host configuration entry.
        params = tryUrlencode({
            'user': host['name'],
            'passkey': host['pass_key'],
            'search': fireEvent('media.search_query', media, single = True)
        })
        return '%s?%s' % (host['host'], params)
@ -0,0 +1,52 @@ |
|||||
|
from couchpotato.core.event import fireEvent |
||||
|
from couchpotato.core.helpers.encoding import tryUrlencode |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media._base.providers.base import MultiProvider |
||||
|
from couchpotato.core.media.show.providers.base import SeasonProvider, EpisodeProvider |
||||
|
from couchpotato.core.media._base.providers.torrent.torrentshack import Base |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'TorrentShack' |
||||
|
|
||||
|
|
||||
|
class TorrentShack(MultiProvider):
    """Registers the TorrentShack show searchers (season packs and episodes)."""

    def getTypes(self):
        # Concrete searcher classes this MultiProvider dispatches to.
        return [Season, Episode]
||||
|
|
||||
|
|
||||
|
class Season(SeasonProvider, Base):
    """TorrentShack season searcher."""

    # TorrentShack tv season search categories
    # TV-SD Pack - 980
    # TV-HD Pack - 981
    # Full Blu-ray - 970
    cat_ids = [
        ([980], ['hdtv_sd']),
        ([981], ['hdtv_720p', 'webdl_720p', 'webdl_1080p', 'bdrip_1080p', 'bdrip_720p', 'brrip_1080p', 'brrip_720p']),
        ([970], ['bluray_1080p', 'bluray_720p']),
    ]
    cat_backup_id = 980

    def buildUrl(self, media, quality):
        # (urlencoded query, category, scene-only flag) for Base's template.
        search = tryUrlencode(fireEvent('media.search_query', media, single = True))
        category = self.getCatId(quality['identifier'])[0]
        return (search, category, self.getSceneOnly())
||||
|
|
||||
|
class Episode(EpisodeProvider, Base):
    """TorrentShack episode searcher."""

    # TorrentShack tv episode search categories
    # TV/x264-HD - 600
    # TV/x264-SD - 620
    # TV/DVDrip - 700
    cat_ids = [
        ([600], ['hdtv_720p', 'webdl_720p', 'webdl_1080p', 'bdrip_1080p', 'bdrip_720p', 'brrip_1080p', 'brrip_720p']),
        ([620], ['hdtv_sd'])
    ]
    cat_backup_id = 620

    def buildUrl(self, media, quality):
        # (urlencoded query, category, scene-only flag) for Base's template.
        search = tryUrlencode(fireEvent('media.search_query', media, single = True))
        category = self.getCatId(quality['identifier'])[0]
        return (search, category, self.getSceneOnly())
@ -0,0 +1,196 @@ |
|||||
|
from caper import Caper |
||||
|
|
||||
|
from couchpotato.core.event import addEvent, fireEvent |
||||
|
from couchpotato.core.helpers.variable import getExt |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media._base.quality.base import QualityBase |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'ShowQuality' |
||||
|
|
||||
|
|
||||
|
class ShowQuality(QualityBase):
    """Quality definitions and release-name-based quality guessing for shows.

    Parses release/file names with Caper, scores every known quality
    against the parsed info, and returns the best match.
    """

    type = 'show'

    # Property vocabularies used to expand/score qualities.
    properties = {
        'codec': [
            {'identifier': 'mp2', 'label': 'MPEG-2/H.262', 'value': ['mpeg2']},
            {'identifier': 'mp4-asp', 'label': 'MPEG-4 ASP', 'value': ['divx', 'xvid']},
            {'identifier': 'mp4-avc', 'label': 'MPEG-4 AVC/H.264', 'value': ['avc', 'h264', 'x264', ('h', '264')]},
        ],
        'container': [
            {'identifier': 'avi', 'label': 'AVI', 'value': ['avi']},
            {'identifier': 'mov', 'label': 'QuickTime Movie', 'value': ['mov']},
            {'identifier': 'mpeg-4', 'label': 'MPEG-4', 'value': ['m4v', 'mp4']},
            {'identifier': 'mpeg-ts', 'label': 'MPEG-TS', 'value': ['m2ts', 'ts']},
            {'identifier': 'mkv', 'label': 'Matroska', 'value': ['mkv']},
            {'identifier': 'wmv', 'label': 'Windows Media Video', 'value': ['wmv']}
        ],
        'resolution': [
            # TODO interlaced resolutions (auto-fill these options?)
            {'identifier': 'sd'},
            {'identifier': '480p', 'width': 853, 'height': 480},
            {'identifier': '576p', 'width': 1024, 'height': 576},
            {'identifier': '720p', 'width': 1280, 'height': 720},
            {'identifier': '1080p', 'width': 1920, 'height': 1080}
        ],
        'source': [
            {'identifier': 'cam', 'label': 'Cam', 'value': ['camrip', 'hdcam']},
            {'identifier': 'hdtv', 'label': 'HDTV', 'value': ['hdtv']},
            {'identifier': 'screener', 'label': 'Screener', 'value': ['screener', 'dvdscr', 'ppvrip', 'dvdscreener', 'hdscr']},
            {'identifier': 'web', 'label': 'Web', 'value': ['webrip', ('web', 'rip'), 'webdl', ('web', 'dl')]}
        ]
    }

    qualities = [
        # TODO sizes will need to be adjusted for season packs

        # resolutions
        {'identifier': '1080p', 'label': '1080p', 'size': (1000, 25000), 'codec': ['mp4-avc'], 'container': ['mpeg-ts', 'mkv'], 'resolution': ['1080p']},
        {'identifier': '720p', 'label': '720p', 'size': (1000, 5000), 'codec': ['mp4-avc'], 'container': ['mpeg-ts', 'mkv'], 'resolution': ['720p']},
        {'identifier': '480p', 'label': '480p', 'size': (800, 5000), 'codec': ['mp4-avc'], 'container': ['mpeg-ts', 'mkv'], 'resolution': ['480p']},

        # sources
        {'identifier': 'cam', 'label': 'Cam', 'size': (800, 5000), 'source': ['cam']},
        {'identifier': 'hdtv', 'label': 'HDTV', 'size': (800, 5000), 'source': ['hdtv']},
        {'identifier': 'screener', 'label': 'Screener', 'size': (800, 5000), 'source': ['screener']},
        {'identifier': 'web', 'label': 'Web', 'size': (800, 5000), 'source': ['web']},
    ]

    def __init__(self):
        super(ShowQuality, self).__init__()

        addEvent('quality.guess', self.guess)

        # Caper parses release names into structured info chains.
        self.caper = Caper()

    def guess(self, files, extra = None, size = None, types = None):
        """Guess the quality of `files`; returns the best quality dict, or
        None when nothing scores above zero.

        :param files: list of file/release names to score
        :param extra: extra hints; a non-empty dict bypasses the cache
        :param size: unused here (kept for event-signature compatibility)
        :param types: optional media-type filter; skip when 'show' not in it
        """
        if types and self.type not in types:
            return

        log.debug('Trying to guess quality of: %s', files)

        if not extra: extra = {}

        # Create hash for cache
        cache_key = str([f.replace('.' + getExt(f), '') if len(getExt(f)) < 4 else f for f in files])
        cached = self.getCache(cache_key)
        if cached and len(extra) == 0:
            return cached

        qualities = self.all()

        # Score files against each quality
        score = self.score(files, qualities = qualities)

        if score is None:
            return None

        # Return nothing if all scores are <= 0 (simplified from a manual counter)
        if not any(s['score'] > 0 for s in score.values()):
            return None

        highest_quality = max(score, key = lambda p: score[p]['score'])
        if highest_quality:
            for quality in qualities:
                if quality.get('identifier') == highest_quality:
                    quality['is_3d'] = False
                    if score[highest_quality].get('3d'):
                        quality['is_3d'] = True
                    return self.setCache(cache_key, quality)

        return None

    def score(self, files, qualities = None, types = None):
        """Score every quality against the parsed info of each file.

        :return: dict mapping quality identifier -> {'score': int, '3d': dict},
                 or None when `types` excludes shows
        """
        if types and self.type not in types:
            return None

        if not qualities:
            qualities = self.all()

        qualities_expanded = [self.expand(q.copy()) for q in qualities]

        # Start with 0
        score = {}
        for quality in qualities:
            score[quality.get('identifier')] = {
                'score': 0,
                '3d': {}
            }

        for cur_file in files:
            match = self.caper.parse(cur_file, 'scene')

            if len(match.chains) < 1:
                # FIX: the file name was missing from the log arguments, so
                # the message always printed a literal "%s".
                log.info2('Unable to parse "%s", ignoring file', cur_file)
                continue

            chain = match.chains[0]

            for quality in qualities_expanded:
                property_score = self.propertyScore(quality, chain)

                self.calcScore(score, quality, property_score)

        return score

    def propertyScore(self, quality, chain):
        """Score how well a parsed chain's 'video' info matches `quality`:
        +10 per matched property group, -10 per required-but-missing group,
        +5 when the quality does not constrain that group."""
        score = 0

        if 'video' not in chain.info:
            return 0

        info = fireEvent('matcher.flatten_info', chain.info['video'], single = True)

        for key in ['codec', 'resolution', 'source']:
            if key not in quality:
                # No specific property required
                score += 5
                continue

            available = list(self.getInfo(info, key))
            found = False

            # Renamed from `property` to avoid shadowing the builtin.
            for prop in quality[key]:
                required = prop['value'] if 'value' in prop else [prop['identifier']]

                if set(available) & set(required):
                    score += 10
                    found = True
                    break

            if not found:
                score -= 10

        return score

    def getInfo(self, info, key):
        """Yield lowercased values for `key` from flattened matcher info;
        list values become lowercased tuples (hashable for set operations)."""
        for value in info.get(key, []):
            if isinstance(value, list):
                yield tuple([x.lower() for x in value])
            else:
                yield value.lower()

    def calcScore(self, score, quality, add_score, threedscore = (0, None), penalty = True):
        """Apply `add_score` to the quality, then penalise allowed/lower
        qualities so a clear winner emerges.

        NOTE(review): self.cached_order and self.order are not set in this
        class — presumably provided by QualityBase; confirm.
        """
        score[quality['identifier']]['score'] += add_score

        # Set order for allow calculation (and cache)
        if not self.cached_order:
            self.cached_order = {}
            for q in self.qualities:
                self.cached_order[q.get('identifier')] = self.qualities.index(q)

        if penalty and add_score != 0:
            for allow in quality.get('allow', []):
                score[allow]['score'] -= 40 if self.cached_order[allow] < self.cached_order[quality['identifier']] else 5

            # Give penalty to all lower qualities
            for q in self.qualities[self.order.index(quality.get('identifier'))+1:]:
                if score.get(q.get('identifier')):
                    score[q.get('identifier')]['score'] -= 1
@ -0,0 +1,109 @@ |
|||||
|
import time |
||||
|
|
||||
|
from couchpotato import fireEvent, get_db, Env |
||||
|
from couchpotato.api import addApiView |
||||
|
from couchpotato.core.event import addEvent, fireEventAsync |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media._base.searcher.main import Searcher |
||||
|
from couchpotato.core.media._base.searcher.main import SearchSetupError |
||||
|
from couchpotato.core.media.show import ShowTypeBase |
||||
|
from couchpotato.core.helpers.variable import strtotime |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'EpisodeSearcher' |
||||
|
|
||||
|
|
||||
|
class EpisodeSearcher(Searcher, ShowTypeBase): |
||||
|
type = 'episode' |
||||
|
|
||||
|
in_progress = False |
||||
|
|
||||
|
def __init__(self): |
||||
|
super(EpisodeSearcher, self).__init__() |
||||
|
|
||||
|
addEvent('%s.searcher.all' % self.getType(), self.searchAll) |
||||
|
addEvent('%s.searcher.single' % self.getType(), self.single) |
||||
|
addEvent('searcher.correct_release', self.correctRelease) |
||||
|
|
||||
|
addApiView('%s.searcher.full_search' % self.getType(), self.searchAllView, docs = { |
||||
|
'desc': 'Starts a full search for all wanted shows', |
||||
|
}) |
||||
|
|
||||
|
addApiView('%s.searcher.single' % self.getType(), self.singleView) |
||||
|
|
||||
|
def searchAllView(self, **kwargs): |
||||
|
fireEventAsync('%s.searcher.all' % self.getType(), manual = True) |
||||
|
|
||||
|
return { |
||||
|
'success': not self.in_progress |
||||
|
} |
||||
|
|
||||
|
def searchAll(self, manual = False): |
||||
|
pass |
||||
|
|
||||
|
def singleView(self, media_id, **kwargs): |
||||
|
db = get_db() |
||||
|
media = db.get('id', media_id) |
||||
|
|
||||
|
return { |
||||
|
'result': fireEvent('%s.searcher.single' % self.getType(), media, single = True) |
||||
|
} |
||||
|
|
||||
|
def correctRelease(self, release = None, media = None, quality = None, **kwargs): |
||||
|
if media.get('type') != 'show.episode': return |
||||
|
|
||||
|
retention = Env.setting('retention', section = 'nzb') |
||||
|
|
||||
|
if release.get('seeders') is None and 0 < retention < release.get('age', 0): |
||||
|
log.info2('Wrong: Outside retention, age is %s, needs %s or lower: %s', (release['age'], retention, release['name'])) |
||||
|
return False |
||||
|
|
||||
|
# Check for required and ignored words |
||||
|
if not self.correctWords(release['name'], media): |
||||
|
return False |
||||
|
|
||||
|
preferred_quality = quality if quality else fireEvent('quality.single', identifier = quality['identifier'], single = True) |
||||
|
|
||||
|
# Contains lower quality string |
||||
|
contains_other = self.containsOtherQuality(release, preferred_quality = preferred_quality, types= [self._type]) |
||||
|
if contains_other != False: |
||||
|
log.info2('Wrong: %s, looking for %s, found %s', (release['name'], quality['label'], [x for x in contains_other] if contains_other else 'no quality')) |
||||
|
return False |
||||
|
|
||||
|
# TODO Matching is quite costly, maybe we should be caching release matches somehow? (also look at caper optimizations) |
||||
|
match = fireEvent('matcher.match', release, media, quality, single = True) |
||||
|
if match: |
||||
|
return match.weight |
||||
|
|
||||
|
return False |
||||
|
|
||||
|
def couldBeReleased(self, is_pre_release, dates, media):
    """
    Determine if episode could have aired by now

    @param is_pre_release: True if quality is pre-release, otherwise False. Ignored for episodes.
    @param dates: ignored for episodes.
    @param media: media dictionary to retrieve episode air date from.
    @return: True when the air date lies in the past, otherwise False.
    """
    now = time.time()
    air_date = strtotime(media.get('info', {}).get('released'), '%Y-%m-%d')

    return air_date < now
||||
|
|
||||
|
def getProfileId(self, media): |
||||
|
assert media and media['type'] == 'show.episode' |
||||
|
|
||||
|
profile_id = None |
||||
|
|
||||
|
related = fireEvent('library.related', media, single = True) |
||||
|
if related: |
||||
|
show = related.get('show') |
||||
|
if show: |
||||
|
profile_id = show.get('profile_id') |
||||
|
|
||||
|
return profile_id |
@ -0,0 +1,137 @@ |
|||||
|
from couchpotato import get_db, Env |
||||
|
from couchpotato.api import addApiView |
||||
|
from couchpotato.core.event import addEvent, fireEventAsync, fireEvent |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media._base.searcher.main import Searcher |
||||
|
from couchpotato.core.media.movie.searcher import SearchSetupError |
||||
|
from couchpotato.core.media.show import ShowTypeBase |
||||
|
from couchpotato.core.helpers.variable import getTitle |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'SeasonSearcher' |
||||
|
|
||||
|
|
||||
|
class SeasonSearcher(Searcher, ShowTypeBase):
    """Searcher for whole show seasons.

    Registers the season search events and API endpoint. When not every
    episode is available (or the user prefers episode releases), searching
    falls back to per-episode searches before trying a full-season release.
    """

    type = 'season'

    # Guards against overlapping full searches; reported via the API view.
    in_progress = False

    def __init__(self):
        super(SeasonSearcher, self).__init__()

        addEvent('%s.searcher.all' % self.getType(), self.searchAll)
        addEvent('%s.searcher.single' % self.getType(), self.single)
        addEvent('searcher.correct_release', self.correctRelease)

        addApiView('%s.searcher.full_search' % self.getType(), self.searchAllView, docs = {
            'desc': 'Starts a full search for all wanted seasons',
        })

    def searchAllView(self, **kwargs):
        # Fire the full search in the background; tell the caller whether
        # a search was already running.
        fireEventAsync('%s.searcher.all' % self.getType(), manual = True)

        return {
            'success': not self.in_progress
        }

    def searchAll(self, manual = False):
        # Stub: full-library season search is not implemented yet.
        pass

    def single(self, media, search_protocols = None, manual = False, force_download = False, notify = True):
        """Search for one season; returns True-ish on success, False otherwise."""

        # The user can prefer episode releases over season releases.
        prefer_episode_releases = self.conf('prefer_episode_releases')

        # NOTE(review): `episodes` is never populated, so the per-episode
        # loop below is currently a no-op — confirm whether the wanted
        # episodes should be collected here (e.g. from 'library.related').
        episodes = []
        all_episodes_available = self.couldBeReleased(False, [], media)

        event_type = 'show.season.searcher.started'
        related = fireEvent('library.related', media, single = True)
        default_title = getTitle(related.get('show'))
        fireEvent('notify.frontend', type = event_type, data = {'_id': media['_id']}, message = 'Searching for "%s"' % default_title)

        result = False
        if not all_episodes_available or prefer_episode_releases:
            # Try episode-by-episode; bail out as soon as one episode fails.
            result = True
            for episode in episodes:
                if not fireEvent('show.episode.searcher.single', episode, search_protocols, manual, force_download, False):
                    result = False
                    break

        if not result and all_episodes_available:
            # The user might have preferred episode releases over season
            # releases, but that did not work out, fallback to season releases.
            result = super(SeasonSearcher, self).single(media, search_protocols, manual, force_download, False)

        event_type = 'show.season.searcher.ended'
        fireEvent('notify.frontend', type = event_type, data = {'_id': media['_id']})

        return result

    def correctRelease(self, release = None, media = None, quality = None, **kwargs):
        """Validate a release found for a season.

        :return: None for non-season media (lets other handlers run),
                 False when rejected, otherwise the matcher's weight.
        """
        if media.get('type') != 'show.season':
            return

        retention = Env.setting('retention', section = 'nzb')

        # Usenet releases (no 'seeders' key) outside retention can't be fetched.
        if release.get('seeders') is None and 0 < retention < release.get('age', 0):
            log.info2('Wrong: Outside retention, age is %s, needs %s or lower: %s', (release['age'], retention, release['name']))
            return False

        # Check for required and ignored words
        if not self.correctWords(release['name'], media):
            return False

        # BUGFIX: the old fallback fireEvent('quality.single',
        # identifier = quality['identifier'], ...) dereferenced the quality
        # it had just found to be falsy and always raised a TypeError
        # (quality['label'] is also needed below). Reject instead of crashing.
        if not quality:
            return False
        preferred_quality = quality

        # Contains lower quality string
        contains_other = self.containsOtherQuality(release, preferred_quality = preferred_quality, types = [self._type])
        if contains_other != False:
            log.info2('Wrong: %s, looking for %s, found %s', (release['name'], quality['label'], [x for x in contains_other] if contains_other else 'no quality'))
            return False

        # TODO Matching is quite costly, maybe we should be caching release matches somehow? (also look at caper optimizations)
        match = fireEvent('matcher.match', release, media, quality, single = True)
        if match:
            return match.weight

        return False

    def couldBeReleased(self, is_pre_release, dates, media):
        """True when the season has episodes and every one of them is 'active'."""
        episodes = []
        all_episodes_available = True

        related = fireEvent('library.related', media, single = True)
        if related:
            for episode in related.get('episodes', []):
                if episode.get('status') == 'active':
                    episodes.append(episode)
                else:
                    all_episodes_available = False

        # No active episodes at all also counts as "not available".
        if not episodes:
            all_episodes_available = False

        return all_episodes_available

    def getTitle(self, media):
        # FIXME: Season media type should have a title.
        # e.g. <Show> Season <Number>
        title = None
        related = fireEvent('library.related', media, single = True)
        if related:
            title = getTitle(related.get('show'))
        return title

    def getProfileId(self, media):
        # Seasons inherit the quality profile from their parent show.
        assert media and media['type'] == 'show.season'

        profile_id = None

        related = fireEvent('library.related', media, single = True)
        if related:
            show = related.get('show')
            if show:
                profile_id = show.get('profile_id')

        return profile_id
@ -0,0 +1,93 @@ |
|||||
|
from couchpotato import get_db |
||||
|
from couchpotato.api import addApiView |
||||
|
from couchpotato.core.event import fireEvent, addEvent, fireEventAsync |
||||
|
from couchpotato.core.helpers.variable import getTitle |
||||
|
from couchpotato.core.logger import CPLog |
||||
|
from couchpotato.core.media._base.searcher.main import Searcher |
||||
|
from couchpotato.core.media._base.searcher.main import SearchSetupError |
||||
|
from couchpotato.core.media.show import ShowTypeBase |
||||
|
|
||||
|
log = CPLog(__name__) |
||||
|
|
||||
|
autoload = 'ShowSearcher' |
||||
|
|
||||
|
|
||||
|
class ShowSearcher(Searcher, ShowTypeBase):
    """Searcher for whole shows: fans out to a season search per wanted season."""

    type = 'show'

    # Guards against overlapping full searches; reported via the API view.
    in_progress = False

    def __init__(self):
        super(ShowSearcher, self).__init__()

        addEvent('%s.searcher.all' % self.getType(), self.searchAll)
        addEvent('%s.searcher.single' % self.getType(), self.single)
        addEvent('searcher.get_search_title', self.getSearchTitle)

        addApiView('%s.searcher.full_search' % self.getType(), self.searchAllView, docs = {
            'desc': 'Starts a full search for all wanted episodes',
        })

    def searchAllView(self, **kwargs):
        # Fire the full search in the background; tell the caller whether
        # a search was already running.
        fireEventAsync('%s.searcher.all' % self.getType(), manual = True)

        return {
            'success': not self.in_progress
        }

    def searchAll(self, manual = False):
        # Stub: full-library show search is not implemented yet.
        pass

    def single(self, media, search_protocols = None, manual = False, force_download = False, notify = True):
        """Search for one show by searching each of its (numbered) seasons."""

        db = get_db()
        profile = db.get('id', media['profile_id'])

        if not profile or (media['status'] == 'done' and not manual):
            log.debug('Media does not have a profile or already done, assuming in manage tab.')
            fireEvent('media.restatus', media['_id'], single = True)
            return

        default_title = getTitle(media)
        if not default_title:
            log.error('No proper info found for media, removing it from library to stop it from causing more issues.')
            fireEvent('media.delete', media['_id'], single = True)
            return

        fireEvent('notify.frontend', type = 'show.searcher.started.%s' % media['_id'], data = True, message = 'Searching for "%s"' % default_title)

        seasons = []

        tree = fireEvent('library.tree', media, single = True)
        if tree:
            for season in tree.get('seasons', []):
                # BUGFIX: was `if season.get('info'): continue`, which skipped
                # every season that HAS info and then crashed below on
                # season['info'] for the seasons that don't.
                if not season.get('info'):
                    continue

                # Skip specials (and seasons missing 'number') for now
                # TODO: set status for specials to skipped by default
                if not season['info'].get('number'):
                    continue

                seasons.append(season)

        result = True
        for season in seasons:
            # BUGFIX: the season being searched was not passed along, so the
            # season searcher never received its media argument (compare
            # SeasonSearcher passing `episode` to 'show.episode.searcher.single').
            if not fireEvent('show.season.searcher.single', season, search_protocols, manual, force_download, False):
                result = False
                break

        fireEvent('notify.frontend', type = 'show.searcher.ended.%s' % media['_id'], data = True)

        return result

    def getSearchTitle(self, media):
        """Resolve the show title used in search queries for show/season/episode media."""
        show = None
        if media.get('type') == 'show':
            show = media
        elif media.get('type') in ('show.season', 'show.episode'):
            related = fireEvent('library.related', media, single = True)
            show = related['show']

        if show:
            return getTitle(show)
@ -1,5 +0,0 @@ |
|||||
from .main import QualityPlugin |
|
||||
|
|
||||
|
|
||||
def autoload(): |
|
||||
return QualityPlugin() |
|
@ -0,0 +1,42 @@ |
|||||
|
# Copyright 2013 Dean Gardiner <gardiner91@gmail.com> |
||||
|
# |
||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
|
# you may not use this file except in compliance with the License. |
||||
|
# You may obtain a copy of the License at |
||||
|
# |
||||
|
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
|
# |
||||
|
# Unless required by applicable law or agreed to in writing, software |
||||
|
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
|
# See the License for the specific language governing permissions and |
||||
|
# limitations under the License. |
||||
|
|
||||
|
|
||||
|
from qcond.transformers.merge import MergeTransformer |
||||
|
from qcond.transformers.slice import SliceTransformer |
||||
|
from qcond.transformers.strip_common import StripCommonTransformer |
||||
|
|
||||
|
|
||||
|
__version_info__ = ('0', '1', '0') |
||||
|
__version_branch__ = 'master' |
||||
|
|
||||
|
__version__ = "%s%s" % ( |
||||
|
'.'.join(__version_info__), |
||||
|
'-' + __version_branch__ if __version_branch__ else '' |
||||
|
) |
||||
|
|
||||
|
|
||||
|
class QueryCondenser(object):
    """Condenses a list of candidate titles into distinct search queries."""

    def __init__(self):
        # Pipeline order matters: merge near-duplicates first, then slice,
        # then strip the common prefixes/suffixes.
        self.transformers = [
            MergeTransformer(),
            SliceTransformer(),
            StripCommonTransformer()
        ]

    def distinct(self, titles):
        """Run *titles* through every transformer in order and return the result."""
        current = titles
        for transformer in self.transformers:
            current = transformer.run(current)
        return current
@ -0,0 +1,23 @@ |
|||||
|
# Copyright 2013 Dean Gardiner <gardiner91@gmail.com> |
||||
|
# |
||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
|
# you may not use this file except in compliance with the License. |
||||
|
# You may obtain a copy of the License at |
||||
|
# |
||||
|
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
|
# |
||||
|
# Unless required by applicable law or agreed to in writing, software |
||||
|
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
|
# See the License for the specific language governing permissions and |
||||
|
# limitations under the License. |
||||
|
|
||||
|
|
||||
|
import sys

# True when running under Python 3.
PY3 = sys.version_info[0] == 3

# Expose a single lazy-range name for both interpreters: on Python 3 the
# builtin `range` is already lazy; on Python 2 re-export `xrange` itself.
if PY3:
    xrange = range
else:
    xrange = xrange
@ -0,0 +1,84 @@ |
|||||
|
# Copyright 2013 Dean Gardiner <gardiner91@gmail.com> |
||||
|
# |
||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
|
# you may not use this file except in compliance with the License. |
||||
|
# You may obtain a copy of the License at |
||||
|
# |
||||
|
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
|
# |
||||
|
# Unless required by applicable law or agreed to in writing, software |
||||
|
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
|
# See the License for the specific language governing permissions and |
||||
|
# limitations under the License. |
||||
|
|
||||
|
|
||||
|
from difflib import SequenceMatcher |
||||
|
import re |
||||
|
import sys |
||||
|
from logr import Logr |
||||
|
from qcond.compat import xrange |
||||
|
|
||||
|
|
||||
|
PY3 = sys.version_info[0] == 3 |
||||
|
|
||||
|
|
||||
|
def simplify(s): |
||||
|
s = s.lower() |
||||
|
s = re.sub(r"(\w)'(\w)", r"\1\2", s) |
||||
|
return s |
||||
|
|
||||
|
|
||||
|
def strip(s):
    """Remove leading and trailing non-word characters from *s*."""
    match = re.match(r"^(\W*)(.*?)(\W*)$", s)
    return match.group(2)
||||
|
|
||||
|
|
||||
|
def create_matcher(a, b, swap_longest = True, case_sensitive = False):
    """Build a SequenceMatcher over *a* and *b*.

    By default the longer string is placed first and both strings are
    upper-cased so the comparison is case-insensitive.
    """
    if swap_longest and len(b) > len(a):
        a, b = b, a

    if not case_sensitive:
        a, b = a.upper(), b.upper()

    return SequenceMatcher(None, a, b)
||||
|
|
||||
|
|
||||
|
def first(function_or_none, sequence):
    """Return the first item of *sequence* matching the predicate.

    When *function_or_none* is None, return the first truthy item (the
    `filter(None, ...)` convention). Returns None when nothing matches.
    """
    # `filter` yields an iterator on Python 3 and a list on Python 2;
    # taking the first element of either gives identical results.
    for item in filter(function_or_none, sequence):
        return item
    return None
||||
|
|
||||
|
def sorted_append(sequence, item, func):
    """Insert *item* before the first element for which func(element) is truthy.

    Appends at the end when no element matches (or the sequence is empty).
    Mutates *sequence* in place.
    """
    for index, existing in enumerate(sequence):
        if func(existing):
            sequence.insert(index, item)
            return

    sequence.append(item)
||||
|
|
||||
|
def itemsMatch(L1, L2):
    """True when both sequences contain exactly the same items, order ignored."""
    if len(L1) != len(L2):
        return False
    return sorted(L1) == sorted(L2)
||||
|
|
||||
|
def distinct(sequence):
    """Return the items of *sequence* with duplicates removed.

    First-seen order is preserved; items only need to support `==`
    (they are not required to be hashable).
    """
    unique = []
    for element in sequence:
        if element in unique:
            continue
        unique.append(element)
    return unique
@ -0,0 +1,21 @@ |
|||||
|
# Copyright 2013 Dean Gardiner <gardiner91@gmail.com> |
||||
|
# |
||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
|
# you may not use this file except in compliance with the License. |
||||
|
# You may obtain a copy of the License at |
||||
|
# |
||||
|
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
|
# |
||||
|
# Unless required by applicable law or agreed to in writing, software |
||||
|
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
|
# See the License for the specific language governing permissions and |
||||
|
# limitations under the License. |
||||
|
|
||||
|
|
||||
|
class Transformer(object):
    """Base class for title-list transformers in the qcond pipeline."""

    def __init__(self):
        # No shared state; present so subclasses can call super().__init__().
        pass

    def run(self, titles):
        """Return a transformed list of titles; subclasses must override."""
        raise NotImplementedError()
@ -0,0 +1,241 @@ |
|||||
|
# Copyright 2013 Dean Gardiner <gardiner91@gmail.com> |
||||
|
# |
||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
|
# you may not use this file except in compliance with the License. |
||||
|
# You may obtain a copy of the License at |
||||
|
# |
||||
|
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
|
# |
||||
|
# Unless required by applicable law or agreed to in writing, software |
||||
|
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
|
# See the License for the specific language governing permissions and |
||||
|
# limitations under the License. |
||||
|
|
||||
|
|
||||
|
from operator import itemgetter |
||||
|
from logr import Logr |
||||
|
from qcond.helpers import simplify, strip, first, sorted_append, distinct |
||||
|
from qcond.transformers.base import Transformer |
||||
|
from qcond.compat import xrange |
||||
|
|
||||
|
|
||||
|
class MergeTransformer(Transformer):
    """Merges similar titles via a weighted prefix tree.

    Each title is split into words and inserted into a tree of DNodes;
    shared prefixes share nodes (whose weight grows). Sibling branches are
    then merged, and the surviving word sequences are returned with the
    best-scoring candidates first.
    """

    def __init__(self):
        super(MergeTransformer, self).__init__()

    def run(self, titles):
        # Normalize (lower-case, drop apostrophes) and de-duplicate first.
        titles = distinct([simplify(title) for title in titles])

        Logr.info(str(titles))

        Logr.debug("------------------------------------------------------------")

        # Build the prefix tree; `tails` holds the leaf node for each title.
        root, tails = self.parse(titles)

        Logr.debug("--------------------------PARSE-----------------------------")

        for node in root:
            print_tree(node)

        Logr.debug("--------------------------MERGE-----------------------------")

        self.merge(root)

        Logr.debug("--------------------------FINAL-----------------------------")

        for node in root:
            print_tree(node)

        Logr.debug("--------------------------RESULT-----------------------------")

        # Walk each leaf back to the root to rebuild candidate phrases,
        # accumulating scores for identical phrases.
        scores = {}
        results = []

        for tail in tails:
            score, value, original_value = tail.full_value()

            if value in scores:
                scores[value] += score
            else:
                results.append((value, original_value))
                scores[value] = score

            Logr.debug("%s %s %s", score, value, original_value)

        # Highest accumulated score first; ties broken by original title.
        sorted_results = sorted(results, key=lambda item: (scores[item[0]], item[1]), reverse = True)

        return [result[0] for result in sorted_results]

    def parse(self, titles):
        """Insert every title word-by-word into the tree.

        Returns (root, tails): the list of top-level nodes and the leaf
        node reached for each title.
        """
        root = []
        tails = []

        for title in titles:
            Logr.debug(title)

            cur = None
            words = title.split(' ')

            for wx in xrange(len(words)):
                word = strip(words[wx])

                if cur is None:
                    # First word: attach to (or create) a top-level node.
                    cur = find_node(root, word)

                    if cur is None:
                        cur = DNode(word, None, num_children=len(words) - wx, original_value=title)
                        root.append(cur)
                else:
                    parent = cur
                    parent.weight += 1

                    # Descend; create the child when missing, keeping the
                    # children ordered by num_children (descending).
                    cur = find_node(parent.right, word)

                    if cur is None:
                        Logr.debug("%s %d", word, len(words) - wx)
                        cur = DNode(word, parent, num_children=len(words) - wx)
                        sorted_append(parent.right, cur, lambda a: a.num_children < cur.num_children)
                    else:
                        cur.weight += 1

            tails.append(cur)

        return root, tails

    def merge(self, root):
        """Merge the child branches of every top-level node, in place."""
        for x in range(len(root)):
            Logr.debug(root[x])
            root[x].right = self._merge(root[x].right)
            Logr.debug('=================================================================')

        return root

    def get_nodes_right(self, value):
        """Collect *value* (a node or list of nodes) plus all right-descendants."""
        if type(value) is not list:
            value = [value]

        nodes = []

        for node in value:
            nodes.append(node)

            for child in self.get_nodes_right(node.right):
                nodes.append(child)

        return nodes

    def destroy_nodes_right(self, value):
        """Mark *value* and every right-descendant as dead (values cleared)."""
        nodes = self.get_nodes_right(value)

        for node in nodes:
            node.value = None
            node.dead = True

    def _merge(self, nodes, depth = 0):
        """Fold sibling *nodes* into the first one, then recurse into children."""
        Logr.debug(str('\t' * depth) + str(nodes))

        if not len(nodes):
            return []

        top = nodes[0]

        # Merge into top
        for x in range(len(nodes)):
            # Merge extra results into top
            if x > 0:
                # Absorb this sibling: top loses its own word, gains the
                # sibling's weight, and top's previous subtree is discarded
                # before adopting the sibling's children.
                top.value = None
                top.weight += nodes[x].weight
                self.destroy_nodes_right(top.right)

                if len(nodes[x].right):
                    top.join_right(nodes[x].right)

                Logr.debug("= %s joined %s", nodes[x], top)

                nodes[x].dead = True

        nodes = [n for n in nodes if not n.dead]

        # Traverse further
        for node in nodes:
            if len(node.right):
                node.right = self._merge(node.right, depth + 1)

        return nodes
||||
|
|
||||
|
|
||||
|
def print_tree(node, depth = 0):
    """Debug-log *node* and, recursively, its children (indented by depth)."""
    Logr.debug(str('\t' * depth) + str(node))

    if not len(node.right):
        # Leaf: log the assembled phrase for this branch.
        Logr.debug(node.full_value()[1])
        return

    for child in node.right:
        print_tree(child, depth + 1)
||||
|
|
||||
|
|
||||
|
def find_node(node_list, value):
    """Return the first node in *node_list* whose value equals *value*, else None."""
    return next((node for node in node_list if node.value == value), None)
||||
|
|
||||
|
|
||||
|
class DNode(object):
    """Node in MergeTransformer's prefix tree.

    Holds one word of a title. `right` is the list of child nodes, `weight`
    counts how many titles passed through this node, `num_children` is the
    number of words remaining when the node was created, and
    `original_value` stores the full title (set on root nodes only).
    """

    def __init__(self, value, parent, right=None, weight=1, num_children=None, original_value=None):
        # The word stored at this node (cleared to None once merged away).
        self.value = value

        self.parent = parent

        # Avoid the shared-mutable-default pitfall for the children list.
        if right is None:
            right = []
        self.right = right

        self.weight = weight

        self.original_value = original_value
        self.num_children = num_children

        # Set True when this node has been merged into another node.
        self.dead = False

    def join_right(self, nodes):
        """Adopt *nodes* as children, folding same-valued duplicates together."""
        for node in nodes:
            duplicate = first(lambda x: x.value == node.value, self.right)

            if duplicate:
                duplicate.weight += node.weight
                duplicate.join_right(node.right)
            else:
                node.parent = self
                self.right.append(node)

    def full_value(self):
        """Walk from this leaf up to the root, rebuilding the phrase.

        Returns a 3-tuple: (average weight per surviving word, the joined
        phrase, and the root's original title — or None if no root found).
        """
        words = []
        total_score = 0

        cur = self
        root = None

        while cur is not None:
            # Dead/cleared nodes contribute neither a word nor a score.
            if cur.value and not cur.dead:
                words.insert(0, cur.value)
                total_score += cur.weight

            if cur.parent is None:
                root = cur
            cur = cur.parent

        return float(total_score) / len(words), ' '.join(words), root.original_value if root else None

    def __repr__(self):
        return '<%s value:"%s", weight: %s, num_children: %s%s%s>' % (
            'DNode',
            self.value,
            self.weight,
            self.num_children,
            (', original_value: %s' % self.original_value) if self.original_value else '',
            ' REMOVING' if self.dead else ''
        )
@ -0,0 +1,280 @@ |
|||||
|
# Copyright 2013 Dean Gardiner <gardiner91@gmail.com> |
||||
|
# |
||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
|
# you may not use this file except in compliance with the License. |
||||
|
# You may obtain a copy of the License at |
||||
|
# |
||||
|
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
|
# |
||||
|
# Unless required by applicable law or agreed to in writing, software |
||||
|
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
|
# See the License for the specific language governing permissions and |
||||
|
# limitations under the License. |
||||
|
|
||||
|
|
||||
|
from logr import Logr |
||||
|
from qcond.helpers import create_matcher |
||||
|
from qcond.transformers.base import Transformer |
||||
|
|
||||
|
|
||||
|
class SliceTransformer(Transformer):
    """Collapses near-duplicate titles using pairwise similarity links."""

    def __init__(self):
        super(SliceTransformer, self).__init__()

    def run(self, titles):
        nodes = []

        # Create a node for each title
        for title in titles:
            nodes.append(SimNode(title))

        # Calculate similarities between nodes
        for node in nodes:
            calculate_sim_links(node, [n for n in nodes if n != node])

        # Drop the longer title of any pair that is >= 90% similar.
        kill_nodes_above(nodes, 0.90)

        Logr.debug('---------------------------------------------------------------------')

        print_link_tree(nodes)
        Logr.debug('%s %s', len(nodes), [n.value for n in nodes])

        Logr.debug('---------------------------------------------------------------------')

        # Drop titles that merely append extra text to a surviving title.
        kill_trailing_nodes(nodes)

        Logr.debug('---------------------------------------------------------------------')

        # Sort remaining nodes by 'num_merges'
        nodes = sorted(nodes, key=lambda n: n.num_merges, reverse=True)

        print_link_tree(nodes)

        Logr.debug('---------------------------------------------------------------------')

        Logr.debug('%s %s', len(nodes), [n.value for n in nodes])

        return [n.value for n in nodes]
||||
|
|
||||
|
|
||||
|
class SimLink(object):
    """Similarity relationship between two SimNodes."""

    def __init__(self, similarity, opcodes, stats):
        # Ratio from SequenceMatcher, its raw opcodes, and aggregated stats.
        self.similarity = similarity
        self.opcodes = opcodes
        self.stats = stats
||||
|
|
||||
|
|
||||
|
class SimNode(object):
    """A candidate title plus its similarity links to other nodes."""

    def __init__(self, value):
        self.value = value

        # Marked True once merged away; survivors count absorbed nodes.
        self.dead = False
        self.num_merges = 0

        # {<other SimNode>: <SimLink>}
        self.links = {}
||||
|
|
||||
|
|
||||
|
def kill_nodes(nodes, killed_nodes):
    """Remove *killed_nodes* from the root list and from every survivor's links.

    Mutates *nodes* (and the surviving nodes' link dicts) in place.
    """
    for victim in killed_nodes:
        if victim in nodes:
            nodes.remove(victim)

    for victim in killed_nodes:
        for survivor in nodes:
            survivor.links.pop(victim, None)
||||
|
|
||||
|
|
||||
|
def kill_nodes_above(nodes, above_sim):
    """Kill one node of every pair whose similarity is >= *above_sim*.

    The node with the longer value dies and the survivor's num_merges is
    incremented. Mutates *nodes* in place via kill_nodes().
    """
    killed_nodes = []

    for node in nodes:
        # Skip nodes already killed earlier in this pass.
        if node.dead:
            continue

        Logr.debug(node.value)

        for link_node, link in node.links.items():
            if link_node.dead:
                continue

            Logr.debug('\t%0.2f -- %s', link.similarity, link_node.value)

            if link.similarity >= above_sim:
                # Keep the shorter title of the very-similar pair.
                if len(link_node.value) > len(node.value):
                    Logr.debug('\t\tvery similar, killed this node')

                    link_node.dead = True
                    node.num_merges += 1
                    killed_nodes.append(link_node)
                else:
                    Logr.debug('\t\tvery similar, killed owner')

                    node.dead = True
                    link_node.num_merges += 1
                    killed_nodes.append(node)

    kill_nodes(nodes, killed_nodes)
||||
|
|
||||
|
|
||||
|
def print_link_tree(nodes):
    """Debug-log every node, its merge count and its similarity links."""
    for node in nodes:
        Logr.debug(node.value)
        Logr.debug('\tnum_merges: %s', node.num_merges)

        if len(node.links):
            Logr.debug('\t========== LINKS ==========')
            for link_node, link in node.links.items():
                Logr.debug('\t%0.2f -- %s', link.similarity, link_node.value)

            Logr.debug('\t---------------------------')
||||
|
|
||||
|
|
||||
|
def kill_trailing_nodes(nodes):
    """Kill nodes whose value is a surviving node's value plus trailing text.

    A linked node dies when its diff against the owner consists only of
    word-boundary insertions (no deletions or replacements) and the two
    strings are mostly equal. Mutates *nodes* in place via kill_nodes().
    """
    killed_nodes = []

    for node in nodes:
        if node.dead:
            continue

        Logr.debug(node.value)

        for link_node, link in node.links.items():
            if link_node.dead:
                continue

            # 'valid' was computed by get_opcode_stats: every insert/delete
            # sat on a space or string boundary.
            is_valid = link.stats.get('valid', False)

            has_deletions = False
            has_insertions = False
            has_replacements = False

            for opcode in link.opcodes:
                if opcode[0] == 'delete':
                    has_deletions = True
                if opcode[0] == 'insert':
                    has_insertions = True
                if opcode[0] == 'replace':
                    has_replacements = True

            # Fractions are relative to the owner's (this node's) length.
            equal_perc = link.stats.get('equal', 0) / float(len(node.value))
            insert_perc = link.stats.get('insert', 0) / float(len(node.value))

            Logr.debug('\t({0:<24}) [{1:02d}:{2:02d} = {3:02d} {4:3.0f}% {5:3.0f}%] -- {6:<45}'.format(
                'd:%s, i:%s, r:%s' % (has_deletions, has_insertions, has_replacements),
                len(node.value), len(link_node.value), link.stats.get('equal', 0),
                equal_perc * 100, insert_perc * 100,
                '"{0}"'.format(link_node.value)
            ))

            Logr.debug('\t\t%s', link.stats)

            # Kill only for clean "same title + trailing extras" diffs.
            kill = all([
                is_valid,
                equal_perc >= 0.5,
                insert_perc < 2,
                has_insertions,
                not has_deletions,
                not has_replacements
            ])

            if kill:
                Logr.debug('\t\tkilled this node')

                link_node.dead = True
                node.num_merges += 1
                killed_nodes.append(link_node)

    kill_nodes(nodes, killed_nodes)
||||
|
|
||||
|
stats_print_format = "\t{0:<8} ({1:2d}:{2:2d}) ({3:2d}:{4:2d})" |
||||
|
|
||||
|
|
||||
|
def get_index_values(iterable, a, b):
    """Return (iterable[a], iterable[b]); a falsy index (None or 0) yields None.

    Intended to be fed the output of get_indices(), which never yields 0.
    """
    first_value = iterable[a] if a else None
    second_value = iterable[b] if b else None
    return first_value, second_value
||||
|
|
||||
|
|
||||
|
def get_indices(iterable, a, b):
    """Keep each index only when strictly inside (0, len(iterable)); else None.

    Note that index 0 is deliberately mapped to None (a string-edge marker
    for the word-boundary checks in get_opcode_stats).
    """
    def _bounded(index):
        return index if 0 < index < len(iterable) else None

    return _bounded(a), _bounded(b)
||||
|
|
||||
|
|
||||
|
def get_opcode_stats(for_node, node, opcodes):
    """Aggregate SequenceMatcher *opcodes* into per-tag character counts.

    Also records stats['valid']: True only while every insert/delete edit
    is flanked by a space or a string boundary on at least one side, i.e.
    the difference between the two titles falls on word boundaries.
    """
    stats = {}

    for tag, i1, i2, j1, j2 in opcodes:
        Logr.debug(stats_print_format.format(
            tag, i1, i2, j1, j2
        ))

        if tag in ['insert', 'delete']:
            ax = None, None
            bx = None, None

            # ax/av: the characters surrounding the edit in for_node's value;
            # bx/bv: the ends of the inserted/deleted span in node's value.
            if tag == 'insert':
                ax = get_indices(for_node.value, i1 - 1, i1)
                bx = get_indices(node.value, j1, j2 - 1)

            if tag == 'delete':
                ax = get_indices(for_node.value, j1 - 1, j1)
                bx = get_indices(node.value, i1, i2 - 1)

            av = get_index_values(for_node.value, *ax)
            bv = get_index_values(node.value, *bx)

            Logr.debug(
                '\t\t%s %s [%s><%s] <---> %s %s [%s><%s]',
                ax, av, av[0], av[1],
                bx, bv, bv[0], bv[1]
            )

            # A None (string edge) or a space counts as a word boundary.
            head_valid = av[0] in [None, ' '] or bv[0] in [None, ' ']
            tail_valid = av[1] in [None, ' '] or bv[1] in [None, ' ']
            valid = head_valid and tail_valid

            # Latching: once any edit is invalid, the link stays invalid.
            if 'valid' not in stats or (stats['valid'] and not valid):
                stats['valid'] = valid

            Logr.debug('\t\t' + ('VALID' if valid else 'INVALID'))

        if tag not in stats:
            stats[tag] = 0

        # Length of the edited span (one side is empty for insert/delete).
        stats[tag] += (i2 - i1) or (j2 - j1)

    return stats
||||
|
|
||||
|
|
||||
|
def calculate_sim_links(for_node, other_nodes):
    """Compute and cache similarity links between *for_node* and each
    node in *other_nodes*.

    For every pair not already linked, a quick similarity ratio and
    directional opcode statistics are calculated, and a SimLink is stored
    on both nodes' ``links`` mappings so either side can be queried.
    """
    for node in other_nodes:
        # Already linked in a previous pass — skip the pair.
        if node in for_node.links:
            continue

        Logr.debug('calculating similarity between "%s" and "%s"', for_node.value, node.value)

        # Get similarity (quick_ratio: cheap upper-bound estimate).
        similarity_matcher = create_matcher(for_node.value, node.value)
        similarity = similarity_matcher.quick_ratio()

        # Get for_node -> node opcodes
        a_opcodes_matcher = create_matcher(for_node.value, node.value, swap_longest = False)
        a_opcodes = a_opcodes_matcher.get_opcodes()
        a_stats = get_opcode_stats(for_node, node, a_opcodes)

        Logr.debug('-' * 100)

        # Get node -> for_node opcodes (reverse direction)
        b_opcodes_matcher = create_matcher(node.value, for_node.value, swap_longest = False)
        b_opcodes = b_opcodes_matcher.get_opcodes()
        b_stats = get_opcode_stats(for_node, node, b_opcodes)

        # Store one link per direction, keyed by the opposite node.
        for_node.links[node] = SimLink(similarity, a_opcodes, a_stats)
        node.links[for_node] = SimLink(similarity, b_opcodes, b_stats)

        #raw_input('Press ENTER to continue')
@ -0,0 +1,26 @@ |
|||||
|
# Copyright 2013 Dean Gardiner <gardiner91@gmail.com> |
||||
|
# |
||||
|
# Licensed under the Apache License, Version 2.0 (the "License"); |
||||
|
# you may not use this file except in compliance with the License. |
||||
|
# You may obtain a copy of the License at |
||||
|
# |
||||
|
# http://www.apache.org/licenses/LICENSE-2.0 |
||||
|
# |
||||
|
# Unless required by applicable law or agreed to in writing, software |
||||
|
# distributed under the License is distributed on an "AS IS" BASIS, |
||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
|
# See the License for the specific language governing permissions and |
||||
|
# limitations under the License. |
||||
|
|
||||
|
|
||||
|
from qcond.transformers.base import Transformer |
||||
|
|
||||
|
|
||||
|
# Titles consisting solely of one of these words are dropped by
# StripCommonTransformer; comparison is case-insensitive.
COMMON_WORDS = [
    'the'
]
||||
|
|
||||
|
|
||||
|
class StripCommonTransformer(Transformer):
    """Transformer that discards titles which are just a common word."""

    def run(self, titles):
        """Return *titles* without entries matching COMMON_WORDS
        (compared case-insensitively)."""
        kept = []
        for candidate in titles:
            if candidate.lower() in COMMON_WORDS:
                continue
            kept.append(candidate)
        return kept
@ -0,0 +1,4 @@ |
|||||
|
.DS_Store |
||||
|
*.pyc |
||||
|
*.egg-info/* |
||||
|
dist/*.tar.gz |
@ -0,0 +1,9 @@ |
|||||
|
language: python |
||||
|
python: |
||||
|
- 2.5 |
||||
|
- 2.6 |
||||
|
- 2.7 |
||||
|
|
||||
|
install: pip install nose |
||||
|
|
||||
|
script: nosetests |
@ -0,0 +1,4 @@ |
|||||
|
include UNLICENSE |
||||
|
include readme.md |
||||
|
include tests/*.py |
||||
|
include Rakefile |
@ -0,0 +1,103 @@ |
|||||
|
require 'fileutils'

# Running `rake` with no arguments just cleans compiled artifacts.
task :default => [:clean]
||||
|
|
||||
|
# Remove compiled Python bytecode (*.pyc) from the project root and tests/.
task :clean do
  [".", "tests"].each do |cd|
    puts "Cleaning directory #{cd}"
    Dir.new(cd).each do |t|
      if t =~ /.*\.pyc$/
        puts "Removing #{File.join(cd, t)}"
        File.delete(File.join(cd, t))
      end
    end
  end
end
||||
|
|
||||
|
desc "Upversion files"
# Bump the __version__ string in every top-level *.py file by 0.1,
# rewriting each file in place.
task :upversion do
  puts "Upversioning"

  Dir.glob("*.py").each do |filename|
    f = File.new(filename, File::RDWR)
    contents = f.read()

    contents.gsub!(/__version__ = ".+?"/){|m|
      cur_version = m.scan(/\d+\.\d+/)[0].to_f
      new_version = cur_version + 0.1

      puts "Current version: #{cur_version}"
      puts "New version: #{new_version}"

      new_line = "__version__ = \"#{new_version}\""

      puts "Old line: #{m}"
      puts "New line: #{new_line}"

      # NOTE: the assignment expression is the block's return value,
      # which is what gsub! substitutes — reassigning `m` itself has
      # no other effect.
      m = new_line
    }

    # NOTE(review): prints only the first character of the file —
    # looks like a debugging leftover.
    puts contents[0]

    f.truncate(0) # empty the existing file
    f.seek(0)
    f.write(contents.to_s) # write modified file
    f.close()
  end
end
||||
|
|
||||
|
desc "Upload current version to PyPi"
# Build an sdist and upload to PyPI after the test task passes.
# Prompts for confirmation; anything other than "y" cancels.
task :topypi => :test do
  # Read the version straight out of tvdb_api.py's __version__ line.
  cur_file = File.open("tvdb_api.py").read()
  tvdb_api_version = cur_file.scan(/__version__ = "(.*)"/)
  tvdb_api_version = tvdb_api_version[0][0].to_f

  puts "Build sdist and send tvdb_api v#{tvdb_api_version} to PyPi?"
  if $stdin.gets.chomp == "y"
    puts "Sending source-dist (sdist) to PyPi"

    if system("python setup.py sdist register upload")
      puts "tvdb_api uploaded!"
    end

  else
    puts "Cancelled"
  end
end
||||
|
|
||||
|
desc "Profile by running unittests"
# Profile the test-suite with cProfile, render the stats to a PNG call
# graph via gprof2dot + Graphviz, then remove the intermediate files.
task :profile do
  cd "tests"
  puts "Profiling.."
  `python -m cProfile -o prof_runtest.prof runtests.py`
  puts "Converting prof to dot"
  `python gprof2dot.py -o prof_runtest.dot -f pstats prof_runtest.prof`
  puts "Generating graph"
  # NOTE(review): hard-coded path to a local Graphviz.app install —
  # will only work on the original author's machine.
  `~/Applications/dev/graphviz.app/Contents/macOS/dot -Tpng -o profile.png prof_runtest.dot -Gbgcolor=black`
  puts "Cleanup"
  rm "prof_runtest.dot"
  rm "prof_runtest.prof"
end
||||
|
|
||||
|
# Run the nose test-suite, then doctest every top-level Python module
# (except setup.py) and readme.md. Raises on the first failure.
task :test do
  puts "Nosetest'ing"
  if not system("nosetests -v --with-doctest")
    raise "Test failed!"
  end

  puts "Doctesting *.py (excluding setup.py)"
  Dir.glob("*.py").select{|e| ! e.match(/setup.py/)}.each do |filename|
    # Fix: interpolate the filename into the message (was a broken
    # placeholder). The former inner setup.py guard is removed: the
    # `select` above already excludes it, and `skip` is undefined
    # inside a plain Rake task so the branch would have raised
    # NameError had it ever run.
    puts "Doctesting #{filename}"
    if not system("python", "-m", "doctest", filename)
      raise "Failed doctest"
    end
  end

  puts "Doctesting readme.md"
  if not system("python", "-m", "doctest", "readme.md")
    raise "Doctest failed for readme.md"
  end
end
@ -0,0 +1,26 @@ |
|||||
|
Copyright 2011-2012 Ben Dickson (dbr) |
||||
|
|
||||
|
This is free and unencumbered software released into the public domain. |
||||
|
|
||||
|
Anyone is free to copy, modify, publish, use, compile, sell, or |
||||
|
distribute this software, either in source code form or as a compiled |
||||
|
binary, for any purpose, commercial or non-commercial, and by any |
||||
|
means. |
||||
|
|
||||
|
In jurisdictions that recognize copyright laws, the author or authors |
||||
|
of this software dedicate any and all copyright interest in the |
||||
|
software to the public domain. We make this dedication for the benefit |
||||
|
of the public at large and to the detriment of our heirs and |
||||
|
successors. We intend this dedication to be an overt act of |
||||
|
relinquishment in perpetuity of all present and future rights to this |
||||
|
software under copyright law. |
||||
|
|
||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, |
||||
|
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
||||
|
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |
||||
|
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR |
||||
|
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, |
||||
|
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR |
||||
|
OTHER DEALINGS IN THE SOFTWARE. |
||||
|
|
||||
|
For more information, please refer to <http://unlicense.org/> |
@ -0,0 +1,109 @@ |
|||||
|
# `tvdb_api` |
||||
|
|
||||
|
`tvdb_api` is an easy to use interface to [thetvdb.com][tvdb] |
||||
|
|
||||
|
`tvnamer` has moved to a separate repository: [github.com/dbr/tvnamer][tvnamer] - it is a utility which uses `tvdb_api` to rename files from `some.show.s01e03.blah.abc.avi` to `Some Show - [01x03] - The Episode Name.avi` (which works by getting the episode name from `tvdb_api`) |
||||
|
|
||||
|
[](http://travis-ci.org/dbr/tvdb_api) |
||||
|
|
||||
|
## To install |
||||
|
|
||||
|
You can easily install `tvdb_api` via `easy_install` |
||||
|
|
||||
|
easy_install tvdb_api |
||||
|
|
||||
|
You may need to use sudo, depending on your setup: |
||||
|
|
||||
|
sudo easy_install tvdb_api |
||||
|
|
||||
|
The [`tvnamer`][tvnamer] command-line tool can also be installed via `easy_install`, which installs `tvdb_api` as a dependency: |
||||
|
|
||||
|
easy_install tvnamer |
||||
|
|
||||
|
|
||||
|
## Basic usage |
||||
|
|
||||
|
import tvdb_api |
||||
|
t = tvdb_api.Tvdb() |
||||
|
episode = t['My Name Is Earl'][1][3] # get season 1, episode 3 of show |
||||
|
print episode['episodename'] # Print episode name |
||||
|
|
||||
|
## Advanced usage |
||||
|
|
||||
|
Most of the documentation is in docstrings. The examples are tested (using doctest) so will always be up to date and working. |
||||
|
|
||||
|
The docstring for `Tvdb.__init__` lists all initialisation arguments, including support for non-English searches, custom "Select Series" interfaces and enabling the retrieval of banners and extended actor information. You can also override the default API key using `apikey`, recommended if you're using `tvdb_api` in a larger script or application |
||||
|
|
||||
|
### Exceptions |
||||
|
|
||||
|
There are several exceptions you may catch, these can be imported from `tvdb_api`: |
||||
|
|
||||
|
- `tvdb_error` - this is raised when there is an error communicating with [thetvdb.com][tvdb] (a network error most commonly) |
||||
|
- `tvdb_userabort` - raised when a user aborts the Select Series dialog (by `ctrl+c`, or entering `q`) |
||||
|
- `tvdb_shownotfound` - raised when `t['show name']` cannot find anything |
||||
|
- `tvdb_seasonnotfound` - raised when the requested season (`t['show name'][99]`) does not exist |
||||
|
- `tvdb_episodenotfound` - raised when the requested episode (`t['show name'][1][99]`) does not exist. |
||||
|
- `tvdb_attributenotfound` - raised when the requested attribute is not found (`t['show name']['an attribute']`, `t['show name'][1]['an attribute']`, or ``t['show name'][1][1]['an attribute']``) |
||||
|
|
||||
|
### Series data |
||||
|
|
||||
|
All data exposed by [thetvdb.com][tvdb] is accessible via the `Show` class. A Show is retrieved by doing.. |
||||
|
|
||||
|
>>> import tvdb_api |
||||
|
>>> t = tvdb_api.Tvdb() |
||||
|
>>> show = t['scrubs'] |
||||
|
>>> type(show) |
||||
|
<class 'tvdb_api.Show'> |
||||
|
|
||||
|
For example, to find out what network Scrubs is aired: |
||||
|
|
||||
|
>>> t['scrubs']['network'] |
||||
|
u'ABC' |
||||
|
|
||||
|
The data is stored in an attribute named `data`, within the Show instance: |
||||
|
|
||||
|
>>> t['scrubs'].data.keys() |
||||
|
['networkid', 'rating', 'airs_dayofweek', 'contentrating', 'seriesname', 'id', 'airs_time', 'network', 'fanart', 'lastupdated', 'actors', 'ratingcount', 'status', 'added', 'poster', 'imdb_id', 'genre', 'banner', 'seriesid', 'language', 'zap2it_id', 'addedby', 'firstaired', 'runtime', 'overview'] |
||||
|
|
||||
|
Although each element is also accessible via `t['scrubs']` for ease-of-use: |
||||
|
|
||||
|
>>> t['scrubs']['rating'] |
||||
|
u'9.0' |
||||
|
|
||||
|
This is the recommended way of retrieving "one-off" data (for example, if you are only interested in "seriesname"). If you wish to iterate over all data, or check if a particular show has a specific piece of data, use the `data` attribute, |
||||
|
|
||||
|
>>> 'rating' in t['scrubs'].data |
||||
|
True |
||||
|
|
||||
|
### Banners and actors |
||||
|
|
||||
|
Since banners and actors are separate XML files, retrieving them by default is undesirable. If you wish to retrieve banners (and other fanart), use the `banners` Tvdb initialisation argument: |
||||
|
|
||||
|
>>> from tvdb_api import Tvdb |
||||
|
>>> t = Tvdb(banners = True) |
||||
|
|
||||
|
Then access the data using a `Show`'s `_banners` key: |
||||
|
|
||||
|
>>> t['scrubs']['_banners'].keys() |
||||
|
['fanart', 'poster', 'series', 'season'] |
||||
|
|
||||
|
The banner data structure will be improved in future versions. |
||||
|
|
||||
|
Extended actor data is accessible similarly: |
||||
|
|
||||
|
>>> t = Tvdb(actors = True) |
||||
|
>>> actors = t['scrubs']['_actors'] |
||||
|
>>> actors[0] |
||||
|
<Actor "Zach Braff"> |
||||
|
>>> actors[0].keys() |
||||
|
['sortorder', 'image', 'role', 'id', 'name'] |
||||
|
>>> actors[0]['role'] |
||||
|
u'Dr. John Michael "J.D." Dorian' |
||||
|
|
||||
|
Remember a simple list of actors is accessible via the default Show data: |
||||
|
|
||||
|
>>> t['scrubs']['actors'] |
||||
|
u'|Zach Braff|Donald Faison|Sarah Chalke|Christa Miller|Aloma Wright|Robert Maschio|Sam Lloyd|Neil Flynn|Ken Jenkins|Judy Reyes|John C. McGinley|Travis Schuldt|Johnny Kastl|Heather Graham|Michael Mosley|Kerry Bish\xe9|Dave Franco|Eliza Coupe|' |
||||
|
|
||||
|
[tvdb]: http://thetvdb.com |
||||
|
[tvnamer]: http://github.com/dbr/tvnamer |
@ -0,0 +1,35 @@ |
|||||
|
from setuptools import setup
# Packaging metadata for the tvdb_api distribution; running this module
# (e.g. `python setup.py sdist`) builds/installs the listed modules.
setup(
    name = 'tvdb_api',
    version='1.8.2',

    author='dbr/Ben',
    description='Interface to thetvdb.com',
    url='http://github.com/dbr/tvdb_api/tree/master',
    license='unlicense',

    # Shown on the PyPI page; the example is doctest-formatted.
    long_description="""\
An easy to use API interface to TheTVDB.com
Basic usage is:

>>> import tvdb_api
>>> t = tvdb_api.Tvdb()
>>> ep = t['My Name Is Earl'][1][22]
>>> ep
<Episode 01x22 - Stole a Badge>
>>> ep['episodename']
u'Stole a Badge'
""",

    # Distributed as plain top-level modules, not a package directory.
    py_modules = ['tvdb_api', 'tvdb_ui', 'tvdb_exceptions', 'tvdb_cache'],

    classifiers=[
        "Intended Audience :: Developers",
        "Natural Language :: English",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Topic :: Multimedia",
        "Topic :: Utilities",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ]
)
Some files were not shown because too many files changed in this diff
Loading…
Reference in new issue