25 changed files with 1149 additions and 181 deletions
@@ -0,0 +1,6 @@
from .main import CoreNotifier


def start():
    return CoreNotifier()

config = []
@@ -0,0 +1,49 @@
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.request import jsonified
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
import time

log = CPLog(__name__)


class CoreNotifier(Plugin):

    messages = []

    def __init__(self):
        addEvent('notify', self.notify)
        addEvent('notify.core_notifier', self.notify)
        addEvent('core_notifier.frontend', self.frontend)

        addApiView('core_notifier.listener', self.listener)

        static = self.registerStatic(__file__)
        fireEvent('register_script', static + 'notification.js')

    def notify(self, message = '', data = {}):
        self.add(data = {
            'message': message,
            'raw': data,
        })

    def frontend(self, type = 'notification', data = {}):

        self.messages.append({
            'time': time.time(),
            'type': type,
            'data': data,
        })

    def listener(self):

        # Drop messages older than 15s
        self.messages = [m for m in self.messages if m['time'] >= (time.time() - 15)]

        return jsonified({
            'success': True,
            'result': self.messages,
        })
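To show how the pieces above are meant to be used together, here is a minimal sketch of another plugin raising a notification through the event bus. It assumes fireEvent forwards keyword arguments to the registered handler; the function name and message text are purely illustrative.

from couchpotato.core.event import fireEvent

# Hypothetical caller: any plugin can raise a user-facing notification.
# 'notify' is handled by CoreNotifier.notify() above, which wraps the message
# and raw payload and hands them to the plugin's add() helper.
def on_movie_added(movie_title):
    fireEvent('notify', message = 'Added "%s" to the wanted list' % movie_title, data = {
        'movie': movie_title,  # passed through as the raw payload
    })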
@@ -0,0 +1,42 @@
var NotificationBase = new Class({

	Extends: BlockBase,
	Implements: [Options, Events],

	initialize: function(options){
		var self = this;
		self.setOptions(options);

		//App.addEvent('load', self.request.bind(self));

		self.addEvent('notification', self.notify.bind(self))

	},

	request: function(){
		var self = this;

		Api.request('core_notifier.listener', {
			'initialDelay': 100,
			'delay': 3000,
			'onComplete': self.processData.bind(self)
		}).startTimer()

	},

	notify: function(data){
		var self = this;

	},

	processData: function(json){
		var self = this;

		Array.each(json.result, function(result){
			self.fireEvent(result.type, result.data)
		})
	}

});

window.Notification = new NotificationBase();
@@ -0,0 +1,22 @@
from .main import Wizard

def start():
    return Wizard()

config = [{
    'name': 'global',
    'groups': [
        {
            'tab': 'general',
            'name': 'advanced',
            'options': [
                {
                    'name': 'show_wizard',
                    'label': 'Run the wizard',
                    'default': True,
                    'type': 'bool',
                },
            ],
        },
    ],
}]
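As a rough illustration only: the config descriptor above declares a boolean 'show_wizard' option, and the providers later in this diff read options through a conf() helper on the plugin base class. Assuming that same helper resolves this descriptor (not confirmed by this diff), a check inside the Wizard plugin might look like this; the method name is hypothetical.

# Hypothetical check inside the Wizard plugin (name is illustrative):
# only run the wizard while the 'show_wizard' option is still enabled.
def shouldRun(self):
    # conf() is assumed to resolve options from the descriptor above, the same
    # way the NZB providers call self.conf('username') further down this diff.
    return bool(self.conf('show_wizard'))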
@@ -0,0 +1,13 @@
from couchpotato.core.event import fireEvent
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin

log = CPLog(__name__)


class Wizard(Plugin):

    def __init__(self):
        path = self.registerStatic(__file__)
        fireEvent('register_script', path + 'spotlight.js')
        fireEvent('register_script', path + 'wizard.js')
@@ -0,0 +1,306 @@
/*
---
description: Fill the empty space around elements, creating a spotlight effect.

license: GPL v3.0

authors:
- Ruud Burger

requires:
- core/1.3: [Class.Extras, Element.Dimensions]

provides: [Spotlight]

...
*/

var Spotlight = new Class({

	Implements: [Options],

	options: {
		'fillClass': 'spotlight_fill',
		'fillColor': [255,255,255],
		'fillOpacity': 1,
		'parent': null,
		'inject': null,
		'soften': 10
	},

	initialize: function(elements, options){
		var self = this;
		self.setOptions(options);

		self.setElements(elements);
		self.clean();

	},

	clean: function(){
		var self = this;

		self.range = []; self.fills = []; self.edges = [];

		self.vert = [];
		self.vert_el = [];

		self.top = []; self.left = [];
		self.width = []; self.height = [];
	},

	setElements: function(elements){
		this.elements = elements;
	},

	addElement: function(element){
		this.elements.include(element);
	},

	create: function(){
		var self = this;

		self.destroy();

		var page_c = $(self.options.parent || window).getScrollSize();
		var soften = self.options.soften;

		// Get the top and bottom of all the elements
		self.elements.each(function(el, nr){
			var c = el.getCoordinates();

			if(c.top > 0 && nr == 0){
				self.vert.append([0]);
				self.vert_el.append([null]);
			}

			// Top
			self.vert.append([c.top-soften]);
			self.vert_el.append([el]);

			// Bottom
			self.vert.append([c.top+c.height+soften]);
			self.vert_el.append([el]);

			// Add it to range, for later calculation from left to right
			self.range.append([{
				'el': el,
				'top': c.top-soften,
				'bottom': c.top+c.height+soften,
				'left': c.left-soften,
				'right': c.left+c.width+soften
			}])

			// Create soft edge around element
			self.soften(el);

		});

		if(self.elements.length == 0){
			self.vert.append([0]);
			self.vert_el.append([null]);
		}

		// Reorder
		var vert = self.vert.clone().sort(self.numberSort); // Custom sort function: the default sort is lexical, so 100 would sort before 20
		var vert_el_new = [], vert_new = [];
		vert.each(function(v){
			var old_nr = self.vert.indexOf(v);
			vert_el_new.append([self.vert_el[old_nr]]);
			vert_new.append([v]);

		});
		self.vert = vert_new;
		self.vert_el = vert_el_new;

		// Shorten vars
		var vert = self.vert,
			vert_el = self.vert_el;
		var t, h, l, w, left, width,
			row_el, cursor = 0;

		// Loop over all vertical lines
		vert.each(function(v, nr){

			// Use defaults if el == null (for the first fill block)
			var c = vert_el[nr] ? vert_el[nr].getCoordinates() : {
				'left': 0,
				'top': 0,
				'width': page_c.x,
				'height': 0
			};

			// Loop till cursor gets to parent_element.width
			var fail_safe = 0;
			while (cursor < page_c.x && fail_safe < 10){

				t = vert[nr]; // Top is the same for every element in a row
				h = (nr == vert.length-1) ? (page_c.y - t) : vert[nr+1] - vert[nr]; // So is height

				// First element gets special treatment
				if(nr == 0){
					l = 0;
					w = c.width+(2*soften);
					cursor += w;
				}
				else {

					row_el = self.firstFromLeft(cursor, t) // First next element
					left = row_el.el ? row_el.left : c.left-soften;
					width = row_el.el ? row_el.left - cursor : c.left-soften;

					if(t == c.bottom+soften && !row_el.el)
						width = page_c.x;

					l = cursor;
					if(cursor < left){
						w = width;
						cursor += w+(row_el.right - row_el.left);
					}
					else {
						w = page_c.x-l;
						cursor += w;
					}

				}

				// Add it to the pile!
				if(h > 0 && w > 0){
					self.top.append([t]); self.left.append([l]);
					self.width.append([w]); self.height.append([h]);
				}

				fail_safe++;

			}

			cursor = 0; // New line, reset cursor position
			fail_safe = 0;

		});

		// Create the fill blocks
		self.top.each(self.createFillItem.bind(self));

	},

	createFillItem: function(top, nr){
		var self = this;

		var fill = new Element('div', {
			'class': self.options.fillClass,
			'styles': {
				'position': 'absolute',
				'background-color': 'rgba('+self.options.fillColor.join(',')+', '+self.options.fillOpacity+')',
				'display': 'block',
				'z-index': 2,
				'top': self.top[nr],
				'left': self.left[nr],
				'height': self.height[nr],
				'width': self.width[nr]
			}
		}).inject(self.options.inject || document.body);

		self.fills.include(fill);
	},

	// Find the first element after x,y coordinates
	firstFromLeft: function(x, y){
		var self = this;

		var lowest_left = null;
		var return_data = {};

		self.range.each(function(range){
			var is_within_height_range = range.top <= y && range.bottom > y,
				is_within_width_range = range.left >= x,
				more_left_than_previous = range.left < lowest_left || lowest_left == null;

			if(is_within_height_range && is_within_width_range && more_left_than_previous){
				lowest_left = range.left;
				return_data = range;
			}
		})

		return return_data

	},

	soften: function(el){
		var self = this;
		var soften = self.options.soften;

		var c = el.getCoordinates();
		var from_color = 'rgba('+self.options.fillColor.join(',')+', '+self.options.fillOpacity+')';
		var to_color = 'rgba('+self.options.fillColor.join(',')+', 0)';

		// Top
		self.createEdge({
			'top': c.top-soften,
			'left': c.left-soften,
			'width': c.width+(2*soften),
			'background': '-webkit-gradient(linear, left top, left bottom, from('+from_color+'), to('+to_color+'))',
			'background': '-moz-linear-gradient(top, '+from_color+', '+to_color+')'
		})

		// Right
		self.createEdge({
			'top': c.top-soften,
			'left': c.right,
			'height': c.height+(2*soften),
			'background': '-webkit-gradient(linear, left, right, from('+from_color+'), to('+to_color+'))',
			'background': '-moz-linear-gradient(right, '+from_color+', '+to_color+')'
		})

		// Bottom
		self.createEdge({
			'top': c.bottom,
			'left': c.left-soften,
			'width': c.width+(2*soften),
			'background': '-webkit-gradient(linear, left bottom, left top, from('+from_color+'), to('+to_color+'))',
			'background': '-moz-linear-gradient(bottom, '+from_color+', '+to_color+')'
		})

		// Left
		self.createEdge({
			'top': c.top-soften,
			'left': c.left-soften,
			'height': c.height+(2*soften),
			'background': '-webkit-gradient(linear, right, left, from('+from_color+'), to('+to_color+'))',
			'background': '-moz-linear-gradient(left, '+from_color+', '+to_color+')'
		})

	},

	createEdge: function(style){
		var self = this;

		var soften = self.options.soften;
		var edge = new Element('div', {
			'styles': Object.merge({
				'position': 'absolute',
				'width': soften,
				'height': soften
			}, style)
		}).inject(self.options.inject || document.body)

		self.edges.include(edge);

	},

	destroy: function(){
		var self = this;
		self.fills.each(function(fill){
			fill.destroy();
		})
		self.edges.each(function(edge){
			edge.destroy();
		})
		self.clean();
	},

	numberSort: function (a, b) {
		return a - b;
	}

});
@@ -0,0 +1,76 @@
var WizardBase = new Class({

	Implements: [Options, Events],

	initialize: function(steps){
		var self = this;

		self.steps = steps;
		self.start();

	},

	start: function(){



	},

	nextStep: function(){

	},

	previousStep: function(){

	}

});

WizardBase.Screen = new Class({

	initialize: function(data){
		var self = this;

		self.data = data;
		self.create()

	},

	create: function(){
		var self = this;

		self.el = new Element('div')


	},

	destroy: function(){
		this.el.destroy();

		return this
	}

})

window.Wizard = new WizardBase([
	{
		'title': 'Fill in your username and password',
		'description': 'Outside blabla',
		'tab': 'general',
		'fields': ['username', 'password']
	},
	{
		'title': 'What do you use to download your movies',
		'answers': [
			{'name': 'nzb', 'label': 'Usenet'},
			{'name': 'torrent', 'label': 'Torrents'}
		]
	},
	{
		'title': 'Do you have a login for any of the following sites',
		'tab': 'providers',
		'needs': function(){
			return self.config_nzb || self.config_torrent
		}
	}
])
@@ -1,8 +1,138 @@
from couchpotato.core.event import addEvent
from couchpotato.core.logger import CPLog
from couchpotato.core.providers.base import NZBProvider
from dateutil.parser import parse
from urllib import urlencode
from urllib2 import URLError
import time

log = CPLog(__name__)


class Newzbin(NZBProvider):

    searchUrl = 'https://www.newzbin.com/search/'

    formatIds = {
        2: ['scr'],
        1: ['cam'],
        4: ['tc'],
        8: ['ts'],
        1024: ['r5'],
    }
    cat_ids = [
        ([2097152], ['1080p']),
        ([524288], ['720p']),
        ([262144], ['brrip']),
        ([2], ['dvdr']),
    ]
    cat_backup_id = -1

    def __init__(self):
        addEvent('provider.nzb.search', self.search)

    def search(self, movie, quality):

        self.cleanCache()

        results = []
        if not self.enabled() or not self.isAvailable(self.searchUrl):
            return results

        formatId = self.getFormatId(type)
        catId = self.getCatId(type)

        arguments = urlencode({
            'searchaction': 'Search',
            'u_url_posts_only': '0',
            'u_show_passworded': '0',
            'q_url': 'imdb.com/title/' + movie.imdb,
            'sort': 'ps_totalsize',
            'order': 'asc',
            'u_post_results_amt': '100',
            'feed': 'rss',
            'category': '6',
            'ps_rb_video_format': str(catId),
            'ps_rb_source': str(formatId),
        })

        url = "%s?%s" % (self.searchUrl, arguments)
        cacheId = str('%s %s %s' % (movie.imdb, str(formatId), str(catId)))
        singleCat = True

        try:
            cached = False
            if(self.cache.get(cacheId)):
                data = True
                cached = True
                log.info('Getting RSS from cache: %s.' % cacheId)
            else:
                log.info('Searching: %s' % url)
                data = self.urlopen(url, username = self.conf('username'), password = self.conf('password'))
                self.cache[cacheId] = {
                    'time': time.time()
                }

        except (IOError, URLError):
            log.error('Failed to open %s.' % url)
            return results

        if data:
            try:
                try:
                    if cached:
                        xml = self.cache[cacheId]['xml']
                    else:
                        xml = self.getItems(data)
                        self.cache[cacheId]['xml'] = xml
                except:
                    log.debug('No valid xml or too many requests.. You never know with %s.' % self.name)
                    return results

                for item in xml:

                    title = self.gettextelement(item, "title")
                    if 'error' in title.lower(): continue

                    REPORT_NS = 'http://www.newzbin.com/DTD/2007/feeds/report/'

                    # Add attributes to name
                    for attr in item.find('{%s}attributes' % REPORT_NS):
                        title += ' ' + attr.text

                    id = int(self.gettextelement(item, '{%s}id' % REPORT_NS))
                    size = str(int(self.gettextelement(item, '{%s}size' % REPORT_NS)) / 1024 / 1024) + ' mb'
                    date = str(self.gettextelement(item, '{%s}postdate' % REPORT_NS))

                    new = self.feedItem()
                    new.id = id
                    new.type = 'nzb'
                    new.name = title
                    new.date = int(time.mktime(parse(date).timetuple()))
                    new.size = self.parseSize(size)
                    new.url = str(self.gettextelement(item, '{%s}nzb' % REPORT_NS))
                    new.detailUrl = str(self.gettextelement(item, 'link'))
                    new.content = self.gettextelement(item, "description")
                    new.score = self.calcScore(new, movie)
                    new.addbyid = True
                    new.checkNZB = False

                    if new.date > time.time() - (int(self.config.get('NZB', 'retention')) * 24 * 60 * 60) and self.isCorrectMovie(new, movie, type, imdbResults = True, singleCategory = singleCat):
                        results.append(new)
                        log.info('Found: %s' % new.name)

                return results
            except SyntaxError:
                log.error('Failed to parse XML response from newzbin.com')

        return results

    def getFormatId(self, format):
        for id, quality in self.formatIds.iteritems():
            for q in quality:
                if q == format:
                    return id

        return self.cat_backup_id

    def isEnabled(self):
        return NZBProvider.isEnabled(self) and self.conf('enabled') and self.conf('username') and self.conf('password')
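The search methods in these providers call a getCatId() helper that lives in the provider base class and is not part of this diff. As a rough, hedged sketch of what a lookup over the cat_ids / cat_backup_id structure declared above could look like (the real implementation may differ):

# Hypothetical sketch only; the real helper is in the provider base class.
def getCatId(self, quality_identifier):
    # cat_ids is a list of ([category ids], [quality identifiers]) tuples.
    for cat_ids, qualities in self.cat_ids:
        if quality_identifier in qualities:
            return cat_ids
    # Fall back to the provider-specific default category.
    return [self.cat_backup_id]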
@@ -1,8 +1,126 @@
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.variable import cleanHost
from couchpotato.core.logger import CPLog
from couchpotato.core.providers.base import NZBProvider
from dateutil.parser import parse
from urllib import urlencode
from urllib2 import URLError
import time

log = CPLog(__name__)


class Newznab(NZBProvider):

    urls = {
        'download': 'get&id=%s%s',
        'detail': 'details&id=%s',
    }

    cat_ids = [
        ([2000], ['brrip']),
        ([2010], ['dvdr']),
        ([2030], ['cam', 'ts', 'dvdrip', 'tc', 'r5', 'scr']),
        ([2040], ['720p', '1080p']),
    ]
    cat_backup_id = 2000

    time_between_searches = 1 # Seconds

    def __init__(self):
        addEvent('provider.nzb.search', self.search)

    def getUrl(self, type):
        return cleanHost(self.conf('host')) + 'api?t=' + type

    def search(self, movie, quality):

        self.cleanCache()

        results = []
        if not self.enabled() or not self.isAvailable(self.getUrl(self.searchUrl)):
            return results

        catId = self.getCatId(type)
        arguments = urlencode({
            'imdbid': movie.imdb.replace('tt', ''),
            'cat': catId,
            'apikey': self.conf('apikey'),
            't': self.searchUrl,
            'extended': 1
        })
        url = "%s&%s" % (self.getUrl(self.searchUrl), arguments)
        cacheId = str(movie.imdb) + '-' + str(catId)
        singleCat = (len(self.catIds.get(catId)) == 1 and catId != self.catBackupId)

        try:
            cached = False
            if(self.cache.get(cacheId)):
                data = True
                cached = True
                log.info('Getting RSS from cache: %s.' % cacheId)
            else:
                log.info('Searching: %s' % url)
                data = self.urlopen(url)
                self.cache[cacheId] = {
                    'time': time.time()
                }

        except (IOError, URLError):
            log.error('Failed to open %s.' % url)
            return results

        if data:
            try:
                try:
                    if cached:
                        xml = self.cache[cacheId]['xml']
                    else:
                        xml = self.getItems(data)
                        self.cache[cacheId]['xml'] = xml
                except:
                    log.debug('No valid xml or too many requests from %s.' % self.name)
                    return results

                results = []
                for nzb in xml:

                    for item in nzb:
                        if item.attrib.get('name') == 'size':
                            size = item.attrib.get('value')
                        elif item.attrib.get('name') == 'usenetdate':
                            date = item.attrib.get('value')

                    new = self.feedItem()
                    new.id = self.gettextelement(nzb, "guid").split('/')[-1:].pop()
                    new.type = 'nzb'
                    new.name = self.gettextelement(nzb, "title")
                    new.date = int(time.mktime(parse(date).timetuple()))
                    new.size = int(size) / 1024 / 1024
                    new.url = self.downloadLink(new.id)
                    new.detailUrl = self.detailLink(new.id)
                    new.content = self.gettextelement(nzb, "description")
                    new.score = self.calcScore(new, movie)

                    if new.date > time.time() - (int(self.config.get('NZB', 'retention')) * 24 * 60 * 60) and self.isCorrectMovie(new, movie, type, imdbResults = True, singleCategory = singleCat):
                        results.append(new)
                        log.info('Found: %s' % new.name)

                return results
            except SyntaxError:
                log.error('Failed to parse XML response from Newznab')
                return False

        return results

    def isEnabled(self):
        return NZBProvider.isEnabled(self) and self.conf('enabled') and self.conf('host') and self.conf('apikey')

    def getApiExt(self):
        return '&apikey=%s' % self.conf('apikey')

    def downloadLink(self, id):
        return self.getUrl(self.downloadUrl) % (id, self.getApiExt())

    def detailLink(self, id):
        return self.getUrl(self.detailUrl) % id
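All of these providers lean on the same self.cache / cleanCache() pattern: a dict keyed by cacheId holding a 'time' stamp and the parsed 'xml'. The base-class side is not in this diff, so the following is only a minimal sketch; the attribute names are taken from the calls above, while the expiry timeout is an assumption.

import time

# Hypothetical sketch of the provider-side cache the search() methods use;
# the real implementation sits in the provider base class outside this diff.
class CachedProviderMixin(object):

    cache_timeout = 15 * 60  # assumed: keep parsed feeds for 15 minutes

    def __init__(self):
        self.cache = {}

    def cleanCache(self):
        # Drop entries whose 'time' stamp is older than the timeout, so a
        # later search with the same cacheId refetches and reparses the feed.
        now = time.time()
        for cache_id in list(self.cache.keys()):
            if self.cache[cache_id]['time'] < now - self.cache_timeout:
                del self.cache[cache_id]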
@@ -1,124 +0,0 @@
from app.config.cplog import CPLog
from app.lib.provider.yarr.base import nzbBase
from dateutil.parser import parse
from urllib import urlencode
from urllib2 import URLError
import time

log = CPLog(__name__)

class nzbMatrix(nzbBase):
    """Api for NZBMatrix"""

    name = 'NZBMatrix'
    downloadUrl = 'https://api.nzbmatrix.com/v1.1/download.php?id=%s%s'
    detailUrl = 'https://nzbmatrix.com/nzb-details.php?id=%s&hit=1'
    searchUrl = 'http://rss.nzbmatrix.com/rss.php'

    catIds = {
        42: ['720p', '1080p'],
        2: ['cam', 'ts', 'dvdrip', 'tc', 'r5', 'scr'],
        54: ['brrip'],
        1: ['dvdr']
    }
    catBackupId = 2

    timeBetween = 10 # Seconds

    def __init__(self, config):
        log.info('Using NZBMatrix provider')

        self.config = config

    def conf(self, option):
        return self.config.get('NZBMatrix', option)

    def enabled(self):
        return self.conf('enabled') and self.config.get('NZB', 'enabled') and self.conf('username') and self.conf('apikey')

    def find(self, movie, quality, type, retry = False):

        self.cleanCache();

        results = []
        if not self.enabled() or not self.isAvailable(self.searchUrl):
            return results

        catId = self.getCatId(type)
        arguments = urlencode({
            'term': movie.imdb,
            'subcat': catId,
            'username': self.conf('username'),
            'apikey': self.conf('apikey'),
            'searchin': 'weblink',
            'english': 1 if self.conf('english') else 0,
        })
        url = "%s?%s" % (self.searchUrl, arguments)
        cacheId = str(movie.imdb) + '-' + str(catId)
        singleCat = (len(self.catIds.get(catId)) == 1 and catId != self.catBackupId)

        try:
            cached = False
            if(self.cache.get(cacheId)):
                data = True
                cached = True
                log.info('Getting RSS from cache: %s.' % cacheId)
            else:
                log.info('Searching: %s' % url)
                data = self.urlopen(url)
                self.cache[cacheId] = {
                    'time': time.time()
                }

        except (IOError, URLError):
            log.error('Failed to open %s.' % url)
            return results

        if data:
            try:
                try:
                    if cached:
                        xml = self.cache[cacheId]['xml']
                    else:
                        xml = self.getItems(data)
                        self.cache[cacheId]['xml'] = xml
                except:
                    log.debug('No valid xml or to many requests.. You never know with %s.' % self.name)
                    return results

                for nzb in xml:

                    title = self.gettextelement(nzb, "title")
                    if 'error' in title.lower(): continue

                    id = int(self.gettextelement(nzb, "link").split('&')[0].partition('id=')[2])
                    size = self.gettextelement(nzb, "description").split('<br /><b>')[2].split('> ')[1]
                    date = str(self.gettextelement(nzb, "description").split('<br /><b>')[3].partition('Added:</b> ')[2])

                    new = self.feedItem()
                    new.id = id
                    new.type = 'nzb'
                    new.name = title
                    new.date = int(time.mktime(parse(date).timetuple()))
                    new.size = self.parseSize(size)
                    new.url = self.downloadLink(id)
                    new.detailUrl = self.detailLink(id)
                    new.content = self.gettextelement(nzb, "description")
                    new.score = self.calcScore(new, movie)
                    new.checkNZB = True

                    if new.date > time.time() - (int(self.config.get('NZB', 'retention')) * 24 * 60 * 60):
                        if self.isCorrectMovie(new, movie, type, imdbResults = True, singleCategory = singleCat):
                            results.append(new)
                            log.info('Found: %s' % new.name)
                    else:
                        log.info('Found outside retention: %s' % new.name)

                return results
            except SyntaxError:
                log.error('Failed to parse XML response from NZBMatrix.com')

        return results

    def getApiExt(self):
        return '&username=%s&apikey=%s' % (self.conf('username'), self.conf('apikey'))
@@ -1,8 +1,122 @@
from couchpotato.core.event import addEvent
from couchpotato.core.logger import CPLog
from couchpotato.core.providers.base import NZBProvider
from dateutil.parser import parse
from urllib import urlencode
from urllib2 import URLError
import time

log = CPLog(__name__)


class Nzbs(NZBProvider):

    urls = {
        'download': 'http://nzbs.org/index.php?action=getnzb&nzbid=%s%s',
        'nfo': 'http://nzbs.org/index.php?action=view&nzbid=%s&nfo=1',
        'detail': 'http://nzbs.org/index.php?action=view&nzbid=%s',
        'api': 'http://nzbs.org/rss.php',
    }

    cat_ids = [
        ([4], ['720p', '1080p']),
        ([2], ['cam', 'ts', 'dvdrip', 'tc', 'brrip', 'r5', 'scr']),
        ([9], ['dvdr']),
    ]
    cat_backup_id = 't2'

    time_between_searches = 3 # Seconds

    def __init__(self):
        addEvent('provider.nzb.search', self.search)

    def search(self, movie, quality, retry = False):

        self.cleanCache()

        results = []
        if not self.enabled() or not self.isAvailable(self.apiUrl + '?test' + self.getApiExt()):
            return results

        catId = self.getCatId(type)
        arguments = urlencode({
            'action': 'search',
            'q': self.toSearchString(movie.name),
            'catid': catId,
            'i': self.conf('id'),
            'h': self.conf('key'),
            'age': self.config.get('NZB', 'retention')
        })
        url = "%s?%s" % (self.apiUrl, arguments)
        cacheId = str(movie.imdb) + '-' + str(catId)
        singleCat = (len(self.catIds.get(catId)) == 1 and catId != self.catBackupId)

        try:
            cached = False
            if(self.cache.get(cacheId)):
                data = True
                cached = True
                log.info('Getting RSS from cache: %s.' % cacheId)
            else:
                log.info('Searching: %s' % url)
                data = self.urlopen(url)
                self.cache[cacheId] = {
                    'time': time.time()
                }
        except (IOError, URLError):
            log.error('Failed to open %s.' % url)
            return results

        if data:
            log.debug('Parsing NZBs.org RSS.')
            try:
                try:
                    if cached:
                        xml = self.cache[cacheId]['xml']
                    else:
                        xml = self.getItems(data)
                        self.cache[cacheId]['xml'] = xml
                except:
                    if not retry:
                        log.error('No valid xml, too many requests? Try again in 15sec.')
                        time.sleep(15)
                        return self.search(movie, quality, retry = True)
                    else:
                        log.error('Failed again.. disable %s for 15min.' % self.name)
                        self.available = False
                        return results

                for nzb in xml:

                    id = int(self.gettextelement(nzb, "link").partition('nzbid=')[2])

                    size = self.gettextelement(nzb, "description").split('</a><br />')[1].split('">')[1]

                    new = self.feedItem()
                    new.id = id
                    new.type = 'nzb'
                    new.name = self.gettextelement(nzb, "title")
                    new.date = int(time.mktime(parse(self.gettextelement(nzb, "pubDate")).timetuple()))
                    new.size = self.parseSize(size)
                    new.url = self.downloadLink(id)
                    new.detailUrl = self.detailLink(id)
                    new.content = self.gettextelement(nzb, "description")
                    new.score = self.calcScore(new, movie)

                    if self.isCorrectMovie(new, movie, type, singleCategory = singleCat):
                        results.append(new)
                        log.info('Found: %s' % new.name)

                return results
            except SyntaxError:
                log.error('Failed to parse XML response from NZBs.org')
                return False

        return results

    def isEnabled(self):
        return NZBProvider.isEnabled(self) and self.conf('enabled') and self.conf('id') and self.conf('key')

    def getApiExt(self):
        return '&i=%s&h=%s' % (self.conf('id'), self.conf('key'))
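Each provider in this diff registers its search() under the same 'provider.nzb.search' event, so the searcher side (not included here) can fan one fireEvent out to Newzbin, Newznab and Nzbs at once. A minimal sketch of such a caller follows; the function name and the assumption that fireEvent returns the list of per-handler results are illustrative, not confirmed by this diff.

from couchpotato.core.event import fireEvent

# Hypothetical caller of the provider search event (names are illustrative).
def searchAllProviders(movie, quality):
    # Assumption: fireEvent returns one result list per registered handler.
    provider_results = fireEvent('provider.nzb.search', movie, quality) or []
    merged = []
    for results in provider_results:
        if results:
            merged.extend(results)
    # Highest-scoring releases first (score is set via calcScore above).
    return sorted(merged, key = lambda nzb: nzb.score, reverse = True)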