Browse Source
Conflicts: couchpotato/core/helpers/variable.py couchpotato/core/media/_base/searcher/main.py couchpotato/core/media/movie/searcher/main.py couchpotato/core/plugins/quality/main.py couchpotato/core/plugins/release/main.py couchpotato/core/plugins/renamer/main.pypull/2523/head
265 changed files with 27500 additions and 10330 deletions
@ -1,39 +0,0 @@ |
|||
from couchpotato.core.helpers.encoding import toUnicode |
|||
from couchpotato.core.logger import CPLog |
|||
from couchpotato.core.notifications.base import Notification |
|||
import base64 |
|||
import json |
|||
import traceback |
|||
|
|||
log = CPLog(__name__) |
|||
|
|||
|
|||
class Notifo(Notification):
    """Push notifications via the Notifo service.

    Sends the message with HTTP Basic auth built from the configured
    username and API key.
    """

    # Notifo "send notification" REST endpoint.
    url = 'https://api.notifo.com/v1/send_notification'

    def notify(self, message = '', data = None, listener = None):
        """Send *message* to Notifo; return True on success, False otherwise."""
        if not data: data = {}

        try:
            params = {
                'label': self.default_title,
                'msg': toUnicode(message),
            }

            # Basic auth: "username:api_key", base64 encoded.
            # encodestring appends a trailing newline, hence the [:-1].
            headers = {
                'Authorization': "Basic %s" % base64.encodestring('%s:%s' % (self.conf('username'), self.conf('api_key')))[:-1]
            }

            handle = self.urlopen(self.url, params = params, headers = headers)
            result = json.loads(handle)

            if result['status'] != 'success' or result['response_message'] != 'OK':
                # Give the failure a message instead of a bare "raise Exception".
                raise Exception('Notifo replied with a non-success status')

        except Exception:
            # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
            # are not swallowed here.
            log.error('Notification failed: %s', traceback.format_exc())
            return False

        log.info('Notifo notification successful.')
        return True
@ -0,0 +1,85 @@ |
|||
import json |
|||
from couchpotato import CPLog |
|||
from couchpotato.core.event import addEvent |
|||
from couchpotato.core.helpers.encoding import tryUrlencode |
|||
import requests |
|||
|
|||
log = CPLog(__name__) |
|||
|
|||
|
|||
class PlexClientProtocol(object):
    """Base class for the protocols used to talk to Plex clients.

    Subclasses implement notify() for one specific client protocol and
    register themselves on the shared 'notify.plex.notifyClient' event.
    """

    def __init__(self, plex):
        # Parent Plex notification plugin; provides urlopen/config access.
        self.plex = plex

        addEvent('notify.plex.notifyClient', self.notify)

    def notify(self, client, message):
        """Send *message* to *client*; must be overridden by subclasses."""
        raise NotImplementedError()
|||
|
|||
|
|||
class PlexClientHTTP(PlexClientProtocol): |
|||
def request(self, command, client): |
|||
url = 'http://%s:%s/xbmcCmds/xbmcHttp/?%s' % ( |
|||
client['address'], |
|||
client['port'], |
|||
tryUrlencode(command) |
|||
) |
|||
|
|||
headers = {} |
|||
|
|||
try: |
|||
self.plex.urlopen(url, headers = headers, timeout = 3, show_error = False) |
|||
except Exception, err: |
|||
log.error("Couldn't sent command to Plex: %s", err) |
|||
return False |
|||
|
|||
return True |
|||
|
|||
def notify(self, client, message): |
|||
if client.get('protocol') != 'xbmchttp': |
|||
return None |
|||
|
|||
data = { |
|||
'command': 'ExecBuiltIn', |
|||
'parameter': 'Notification(CouchPotato, %s)' % message |
|||
} |
|||
|
|||
return self.request(data, client) |
|||
|
|||
|
|||
class PlexClientJSON(PlexClientProtocol): |
|||
def request(self, method, params, client): |
|||
log.debug('sendJSON("%s", %s, %s)', (method, params, client)) |
|||
url = 'http://%s:%s/jsonrpc' % ( |
|||
client['address'], |
|||
client['port'] |
|||
) |
|||
|
|||
headers = { |
|||
'Content-Type': 'application/json' |
|||
} |
|||
|
|||
request = { |
|||
'id': 1, |
|||
'jsonrpc': '2.0', |
|||
'method': method, |
|||
'params': params |
|||
} |
|||
|
|||
try: |
|||
requests.post(url, headers = headers, timeout = 3, data = json.dumps(request)) |
|||
except Exception, err: |
|||
log.error("Couldn't sent command to Plex: %s", err) |
|||
return False |
|||
|
|||
return True |
|||
|
|||
def notify(self, client, message): |
|||
if client.get('protocol') not in ['xbmcjson', 'plex']: |
|||
return None |
|||
|
|||
params = { |
|||
'title': 'CouchPotato', |
|||
'message': message |
|||
} |
|||
return self.request('GUI.ShowNotification', params, client) |
@ -0,0 +1,114 @@ |
|||
from datetime import timedelta, datetime |
|||
from couchpotato.core.helpers.variable import cleanHost |
|||
from couchpotato import CPLog |
|||
from urlparse import urlparse |
|||
import traceback |
|||
|
|||
|
|||
try: |
|||
import xml.etree.cElementTree as etree |
|||
except ImportError: |
|||
import xml.etree.ElementTree as etree |
|||
|
|||
log = CPLog(__name__) |
|||
|
|||
|
|||
class PlexServer(object):
    """Thin wrapper around a Plex Media Server used by the Plex notifier."""

    def __init__(self, plex):
        # Parent Plex plugin; provides conf() and urlopen().
        self.plex = plex

        # Cache of discovered clients, keyed by lower-cased client name.
        self.clients = {}
        self.last_clients_update = None

    def staleClients(self):
        """Return True when the client cache is older than 15 minutes (or empty)."""
        if not self.last_clients_update:
            return True

        return self.last_clients_update + timedelta(minutes=15) < datetime.now()

    def request(self, path, data_type='xml'):
        """GET *path* from the media server; parsed XML root, raw text, or None."""
        if not self.plex.conf('media_server'):
            log.warning("Plex media server hostname is required")
            return None

        if path.startswith('/'):
            path = path[1:]

        # Plex Media Server listens on port 32400 by default.
        data = self.plex.urlopen('%s/%s' % (
            self.createHost(self.plex.conf('media_server'), port = 32400),
            path
        ))

        if data_type == 'xml':
            return etree.fromstring(data)
        else:
            return data

    def updateClients(self, client_names):
        """Rebuild self.clients for the (lower-case) names in *client_names*.

        NOTE(review): names that are found are removed from the caller's
        *client_names* list in place — confirm callers expect that mutation.
        """
        log.info('Searching for clients on Plex Media Server')

        self.clients = {}

        result = self.request('clients')
        if not result:
            return

        found_clients = [
            c for c in result.findall('Server')
            if c.get('name') and c.get('name').lower() in client_names
        ]

        # Store client details in cache
        for client in found_clients:
            name = client.get('name').lower()

            self.clients[name] = {
                'name': client.get('name'),
                'found': True,
                'address': client.get('address'),
                'port': client.get('port'),
                'protocol': client.get('protocol', 'xbmchttp')
            }

            client_names.remove(name)

        # Store dummy info for missing clients
        for client_name in client_names:
            self.clients[client_name] = {
                'found': False
            }

        if len(client_names) > 0:
            log.debug('Unable to find clients: %s', ', '.join(client_names))

        self.last_clients_update = datetime.now()

    def refresh(self, section_types=None):
        """Trigger a library refresh for sections of the given types.

        Returns True on success, False when the server cannot be reached.
        """
        if not section_types:
            section_types = ['movie']

        sections = self.request('library/sections')

        try:
            for section in sections.findall('Directory'):
                if section.get('type') not in section_types:
                    continue

                self.request('library/sections/%s/refresh' % section.get('key'), 'text')
        except:
            log.error('Plex library update failed for %s, Media Server not running: %s',
                      (self.plex.conf('media_server'), traceback.format_exc(1)))
            return False

        return True

    def createHost(self, host, port = None):
        """Normalize *host* to a clean URL, appending *port* when none is set."""

        h = cleanHost(host)
        p = urlparse(h)
        h = h.rstrip('/')

        if port and not p.port:
            h += ':%s' % port

        return h
@ -0,0 +1,86 @@ |
|||
from couchpotato.core.helpers.encoding import toUnicode |
|||
from couchpotato.core.helpers.variable import tryInt |
|||
from couchpotato.core.logger import CPLog |
|||
from couchpotato.core.notifications.base import Notification |
|||
import base64 |
|||
import json |
|||
|
|||
log = CPLog(__name__) |
|||
|
|||
|
|||
class Pushbullet(Notification): |
|||
|
|||
url = 'https://api.pushbullet.com/api/%s' |
|||
|
|||
def notify(self, message = '', data = None, listener = None): |
|||
if not data: data = {} |
|||
|
|||
devices = self.getDevices() |
|||
if devices is None: |
|||
return False |
|||
|
|||
# Get all the device IDs linked to this user |
|||
if not len(devices): |
|||
response = self.request('devices') |
|||
if not response: |
|||
return False |
|||
|
|||
devices += [device.get('id') for device in response['devices']] |
|||
|
|||
successful = 0 |
|||
for device in devices: |
|||
response = self.request( |
|||
'pushes', |
|||
cache = False, |
|||
device_id = device, |
|||
type = 'note', |
|||
title = self.default_title, |
|||
body = toUnicode(message) |
|||
) |
|||
|
|||
if response: |
|||
successful += 1 |
|||
else: |
|||
log.error('Unable to push notification to Pushbullet device with ID %s' % device) |
|||
|
|||
return successful == len(devices) |
|||
|
|||
def getDevices(self): |
|||
devices = [d.strip() for d in self.conf('devices').split(',')] |
|||
|
|||
# Remove empty items |
|||
devices = [d for d in devices if len(d)] |
|||
|
|||
# Break on any ids that aren't integers |
|||
valid_devices = [] |
|||
|
|||
for device_id in devices: |
|||
d = tryInt(device_id, None) |
|||
|
|||
if not d: |
|||
log.error('Device ID "%s" is not valid', device_id) |
|||
return None |
|||
|
|||
valid_devices.append(d) |
|||
|
|||
return valid_devices |
|||
|
|||
def request(self, method, cache = True, **kwargs): |
|||
try: |
|||
base64string = base64.encodestring('%s:' % self.conf('api_key'))[:-1] |
|||
|
|||
headers = { |
|||
"Authorization": "Basic %s" % base64string |
|||
} |
|||
|
|||
if cache: |
|||
return self.getJsonData(self.url % method, headers = headers, params = kwargs) |
|||
else: |
|||
data = self.urlopen(self.url % method, headers = headers, params = kwargs) |
|||
return json.loads(data) |
|||
|
|||
except Exception, ex: |
|||
log.error('Pushbullet request failed') |
|||
log.debug(ex) |
|||
|
|||
return None |
@ -0,0 +1,52 @@ |
|||
from .main import Xmpp |
|||
|
|||
def start():
    """Plugin entry point: instantiate the XMPP notifier."""
    return Xmpp()
|||
|
|||
# Settings shown in the CouchPotato notifications tab for the XMPP plugin.
config = [{
    'name': 'xmpp',
    'groups': [
        {
            'tab': 'notifications',
            'list': 'notification_providers',
            'name': 'xmpp',
            'label': 'XMPP',
            # Fixed: key was misspelled as 'description`' (stray backtick),
            # which hid the description in the settings UI.
            'description': 'for Jabber, Hangouts (Google Talk), AIM...',
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                },
                {
                    'name': 'username',
                    'description': 'User sending the message. For Hangouts, e-mail of a single-step authentication Google account.',
                },
                {
                    'name': 'password',
                    'type': 'Password',
                },
                {
                    'name': 'hostname',
                    'default': 'talk.google.com',
                },
                {
                    'name': 'to',
                    'description': 'Username (or e-mail for Hangouts) of the person to send the messages to.',
                },
                {
                    'name': 'port',
                    'type': 'int',
                    'default': 5222,
                },
                {
                    'name': 'on_snatch',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Also send message when movie is snatched.',
                },
            ],
        }
    ],
}]
@ -0,0 +1,43 @@ |
|||
from couchpotato.core.logger import CPLog |
|||
from couchpotato.core.notifications.base import Notification |
|||
from time import sleep |
|||
import traceback |
|||
import xmpp |
|||
|
|||
log = CPLog(__name__) |
|||
|
|||
|
|||
class Xmpp(Notification):
    """Send notifications over XMPP (Jabber, Hangouts/Google Talk, AIM...)."""

    def notify(self, message = '', data = None, listener = None):
        """Connect, authenticate and deliver *message* as a chat message.

        Returns True when the message was handed to the server, False on
        any failure (connection, authentication, or unexpected error).
        """
        if not data: data = {}

        try:
            jid = xmpp.protocol.JID(self.conf('username'))
            client = xmpp.Client(jid.getDomain(), debug = [])

            # Connect
            if not client.connect(server = (self.conf('hostname'), self.conf('port'))):
                log.error('XMPP failed: Connection to server failed.')
                return False

            # Authenticate
            if not client.auth(jid.getNode(), self.conf('password'), resource = jid.getResource()):
                log.error('XMPP failed: Failed to authenticate.')
                return False

            # Send message
            client.send(xmpp.protocol.Message(to = self.conf('to'), body = message, typ = 'chat'))

            # Disconnect
            # some older servers will not send the message if you disconnect immediately after sending
            sleep(1)
            client.disconnect()

            log.info('XMPP notifications sent.')
            return True

        except:
            log.error('XMPP failed: %s', traceback.format_exc())

            return False
@ -0,0 +1,88 @@ |
|||
from bs4 import BeautifulSoup |
|||
from couchpotato.core.helpers.encoding import tryUrlencode, toUnicode |
|||
from couchpotato.core.helpers.variable import tryInt |
|||
from couchpotato.core.logger import CPLog |
|||
from couchpotato.core.providers.torrent.base import TorrentProvider |
|||
import traceback |
|||
|
|||
log = CPLog(__name__) |
|||
|
|||
class BiTHDTV(TorrentProvider):
    """Torrent provider for the BiT-HDTV private tracker."""

    urls = {
        'test' : 'http://www.bit-hdtv.com/',
        'login' : 'http://www.bit-hdtv.com/takelogin.php',
        'login_check': 'http://www.bit-hdtv.com/messages.php',
        'detail' : 'http://www.bit-hdtv.com/details.php?id=%s',
        'search' : 'http://www.bit-hdtv.com/torrents.php?',
    }

    # Searches for movies only - BiT-HDTV's subcategory and resolution search filters appear to be broken
    cat_id_movies = 7

    http_time_between_calls = 1 #seconds

    def _searchOnTitle(self, title, movie, quality, results):
        """Search the tracker for *title* and append result dicts to *results*."""

        arguments = tryUrlencode({
            'search': '%s %s' % (title.replace(':', ''), movie['library']['year']),
            'cat': self.cat_id_movies
        })

        url = "%s&%s" % (self.urls['search'], arguments)

        data = self.getHTMLData(url, opener = self.login_opener)

        if data:
            # Remove BiT-HDTV's output garbage so outdated BS4 versions successfully parse the HTML
            split_data = data.partition('-->')
            if '## SELECT COUNT(' in split_data[0]:
                data = split_data[2]

            html = BeautifulSoup(data)

            try:
                # Layout-dependent scraping: results live in a 750px-wide table.
                result_table = html.find('table', attrs = {'width' : '750', 'class' : ''})
                if result_table is None:
                    return

                # First row is the header; skip it.
                entries = result_table.find_all('tr')
                for result in entries[1:]:

                    cells = result.find_all('td')
                    link = cells[2].find('a')
                    torrent_id = link['href'].replace('/details.php?id=', '')

                    results.append({
                        'id': torrent_id,
                        'name': link.contents[0].get_text(),
                        'url': cells[0].find('a')['href'],
                        'detail_url': self.urls['detail'] % torrent_id,
                        'size': self.parseSize(cells[6].get_text()),
                        'seeders': tryInt(cells[8].string),
                        'leechers': tryInt(cells[9].string),
                        'get_more_info': self.getMoreInfo,
                    })

            except:
                log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))

    def getLoginParams(self):
        """Urlencoded credentials for the tracker login form."""
        return tryUrlencode({
            'username': self.conf('username'),
            'password': self.conf('password'),
        })

    def getMoreInfo(self, item):
        """Fetch (and cache) the torrent detail page; fill item['description']."""
        full_description = self.getCache('bithdtv.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
        html = BeautifulSoup(full_description)
        nfo_pre = html.find('table', attrs = {'class':'detail'})
        description = toUnicode(nfo_pre.text) if nfo_pre else ''

        item['description'] = description
        return item

    def loginSuccess(self, output):
        """Login worked if the returned page contains a logout link."""
        return 'logout.php' in output.lower()

    loginCheckSuccess = loginSuccess
@ -1,79 +0,0 @@ |
|||
from bs4 import BeautifulSoup |
|||
from couchpotato.core.helpers.encoding import simplifyString, tryUrlencode |
|||
from couchpotato.core.helpers.variable import tryInt |
|||
from couchpotato.core.logger import CPLog |
|||
from couchpotato.core.providers.torrent.base import TorrentProvider |
|||
import traceback |
|||
|
|||
log = CPLog(__name__) |
|||
|
|||
|
|||
class SceneHD(TorrentProvider):
    """Torrent provider for the SceneHD private tracker."""

    urls = {
        'test': 'https://scenehd.org/',
        'login' : 'https://scenehd.org/takelogin.php',
        'login_check': 'https://scenehd.org/my.php',
        'detail': 'https://scenehd.org/details.php?id=%s',
        'search': 'https://scenehd.org/browse.php?ajax',
        'download': 'https://scenehd.org/download.php?id=%s',
    }

    http_time_between_calls = 1 #seconds

    def _searchOnTitle(self, title, movie, quality, results):
        """Search the tracker for *title* and append result dicts to *results*."""

        q = '"%s %s"' % (simplifyString(title), movie['library']['year'])
        arguments = tryUrlencode({
            'search': q,
        })
        url = "%s&%s" % (self.urls['search'], arguments)

        data = self.getHTMLData(url, opener = self.login_opener)

        if data:
            html = BeautifulSoup(data)

            try:
                # Layout-dependent scraping: result rows live in the 7th table
                # of the page; breaks if the site markup changes.
                resultsTable = html.find_all('table')[6]
                entries = resultsTable.find_all('tr')
                for result in entries[1:]:

                    all_cells = result.find_all('td')

                    detail_link = all_cells[2].find('a')
                    details = detail_link['href']
                    torrent_id = details.replace('details.php?id=', '')

                    # Leecher count may be wrapped in a link; fall back to the
                    # cell's plain text when it is not.
                    leechers = all_cells[11].find('a')
                    if leechers:
                        leechers = leechers.string
                    else:
                        leechers = all_cells[11].string

                    results.append({
                        'id': torrent_id,
                        'name': detail_link['title'],
                        'size': self.parseSize(all_cells[7].string),
                        'seeders': tryInt(all_cells[10].find('a').string),
                        'leechers': tryInt(leechers),
                        'url': self.urls['download'] % torrent_id,
                        'description': all_cells[1].find('a')['href'],
                    })

            except:
                log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))


    def getLoginParams(self):
        """Urlencoded credentials (plus SSL flag) for the login form."""
        return tryUrlencode({
            'username': self.conf('username'),
            'password': self.conf('password'),
            'ssl': 'yes',
        })

    def loginSuccess(self, output):
        """Login worked if the returned page contains a logout link."""
        return 'logout.php' in output.lower()

    loginCheckSuccess = loginSuccess
|||
|
@ -0,0 +1,6 @@ |
|||
from .main import Flickchart |
|||
|
|||
def start():
    """Plugin entry point: instantiate the Flickchart userscript provider."""
    return Flickchart()
|||
|
|||
config = [] |
@ -0,0 +1,30 @@ |
|||
from couchpotato.core.event import fireEvent |
|||
from couchpotato.core.logger import CPLog |
|||
from couchpotato.core.providers.userscript.base import UserscriptBase |
|||
import traceback |
|||
|
|||
log = CPLog(__name__) |
|||
|
|||
|
|||
class Flickchart(UserscriptBase):
    """Userscript provider that resolves Flickchart movie pages to searches."""

    includes = ['http://www.flickchart.com/movie/*']

    def getMovie(self, url):
        """Fetch *url*, extract title/year from the page <title>, run a search.

        Returns the search result, or None when the page cannot be fetched
        or parsed.
        """

        try:
            data = self.getUrl(url)
        except:
            return

        try:
            start = data.find('<title>')
            end = data.find('</title>', start)
            # Page titles look like "Movie Name - SiteName"; split from the
            # right so hyphenated movie titles (e.g. "Spider-Man") are not
            # truncated at their first hyphen, as split('-') would do.
            page_title = data[start + len('<title>'):end].strip().rsplit('-', 1)

            year_name = fireEvent('scanner.name_year', page_title[0], single = True)

            return self.search(**year_name)
        except:
            log.error('Failed parsing page for title and year: %s', traceback.format_exc())
|||
|
@ -0,0 +1,56 @@ |
|||
/* |
|||
--- |
|||
|
|||
script: Array.stableSort.js |
|||
|
|||
description: Add a stable sort algorithm for all browsers |
|||
|
|||
license: MIT-style license. |
|||
|
|||
authors: |
|||
- Yorick Sijsling |
|||
|
|||
requires: |
|||
core/1.3: '*' |
|||
|
|||
provides: |
|||
- [Array.stableSort, Array.mergeSort] |
|||
|
|||
... |
|||
*/ |
|||
|
|||
(function() {

	// Default comparator: plain ascending order.
	var defaultSortFunction = function(a, b) {
		return a > b ? 1 : (a < b ? -1 : 0);
	}

	// Extends Array.prototype via MooTools' implement().
	Array.implement({

		// Sort that preserves the relative order of equal elements.
		stableSort: function(compare) {
			// I would love some real feature recognition. Problem is that an
			// unstable algorithm sometimes/often gives the same result as a
			// stable one, so browsers with unstable native sorts are listed
			// explicitly and routed to mergeSort instead.
			return (Browser.chrome || Browser.firefox2 || Browser.opera9) ? this.mergeSort(compare) : this.sort(compare);
		},

		// In-place merge sort; returns this. 'token' is unused.
		mergeSort: function(compare, token) {
			compare = compare || defaultSortFunction;
			if (this.length > 1) {
				// Split and sort both parts
				var right = this.splice(Math.floor(this.length / 2)).mergeSort(compare);
				var left = this.splice(0).mergeSort(compare); // 'this' is now empty.

				// Merge parts together
				while (left.length > 0 || right.length > 0) {
					this.push(
						right.length === 0 ? left.shift()
						: left.length === 0 ? right.shift()
						: compare(left[0], right[0]) > 0 ? right.shift()
						: left.shift());
				}
			}
			return this;
		}

	});
})();
|||
|
@ -0,0 +1,955 @@ |
|||
/*global setImmediate: false, setTimeout: false, console: false */ |
|||
(function () { |
|||
|
|||
var async = {}; |
|||
|
|||
// global on the server, window in the browser
|
|||
var root, previous_async; |
|||
|
|||
root = this; |
|||
if (root != null) { |
|||
previous_async = root.async; |
|||
} |
|||
|
|||
async.noConflict = function () { |
|||
root.async = previous_async; |
|||
return async; |
|||
}; |
|||
|
|||
function only_once(fn) { |
|||
var called = false; |
|||
return function() { |
|||
if (called) throw new Error("Callback was already called."); |
|||
called = true; |
|||
fn.apply(root, arguments); |
|||
} |
|||
} |
|||
|
|||
//// cross-browser compatiblity functions ////
|
|||
|
|||
var _each = function (arr, iterator) { |
|||
if (arr.forEach) { |
|||
return arr.forEach(iterator); |
|||
} |
|||
for (var i = 0; i < arr.length; i += 1) { |
|||
iterator(arr[i], i, arr); |
|||
} |
|||
}; |
|||
|
|||
var _map = function (arr, iterator) { |
|||
if (arr.map) { |
|||
return arr.map(iterator); |
|||
} |
|||
var results = []; |
|||
_each(arr, function (x, i, a) { |
|||
results.push(iterator(x, i, a)); |
|||
}); |
|||
return results; |
|||
}; |
|||
|
|||
var _reduce = function (arr, iterator, memo) { |
|||
if (arr.reduce) { |
|||
return arr.reduce(iterator, memo); |
|||
} |
|||
_each(arr, function (x, i, a) { |
|||
memo = iterator(memo, x, i, a); |
|||
}); |
|||
return memo; |
|||
}; |
|||
|
|||
var _keys = function (obj) { |
|||
if (Object.keys) { |
|||
return Object.keys(obj); |
|||
} |
|||
var keys = []; |
|||
for (var k in obj) { |
|||
if (obj.hasOwnProperty(k)) { |
|||
keys.push(k); |
|||
} |
|||
} |
|||
return keys; |
|||
}; |
|||
|
|||
//// exported async module functions ////
|
|||
|
|||
//// nextTick implementation with browser-compatible fallback ////
|
|||
if (typeof process === 'undefined' || !(process.nextTick)) { |
|||
if (typeof setImmediate === 'function') { |
|||
async.nextTick = function (fn) { |
|||
// not a direct alias for IE10 compatibility
|
|||
setImmediate(fn); |
|||
}; |
|||
async.setImmediate = async.nextTick; |
|||
} |
|||
else { |
|||
async.nextTick = function (fn) { |
|||
setTimeout(fn, 0); |
|||
}; |
|||
async.setImmediate = async.nextTick; |
|||
} |
|||
} |
|||
else { |
|||
async.nextTick = process.nextTick; |
|||
if (typeof setImmediate !== 'undefined') { |
|||
async.setImmediate = setImmediate; |
|||
} |
|||
else { |
|||
async.setImmediate = async.nextTick; |
|||
} |
|||
} |
|||
|
|||
async.each = function (arr, iterator, callback) { |
|||
callback = callback || function () {}; |
|||
if (!arr.length) { |
|||
return callback(); |
|||
} |
|||
var completed = 0; |
|||
_each(arr, function (x) { |
|||
iterator(x, only_once(function (err) { |
|||
if (err) { |
|||
callback(err); |
|||
callback = function () {}; |
|||
} |
|||
else { |
|||
completed += 1; |
|||
if (completed >= arr.length) { |
|||
callback(null); |
|||
} |
|||
} |
|||
})); |
|||
}); |
|||
}; |
|||
async.forEach = async.each; |
|||
|
|||
async.eachSeries = function (arr, iterator, callback) { |
|||
callback = callback || function () {}; |
|||
if (!arr.length) { |
|||
return callback(); |
|||
} |
|||
var completed = 0; |
|||
var iterate = function () { |
|||
iterator(arr[completed], function (err) { |
|||
if (err) { |
|||
callback(err); |
|||
callback = function () {}; |
|||
} |
|||
else { |
|||
completed += 1; |
|||
if (completed >= arr.length) { |
|||
callback(null); |
|||
} |
|||
else { |
|||
iterate(); |
|||
} |
|||
} |
|||
}); |
|||
}; |
|||
iterate(); |
|||
}; |
|||
async.forEachSeries = async.eachSeries; |
|||
|
|||
async.eachLimit = function (arr, limit, iterator, callback) { |
|||
var fn = _eachLimit(limit); |
|||
fn.apply(null, [arr, iterator, callback]); |
|||
}; |
|||
async.forEachLimit = async.eachLimit; |
|||
|
|||
var _eachLimit = function (limit) { |
|||
|
|||
return function (arr, iterator, callback) { |
|||
callback = callback || function () {}; |
|||
if (!arr.length || limit <= 0) { |
|||
return callback(); |
|||
} |
|||
var completed = 0; |
|||
var started = 0; |
|||
var running = 0; |
|||
|
|||
(function replenish () { |
|||
if (completed >= arr.length) { |
|||
return callback(); |
|||
} |
|||
|
|||
while (running < limit && started < arr.length) { |
|||
started += 1; |
|||
running += 1; |
|||
iterator(arr[started - 1], function (err) { |
|||
if (err) { |
|||
callback(err); |
|||
callback = function () {}; |
|||
} |
|||
else { |
|||
completed += 1; |
|||
running -= 1; |
|||
if (completed >= arr.length) { |
|||
callback(); |
|||
} |
|||
else { |
|||
replenish(); |
|||
} |
|||
} |
|||
}); |
|||
} |
|||
})(); |
|||
}; |
|||
}; |
|||
|
|||
|
|||
var doParallel = function (fn) { |
|||
return function () { |
|||
var args = Array.prototype.slice.call(arguments); |
|||
return fn.apply(null, [async.each].concat(args)); |
|||
}; |
|||
}; |
|||
var doParallelLimit = function(limit, fn) { |
|||
return function () { |
|||
var args = Array.prototype.slice.call(arguments); |
|||
return fn.apply(null, [_eachLimit(limit)].concat(args)); |
|||
}; |
|||
}; |
|||
var doSeries = function (fn) { |
|||
return function () { |
|||
var args = Array.prototype.slice.call(arguments); |
|||
return fn.apply(null, [async.eachSeries].concat(args)); |
|||
}; |
|||
}; |
|||
|
|||
|
|||
var _asyncMap = function (eachfn, arr, iterator, callback) { |
|||
var results = []; |
|||
arr = _map(arr, function (x, i) { |
|||
return {index: i, value: x}; |
|||
}); |
|||
eachfn(arr, function (x, callback) { |
|||
iterator(x.value, function (err, v) { |
|||
results[x.index] = v; |
|||
callback(err); |
|||
}); |
|||
}, function (err) { |
|||
callback(err, results); |
|||
}); |
|||
}; |
|||
async.map = doParallel(_asyncMap); |
|||
async.mapSeries = doSeries(_asyncMap); |
|||
async.mapLimit = function (arr, limit, iterator, callback) { |
|||
return _mapLimit(limit)(arr, iterator, callback); |
|||
}; |
|||
|
|||
var _mapLimit = function(limit) { |
|||
return doParallelLimit(limit, _asyncMap); |
|||
}; |
|||
|
|||
// reduce only has a series version, as doing reduce in parallel won't
|
|||
// work in many situations.
|
|||
async.reduce = function (arr, memo, iterator, callback) { |
|||
async.eachSeries(arr, function (x, callback) { |
|||
iterator(memo, x, function (err, v) { |
|||
memo = v; |
|||
callback(err); |
|||
}); |
|||
}, function (err) { |
|||
callback(err, memo); |
|||
}); |
|||
}; |
|||
// inject alias
|
|||
async.inject = async.reduce; |
|||
// foldl alias
|
|||
async.foldl = async.reduce; |
|||
|
|||
async.reduceRight = function (arr, memo, iterator, callback) { |
|||
var reversed = _map(arr, function (x) { |
|||
return x; |
|||
}).reverse(); |
|||
async.reduce(reversed, memo, iterator, callback); |
|||
}; |
|||
// foldr alias
|
|||
async.foldr = async.reduceRight; |
|||
|
|||
var _filter = function (eachfn, arr, iterator, callback) { |
|||
var results = []; |
|||
arr = _map(arr, function (x, i) { |
|||
return {index: i, value: x}; |
|||
}); |
|||
eachfn(arr, function (x, callback) { |
|||
iterator(x.value, function (v) { |
|||
if (v) { |
|||
results.push(x); |
|||
} |
|||
callback(); |
|||
}); |
|||
}, function (err) { |
|||
callback(_map(results.sort(function (a, b) { |
|||
return a.index - b.index; |
|||
}), function (x) { |
|||
return x.value; |
|||
})); |
|||
}); |
|||
}; |
|||
async.filter = doParallel(_filter); |
|||
async.filterSeries = doSeries(_filter); |
|||
// select alias
|
|||
async.select = async.filter; |
|||
async.selectSeries = async.filterSeries; |
|||
|
|||
var _reject = function (eachfn, arr, iterator, callback) { |
|||
var results = []; |
|||
arr = _map(arr, function (x, i) { |
|||
return {index: i, value: x}; |
|||
}); |
|||
eachfn(arr, function (x, callback) { |
|||
iterator(x.value, function (v) { |
|||
if (!v) { |
|||
results.push(x); |
|||
} |
|||
callback(); |
|||
}); |
|||
}, function (err) { |
|||
callback(_map(results.sort(function (a, b) { |
|||
return a.index - b.index; |
|||
}), function (x) { |
|||
return x.value; |
|||
})); |
|||
}); |
|||
}; |
|||
async.reject = doParallel(_reject); |
|||
async.rejectSeries = doSeries(_reject); |
|||
|
|||
var _detect = function (eachfn, arr, iterator, main_callback) { |
|||
eachfn(arr, function (x, callback) { |
|||
iterator(x, function (result) { |
|||
if (result) { |
|||
main_callback(x); |
|||
main_callback = function () {}; |
|||
} |
|||
else { |
|||
callback(); |
|||
} |
|||
}); |
|||
}, function (err) { |
|||
main_callback(); |
|||
}); |
|||
}; |
|||
async.detect = doParallel(_detect); |
|||
async.detectSeries = doSeries(_detect); |
|||
|
|||
async.some = function (arr, iterator, main_callback) { |
|||
async.each(arr, function (x, callback) { |
|||
iterator(x, function (v) { |
|||
if (v) { |
|||
main_callback(true); |
|||
main_callback = function () {}; |
|||
} |
|||
callback(); |
|||
}); |
|||
}, function (err) { |
|||
main_callback(false); |
|||
}); |
|||
}; |
|||
// any alias
|
|||
async.any = async.some; |
|||
|
|||
async.every = function (arr, iterator, main_callback) { |
|||
async.each(arr, function (x, callback) { |
|||
iterator(x, function (v) { |
|||
if (!v) { |
|||
main_callback(false); |
|||
main_callback = function () {}; |
|||
} |
|||
callback(); |
|||
}); |
|||
}, function (err) { |
|||
main_callback(true); |
|||
}); |
|||
}; |
|||
// all alias
|
|||
async.all = async.every; |
|||
|
|||
async.sortBy = function (arr, iterator, callback) { |
|||
async.map(arr, function (x, callback) { |
|||
iterator(x, function (err, criteria) { |
|||
if (err) { |
|||
callback(err); |
|||
} |
|||
else { |
|||
callback(null, {value: x, criteria: criteria}); |
|||
} |
|||
}); |
|||
}, function (err, results) { |
|||
if (err) { |
|||
return callback(err); |
|||
} |
|||
else { |
|||
var fn = function (left, right) { |
|||
var a = left.criteria, b = right.criteria; |
|||
return a < b ? -1 : a > b ? 1 : 0; |
|||
}; |
|||
callback(null, _map(results.sort(fn), function (x) { |
|||
return x.value; |
|||
})); |
|||
} |
|||
}); |
|||
}; |
|||
|
|||
async.auto = function (tasks, callback) { |
|||
callback = callback || function () {}; |
|||
var keys = _keys(tasks); |
|||
if (!keys.length) { |
|||
return callback(null); |
|||
} |
|||
|
|||
var results = {}; |
|||
|
|||
var listeners = []; |
|||
var addListener = function (fn) { |
|||
listeners.unshift(fn); |
|||
}; |
|||
var removeListener = function (fn) { |
|||
for (var i = 0; i < listeners.length; i += 1) { |
|||
if (listeners[i] === fn) { |
|||
listeners.splice(i, 1); |
|||
return; |
|||
} |
|||
} |
|||
}; |
|||
var taskComplete = function () { |
|||
_each(listeners.slice(0), function (fn) { |
|||
fn(); |
|||
}); |
|||
}; |
|||
|
|||
addListener(function () { |
|||
if (_keys(results).length === keys.length) { |
|||
callback(null, results); |
|||
callback = function () {}; |
|||
} |
|||
}); |
|||
|
|||
_each(keys, function (k) { |
|||
var task = (tasks[k] instanceof Function) ? [tasks[k]]: tasks[k]; |
|||
var taskCallback = function (err) { |
|||
var args = Array.prototype.slice.call(arguments, 1); |
|||
if (args.length <= 1) { |
|||
args = args[0]; |
|||
} |
|||
if (err) { |
|||
var safeResults = {}; |
|||
_each(_keys(results), function(rkey) { |
|||
safeResults[rkey] = results[rkey]; |
|||
}); |
|||
safeResults[k] = args; |
|||
callback(err, safeResults); |
|||
// stop subsequent errors hitting callback multiple times
|
|||
callback = function () {}; |
|||
} |
|||
else { |
|||
results[k] = args; |
|||
async.setImmediate(taskComplete); |
|||
} |
|||
}; |
|||
var requires = task.slice(0, Math.abs(task.length - 1)) || []; |
|||
var ready = function () { |
|||
return _reduce(requires, function (a, x) { |
|||
return (a && results.hasOwnProperty(x)); |
|||
}, true) && !results.hasOwnProperty(k); |
|||
}; |
|||
if (ready()) { |
|||
task[task.length - 1](taskCallback, results); |
|||
} |
|||
else { |
|||
var listener = function () { |
|||
if (ready()) { |
|||
removeListener(listener); |
|||
task[task.length - 1](taskCallback, results); |
|||
} |
|||
}; |
|||
addListener(listener); |
|||
} |
|||
}); |
|||
}; |
|||
|
|||
// Run `tasks` (an array of functions) in order, passing each task's
// results as arguments to the next. Any error aborts the chain and is
// forwarded to `callback`, which is then replaced by a no-op so it can
// never fire twice.
async.waterfall = function (tasks, callback) {
    callback = callback || function () {};
    // Only plain arrays are supported (an object of tasks has no order).
    if (tasks.constructor !== Array) {
        var err = new Error('First argument to waterfall must be an array of functions');
        return callback(err);
    }
    if (!tasks.length) {
        return callback();
    }
    // Wrap a step from async.iterator so its callback forwards results to
    // the following step, or to the final callback at the end of the chain.
    var wrapIterator = function (iterator) {
        return function (err) {
            if (err) {
                callback.apply(null, arguments);
                // Stop later steps from invoking the final callback again.
                callback = function () {};
            }
            else {
                var args = Array.prototype.slice.call(arguments, 1);
                var next = iterator.next();
                if (next) {
                    args.push(wrapIterator(next));
                }
                else {
                    args.push(callback);
                }
                // Defer so long chains of synchronous tasks cannot
                // overflow the call stack.
                async.setImmediate(function () {
                    iterator.apply(null, args);
                });
            }
        };
    };
    wrapIterator(async.iterator(tasks))();
};
|||
|
|||
// Shared driver for parallel/parallelLimit; `eachfn` supplies the
// map/each strategy. Array input yields an array of results, object
// input yields an object keyed like `tasks`. A task callback invoked
// with a single value has it unwrapped from its one-element array.
var _parallel = function(eachfn, tasks, callback) {
    callback = callback || function () {};
    if (tasks.constructor === Array) {
        eachfn.map(tasks, function (fn, callback) {
            if (fn) {
                fn(function (err) {
                    var args = Array.prototype.slice.call(arguments, 1);
                    // Single result value is unwrapped; multiple stay as an array.
                    if (args.length <= 1) {
                        args = args[0];
                    }
                    callback.call(null, err, args);
                });
            }
        }, callback);
    }
    else {
        var results = {};
        eachfn.each(_keys(tasks), function (k, callback) {
            tasks[k](function (err) {
                var args = Array.prototype.slice.call(arguments, 1);
                if (args.length <= 1) {
                    args = args[0];
                }
                results[k] = args;
                callback(err);
            });
        }, function (err) {
            callback(err, results);
        });
    }
};
|||
|
|||
// Run all tasks concurrently; see _parallel for input/result shapes.
async.parallel = function (tasks, callback) {
    _parallel({ map: async.map, each: async.each }, tasks, callback);
};
|||
|
|||
// Like async.parallel, but with at most `limit` tasks in flight at once.
async.parallelLimit = function(tasks, limit, callback) {
    _parallel({ map: _mapLimit(limit), each: _eachLimit(limit) }, tasks, callback);
};
|||
|
|||
// Run tasks strictly one after another. Array input yields an array of
// results; object input yields an object keyed like `tasks`. A task
// callback invoked with a single value has it unwrapped from its array.
async.series = function (tasks, callback) {
    callback = callback || function () {};
    if (tasks.constructor === Array) {
        async.mapSeries(tasks, function (fn, callback) {
            if (fn) {
                fn(function (err) {
                    var args = Array.prototype.slice.call(arguments, 1);
                    // Single result value is unwrapped; multiple stay as an array.
                    if (args.length <= 1) {
                        args = args[0];
                    }
                    callback.call(null, err, args);
                });
            }
        }, callback);
    }
    else {
        var results = {};
        async.eachSeries(_keys(tasks), function (k, callback) {
            tasks[k](function (err) {
                var args = Array.prototype.slice.call(arguments, 1);
                if (args.length <= 1) {
                    args = args[0];
                }
                results[k] = args;
                callback(err);
            });
        }, function (err) {
            callback(err, results);
        });
    }
};
|||
|
|||
// Turn an array of task functions into a chain of callables. Invoking a
// link runs its task with the given arguments and returns the next link;
// each link also exposes `.next()`, which returns the following link or
// null once the chain is exhausted.
async.iterator = function (tasks) {
    function buildStep(pos) {
        function step() {
            if (tasks.length) {
                tasks[pos].apply(null, arguments);
            }
            return step.next();
        }
        step.next = function () {
            if (pos < tasks.length - 1) {
                return buildStep(pos + 1);
            }
            return null;
        };
        return step;
    }
    return buildStep(0);
};
|||
|
|||
// Partially apply `fn`: the returned function calls `fn` with the bound
// arguments followed by whatever arguments it receives at call time.
async.apply = function (fn) {
    var boundArgs = Array.prototype.slice.call(arguments, 1);
    return function () {
        var callArgs = boundArgs.concat(Array.prototype.slice.call(arguments));
        return fn.apply(null, callArgs);
    };
};
|||
|
|||
// Map `fn` over `arr` and concatenate the resulting arrays into one.
// Under the parallel strategy results are appended in completion order;
// the series strategy preserves input order.
var _concat = function (eachfn, arr, fn, callback) {
    var r = [];
    eachfn(arr, function (x, cb) {
        fn(x, function (err, y) {
            // A falsy result contributes nothing to the output.
            r = r.concat(y || []);
            cb(err);
        });
    }, function (err) {
        callback(err, r);
    });
};
async.concat = doParallel(_concat);
async.concatSeries = doSeries(_concat);
|||
|
|||
// Repeatedly call `iterator` while the synchronous `test` returns true;
// invoke `callback` when the test fails or the iterator reports an error.
async.whilst = function (test, iterator, callback) {
    if (!test()) {
        callback();
        return;
    }
    iterator(function (err) {
        if (err) {
            return callback(err);
        }
        async.whilst(test, iterator, callback);
    });
};
|||
|
|||
// Post-check variant of whilst: `iterator` runs at least once, and
// repeats while the synchronous `test` returns true.
async.doWhilst = function (iterator, test, callback) {
    iterator(function (err) {
        if (err) {
            return callback(err);
        }
        if (!test()) {
            return callback();
        }
        async.doWhilst(iterator, test, callback);
    });
};
|||
|
|||
// Repeatedly call `iterator` until the synchronous `test` returns true;
// invoke `callback` when it does, or when the iterator reports an error.
async.until = function (test, iterator, callback) {
    if (test()) {
        callback();
        return;
    }
    iterator(function (err) {
        if (err) {
            return callback(err);
        }
        async.until(test, iterator, callback);
    });
};
|||
|
|||
// Post-check variant of until: `iterator` runs at least once, and
// repeats until the synchronous `test` returns true.
async.doUntil = function (iterator, test, callback) {
    iterator(function (err) {
        if (err) {
            return callback(err);
        }
        if (test()) {
            return callback();
        }
        async.doUntil(iterator, test, callback);
    });
};
|||
|
|||
// Create a task queue processed by `worker` with at most `concurrency`
// tasks in flight at once (default 1). The returned queue exposes
// push/unshift to enqueue work and optional saturated/empty/drain hooks.
async.queue = function (worker, concurrency) {
    if (concurrency === undefined) {
        concurrency = 1;
    }
    // Enqueue one task or an array of tasks, at the front (pos truthy)
    // or the back, and schedule processing on the next tick.
    function _insert(q, data, pos, callback) {
        if(data.constructor !== Array) {
            data = [data];
        }
        _each(data, function(task) {
            var item = {
                data: task,
                // Per-task completion callback, if one was supplied.
                callback: typeof callback === 'function' ? callback : null
            };

            if (pos) {
                q.tasks.unshift(item);
            } else {
                q.tasks.push(item);
            }

            // Fires when the backlog reaches the concurrency limit.
            if (q.saturated && q.tasks.length === concurrency) {
                q.saturated();
            }
            async.setImmediate(q.process);
        });
    }

    // Count of tasks currently being worked on.
    var workers = 0;
    var q = {
        tasks: [],
        concurrency: concurrency,
        saturated: null,
        empty: null,
        drain: null,
        push: function (data, callback) {
            _insert(q, data, false, callback);
        },
        unshift: function (data, callback) {
            _insert(q, data, true, callback);
        },
        process: function () {
            if (workers < q.concurrency && q.tasks.length) {
                var task = q.tasks.shift();
                // `empty` fires when the last queued task is handed out.
                if (q.empty && q.tasks.length === 0) {
                    q.empty();
                }
                workers += 1;
                var next = function () {
                    workers -= 1;
                    if (task.callback) {
                        task.callback.apply(task, arguments);
                    }
                    // `drain` fires when nothing is queued or running.
                    if (q.drain && q.tasks.length + workers === 0) {
                        q.drain();
                    }
                    q.process();
                };
                // Guard against a worker invoking its callback twice.
                var cb = only_once(next);
                worker(task.data, cb);
            }
        },
        length: function () {
            return q.tasks.length;
        },
        running: function () {
            return workers;
        }
    };
    return q;
};
|||
|
|||
// Create a cargo: queued tasks are handed to `worker` in batches of up
// to `payload` items (or all queued items when payload is not a number).
// Only one batch is processed at a time.
async.cargo = function (worker, payload) {
    var working = false,
        tasks = [];

    var cargo = {
        tasks: tasks,
        payload: payload,
        saturated: null,
        empty: null,
        drain: null,
        push: function (data, callback) {
            if(data.constructor !== Array) {
                data = [data];
            }
            _each(data, function(task) {
                tasks.push({
                    data: task,
                    // Per-task completion callback, if one was supplied.
                    callback: typeof callback === 'function' ? callback : null
                });
                // Fires when the backlog reaches a full payload.
                if (cargo.saturated && tasks.length === payload) {
                    cargo.saturated();
                }
            });
            async.setImmediate(cargo.process);
        },
        process: function process() {
            // Only one batch at a time.
            if (working) return;
            if (tasks.length === 0) {
                if(cargo.drain) cargo.drain();
                return;
            }

            // Take the next batch; a non-numeric payload takes everything.
            var ts = typeof payload === 'number'
                ? tasks.splice(0, payload)
                : tasks.splice(0);

            var ds = _map(ts, function (task) {
                return task.data;
            });

            if(cargo.empty) cargo.empty();
            working = true;
            worker(ds, function () {
                working = false;

                // Relay the worker's arguments to each task's callback.
                var args = arguments;
                _each(ts, function (data) {
                    if (data.callback) {
                        data.callback.apply(null, args);
                    }
                });

                process();
            });
        },
        length: function () {
            return tasks.length;
        },
        running: function () {
            return working;
        }
    };
    return cargo;
};
|||
|
|||
// Build a console helper: the returned function runs `fn` and routes its
// callback results to console[name]; an error goes to console.error.
var _console_fn = function (name) {
    return function (fn) {
        var args = Array.prototype.slice.call(arguments, 1);
        fn.apply(null, args.concat([function (err) {
            // This inner `args` shadows the outer one on purpose: it is
            // the result list passed to fn's callback.
            var args = Array.prototype.slice.call(arguments, 1);
            if (typeof console !== 'undefined') {
                if (err) {
                    if (console.error) {
                        console.error(err);
                    }
                }
                else if (console[name]) {
                    _each(args, function (x) {
                        console[name](x);
                    });
                }
            }
        }]));
    };
};
async.log = _console_fn('log');
async.dir = _console_fn('dir');
/*async.info = _console_fn('info');
async.warn = _console_fn('warn');
async.error = _console_fn('error');*/
|||
|
|||
// Memoize an async function by its arguments (or by `hasher(args...)`).
// Concurrent calls for the same key are queued so `fn` runs only once
// per key; the cached result is replayed to every waiting callback.
//
// FIX: use hasOwnProperty instead of the `in` operator. `key in memo`
// also matches names inherited from Object.prototype ('toString',
// 'hasOwnProperty', ...), so such keys were wrongly treated as already
// cached/queued on a fresh memo object.
async.memoize = function (fn, hasher) {
    var memo = {};
    var queues = {};
    hasher = hasher || function (x) {
        return x;
    };
    var memoized = function () {
        var args = Array.prototype.slice.call(arguments);
        var callback = args.pop();
        var key = hasher.apply(null, args);
        if (memo.hasOwnProperty(key)) {
            // Cached: replay the stored callback arguments.
            callback.apply(null, memo[key]);
        }
        else if (queues.hasOwnProperty(key)) {
            // In flight: wait for the pending computation.
            queues[key].push(callback);
        }
        else {
            queues[key] = [callback];
            fn.apply(null, args.concat([function () {
                memo[key] = arguments;
                var q = queues[key];
                delete queues[key];
                for (var i = 0, l = q.length; i < l; i++) {
                    q[i].apply(null, arguments);
                }
            }]));
        }
    };
    // Expose the cache and the original function for unmemoize/testing.
    memoized.memo = memo;
    memoized.unmemoized = fn;
    return memoized;
};
|||
|
|||
// Undo async.memoize: the returned function always calls the original
// (uncached) function; a never-memoized fn is passed through unchanged.
async.unmemoize = function (fn) {
    return function () {
        var target = fn.unmemoized || fn;
        return target.apply(null, arguments);
    };
};
|||
|
|||
// Call `iterator` with 0..count-1 in parallel, collecting the results
// (in index order) via async.map.
async.times = function (count, iterator, callback) {
    var indexes = [];
    for (var n = 0; n < count; n += 1) {
        indexes.push(n);
    }
    return async.map(indexes, iterator, callback);
};
|||
|
|||
// Call `iterator` with 0..count-1 one at a time, collecting the results
// via async.mapSeries.
async.timesSeries = function (count, iterator, callback) {
    var indexes = [];
    for (var n = 0; n < count; n += 1) {
        indexes.push(n);
    }
    return async.mapSeries(indexes, iterator, callback);
};
|||
|
|||
// Compose async functions right-to-left: compose(f, g, h) returns a
// function that runs h, then g, then f, threading each step's results
// into the next. The composed function preserves the caller's `this`.
async.compose = function (/* functions... */) {
    // NOTE: reverses the `arguments` object in place.
    var fns = Array.prototype.reverse.call(arguments);
    return function () {
        var that = this;
        var args = Array.prototype.slice.call(arguments);
        var callback = args.pop();
        async.reduce(fns, args, function (newargs, fn, cb) {
            fn.apply(that, newargs.concat([function () {
                var err = arguments[0];
                var nextargs = Array.prototype.slice.call(arguments, 1);
                cb(err, nextargs);
            }]))
        },
        function (err, results) {
            callback.apply(that, [err].concat(results));
        });
    };
};
|||
|
|||
// Apply every function in `fns` to the same argument list. Called with
// extra arguments it runs immediately; called with only (eachfn, fns) it
// returns a partially applied function expecting (args..., callback).
var _applyEach = function (eachfn, fns /*args...*/) {
    var go = function () {
        var that = this;
        var args = Array.prototype.slice.call(arguments);
        var callback = args.pop();
        return eachfn(fns, function (fn, cb) {
            fn.apply(that, args.concat([cb]));
        },
        callback);
    };
    if (arguments.length > 2) {
        var args = Array.prototype.slice.call(arguments, 2);
        return go.apply(this, args);
    }
    else {
        return go;
    }
};
async.applyEach = doParallel(_applyEach);
async.applyEachSeries = doSeries(_applyEach);
|||
|
|||
// Call `fn` repeatedly, forever, until it reports an error. The error is
// delivered to `callback` when given, otherwise it is thrown.
async.forever = function (fn, callback) {
    var step = function (err) {
        if (err) {
            if (callback) {
                return callback(err);
            }
            throw err;
        }
        fn(step);
    };
    step();
};
|||
|
|||
// AMD / RequireJS
|
|||
if (typeof define !== 'undefined' && define.amd) { |
|||
define([], function () { |
|||
return async; |
|||
}); |
|||
} |
|||
// Node.js
|
|||
else if (typeof module !== 'undefined' && module.exports) { |
|||
module.exports = async; |
|||
} |
|||
// included directly via <script> tag
|
|||
else { |
|||
root.async = async; |
|||
} |
|||
|
|||
}()); |
@ -1,3 +1,3 @@ |
|||
version_info = (2, 0, 2) |
|||
version_info = (2, 1, 1) |
|||
version = '.'.join(str(n) for n in version_info[:3]) |
|||
release = version + ''.join(str(n) for n in version_info[3:]) |
|||
release = '.'.join(str(n) for n in version_info) |
|||
|
@ -0,0 +1,91 @@ |
|||
""" |
|||
Stores jobs in a Redis database. |
|||
""" |
|||
from uuid import uuid4 |
|||
from datetime import datetime |
|||
import logging |
|||
|
|||
from apscheduler.jobstores.base import JobStore |
|||
from apscheduler.job import Job |
|||
|
|||
try: |
|||
import cPickle as pickle |
|||
except ImportError: # pragma: nocover |
|||
import pickle |
|||
|
|||
try: |
|||
from redis import StrictRedis |
|||
except ImportError: # pragma: nocover |
|||
raise ImportError('RedisJobStore requires redis installed') |
|||
|
|||
try: |
|||
long = long |
|||
except NameError: |
|||
long = int |
|||
|
|||
logger = logging.getLogger(__name__) |
|||
|
|||
|
|||
class RedisJobStore(JobStore):
    """Job store that persists APScheduler jobs in a Redis database.

    Each job is stored as a Redis hash under ``key_prefix + job.id`` with
    fields ``job_state`` (pickled job state), ``runs`` (run counter) and
    ``next_run_time`` (ISO 8601 string).
    """

    def __init__(self, db=0, key_prefix='jobs.',
                 pickle_protocol=pickle.HIGHEST_PROTOCOL, **connect_args):
        # In-memory cache of Job objects mirroring what is stored in Redis.
        self.jobs = []
        self.pickle_protocol = pickle_protocol
        self.key_prefix = key_prefix

        if db is None:
            raise ValueError('The "db" parameter must not be empty')
        if not key_prefix:
            raise ValueError('The "key_prefix" parameter must not be empty')

        self.redis = StrictRedis(db=db, **connect_args)

    def add_job(self, job):
        """Persist ``job`` to Redis under a freshly generated UUID id."""
        job.id = str(uuid4())
        job_state = job.__getstate__()
        # The dict literal evaluates in order: the state is pickled before
        # next_run_time is popped into its own hash field, so the pickled
        # blob still contains it; load_jobs() overwrites it from the field.
        job_dict = {
            'job_state': pickle.dumps(job_state, self.pickle_protocol),
            'runs': '0',
            'next_run_time': job_state.pop('next_run_time').isoformat()}
        self.redis.hmset(self.key_prefix + job.id, job_dict)
        self.jobs.append(job)

    def remove_job(self, job):
        """Delete ``job`` from Redis and drop it from the local cache."""
        self.redis.delete(self.key_prefix + job.id)
        self.jobs.remove(job)

    def load_jobs(self):
        """Reload every job matching the key prefix into ``self.jobs``."""
        jobs = []
        keys = self.redis.keys(self.key_prefix + '*')
        # Pipeline the HGETALLs to avoid one round trip per job.
        pipeline = self.redis.pipeline()
        for key in keys:
            pipeline.hgetall(key)
        results = pipeline.execute()

        for job_dict in results:
            job_state = {}
            try:
                job = Job.__new__(Job)
                # Hash field names come back as bytes, hence .encode().
                # NOTE(review): pickle.loads on data read from Redis is
                # only safe if the database contents are trusted.
                job_state = pickle.loads(job_dict['job_state'.encode()])
                job_state['runs'] = long(job_dict['runs'.encode()])
                dateval = job_dict['next_run_time'.encode()].decode()
                job_state['next_run_time'] = datetime.strptime(
                    dateval, '%Y-%m-%dT%H:%M:%S')
                job.__setstate__(job_state)
                jobs.append(job)
            except Exception:
                # Skip (but log) any job that fails to deserialize rather
                # than aborting the whole load.
                job_name = job_state.get('name', '(unknown)')
                logger.exception('Unable to restore job "%s"', job_name)
        self.jobs = jobs

    def update_job(self, job):
        """Write the mutable fields (next_run_time, runs) back to Redis."""
        attrs = {
            'next_run_time': job.next_run_time.isoformat(),
            'runs': job.runs}
        self.redis.hmset(self.key_prefix + job.id, attrs)

    def close(self):
        """Disconnect all connections in the Redis connection pool."""
        self.redis.connection_pool.disconnect()

    def __repr__(self):
        return '<%s>' % self.__class__.__name__
Some files were not shown because too many files changed in this diff
Loading…
Reference in new issue