'
}).inject(self.message_container, 'top');
setTimeout(function(){
- new_message.addClass('show')
+ new_message.addClass('show');
}, 10);
var hide_message = function(){
@@ -211,8 +210,8 @@ var NotificationBase = new Class({
var setting_page = App.getPage('Settings');
setting_page.addEvent('create', function(){
- Object.each(setting_page.tabs.notifications.groups, self.addTestButton.bind(self))
- })
+ Object.each(setting_page.tabs.notifications.groups, self.addTestButton.bind(self));
+ });
},
@@ -222,7 +221,7 @@ var NotificationBase = new Class({
if(button_name.contains('Notifications')) return;
- new Element('.ctrlHolder.test_button').adopt(
+ new Element('.ctrlHolder.test_button').grab(
new Element('a.button', {
'text': button_name,
'events': {
@@ -235,20 +234,21 @@ var NotificationBase = new Class({
button.set('text', button_name);
+ var message;
if(json.success){
- var message = new Element('span.success', {
+ message = new Element('span.success', {
'text': 'Notification successful'
- }).inject(button, 'after')
+ }).inject(button, 'after');
}
else {
- var message = new Element('span.failed', {
+ message = new Element('span.failed', {
'text': 'Notification failed. Check logs for details.'
- }).inject(button, 'after')
+ }).inject(button, 'after');
}
(function(){
message.destroy();
- }).delay(3000)
+ }).delay(3000);
}
});
}
@@ -258,7 +258,7 @@ var NotificationBase = new Class({
},
testButtonName: function(fieldset){
- var name = String(fieldset.getElement('h2').innerHTML).substring(0,String(fieldset.getElement('h2').innerHTML).indexOf("Automation Trakt settings',
+ 'description': 'add movies to your collection once downloaded. Connect your account in Automation Trakt settings',
'options': [
{
'name': 'notification_enabled',
diff --git a/couchpotato/core/notifications/twitter/static/twitter.js b/couchpotato/core/notifications/twitter/static/twitter.js
index 75c96a8..97465b6 100644
--- a/couchpotato/core/notifications/twitter/static/twitter.js
+++ b/couchpotato/core/notifications/twitter/static/twitter.js
@@ -16,7 +16,7 @@ var TwitterNotification = new Class({
var twitter_set = 0;
fieldset.getElements('input[type=text]').each(function(el){
- twitter_set += +(el.get('value') != '');
+ twitter_set += +(el.get('value') !== '');
});
@@ -57,7 +57,7 @@ var TwitterNotification = new Class({
}
})
).inject(fieldset.getElement('.test_button'), 'before');
- })
+ });
}
diff --git a/couchpotato/core/notifications/xbmc.py b/couchpotato/core/notifications/xbmc.py
index 1eef709..1fbfdb8 100644
--- a/couchpotato/core/notifications/xbmc.py
+++ b/couchpotato/core/notifications/xbmc.py
@@ -83,7 +83,7 @@ class XBMC(Notification):
# v6 (as of XBMC v12(Frodo)) is required to send notifications
xbmc_rpc_version = str(result['result']['version'])
- log.debug('XBMC JSON-RPC Version: %s ; Notifications by JSON-RPC only supported for v6 [as of XBMC v12(Frodo)]', xbmc_rpc_version)
+ log.debug('Kodi JSON-RPC Version: %s ; Notifications by JSON-RPC only supported for v6 [as of XBMC v12(Frodo)]', xbmc_rpc_version)
# disable JSON use
self.use_json_notifications[host] = False
@@ -96,7 +96,7 @@ class XBMC(Notification):
success = True
break
elif r.get('error'):
- log.error('XBMC error; %s: %s (%s)', (r['id'], r['error']['message'], r['error']['code']))
+ log.error('Kodi error; %s: %s (%s)', (r['id'], r['error']['message'], r['error']['code']))
break
elif result.get('result') and type(result['result']['version']).__name__ == 'dict':
@@ -106,7 +106,7 @@ class XBMC(Notification):
xbmc_rpc_version += '.' + str(result['result']['version']['minor'])
xbmc_rpc_version += '.' + str(result['result']['version']['patch'])
- log.debug('XBMC JSON-RPC Version: %s', xbmc_rpc_version)
+ log.debug('Kodi JSON-RPC Version: %s', xbmc_rpc_version)
# ok, XBMC version is supported
self.use_json_notifications[host] = True
@@ -119,12 +119,12 @@ class XBMC(Notification):
success = True
break
elif r.get('error'):
- log.error('XBMC error; %s: %s (%s)', (r['id'], r['error']['message'], r['error']['code']))
+ log.error('Kodi error; %s: %s (%s)', (r['id'], r['error']['message'], r['error']['code']))
break
# error getting version info (we do have contact with XBMC though)
elif result.get('error'):
- log.error('XBMC error; %s: %s (%s)', (result['id'], result['error']['message'], result['error']['code']))
+ log.error('Kodi error; %s: %s (%s)', (result['id'], result['error']['message'], result['error']['code']))
log.debug('Use JSON notifications: %s ', self.use_json_notifications)
@@ -173,10 +173,10 @@ class XBMC(Notification):
return [{'result': 'Error'}]
except (MaxRetryError, Timeout, ConnectionError):
- log.info2('Couldn\'t send request to XBMC, assuming it\'s turned off')
+ log.info2('Couldn\'t send request to Kodi, assuming it\'s turned off')
return [{'result': 'Error'}]
except:
- log.error('Failed sending non-JSON-type request to XBMC: %s', traceback.format_exc())
+ log.error('Failed sending non-JSON-type request to Kodi: %s', traceback.format_exc())
return [{'result': 'Error'}]
def request(self, host, do_requests):
@@ -209,10 +209,10 @@ class XBMC(Notification):
return response
except (MaxRetryError, Timeout, ConnectionError):
- log.info2('Couldn\'t send request to XBMC, assuming it\'s turned off')
+ log.info2('Couldn\'t send request to Kodi, assuming it\'s turned off')
return []
except:
- log.error('Failed sending request to XBMC: %s', traceback.format_exc())
+ log.error('Failed sending request to Kodi: %s', traceback.format_exc())
return []
@@ -223,8 +223,8 @@ config = [{
'tab': 'notifications',
'list': 'notification_providers',
'name': 'xbmc',
- 'label': 'XBMC',
- 'description': 'v11 (Eden), v12 (Frodo), v13 (Gotham)',
+ 'label': 'Kodi',
+ 'description': 'v14 (Helix), v15 (Isengard)',
'options': [
{
'name': 'enabled',
@@ -249,7 +249,7 @@ config = [{
'default': 0,
'type': 'bool',
'advanced': True,
- 'description': 'Only update the first host when movie snatched, useful for synced XBMC',
+ 'description': 'Only update the first host when movie snatched, useful for synced Kodi',
},
{
'name': 'remote_dir_scan',
@@ -257,7 +257,7 @@ config = [{
'default': 0,
'type': 'bool',
'advanced': True,
- 'description': ('Only scan new movie folder at remote XBMC servers.', 'Useful if the XBMC path is different from the path CPS uses.'),
+ 'description': ('Only scan new movie folder at remote Kodi servers.', 'Useful if the Kodi path is different from the path CPS uses.'),
},
{
'name': 'force_full_scan',
@@ -265,7 +265,7 @@ config = [{
'default': 0,
'type': 'bool',
'advanced': True,
- 'description': ('Do a full scan instead of only the new movie.', 'Useful if the XBMC path is different from the path CPS uses.'),
+ 'description': ('Do a full scan instead of only the new movie.', 'Useful if the Kodi path is different from the path CPS uses.'),
},
{
'name': 'on_snatch',
diff --git a/couchpotato/core/plugins/base.py b/couchpotato/core/plugins/base.py
index 6d63b83..b1db842 100644
--- a/couchpotato/core/plugins/base.py
+++ b/couchpotato/core/plugins/base.py
@@ -1,17 +1,14 @@
import threading
-from urllib import quote
+from urllib import quote, getproxies
from urlparse import urlparse
-import glob
-import inspect
import os.path
-import re
import time
import traceback
from couchpotato.core.event import fireEvent, addEvent
from couchpotato.core.helpers.encoding import ss, toSafeString, \
toUnicode, sp
-from couchpotato.core.helpers.variable import getExt, md5, isLocalIP, scanForPassword, tryInt, getIdentifier, \
+from couchpotato.core.helpers.variable import md5, isLocalIP, scanForPassword, tryInt, getIdentifier, \
randomString
from couchpotato.core.logger import CPLog
from couchpotato.environment import Env
@@ -19,8 +16,6 @@ import requests
from requests.packages.urllib3 import Timeout
from requests.packages.urllib3.exceptions import MaxRetryError
from tornado import template
-from tornado.web import StaticFileHandler
-
log = CPLog(__name__)
@@ -32,7 +27,6 @@ class Plugin(object):
plugin_path = None
enabled_option = 'enabled'
- auto_register_static = True
_needs_shutdown = False
_running = None
@@ -41,6 +35,7 @@ class Plugin(object):
user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0'
http_last_use = {}
+ http_last_use_queue = {}
http_time_between_calls = 0
http_failed_request = {}
http_failed_disabled = {}
@@ -56,9 +51,6 @@ class Plugin(object):
addEvent('plugin.running', self.isRunning)
self._running = []
- if self.auto_register_static:
- self.registerStatic(inspect.getfile(self.__class__))
-
# Setup database
if self._database:
addEvent('database.setup', self.databaseSetup)
@@ -88,32 +80,6 @@ class Plugin(object):
t = template.Template(open(os.path.join(os.path.dirname(parent_file), templ), 'r').read())
return t.generate(**params)
- def registerStatic(self, plugin_file, add_to_head = True):
-
- # Register plugin path
- self.plugin_path = os.path.dirname(plugin_file)
- static_folder = toUnicode(os.path.join(self.plugin_path, 'static'))
-
- if not os.path.isdir(static_folder):
- return
-
- # Get plugin_name from PluginName
- s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', self.__class__.__name__)
- class_name = re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
-
- # View path
- path = 'static/plugin/%s/' % class_name
-
- # Add handler to Tornado
- Env.get('app').add_handlers(".*$", [(Env.get('web_base') + path + '(.*)', StaticFileHandler, {'path': static_folder})])
-
- # Register for HTML
- if add_to_head:
- for f in glob.glob(os.path.join(self.plugin_path, 'static', '*')):
- ext = getExt(f)
- if ext in ['js', 'css']:
- fireEvent('register_%s' % ('script' if ext in 'js' else 'style'), path + os.path.basename(f), f)
-
def createFile(self, path, content, binary = False):
path = sp(path)
@@ -144,7 +110,7 @@ class Plugin(object):
f.close()
os.chmod(path, Env.getPermission('file'))
except:
- log.error('Unable writing to file "%s": %s', (path, traceback.format_exc()))
+ log.error('Unable to write file "%s": %s', (path, traceback.format_exc()))
if os.path.isfile(path):
os.remove(path)
@@ -199,6 +165,23 @@ class Plugin(object):
headers['Connection'] = headers.get('Connection', 'keep-alive')
headers['Cache-Control'] = headers.get('Cache-Control', 'max-age=0')
+ use_proxy = Env.setting('use_proxy')
+ proxy_url = None
+
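+ # Use the proxy configured in settings (with optional credentials); fall back to the system proxies when no server is set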
+ if use_proxy:
+ proxy_server = Env.setting('proxy_server')
+ proxy_username = Env.setting('proxy_username')
+ proxy_password = Env.setting('proxy_password')
+
+ if proxy_server:
+ loc = "{0}:{1}@{2}".format(proxy_username, proxy_password, proxy_server) if proxy_username else proxy_server
+ proxy_url = {
+ "http": "http://"+loc,
+ "https": "https://"+loc,
+ }
+ else:
+ proxy_url = getproxies()
+
r = Env.get('http_opener')
# Don't try for failed requests
@@ -213,7 +196,7 @@ class Plugin(object):
del self.http_failed_request[host]
del self.http_failed_disabled[host]
- self.wait(host)
+ self.wait(host, url)
status_code = None
try:
@@ -224,6 +207,7 @@ class Plugin(object):
'files': files,
'verify': False, #verify_ssl, Disable for now as to many wrongly implemented certificates..
'stream': stream,
+ 'proxies': proxy_url,
}
method = 'post' if len(data) > 0 or files else 'get'
@@ -267,20 +251,34 @@ class Plugin(object):
return data
- def wait(self, host = ''):
+ def wait(self, host = '', url = ''):
if self.http_time_between_calls == 0:
return
- now = time.time()
+ try:
+ if host not in self.http_last_use_queue:
+ self.http_last_use_queue[host] = []
- last_use = self.http_last_use.get(host, 0)
- if last_use > 0:
+ self.http_last_use_queue[host].append(url)
- wait = (last_use - now) + self.http_time_between_calls
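+ # Queue requests per host and let each wait its turn, spacing calls http_time_between_calls apart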
+ while not self.shuttingDown():
+ wait = (self.http_last_use.get(host, 0) - time.time()) + self.http_time_between_calls
+
+ if self.http_last_use_queue[host][0] != url:
+ time.sleep(.1)
+ continue
+
+ if wait > 0:
+ log.debug('Waiting for %s, %d seconds', (self.getName(), max(1, wait)))
+ time.sleep(min(wait, 30))
+ else:
+ self.http_last_use_queue[host] = self.http_last_use_queue[host][1:]
+ self.http_last_use[host] = time.time()
+ break
+ except:
+ log.error('Failed handling waiting call: %s', traceback.format_exc())
+ time.sleep(self.http_time_between_calls)
- if wait > 0:
- log.debug('Waiting for %s, %d seconds', (self.getName(), max(1, wait)))
- time.sleep(min(wait, 30))
def beforeCall(self, handler):
self.isRunning('%s.%s' % (self.getName(), handler.__name__))
diff --git a/couchpotato/core/plugins/category/static/category.js b/couchpotato/core/plugins/category/static/category.js
index 6d160be..5962609 100644
--- a/couchpotato/core/plugins/category/static/category.js
+++ b/couchpotato/core/plugins/category/static/category.js
@@ -52,7 +52,7 @@ var CategoryListBase = new Class({
});
- })
+ });
},
@@ -71,7 +71,7 @@ var CategoryListBase = new Class({
'events': {
'click': function(){
var category = self.createCategory();
- $(category).inject(self.category_container)
+ $(category).inject(self.category_container);
}
}
})
@@ -79,15 +79,15 @@ var CategoryListBase = new Class({
// Add categories, that aren't part of the core (for editing)
Array.each(self.categories, function(category){
- $(category).inject(self.category_container)
+ $(category).inject(self.category_container);
});
},
getCategory: function(id){
return this.categories.filter(function(category){
- return category.data._id == id
- }).pick()
+ return category.data._id == id;
+ }).pick();
},
getAll: function(){
@@ -97,7 +97,7 @@ var CategoryListBase = new Class({
createCategory: function(data){
var self = this;
- var data = data || {'id': randomString()};
+ data = data || {'id': randomString()};
var category = new Category(data);
self.categories.include(category);
@@ -115,7 +115,7 @@ var CategoryListBase = new Class({
new Element('label[text=Order]'),
category_list = new Element('ul'),
new Element('p.formHint', {
- 'html': 'Change the order the categories are in the dropdown list. First one will be default.'
+ 'html': 'Change the order the categories are in the dropdown list.'
})
)
).inject(self.content);
@@ -125,7 +125,7 @@ var CategoryListBase = new Class({
new Element('span.category_label', {
'text': category.data.label
}),
- new Element('span.handle')
+ new Element('span.handle.icon-handle')
).inject(category_list);
});
@@ -192,7 +192,7 @@ var Category = new Class({
}),
new Element('.category_label.ctrlHolder').adopt(
new Element('label', {'text':'Name'}),
- new Element('input.inlay', {
+ new Element('input', {
'type':'text',
'value': data.label,
'placeholder': 'Example: Kids, Horror or His'
@@ -201,7 +201,7 @@ var Category = new Class({
),
new Element('.category_preferred.ctrlHolder').adopt(
new Element('label', {'text':'Preferred'}),
- new Element('input.inlay', {
+ new Element('input', {
'type':'text',
'value': data.preferred,
'placeholder': 'Blu-ray, DTS'
@@ -209,7 +209,7 @@ var Category = new Class({
),
new Element('.category_required.ctrlHolder').adopt(
new Element('label', {'text':'Required'}),
- new Element('input.inlay', {
+ new Element('input', {
'type':'text',
'value': data.required,
'placeholder': 'Example: DTS, AC3 & English'
@@ -217,7 +217,7 @@ var Category = new Class({
),
new Element('.category_ignored.ctrlHolder').adopt(
new Element('label', {'text':'Ignored'}),
- new Element('input.inlay', {
+ new Element('input', {
'type':'text',
'value': data.ignored,
'placeholder': 'Example: dubbed, swesub, french'
@@ -225,7 +225,7 @@ var Category = new Class({
)
);
- self.makeSortable()
+ self.makeSortable();
},
@@ -248,7 +248,7 @@ var Category = new Class({
}
});
- }).delay(delay || 0, self)
+ }).delay(delay || 0, self);
},
@@ -262,13 +262,13 @@ var Category = new Class({
'preferred' : self.el.getElement('.category_preferred input').get('value'),
'ignored' : self.el.getElement('.category_ignored input').get('value'),
'destination': self.data.destination
- }
+ };
},
del: function(){
var self = this;
- if(self.data.label == undefined){
+ if(self.data.label === undefined){
self.el.destroy();
return;
}
@@ -318,11 +318,11 @@ var Category = new Class({
},
get: function(attr){
- return this.data[attr]
+ return this.data[attr];
},
toElement: function(){
- return this.el
+ return this.el;
}
});
diff --git a/couchpotato/core/plugins/category/static/category.css b/couchpotato/core/plugins/category/static/category.scss
similarity index 72%
rename from couchpotato/core/plugins/category/static/category.css
rename to couchpotato/core/plugins/category/static/category.scss
index 3218a79..24ba16e 100644
--- a/couchpotato/core/plugins/category/static/category.css
+++ b/couchpotato/core/plugins/category/static/category.scss
@@ -1,13 +1,14 @@
+@import "_mixins";
+
.add_new_category {
padding: 20px;
display: block;
text-align: center;
font-size: 20px;
- border-bottom: 1px solid rgba(255,255,255,0.2);
}
.category {
- border-bottom: 1px solid rgba(255,255,255,0.2);
+ margin-bottom: 20px;
position: relative;
}
@@ -28,8 +29,6 @@
}
.category .formHint {
- width: 250px !important;
- margin: 0 !important;
opacity: 0.1;
}
.category:hover .formHint {
@@ -48,11 +47,10 @@
}
#category_ordering li {
- cursor: -webkit-grab;
- cursor: -moz-grab;
cursor: grab;
- border-bottom: 1px solid rgba(255,255,255,0.2);
- padding: 0 5px;
+ border-bottom: 1px solid $theme_off;
+ padding: 5px;
+ list-style: none;
}
#category_ordering li:last-child { border: 0; }
@@ -69,14 +67,9 @@
}
#category_ordering li .handle {
- background: url('../../images/handle.png') center;
width: 20px;
float: right;
}
#category_ordering .formHint {
- clear: none;
- float: right;
- width: 250px;
- margin: 0;
}
diff --git a/couchpotato/core/plugins/dashboard.py b/couchpotato/core/plugins/dashboard.py
index afead44..16dc418 100644
--- a/couchpotato/core/plugins/dashboard.py
+++ b/couchpotato/core/plugins/dashboard.py
@@ -1,6 +1,6 @@
import random as rndm
import time
-from CodernityDB.database import RecordDeleted
+from CodernityDB.database import RecordDeleted, RecordNotFound
from couchpotato import get_db
from couchpotato.api import addApiView
@@ -65,6 +65,10 @@ class Dashboard(Plugin):
log.debug('Record already deleted: %s', media_id)
continue
+ except RecordNotFound:
+ log.debug('Record not found: %s', media_id)
+ continue
+
pp = profile_pre.get(media.get('profile_id'))
if not pp: continue
@@ -92,7 +96,7 @@ class Dashboard(Plugin):
if late:
media['releases'] = fireEvent('release.for_media', media['_id'], single = True)
- for release in media.get('releases'):
+ for release in media.get('releases', []):
if release.get('status') in ['snatched', 'available', 'seeding', 'downloaded']:
add = False
break
diff --git a/couchpotato/core/plugins/log/main.py b/couchpotato/core/plugins/log/main.py
index 003529b..4bf7cf3 100644
--- a/couchpotato/core/plugins/log/main.py
+++ b/couchpotato/core/plugins/log/main.py
@@ -131,12 +131,13 @@ class Logging(Plugin):
def toList(self, log_content = ''):
- logs_raw = toUnicode(log_content).split('[0m\n')
+ logs_raw = re.split(r'\[0m\n', toUnicode(log_content))
logs = []
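+ # Each entry is wrapped in ANSI colour codes; splitting on the escape character yields exactly 3 parts for well-formed lines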
+ re_split = r'\x1b'
for log_line in logs_raw:
- split = splitString(log_line, '\x1b')
- if split:
+ split = re.split(re_split, log_line)
+ if split and len(split) == 3:
try:
date, time, log_type = splitString(split[0], ' ')
timestamp = '%s %s' % (date, time)
diff --git a/couchpotato/core/plugins/log/static/log.css b/couchpotato/core/plugins/log/static/log.css
deleted file mode 100644
index c7aace6..0000000
--- a/couchpotato/core/plugins/log/static/log.css
+++ /dev/null
@@ -1,199 +0,0 @@
-.page.log .nav {
- display: block;
- text-align: center;
- padding: 0 0 30px;
- margin: 0;
- font-size: 20px;
- position: fixed;
- width: 100%;
- bottom: 0;
- left: 0;
- background: #4E5969;
- z-index: 100;
-}
-
- .page.log .nav li {
- display: inline-block;
- padding: 5px 10px;
- margin: 0;
- }
-
- .page.log .nav li.select,
- .page.log .nav li.clear {
- cursor: pointer;
- }
-
- .page.log .nav li:hover:not(.active):not(.filter) {
- background: rgba(255, 255, 255, 0.1);
- }
-
- .page.log .nav li.active {
- font-weight: bold;
- cursor: default;
- background: rgba(255,255,255,.1);
- }
-
- @media all and (max-width: 480px) {
- .page.log .nav {
- font-size: 14px;
- }
-
- .page.log .nav li {
- padding: 5px;
- }
- }
-
- .page.log .nav li.hint {
- text-align: center;
- width: 400px;
- left: 50%;
- margin-left: -200px;
- font-style: italic;
- font-size: 11px;
- position: absolute;
- right: 20px;
- opacity: .5;
- bottom: 5px;
- }
-
-.page.log .loading {
- text-align: center;
- font-size: 20px;
- padding: 50px;
-}
-
-.page.log .container {
- padding: 30px 0 60px;
- overflow: hidden;
- line-height: 150%;
- font-size: 11px;
- color: #FFF;
-}
-
- .page.log .container select {
- vertical-align: top;
- }
-
- .page.log .container .time {
- clear: both;
- color: lightgrey;
- font-size: 10px;
- border-top: 1px solid rgba(255, 255, 255, 0.1);
- position: relative;
- overflow: hidden;
- padding: 0 3px;
- font-family: Lucida Console, Monaco, Nimbus Mono L, monospace, serif;
- }
- .page.log .container .time.highlight {
- background: rgba(255, 255, 255, 0.1);
- }
- .page.log .container .time span {
- padding: 5px 0 3px;
- display: inline-block;
- vertical-align: middle;
- }
-
- .page.log[data-filter=INFO] .error,
- .page.log[data-filter=INFO] .debug,
- .page.log[data-filter=ERROR] .debug,
- .page.log[data-filter=ERROR] .info,
- .page.log[data-filter=DEBUG] .info,
- .page.log[data-filter=DEBUG] .error {
- display: none;
- }
-
- .page.log .container .type {
- margin-left: 10px;
- }
-
- .page.log .container .message {
- float: right;
- width: 86%;
- white-space: pre-wrap;
- }
-
- .page.log .container .error { color: #FFA4A4; }
- .page.log .container .debug span { opacity: .6; }
-
-.do_report {
- position: absolute;
- padding: 10px;
-}
-
-.page.log .report {
- position: fixed;
- width: 100%;
- height: 100%;
- background: rgba(0,0,0,.7);
- left: 0;
- top: 0;
- z-index: 99999;
- font-size: 14px;
-}
-
- .page.log .report .button {
- display: inline-block;
- margin: 10px 0;
- padding: 10px;
- }
-
- .page.log .report .bug {
- width: 800px;
- height: 80%;
- position: absolute;
- left: 50%;
- top: 50%;
- margin: 0 0 0 -400px;
- transform: translate(0, -50%);
- }
-
- .page.log .report .bug textarea {
- display: block;
- width: 100%;
- background: #FFF;
- padding: 20px;
- overflow: auto;
- color: #666;
- height: 70%;
- font-size: 12px;
- }
-
-.page.log .container .time ::-webkit-selection {
- background-color: #000;
- color: #FFF;
-}
-
-.page.log .container .time ::-moz-selection {
- background-color: #000;
- color: #FFF;
-}
-
-.page.log .container .time ::-ms-selection {
- background-color: #000;
- color: #FFF;
-}
-
-.page.log .container .time.highlight ::selection {
- background-color: transparent;
- color: inherit;
-}
-
-.page.log .container .time.highlight ::-webkit-selection {
- background-color: transparent;
- color: inherit;
-}
-
-.page.log .container .time.highlight ::-moz-selection {
- background-color: transparent;
- color: inherit;
-}
-
-.page.log .container .time.highlight ::-ms-selection {
- background-color: transparent;
- color: inherit;
-}
-
-.page.log .container .time.highlight ::selection {
- background-color: transparent;
- color: inherit;
-}
diff --git a/couchpotato/core/plugins/log/static/log.js b/couchpotato/core/plugins/log/static/log.js
index 71a65d0..0d47914 100644
--- a/couchpotato/core/plugins/log/static/log.js
+++ b/couchpotato/core/plugins/log/static/log.js
@@ -7,21 +7,21 @@ Page.Log = new Class({
title: 'Show recent logs.',
has_tab: false,
+ navigation: null,
log_items: [],
- report_text: '\
-### Steps to reproduce:\n\
-1. ..\n\
-2. ..\n\
-\n\
-### Information:\n\
-Movie(s) I have this with: ...\n\
-Quality of the movie being searched: ...\n\
-Providers I use: ...\n\
-Version of CouchPotato: {version}\n\
-Running on: ...\n\
-\n\
-### Logs:\n\
-```\n{issue}```',
+ report_text: '### Steps to reproduce:\n'+
+ '1. ..\n'+
+ '2. ..\n'+
+ '\n'+
+ '### Information:\n'+
+ 'Movie(s) I have this with: ...\n'+
+ 'Quality of the movie being searched: ...\n'+
+ 'Providers I use: ...\n'+
+ 'Version of CouchPotato: {version}\n'+
+ 'Running on: ...\n'+
+ '\n'+
+ '### Logs:\n'+
+ '```\n{issue}```',
indexAction: function () {
var self = this;
@@ -34,6 +34,7 @@ Running on: ...\n\
var self = this;
if (self.log) self.log.destroy();
+
self.log = new Element('div.container.loading', {
'text': 'loading...',
'events': {
@@ -41,9 +42,17 @@ Running on: ...\n\
self.showSelectionButton.delay(100, self, e);
}
}
- }).inject(self.el);
+ }).inject(self.content);
+
+ if(self.navigation){
+ var nav = self.navigation.getElement('.nav');
+ nav.getElements('.active').removeClass('active');
+
+ self.navigation.getElements('li')[nr+1].addClass('active');
+ }
- Api.request('logging.get', {
+ if(self.request && self.request.running) self.request.cancel();
+ self.request = Api.request('logging.get', {
'data': {
'nr': nr
},
@@ -52,65 +61,68 @@ Running on: ...\n\
self.log_items = self.createLogElements(json.log);
self.log.adopt(self.log_items);
self.log.removeClass('loading');
+ self.scrollToBottom();
- var nav = new Element('ul.nav', {
- 'events': {
- 'click:relay(li.select)': function (e, el) {
- self.getLogs(parseInt(el.get('text')) - 1);
- }
- }
- });
+ if(!self.navigation){
+ self.navigation = new Element('div.navigation').adopt(
+ new Element('h2[text=Logs]'),
+ new Element('div.hint', {
+ 'text': 'Select multiple lines & report an issue'
+ })
+ );
- // Type selection
- new Element('li.filter').grab(
- new Element('select', {
+ var nav = new Element('ul.nav', {
'events': {
- 'change': function () {
- var type_filter = this.getSelected()[0].get('value');
- self.el.set('data-filter', type_filter);
- self.scrollToBottom();
+ 'click:relay(li.select)': function (e, el) {
+ self.getLogs(parseInt(el.get('text')) - 1);
}
}
- }).adopt(
- new Element('option', {'value': 'ALL', 'text': 'Show all logs'}),
- new Element('option', {'value': 'INFO', 'text': 'Show only INFO'}),
- new Element('option', {'value': 'DEBUG', 'text': 'Show only DEBUG'}),
- new Element('option', {'value': 'ERROR', 'text': 'Show only ERROR'})
- )
- ).inject(nav);
-
- // Selections
- for (var i = 0; i <= json.total; i++) {
- new Element('li', {
- 'text': i + 1,
- 'class': 'select ' + (nr == i ? 'active' : '')
- }).inject(nav);
- }
-
- // Clear button
- new Element('li.clear', {
- 'text': 'clear',
- 'events': {
- 'click': function () {
- Api.request('logging.clear', {
- 'onComplete': function () {
- self.getLogs(0);
+ }).inject(self.navigation);
+
+ // Type selection
+ new Element('li.filter').grab(
+ new Element('select', {
+ 'events': {
+ 'change': function () {
+ var type_filter = this.getSelected()[0].get('value');
+ self.content.set('data-filter', type_filter);
+ self.scrollToBottom();
}
- });
-
- }
+ }
+ }).adopt(
+ new Element('option', {'value': 'ALL', 'text': 'Show all logs'}),
+ new Element('option', {'value': 'INFO', 'text': 'Show only INFO'}),
+ new Element('option', {'value': 'DEBUG', 'text': 'Show only DEBUG'}),
+ new Element('option', {'value': 'ERROR', 'text': 'Show only ERROR'})
+ )
+ ).inject(nav);
+
+ // Selections
+ for (var i = 0; i <= json.total; i++) {
+ new Element('li', {
+ 'text': i + 1,
+ 'class': 'select ' + (nr == i ? 'active' : '')
+ }).inject(nav);
}
- }).inject(nav);
- // Hint
- new Element('li.hint', {
- 'text': 'Select multiple lines & report an issue'
- }).inject(nav);
+ // Clear button
+ new Element('li.clear', {
+ 'text': 'clear',
+ 'events': {
+ 'click': function () {
+ Api.request('logging.clear', {
+ 'onComplete': function () {
+ self.getLogs(0);
+ }
+ });
- // Add to page
- nav.inject(self.log, 'top');
+ }
+ }
+ }).inject(nav);
- self.scrollToBottom();
+ // Add to page
+ self.navigation.inject(self.content, 'top');
+ }
}
});
@@ -133,14 +145,14 @@ Running on: ...\n\
new Element('span.message', {
'text': log.message
})
- ))
+ ));
});
return elements;
},
scrollToBottom: function () {
- new Fx.Scroll(window, {'duration': 0}).toBottom();
+ new Fx.Scroll(this.content, {'duration': 0}).toBottom();
},
showSelectionButton: function(e){
@@ -213,7 +225,7 @@ Running on: ...\n\
.replace('{version}', version ? version.version.repr : '...'),
textarea;
- var overlay = new Element('div.report', {
+ var overlay = new Element('div.mask.report_popup', {
'method': 'post',
'events': {
'click': function(e){
@@ -245,12 +257,7 @@ Running on: ...\n\
})
),
textarea = new Element('textarea', {
- 'text': body,
- 'events': {
- 'click': function(){
- this.select();
- }
- }
+ 'text': body
}),
new Element('a.button', {
'target': '_blank',
@@ -270,7 +277,7 @@ Running on: ...\n\
)
);
- overlay.inject(self.log);
+ overlay.inject(document.body);
},
getSelected: function(){
diff --git a/couchpotato/core/plugins/log/static/log.scss b/couchpotato/core/plugins/log/static/log.scss
new file mode 100644
index 0000000..4be815b
--- /dev/null
+++ b/couchpotato/core/plugins/log/static/log.scss
@@ -0,0 +1,159 @@
+@import "_mixins";
+
+.page.log {
+
+ .nav {
+ text-align: right;
+ padding: 0;
+ margin: 0;
+
+ li {
+ display: inline-block;
+ padding: 5px 10px;
+ margin: 0;
+
+ &.select, &.clear {
+ cursor: pointer;
+ }
+
+ &:hover:not(.active):not(.filter) {
+ background: rgba(255, 255, 255, 0.1);
+ }
+
+ &.active {
+ font-weight: bold;
+ cursor: default;
+ background: rgba(255,255,255,.1);
+ }
+ }
+ }
+
+ .hint {
+ font-style: italic;
+ opacity: .5;
+ margin-top: 3px;
+ }
+
+ .container {
+ padding: $padding;
+ overflow: hidden;
+ line-height: 150%;
+
+ &.loading {
+ text-align: center;
+ font-size: 20px;
+ padding: 100px 50px;
+ }
+
+ select {
+ vertical-align: top;
+ }
+
+ .time {
+ clear: both;
+ font-size: .75em;
+ border-top: 1px solid rgba(255, 255, 255, 0.1);
+ overflow: hidden;
+ padding: 0 3px;
+ font-family: Lucida Console, Monaco, Nimbus Mono L, monospace, serif;
+ display: flex;
+
+ &.highlight {
+ background: $theme_off;
+ }
+
+ span {
+ padding: 5px 0 3px;
+ display: inline-block;
+ vertical-align: middle;
+ width: 90px;
+ }
+
+ ::selection {
+ background-color: #000;
+ color: #FFF;
+ }
+ }
+
+ .type.type {
+ margin-left: 10px;
+ width: 40px;
+ }
+
+ .message {
+ white-space: pre-wrap;
+ flex: 1 auto;
+ }
+
+
+ .error { color: #FFA4A4; }
+ .debug span { opacity: .6; }
+ }
+
+
+
+ [data-filter=INFO] .error,
+ [data-filter=INFO] .debug,
+ [data-filter=ERROR] .debug,
+ [data-filter=ERROR] .info,
+ [data-filter=DEBUG] .info,
+ [data-filter=DEBUG] .error {
+ display: none;
+ }
+}
+
+.report_popup.report_popup {
+ position: fixed;
+ left: 0;
+ right: 0;
+ bottom: 0;
+ top: 0;
+ z-index: 99999;
+ font-size: 14px;
+ display: flex;
+ justify-content: center;
+ align-items: center;
+ opacity: 1;
+ color: #FFF;
+ pointer-events: auto;
+
+ .button {
+ margin: 10px 0;
+ padding: 10px;
+ color: $background_color;
+ background: $primary_color;
+ }
+
+ .bug {
+ width: 80%;
+ height: 80%;
+ max-height: 800px;
+ max-width: 800px;
+
+ display: flex;
+ flex-flow: column nowrap;
+
+ > span {
+ margin: $padding/2 0 $padding 0;
+ }
+
+ textarea {
+ display: block;
+ width: 100%;
+ background: #FFF;
+ padding: 20px;
+ overflow: auto;
+ color: #666;
+ height: 70%;
+ font-size: 12px;
+ }
+ }
+}
+
+.do_report.do_report {
+ z-index: 10000;
+ position: absolute;
+ padding: 10px;
+ background: $primary_color;
+ color: #FFF;
+}
diff --git a/couchpotato/core/plugins/profile/static/profile.css b/couchpotato/core/plugins/profile/static/profile.css
deleted file mode 100644
index df93944..0000000
--- a/couchpotato/core/plugins/profile/static/profile.css
+++ /dev/null
@@ -1,197 +0,0 @@
-.add_new_profile {
- padding: 20px;
- display: block;
- text-align: center;
- font-size: 20px;
- border-bottom: 1px solid rgba(255,255,255,0.2);
-}
-
-.profile {
- border-bottom: 1px solid rgba(255,255,255,0.2);
- position: relative;
-}
-
- .profile > .delete {
- position: absolute;
- padding: 16px;
- right: 0;
- cursor: pointer;
- opacity: 0.6;
- color: #fd5353;
- }
- .profile > .delete:hover {
- opacity: 1;
- }
-
- .profile .ctrlHolder:hover {
- background: none;
- }
-
- .profile .qualities {
- min-height: 80px;
- }
-
- .profile .formHint {
- width: 210px !important;
- vertical-align: top !important;
- margin: 0 !important;
- padding-left: 3px !important;
- opacity: 0.1;
- }
- .profile:hover .formHint {
- opacity: 1;
- }
-
- .profile .wait_for {
- padding-top: 0;
- padding-bottom: 20px;
- }
-
- .profile .wait_for input {
- margin: 0 5px !important;
- }
-
- .profile .wait_for .minimum_score_input {
- width: 40px !important;
- text-align: left;
- }
-
- .profile .types {
- padding: 0;
- margin: 0 20px 0 -4px;
- display: inline-block;
- }
-
- .profile .types li {
- padding: 3px 5px;
- border-bottom: 1px solid rgba(255,255,255,0.2);
- list-style: none;
- }
- .profile .types li:last-child { border: 0; }
-
- .profile .types li > * {
- display: inline-block;
- vertical-align: middle;
- line-height: 0;
- margin-right: 10px;
- }
-
- .profile .type .check {
- margin-top: -1px;
- }
-
- .profile .quality_type select {
- width: 120px;
- margin-left: -1px;
- }
-
- .profile .types li.is_empty .check,
- .profile .types li.is_empty .delete,
- .profile .types li.is_empty .handle,
- .profile .types li.is_empty .check_label {
- visibility: hidden;
- }
-
- .profile .types .type label {
- display: inline-block;
- width: auto;
- float: none;
- text-transform: uppercase;
- font-size: 11px;
- font-weight: normal;
- margin-right: 20px;
- text-shadow: none;
- vertical-align: bottom;
- padding: 0;
- height: 17px;
- }
- .profile .types .type label .check {
- margin-right: 5px;
- }
- .profile .types .type label .check_label {
- display: inline-block;
- vertical-align: top;
- height: 16px;
- line-height: 13px;
- }
-
- .profile .types .type .threed {
- display: none;
- }
-
- .profile .types .type.allow_3d .threed {
- display: inline-block;
- }
-
- .profile .types .type .handle {
- background: url('../../images/handle.png') center;
- display: inline-block;
- height: 20px;
- width: 20px;
- cursor: -moz-grab;
- cursor: -webkit-grab;
- cursor: grab;
- margin: 0;
- }
-
- .profile .types .type .delete {
- height: 20px;
- width: 20px;
- line-height: 20px;
- visibility: hidden;
- cursor: pointer;
- font-size: 13px;
- color: #fd5353;
- }
- .profile .types .type:not(.allow_3d) .delete {
- margin-left: 55px;
- }
-
- .profile .types .type:hover:not(.is_empty) .delete {
- visibility: visible;
- }
-
-#profile_ordering {
-
-}
-
- #profile_ordering ul {
- float: left;
- margin: 0;
- width: 275px;
- padding: 0;
- }
-
- #profile_ordering li {
- border-bottom: 1px solid rgba(255,255,255,0.2);
- padding: 0 5px;
- }
- #profile_ordering li:last-child { border: 0; }
-
- #profile_ordering li .check {
- margin: 2px 10px 0 0;
- vertical-align: top;
- }
-
- #profile_ordering li > span {
- display: inline-block;
- height: 20px;
- vertical-align: top;
- line-height: 20px;
- }
-
- #profile_ordering li .handle {
- background: url('../../images/handle.png') center;
- width: 20px;
- float: right;
- cursor: -webkit-grab;
- cursor: -moz-grab;
- cursor: grab;
- }
-
- #profile_ordering .formHint {
- clear: none;
- float: right;
- width: 250px;
- margin: 0;
- }
diff --git a/couchpotato/core/plugins/profile/static/profile.js b/couchpotato/core/plugins/profile/static/profile.js
index 35ad81b..626c8ce 100644
--- a/couchpotato/core/plugins/profile/static/profile.js
+++ b/couchpotato/core/plugins/profile/static/profile.js
@@ -31,7 +31,7 @@ var Profile = new Class({
}),
new Element('.quality_label.ctrlHolder').adopt(
new Element('label', {'text':'Name'}),
- new Element('input.inlay', {
+ new Element('input', {
'type':'text',
'value': data.label,
'placeholder': 'Profile name'
@@ -47,7 +47,7 @@ var Profile = new Class({
new Element('div.wait_for.ctrlHolder').adopt(
// "Wait the entered number of days for a checked quality, before downloading a lower quality release."
new Element('span', {'text':'Wait'}),
- new Element('input.inlay.wait_for_input.xsmall', {
+ new Element('input.wait_for_input.xsmall', {
'type':'text',
'value': data.wait_for && data.wait_for.length > 0 ? data.wait_for[0] : 0
}),
@@ -55,7 +55,7 @@ var Profile = new Class({
new Element('span.advanced', {'text':'and keep searching'}),
// "After a checked quality is found and downloaded, continue searching for even better quality releases for the entered number of days."
- new Element('input.inlay.xsmall.stop_after_input.advanced', {
+ new Element('input.xsmall.stop_after_input.advanced', {
'type':'text',
'value': data.stop_after && data.stop_after.length > 0 ? data.stop_after[0] : 0
}),
@@ -63,7 +63,7 @@ var Profile = new Class({
// Minimum score of
new Element('span.advanced', {'html':' Releases need a minimum score of'}),
- new Element('input.advanced.inlay.xsmall.minimum_score_input', {
+ new Element('input.advanced.xsmall.minimum_score_input', {
'size': 4,
'type':'text',
'value': data.minimum_score || 1
@@ -81,7 +81,7 @@ var Profile = new Class({
'quality': quality,
'finish': data.finish[nr] || false,
'3d': data['3d'] ? data['3d'][nr] || false : false
- })
+ });
});
}
@@ -123,7 +123,7 @@ var Profile = new Class({
}
});
- }).delay(delay, self)
+ }).delay(delay, self);
},
@@ -148,7 +148,7 @@ var Profile = new Class({
});
});
- return data
+ return data;
},
addType: function(data){
@@ -177,7 +177,7 @@ var Profile = new Class({
var self = this;
return self.types.filter(function(type){
- return type.get('quality')
+ return type.get('quality');
});
},
@@ -231,15 +231,15 @@ var Profile = new Class({
},
get: function(attr){
- return this.data[attr]
+ return this.data[attr];
},
isCore: function(){
- return this.data.core
+ return this.data.core;
},
toElement: function(){
- return this.el
+ return this.el;
}
});
@@ -270,47 +270,42 @@ Profile.Type = new Class({
var data = self.data;
self.el = new Element('li.type').adopt(
- new Element('span.quality_type').grab(
+ new Element('span.quality_type.select_wrapper.icon-dropdown').grab(
self.fillQualities()
),
self.finish_container = new Element('label.finish').adopt(
- new Element('span.finish').grab(
- self.finish = new Element('input.inlay.finish[type=checkbox]', {
- 'checked': data.finish !== undefined ? data.finish : 1,
- 'events': {
- 'change': function(){
- if(self.el == self.el.getParent().getElement(':first-child')){
- self.finish_class.check();
- alert('Top quality always finishes the search');
- return;
- }
-
- self.fireEvent('change');
+ self.finish = new Element('input.finish[type=checkbox]', {
+ 'checked': data.finish !== undefined ? data.finish : 1,
+ 'events': {
+ 'change': function(){
+ if(self.el == self.el.getParent().getElement(':first-child')){
+ alert('Top quality always finishes the search');
+ return;
}
+
+ self.fireEvent('change');
}
- })
- ),
+ }
+ }),
new Element('span.check_label[text=finish]')
),
self['3d_container'] = new Element('label.threed').adopt(
- new Element('span.3d').grab(
- self['3d'] = new Element('input.inlay.3d[type=checkbox]', {
- 'checked': data['3d'] !== undefined ? data['3d'] : 0,
- 'events': {
- 'change': function(){
- self.fireEvent('change');
- }
+ self['3d'] = new Element('input.3d[type=checkbox]', {
+ 'checked': data['3d'] !== undefined ? data['3d'] : 0,
+ 'events': {
+ 'change': function(){
+ self.fireEvent('change');
}
- })
- ),
+ }
+ }),
new Element('span.check_label[text=3D]')
),
- new Element('span.delete.icon2', {
+ new Element('span.delete.icon-cancel', {
'events': {
'click': self.del.bind(self)
}
}),
- new Element('span.handle')
+ new Element('span.handle.icon-handle')
);
self.el[self.data.quality ? 'removeClass' : 'addClass']('is_empty');
@@ -318,9 +313,6 @@ Profile.Type = new Class({
if(self.data.quality && Quality.getQuality(self.data.quality).allow_3d)
self.el.addClass('allow_3d');
- self.finish_class = new Form.Check(self.finish);
- self['3d_class'] = new Form.Check(self['3d']);
-
},
fillQualities: function(){
@@ -342,7 +334,7 @@ Profile.Type = new Class({
'text': q.label,
'value': q.identifier,
'data-allow_3d': q.allow_3d
- }).inject(self.qualities)
+ }).inject(self.qualities);
});
self.qualities.set('value', self.data.quality);
@@ -358,7 +350,7 @@ Profile.Type = new Class({
'quality': self.qualities.get('value'),
'finish': +self.finish.checked,
'3d': +self['3d'].checked
- }
+ };
},
get: function(key){
diff --git a/couchpotato/core/plugins/profile/static/profile.scss b/couchpotato/core/plugins/profile/static/profile.scss
new file mode 100644
index 0000000..ee9ae41
--- /dev/null
+++ b/couchpotato/core/plugins/profile/static/profile.scss
@@ -0,0 +1,150 @@
+@import "_mixins";
+
+.add_new_profile {
+ padding: 20px;
+ display: block;
+ text-align: center;
+ font-size: 20px;
+ border-bottom: 1px solid $theme_off;
+}
+
+.profile {
+ margin-bottom: 20px;
+
+ .quality_label input {
+ font-weight: bold;
+ }
+
+ .ctrlHolder {
+
+ .types {
+ flex: 1 1 auto;
+ min-width: 360px;
+
+ .type {
+ display: flex;
+ flex-flow: row nowrap;
+ align-items: center;
+ padding: 2px 0;
+
+ label {
+ min-width: 0;
+ margin-left: $padding/2;
+
+ span {
+ font-size: .9em;
+ }
+ }
+
+ input[type=checkbox] {
+ margin-right: 3px;
+ }
+
+ .delete, .handle {
+ margin-left: $padding/4;
+ width: 20px;
+ font-size: 20px;
+ opacity: .1;
+ text-align: center;
+ cursor: pointer;
+
+ &.handle {
+ cursor: move;
+ cursor: grab;
+ }
+
+ &:hover {
+ opacity: 1;
+ }
+ }
+
+ &.is_empty {
+ .delete, .handle {
+ display: none;
+ }
+ }
+ }
+
+ }
+
+ &.wait_for.wait_for {
+ display: block;
+
+ input {
+ min-width: 0;
+ width: 40px;
+ text-align: center;
+ margin: 0 2px;
+ }
+
+ .advanced {
+ display: none;
+ color: $primary_color;
+
+ .show_advanced & {
+ display: inline;
+ }
+ }
+
+ }
+
+ .formHint {
+ }
+
+ }
+}
+
+#profile_ordering {
+ ul {
+ list-style: none;
+ margin: 0;
+ width: 275px;
+ padding: 0;
+ }
+
+ li {
+ border-bottom: 1px solid $theme_off;
+ padding: 5px;
+ display: flex;
+ align-items: center;
+
+ &:hover {
+ background: $theme_off;
+ }
+
+ &:last-child { border: 0; }
+
+ input[type=checkbox] {
+ margin: 2px 10px 0 0;
+ vertical-align: top;
+ }
+
+ > span {
+ display: inline-block;
+ height: 20px;
+ vertical-align: top;
+ line-height: 20px;
+
+ &.profile_label {
+ flex: 1 1 auto;
+ }
+ }
+
+ .handle {
+ font-size: 20px;
+ width: 20px;
+ float: right;
+ cursor: move;
+ cursor: grab;
+ opacity: .5;
+ text-align: center;
+
+ &:hover {
+ opacity: 1;
+ }
+ }
+ }
+
+ .formHint {
+ }
+}
diff --git a/couchpotato/core/plugins/quality/main.py b/couchpotato/core/plugins/quality/main.py
index 7d80f72..3ed1a7a 100644
--- a/couchpotato/core/plugins/quality/main.py
+++ b/couchpotato/core/plugins/quality/main.py
@@ -24,8 +24,8 @@ class QualityPlugin(Plugin):
qualities = [
{'identifier': 'bd50', 'hd': True, 'allow_3d': True, 'size': (20000, 60000), 'median_size': 40000, 'label': 'BR-Disk', 'alternative': ['bd25', ('br', 'disk')], 'allow': ['1080p'], 'ext':['iso', 'img'], 'tags': ['bdmv', 'certificate', ('complete', 'bluray'), 'avc', 'mvc']},
- {'identifier': '1080p', 'hd': True, 'allow_3d': True, 'size': (4000, 20000), 'median_size': 10000, 'label': '1080p', 'width': 1920, 'height': 1080, 'alternative': [], 'allow': [], 'ext':['mkv', 'm2ts', 'ts'], 'tags': ['m2ts', 'x264', 'h264']},
- {'identifier': '720p', 'hd': True, 'allow_3d': True, 'size': (3000, 10000), 'median_size': 5500, 'label': '720p', 'width': 1280, 'height': 720, 'alternative': [], 'allow': [], 'ext':['mkv', 'ts'], 'tags': ['x264', 'h264']},
+ {'identifier': '1080p', 'hd': True, 'allow_3d': True, 'size': (4000, 20000), 'median_size': 10000, 'label': '1080p', 'width': 1920, 'height': 1080, 'alternative': [], 'allow': [], 'ext':['mkv', 'm2ts', 'ts'], 'tags': ['m2ts', 'x264', 'h264', '1080']},
+ {'identifier': '720p', 'hd': True, 'allow_3d': True, 'size': (3000, 10000), 'median_size': 5500, 'label': '720p', 'width': 1280, 'height': 720, 'alternative': [], 'allow': [], 'ext':['mkv', 'ts'], 'tags': ['x264', 'h264', '720']},
{'identifier': 'brrip', 'hd': True, 'allow_3d': True, 'size': (700, 7000), 'median_size': 2000, 'label': 'BR-Rip', 'alternative': ['bdrip', ('br', 'rip'), 'hdtv', 'hdrip'], 'allow': ['720p', '1080p'], 'ext':['mp4', 'avi'], 'tags': ['webdl', ('web', 'dl')]},
{'identifier': 'dvdr', 'size': (3000, 10000), 'median_size': 4500, 'label': 'DVD-R', 'alternative': ['br2dvd', ('dvd', 'r')], 'allow': [], 'ext':['iso', 'img', 'vob'], 'tags': ['pal', 'ntsc', 'video_ts', 'audio_ts', ('dvd', 'r'), 'dvd9']},
{'identifier': 'dvdrip', 'size': (600, 2400), 'median_size': 1500, 'label': 'DVD-Rip', 'width': 720, 'alternative': [('dvd', 'rip')], 'allow': [], 'ext':['avi'], 'tags': [('dvd', 'rip'), ('dvd', 'xvid'), ('dvd', 'divx')]},
@@ -271,8 +271,8 @@ class QualityPlugin(Plugin):
words = words[:-1]
points = {
- 'identifier': 20,
- 'label': 20,
+ 'identifier': 25,
+ 'label': 25,
'alternative': 20,
'tags': 11,
'ext': 5,
@@ -487,11 +487,12 @@ class QualityPlugin(Plugin):
'Movie Name (2014).mkv': {'size': 4500, 'quality': '720p', 'extra': {'titles': ['Movie Name 2014 720p Bluray']}},
'Movie Name (2015).mkv': {'size': 500, 'quality': '1080p', 'extra': {'resolution_width': 1920}},
'Movie Name (2015).mp4': {'size': 6500, 'quality': 'brrip'},
- 'Movie Name (2015).mp4': {'size': 6500, 'quality': 'brrip'},
'Movie Name.2014.720p Web-Dl Aac2.0 h264-ReleaseGroup': {'size': 3800, 'quality': 'brrip'},
'Movie Name.2014.720p.WEBRip.x264.AC3-ReleaseGroup': {'size': 3000, 'quality': 'scr'},
'Movie.Name.2014.1080p.HDCAM.-.ReleaseGroup': {'size': 5300, 'quality': 'cam'},
'Movie.Name.2014.720p.HDSCR.4PARTS.MP4.AAC.ReleaseGroup': {'size': 2401, 'quality': 'scr'},
+ 'Movie.Name.2014.720p.BluRay.x264-ReleaseGroup': {'size': 10300, 'quality': '720p'},
+ 'Movie.Name.2014.720.Bluray.x264.DTS-ReleaseGroup': {'size': 9700, 'quality': '720p'},
}
correct = 0
diff --git a/couchpotato/core/plugins/quality/static/quality.css b/couchpotato/core/plugins/quality/static/quality.css
deleted file mode 100644
index f71f007..0000000
--- a/couchpotato/core/plugins/quality/static/quality.css
+++ /dev/null
@@ -1,26 +0,0 @@
-.group_sizes {
-
-}
-
- .group_sizes .head {
- font-weight: bold;
- }
-
- .group_sizes .ctrlHolder {
- padding-top: 4px !important;
- padding-bottom: 4px !important;
- font-size: 12px;
- }
-
- .group_sizes .label {
- max-width: 120px;
- }
-
- .group_sizes .min, .group_sizes .max {
- text-align: center;
- width: 50px;
- max-width: 50px;
- margin: 0 5px !important;
- padding: 0 3px;
- display: inline-block;
- }
\ No newline at end of file
diff --git a/couchpotato/core/plugins/quality/static/quality.js b/couchpotato/core/plugins/quality/static/quality.js
index d233b1c..67924b0 100644
--- a/couchpotato/core/plugins/quality/static/quality.js
+++ b/couchpotato/core/plugins/quality/static/quality.js
@@ -12,20 +12,20 @@ var QualityBase = new Class({
self.profiles = [];
Array.each(data.profiles, self.createProfilesClass.bind(self));
- App.addEvent('loadSettings', self.addSettings.bind(self))
+ App.addEvent('loadSettings', self.addSettings.bind(self));
},
getProfile: function(id){
return this.profiles.filter(function(profile){
- return profile.data._id == id
- }).pick()
+ return profile.data._id == id;
+ }).pick();
},
// Hide items when getting profiles
getActiveProfiles: function(){
return Array.filter(this.profiles, function(profile){
- return !profile.data.hide
+ return !profile.data.hide;
});
},
@@ -37,7 +37,7 @@ var QualityBase = new Class({
}
catch(e){}
- return {}
+ return {};
},
addSettings: function(){
@@ -58,7 +58,7 @@ var QualityBase = new Class({
self.createProfileOrdering();
self.createSizes();
- })
+ });
},
@@ -68,7 +68,7 @@ var QualityBase = new Class({
createProfiles: function(){
var self = this;
- var non_core_profiles = Array.filter(self.profiles, function(profile){ return !profile.isCore() });
+ var non_core_profiles = Array.filter(self.profiles, function(profile){ return !profile.isCore(); });
var count = non_core_profiles.length;
self.settings.createGroup({
@@ -81,7 +81,7 @@ var QualityBase = new Class({
'events': {
'click': function(){
var profile = self.createProfilesClass();
- $(profile).inject(self.profile_container)
+ $(profile).inject(self.profile_container);
}
}
})
@@ -89,7 +89,7 @@ var QualityBase = new Class({
// Add profiles, that aren't part of the core (for editing)
Array.each(non_core_profiles, function(profile){
- $(profile).inject(self.profile_container)
+ $(profile).inject(self.profile_container);
});
},
@@ -97,7 +97,7 @@ var QualityBase = new Class({
createProfilesClass: function(data){
var self = this;
- var data = data || {'id': randomString()};
+ data = data || {'id': randomString()};
var profile = new Profile(data);
self.profiles.include(profile);
@@ -110,7 +110,7 @@ var QualityBase = new Class({
self.settings.createGroup({
'label': 'Profile Defaults',
'description': '(Needs refresh \'' +(App.isMac() ? 'CMD+R' : 'F5')+ '\' after editing)'
- }).adopt(
+ }).grab(
new Element('.ctrlHolder#profile_ordering').adopt(
new Element('label[text=Order]'),
self.profiles_list = new Element('ul'),
@@ -123,7 +123,7 @@ var QualityBase = new Class({
Array.each(self.profiles, function(profile){
var check;
new Element('li', {'data-id': profile.data._id}).adopt(
- check = new Element('input.inlay[type=checkbox]', {
+ check = new Element('input[type=checkbox]', {
'checked': !profile.data.hide,
'events': {
'change': self.saveProfileOrdering.bind(self)
@@ -132,11 +132,8 @@ var QualityBase = new Class({
new Element('span.profile_label', {
'text': profile.data.label
}),
- new Element('span.handle')
+ new Element('span.handle.icon-handle')
).inject(self.profiles_list);
-
- new Form.Check(check);
-
});
// Sortable
@@ -190,7 +187,6 @@ var QualityBase = new Class({
'name': 'sizes'
}).inject(self.content);
-
new Element('div.item.head.ctrlHolder').adopt(
new Element('span.label', {'text': 'Quality'}),
new Element('span.min', {'text': 'Min'}),
@@ -200,23 +196,23 @@ var QualityBase = new Class({
Array.each(self.qualities, function(quality){
new Element('div.ctrlHolder.item').adopt(
new Element('span.label', {'text': quality.label}),
- new Element('input.min.inlay[type=text]', {
+ new Element('input.min[type=text]', {
'value': quality.size_min,
'events': {
'keyup': function(e){
- self.changeSize(quality.identifier, 'size_min', e.target.get('value'))
+ self.changeSize(quality.identifier, 'size_min', e.target.get('value'));
}
}
}),
- new Element('input.max.inlay[type=text]', {
+ new Element('input.max[type=text]', {
'value': quality.size_max,
'events': {
'keyup': function(e){
- self.changeSize(quality.identifier, 'size_max', e.target.get('value'))
+ self.changeSize(quality.identifier, 'size_max', e.target.get('value'));
}
}
})
- ).inject(group)
+ ).inject(group);
});
},
@@ -235,7 +231,7 @@ var QualityBase = new Class({
'value': value
}
});
- }).delay(300)
+ }).delay(300);
}
diff --git a/couchpotato/core/plugins/quality/static/quality.scss b/couchpotato/core/plugins/quality/static/quality.scss
new file mode 100644
index 0000000..c2aa9f9
--- /dev/null
+++ b/couchpotato/core/plugins/quality/static/quality.scss
@@ -0,0 +1,19 @@
+@import "_mixins";
+
+.group_sizes {
+
+ .item {
+ .label {
+ min-width: 150px;
+ }
+
+ .min, .max {
+ display: inline-block;
+ width: 70px !important;
+ min-width: 0 !important;
+ margin-right: $padding/2;
+ text-align: center;
+ }
+ }
+
+}
diff --git a/couchpotato/core/plugins/release/main.py b/couchpotato/core/plugins/release/main.py
index a241c34..41d8241 100644
--- a/couchpotato/core/plugins/release/main.py
+++ b/couchpotato/core/plugins/release/main.py
@@ -555,6 +555,8 @@ class Release(Plugin):
releases.append(doc)
except RecordDeleted:
pass
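+ # Records that fail to load raise ValueError/EOFError; flag them so the database can remove the corrupted entry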
+ except (ValueError, EOFError):
+ fireEvent('database.delete_corrupted', r.get('_id'), traceback_error = traceback.format_exc(0))
releases = sorted(releases, key = lambda k: k.get('info', {}).get('score', 0), reverse = True)
@@ -563,4 +565,4 @@ class Release(Plugin):
if download_preference != 'both':
releases = sorted(releases, key = lambda k: k.get('info', {}).get('protocol', '')[:3], reverse = (download_preference == 'torrent'))
- return releases
+ return releases or []
diff --git a/couchpotato/core/plugins/renamer.py b/couchpotato/core/plugins/renamer.py
index 60b4f8b..fd710d5 100755
--- a/couchpotato/core/plugins/renamer.py
+++ b/couchpotato/core/plugins/renamer.py
@@ -886,8 +886,8 @@ Remove it if you want it to be renamed (again, or at least let it try again)
replaced = replaced.replace('<' + x + '>', '')
if self.conf('replace_doubles'):
- replaced = self.replaceDoubles(replaced.lstrip('. '))
-
+ replaced = self.replaceDoubles(replaced.lstrip('. '))
+
for x, r in replacements.items():
if x in ['thename', 'namethe']:
replaced = replaced.replace(six.u('<%s>') % toUnicode(x), toUnicode(r))
@@ -966,6 +966,7 @@ Remove it if you want it to be renamed (again, or at least let it try again)
try:
for rel in rels:
+ if not rel.get('media_id'): continue
movie_dict = db.get('id', rel.get('media_id'))
download_info = rel.get('download_info')
@@ -1202,7 +1203,10 @@ Remove it if you want it to be renamed (again, or at least let it try again)
log.info('Archive %s found. Extracting...', os.path.basename(archive['file']))
try:
- rar_handle = RarFile(archive['file'], custom_path = self.conf('unrar_path'))
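+ # Only pass the configured unrar path when it points to an existing file or is a simple command/path string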
+ unrar_path = self.conf('unrar_path')
+ unrar_path = unrar_path if unrar_path and (os.path.isfile(unrar_path) or re.match('^[a-zA-Z0-9_/\.\-]+$', unrar_path)) else None
+
+ rar_handle = RarFile(archive['file'], custom_path = unrar_path)
extr_path = os.path.join(from_folder, os.path.relpath(os.path.dirname(archive['file']), folder))
self.makeDir(extr_path)
for packedinfo in rar_handle.infolist():
@@ -1326,7 +1330,7 @@ config = [{
{
'name': 'to',
'type': 'directory',
- 'description': 'Default folder where the movies are moved to.',
+ 'description': 'Default folder where the movies are moved/copied/linked to.',
},
{
'name': 'folder_name',
@@ -1345,12 +1349,12 @@ config = [{
'options': rename_options
},
{
- 'advanced': True,
+ 'advanced': True,
'name': 'replace_doubles',
'type': 'bool',
'label': 'Clean Name',
'description': ('Attempt to clean up double separaters due to missing data for fields.','Sometimes this eliminates wanted white space (see #2782).'),
- 'default': True
+ 'default': True
},
{
'name': 'unrar',
@@ -1444,7 +1448,7 @@ config = [{
'default': 'link',
'type': 'dropdown',
'values': [('Link', 'link'), ('Copy', 'copy'), ('Move', 'move')],
- 'description': 'See above. It is prefered to use link when downloading torrents as it will save you space, while still beeing able to seed.',
+ 'description': 'See above. It is preferred to use link when downloading torrents as it will save you space, while still being able to seed.',
'advanced': True,
},
{
diff --git a/couchpotato/core/plugins/scanner.py b/couchpotato/core/plugins/scanner.py
index aca9e64..1088ddd 100644
--- a/couchpotato/core/plugins/scanner.py
+++ b/couchpotato/core/plugins/scanner.py
@@ -63,8 +63,8 @@ class Scanner(Plugin):
}
file_sizes = { # in MB
- 'movie': {'min': 300},
- 'trailer': {'min': 2, 'max': 250},
+ 'movie': {'min': 200},
+ 'trailer': {'min': 2, 'max': 199},
'backdrop': {'min': 0, 'max': 5},
}
@@ -797,6 +797,10 @@ class Scanner(Plugin):
identifier = file_path.replace(folder, '').lstrip(os.path.sep) # root folder
identifier = os.path.splitext(identifier)[0] # ext
+ # Exclude file name path if needed (f.e. for DVD files)
+ if exclude_filename:
+ identifier = identifier[:len(identifier) - len(os.path.split(identifier)[-1])]
+
# Make sure the identifier is lower case as all regex is with lower case tags
identifier = identifier.lower()
@@ -805,9 +809,6 @@ class Scanner(Plugin):
identifier = path_split[-2] if len(path_split) > 1 and len(path_split[-2]) > len(path_split[-1]) else path_split[-1] # Only get filename
except: pass
- if exclude_filename:
- identifier = identifier[:len(identifier) - len(os.path.split(identifier)[-1])]
-
# multipart
identifier = self.removeMultipart(identifier)
diff --git a/couchpotato/core/plugins/userscript/main.py b/couchpotato/core/plugins/userscript/main.py
index 4ca8ed3..0499526 100644
--- a/couchpotato/core/plugins/userscript/main.py
+++ b/couchpotato/core/plugins/userscript/main.py
@@ -1,4 +1,7 @@
import os
+import traceback
+import time
+from base64 import b64encode, b64decode
from couchpotato import index
from couchpotato.api import addApiView
@@ -15,7 +18,7 @@ log = CPLog(__name__)
class Userscript(Plugin):
- version = 5
+ version = 8
def __init__(self):
addApiView('userscript.get/(.*)/(.*)', self.getUserScript, static = True)
@@ -26,6 +29,7 @@ class Userscript(Plugin):
addApiView('userscript.bookmark', self.bookmark)
addEvent('userscript.get_version', self.getVersion)
+ addEvent('app.test', self.doTest)
def bookmark(self, host = None, **kwargs):
@@ -91,3 +95,46 @@ class Userscript(Plugin):
params['error'] = params['movie'] if params['movie'] else 'Failed getting movie info'
return params
+
+ def doTest(self):
+ time.sleep(1)
+
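+ # Base64-encoded movie page URLs; each should resolve via getViaUrl to one of the expected IMDB ids below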
+ tests = [
+ 'aHR0cDovL3d3dy5hbGxvY2luZS5mci9maWxtL2ZpY2hlZmlsbV9nZW5fY2ZpbG09MjAxMTA1Lmh0bWw=',
+ 'aHR0cDovL3RyYWlsZXJzLmFwcGxlLmNvbS90cmFpbGVycy9wYXJhbW91bnQvbWlzc2lvbmltcG9zc2libGVyb2d1ZW5hdGlvbi8=',
+ 'aHR0cDovL3d3dy55b3V0aGVhdGVyLmNvbS92aWV3LnBocD9pZD0xMTI2Mjk5',
+ 'aHR0cDovL3RyYWt0LnR2L21vdmllcy9taXNzaW9uLWltcG9zc2libGUtcm9ndWUtbmF0aW9uLTIwMTU=',
+ 'aHR0cHM6Ly93d3cucmVkZGl0LmNvbS9yL0lqdXN0d2F0Y2hlZC9jb21tZW50cy8zZjk3bzYvaWp3X21pc3Npb25faW1wb3NzaWJsZV9yb2d1ZV9uYXRpb25fMjAxNS8=',
+ 'aHR0cDovL3d3dy5yb3R0ZW50b21hdG9lcy5jb20vbS9taXNzaW9uX2ltcG9zc2libGVfcm9ndWVfbmF0aW9uLw==',
+ 'aHR0cHM6Ly93d3cudGhlbW92aWVkYi5vcmcvbW92aWUvMTc3Njc3LW1pc3Npb24taW1wb3NzaWJsZS01',
+ 'aHR0cDovL3d3dy5jcml0aWNrZXIuY29tL2ZpbG0vTWlzc2lvbl9JbXBvc3NpYmxlX1JvZ3VlLw==',
+ 'aHR0cDovL2ZpbG1jZW50cnVtLm5sL2ZpbG1zLzE4MzIzL21pc3Npb24taW1wb3NzaWJsZS1yb2d1ZS1uYXRpb24v',
+ 'aHR0cDovL3d3dy5maWxtc3RhcnRzLmRlL2tyaXRpa2VuLzIwMTEwNS5odG1s',
+ 'aHR0cDovL3d3dy5maWxtd2ViLnBsL2ZpbG0vTWlzc2lvbiUzQStJbXBvc3NpYmxlKy0rUm9ndWUrTmF0aW9uLTIwMTUtNjU1MDQ4',
+ 'aHR0cDovL3d3dy5mbGlja2NoYXJ0LmNvbS9tb3ZpZS8zM0NFMzEyNUJB',
+ 'aHR0cDovL3d3dy5pbWRiLmNvbS90aXRsZS90dDIzODEyNDkv',
+ 'aHR0cDovL2xldHRlcmJveGQuY29tL2ZpbG0vbWlzc2lvbi1pbXBvc3NpYmxlLXJvZ3VlLW5hdGlvbi8=',
+ 'aHR0cDovL3d3dy5tb3ZpZW1ldGVyLm5sL2ZpbG0vMTA0MTcw',
+ 'aHR0cDovL21vdmllcy5pby9tLzMxL2Vu',
+ ]
+
+ success = 0
+ for x in tests:
+ x = b64decode(x)
+ try:
+ movie = self.getViaUrl(x)
+ movie = movie.get('movie', {}) or {}
+ imdb = movie.get('imdb')
+
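+ # The expected IMDb ids are compared in base64 form: 'dHQxMjI5MjM4' is tt1229238, 'dHQyMzgxMjQ5' is tt2381249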
+ if imdb and b64encode(imdb) in ['dHQxMjI5MjM4', 'dHQyMzgxMjQ5']:
+ success += 1
+ continue
+ except:
+ log.error('Failed userscript test "%s": %s', (x, traceback.format_exc()))
+
+ log.error('Failed userscript test "%s"', x)
+
+ if success == len(tests):
+ log.debug('All userscript tests successful')
+ else:
+ log.error('Failed userscript tests, %s out of %s', (success, len(tests)))
diff --git a/couchpotato/core/plugins/userscript/static/userscript.css b/couchpotato/core/plugins/userscript/static/userscript.css
deleted file mode 100644
index d816101..0000000
--- a/couchpotato/core/plugins/userscript/static/userscript.css
+++ /dev/null
@@ -1,38 +0,0 @@
-.page.userscript {
- position: absolute;
- width: 100%;
- top: 0;
- bottom: 0;
- left: 0;
- right: 0;
- padding: 0;
-}
-
- .page.userscript .frame.loading {
- text-align: center;
- font-size: 20px;
- padding: 20px;
- }
-
- .page.userscript .media_result {
- height: 140px;
- }
- .page.userscript .media_result .thumbnail {
- width: 90px;
- }
- .page.userscript .media_result .options {
- left: 90px;
- padding: 54px 15px;
- }
-
- .page.userscript .media_result .year {
- display: none;
- }
-
- .page.userscript .media_result .options select[name="title"] {
- width: 190px;
- }
-
- .page.userscript .media_result .options select[name="profile"] {
- width: 70px;
- }
diff --git a/couchpotato/core/plugins/userscript/static/userscript.js b/couchpotato/core/plugins/userscript/static/userscript.js
index d8caeb3..7bb6055 100644
--- a/couchpotato/core/plugins/userscript/static/userscript.js
+++ b/couchpotato/core/plugins/userscript/static/userscript.js
@@ -16,7 +16,7 @@ Page.Userscript = new Class({
indexAction: function(){
var self = this;
- self.el.adopt(
+ self.content.grab(
self.frame = new Element('div.frame.loading', {
'text': 'Loading...'
})
@@ -35,7 +35,7 @@ Page.Userscript = new Class({
if(json.error)
self.frame.set('html', json.error);
else {
- var item = new Block.Search.MovieItem(json.movie);
+ var item = new BlockSearchMovieItem(json.movie);
self.frame.adopt(item);
item.showOptions();
}
@@ -54,7 +54,7 @@ var UserscriptSettingTab = new Class({
initialize: function(){
var self = this;
- App.addEvent('loadSettings', self.addSettings.bind(self))
+ App.addEvent('loadSettings', self.addSettings.bind(self));
},
@@ -80,25 +80,28 @@ var UserscriptSettingTab = new Class({
new Element('span.bookmarklet').adopt(
new Element('a.button.green', {
'text': '+CouchPotato',
+ /* jshint ignore:start */
'href': "javascript:void((function(){var e=document.createElement('script');e.setAttribute('type','text/javascript');e.setAttribute('charset','UTF-8');e.setAttribute('src','" +
host_url + Api.createUrl('userscript.bookmark') +
"?host="+ encodeURI(host_url + Api.createUrl('userscript.get')+randomString()+'/') +
"&r='+Math.random()*99999999);document.body.appendChild(e)})());",
+ /* jshint ignore:end */
'target': '',
'events': {
'click': function(e){
(e).stop();
- alert('Drag it to your bookmark ;)')
+ alert('Drag it to your bookmark ;)');
}
}
}),
new Element('span', {
'text': '⇽ Drag this to your bookmarks'
})
- )
- ).setStyles({
- 'background-image': "url('https://couchpota.to/media/images/userscript.gif')"
- });
+ ),
+ new Element('img', {
+ 'src': 'https://couchpota.to/media/images/userscript.gif'
+ })
+ );
});
diff --git a/couchpotato/core/plugins/userscript/static/userscript.scss b/couchpotato/core/plugins/userscript/static/userscript.scss
new file mode 100644
index 0000000..720a69f
--- /dev/null
+++ b/couchpotato/core/plugins/userscript/static/userscript.scss
@@ -0,0 +1,72 @@
+@import "_mixins";
+
+.page.userscript {
+ position: absolute;
+ width: 100%;
+ top: 0;
+ bottom: 0;
+ left: 0;
+ right: 0;
+ padding: 0;
+
+ .frame.loading {
+ text-align: center;
+ font-size: 20px;
+ padding: 20px;
+ }
+
+ .media_result {
+ height: 140px;
+ display: flex;
+ }
+
+ .thumbnail {
+ width: 90px;
+ }
+
+ .options {
+ left: 90px;
+ display: flex;
+ align-items: center;
+ padding: $padding/2;
+
+ > div {
+ display: flex;
+ flex-wrap: wrap;
+
+ div {
+ flex: 1 auto;
+ margin: 0;
+ padding: 0 $padding/4;
+ }
+
+ .title {
+ min-width: 100%;
+ margin-bottom: $padding;
+ }
+
+ .add {
+ text-align: right;
+
+ a {
+ display: block;
+ text-align: center;
+ }
+ }
+
+ select {
+ width: 100%;
+ }
+ }
+ }
+
+ .message {
+ font-size: 1.5em;
+ }
+
+ .year,
+ .data {
+ display: none;
+ }
+
+}
diff --git a/couchpotato/core/plugins/userscript/template.js_tmpl b/couchpotato/core/plugins/userscript/template.js_tmpl
index 5a32da3..25e1842 100644
--- a/couchpotato/core/plugins/userscript/template.js_tmpl
+++ b/couchpotato/core/plugins/userscript/template.js_tmpl
@@ -67,13 +67,13 @@ var addStyle = function(css) {
// Styles
addStyle('\
- #cp_popup { font-family: "Helvetica Neue", Helvetica, Arial, Geneva, sans-serif; -moz-border-radius: 6px 0px 0px 6px; -webkit-border-radius: 6px 0px 0px 6px; border-radius: 6px 0px 0px 6px; -moz-box-shadow: 0 0 20px rgba(0,0,0,0.5); -webkit-box-shadow: 0 0 20px rgba(0,0,0,0.5); box-shadow: 0 0 20px rgba(0,0,0,0.5); position:fixed; z-index:20000; bottom:0; right:0; font-size:15px; margin: 20px 0; display: block; background:#4E5969; } \
+ #cp_popup { font-family: "Helvetica Neue", Helvetica, Arial, Geneva, sans-serif; -moz-border-radius: 6px 0px 0px 6px; -webkit-border-radius: 6px 0px 0px 6px; border-radius: 6px 0px 0px 6px; -moz-box-shadow: 0 0 20px rgba(0,0,0,0.5); -webkit-box-shadow: 0 0 20px rgba(0,0,0,0.5); box-shadow: 0 0 20px rgba(0,0,0,0.5); position:fixed; z-index:20000; bottom:0; right:0; font-size:15px; margin: 20px 0; display: block; background:#FFF; } \
#cp_popup.opened { width: 492px; } \
#cp_popup a#add_to { cursor:pointer; text-align:center; text-decoration:none; color: #000; display:block; padding:5px 0 5px 5px; } \
#cp_popup a#close_button { cursor:pointer; float: right; padding:120px 10px 10px; } \
#cp_popup a img { vertical-align: middle; } \
#cp_popup a:hover { color:#000; } \
- #cp_popup iframe{ background:#4E5969; margin:6px 0 2px 6px; height:140px; width:450px; overflow:hidden; border:none; } \
+ #cp_popup iframe{ background:#FFF; margin:6px 0 2px 6px; height:140px; width:450px; overflow:hidden; border:none; } \
');
var cp_icon = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACgAAAAoCAYAAACM/rhtAAADHmlDQ1BJQ0MgUHJvZmlsZQAAeAGFVN9r01AU/tplnbDhizpnEQk+aJFuZFN0Q5y2a1e6zVrqNrchSJumbVyaxiTtfrAH2YtvOsV38Qc++QcM2YNve5INxhRh+KyIIkz2IrOemzRNJ1MDufe73/nuOSfn5F6g+XFa0xQvDxRVU0/FwvzE5BTf8gFeHEMr/GhNi4YWSiZHQA/Tsnnvs/MOHsZsdO5v36v+Y9WalQwR8BwgvpQ1xCLhWaBpXNR0E+DWie+dMTXCzUxzWKcECR9nOG9jgeGMjSOWZjQ1QJoJwgfFQjpLuEA4mGng8w3YzoEU5CcmqZIuizyrRVIv5WRFsgz28B9zg/JfsKiU6Zut5xCNbZoZTtF8it4fOX1wjOYA1cE/Xxi9QbidcFg246M1fkLNJK4RJr3n7nRpmO1lmpdZKRIlHCS8YlSuM2xp5gsDiZrm0+30UJKwnzS/NDNZ8+PtUJUE6zHF9fZLRvS6vdfbkZMH4zU+pynWf0D+vff1corleZLw67QejdX0W5I6Vtvb5M2mI8PEd1E/A0hCgo4cZCjgkUIMYZpjxKr4TBYZIkqk0ml0VHmyONY7KJOW7RxHeMlfDrheFvVbsrj24Pue3SXXjrwVhcW3o9hR7bWB6bqyE5obf3VhpaNu4Te55ZsbbasLCFH+iuWxSF5lyk+CUdd1NuaQU5f8dQvPMpTuJXYSWAy6rPBe+CpsCk+FF8KXv9TIzt6tEcuAcSw+q55TzcbsJdJM0utkuL+K9ULGGPmQMUNanb4kTZyKOfLaUAsnBneC6+biXC/XB567zF3h+rkIrS5yI47CF/VFfCHwvjO+Pl+3b4hhp9u+02TrozFa67vTkbqisXqUj9sn9j2OqhMZsrG+sX5WCCu0omNqSrN0TwADJW1Ol/MFk+8RhAt8iK4tiY+rYleQTysKb5kMXpcMSa9I2S6wO4/tA7ZT1l3maV9zOfMqcOkb/cPrLjdVBl4ZwNFzLhegM3XkCbB8XizrFdsfPJ63gJE722OtPW1huos+VqvbdC5bHgG7D6vVn8+q1d3n5H8LeKP8BqkjCtbCoV8yAAAACXBIWXMAAAsTAAALEwEAmpwYAAABZGlUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNC40LjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iPgogICAgICAgICA8eG1wOkNyZWF0b3JUb29sPkFkb2JlIEltYWdlUmVhZHk8L3htcDpDcmVhdG9yVG9vbD4KICAgICAgPC9yZGY6RGVzY3JpcHRpb24+CiAgIDwvcmRmOlJERj4KPC94OnhtcG1ldGE+Chvleg4AAAdrSURBVFgJzZfPb1VVEMfn3PfaUgotVIpi+a3BWETFRIkYEzTRtRv8BxSiCRp1JStkBXGhC42J7Ay4wGrcqHHhAjfEaNSoESOoVQIWBIGW/qB9797r93Pem8d9zwe4KJFJpnPOnDkz35k559zXkOe5XS8KFsKW3VbC/8CQ5cNbjWAxYG7/LXC4XgABdzUQIYREsbNrFee6AAy7FXxXnt25N6wWgBdL8+1oeaH9Uh6wP5Zutj8/6c/HrwXM18s+mEu5xSyRv8yCbe5cbM9nM2bphHjcqidO2OmN88KZ8iI79NUT9pI6nquaFKrtWbsuAC8uEzRRKNtawGWzNqPwnVKVbdYGBX0wndHfOjhsr0RkOue0cLR2EZKSrUu6ItBS0mFBnCdAFGn8ZQy81TiLbasX7aLRHP859IqluAwlu41aClTQmDkyqYP+DZst62vVZtyO5rzF8faqIptfD92h21YE9SgHINFrj0yIJzSxY+0AtermvsW7axWZWGTLdAaXxgrSXpUiqMXSlfKqxmYjgDmk97EVVHE+ZxX0R3n9UKzPrACs1vPSJTB6DmugGQlkyCp2LpmykxHIkasDnJN3sN2jvHF/2FZeYPuyaUtVolIdRqrLUapO2A/fPWX3cDmu9sSQQNsK7tYrPxS7UCy22XDzNM4Wb9ctfDuvbHotrKv22gMdK+2nLx63b8rdNiQwRgX9/Omu5sk8OZ6wkQjua9XzPcuGn5RFO+eK8K8KDuuebc3zeAvb4LmiSlWcv+Gg/T6vywYU9nRWtR69dAsaG2oXpFLqsY6p47b3++35zsbaVQZNAPeF0LE9zysHensfzi5VdiRZVglpzkUMmaXp16aGiXhxVZzYNWWSp7dYuetU3/ljz409evQx21jWJYg/EYJMhNxPmb68aqmFypiNJKkdXpLa1Noxs01vylxXhhz844ykSo0Wq63lXQJ3sK/v/uzixUNKPWGDX/M+jZeLz4oXidkIT4ovncL5mA2+EezMUJ5dWKUE9KHLefe0DvEUK8uQCWT3YltT7bA1I9I9/LNZ/0gNmKaRkDAgI0DOnMBVPxwYWJePj3+mxz/R13xG4EoEoFoYax6ffOY6SlFfkZRtmO3TcRsrl279qJKM75BSnhOyqyPUTxsTOOusWpjKLUunLXvhfcvXv6sEZeaAiAP7PALUHFfZ1NkLr/aY9SrgrBa6+CGHgQDHDZSc9mKsb79N1Zlv16xaNdNfsdLH3bbokWkb3yQ7FjAWkVmnspmQs65pS545YMkdH5hNL5T+4mVADo5T0mixbiyAlUleriddAgjJs6DvfQRKtYiJExwwJ3v5j1I/AOR01rrekf1dUirbmmfNFW18vtlNSuTpt8xWfqoEexVD1QAIcZCtXM9PKyIFIzbnO6eNDhJQgKy3M4JhbYl4pXiVuF+c6kBeWJra5A89VvpcxeNJkbMORZkU2JUXzLbtMVsmcJM6yPwqdED4bmWK4C3WMILQOY5d0UtR606rgzPS03KYzdgxBuiAePQvvmGTdnJP2Xoe1Ftzq0AL5OBxsyd2KukjZqcXa8/52n5AeYyiBAfzJoAoYq/rkhbDEFVknWrJf9zIGXUbWqGbb7eIN8hg9HzJDg9XbfRls/sE6qFndSz0BIxqLRE4AKiAjTPNfvflMZFNACkpzAISqlfURjWmpSpITKLojDEBnACwSodizX6zX5eb3SvZIXBV3iqtQfjniULFXpJFtnJbgBhTLYwBSPXk3+4We4UdYNFhK9BB2a/YUwOT6Rx0jl1ODv+6wNYtbufL/TYBrMiUM8EFABhZAohMaR+bWEeixzHSq4yesVPsgm5q7KVumSeHBCC+sGFfO1/omwBizCY2eyAkTBV5TnBOhZ
08e5foGTu1+/+NdXySOL4AARjI/bhsAZhHgA4KCQEYwlk7gKwVQV1r7MEBhU+X6PHv6xrWgA8zEqVa8rJj6EAByFeDjJH8YqCiVBx2O/ZASOeoqM/xgz17YXzRFferd7jh07vYUsHL54KgBMEQ/lZ8Wsy7R9beGrKlAkXWNO5FOviixC+gRsRLxbyj7s/f32IMLdfIq+cSLZky56vlPxAIALHG2IOjc8DFgFw6QBQllXlQTPL4xxdfq6Jk3FRBAhVboGncQOlvFpMtALyKbPZMXaIrMnqvrp8tl1qK/ogLIYsJA74JYEU7q7IgI7KBPCNA8gsG5w7Aq+RzpOuKgAHooJBXIsA5+9FqAujlBhztgLz8rJEhARgXyZ2yjkOkA6Qj6LyKDlaqJsIH+2AHh2wCKGcBhw5Kw8YYPQxAB1R06qBw6uAAwxh/SAfn1ZQqUtEf+4tAmwCmiVUrshCQKq2FHBgSHc69Su6oVXrmgGINYOiKADVtIk8WWQQZAa6vFcWm0mo/H29l3IURYAjAmPcKY4IgCd4q0Tm7LXP8sK8IEJ1TsYKtyTb+q9M/0B2368euAK7Qc1LRGYw+2HBO/LeYn1lOHtAleh+7dF1xj4+vJInnxLgB0JU3mqQTDeK/ux/rlWso/+fBDV9BjssNTf8AmPnhttjsZCIAAAAASUVORK5CYII=';
diff --git a/couchpotato/core/plugins/wizard/static/wizard.css b/couchpotato/core/plugins/wizard/static/wizard.css
deleted file mode 100644
index 9af32ed..0000000
--- a/couchpotato/core/plugins/wizard/static/wizard.css
+++ /dev/null
@@ -1,84 +0,0 @@
-.page.wizard .uniForm {
- margin: 0 0 30px;
- width: 83%;
-}
-
-.page.wizard h1 {
- padding: 10px 0;
- display: block;
- font-size: 30px;
- margin: 80px 5px 0;
-}
-
-.page.wizard .description {
- padding: 10px 5px;
- font-size: 1.45em;
- line-height: 1.4em;
- display: block;
-}
-
-.page.wizard .tab_wrapper {
- background: #5c697b;
- height: 65px;
- font-size: 1.75em;
- position: fixed;
- top: 0;
- margin: 0;
- width: 100%;
- left: 0;
- z-index: 2;
- box-shadow: 0 0 10px rgba(0,0,0,0.1);
-}
-
- .page.wizard .tab_wrapper .tabs {
- padding: 0;
- margin: 0 auto;
- display: block;
- height: 100%;
- width: 100%;
- max-width: 960px;
- }
-
- .page.wizard .tabs li {
- display: inline-block;
- height: 100%;
- }
- .page.wizard .tabs li a {
- padding: 20px 10px;
- height: 100%;
- display: block;
- color: #FFF;
- font-weight: normal;
- border-bottom: 4px solid transparent;
- }
-
- .page.wizard .tabs li:hover a { border-color: #047792; }
- .page.wizard .tabs li.done a { border-color: #04bce6; }
-
- .page.wizard .tab_wrapper .pointer {
- border-right: 10px solid transparent;
- border-left: 10px solid transparent;
- border-top: 10px solid #5c697b;
- display: block;
- position: absolute;
- top: 44px;
- }
-
-.page.wizard .tab_content {
- margin: 20px 0 160px;
-}
-
-.page.wizard form > div {
- min-height: 300px;
-}
-
-.page.wizard .button.green {
- padding: 20px;
- font-size: 25px;
- margin: 10px 0 80px;
- display: block;
-}
-
-.page.wizard .tab_nzb_providers {
- margin: 20px 0 0 0;
-}
diff --git a/couchpotato/core/plugins/wizard/static/wizard.js b/couchpotato/core/plugins/wizard/static/wizard.js
index f215dbf..ec1101b 100644
--- a/couchpotato/core/plugins/wizard/static/wizard.js
+++ b/couchpotato/core/plugins/wizard/static/wizard.js
@@ -1,245 +1,202 @@
-Page.Wizard = new Class({
-
- Extends: Page.Settings,
-
- order: 70,
- name: 'wizard',
- has_tab: false,
- wizard_only: true,
-
- headers: {
- 'welcome': {
- 'title': 'Welcome to the new CouchPotato',
- 'description': 'To get started, fill in each of the following settings as much as you can.',
- 'content': new Element('div', {
- 'styles': {
- 'margin': '0 0 0 30px'
- }
- })
- },
- 'general': {
- 'title': 'General',
- 'description': 'If you want to access CP from outside your local network, you better secure it a bit with a username & password.'
- },
- 'downloaders': {
- 'title': 'What download apps are you using?',
- 'description': 'CP needs an external download app to work with. Choose one below. For more downloaders check settings after you have filled in the wizard. If your download app isn\'t in the list, use the default Blackhole.'
- },
- 'searcher': {
- 'label': 'Providers',
- 'title': 'Are you registered at any of these sites?',
- 'description': 'CP uses these sites to search for movies. A few free are enabled by default, but it\'s always better to have more.'
- },
- 'renamer': {
- 'title': 'Move & rename the movies after downloading?',
- 'description': 'The coolest part of CP is that it can move and organize your downloaded movies automagically. Check settings and you can even download trailers, subtitles and other data when it has finished downloading. It\'s awesome!'
- },
- 'automation': {
- 'title': 'Easily add movies to your wanted list!',
- 'description': 'You can easily add movies from your favorite movie site, like IMDB, Rotten Tomatoes, Apple Trailers and more. Just install the extension or drag the bookmarklet to your bookmarks.' +
- ' Once installed, just click the bookmarklet on a movie page and watch the magic happen ;)',
- 'content': function(){
- return App.createUserscriptButtons().setStyles({
- 'background-image': "url('https://couchpota.to/media/images/userscript.gif')"
- })
- }
- },
- 'finish': {
- 'title': 'Finishing Up',
- 'description': 'Are you done? Did you fill in everything as much as possible?' +
- ' Be sure to check the settings to see what more CP can do!' +
- ' After you\'ve used CP for a while, and you like it (which of course you will), consider supporting CP. Maybe even by writing some code. Or by getting a subscription at Usenet Server or Newshosting.',
- 'content': new Element('div').adopt(
- new Element('a.button.green', {
- 'styles': {
- 'margin-top': 20
- },
- 'text': 'I\'m ready to start the awesomeness, wow this button is big and green!',
- 'events': {
- 'click': function(e){
- (e).preventDefault();
- Api.request('settings.save', {
- 'data': {
- 'section': 'core',
- 'name': 'show_wizard',
- 'value': 0
- },
- 'useSpinner': true,
- 'spinnerOptions': {
- 'target': self.el
- },
- 'onComplete': function(){
- window.location = App.createUrl('wanted');
- }
- });
- }
- }
- })
- )
- }
- },
- groups: ['welcome', 'general', 'downloaders', 'searcher', 'renamer', 'automation', 'finish'],
-
- open: function(action, params){
- var self = this;
-
- if(!self.initialized){
- App.fireEvent('unload');
- App.getBlock('header').hide();
-
- self.parent(action, params);
-
- self.addEvent('create', function(){
- self.orderGroups();
- });
-
- self.initialized = true;
-
- self.scroll = new Fx.Scroll(document.body, {
- 'transition': 'quint:in:out'
- });
- }
- else
- (function(){
- var sc = self.el.getElement('.wgroup_'+action);
- self.scroll.start(0, sc.getCoordinates().top-80);
- }).delay(1)
- },
-
- orderGroups: function(){
- var self = this;
-
- var form = self.el.getElement('.uniForm');
- var tabs = self.el.getElement('.tabs');
-
- self.groups.each(function(group){
-
- if(self.headers[group]){
- var group_container = new Element('.wgroup_'+group, {
- 'styles': {
- 'opacity': 0.2
- },
- 'tween': {
- 'duration': 350
- }
- });
-
- if(self.headers[group].include){
- self.headers[group].include.each(function(inc){
- group_container.addClass('wgroup_'+inc);
- })
- }
-
- var content = self.headers[group].content;
- group_container.adopt(
- new Element('h1', {
- 'text': self.headers[group].title
- }),
- self.headers[group].description ? new Element('span.description', {
- 'html': self.headers[group].description
- }) : null,
- content ? (typeOf(content) == 'function' ? content() : content) : null
- ).inject(form);
- }
-
- var tab_navigation = tabs.getElement('.t_'+group);
-
- if(!tab_navigation && self.headers[group] && self.headers[group].include){
- tab_navigation = [];
- self.headers[group].include.each(function(inc){
- tab_navigation.include(tabs.getElement('.t_'+inc));
- })
- }
-
- if(tab_navigation && group_container){
- tabs.adopt(tab_navigation); // Tab navigation
-
- if(self.headers[group] && self.headers[group].include){
-
- self.headers[group].include.each(function(inc){
- self.el.getElement('.tab_'+inc).inject(group_container);
- });
-
- new Element('li.t_'+group).adopt(
- new Element('a', {
- 'href': App.createUrl('wizard/'+group),
- 'text': (self.headers[group].label || group).capitalize()
- })
- ).inject(tabs)
-
- }
- else
- self.el.getElement('.tab_'+group).inject(group_container); // Tab content
-
- if(tab_navigation.getElement && self.headers[group]){
- var a = tab_navigation.getElement('a');
- a.set('text', (self.headers[group].label || group).capitalize());
- var url_split = a.get('href').split('wizard')[1].split('/');
- if(url_split.length > 3)
- a.set('href', a.get('href').replace(url_split[url_split.length-3]+'/', ''));
-
- }
- }
- else {
- new Element('li.t_'+group).adopt(
- new Element('a', {
- 'href': App.createUrl('wizard/'+group),
- 'text': (self.headers[group].label || group).capitalize()
- })
- ).inject(tabs);
- }
-
- if(self.headers[group] && self.headers[group].event)
- self.headers[group].event.call()
- });
-
- // Remove toggle
- self.el.getElement('.advanced_toggle').destroy();
-
- // Hide retention
- self.el.getElement('.section_nzb').hide();
-
- // Add pointer
- new Element('.tab_wrapper').wraps(tabs);
-
- // Add nav
- var minimum = self.el.getSize().y-window.getSize().y;
- self.groups.each(function(group, nr){
-
- var g = self.el.getElement('.wgroup_'+group);
- if(!g || !g.isVisible()) return;
- var t = self.el.getElement('.t_'+group);
- if(!t) return;
-
- var func = function(){
- // Activate all previous ones
- self.groups.each(function(groups2, nr2){
- var t2 = self.el.getElement('.t_'+groups2);
- t2[nr2 > nr ? 'removeClass' : 'addClass' ]('done');
- });
- g.tween('opacity', 1);
- };
-
- if(nr == 0)
- func();
-
- new ScrollSpy( {
- min: function(){
- var c = g.getCoordinates();
- var top = c.top-(window.getSize().y/2);
- return top > minimum ? minimum : top
- },
- max: function(){
- var c = g.getCoordinates();
- return c.top+(c.height/2)
- },
- onEnter: func,
- onLeave: function(){
- g.tween('opacity', 0.2)
- }
- });
- });
-
- }
-
-});
+Page.Wizard = new Class({
+
+ Extends: Page.Settings,
+
+ order: 70,
+ name: 'wizard',
+ current: 'welcome',
+ has_tab: false,
+ wizard_only: true,
+
+ headers: {
+ 'welcome': {
+ 'title': 'Welcome to the new CouchPotato',
+ 'description': 'To get started, fill in each of the following settings as much as you can.',
+ 'content': new Element('div', {
+ 'styles': {
+ 'margin': '0 0 0 30px'
+ }
+ })
+ },
+ 'general': {
+ 'title': 'General',
+ 'description': 'If you want to access CP from outside your local network, you better secure it a bit with a username & password.'
+ },
+ 'downloaders': {
+ 'title': 'What download apps are you using?',
+ 'description': 'CP needs an external download app to work with. Choose one below. For more downloaders check settings after you have filled in the wizard. If your download app isn\'t in the list, use the default Blackhole.'
+ },
+ 'searcher': {
+ 'label': 'Providers',
+ 'title': 'Are you registered at any of these sites?',
+ 'description': 'CP uses these sites to search for movies. A few free are enabled by default, but it\'s always better to have more.'
+ },
+ 'renamer': {
+ 'title': 'Move & rename the movies after downloading?',
+ 'description': 'The coolest part of CP is that it can move and organize your downloaded movies automagically. Check settings and you can even download trailers, subtitles and other data when it has finished downloading. It\'s awesome!'
+ },
+ 'automation': {
+ 'title': 'Easily add movies to your wanted list!',
+ 'description': 'You can easily add movies from your favorite movie site, like IMDB, Rotten Tomatoes, Apple Trailers and more. Just install the extension or drag the bookmarklet to your bookmarks.' +
+ ' Once installed, just click the bookmarklet on a movie page and watch the magic happen ;)',
+ 'content': function(){
+ return App.createUserscriptButtons().setStyles({
+ 'background-image': "url('https://couchpota.to/media/images/userscript.gif')"
+ });
+ }
+ },
+ 'finish': {
+ 'title': 'Finishing Up',
+ 'description': 'Are you done? Did you fill in everything as much as possible?' +
+ ' Be sure to check the settings to see what more CP can do!' +
+ ' After you\'ve used CP for a while, and you like it (which of course you will), consider supporting CP. Maybe even by writing some code. Or by getting a subscription at Usenet Server or Newshosting.',
"
+ }).inject(self.message_container, "top");
+ setTimeout(function() {
+ new_message.addClass("show");
+ }, 10);
+ var hide_message = function() {
+ new_message.addClass("hide");
+ setTimeout(function() {
+ new_message.destroy();
+ }, 1e3);
+ };
+ if (sticky) new_message.grab(new Element("a.close.icon2", {
+ events: {
+ click: function() {
+ self.markAsRead([ data._id ]);
+ hide_message();
+ }
+ }
+ })); else setTimeout(hide_message, 4e3);
+ },
+ addTestButtons: function() {
+ var self = this;
+ var setting_page = App.getPage("Settings");
+ setting_page.addEvent("create", function() {
+ Object.each(setting_page.tabs.notifications.groups, self.addTestButton.bind(self));
+ });
+ },
+ addTestButton: function(fieldset, plugin_name) {
+ var self = this, button_name = self.testButtonName(fieldset);
+ if (button_name.contains("Notifications")) return;
+ new Element(".ctrlHolder.test_button").grab(new Element("a.button", {
+ text: button_name,
+ events: {
+ click: function() {
+ var button = fieldset.getElement(".test_button .button");
+ button.set("text", "Sending notification");
+ Api.request("notify." + plugin_name + ".test", {
+ onComplete: function(json) {
+ button.set("text", button_name);
+ var message;
+ if (json.success) {
+ message = new Element("span.success", {
+ text: "Notification successful"
+ }).inject(button, "after");
+ } else {
+ message = new Element("span.failed", {
+ text: "Notification failed. Check logs for details."
+ }).inject(button, "after");
+ }
+ (function() {
+ message.destroy();
+ }).delay(3e3);
+ }
+ });
+ }
+ }
+ })).inject(fieldset);
+ },
+ testButtonName: function(fieldset) {
+ var name = fieldset.getElement("h2 .group_label").get("text");
+ return "Test " + name;
+ }
+});
+
+window.Notification = new NotificationBase();
+
+var TwitterNotification = new Class({
+ initialize: function() {
+ var self = this;
+ App.addEvent("loadSettings", self.addRegisterButton.bind(self));
+ },
+ addRegisterButton: function() {
+ var self = this;
+ var setting_page = App.getPage("Settings");
+ setting_page.addEvent("create", function() {
+ var fieldset = setting_page.tabs.notifications.groups.twitter, l = window.location;
+ var twitter_set = 0;
+ fieldset.getElements("input[type=text]").each(function(el) {
+ twitter_set += +(el.get("value") !== "");
+ });
+ new Element(".ctrlHolder").adopt(twitter_set > 0 ? [ self.unregister = new Element("a.button.red", {
+ text: 'Unregister "' + fieldset.getElement("input[name*=screen_name]").get("value") + '"',
+ events: {
+ click: function() {
+ fieldset.getElements("input[type=text]").set("value", "").fireEvent("change");
+ self.unregister.destroy();
+ self.unregister_or.destroy();
+ }
+ }
+ }), self.unregister_or = new Element("span[text=or]") ] : null, new Element("a.button", {
+ text: twitter_set > 0 ? "Register a different account" : "Register your Twitter account",
+ events: {
+ click: function() {
+ Api.request("notify.twitter.auth_url", {
+ data: {
+ host: l.protocol + "//" + l.hostname + (l.port ? ":" + l.port : "")
+ },
+ onComplete: function(json) {
+ window.location = json.url;
+ }
+ });
+ }
+ }
+ })).inject(fieldset.getElement(".test_button"), "before");
+ });
+ }
+});
+
+window.addEvent("domready", function() {
+ new TwitterNotification();
+});
+
+var CategoryListBase = new Class({
+ initialize: function() {
+ var self = this;
+ App.addEvent("loadSettings", self.addSettings.bind(self));
+ },
+ setup: function(categories) {
+ var self = this;
+ self.categories = [];
+ Array.each(categories, self.createCategory.bind(self));
+ },
+ addSettings: function() {
+ var self = this;
+ self.settings = App.getPage("Settings");
+ self.settings.addEvent("create", function() {
+ var tab = self.settings.createSubTab("category", {
+ label: "Categories",
+ name: "category",
+ subtab_label: "Category & filtering"
+ }, self.settings.tabs.searcher, "searcher");
+ self.tab = tab.tab;
+ self.content = tab.content;
+ self.createList();
+ self.createOrdering();
+ });
+ self.settings.addEvent("create", function() {
+ var renamer_group = self.settings.tabs.renamer.groups.renamer;
+ self.categories.each(function(category) {
+ var input = new Option.Directory("section_name", "option.name", category.get("destination"), {
+ name: category.get("label")
+ });
+ input.inject(renamer_group.getElement(".renamer_to"));
+ input.fireEvent("injected");
+ input.save = function() {
+ category.data.destination = input.getValue();
+ category.save();
+ };
+ });
+ });
+ },
+ createList: function() {
+ var self = this;
+ var count = self.categories.length;
+ self.settings.createGroup({
+ label: "Categories",
+ description: "Create categories, each one extending global filters. (Needs refresh '" + (App.isMac() ? "CMD+R" : "F5") + "' after editing)"
+ }).inject(self.content).adopt(self.category_container = new Element("div.container"), new Element("a.add_new_category", {
+ text: count > 0 ? "Create another category" : "Click here to create a category.",
+ events: {
+ click: function() {
+ var category = self.createCategory();
+ $(category).inject(self.category_container);
+ }
+ }
+ }));
+ Array.each(self.categories, function(category) {
+ $(category).inject(self.category_container);
+ });
+ },
+ getCategory: function(id) {
+ return this.categories.filter(function(category) {
+ return category.data._id == id;
+ }).pick();
+ },
+ getAll: function() {
+ return this.categories;
+ },
+ createCategory: function(data) {
+ var self = this;
+ data = data || {
+ id: randomString()
+ };
+ var category = new Category(data);
+ self.categories.include(category);
+ return category;
+ },
+ createOrdering: function() {
+ var self = this;
+ var category_list;
+ self.settings.createGroup({
+ label: "Category ordering"
+ }).adopt(new Element(".ctrlHolder#category_ordering").adopt(new Element("label[text=Order]"), category_list = new Element("ul"), new Element("p.formHint", {
+ html: "Change the order the categories are in the dropdown list."
+ }))).inject(self.content);
+ Array.each(self.categories, function(category) {
+ new Element("li", {
+ "data-id": category.data._id
+ }).adopt(new Element("span.category_label", {
+ text: category.data.label
+ }), new Element("span.handle.icon-handle")).inject(category_list);
+ });
+ self.category_sortable = new Sortables(category_list, {
+ revert: true,
+ handle: "",
+ opacity: .5,
+ onComplete: self.saveOrdering.bind(self)
+ });
+ },
+ saveOrdering: function() {
+ var self = this;
+ var ids = [];
+ self.category_sortable.list.getElements("li").each(function(el) {
+ ids.include(el.get("data-id"));
+ });
+ Api.request("category.save_order", {
+ data: {
+ ids: ids
+ }
+ });
+ }
+});
+
+window.CategoryList = new CategoryListBase();
+
+var Category = new Class({
+ data: {},
+ initialize: function(data) {
+ var self = this;
+ self.data = data;
+ self.create();
+ self.el.addEvents({
+ "change:relay(select)": self.save.bind(self, 0),
+ "keyup:relay(input[type=text])": self.save.bind(self, [ 300 ])
+ });
+ },
+ create: function() {
+ var self = this;
+ var data = self.data;
+ self.el = new Element("div.category").adopt(self.delete_button = new Element("span.delete.icon2", {
+ events: {
+ click: self.del.bind(self)
+ }
+ }), new Element(".category_label.ctrlHolder").adopt(new Element("label", {
+ text: "Name"
+ }), new Element("input", {
+ type: "text",
+ value: data.label,
+ placeholder: "Example: Kids, Horror or His"
+ }), new Element("p.formHint", {
+ text: "See global filters for explanation."
+ })), new Element(".category_preferred.ctrlHolder").adopt(new Element("label", {
+ text: "Preferred"
+ }), new Element("input", {
+ type: "text",
+ value: data.preferred,
+ placeholder: "Blu-ray, DTS"
+ })), new Element(".category_required.ctrlHolder").adopt(new Element("label", {
+ text: "Required"
+ }), new Element("input", {
+ type: "text",
+ value: data.required,
+ placeholder: "Example: DTS, AC3 & English"
+ })), new Element(".category_ignored.ctrlHolder").adopt(new Element("label", {
+ text: "Ignored"
+ }), new Element("input", {
+ type: "text",
+ value: data.ignored,
+ placeholder: "Example: dubbed, swesub, french"
+ })));
+ self.makeSortable();
+ },
+ save: function(delay) {
+ var self = this;
+ if (self.save_timer) clearTimeout(self.save_timer);
+ self.save_timer = function() {
+ Api.request("category.save", {
+ data: self.getData(),
+ useSpinner: true,
+ spinnerOptions: {
+ target: self.el
+ },
+ onComplete: function(json) {
+ if (json.success) {
+ self.data = json.category;
+ }
+ }
+ });
+ }.delay(delay || 0, self);
+ },
+ getData: function() {
+ var self = this;
+ return {
+ id: self.data._id,
+ label: self.el.getElement(".category_label input").get("value"),
+ required: self.el.getElement(".category_required input").get("value"),
+ preferred: self.el.getElement(".category_preferred input").get("value"),
+ ignored: self.el.getElement(".category_ignored input").get("value"),
+ destination: self.data.destination
+ };
+ },
+ del: function() {
+ var self = this;
+ if (self.data.label === undefined) {
+ self.el.destroy();
+ return;
+ }
+ var label = self.el.getElement(".category_label input").get("value");
+ var qObj = new Question('Are you sure you want to delete "' + label + '"?', "", [ {
+ text: 'Delete "' + label + '"',
+ class: "delete",
+ events: {
+ click: function(e) {
+ e.preventDefault();
+ Api.request("category.delete", {
+ data: {
+ id: self.data._id
+ },
+ useSpinner: true,
+ spinnerOptions: {
+ target: self.el
+ },
+ onComplete: function(json) {
+ if (json.success) {
+ qObj.close();
+ self.el.destroy();
+ } else {
+ alert(json.message);
+ }
+ }
+ });
+ }
+ }
+ }, {
+ text: "Cancel",
+ cancel: true
+ } ]);
+ },
+ makeSortable: function() {
+ var self = this;
+ self.sortable = new Sortables(self.category_container, {
+ revert: true,
+ handle: ".handle",
+ opacity: .5,
+ onComplete: self.save.bind(self, 300)
+ });
+ },
+ get: function(attr) {
+ return this.data[attr];
+ },
+ toElement: function() {
+ return this.el;
+ }
+});
+
+Page.Log = new Class({
+ Extends: PageBase,
+ order: 60,
+ name: "log",
+ title: "Show recent logs.",
+ has_tab: false,
+ navigation: null,
+ log_items: [],
+ report_text: "### Steps to reproduce:\n" + "1. ..\n" + "2. ..\n" + "\n" + "### Information:\n" + "Movie(s) I have this with: ...\n" + "Quality of the movie being searched: ...\n" + "Providers I use: ...\n" + "Version of CouchPotato: {version}\n" + "Running on: ...\n" + "\n" + "### Logs:\n" + "```\n{issue}```",
+ indexAction: function() {
+ var self = this;
+ self.getLogs(0);
+ },
+ getLogs: function(nr) {
+ var self = this;
+ if (self.log) self.log.destroy();
+ self.log = new Element("div.container.loading", {
+ text: "loading...",
+ events: {
+ "mouseup:relay(.time)": function(e) {
+ self.showSelectionButton.delay(100, self, e);
+ }
+ }
+ }).inject(self.content);
+ if (self.navigation) {
+ var nav = self.navigation.getElement(".nav");
+ nav.getElements(".active").removeClass("active");
+ self.navigation.getElements("li")[nr + 1].addClass("active");
+ }
+ if (self.request && self.request.running) self.request.cancel();
+ self.request = Api.request("logging.get", {
+ data: {
+ nr: nr
+ },
+ onComplete: function(json) {
+ self.log.set("text", "");
+ self.log_items = self.createLogElements(json.log);
+ self.log.adopt(self.log_items);
+ self.log.removeClass("loading");
+ self.scrollToBottom();
+ if (!self.navigation) {
+ self.navigation = new Element("div.navigation").adopt(new Element("h2[text=Logs]"), new Element("div.hint", {
+ text: "Select multiple lines & report an issue"
+ }));
+ var nav = new Element("ul.nav", {
+ events: {
+ "click:relay(li.select)": function(e, el) {
+ self.getLogs(parseInt(el.get("text")) - 1);
+ }
+ }
+ }).inject(self.navigation);
+ new Element("li.filter").grab(new Element("select", {
+ events: {
+ change: function() {
+ var type_filter = this.getSelected()[0].get("value");
+ self.content.set("data-filter", type_filter);
+ self.scrollToBottom();
+ }
+ }
+ }).adopt(new Element("option", {
+ value: "ALL",
+ text: "Show all logs"
+ }), new Element("option", {
+ value: "INFO",
+ text: "Show only INFO"
+ }), new Element("option", {
+ value: "DEBUG",
+ text: "Show only DEBUG"
+ }), new Element("option", {
+ value: "ERROR",
+ text: "Show only ERROR"
+ }))).inject(nav);
+ for (var i = 0; i <= json.total; i++) {
+ new Element("li", {
+ text: i + 1,
+ class: "select " + (nr == i ? "active" : "")
+ }).inject(nav);
+ }
+ new Element("li.clear", {
+ text: "clear",
+ events: {
+ click: function() {
+ Api.request("logging.clear", {
+ onComplete: function() {
+ self.getLogs(0);
+ }
+ });
+ }
+ }
+ }).inject(nav);
+ self.navigation.inject(self.content, "top");
+ }
+ }
+ });
+ },
+ createLogElements: function(logs) {
+ var elements = [];
+ logs.each(function(log) {
+ elements.include(new Element("div", {
+ class: "time " + log.type.toLowerCase()
+ }).adopt(new Element("span", {
+ text: log.time
+ }), new Element("span.type", {
+ text: log.type
+ }), new Element("span.message", {
+ text: log.message
+ })));
+ });
+ return elements;
+ },
+ scrollToBottom: function() {
+ new Fx.Scroll(this.content, {
+ duration: 0
+ }).toBottom();
+ },
+ showSelectionButton: function(e) {
+ var self = this, selection = self.getSelected(), start_node = selection.anchorNode, parent_start = start_node.parentNode.getParent(".time"), end_node = selection.focusNode.parentNode.getParent(".time"), text = "";
+ var remove_button = function() {
+ self.log.getElements(".highlight").removeClass("highlight");
+ if (self.do_report) self.do_report.destroy();
+ document.body.removeEvent("click", remove_button);
+ };
+ remove_button();
+ if (parent_start) start_node = parent_start;
+ var index = {
+ start: self.log_items.indexOf(start_node),
+ end: self.log_items.indexOf(end_node)
+ };
+ if (index.start > index.end) {
+ index = {
+ start: index.end,
+ end: index.start
+ };
+ }
+ var nodes = self.log_items.slice(index.start, index.end + 1);
+ nodes.each(function(node, nr) {
+ node.addClass("highlight");
+ node.getElements("span").each(function(span) {
+ text += self.spaceFill(span.get("text") + " ", 6);
+ });
+ text += "\n";
+ });
+ self.do_report = new Element("a.do_report.button", {
+ text: "Report issue",
+ styles: {
+ top: e.page.y,
+ left: e.page.x
+ },
+ events: {
+ click: function(e) {
+ e.stop();
+ self.showReport(text);
+ }
+ }
+ }).inject(document.body);
+ setTimeout(function() {
+ document.body.addEvent("click", remove_button);
+ }, 0);
+ },
+ showReport: function(text) {
+ var self = this, version = Updater.getInfo(), body = self.report_text.replace("{issue}", text).replace("{version}", version ? version.version.repr : "..."), textarea;
+ var overlay = new Element("div.mask.report_popup", {
+ method: "post",
+ events: {
+ click: function(e) {
+ overlay.destroy();
+ }
+ }
+ }).grab(new Element("div.bug", {
+ events: {
+ click: function(e) {
+ e.stopPropagation();
+ }
+ }
+ }).adopt(new Element("h1", {
+ text: "Report a bug"
+ }), new Element("span").adopt(new Element("span", {
+ text: "Read "
+ }), new Element("a.button", {
+ target: "_blank",
+ text: "the contributing guide",
+ href: "https://github.com/RuudBurger/CouchPotatoServer/blob/develop/contributing.md"
+ }), new Element("span", {
+ html: " before posting, then copy the text below and FILL IN the dots."
+ })), textarea = new Element("textarea", {
+ text: body
+ }), new Element("a.button", {
+ target: "_blank",
+ text: "Create a new issue on GitHub with the text above",
+ href: "https://github.com/RuudBurger/CouchPotatoServer/issues/new",
+ events: {
+ click: function(e) {
+ e.stop();
+ var body = textarea.get("value"), bdy = "?body=" + (body.length < 2e3 ? encodeURIComponent(body) : "Paste the text here"), win = window.open(e.target.get("href") + bdy, "_blank");
+ win.focus();
+ }
+ }
+ })));
+ overlay.inject(document.body);
+ },
+ getSelected: function() {
+ if (window.getSelection) return window.getSelection(); else if (document.getSelection) return document.getSelection(); else {
+ var selection = document.selection && document.selection.createRange();
+ if (selection.text) return selection.text;
+ }
+ return false;
+ },
+ spaceFill: function(number, width) {
+ if (number.toString().length >= width) return number;
+ return (new Array(width).join(" ") + number.toString()).substr(-width);
+ }
+});
+
+var Profile = new Class({
+ data: {},
+ types: [],
+ initialize: function(data) {
+ var self = this;
+ self.data = data;
+ self.types = [];
+ self.create();
+ self.el.addEvents({
+ "change:relay(select, input[type=checkbox])": self.save.bind(self, 0),
+ "keyup:relay(input[type=text])": self.save.bind(self, [ 300 ])
+ });
+ },
+ create: function() {
+ var self = this;
+ var data = self.data;
+ self.el = new Element("div.profile").adopt(self.delete_button = new Element("span.delete.icon2", {
+ events: {
+ click: self.del.bind(self)
+ }
+ }), new Element(".quality_label.ctrlHolder").adopt(new Element("label", {
+ text: "Name"
+ }), new Element("input", {
+ type: "text",
+ value: data.label,
+ placeholder: "Profile name"
+ })), new Element("div.qualities.ctrlHolder").adopt(new Element("label", {
+ text: "Search for"
+ }), self.type_container = new Element("ol.types"), new Element("div.formHint", {
+ html: "Search these qualities (2 minimum), from top to bottom. Use the checkbox, to stop searching after it found this quality."
+ })), new Element("div.wait_for.ctrlHolder").adopt(new Element("span", {
+ text: "Wait"
+ }), new Element("input.wait_for_input.xsmall", {
+ type: "text",
+ value: data.wait_for && data.wait_for.length > 0 ? data.wait_for[0] : 0
+ }), new Element("span", {
+ text: "day(s) for a better quality "
+ }), new Element("span.advanced", {
+ text: "and keep searching"
+ }), new Element("input.xsmall.stop_after_input.advanced", {
+ type: "text",
+ value: data.stop_after && data.stop_after.length > 0 ? data.stop_after[0] : 0
+ }), new Element("span.advanced", {
+ text: "day(s) for a better (checked) quality."
+ }), new Element("span.advanced", {
+ html: " Releases need a minimum score of"
+ }), new Element("input.advanced.xsmall.minimum_score_input", {
+ size: 4,
+ type: "text",
+ value: data.minimum_score || 1
+ })));
+ self.makeSortable();
+ if (data.qualities) {
+ data.types = [];
+ data.qualities.each(function(quality, nr) {
+ data.types.include({
+ quality: quality,
+ finish: data.finish[nr] || false,
+ "3d": data["3d"] ? data["3d"][nr] || false : false
+ });
+ });
+ }
+ if (data.types) data.types.each(self.addType.bind(self)); else self.delete_button.hide();
+ self.addType();
+ },
+ save: function(delay) {
+ var self = this;
+ if (self.save_timer) clearTimeout(self.save_timer);
+ self.save_timer = function() {
+ self.addType();
+ var data = self.getData();
+ if (data.types.length < 2) return; else self.delete_button.show();
+ Api.request("profile.save", {
+ data: self.getData(),
+ useSpinner: true,
+ spinnerOptions: {
+ target: self.el
+ },
+ onComplete: function(json) {
+ if (json.success) {
+ self.data = json.profile;
+ self.type_container.getElement("li:first-child input.finish[type=checkbox]").set("checked", true).getParent().addClass("checked");
+ }
+ }
+ });
+ }.delay(delay, self);
+ },
+ getData: function() {
+ var self = this;
+ var data = {
+ id: self.data._id,
+ label: self.el.getElement(".quality_label input").get("value"),
+ wait_for: self.el.getElement(".wait_for_input").get("value"),
+ stop_after: self.el.getElement(".stop_after_input").get("value"),
+ minimum_score: self.el.getElement(".minimum_score_input").get("value"),
+ types: []
+ };
+ Array.each(self.type_container.getElements(".type"), function(type) {
+ if (!type.hasClass("deleted") && type.getElement("select").get("value") != -1) data.types.include({
+ quality: type.getElement("select").get("value"),
+ finish: +type.getElement("input.finish[type=checkbox]").checked,
+ "3d": +type.getElement("input.3d[type=checkbox]").checked
+ });
+ });
+ return data;
+ },
+ addType: function(data) {
+ var self = this;
+ var has_empty = false;
+ self.types.each(function(type) {
+ if ($(type).hasClass("is_empty")) has_empty = true;
+ });
+ if (has_empty) return;
+ var t = new Profile.Type(data, {
+ onChange: self.save.bind(self, 0)
+ });
+ $(t).inject(self.type_container);
+ self.sortable.addItems($(t));
+ self.types.include(t);
+ },
+ getTypes: function() {
+ var self = this;
+ return self.types.filter(function(type) {
+ return type.get("quality");
+ });
+ },
+ del: function() {
+ var self = this;
+ var label = self.el.getElement(".quality_label input").get("value");
+ var qObj = new Question('Are you sure you want to delete "' + label + '"?', "Items using this profile, will be set to the default quality.", [ {
+ text: 'Delete "' + label + '"',
+ class: "delete",
+ events: {
+ click: function(e) {
+ e.preventDefault();
+ Api.request("profile.delete", {
+ data: {
+ id: self.data._id
+ },
+ useSpinner: true,
+ spinnerOptions: {
+ target: self.el
+ },
+ onComplete: function(json) {
+ if (json.success) {
+ qObj.close();
+ self.el.destroy();
+ } else {
+ alert(json.message);
+ }
+ }
+ });
+ }
+ }
+ }, {
+ text: "Cancel",
+ cancel: true
+ } ]);
+ },
+ makeSortable: function() {
+ var self = this;
+ self.sortable = new Sortables(self.type_container, {
+ revert: true,
+ handle: ".handle",
+ opacity: .5,
+ onComplete: self.save.bind(self, 300)
+ });
+ },
+ get: function(attr) {
+ return this.data[attr];
+ },
+ isCore: function() {
+ return this.data.core;
+ },
+ toElement: function() {
+ return this.el;
+ }
+});
+
+Profile.Type = new Class({
+ Implements: [ Events, Options ],
+ deleted: false,
+ initialize: function(data, options) {
+ var self = this;
+ self.setOptions(options);
+ self.data = data || {};
+ self.create();
+ self.addEvent("change", function() {
+ self.el[self.qualities.get("value") == "-1" ? "addClass" : "removeClass"]("is_empty");
+ self.el[Quality.getQuality(self.qualities.get("value")).allow_3d ? "addClass" : "removeClass"]("allow_3d");
+ self.deleted = self.qualities.get("value") == "-1";
+ });
+ },
+ create: function() {
+ var self = this;
+ var data = self.data;
+ self.el = new Element("li.type").adopt(new Element("span.quality_type.select_wrapper.icon-dropdown").grab(self.fillQualities()), self.finish_container = new Element("label.finish").adopt(self.finish = new Element("input.finish[type=checkbox]", {
+ checked: data.finish !== undefined ? data.finish : 1,
+ events: {
+ change: function() {
+ if (self.el == self.el.getParent().getElement(":first-child")) {
+ alert("Top quality always finishes the search");
+ return;
+ }
+ self.fireEvent("change");
+ }
+ }
+ }), new Element("span.check_label[text=finish]")), self["3d_container"] = new Element("label.threed").adopt(self["3d"] = new Element("input.3d[type=checkbox]", {
+ checked: data["3d"] !== undefined ? data["3d"] : 0,
+ events: {
+ change: function() {
+ self.fireEvent("change");
+ }
+ }
+ }), new Element("span.check_label[text=3D]")), new Element("span.delete.icon-cancel", {
+ events: {
+ click: self.del.bind(self)
+ }
+ }), new Element("span.handle.icon-handle"));
+ self.el[self.data.quality ? "removeClass" : "addClass"]("is_empty");
+ if (self.data.quality && Quality.getQuality(self.data.quality).allow_3d) self.el.addClass("allow_3d");
+ },
+ fillQualities: function() {
+ var self = this;
+ self.qualities = new Element("select", {
+ events: {
+ change: self.fireEvent.bind(self, "change")
+ }
+ }).grab(new Element("option", {
+ text: "+ Add another quality",
+ value: -1
+ }));
+ Object.each(Quality.qualities, function(q) {
+ new Element("option", {
+ text: q.label,
+ value: q.identifier,
+ "data-allow_3d": q.allow_3d
+ }).inject(self.qualities);
+ });
+ self.qualities.set("value", self.data.quality);
+ return self.qualities;
+ },
+ getData: function() {
+ var self = this;
+ return {
+ quality: self.qualities.get("value"),
+ finish: +self.finish.checked,
+ "3d": +self["3d"].checked
+ };
+ },
+ get: function(key) {
+ return this.data[key];
+ },
+ del: function() {
+ var self = this;
+ self.el.addClass("deleted");
+ self.el.hide();
+ self.deleted = true;
+ self.fireEvent("change");
+ },
+ toElement: function() {
+ return this.el;
+ }
+});
+
+var QualityBase = new Class({
+ tab: "",
+ content: "",
+ setup: function(data) {
+ var self = this;
+ self.qualities = data.qualities;
+ self.profiles_list = null;
+ self.profiles = [];
+ Array.each(data.profiles, self.createProfilesClass.bind(self));
+ App.addEvent("loadSettings", self.addSettings.bind(self));
+ },
+ getProfile: function(id) {
+ return this.profiles.filter(function(profile) {
+ return profile.data._id == id;
+ }).pick();
+ },
+ getActiveProfiles: function() {
+ return Array.filter(this.profiles, function(profile) {
+ return !profile.data.hide;
+ });
+ },
+ getQuality: function(identifier) {
+ try {
+ return this.qualities.filter(function(q) {
+ return q.identifier == identifier;
+ }).pick();
+ } catch (e) {}
+ return {};
+ },
+ addSettings: function() {
+ var self = this;
+ self.settings = App.getPage("Settings");
+ self.settings.addEvent("create", function() {
+ var tab = self.settings.createSubTab("profile", {
+ label: "Quality",
+ name: "profile",
+ subtab_label: "Qualities"
+ }, self.settings.tabs.searcher, "searcher");
+ self.tab = tab.tab;
+ self.content = tab.content;
+ self.createProfiles();
+ self.createProfileOrdering();
+ self.createSizes();
+ });
+ },
+ createProfiles: function() {
+ var self = this;
+ var non_core_profiles = Array.filter(self.profiles, function(profile) {
+ return !profile.isCore();
+ });
+ var count = non_core_profiles.length;
+ self.settings.createGroup({
+ label: "Quality Profiles",
+ description: "Create your own profiles with multiple qualities."
+ }).inject(self.content).adopt(self.profile_container = new Element("div.container"), new Element("a.add_new_profile", {
+ text: count > 0 ? "Create another quality profile" : "Click here to create a quality profile.",
+ events: {
+ click: function() {
+ var profile = self.createProfilesClass();
+ $(profile).inject(self.profile_container);
+ }
+ }
+ }));
+ Array.each(non_core_profiles, function(profile) {
+ $(profile).inject(self.profile_container);
+ });
+ },
+ createProfilesClass: function(data) {
+ var self = this;
+ data = data || {
+ id: randomString()
+ };
+ var profile = new Profile(data);
+ self.profiles.include(profile);
+ return profile;
+ },
+ createProfileOrdering: function() {
+ var self = this;
+ self.settings.createGroup({
+ label: "Profile Defaults",
+ description: "(Needs refresh '" + (App.isMac() ? "CMD+R" : "F5") + "' after editing)"
+ }).grab(new Element(".ctrlHolder#profile_ordering").adopt(new Element("label[text=Order]"), self.profiles_list = new Element("ul"), new Element("p.formHint", {
+ html: "Change the order the profiles are in the dropdown list. Uncheck to hide it completely. First one will be default."
+ }))).inject(self.content);
+ Array.each(self.profiles, function(profile) {
+ var check;
+ new Element("li", {
+ "data-id": profile.data._id
+ }).adopt(check = new Element("input[type=checkbox]", {
+ checked: !profile.data.hide,
+ events: {
+ change: self.saveProfileOrdering.bind(self)
+ }
+ }), new Element("span.profile_label", {
+ text: profile.data.label
+ }), new Element("span.handle.icon-handle")).inject(self.profiles_list);
+ });
+ var sorted_changed = false;
+ self.profile_sortable = new Sortables(self.profiles_list, {
+ revert: true,
+ handle: ".handle",
+ opacity: .5,
+ onSort: function() {
+ sorted_changed = true;
+ },
+ onComplete: function() {
+ if (sorted_changed) {
+ self.saveProfileOrdering();
+ sorted_changed = false;
+ }
+ }
+ });
+ },
+ saveProfileOrdering: function() {
+ var self = this, ids = [], hidden = [];
+ self.profiles_list.getElements("li").each(function(el, nr) {
+ ids.include(el.get("data-id"));
+ hidden[nr] = +!el.getElement("input[type=checkbox]").get("checked");
+ });
+ Api.request("profile.save_order", {
+ data: {
+ ids: ids,
+ hidden: hidden
+ }
+ });
+ },
+ createSizes: function() {
+ var self = this;
+ var group = self.settings.createGroup({
+ label: "Sizes",
+ description: "Edit the minimal and maximum sizes (in MB) for each quality.",
+ advanced: true,
+ name: "sizes"
+ }).inject(self.content);
+ new Element("div.item.head.ctrlHolder").adopt(new Element("span.label", {
+ text: "Quality"
+ }), new Element("span.min", {
+ text: "Min"
+ }), new Element("span.max", {
+ text: "Max"
+ })).inject(group);
+ Array.each(self.qualities, function(quality) {
+ new Element("div.ctrlHolder.item").adopt(new Element("span.label", {
+ text: quality.label
+ }), new Element("input.min[type=text]", {
+ value: quality.size_min,
+ events: {
+ keyup: function(e) {
+ self.changeSize(quality.identifier, "size_min", e.target.get("value"));
+ }
+ }
+ }), new Element("input.max[type=text]", {
+ value: quality.size_max,
+ events: {
+ keyup: function(e) {
+ self.changeSize(quality.identifier, "size_max", e.target.get("value"));
+ }
+ }
+ })).inject(group);
+ });
+ },
+ size_timer: {},
+ changeSize: function(identifier, type, value) {
+ var self = this;
+ if (self.size_timer[identifier + type]) clearTimeout(self.size_timer[identifier + type]);
+ self.size_timer[identifier + type] = function() {
+ Api.request("quality.size.save", {
+ data: {
+ identifier: identifier,
+ value_type: type,
+ value: value
+ }
+ });
+ }.delay(300);
+ }
+});
+
+window.Quality = new QualityBase();
+
+Page.Userscript = new Class({
+ Extends: PageBase,
+ order: 80,
+ name: "userscript",
+ has_tab: false,
+ options: {
+ onOpened: function() {
+ App.fireEvent("unload");
+ App.getBlock("header").hide();
+ }
+ },
+ indexAction: function() {
+ var self = this;
+ self.content.grab(self.frame = new Element("div.frame.loading", {
+ text: "Loading..."
+ }));
+ var url = window.location.href.split("url=")[1];
+ Api.request("userscript.add_via_url", {
+ data: {
+ url: url
+ },
+ onComplete: function(json) {
+ self.frame.empty();
+ self.frame.removeClass("loading");
+ if (json.error) self.frame.set("html", json.error); else {
+ var item = new BlockSearchMovieItem(json.movie);
+ self.frame.adopt(item);
+ item.showOptions();
+ }
+ }
+ });
+ }
+});
+
+var UserscriptSettingTab = new Class({
+ tab: "",
+ content: "",
+ initialize: function() {
+ var self = this;
+ App.addEvent("loadSettings", self.addSettings.bind(self));
+ },
+ addSettings: function() {
+ var self = this;
+ self.settings = App.getPage("Settings");
+ self.settings.addEvent("create", function() {
+ var host_url = window.location.protocol + "//" + window.location.host;
+ self.settings.createGroup({
+ name: "userscript",
+ label: "Install the browser extension or bookmarklet",
+ description: "Easily add movies via imdb.com, appletrailers and more"
+ }).inject(self.settings.tabs.automation.content, "top").adopt(new Element("a.userscript.button", {
+ text: "Install extension",
+ href: "https://couchpota.to/extension/",
+ target: "_blank"
+ }), new Element("span.or[text=or]"), new Element("span.bookmarklet").adopt(new Element("a.button.green", {
+ text: "+CouchPotato",
+ href: "javascript:void((function(){var e=document.createElement('script');e.setAttribute('type','text/javascript');e.setAttribute('charset','UTF-8');e.setAttribute('src','" + host_url + Api.createUrl("userscript.bookmark") + "?host=" + encodeURI(host_url + Api.createUrl("userscript.get") + randomString() + "/") + "&r='+Math.random()*99999999);document.body.appendChild(e)})());",
+ target: "",
+ events: {
+ click: function(e) {
+ e.stop();
+ alert("Drag it to your bookmark ;)");
+ }
+ }
+ }), new Element("span", {
+ text: "⇽ Drag this to your bookmarks"
+ })), new Element("img", {
+ src: "https://couchpota.to/media/images/userscript.gif"
+ }));
+ });
+ }
+});
+
+window.addEvent("domready", function() {
+ new UserscriptSettingTab();
+});
+
+window.addEvent("load", function() {
+ var your_version = $(document.body).get("data-userscript_version"), latest_version = App.getOption("userscript_version") || "", key = "cp_version_check", checked_already = Cookie.read(key);
+ if (your_version && your_version < latest_version && checked_already < latest_version) {
+ if (confirm("Update to the latest Userscript?\nYour version: " + your_version + ", new version: " + latest_version)) {
+ document.location = Api.createUrl("userscript.get") + randomString() + "/couchpotato.user.js";
+ }
+ Cookie.write(key, latest_version, {
+ duration: 100
+ });
+ }
+});
+
+Page.Wizard = new Class({
+ Extends: Page.Settings,
+ order: 70,
+ name: "wizard",
+ current: "welcome",
+ has_tab: false,
+ wizard_only: true,
+ headers: {
+ welcome: {
+ title: "Welcome to the new CouchPotato",
+ description: "To get started, fill in each of the following settings as much as you can.",
+ content: new Element("div", {
+ styles: {
+ margin: "0 0 0 30px"
+ }
+ })
+ },
+ general: {
+ title: "General",
+ description: "If you want to access CP from outside your local network, you better secure it a bit with a username & password."
+ },
+ downloaders: {
+ title: "What download apps are you using?",
+ description: "CP needs an external download app to work with. Choose one below. For more downloaders check settings after you have filled in the wizard. If your download app isn't in the list, use the default Blackhole."
+ },
+ searcher: {
+ label: "Providers",
+ title: "Are you registered at any of these sites?",
+ description: "CP uses these sites to search for movies. A few free are enabled by default, but it's always better to have more."
+ },
+ renamer: {
+ title: "Move & rename the movies after downloading?",
+ description: "The coolest part of CP is that it can move and organize your downloaded movies automagically. Check settings and you can even download trailers, subtitles and other data when it has finished downloading. It's awesome!"
+ },
+ automation: {
+ title: "Easily add movies to your wanted list!",
+ description: "You can easily add movies from your favorite movie site, like IMDB, Rotten Tomatoes, Apple Trailers and more. Just install the extension or drag the bookmarklet to your bookmarks." + " Once installed, just click the bookmarklet on a movie page and watch the magic happen ;)",
+ content: function() {
+ return App.createUserscriptButtons().setStyles({
+ "background-image": "url('https://couchpota.to/media/images/userscript.gif')"
+ });
+ }
+ },
+ finish: {
+ title: "Finishing Up",
+ description: "Are you done? Did you fill in everything as much as possible?" + " Be sure to check the settings to see what more CP can do!
" + '
After you\'ve used CP for a while, and you like it (which of course you will), consider supporting CP. Maybe even by writing some code. Or by getting a subscription at Usenet Server or Newshosting.
',
+ content: new Element("div").grab(new Element("a.button.green", {
+ styles: {
+ "margin-top": 20
+ },
+ text: "I'm ready to start the awesomeness!",
+ events: {
+ click: function(e) {
+ e.preventDefault();
+ Api.request("settings.save", {
+ data: {
+ section: "core",
+ name: "show_wizard",
+ value: 0
+ },
+ useSpinner: true,
+ spinnerOptions: {
+ target: self.el
+ },
+ onComplete: function() {
+ window.location = App.createUrl("wanted");
+ }
+ });
+ }
+ }
+ }))
+ }
+ },
+ groups: [ "welcome", "general", "downloaders", "searcher", "renamer", "automation", "finish" ],
+ open: function(action, params) {
+ var self = this;
+ if (!self.initialized) {
+ App.fireEvent("unload");
+ App.getBlock("header").hide();
+ self.parent(action, params);
+ self.el.addClass("settings");
+ self.addEvent("create", function() {
+ self.orderGroups();
+ });
+ self.initialized = true;
+ self.scroll = new Fx.Scroll(document.body, {
+ transition: "quint:in:out"
+ });
+ } else (function() {
+ var sc = self.el.getElement(".wgroup_" + action);
+ self.scroll.start(0, sc.getCoordinates().top - 80);
+ }).delay(1);
+ },
+ orderGroups: function() {
+ var self = this;
+ var form = self.el.getElement(".uniForm");
+ var tabs = self.el.getElement(".tabs").hide();
+ self.groups.each(function(group) {
+ var group_container;
+ if (self.headers[group]) {
+ group_container = new Element(".wgroup_" + group);
+ if (self.headers[group].include) {
+ self.headers[group].include.each(function(inc) {
+ group_container.addClass("wgroup_" + inc);
+ });
+ }
+ var content = self.headers[group].content;
+ group_container.adopt(new Element("h1", {
+ text: self.headers[group].title
+ }), self.headers[group].description ? new Element("span.description", {
+ html: self.headers[group].description
+ }) : null, content ? typeOf(content) == "function" ? content() : content : null).inject(form);
+ }
+ var tab_navigation = tabs.getElement(".t_" + group);
+ if (!tab_navigation && self.headers[group] && self.headers[group].include) {
+ tab_navigation = [];
+ self.headers[group].include.each(function(inc) {
+ tab_navigation.include(tabs.getElement(".t_" + inc));
+ });
+ }
+ if (tab_navigation && group_container) {
+ tabs.adopt(tab_navigation);
+ if (self.headers[group] && self.headers[group].include) {
+ self.headers[group].include.each(function(inc) {
+ self.el.getElement(".tab_" + inc).inject(group_container);
+ });
+ new Element("li.t_" + group).grab(new Element("a", {
+ href: App.createUrl("wizard/" + group),
+ text: (self.headers[group].label || group).capitalize()
+ })).inject(tabs);
+ } else self.el.getElement(".tab_" + group).inject(group_container);
+ if (tab_navigation.getElement && self.headers[group]) {
+ var a = tab_navigation.getElement("a");
+ a.set("text", (self.headers[group].label || group).capitalize());
+ var url_split = a.get("href").split("wizard")[1].split("/");
+ if (url_split.length > 3) a.set("href", a.get("href").replace(url_split[url_split.length - 3] + "/", ""));
+ }
+ } else {
+ new Element("li.t_" + group).grab(new Element("a", {
+ href: App.createUrl("wizard/" + group),
+ text: (self.headers[group].label || group).capitalize()
+ })).inject(tabs);
+ }
+ if (self.headers[group] && self.headers[group].event) self.headers[group].event.call();
+ });
+ self.el.getElement(".advanced_toggle").destroy();
+ self.el.getElement(".section_nzb").hide();
+ }
+});
\ No newline at end of file
diff --git a/couchpotato/static/scripts/combined.vendor.min.js b/couchpotato/static/scripts/combined.vendor.min.js
new file mode 100644
index 0000000..61ad0a5
--- /dev/null
+++ b/couchpotato/static/scripts/combined.vendor.min.js
@@ -0,0 +1,9028 @@
+(function() {
+ this.MooTools = {
+ version: "1.5.1",
+ build: "0542c135fdeb7feed7d9917e01447a408f22c876"
+ };
+ var typeOf = this.typeOf = function(item) {
+ if (item == null) return "null";
+ if (item.$family != null) return item.$family();
+ if (item.nodeName) {
+ if (item.nodeType == 1) return "element";
+ if (item.nodeType == 3) return /\S/.test(item.nodeValue) ? "textnode" : "whitespace";
+ } else if (typeof item.length == "number") {
+ if ("callee" in item) return "arguments";
+ if ("item" in item) return "collection";
+ }
+ return typeof item;
+ };
+ var instanceOf = this.instanceOf = function(item, object) {
+ if (item == null) return false;
+ var constructor = item.$constructor || item.constructor;
+ while (constructor) {
+ if (constructor === object) return true;
+ constructor = constructor.parent;
+ }
+ if (!item.hasOwnProperty) return false;
+ return item instanceof object;
+ };
+ var Function = this.Function;
+ var enumerables = true;
+ for (var i in {
+ toString: 1
+ }) enumerables = null;
+ if (enumerables) enumerables = [ "hasOwnProperty", "valueOf", "isPrototypeOf", "propertyIsEnumerable", "toLocaleString", "toString", "constructor" ];
+ Function.prototype.overloadSetter = function(usePlural) {
+ var self = this;
+ return function(a, b) {
+ if (a == null) return this;
+ if (usePlural || typeof a != "string") {
+ for (var k in a) self.call(this, k, a[k]);
+ if (enumerables) for (var i = enumerables.length; i--; ) {
+ k = enumerables[i];
+ if (a.hasOwnProperty(k)) self.call(this, k, a[k]);
+ }
+ } else {
+ self.call(this, a, b);
+ }
+ return this;
+ };
+ };
+ Function.prototype.overloadGetter = function(usePlural) {
+ var self = this;
+ return function(a) {
+ var args, result;
+ if (typeof a != "string") args = a; else if (arguments.length > 1) args = arguments; else if (usePlural) args = [ a ];
+ if (args) {
+ result = {};
+ for (var i = 0; i < args.length; i++) result[args[i]] = self.call(this, args[i]);
+ } else {
+ result = self.call(this, a);
+ }
+ return result;
+ };
+ };
+ Function.prototype.extend = function(key, value) {
+ this[key] = value;
+ }.overloadSetter();
+ Function.prototype.implement = function(key, value) {
+ this.prototype[key] = value;
+ }.overloadSetter();
+ var slice = Array.prototype.slice;
+ Function.from = function(item) {
+ return typeOf(item) == "function" ? item : function() {
+ return item;
+ };
+ };
+ Array.from = function(item) {
+ if (item == null) return [];
+ return Type.isEnumerable(item) && typeof item != "string" ? typeOf(item) == "array" ? item : slice.call(item) : [ item ];
+ };
+ Number.from = function(item) {
+ var number = parseFloat(item);
+ return isFinite(number) ? number : null;
+ };
+ String.from = function(item) {
+ return item + "";
+ };
+ Function.implement({
+ hide: function() {
+ this.$hidden = true;
+ return this;
+ },
+ protect: function() {
+ this.$protected = true;
+ return this;
+ }
+ });
+ var Type = this.Type = function(name, object) {
+ if (name) {
+ var lower = name.toLowerCase();
+ var typeCheck = function(item) {
+ return typeOf(item) == lower;
+ };
+ Type["is" + name] = typeCheck;
+ if (object != null) {
+ object.prototype.$family = function() {
+ return lower;
+ }.hide();
+ }
+ }
+ if (object == null) return null;
+ object.extend(this);
+ object.$constructor = Type;
+ object.prototype.$constructor = object;
+ return object;
+ };
+ var toString = Object.prototype.toString;
+ Type.isEnumerable = function(item) {
+ return item != null && typeof item.length == "number" && toString.call(item) != "[object Function]";
+ };
+ var hooks = {};
+ var hooksOf = function(object) {
+ var type = typeOf(object.prototype);
+ return hooks[type] || (hooks[type] = []);
+ };
+ var implement = function(name, method) {
+ if (method && method.$hidden) return;
+ var hooks = hooksOf(this);
+ for (var i = 0; i < hooks.length; i++) {
+ var hook = hooks[i];
+ if (typeOf(hook) == "type") implement.call(hook, name, method); else hook.call(this, name, method);
+ }
+ var previous = this.prototype[name];
+ if (previous == null || !previous.$protected) this.prototype[name] = method;
+ if (this[name] == null && typeOf(method) == "function") extend.call(this, name, function(item) {
+ return method.apply(item, slice.call(arguments, 1));
+ });
+ };
+ var extend = function(name, method) {
+ if (method && method.$hidden) return;
+ var previous = this[name];
+ if (previous == null || !previous.$protected) this[name] = method;
+ };
+ Type.implement({
+ implement: implement.overloadSetter(),
+ extend: extend.overloadSetter(),
+ alias: function(name, existing) {
+ implement.call(this, name, this.prototype[existing]);
+ }.overloadSetter(),
+ mirror: function(hook) {
+ hooksOf(this).push(hook);
+ return this;
+ }
+ });
+ new Type("Type", Type);
+ var force = function(name, object, methods) {
+ var isType = object != Object, prototype = object.prototype;
+ if (isType) object = new Type(name, object);
+ for (var i = 0, l = methods.length; i < l; i++) {
+ var key = methods[i], generic = object[key], proto = prototype[key];
+ if (generic) generic.protect();
+ if (isType && proto) object.implement(key, proto.protect());
+ }
+ if (isType) {
+ var methodsEnumerable = prototype.propertyIsEnumerable(methods[0]);
+ object.forEachMethod = function(fn) {
+ if (!methodsEnumerable) for (var i = 0, l = methods.length; i < l; i++) {
+ fn.call(prototype, prototype[methods[i]], methods[i]);
+ }
+ for (var key in prototype) fn.call(prototype, prototype[key], key);
+ };
+ }
+ return force;
+ };
+ force("String", String, [ "charAt", "charCodeAt", "concat", "contains", "indexOf", "lastIndexOf", "match", "quote", "replace", "search", "slice", "split", "substr", "substring", "trim", "toLowerCase", "toUpperCase" ])("Array", Array, [ "pop", "push", "reverse", "shift", "sort", "splice", "unshift", "concat", "join", "slice", "indexOf", "lastIndexOf", "filter", "forEach", "every", "map", "some", "reduce", "reduceRight" ])("Number", Number, [ "toExponential", "toFixed", "toLocaleString", "toPrecision" ])("Function", Function, [ "apply", "call", "bind" ])("RegExp", RegExp, [ "exec", "test" ])("Object", Object, [ "create", "defineProperty", "defineProperties", "keys", "getPrototypeOf", "getOwnPropertyDescriptor", "getOwnPropertyNames", "preventExtensions", "isExtensible", "seal", "isSealed", "freeze", "isFrozen" ])("Date", Date, [ "now" ]);
+ Object.extend = extend.overloadSetter();
+ Date.extend("now", function() {
+ return +new Date();
+ });
+ new Type("Boolean", Boolean);
+ Number.prototype.$family = function() {
+ return isFinite(this) ? "number" : "null";
+ }.hide();
+ Number.extend("random", function(min, max) {
+ return Math.floor(Math.random() * (max - min + 1) + min);
+ });
+ var hasOwnProperty = Object.prototype.hasOwnProperty;
+ Object.extend("forEach", function(object, fn, bind) {
+ for (var key in object) {
+ if (hasOwnProperty.call(object, key)) fn.call(bind, object[key], key, object);
+ }
+ });
+ Object.each = Object.forEach;
+ Array.implement({
+ forEach: function(fn, bind) {
+ for (var i = 0, l = this.length; i < l; i++) {
+ if (i in this) fn.call(bind, this[i], i, this);
+ }
+ },
+ each: function(fn, bind) {
+ Array.forEach(this, fn, bind);
+ return this;
+ }
+ });
+ var cloneOf = function(item) {
+ switch (typeOf(item)) {
+ case "array":
+ return item.clone();
+
+ case "object":
+ return Object.clone(item);
+
+ default:
+ return item;
+ }
+ };
+ Array.implement("clone", function() {
+ var i = this.length, clone = new Array(i);
+ while (i--) clone[i] = cloneOf(this[i]);
+ return clone;
+ });
+ var mergeOne = function(source, key, current) {
+ switch (typeOf(current)) {
+ case "object":
+ if (typeOf(source[key]) == "object") Object.merge(source[key], current); else source[key] = Object.clone(current);
+ break;
+
+ case "array":
+ source[key] = current.clone();
+ break;
+
+ default:
+ source[key] = current;
+ }
+ return source;
+ };
+ Object.extend({
+ merge: function(source, k, v) {
+ if (typeOf(k) == "string") return mergeOne(source, k, v);
+ for (var i = 1, l = arguments.length; i < l; i++) {
+ var object = arguments[i];
+ for (var key in object) mergeOne(source, key, object[key]);
+ }
+ return source;
+ },
+ clone: function(object) {
+ var clone = {};
+ for (var key in object) clone[key] = cloneOf(object[key]);
+ return clone;
+ },
+ append: function(original) {
+ for (var i = 1, l = arguments.length; i < l; i++) {
+ var extended = arguments[i] || {};
+ for (var key in extended) original[key] = extended[key];
+ }
+ return original;
+ }
+ });
+ [ "Object", "WhiteSpace", "TextNode", "Collection", "Arguments" ].each(function(name) {
+ new Type(name);
+ });
+ var UID = Date.now();
+ String.extend("uniqueID", function() {
+ return (UID++).toString(36);
+ });
+})();
+
+Array.implement({
+ every: function(fn, bind) {
+ for (var i = 0, l = this.length >>> 0; i < l; i++) {
+ if (i in this && !fn.call(bind, this[i], i, this)) return false;
+ }
+ return true;
+ },
+ filter: function(fn, bind) {
+ var results = [];
+ for (var value, i = 0, l = this.length >>> 0; i < l; i++) if (i in this) {
+ value = this[i];
+ if (fn.call(bind, value, i, this)) results.push(value);
+ }
+ return results;
+ },
+ indexOf: function(item, from) {
+ var length = this.length >>> 0;
+ for (var i = from < 0 ? Math.max(0, length + from) : from || 0; i < length; i++) {
+ if (this[i] === item) return i;
+ }
+ return -1;
+ },
+ map: function(fn, bind) {
+ var length = this.length >>> 0, results = Array(length);
+ for (var i = 0; i < length; i++) {
+ if (i in this) results[i] = fn.call(bind, this[i], i, this);
+ }
+ return results;
+ },
+ some: function(fn, bind) {
+ for (var i = 0, l = this.length >>> 0; i < l; i++) {
+ if (i in this && fn.call(bind, this[i], i, this)) return true;
+ }
+ return false;
+ },
+ clean: function() {
+ return this.filter(function(item) {
+ return item != null;
+ });
+ },
+ invoke: function(methodName) {
+ var args = Array.slice(arguments, 1);
+ return this.map(function(item) {
+ return item[methodName].apply(item, args);
+ });
+ },
+ associate: function(keys) {
+ var obj = {}, length = Math.min(this.length, keys.length);
+ for (var i = 0; i < length; i++) obj[keys[i]] = this[i];
+ return obj;
+ },
+ link: function(object) {
+ var result = {};
+ for (var i = 0, l = this.length; i < l; i++) {
+ for (var key in object) {
+ if (object[key](this[i])) {
+ result[key] = this[i];
+ delete object[key];
+ break;
+ }
+ }
+ }
+ return result;
+ },
+ contains: function(item, from) {
+ return this.indexOf(item, from) != -1;
+ },
+ append: function(array) {
+ this.push.apply(this, array);
+ return this;
+ },
+ getLast: function() {
+ return this.length ? this[this.length - 1] : null;
+ },
+ getRandom: function() {
+ return this.length ? this[Number.random(0, this.length - 1)] : null;
+ },
+ include: function(item) {
+ if (!this.contains(item)) this.push(item);
+ return this;
+ },
+ combine: function(array) {
+ for (var i = 0, l = array.length; i < l; i++) this.include(array[i]);
+ return this;
+ },
+ erase: function(item) {
+ for (var i = this.length; i--; ) {
+ if (this[i] === item) this.splice(i, 1);
+ }
+ return this;
+ },
+ empty: function() {
+ this.length = 0;
+ return this;
+ },
+ flatten: function() {
+ var array = [];
+ for (var i = 0, l = this.length; i < l; i++) {
+ var type = typeOf(this[i]);
+ if (type == "null") continue;
+ array = array.concat(type == "array" || type == "collection" || type == "arguments" || instanceOf(this[i], Array) ? Array.flatten(this[i]) : this[i]);
+ }
+ return array;
+ },
+ pick: function() {
+ for (var i = 0, l = this.length; i < l; i++) {
+ if (this[i] != null) return this[i];
+ }
+ return null;
+ },
+ hexToRgb: function(array) {
+ if (this.length != 3) return null;
+ var rgb = this.map(function(value) {
+ if (value.length == 1) value += value;
+ return parseInt(value, 16);
+ });
+ return array ? rgb : "rgb(" + rgb + ")";
+ },
+ rgbToHex: function(array) {
+ if (this.length < 3) return null;
+ if (this.length == 4 && this[3] == 0 && !array) return "transparent";
+ var hex = [];
+ for (var i = 0; i < 3; i++) {
+ var bit = (this[i] - 0).toString(16);
+ hex.push(bit.length == 1 ? "0" + bit : bit);
+ }
+ return array ? hex : "#" + hex.join("");
+ }
+});
+
+String.implement({
+ contains: function(string, index) {
+ return (index ? String(this).slice(index) : String(this)).indexOf(string) > -1;
+ },
+ test: function(regex, params) {
+ return (typeOf(regex) == "regexp" ? regex : new RegExp("" + regex, params)).test(this);
+ },
+ trim: function() {
+ return String(this).replace(/^\s+|\s+$/g, "");
+ },
+ clean: function() {
+ return String(this).replace(/\s+/g, " ").trim();
+ },
+ camelCase: function() {
+ return String(this).replace(/-\D/g, function(match) {
+ return match.charAt(1).toUpperCase();
+ });
+ },
+ hyphenate: function() {
+ return String(this).replace(/[A-Z]/g, function(match) {
+ return "-" + match.charAt(0).toLowerCase();
+ });
+ },
+ capitalize: function() {
+ return String(this).replace(/\b[a-z]/g, function(match) {
+ return match.toUpperCase();
+ });
+ },
+ escapeRegExp: function() {
+ return String(this).replace(/([-.*+?^${}()|[\]\/\\])/g, "\\$1");
+ },
+ toInt: function(base) {
+ return parseInt(this, base || 10);
+ },
+ toFloat: function() {
+ return parseFloat(this);
+ },
+ hexToRgb: function(array) {
+ var hex = String(this).match(/^#?(\w{1,2})(\w{1,2})(\w{1,2})$/);
+ return hex ? hex.slice(1).hexToRgb(array) : null;
+ },
+ rgbToHex: function(array) {
+ var rgb = String(this).match(/\d{1,3}/g);
+ return rgb ? rgb.rgbToHex(array) : null;
+ },
+ substitute: function(object, regexp) {
+ return String(this).replace(regexp || /\\?\{([^{}]+)\}/g, function(match, name) {
+ if (match.charAt(0) == "\\") return match.slice(1);
+ return object[name] != null ? object[name] : "";
+ });
+ }
+});
+
+Function.extend({
+ attempt: function() {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ try {
+ return arguments[i]();
+ } catch (e) {}
+ }
+ return null;
+ }
+});
+
+Function.implement({
+ attempt: function(args, bind) {
+ try {
+ return this.apply(bind, Array.from(args));
+ } catch (e) {}
+ return null;
+ },
+ bind: function(that) {
+ var self = this, args = arguments.length > 1 ? Array.slice(arguments, 1) : null, F = function() {};
+ var bound = function() {
+ var context = that, length = arguments.length;
+ if (this instanceof bound) {
+ F.prototype = self.prototype;
+ context = new F();
+ }
+ var result = !args && !length ? self.call(context) : self.apply(context, args && length ? args.concat(Array.slice(arguments)) : args || arguments);
+ return context == that ? result : context;
+ };
+ return bound;
+ },
+ pass: function(args, bind) {
+ var self = this;
+ if (args != null) args = Array.from(args);
+ return function() {
+ return self.apply(bind, args || arguments);
+ };
+ },
+ delay: function(delay, bind, args) {
+ return setTimeout(this.pass(args == null ? [] : args, bind), delay);
+ },
+ periodical: function(periodical, bind, args) {
+ return setInterval(this.pass(args == null ? [] : args, bind), periodical);
+ }
+});
+
+Number.implement({
+ limit: function(min, max) {
+ return Math.min(max, Math.max(min, this));
+ },
+ round: function(precision) {
+ precision = Math.pow(10, precision || 0).toFixed(precision < 0 ? -precision : 0);
+ return Math.round(this * precision) / precision;
+ },
+ times: function(fn, bind) {
+ for (var i = 0; i < this; i++) fn.call(bind, i, this);
+ },
+ toFloat: function() {
+ return parseFloat(this);
+ },
+ toInt: function(base) {
+ return parseInt(this, base || 10);
+ }
+});
+
+Number.alias("each", "times");
+
+(function(math) {
+ var methods = {};
+ math.each(function(name) {
+ if (!Number[name]) methods[name] = function() {
+ return Math[name].apply(null, [ this ].concat(Array.from(arguments)));
+ };
+ });
+ Number.implement(methods);
+})([ "abs", "acos", "asin", "atan", "atan2", "ceil", "cos", "exp", "floor", "log", "max", "min", "pow", "sin", "sqrt", "tan" ]);
+
+(function() {
+ var Class = this.Class = new Type("Class", function(params) {
+ if (instanceOf(params, Function)) params = {
+ initialize: params
+ };
+ var newClass = function() {
+ reset(this);
+ if (newClass.$prototyping) return this;
+ this.$caller = null;
+ var value = this.initialize ? this.initialize.apply(this, arguments) : this;
+ this.$caller = this.caller = null;
+ return value;
+ }.extend(this).implement(params);
+ newClass.$constructor = Class;
+ newClass.prototype.$constructor = newClass;
+ newClass.prototype.parent = parent;
+ return newClass;
+ });
+ var parent = function() {
+ if (!this.$caller) throw new Error('The method "parent" cannot be called.');
+ var name = this.$caller.$name, parent = this.$caller.$owner.parent, previous = parent ? parent.prototype[name] : null;
+ if (!previous) throw new Error('The method "' + name + '" has no parent.');
+ return previous.apply(this, arguments);
+ };
+ var reset = function(object) {
+ for (var key in object) {
+ var value = object[key];
+ switch (typeOf(value)) {
+ case "object":
+ var F = function() {};
+ F.prototype = value;
+ object[key] = reset(new F());
+ break;
+
+ case "array":
+ object[key] = value.clone();
+ break;
+ }
+ }
+ return object;
+ };
+ var wrap = function(self, key, method) {
+ if (method.$origin) method = method.$origin;
+ var wrapper = function() {
+ if (method.$protected && this.$caller == null) throw new Error('The method "' + key + '" cannot be called.');
+ var caller = this.caller, current = this.$caller;
+ this.caller = current;
+ this.$caller = wrapper;
+ var result = method.apply(this, arguments);
+ this.$caller = current;
+ this.caller = caller;
+ return result;
+ }.extend({
+ $owner: self,
+ $origin: method,
+ $name: key
+ });
+ return wrapper;
+ };
+ var implement = function(key, value, retain) {
+ if (Class.Mutators.hasOwnProperty(key)) {
+ value = Class.Mutators[key].call(this, value);
+ if (value == null) return this;
+ }
+ if (typeOf(value) == "function") {
+ if (value.$hidden) return this;
+ this.prototype[key] = retain ? value : wrap(this, key, value);
+ } else {
+ Object.merge(this.prototype, key, value);
+ }
+ return this;
+ };
+ var getInstance = function(klass) {
+ klass.$prototyping = true;
+ var proto = new klass();
+ delete klass.$prototyping;
+ return proto;
+ };
+ Class.implement("implement", implement.overloadSetter());
+ Class.Mutators = {
+ Extends: function(parent) {
+ this.parent = parent;
+ this.prototype = getInstance(parent);
+ },
+ Implements: function(items) {
+ Array.from(items).each(function(item) {
+ var instance = new item();
+ for (var key in instance) implement.call(this, key, instance[key], true);
+ }, this);
+ }
+ };
+})();
+
+(function() {
+ this.Chain = new Class({
+ $chain: [],
+ chain: function() {
+ this.$chain.append(Array.flatten(arguments));
+ return this;
+ },
+ callChain: function() {
+ return this.$chain.length ? this.$chain.shift().apply(this, arguments) : false;
+ },
+ clearChain: function() {
+ this.$chain.empty();
+ return this;
+ }
+ });
+ var removeOn = function(string) {
+ return string.replace(/^on([A-Z])/, function(full, first) {
+ return first.toLowerCase();
+ });
+ };
+ this.Events = new Class({
+ $events: {},
+ addEvent: function(type, fn, internal) {
+ type = removeOn(type);
+ this.$events[type] = (this.$events[type] || []).include(fn);
+ if (internal) fn.internal = true;
+ return this;
+ },
+ addEvents: function(events) {
+ for (var type in events) this.addEvent(type, events[type]);
+ return this;
+ },
+ fireEvent: function(type, args, delay) {
+ type = removeOn(type);
+ var events = this.$events[type];
+ if (!events) return this;
+ args = Array.from(args);
+ events.each(function(fn) {
+ if (delay) fn.delay(delay, this, args); else fn.apply(this, args);
+ }, this);
+ return this;
+ },
+ removeEvent: function(type, fn) {
+ type = removeOn(type);
+ var events = this.$events[type];
+ if (events && !fn.internal) {
+ var index = events.indexOf(fn);
+ if (index != -1) delete events[index];
+ }
+ return this;
+ },
+ removeEvents: function(events) {
+ var type;
+ if (typeOf(events) == "object") {
+ for (type in events) this.removeEvent(type, events[type]);
+ return this;
+ }
+ if (events) events = removeOn(events);
+ for (type in this.$events) {
+ if (events && events != type) continue;
+ var fns = this.$events[type];
+ for (var i = fns.length; i--; ) if (i in fns) {
+ this.removeEvent(type, fns[i]);
+ }
+ }
+ return this;
+ }
+ });
+ this.Options = new Class({
+ setOptions: function() {
+ var options = this.options = Object.merge.apply(null, [ {}, this.options ].append(arguments));
+ if (this.addEvent) for (var option in options) {
+ if (typeOf(options[option]) != "function" || !/^on[A-Z]/.test(option)) continue;
+ this.addEvent(option, options[option]);
+ delete options[option];
+ }
+ return this;
+ }
+ });
+})();
+
+(function() {
+ var document = this.document;
+ var window = document.window = this;
+ var parse = function(ua, platform) {
+ ua = ua.toLowerCase();
+ platform = platform ? platform.toLowerCase() : "";
+ var UA = ua.match(/(opera|ie|firefox|chrome|trident|crios|version)[\s\/:]([\w\d\.]+)?.*?(safari|(?:rv[\s\/:]|version[\s\/:])([\w\d\.]+)|$)/) || [ null, "unknown", 0 ];
+ if (UA[1] == "trident") {
+ UA[1] = "ie";
+ if (UA[4]) UA[2] = UA[4];
+ } else if (UA[1] == "crios") {
+ UA[1] = "chrome";
+ }
+ platform = ua.match(/ip(?:ad|od|hone)/) ? "ios" : (ua.match(/(?:webos|android)/) || platform.match(/mac|win|linux/) || [ "other" ])[0];
+ if (platform == "win") platform = "windows";
+ return {
+ extend: Function.prototype.extend,
+ name: UA[1] == "version" ? UA[3] : UA[1],
+ version: parseFloat(UA[1] == "opera" && UA[4] ? UA[4] : UA[2]),
+ platform: platform
+ };
+ };
+ var Browser = this.Browser = parse(navigator.userAgent, navigator.platform);
+ if (Browser.name == "ie") {
+ Browser.version = document.documentMode;
+ }
+ Browser.extend({
+ Features: {
+ xpath: !!document.evaluate,
+ air: !!window.runtime,
+ query: !!document.querySelector,
+ json: !!window.JSON
+ },
+ parseUA: parse
+ });
+ Browser.Request = function() {
+ var XMLHTTP = function() {
+ return new XMLHttpRequest();
+ };
+ var MSXML2 = function() {
+ return new ActiveXObject("MSXML2.XMLHTTP");
+ };
+ var MSXML = function() {
+ return new ActiveXObject("Microsoft.XMLHTTP");
+ };
+ return Function.attempt(function() {
+ XMLHTTP();
+ return XMLHTTP;
+ }, function() {
+ MSXML2();
+ return MSXML2;
+ }, function() {
+ MSXML();
+ return MSXML;
+ });
+ }();
+ Browser.Features.xhr = !!Browser.Request;
+ Browser.exec = function(text) {
+ if (!text) return text;
+ if (window.execScript) {
+ window.execScript(text);
+ } else {
+ var script = document.createElement("script");
+ script.setAttribute("type", "text/javascript");
+ script.text = text;
+ document.head.appendChild(script);
+ document.head.removeChild(script);
+ }
+ return text;
+ };
+ String.implement("stripScripts", function(exec) {
+ var scripts = "";
+ var text = this.replace(/<script[^>]*>([\s\S]*?)<\/script>/gi, function(all, code) {
+ scripts += code + "\n";
+ return "";
+ });
+ if (exec === true) Browser.exec(scripts); else if (typeOf(exec) == "function") exec(scripts, text);
+ return text;
+ });
+
{% end %}
- {% for url in fireEvent('clientscript.get_scripts', as_html = True, location = 'head', single = True) %}
- {% end %}
- {% for url in fireEvent('clientscript.get_styles', as_html = True, location = 'head', single = True) %}
- {% end %}
-
-
+ {% if Env.get('dev') %}
+
+ {% end %}
{% end %}
+ {% for url in fireEvent('clientscript.get_styles', single = True) %}
+ {% end %}
+ {% for url in fireEvent('clientscript.get_scripts', single = True) %}{% if 'combined.plugins' not in url %}
+ {% end %}{% end %}
-
+ {% if Env.get('dev') %}
+
+ {% end %}
CouchPotato
diff --git a/init/freebsd b/init/freebsd
index d389933..bf67b48 100644
--- a/init/freebsd
+++ b/init/freebsd
@@ -1,7 +1,7 @@
#!/bin/sh
#
# PROVIDE: couchpotato
-# REQUIRE: DAEMON
+# REQUIRE: LOGIN
# KEYWORD: shutdown
# Add the following lines to /etc/rc.conf to enable couchpotato:
diff --git a/libs/CodernityDB/__init__.py b/libs/CodernityDB/__init__.py
index 8399a60..c059538 100644
--- a/libs/CodernityDB/__init__.py
+++ b/libs/CodernityDB/__init__.py
@@ -16,5 +16,5 @@
# limitations under the License.
-__version__ = '0.4.2'
+__version__ = '0.5.0'
__license__ = "Apache 2.0"
diff --git a/libs/CodernityDB/database.py b/libs/CodernityDB/database.py
index 064836f..7aa177a 100644
--- a/libs/CodernityDB/database.py
+++ b/libs/CodernityDB/database.py
@@ -339,7 +339,7 @@ class Database(object):
self.__set_main_storage()
self.__compat_things()
for patch in getattr(ind_obj, 'patchers', ()): # index can patch db object
- patch(self)
+ patch(self, ind_obj)
return name
def edit_index(self, index, reindex=False, ind_kwargs=None):
diff --git a/libs/CodernityDB/database_safe_shared.py b/libs/CodernityDB/database_safe_shared.py
index e31100f..72290e8 100644
--- a/libs/CodernityDB/database_safe_shared.py
+++ b/libs/CodernityDB/database_safe_shared.py
@@ -59,8 +59,7 @@ class SafeDatabase(Database):
def __init__(self, path, *args, **kwargs):
super(SafeDatabase, self).__init__(path, *args, **kwargs)
- self.indexes_locks = defaultdict(
- lambda: cdb_environment['rlock_obj']())
+ self.indexes_locks = defaultdict(cdb_environment['rlock_obj'])
self.close_open_lock = cdb_environment['rlock_obj']()
self.main_lock = cdb_environment['rlock_obj']()
self.id_revs = {}
@@ -94,7 +93,6 @@ class SafeDatabase(Database):
def initialize(self, *args, **kwargs):
with self.close_open_lock:
- self.close_open_lock.acquire()
res = super(SafeDatabase, self).initialize(*args, **kwargs)
for name in self.indexes_names.iterkeys():
self.indexes_locks[name] = cdb_environment['rlock_obj']()
diff --git a/libs/CodernityDB/debug_stuff.py b/libs/CodernityDB/debug_stuff.py
index 76cdedf..2dce695 100644
--- a/libs/CodernityDB/debug_stuff.py
+++ b/libs/CodernityDB/debug_stuff.py
@@ -92,7 +92,7 @@ class DebugTreeBasedIndex(TreeBasedIndex):
+ nr_of_elements * (self.key_size + self.pointer_size))
node = struct.unpack('<' + self.node_heading_format + self.pointer_format
+ nr_of_elements * (
- self.key_format + self.pointer_format),
+ self.key_format + self.pointer_format),
data)
print node
print
diff --git a/libs/CodernityDB/sharded_hash.py b/libs/CodernityDB/sharded_hash.py
index 08a8c2f..3cf76ac 100644
--- a/libs/CodernityDB/sharded_hash.py
+++ b/libs/CodernityDB/sharded_hash.py
@@ -40,7 +40,7 @@ from CodernityDB.sharded_index import ShardedIndex
self.patchers.append(self.wrap_insert_id_index)
@staticmethod
- def wrap_insert_id_index(db_obj, clean=False):
+ def wrap_insert_id_index(db_obj, ind_obj, clean=False):
def _insert_id_index(_rev, data):
"""
Performs insert on **id** index.
diff --git a/libs/CodernityDB/tree_index.py b/libs/CodernityDB/tree_index.py
index b79805d..4257b44 100644
--- a/libs/CodernityDB/tree_index.py
+++ b/libs/CodernityDB/tree_index.py
@@ -1565,13 +1565,13 @@ class IU_TreeBasedIndex(Index):
def update(self, doc_id, key, u_start=0, u_size=0, u_status='o'):
containing_leaf_start, element_index, old_doc_id, old_key, old_start, old_size, old_status = self._find_key_to_update(key, doc_id)
+ if u_start:
+ old_start = u_start
+ if u_size:
+ old_size = u_size
+ if u_status:
+ old_status = u_status
new_data = (old_doc_id, old_start, old_size, old_status)
- if not u_start:
- new_data[1] = u_start
- if not u_size:
- new_data[2] = u_size
- if not u_status:
- new_data[3] = u_status
self._update_element(containing_leaf_start, element_index, new_data)
self._find_key.delete(key)
diff --git a/libs/minify/__init__.py b/libs/minify/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/libs/minify/cssmin.py b/libs/minify/cssmin.py
deleted file mode 100644
index 09beb19..0000000
--- a/libs/minify/cssmin.py
+++ /dev/null
@@ -1,202 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-# `cssmin.py` - A Python port of the YUI CSS compressor.
-
-
-from StringIO import StringIO # The pure-Python StringIO supports unicode.
-import re
-
-
-__version__ = '0.1.1'
-
-
-def remove_comments(css):
- """Remove all CSS comment blocks."""
-
- iemac = False
- preserve = False
- comment_start = css.find("/*")
- while comment_start >= 0:
- # Preserve comments that look like `/*!...*/`.
- # Slicing is used to make sure we don"t get an IndexError.
- preserve = css[comment_start + 2:comment_start + 3] == "!"
-
- comment_end = css.find("*/", comment_start + 2)
- if comment_end < 0:
- if not preserve:
- css = css[:comment_start]
- break
- elif comment_end >= (comment_start + 2):
- if css[comment_end - 1] == "\\":
- # This is an IE Mac-specific comment; leave this one and the
- # following one alone.
- comment_start = comment_end + 2
- iemac = True
- elif iemac:
- comment_start = comment_end + 2
- iemac = False
- elif not preserve:
- css = css[:comment_start] + css[comment_end + 2:]
- else:
- comment_start = comment_end + 2
- comment_start = css.find("/*", comment_start)
-
- return css
-
-
-def remove_unnecessary_whitespace(css):
- """Remove unnecessary whitespace characters."""
-
- def pseudoclasscolon(css):
-
- """
- Prevents 'p :link' from becoming 'p:link'.
-
- Translates 'p :link' into 'p ___PSEUDOCLASSCOLON___link'; this is
- translated back again later.
- """
-
- regex = re.compile(r"(^|\})(([^\{\:])+\:)+([^\{]*\{)")
- match = regex.search(css)
- while match:
- css = ''.join([
- css[:match.start()],
- match.group().replace(":", "___PSEUDOCLASSCOLON___"),
- css[match.end():]])
- match = regex.search(css)
- return css
-
- css = pseudoclasscolon(css)
- # Remove spaces from before things.
- css = re.sub(r"\s+([!{};:>+\(\)\],])", r"\1", css)
-
- # If there is a `@charset`, then only allow one, and move to the beginning.
- css = re.sub(r"^(.*)(@charset \"[^\"]*\";)", r"\2\1", css)
- css = re.sub(r"^(\s*@charset [^;]+;\s*)+", r"\1", css)
-
- # Put the space back in for a few cases, such as `@media screen` and
- # `(-webkit-min-device-pixel-ratio:0)`.
- css = re.sub(r"\band\(", "and (", css)
-
- # Put the colons back.
- css = css.replace('___PSEUDOCLASSCOLON___', ':')
-
- # Remove spaces from after things.
- css = re.sub(r"([!{}:;>+\(\[,])\s+", r"\1", css)
-
- return css
-
-
-def remove_unnecessary_semicolons(css):
- """Remove unnecessary semicolons."""
-
- return re.sub(r";+\}", "}", css)
-
-
-def remove_empty_rules(css):
- """Remove empty rules."""
-
- return re.sub(r"[^\}\{]+\{\}", "", css)
-
-
-def normalize_rgb_colors_to_hex(css):
- """Convert `rgb(51,102,153)` to `#336699`."""
-
- regex = re.compile(r"rgb\s*\(\s*([0-9,\s]+)\s*\)")
- match = regex.search(css)
- while match:
- colors = match.group(1).split(",")
- hexcolor = '#%.2x%.2x%.2x' % tuple(map(int, colors))
- css = css.replace(match.group(), hexcolor)
- match = regex.search(css)
- return css
-
-
-def condense_zero_units(css):
- """Replace `0(px, em, %, etc)` with `0`."""
-
- return re.sub(r"([\s:])(0)(px|em|%|in|cm|mm|pc|pt|ex)", r"\1\2", css)
-
-
-def condense_multidimensional_zeros(css):
- """Replace `:0 0 0 0;`, `:0 0 0;` etc. with `:0;`."""
-
- css = css.replace(":0 0 0 0;", ":0;")
- css = css.replace(":0 0 0;", ":0;")
- css = css.replace(":0 0;", ":0;")
-
- # Revert `background-position:0;` to the valid `background-position:0 0;`.
- css = css.replace("background-position:0;", "background-position:0 0;")
-
- return css
-
-
-def condense_floating_points(css):
- """Replace `0.6` with `.6` where possible."""
-
- return re.sub(r"(:|\s)0+\.(\d+)", r"\1.\2", css)
-
-
-def condense_hex_colors(css):
- """Shorten colors from #AABBCC to #ABC where possible."""
-
- regex = re.compile(r"([^\"'=\s])(\s*)#([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])([0-9a-fA-F])")
- match = regex.search(css)
- while match:
- first = match.group(3) + match.group(5) + match.group(7)
- second = match.group(4) + match.group(6) + match.group(8)
- if first.lower() == second.lower():
- css = css.replace(match.group(), match.group(1) + match.group(2) + '#' + first)
- match = regex.search(css, match.end() - 3)
- else:
- match = regex.search(css, match.end())
- return css
-
-
-def condense_whitespace(css):
- """Condense multiple adjacent whitespace characters into one."""
-
- return re.sub(r"\s+", " ", css)
-
-
-def condense_semicolons(css):
- """Condense multiple adjacent semicolon characters into one."""
-
- return re.sub(r";;+", ";", css)
-
-
-def wrap_css_lines(css, line_length):
- """Wrap the lines of the given CSS to an approximate length."""
-
- lines = []
- line_start = 0
- for i, char in enumerate(css):
- # It's safe to break after `}` characters.
- if char == '}' and (i - line_start >= line_length):
- lines.append(css[line_start:i + 1])
- line_start = i + 1
-
- if line_start < len(css):
- lines.append(css[line_start:])
- return '\n'.join(lines)
-
-
-def cssmin(css, wrap = None):
- css = remove_comments(css)
- css = condense_whitespace(css)
- # A pseudo class for the Box Model Hack
- # (see http://tantek.com/CSS/Examples/boxmodelhack.html)
- css = css.replace('"\\"}\\""', "___PSEUDOCLASSBMH___")
- #css = remove_unnecessary_whitespace(css)
- css = remove_unnecessary_semicolons(css)
- css = condense_zero_units(css)
- css = condense_multidimensional_zeros(css)
- css = condense_floating_points(css)
- css = normalize_rgb_colors_to_hex(css)
- css = condense_hex_colors(css)
- if wrap is not None:
- css = wrap_css_lines(css, wrap)
- css = css.replace("___PSEUDOCLASSBMH___", '"\\"}\\""')
- css = condense_semicolons(css)
- return css.strip()
diff --git a/libs/minify/jsmin.py b/libs/minify/jsmin.py
deleted file mode 100644
index a1b81f9..0000000
--- a/libs/minify/jsmin.py
+++ /dev/null
@@ -1,218 +0,0 @@
-#!/usr/bin/python
-
-# This code is original from jsmin by Douglas Crockford, it was translated to
-# Python by Baruch Even. The original code had the following copyright and
-# license.
-#
-# /* jsmin.c
-# 2007-05-22
-#
-# Copyright (c) 2002 Douglas Crockford (www.crockford.com)
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy of
-# this software and associated documentation files (the "Software"), to deal in
-# the Software without restriction, including without limitation the rights to
-# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-# of the Software, and to permit persons to whom the Software is furnished to do
-# so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# The Software shall be used for Good, not Evil.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-# */
-
-from StringIO import StringIO
-
-def jsmin(js):
- ins = StringIO(js)
- outs = StringIO()
- JavascriptMinify().minify(ins, outs)
- str = outs.getvalue()
- if len(str) > 0 and str[0] == '\n':
- str = str[1:]
- return str
-
-def isAlphanum(c):
- """return true if the character is a letter, digit, underscore,
- dollar sign, or non-ASCII character.
- """
- return ((c >= 'a' and c <= 'z') or (c >= '0' and c <= '9') or
- (c >= 'A' and c <= 'Z') or c == '_' or c == '$' or c == '\\' or (c is not None and ord(c) > 126));
-
-class UnterminatedComment(Exception):
- pass
-
-class UnterminatedStringLiteral(Exception):
- pass
-
-class UnterminatedRegularExpression(Exception):
- pass
-
-class JavascriptMinify(object):
-
- def _outA(self):
- self.outstream.write(self.theA)
- def _outB(self):
- self.outstream.write(self.theB)
-
- def _get(self):
- """return the next character from stdin. Watch out for lookahead. If
- the character is a control character, translate it to a space or
- linefeed.
- """
- c = self.theLookahead
- self.theLookahead = None
- if c == None:
- c = self.instream.read(1)
- if c >= ' ' or c == '\n':
- return c
- if c == '': # EOF
- return '\000'
- if c == '\r':
- return '\n'
- return ' '
-
- def _peek(self):
- self.theLookahead = self._get()
- return self.theLookahead
-
- def _next(self):
- """get the next character, excluding comments. peek() is used to see
- if a '/' is followed by a '/' or '*'.
- """
- c = self._get()
- if c == '/':
- p = self._peek()
- if p == '/':
- c = self._get()
- while c > '\n':
- c = self._get()
- return c
- if p == '*':
- c = self._get()
- while 1:
- c = self._get()
- if c == '*':
- if self._peek() == '/':
- self._get()
- return ' '
- if c == '\000':
- raise UnterminatedComment()
-
- return c
-
- def _action(self, action):
- """do something! What you do is determined by the argument:
- 1 Output A. Copy B to A. Get the next B.
- 2 Copy B to A. Get the next B. (Delete A).
- 3 Get the next B. (Delete B).
- action treats a string as a single character. Wow!
- action recognizes a regular expression if it is preceded by ( or , or =.
- """
- if action <= 1:
- self._outA()
-
- if action <= 2:
- self.theA = self.theB
- if self.theA == "'" or self.theA == '"':
- while 1:
- self._outA()
- self.theA = self._get()
- if self.theA == self.theB:
- break
- if self.theA <= '\n':
- raise UnterminatedStringLiteral()
- if self.theA == '\\':
- self._outA()
- self.theA = self._get()
-
-
- if action <= 3:
- self.theB = self._next()
- if self.theB == '/' and (self.theA == '(' or self.theA == ',' or
- self.theA == '=' or self.theA == ':' or
- self.theA == '[' or self.theA == '?' or
- self.theA == '!' or self.theA == '&' or
- self.theA == '|' or self.theA == ';' or
- self.theA == '{' or self.theA == '}' or
- self.theA == '\n'):
- self._outA()
- self._outB()
- while 1:
- self.theA = self._get()
- if self.theA == '/':
- break
- elif self.theA == '\\':
- self._outA()
- self.theA = self._get()
- elif self.theA <= '\n':
- raise UnterminatedRegularExpression()
- self._outA()
- self.theB = self._next()
-
-
- def _jsmin(self):
- """Copy the input to the output, deleting the characters which are
- insignificant to JavaScript. Comments will be removed. Tabs will be
- replaced with spaces. Carriage returns will be replaced with linefeeds.
- Most spaces and linefeeds will be removed.
- """
- self.theA = '\n'
- self._action(3)
-
- while self.theA != '\000':
- if self.theA == ' ':
- if isAlphanum(self.theB):
- self._action(1)
- else:
- self._action(2)
- elif self.theA == '\n':
- if self.theB in ['{', '[', '(', '+', '-']:
- self._action(1)
- elif self.theB == ' ':
- self._action(3)
- else:
- if isAlphanum(self.theB):
- self._action(1)
- else:
- self._action(2)
- else:
- if self.theB == ' ':
- if isAlphanum(self.theA):
- self._action(1)
- else:
- self._action(3)
- elif self.theB == '\n':
- if self.theA in ['}', ']', ')', '+', '-', '"', '\'']:
- self._action(1)
- else:
- if isAlphanum(self.theA):
- self._action(1)
- else:
- self._action(3)
- else:
- self._action(1)
-
- def minify(self, instream, outstream):
- self.instream = instream
- self.outstream = outstream
- self.theA = '\n'
- self.theB = None
- self.theLookahead = None
-
- self._jsmin()
- self.instream.close()
-
-if __name__ == '__main__':
- import sys
- jsm = JavascriptMinify()
- jsm.minify(sys.stdin, sys.stdout)
diff --git a/libs/ndg/__init__.py b/libs/ndg/__init__.py
new file mode 100644
index 0000000..3b01e15
--- /dev/null
+++ b/libs/ndg/__init__.py
@@ -0,0 +1,19 @@
+"""ndg_httpsclient - PyOpenSSL utility to make a httplib-like interface suitable
+for use with urllib2
+
+This is a setuptools namespace_package. DO NOT place any other
+code in this file! There is no guarantee that it will be installed
+with easy_install. See:
+
+http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
+
+... for details.
+"""
+__author__ = "P J Kershaw"
+__date__ = "06/01/12"
+__copyright__ = "(C) 2012 Science and Technology Facilities Council"
+__license__ = "BSD - see LICENSE file in top-level directory"
+__contact__ = "Philip.Kershaw@stfc.ac.uk"
+__revision__ = '$Id$'
+
+__import__('pkg_resources').declare_namespace(__name__)
\ No newline at end of file
diff --git a/libs/ndg/httpsclient/__init__.py b/libs/ndg/httpsclient/__init__.py
new file mode 100644
index 0000000..83f2087
--- /dev/null
+++ b/libs/ndg/httpsclient/__init__.py
@@ -0,0 +1,9 @@
+"""ndg_httpsclient - PyOpenSSL utility to make a httplib-like interface suitable
+for use with urllib2
+"""
+__author__ = "P J Kershaw (STFC) and Richard Wilkinson (Tessella)"
+__date__ = "09/12/11"
+__copyright__ = "(C) 2011 Science and Technology Facilities Council"
+__license__ = "BSD - see LICENSE file in top-level directory"
+__contact__ = "Philip.Kershaw@stfc.ac.uk"
+__revision__ = '$Id$'
diff --git a/libs/ndg/httpsclient/https.py b/libs/ndg/httpsclient/https.py
new file mode 100644
index 0000000..41ad1c6
--- /dev/null
+++ b/libs/ndg/httpsclient/https.py
@@ -0,0 +1,131 @@
+"""ndg_httpsclient HTTPS module containing PyOpenSSL implementation of
+httplib.HTTPSConnection
+
+PyOpenSSL utility to make a httplib-like interface suitable for use with
+urllib2
+"""
+__author__ = "P J Kershaw (STFC)"
+__date__ = "09/12/11"
+__copyright__ = "(C) 2012 Science and Technology Facilities Council"
+__license__ = "BSD - see LICENSE file in top-level directory"
+__contact__ = "Philip.Kershaw@stfc.ac.uk"
+__revision__ = '$Id$'
+import logging
+import socket
+import sys
+
+if sys.version_info[0] > 2:
+ from http.client import HTTPS_PORT
+ from http.client import HTTPConnection
+
+ from urllib.request import AbstractHTTPHandler
+else:
+ from httplib import HTTPS_PORT
+ from httplib import HTTPConnection
+
+ from urllib2 import AbstractHTTPHandler
+
+
+from OpenSSL import SSL
+
+from ndg.httpsclient.ssl_socket import SSLSocket
+
+log = logging.getLogger(__name__)
+
+
+class HTTPSConnection(HTTPConnection):
+ """This class allows communication via SSL using PyOpenSSL.
+ It is based on httplib.HTTPSConnection, modified to use PyOpenSSL.
+
+ Note: This uses the constructor inherited from HTTPConnection to allow it to
+ be used with httplib and HTTPSContextHandler. To use the class directly with
+ an SSL context set ssl_context after construction.
+
+ @cvar default_port: default port for this class (443)
+ @type default_port: int
+ @cvar default_ssl_method: default SSL method used if no SSL context is
+ explicitly set - defaults to version 2/3.
+ @type default_ssl_method: int
+ """
+ default_port = HTTPS_PORT
+ default_ssl_method = SSL.SSLv23_METHOD
+
+ def __init__(self, host, port=None, strict=None,
+ timeout=socket._GLOBAL_DEFAULT_TIMEOUT, ssl_context=None):
+ HTTPConnection.__init__(self, host, port, strict, timeout)
+ if not hasattr(self, 'ssl_context'):
+ self.ssl_context = None
+
+ if ssl_context is not None:
+ if not isinstance(ssl_context, SSL.Context):
+ raise TypeError('Expecting OpenSSL.SSL.Context type for "'
+ 'ssl_context" keyword; got %r instead' %
+ ssl_context)
+
+ self.ssl_context = ssl_context
+
+ def connect(self):
+ """Create SSL socket and connect to peer
+ """
+ if getattr(self, 'ssl_context', None):
+ if not isinstance(self.ssl_context, SSL.Context):
+ raise TypeError('Expecting OpenSSL.SSL.Context type for "'
+ 'ssl_context" attribute; got %r instead' %
+ self.ssl_context)
+ ssl_context = self.ssl_context
+ else:
+ ssl_context = SSL.Context(self.__class__.default_ssl_method)
+
+ sock = socket.create_connection((self.host, self.port), self.timeout)
+
+ # Tunnel if using a proxy - ONLY available for Python 2.6.2 and above
+ if getattr(self, '_tunnel_host', None):
+ self.sock = sock
+ self._tunnel()
+
+ self.sock = SSLSocket(ssl_context, sock)
+
+ # Go to client mode.
+ self.sock.set_connect_state()
+
+ def close(self):
+ """Close socket and shut down SSL connection"""
+ if hasattr(self.sock, "close"):
+ self.sock.close()
+
+
+class HTTPSContextHandler(AbstractHTTPHandler):
+ '''HTTPS handler that allows a SSL context to be set for the SSL
+ connections.
+ '''
+ https_request = AbstractHTTPHandler.do_request_
+
+ def __init__(self, ssl_context, debuglevel=0):
+ """
+ @param ssl_context:SSL context
+ @type ssl_context: OpenSSL.SSL.Context
+ @param debuglevel: debug level for HTTPSHandler
+ @type debuglevel: int
+ """
+ AbstractHTTPHandler.__init__(self, debuglevel)
+
+ if ssl_context is not None:
+ if not isinstance(ssl_context, SSL.Context):
+ raise TypeError('Expecting OpenSSL.SSL.Context type for "'
+ 'ssl_context" keyword; got %r instead' %
+ ssl_context)
+ self.ssl_context = ssl_context
+ else:
+ self.ssl_context = SSL.Context(SSL.TLSv1_METHOD)
+
+ def https_open(self, req):
+ """Opens HTTPS request
+ @param req: HTTP request
+ @return: HTTP Response object
+ """
+ # Make a custom class extending HTTPSConnection, with the SSL context
+ # set as a class variable so that it is available to the connect method.
+ customHTTPSContextConnection = type('CustomHTTPSContextConnection',
+ (HTTPSConnection, object),
+ {'ssl_context': self.ssl_context})
+ return self.do_open(customHTTPSContextConnection, req)
diff --git a/libs/ndg/httpsclient/ssl_context_util.py b/libs/ndg/httpsclient/ssl_context_util.py
new file mode 100644
index 0000000..0ed1d32
--- /dev/null
+++ b/libs/ndg/httpsclient/ssl_context_util.py
@@ -0,0 +1,98 @@
+"""ndg_httpsclient SSL Context utilities module containing convenience routines
+for setting SSL context configuration.
+
+"""
+__author__ = "P J Kershaw (STFC)"
+__date__ = "09/12/11"
+__copyright__ = "(C) 2012 Science and Technology Facilities Council"
+__license__ = "BSD - see LICENSE file in top-level directory"
+__contact__ = "Philip.Kershaw@stfc.ac.uk"
+__revision__ = '$Id$'
+import sys
+
+if sys.version_info[0] > 2:
+ import urllib.parse as urlparse_
+else:
+ import urlparse as urlparse_
+
+from OpenSSL import SSL
+
+from ndg.httpsclient.ssl_peer_verification import ServerSSLCertVerification
+
+
+class SSlContextConfig(object):
+ """
+ Holds configuration options for creating a SSL context. This is used as a
+ template to create the contexts with specific verification callbacks.
+ """
+ def __init__(self, key_file=None, cert_file=None, pem_file=None, ca_dir=None,
+ verify_peer=False):
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.pem_file = pem_file
+ self.ca_dir = ca_dir
+ self.verify_peer = verify_peer
+
+
+def make_ssl_context_from_config(ssl_config=False, url=None):
+ return make_ssl_context(ssl_config.key_file, ssl_config.cert_file,
+ ssl_config.pem_file, ssl_config.ca_dir,
+ ssl_config.verify_peer, url)
+
+
+def make_ssl_context(key_file=None, cert_file=None, pem_file=None, ca_dir=None,
+ verify_peer=False, url=None, method=SSL.TLSv1_METHOD,
+ key_file_passphrase=None):
+ """
+ Creates SSL context containing certificate and key file locations.
+ """
+ ssl_context = SSL.Context(method)
+
+ # Key file defaults to certificate file if present.
+ if cert_file:
+ ssl_context.use_certificate_file(cert_file)
+
+ if key_file_passphrase:
+ passwd_cb = lambda max_passphrase_len, set_prompt, userdata: \
+ key_file_passphrase
+ ssl_context.set_passwd_cb(passwd_cb)
+
+ if key_file:
+ ssl_context.use_privatekey_file(key_file)
+ elif cert_file:
+ ssl_context.use_privatekey_file(cert_file)
+
+ if pem_file or ca_dir:
+ ssl_context.load_verify_locations(pem_file, ca_dir)
+
+ def _callback(conn, x509, errnum, errdepth, preverify_ok):
+ """Default certification verification callback.
+ Performs no checks and returns the status passed in.
+ """
+ return preverify_ok
+
+ verify_callback = _callback
+
+ if verify_peer:
+ ssl_context.set_verify_depth(9)
+ if url:
+ set_peer_verification_for_url_hostname(ssl_context, url)
+ else:
+ ssl_context.set_verify(SSL.VERIFY_PEER, verify_callback)
+ else:
+ ssl_context.set_verify(SSL.VERIFY_NONE, verify_callback)
+
+ return ssl_context
+
+
+def set_peer_verification_for_url_hostname(ssl_context, url,
+ if_verify_enabled=False):
+ '''Convenience routine to set peer verification callback based on
+ ServerSSLCertVerification class'''
+ if not if_verify_enabled or (ssl_context.get_verify_mode() & SSL.VERIFY_PEER):
+ urlObj = urlparse_.urlparse(url)
+ hostname = urlObj.hostname
+ server_ssl_cert_verif = ServerSSLCertVerification(hostname=hostname)
+ verify_callback_ = server_ssl_cert_verif.get_verify_server_cert_func()
+ ssl_context.set_verify(SSL.VERIFY_PEER, verify_callback_)
+
diff --git a/libs/ndg/httpsclient/ssl_peer_verification.py b/libs/ndg/httpsclient/ssl_peer_verification.py
new file mode 100644
index 0000000..5e82dae
--- /dev/null
+++ b/libs/ndg/httpsclient/ssl_peer_verification.py
@@ -0,0 +1,237 @@
+"""ndg_httpsclient - module containing SSL peer verification class.
+"""
+__author__ = "P J Kershaw (STFC)"
+__date__ = "09/12/11"
+__copyright__ = "(C) 2012 Science and Technology Facilities Council"
+__license__ = "BSD - see LICENSE file in top-level directory"
+__contact__ = "Philip.Kershaw@stfc.ac.uk"
+__revision__ = '$Id$'
+import re
+import logging
+log = logging.getLogger(__name__)
+
+try:
+ from ndg.httpsclient.subj_alt_name import SubjectAltName
+ from pyasn1.codec.der import decoder as der_decoder
+ SUBJ_ALT_NAME_SUPPORT = True
+
+except ImportError as e:
+ SUBJ_ALT_NAME_SUPPORT = False
+ SUBJ_ALT_NAME_SUPPORT_MSG = (
+ 'SubjectAltName support is disabled - check pyasn1 package '
+ 'installation to enable'
+ )
+ import warnings
+ warnings.warn(SUBJ_ALT_NAME_SUPPORT_MSG)
+
+
+class ServerSSLCertVerification(object):
+ """Check server identity. If hostname doesn't match, allow match of
+ host's Distinguished Name against server DN setting"""
+ DN_LUT = {
+ 'commonName': 'CN',
+ 'organisationalUnitName': 'OU',
+ 'organisation': 'O',
+ 'countryName': 'C',
+ 'emailAddress': 'EMAILADDRESS',
+ 'localityName': 'L',
+ 'stateOrProvinceName': 'ST',
+ 'streetAddress': 'STREET',
+ 'domainComponent': 'DC',
+ 'userid': 'UID'
+ }
+ SUBJ_ALT_NAME_EXT_NAME = 'subjectAltName'
+ PARSER_RE_STR = '/(%s)=' % '|'.join(list(DN_LUT.keys()) + list(DN_LUT.values()))
+ PARSER_RE = re.compile(PARSER_RE_STR)
+
+ __slots__ = ('__hostname', '__certDN', '__subj_alt_name_match')
+
+ def __init__(self, certDN=None, hostname=None, subj_alt_name_match=True):
+ """Override parent class __init__ to enable setting of certDN
+ setting
+
+ @type certDN: string
+ @param certDN: Set the expected Distinguished Name of the
+ server to avoid errors matching hostnames. This is useful
+ where the hostname is not fully qualified
+ @type hostname: string
+ @param hostname: hostname to match against peer certificate
+ subjectAltNames or subject common name
+ @type subj_alt_name_match: bool
+ @param subj_alt_name_match: flag to enable/disable matching of hostname
+ against peer certificate subjectAltNames. Nb. A setting of True will
+ be ignored if the pyasn1 package is not installed
+ """
+ self.__certDN = None
+ self.__hostname = None
+
+ if certDN is not None:
+ self.certDN = certDN
+
+ if hostname is not None:
+ self.hostname = hostname
+
+ if subj_alt_name_match:
+ if not SUBJ_ALT_NAME_SUPPORT:
+ log.warning('Overriding "subj_alt_name_match" keyword setting: '
+ 'peer verification with subjectAltNames is disabled')
+ self.__subj_alt_name_match = False
+ else:
+ self.__subj_alt_name_match = True
+ else:
+ log.debug('Disabling peer verification with subject '
+ 'subjectAltNames!')
+ self.__subj_alt_name_match = False
+
+ def __call__(self, connection, peerCert, errorStatus, errorDepth,
+ preverifyOK):
+ """Verify server certificate
+
+ @type connection: OpenSSL.SSL.Connection
+ @param connection: SSL connection object
+ @type peerCert: basestring
+ @param peerCert: server host certificate as OpenSSL.crypto.X509
+ instance
+ @type errorStatus: int
+ @param errorStatus: error status passed from caller. This is the value
+ returned by the OpenSSL C function X509_STORE_CTX_get_error(). Look-up
+ x509_vfy.h in the OpenSSL source to get the meanings of the different
+ codes. PyOpenSSL doesn't help you!
+ @type errorDepth: int
+ @param errorDepth: a non-negative integer representing where in the
+ certificate chain the error occurred. If it is zero it occured in the
+ end entity certificate, one if it is the certificate which signed the
+ end entity certificate and so on.
+
+ @type preverifyOK: int
+ @param preverifyOK: the error status - 0 = Error, 1 = OK of the current
+ SSL context irrespective of any verification checks done here. If this
+ function yields an OK status, it should enforce the preverifyOK value
+ so that any error set upstream overrides and is honoured.
+ @rtype: int
+ @return: status code - 0/False = Error, 1/True = OK
+ """
+ if peerCert.has_expired():
+ # Any expired certificate in the chain should result in an error
+ log.error('Certificate %r in peer certificate chain has expired',
+ peerCert.get_subject())
+
+ return False
+
+ elif errorDepth == 0:
+ # Only interested in DN of last certificate in the chain - this must
+ # match the expected Server DN setting
+ peerCertSubj = peerCert.get_subject()
+ peerCertDN = peerCertSubj.get_components()
+ peerCertDN.sort()
+
+ if self.certDN is None:
+ # Check hostname against peer certificate CN field instead:
+ if self.hostname is None:
+ log.error('No "hostname" or "certDN" set to check peer '
+ 'certificate against')
+ return False
+
+ # Check for subject alternative names
+ if self.__subj_alt_name_match:
+ dns_names = self._get_subj_alt_name(peerCert)
+ if self.hostname in dns_names:
+ return preverifyOK
+
+ # If no subjectAltNames, default to check of subject Common Name
+ if peerCertSubj.commonName == self.hostname:
+ return preverifyOK
+ else:
+ log.error('Peer certificate CN %r doesn\'t match the '
+ 'expected CN %r', peerCertSubj.commonName,
+ self.hostname)
+ return False
+ else:
+ if peerCertDN == self.certDN:
+ return preverifyOK
+ else:
+ log.error('Peer certificate DN %r doesn\'t match the '
+ 'expected DN %r', peerCertDN, self.certDN)
+ return False
+ else:
+ return preverifyOK
+
+ def get_verify_server_cert_func(self):
+ def verify_server_cert(connection, peerCert, errorStatus, errorDepth,
+ preverifyOK):
+ return self.__call__(connection, peerCert, errorStatus,
+ errorDepth, preverifyOK)
+
+ return verify_server_cert
+
+ @classmethod
+ def _get_subj_alt_name(cls, peer_cert):
+ '''Extract subjectAltName DNS name settings from certificate extensions
+
+ @param peer_cert: peer certificate in SSL connection. subjectAltName
+ settings if any will be extracted from this
+ @type peer_cert: OpenSSL.crypto.X509
+ '''
+ # Search through extensions
+ dns_name = []
+ general_names = SubjectAltName()
+ for i in range(peer_cert.get_extension_count()):
+ ext = peer_cert.get_extension(i)
+ ext_name = ext.get_short_name()
+ if ext_name == cls.SUBJ_ALT_NAME_EXT_NAME:
+ # PyOpenSSL returns extension data in ASN.1 encoded form
+ ext_dat = ext.get_data()
+ decoded_dat = der_decoder.decode(ext_dat,
+ asn1Spec=general_names)
+
+ for name in decoded_dat:
+ if isinstance(name, SubjectAltName):
+ for entry in range(len(name)):
+ component = name.getComponentByPosition(entry)
+ dns_name.append(str(component.getComponent()))
+
+ return dns_name
+
+ def _getCertDN(self):
+ return self.__certDN
+
+ def _setCertDN(self, val):
+ if isinstance(val, str):
+ # Allow for quoted DN
+ certDN = val.strip('"')
+
+ dnFields = self.__class__.PARSER_RE.split(certDN)
+ if len(dnFields) < 2:
+ raise TypeError('Error parsing DN string: "%s"' % certDN)
+
+ self.__certDN = list(zip(dnFields[1::2], dnFields[2::2]))
+ self.__certDN.sort()
+
+        elif isinstance(val, list):
+            for i in val:
+                if not len(i) == 2:
+                    raise TypeError('Expecting a list of two element '
+                                    '(DN field, DN field value) pairs for '
+                                    'the "certDN" attribute')
+ self.__certDN = val
+ else:
+ raise TypeError('Expecting list or string type for "certDN" '
+ 'attribute')
+
+ certDN = property(fget=_getCertDN,
+ fset=_setCertDN,
+ doc="Distinguished Name for Server Certificate")
+
+ # Get/Set Property methods
+ def _getHostname(self):
+ return self.__hostname
+
+ def _setHostname(self, val):
+ if not isinstance(val, str):
+ raise TypeError("Expecting string type for hostname "
+ "attribute")
+ self.__hostname = val
+
+ hostname = property(fget=_getHostname,
+ fset=_setHostname,
+ doc="hostname of server")
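+
+
+# Illustrative sketch (not part of the upstream library): wiring this class
+# into a pyOpenSSL context as its verification callback. The hostname below
+# is a placeholder.
+def _example_set_verify_callback(hostname='localhost'):
+    from OpenSSL import SSL
+
+    verification = ServerSSLCertVerification(hostname=hostname)
+    ctx = SSL.Context(SSL.TLSv1_METHOD)
+    ctx.set_verify(SSL.VERIFY_PEER,
+                   verification.get_verify_server_cert_func())
+    return ctx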
diff --git a/libs/ndg/httpsclient/ssl_socket.py b/libs/ndg/httpsclient/ssl_socket.py
new file mode 100644
index 0000000..7780314
--- /dev/null
+++ b/libs/ndg/httpsclient/ssl_socket.py
@@ -0,0 +1,282 @@
+"""PyOpenSSL utilities including HTTPSSocket class which wraps PyOpenSSL
+SSL connection into a httplib-like interface suitable for use with urllib2
+
+"""
+__author__ = "P J Kershaw"
+__date__ = "21/12/10"
+__copyright__ = "(C) 2012 Science and Technology Facilities Council"
+__license__ = "BSD - see LICENSE file in top-level directory"
+__contact__ = "Philip.Kershaw@stfc.ac.uk"
+__revision__ = '$Id$'
+
+from datetime import datetime
+import logging
+import socket
+from io import BytesIO
+
+from OpenSSL import SSL
+
+log = logging.getLogger(__name__)
+
+
+class SSLSocket(object):
+ """SSL Socket class wraps pyOpenSSL's SSL.Connection class implementing
+ the makefile method so that it is compatible with the standard socket
+ interface and usable with httplib.
+
+ @cvar default_buf_size: default buffer size for recv operations in the
+ makefile method
+ @type default_buf_size: int
+ """
+ default_buf_size = 8192
+
+ def __init__(self, ctx, sock=None):
+ """Create SSL socket object
+
+ @param ctx: SSL context
+ @type ctx: OpenSSL.SSL.Context
+ @param sock: underlying socket object
+ @type sock: socket.socket
+ """
+ if sock is not None:
+ self.socket = sock
+ else:
+ self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+
+ self.__ssl_conn = SSL.Connection(ctx, self.socket)
+ self.buf_size = self.__class__.default_buf_size
+ self._makefile_refs = 0
+
+ def __del__(self):
+ """Close underlying socket when this object goes out of scope
+ """
+ self.close()
+
+ @property
+ def buf_size(self):
+ """Buffer size for makefile method recv() operations"""
+ return self.__buf_size
+
+ @buf_size.setter
+ def buf_size(self, value):
+ """Buffer size for makefile method recv() operations"""
+ if not isinstance(value, int):
+ raise TypeError('Expecting int type for "buf_size"; '
+ 'got %r instead' % type(value))
+ self.__buf_size = value
+
+ def close(self):
+ """Shutdown the SSL connection and call the close method of the
+ underlying socket"""
+ if self._makefile_refs < 1:
+ try:
+ self.__ssl_conn.shutdown()
+ except (SSL.Error, SSL.SysCallError):
+ # Make errors on shutdown non-fatal
+ pass
+ else:
+ self._makefile_refs -= 1
+
+ def set_shutdown(self, mode):
+ """Set the shutdown state of the Connection.
+ @param mode: bit vector of either or both of SENT_SHUTDOWN and
+ RECEIVED_SHUTDOWN
+ """
+ self.__ssl_conn.set_shutdown(mode)
+
+ def get_shutdown(self):
+ """Get the shutdown state of the Connection.
+ @return: bit vector of either or both of SENT_SHUTDOWN and
+ RECEIVED_SHUTDOWN
+ """
+ return self.__ssl_conn.get_shutdown()
+
+ def bind(self, addr):
+ """bind to the given address - calls method of the underlying socket
+ @param addr: address/port number tuple
+ @type addr: tuple"""
+ self.__ssl_conn.bind(addr)
+
+ def listen(self, backlog):
+ """Listen for connections made to the socket.
+
+ @param backlog: specifies the maximum number of queued connections and
+ should be at least 1; the maximum value is system-dependent (usually 5).
+        @type backlog: int
+ """
+ self.__ssl_conn.listen(backlog)
+
+ def set_accept_state(self):
+ """Set the connection to work in server mode. The handshake will be
+ handled automatically by read/write"""
+ self.__ssl_conn.set_accept_state()
+
+ def accept(self):
+ """Accept an SSL connection.
+
+ @return: pair (ssl, addr) where ssl is a new SSL connection object and
+ addr is the address bound to the other end of the SSL connection.
+ @rtype: tuple
+ """
+ return self.__ssl_conn.accept()
+
+ def set_connect_state(self):
+ """Set the connection to work in client mode. The handshake will be
+ handled automatically by read/write"""
+ self.__ssl_conn.set_connect_state()
+
+ def connect(self, addr):
+ """Call the connect method of the underlying socket and set up SSL on
+ the socket, using the Context object supplied to this Connection object
+ at creation.
+
+ @param addr: address/port number pair
+ @type addr: tuple
+ """
+ self.__ssl_conn.connect(addr)
+
+ def shutdown(self, how):
+ """Send the shutdown message to the Connection.
+
+ @param how: for socket.socket this flag determines whether read, write
+ or both type operations are supported. OpenSSL.SSL.Connection doesn't
+ support this so this parameter is IGNORED
+ @return: true if the shutdown message exchange is completed and false
+ otherwise (in which case you call recv() or send() when the connection
+        becomes readable/writeable).
+ @rtype: bool
+ """
+ return self.__ssl_conn.shutdown()
+
+ def renegotiate(self):
+ """Renegotiate this connection's SSL parameters."""
+ return self.__ssl_conn.renegotiate()
+
+ def pending(self):
+ """@return: numbers of bytes that can be safely read from the SSL
+ buffer.
+ @rtype: int
+ """
+ return self.__ssl_conn.pending()
+
+ def send(self, data, *flags_arg):
+ """Send data to the socket. Nb. The optional flags argument is ignored.
+ - retained for compatibility with socket.socket interface
+
+ @param data: data to send down the socket
+ @type data: string
+ """
+ return self.__ssl_conn.send(data)
+
+ def sendall(self, data):
+ self.__ssl_conn.sendall(data)
+
+ def recv(self, size=default_buf_size):
+ """Receive data from the Connection.
+
+ @param size: The maximum amount of data to be received at once
+ @type size: int
+ @return: data received.
+ @rtype: string
+ """
+ return self.__ssl_conn.recv(size)
+
+ def setblocking(self, mode):
+ """Set this connection's underlying socket blocking _mode_.
+
+ @param mode: blocking mode
+ @type mode: int
+ """
+ self.__ssl_conn.setblocking(mode)
+
+ def fileno(self):
+ """
+ @return: file descriptor number for the underlying socket
+ @rtype: int
+ """
+ return self.__ssl_conn.fileno()
+
+    def getsockopt(self, *args):
+        """See socket.socket.getsockopt
+
+        @return: value of the given socket option
+        @rtype: int/string
+        """
+        return self.__ssl_conn.getsockopt(*args)
+
+    def setsockopt(self, *args):
+        """See socket.socket.setsockopt
+        """
+        return self.__ssl_conn.setsockopt(*args)
+
+ def state_string(self):
+ """Return the SSL state of this connection."""
+ return self.__ssl_conn.state_string()
+
+ def makefile(self, *args):
+ """Specific to Python socket API and required by httplib: convert
+ response into a file-like object. This implementation reads using recv
+ and copies the output into a StringIO buffer to simulate a file object
+ for consumption by httplib
+
+ Nb. Ignoring optional file open mode (StringIO is generic and will
+ open for read and write unless a string is passed to the constructor)
+ and buffer size - httplib set a zero buffer size which results in recv
+ reading nothing
+
+ @return: file object for data returned from socket
+ @rtype: cStringIO.StringO
+ """
+ self._makefile_refs += 1
+
+ # Optimisation
+ _buf_size = self.buf_size
+
+        i = 0
+ stream = BytesIO()
+ startTime = datetime.utcnow()
+ try:
+ dat = self.__ssl_conn.recv(_buf_size)
+ while dat:
+                i += 1
+ stream.write(dat)
+ dat = self.__ssl_conn.recv(_buf_size)
+
+ except (SSL.ZeroReturnError, SSL.SysCallError):
+ # Connection is closed - assuming here that all is well and full
+ # response has been received. httplib will catch an error in
+ # incomplete content since it checks the content-length header
+ # against the actual length of data received
+ pass
+
+ if log.getEffectiveLevel() <= logging.DEBUG:
+ log.debug("Socket.makefile %d recv calls completed in %s", i,
+ datetime.utcnow() - startTime)
+
+ # Make sure to rewind the buffer otherwise consumers of the content will
+ # read from the end of the buffer
+ stream.seek(0)
+
+ return stream
+
+ def getsockname(self):
+ """
+ @return: the socket's own address
+        @rtype: tuple
+ """
+ return self.__ssl_conn.getsockname()
+
+ def getpeername(self):
+ """
+ @return: remote address to which the socket is connected
+ """
+ return self.__ssl_conn.getpeername()
+
+ def get_context(self):
+ '''Retrieve the Context object associated with this Connection. '''
+ return self.__ssl_conn.get_context()
+
+ def get_peer_certificate(self):
+ '''Retrieve the other side's certificate (if any) '''
+ return self.__ssl_conn.get_peer_certificate()
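+
+
+# Illustrative sketch (not part of the upstream library): a minimal raw HTTPS
+# GET using SSLSocket. The host name below is a placeholder.
+def _example_https_get(host='www.example.com'):
+    context = SSL.Context(SSL.TLSv1_METHOD)
+    ssl_socket = SSLSocket(context)
+    ssl_socket.set_connect_state()
+    ssl_socket.connect((host, 443))
+    request = 'GET / HTTP/1.0\r\nHost: %s\r\n\r\n' % host
+    ssl_socket.send(request.encode('ascii'))
+    response = ssl_socket.makefile()
+    try:
+        return response.read()
+    finally:
+        ssl_socket.close()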
diff --git a/libs/ndg/httpsclient/subj_alt_name.py b/libs/ndg/httpsclient/subj_alt_name.py
new file mode 100644
index 0000000..b2c1918
--- /dev/null
+++ b/libs/ndg/httpsclient/subj_alt_name.py
@@ -0,0 +1,153 @@
+"""NDG HTTPS Client package
+
+Use pyasn1 to provide support for parsing ASN.1 formatted subjectAltName
+content for SSL peer verification. Code based on:
+
+http://stackoverflow.com/questions/5519958/how-do-i-parse-subjectaltname-extension-data-using-pyasn1
+"""
+__author__ = "P J Kershaw"
+__date__ = "01/02/12"
+__copyright__ = "(C) 2012 Science and Technology Facilities Council"
+__license__ = "BSD - see LICENSE file in top-level directory"
+__contact__ = "Philip.Kershaw@stfc.ac.uk"
+__revision__ = '$Id$'
+try:
+ from pyasn1.type import univ, constraint, char, namedtype, tag
+
+except ImportError as e:
+ import_error_msg = ('Error importing pyasn1, subjectAltName check for SSL '
+ 'peer verification will be disabled. Import error '
+ 'is: %s' % e)
+ import warnings
+ warnings.warn(import_error_msg)
+ class Pyasn1ImportError(ImportError):
+ "Raise for pyasn1 import error"
+ raise Pyasn1ImportError(import_error_msg)
+
+
+MAX = 64
+
+
+class DirectoryString(univ.Choice):
+ """ASN.1 Directory string class"""
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType(
+ 'teletexString', char.TeletexString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType(
+ 'printableString', char.PrintableString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType(
+ 'universalString', char.UniversalString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType(
+ 'utf8String', char.UTF8String().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType(
+ 'bmpString', char.BMPString().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ namedtype.NamedType(
+ 'ia5String', char.IA5String().subtype(
+ subtypeSpec=constraint.ValueSizeConstraint(1, MAX))),
+ )
+
+
+class AttributeValue(DirectoryString):
+ """ASN.1 Attribute value"""
+
+
+class AttributeType(univ.ObjectIdentifier):
+ """ASN.1 Attribute type"""
+
+
+class AttributeTypeAndValue(univ.Sequence):
+ """ASN.1 Attribute type and value class"""
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type', AttributeType()),
+ namedtype.NamedType('value', AttributeValue()),
+ )
+
+
+class RelativeDistinguishedName(univ.SetOf):
+    '''ASN.1 Relative distinguished name'''
+ componentType = AttributeTypeAndValue()
+
+class RDNSequence(univ.SequenceOf):
+ '''ASN.1 RDN sequence class'''
+ componentType = RelativeDistinguishedName()
+
+
+class Name(univ.Choice):
+ '''ASN.1 name class'''
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('', RDNSequence()),
+ )
+
+
+class Extension(univ.Sequence):
+ '''ASN.1 extension class'''
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('extnID', univ.ObjectIdentifier()),
+ namedtype.DefaultedNamedType('critical', univ.Boolean('False')),
+ namedtype.NamedType('extnValue', univ.OctetString()),
+ )
+
+
+class Extensions(univ.SequenceOf):
+ '''ASN.1 extensions class'''
+ componentType = Extension()
+ sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+
+class AnotherName(univ.Sequence):
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('type-id', univ.ObjectIdentifier()),
+ namedtype.NamedType('value', univ.Any().subtype(
+ explicitTag=tag.Tag(tag.tagClassContext,
+ tag.tagFormatSimple, 0)))
+ )
+
+
+class GeneralName(univ.Choice):
+ '''ASN.1 configuration for X.509 certificate subjectAltNames fields'''
+ componentType = namedtype.NamedTypes(
+ namedtype.NamedType('otherName', AnotherName().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext,
+ tag.tagFormatSimple, 0))),
+ namedtype.NamedType('rfc822Name', char.IA5String().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext,
+ tag.tagFormatSimple, 1))),
+ namedtype.NamedType('dNSName', char.IA5String().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext,
+ tag.tagFormatSimple, 2))),
+# namedtype.NamedType('x400Address', ORAddress().subtype(
+# implicitTag=tag.Tag(tag.tagClassContext,
+# tag.tagFormatSimple, 3))),
+ namedtype.NamedType('directoryName', Name().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext,
+ tag.tagFormatSimple, 4))),
+# namedtype.NamedType('ediPartyName', EDIPartyName().subtype(
+# implicitTag=tag.Tag(tag.tagClassContext,
+# tag.tagFormatSimple, 5))),
+ namedtype.NamedType('uniformResourceIdentifier', char.IA5String().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext,
+ tag.tagFormatSimple, 6))),
+ namedtype.NamedType('iPAddress', univ.OctetString().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext,
+ tag.tagFormatSimple, 7))),
+ namedtype.NamedType('registeredID', univ.ObjectIdentifier().subtype(
+ implicitTag=tag.Tag(tag.tagClassContext,
+ tag.tagFormatSimple, 8))),
+ )
+
+
+class GeneralNames(univ.SequenceOf):
+ '''Sequence of names for ASN.1 subjectAltNames settings'''
+ componentType = GeneralName()
+ sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX)
+
+
+class SubjectAltName(GeneralNames):
+ '''ASN.1 implementation for subjectAltNames support'''
+
+
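+# Illustrative sketch (not part of the upstream library): decoding the
+# DER-encoded data of an X.509 subjectAltName extension with the spec above,
+# mirroring ServerSSLCertVerification._get_subj_alt_name. ext_data stands in
+# for OpenSSL.crypto.X509Extension.get_data() output.
+def _example_decode_subj_alt_names(ext_data):
+    from pyasn1.codec.der import decoder as der_decoder
+
+    general_names, _ = der_decoder.decode(ext_data, asn1Spec=SubjectAltName())
+    return [str(general_names.getComponentByPosition(i).getComponent())
+            for i in range(len(general_names))]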
diff --git a/libs/ndg/httpsclient/urllib2_build_opener.py b/libs/ndg/httpsclient/urllib2_build_opener.py
new file mode 100644
index 0000000..55d8632
--- /dev/null
+++ b/libs/ndg/httpsclient/urllib2_build_opener.py
@@ -0,0 +1,78 @@
+"""urllib2 style build opener integrates with HTTPSConnection class from this
+package.
+"""
+__author__ = "P J Kershaw"
+__date__ = "21/12/10"
+__copyright__ = "(C) 2011 Science and Technology Facilities Council"
+__license__ = "BSD - see LICENSE file in top-level directory"
+__contact__ = "Philip.Kershaw@stfc.ac.uk"
+__revision__ = '$Id$'
+import logging
+import sys
+
+# Py 2 <=> 3 compatibility for class type checking
+if sys.version_info[0] > 2:
+ class_type_ = type
+ from urllib.request import (ProxyHandler, UnknownHandler,
+ HTTPDefaultErrorHandler, FTPHandler,
+ FileHandler, HTTPErrorProcessor,
+ HTTPHandler, OpenerDirector,
+ HTTPRedirectHandler)
+else:
+ import types
+ class_type_ = types.ClassType
+
+ from urllib2 import (ProxyHandler, UnknownHandler, HTTPDefaultErrorHandler,
+ FTPHandler, FileHandler, HTTPErrorProcessor,
+ HTTPHandler, OpenerDirector, HTTPRedirectHandler)
+
+from ndg.httpsclient.https import HTTPSContextHandler
+
+log = logging.getLogger(__name__)
+
+
+# Copied from urllib2 with modifications for ssl
+def build_opener(*handlers, **kw):
+ """Create an opener object from a list of handlers.
+
+ The opener will use several default handlers, including support
+ for HTTP and FTP.
+
+ If any of the handlers passed as arguments are subclasses of the
+ default handlers, the default handlers will not be used.
+ """
+ def isclass(obj):
+ return isinstance(obj, class_type_) or hasattr(obj, "__bases__")
+
+ opener = OpenerDirector()
+ default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
+ HTTPDefaultErrorHandler, HTTPRedirectHandler,
+ FTPHandler, FileHandler, HTTPErrorProcessor]
+ check_classes = list(default_classes)
+ check_classes.append(HTTPSContextHandler)
+ skip = []
+ for klass in check_classes:
+ for check in handlers:
+ if isclass(check):
+ if issubclass(check, klass):
+ skip.append(klass)
+ elif isinstance(check, klass):
+ skip.append(klass)
+
+ for klass in default_classes:
+ if klass not in skip:
+ opener.add_handler(klass())
+
+ # Pick up SSL context from keyword settings
+ ssl_context = kw.get('ssl_context')
+
+ # Add the HTTPS handler with ssl_context
+ if HTTPSContextHandler not in skip:
+ opener.add_handler(HTTPSContextHandler(ssl_context))
+
+ for h in handlers:
+ if isclass(h):
+ h = h()
+ opener.add_handler(h)
+
+ return opener
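+
+
+# Illustrative sketch (not part of the upstream library): building an opener
+# with an SSL context and fetching a URL. The URL is a placeholder and
+# make_ssl_context is assumed to come from ndg.httpsclient.ssl_context_util.
+def _example_open_url(url='https://www.example.com/'):
+    from ndg.httpsclient.ssl_context_util import make_ssl_context
+
+    ssl_context = make_ssl_context(verify_peer=False)
+    opener = build_opener(ssl_context=ssl_context)
+    return opener.open(url).read()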
diff --git a/libs/ndg/httpsclient/utils.py b/libs/ndg/httpsclient/utils.py
new file mode 100644
index 0000000..a2b0ed3
--- /dev/null
+++ b/libs/ndg/httpsclient/utils.py
@@ -0,0 +1,414 @@
+"""Utilities using NDG HTTPS Client, including a main module that can be used to
+fetch from a URL.
+"""
+__author__ = "R B Wilkinson"
+__date__ = "09/12/11"
+__copyright__ = "(C) 2011 Science and Technology Facilities Council"
+__license__ = "BSD - see LICENSE file in top-level directory"
+__contact__ = "Philip.Kershaw@stfc.ac.uk"
+__revision__ = '$Id$'
+
+import logging
+from optparse import OptionParser
+import os
+import sys
+
+if sys.version_info[0] > 2:
+ import http.cookiejar as cookiejar_
+ import http.client as http_client_
+ from urllib.request import Request as Request_
+ from urllib.request import HTTPHandler as HTTPHandler_
+ from urllib.request import HTTPCookieProcessor as HTTPCookieProcessor_
+ from urllib.request import HTTPBasicAuthHandler as HTTPBasicAuthHandler_
+ from urllib.request import HTTPPasswordMgrWithDefaultRealm as \
+ HTTPPasswordMgrWithDefaultRealm_
+ from urllib.request import ProxyHandler as ProxyHandler_
+ from urllib.error import HTTPError as HTTPError_
+ import urllib.parse as urlparse_
+else:
+ import cookielib as cookiejar_
+ import httplib as http_client_
+ from urllib2 import Request as Request_
+ from urllib2 import HTTPHandler as HTTPHandler_
+ from urllib2 import HTTPCookieProcessor as HTTPCookieProcessor_
+ from urllib2 import HTTPBasicAuthHandler as HTTPBasicAuthHandler_
+ from urllib2 import HTTPPasswordMgrWithDefaultRealm as \
+ HTTPPasswordMgrWithDefaultRealm_
+ from urllib2 import ProxyHandler as ProxyHandler_
+ from urllib2 import HTTPError as HTTPError_
+ import urlparse as urlparse_
+
+from ndg.httpsclient.urllib2_build_opener import build_opener
+from ndg.httpsclient.https import HTTPSContextHandler
+from ndg.httpsclient import ssl_context_util
+
+log = logging.getLogger(__name__)
+
+class AccumulatingHTTPCookieProcessor(HTTPCookieProcessor_):
+ """Cookie processor that adds new cookies (instead of replacing the existing
+ ones as HTTPCookieProcessor does)
+ """
+ def http_request(self, request):
+ """Processes cookies for a HTTP request.
+ @param request: request to process
+ @type request: urllib2.Request
+ @return: request
+ @rtype: urllib2.Request
+ """
+ COOKIE_HEADER_NAME = "Cookie"
+ tmp_request = Request_(request.get_full_url(), request.data, {},
+ request.origin_req_host,
+ request.unverifiable)
+ self.cookiejar.add_cookie_header(tmp_request)
+ # Combine existing and new cookies.
+ new_cookies = tmp_request.get_header(COOKIE_HEADER_NAME)
+ if new_cookies:
+ if request.has_header(COOKIE_HEADER_NAME):
+ # Merge new cookies with existing ones.
+ old_cookies = request.get_header(COOKIE_HEADER_NAME)
+ merged_cookies = '; '.join([old_cookies, new_cookies])
+ request.add_unredirected_header(COOKIE_HEADER_NAME,
+ merged_cookies)
+ else:
+ # No existing cookies so just set new ones.
+ request.add_unredirected_header(COOKIE_HEADER_NAME, new_cookies)
+ return request
+
+ # Process cookies for HTTPS in the same way.
+ https_request = http_request
+
+
+class URLFetchError(Exception):
+ """Error fetching content from URL"""
+
+
+def fetch_from_url(url, config, data=None, handlers=None):
+ """Returns data retrieved from a URL.
+ @param url: URL to attempt to open
+ @type url: basestring
+ @param config: SSL context configuration
+ @type config: Configuration
+    @return: data retrieved from URL or None
+ """
+ return_code, return_message, response = open_url(url, config, data=data,
+ handlers=handlers)
+ if return_code and return_code == http_client_.OK:
+ return_data = response.read()
+ response.close()
+ return return_data
+ else:
+ raise URLFetchError(return_message)
+
+def fetch_from_url_to_file(url, config, output_file, data=None, handlers=None):
+ """Writes data retrieved from a URL to a file.
+ @param url: URL to attempt to open
+ @type url: basestring
+ @param config: SSL context configuration
+ @type config: Configuration
+ @param output_file: output file
+ @type output_file: basestring
+ @return: tuple (
+ returned HTTP status code or 0 if an error occurred
+ returned message
+ boolean indicating whether access was successful)
+ """
+ return_code, return_message, response = open_url(url, config, data=data,
+ handlers=handlers)
+ if return_code == http_client_.OK:
+ return_data = response.read()
+ response.close()
+ outfile = open(output_file, "w")
+ outfile.write(return_data)
+ outfile.close()
+
+ return return_code, return_message, return_code == http_client_.OK
+
+
+def fetch_stream_from_url(url, config, data=None, handlers=None):
+ """Returns data retrieved from a URL.
+ @param url: URL to attempt to open
+ @type url: basestring
+ @param config: SSL context configuration
+ @type config: Configuration
+ @param data: HTTP POST data
+ @type data: str
+ @param handlers: list of custom urllib2 handlers to add to the request
+ @type handlers: iterable
+ @return: data retrieved from URL or None
+ @rtype: file derived type
+ """
+ return_code, return_message, response = open_url(url, config, data=data,
+ handlers=handlers)
+ if return_code and return_code == http_client_.OK:
+ return response
+ else:
+ raise URLFetchError(return_message)
+
+
+def open_url(url, config, data=None, handlers=None):
+ """Attempts to open a connection to a specified URL.
+ @param url: URL to attempt to open
+ @param config: SSL context configuration
+ @type config: Configuration
+ @param data: HTTP POST data
+ @type data: str
+ @param handlers: list of custom urllib2 handlers to add to the request
+ @type handlers: iterable
+ @return: tuple (
+ returned HTTP status code or 0 if an error occurred
+ returned message or error description
+ response object)
+ """
+ debuglevel = 1 if config.debug else 0
+
+ # Set up handlers for URL opener.
+ if config.cookie:
+ cj = config.cookie
+ else:
+ cj = cookiejar_.CookieJar()
+
+ # Use a cookie processor that accumulates cookies when redirects occur so
+ # that an application can redirect for authentication and retain both any
+ # cookies for the application and the security system (c.f.,
+ # urllib2.HTTPCookieProcessor which replaces cookies).
+ cookie_handler = AccumulatingHTTPCookieProcessor(cj)
+
+ if not handlers:
+ handlers = []
+
+ handlers.append(cookie_handler)
+
+ if config.debug:
+ http_handler = HTTPHandler_(debuglevel=debuglevel)
+ https_handler = HTTPSContextHandler(config.ssl_context,
+ debuglevel=debuglevel)
+ handlers.extend([http_handler, https_handler])
+
+ if config.http_basicauth:
+ # currently only supports http basic auth
+ auth_handler = HTTPBasicAuthHandler_(HTTPPasswordMgrWithDefaultRealm_())
+ auth_handler.add_password(realm=None, uri=url,
+                                  user=config.http_basicauth[0],
+                                  passwd=config.http_basicauth[1])
+ handlers.append(auth_handler)
+
+
+ # Explicitly remove proxy handling if the host is one listed in the value of
+ # the no_proxy environment variable because urllib2 does use proxy settings
+ # set via http_proxy and https_proxy, but does not take the no_proxy value
+ # into account.
+ if not _should_use_proxy(url, config.no_proxy):
+ handlers.append(ProxyHandler_({}))
+ log.debug("Not using proxy")
+ elif config.proxies:
+ handlers.append(ProxyHandler_(config.proxies))
+ log.debug("Configuring proxies: %s" % config.proxies)
+
+ opener = build_opener(*handlers, ssl_context=config.ssl_context)
+
+ headers = config.headers
+ if headers is None:
+ headers = {}
+
+ request = Request_(url, data, headers)
+
+ # Open the URL and check the response.
+ return_code = 0
+ return_message = ''
+ response = None
+
+ try:
+ response = opener.open(request)
+ return_message = response.msg
+ return_code = response.code
+ if log.isEnabledFor(logging.DEBUG):
+ for index, cookie in enumerate(cj):
+ log.debug("%s : %s", index, cookie)
+
+ except HTTPError_ as exc:
+ return_code = exc.code
+ return_message = "Error: %s" % exc.msg
+ if log.isEnabledFor(logging.DEBUG):
+ log.debug("%s %s", exc.code, exc.msg)
+
+ except Exception as exc:
+ return_message = "Error: %s" % exc.__str__()
+ if log.isEnabledFor(logging.DEBUG):
+ import traceback
+ log.debug(traceback.format_exc())
+
+ return (return_code, return_message, response)
+
+
+def _should_use_proxy(url, no_proxy=None):
+ """Determines whether a proxy should be used to open a connection to the
+ specified URL, based on the value of the no_proxy environment variable.
+ @param url: URL
+ @type url: basestring or urllib2.Request
+ """
+ if no_proxy is None:
+ no_proxy_effective = os.environ.get('no_proxy', '')
+ else:
+ no_proxy_effective = no_proxy
+
+ urlObj = urlparse_.urlparse(_url_as_string(url))
+ for np in [h.strip() for h in no_proxy_effective.split(',')]:
+ if urlObj.hostname == np:
+ return False
+
+ return True
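+
+# Illustrative example (not part of the upstream library): with no_proxy set
+# to 'localhost,internal.example.org', a request to http://localhost:8080/
+# bypasses the proxy while one to http://www.example.com/ does not:
+#
+#     _should_use_proxy('http://localhost:8080/',
+#                       no_proxy='localhost,internal.example.org')    # False
+#     _should_use_proxy('http://www.example.com/',
+#                       no_proxy='localhost,internal.example.org')    # True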
+
+def _url_as_string(url):
+ """Returns the URL string from a URL value that is either a string or
+    urllib2.Request.
+ @param url: URL
+ @type url: basestring or urllib2.Request
+ @return: URL string
+ @rtype: basestring
+ """
+ if isinstance(url, Request_):
+ return url.get_full_url()
+ elif isinstance(url, str):
+ return url
+ else:
+ raise TypeError("Expected type %r or %r" %
+ (str, Request_))
+
+
+class Configuration(object):
+ """Connection configuration.
+ """
+ def __init__(self, ssl_context, debug=False, proxies=None, no_proxy=None,
+ cookie=None, http_basicauth=None, headers=None):
+ """
+ @param ssl_context: SSL context to use with this configuration
+ @type ssl_context: OpenSSL.SSL.Context
+ @param debug: if True, output debugging information
+ @type debug: bool
+        @param proxies: proxies to use for the request
+ @type proxies: dict with basestring keys and values
+ @param no_proxy: hosts for which a proxy should not be used
+ @type no_proxy: basestring
+ @param cookie: cookies to set for request
+ @type cookie: cookielib.CookieJar (python 3 - http.cookiejar)
+ @param http_basicauth: http authentication, or None
+ @type http_basicauth: tuple of (username,password)
+ @param headers: http headers
+ @type headers: dict
+ """
+ self.ssl_context = ssl_context
+ self.debug = debug
+ self.proxies = proxies
+ self.no_proxy = no_proxy
+ self.cookie = cookie
+ self.http_basicauth = http_basicauth
+ self.headers = headers
+
+
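+# Illustrative sketch (not part of the upstream library): typical use of
+# Configuration with fetch_from_url. The URL is a placeholder, peer
+# verification is disabled for brevity, and make_ssl_context is assumed to
+# come from ndg.httpsclient.ssl_context_util.
+def _example_fetch(url='https://www.example.com/'):
+    from ndg.httpsclient.ssl_context_util import make_ssl_context
+
+    ssl_context = make_ssl_context(verify_peer=False)
+    config = Configuration(ssl_context, debug=False)
+    return fetch_from_url(url, config)
+
+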
+def main():
+ '''Utility to fetch data using HTTP or HTTPS GET from a specified URL.
+ '''
+ parser = OptionParser(usage="%prog [options] url")
+ parser.add_option("-c", "--certificate", dest="cert_file", metavar="FILE",
+ default=os.path.expanduser("~/credentials.pem"),
+ help="Certificate file - defaults to $HOME/credentials.pem")
+ parser.add_option("-k", "--private-key", dest="key_file", metavar="FILE",
+ default=None,
+ help="Private key file - defaults to the certificate file")
+ parser.add_option("-t", "--ca-certificate-dir", dest="ca_dir",
+ metavar="PATH",
+ default=None,
+ help="Trusted CA certificate file directory")
+ parser.add_option("-d", "--debug", action="store_true", dest="debug",
+ default=False,
+ help="Print debug information.")
+ parser.add_option("-p", "--post-data-file", dest="data_file",
+ metavar="FILE", default=None,
+ help="POST data file")
+ parser.add_option("-f", "--fetch", dest="output_file", metavar="FILE",
+ default=None, help="Output file")
+ parser.add_option("-n", "--no-verify-peer", action="store_true",
+ dest="no_verify_peer", default=False,
+ help="Skip verification of peer certificate.")
+ parser.add_option("-a", "--basicauth", dest="basicauth",
+ metavar="USER:PASSWD",
+ default=None,
+ help="HTTP authentication credentials")
+ parser.add_option("--header", action="append", dest="headers",
+ metavar="HEADER: VALUE",
+ help="Add HTTP header to request")
+ (options, args) = parser.parse_args()
+ if len(args) != 1:
+ parser.error("Incorrect number of arguments")
+
+ url = args[0]
+
+ if options.debug:
+ logging.getLogger().setLevel(logging.DEBUG)
+
+ if options.key_file and os.path.exists(options.key_file):
+ key_file = options.key_file
+ else:
+ key_file = None
+
+ if options.cert_file and os.path.exists(options.cert_file):
+ cert_file = options.cert_file
+ else:
+ cert_file = None
+
+ if options.ca_dir and os.path.exists(options.ca_dir):
+ ca_dir = options.ca_dir
+ else:
+ ca_dir = None
+
+ verify_peer = not options.no_verify_peer
+
+ if options.data_file and os.path.exists(options.data_file):
+ data_file = open(options.data_file)
+ data = data_file.read()
+ data_file.close()
+ else:
+ data = None
+
+ if options.basicauth:
+ http_basicauth = options.basicauth.split(':', 1)
+ else:
+ http_basicauth = None
+
+ headers = {}
+ if options.headers:
+ for h in options.headers:
+ key, val = h.split(':', 1)
+ headers[key.strip()] = val.lstrip()
+
+ # If a private key file is not specified, the key is assumed to be stored in
+ # the certificate file.
+ ssl_context = ssl_context_util.make_ssl_context(key_file,
+ cert_file,
+ None,
+ ca_dir,
+ verify_peer,
+ url)
+
+ config = Configuration(ssl_context,
+ options.debug,
+ http_basicauth=http_basicauth,
+ headers=headers)
+ if options.output_file:
+ return_code, return_message = fetch_from_url_to_file(
+ url,
+ config,
+ options.output_file,
+ data)[:2]
+ raise SystemExit(return_code, return_message)
+ else:
+ data = fetch_from_url(url, config)
+ print(data)
+
+
+if __name__ == '__main__':
+ logging.basicConfig()
+ main()
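+
+# Example invocation (illustrative; assumes the module is importable as
+# ndg.httpsclient.utils and that ~/credentials.pem exists or -c is given):
+#
+#     python -m ndg.httpsclient.utils --no-verify-peer -d https://www.example.com/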
diff --git a/libs/pyasn1/__init__.py b/libs/pyasn1/__init__.py
index 88aff79..77793cd 100644
--- a/libs/pyasn1/__init__.py
+++ b/libs/pyasn1/__init__.py
@@ -1,8 +1,8 @@
import sys
# http://www.python.org/dev/peps/pep-0396/
-__version__ = '0.1.7'
+__version__ = '0.1.8'
if sys.version_info[:2] < (2, 4):
- raise RuntimeError('PyASN1 requires Python 2.4 or later')
+ raise RuntimeError('PyASN1 requires Python 2.4 or later')
diff --git a/libs/pyasn1/codec/ber/decoder.py b/libs/pyasn1/codec/ber/decoder.py
index be0cf49..61bfbce 100644
--- a/libs/pyasn1/codec/ber/decoder.py
+++ b/libs/pyasn1/codec/ber/decoder.py
@@ -1,7 +1,7 @@
# BER decoder
-from pyasn1.type import tag, base, univ, char, useful, tagmap
+from pyasn1.type import tag, univ, char, useful, tagmap
from pyasn1.codec.ber import eoo
-from pyasn1.compat.octets import oct2int, octs2ints, isOctetsType
+from pyasn1.compat.octets import oct2int, isOctetsType
from pyasn1 import debug, error
class AbstractDecoder:
@@ -11,14 +11,14 @@ class AbstractDecoder:
raise error.PyAsn1Error('Decoder not implemented for %s' % (tagSet,))
def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
- length, state, decodeFun, substrateFun):
+ length, state, decodeFun, substrateFun):
raise error.PyAsn1Error('Indefinite length mode decoder not implemented for %s' % (tagSet,))
class AbstractSimpleDecoder(AbstractDecoder):
tagFormats = (tag.tagFormatSimple,)
def _createComponent(self, asn1Spec, tagSet, value=None):
if tagSet[0][1] not in self.tagFormats:
- raise error.PyAsn1Error('Invalid tag format %r for %r' % (tagSet[0], self.protoComponent,))
+ raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType()))
if asn1Spec is None:
return self.protoComponent.clone(value, tagSet)
elif value is None:
@@ -30,17 +30,12 @@ class AbstractConstructedDecoder(AbstractDecoder):
tagFormats = (tag.tagFormatConstructed,)
def _createComponent(self, asn1Spec, tagSet, value=None):
if tagSet[0][1] not in self.tagFormats:
- raise error.PyAsn1Error('Invalid tag format %r for %r' % (tagSet[0], self.protoComponent,))
+ raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType()))
if asn1Spec is None:
return self.protoComponent.clone(tagSet)
else:
return asn1Spec.clone()
-class EndOfOctetsDecoder(AbstractSimpleDecoder):
- def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet,
- length, state, decodeFun, substrateFun):
- return eoo.endOfOctets, substrate[length:]
-
class ExplicitTagDecoder(AbstractSimpleDecoder):
protoComponent = univ.Any('')
tagFormats = (tag.tagFormatConstructed,)
@@ -63,7 +58,7 @@ class ExplicitTagDecoder(AbstractSimpleDecoder):
substrate, length
)
value, substrate = decodeFun(substrate, asn1Spec, tagSet, length)
- terminator, substrate = decodeFun(substrate)
+ terminator, substrate = decodeFun(substrate, allowEoo=True)
if eoo.endOfOctets.isSameTypeWith(terminator) and \
terminator == eoo.endOfOctets:
return value, substrate
@@ -129,14 +124,14 @@ class BitStringDecoder(AbstractSimpleDecoder):
'Trailing bits overflow %s' % trailingBits
)
head = head[1:]
- lsb = p = 0; l = len(head)-1; b = ()
+ lsb = p = 0; l = len(head)-1; b = []
while p <= l:
if p == l:
lsb = trailingBits
j = 7
o = oct2int(head[p])
while j >= lsb:
- b = b + ((o>>j)&0x01,)
+ b.append((o>>j)&0x01)
j = j - 1
p = p + 1
return self._createComponent(asn1Spec, tagSet, b), tail
@@ -144,7 +139,7 @@ class BitStringDecoder(AbstractSimpleDecoder):
if substrateFun:
return substrateFun(r, substrate, length)
while head:
- component, head = decodeFun(head)
+ component, head = decodeFun(head, self.protoComponent)
r = r + component
return r, tail
@@ -154,7 +149,8 @@ class BitStringDecoder(AbstractSimpleDecoder):
if substrateFun:
return substrateFun(r, substrate, length)
while substrate:
- component, substrate = decodeFun(substrate)
+ component, substrate = decodeFun(substrate, self.protoComponent,
+ allowEoo=True)
if eoo.endOfOctets.isSameTypeWith(component) and \
component == eoo.endOfOctets:
break
@@ -177,7 +173,7 @@ class OctetStringDecoder(AbstractSimpleDecoder):
if substrateFun:
return substrateFun(r, substrate, length)
while head:
- component, head = decodeFun(head)
+ component, head = decodeFun(head, self.protoComponent)
r = r + component
return r, tail
@@ -187,7 +183,8 @@ class OctetStringDecoder(AbstractSimpleDecoder):
if substrateFun:
return substrateFun(r, substrate, length)
while substrate:
- component, substrate = decodeFun(substrate)
+ component, substrate = decodeFun(substrate, self.protoComponent,
+ allowEoo=True)
if eoo.endOfOctets.isSameTypeWith(component) and \
component == eoo.endOfOctets:
break
@@ -216,20 +213,14 @@ class ObjectIdentifierDecoder(AbstractSimpleDecoder):
if not head:
raise error.PyAsn1Error('Empty substrate')
- # Get the first subid
- subId = oct2int(head[0])
- oid = divmod(subId, 40)
-
- index = 1
+ oid = ()
+ index = 0
substrateLen = len(head)
while index < substrateLen:
subId = oct2int(head[index])
- index = index + 1
- if subId == 128:
- # ASN.1 spec forbids leading zeros (0x80) in sub-ID OID
- # encoding, tolerating it opens a vulnerability.
- # See http://www.cosic.esat.kuleuven.be/publications/article-1432.pdf page 7
- raise error.PyAsn1Error('Invalid leading 0x80 in sub-OID')
+ index += 1
+ if subId < 128:
+ oid = oid + (subId,)
elif subId > 128:
# Construct subid from a number of octets
nextSubId = subId
@@ -239,11 +230,27 @@ class ObjectIdentifierDecoder(AbstractSimpleDecoder):
if index >= substrateLen:
raise error.SubstrateUnderrunError(
'Short substrate for sub-OID past %s' % (oid,)
- )
+ )
nextSubId = oct2int(head[index])
- index = index + 1
- subId = (subId << 7) + nextSubId
- oid = oid + (subId,)
+ index += 1
+ oid = oid + ((subId << 7) + nextSubId,)
+ elif subId == 128:
+ # ASN.1 spec forbids leading zeros (0x80) in OID
+ # encoding, tolerating it opens a vulnerability. See
+ # http://www.cosic.esat.kuleuven.be/publications/article-1432.pdf
+ # page 7
+ raise error.PyAsn1Error('Invalid octet 0x80 in OID encoding')
+
+ # Decode two leading arcs
+ if 0 <= oid[0] <= 39:
+ oid = (0,) + oid
+ elif 40 <= oid[0] <= 79:
+ oid = (1, oid[0]-40) + oid[1:]
+ elif oid[0] >= 80:
+ oid = (2, oid[0]-80) + oid[1:]
+ else:
+ raise error.PyAsn1Error('Malformed first OID octet: %s' % head[0])
+
return self._createComponent(asn1Spec, tagSet, oid), tail
class RealDecoder(AbstractSimpleDecoder):
@@ -254,10 +261,13 @@ class RealDecoder(AbstractSimpleDecoder):
if not head:
return self._createComponent(asn1Spec, tagSet, 0.0), tail
fo = oct2int(head[0]); head = head[1:]
- if fo & 0x80: # binary enoding
+ if fo & 0x80: # binary encoding
+ if not head:
+ raise error.PyAsn1Error("Incomplete floating-point value")
n = (fo & 0x03) + 1
if n == 4:
n = oct2int(head[0])
+ head = head[1:]
eo, head = head[:n], head[n:]
if not eo or not head:
raise error.PyAsn1Error('Real exponent screwed')
@@ -266,6 +276,13 @@ class RealDecoder(AbstractSimpleDecoder):
e <<= 8
e |= oct2int(eo[0])
eo = eo[1:]
+ b = fo >> 4 & 0x03 # base bits
+ if b > 2:
+ raise error.PyAsn1Error('Illegal Real base')
+ if b == 1: # encbase = 8
+ e *= 3
+ elif b == 2: # encbase = 16
+ e *= 4
p = 0
while head: # value
p <<= 8
@@ -273,10 +290,14 @@ class RealDecoder(AbstractSimpleDecoder):
head = head[1:]
if fo & 0x40: # sign bit
p = -p
+ sf = fo >> 2 & 0x03 # scale bits
+ p *= 2**sf
value = (p, 2, e)
elif fo & 0x40: # infinite value
value = fo & 0x01 and '-inf' or 'inf'
elif fo & 0xc0 == 0: # character encoding
+ if not head:
+ raise error.PyAsn1Error("Incomplete floating-point value")
try:
if fo & 0x3 == 0x1: # NR1
value = (int(head), 10, 0)
@@ -336,7 +357,7 @@ class SequenceDecoder(AbstractConstructedDecoder):
idx = 0
while substrate:
asn1Spec = self._getComponentTagMap(r, idx)
- component, substrate = decodeFun(substrate, asn1Spec)
+ component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True)
if eoo.endOfOctets.isSameTypeWith(component) and \
component == eoo.endOfOctets:
break
@@ -378,7 +399,7 @@ class SequenceOfDecoder(AbstractConstructedDecoder):
asn1Spec = r.getComponentType()
idx = 0
while substrate:
- component, substrate = decodeFun(substrate, asn1Spec)
+ component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True)
if eoo.endOfOctets.isSameTypeWith(component) and \
component == eoo.endOfOctets:
break
@@ -437,7 +458,8 @@ class ChoiceDecoder(AbstractConstructedDecoder):
return substrateFun(r, substrate, length)
if r.getTagSet() == tagSet: # explicitly tagged Choice
component, substrate = decodeFun(substrate, r.getComponentTagMap())
- eooMarker, substrate = decodeFun(substrate) # eat up EOO marker
+ # eat up EOO marker
+ eooMarker, substrate = decodeFun(substrate, allowEoo=True)
if not eoo.endOfOctets.isSameTypeWith(eooMarker) or \
eooMarker != eoo.endOfOctets:
raise error.PyAsn1Error('No EOO seen before substrate ends')
@@ -485,7 +507,7 @@ class AnyDecoder(AbstractSimpleDecoder):
if substrateFun:
return substrateFun(r, substrate, length)
while substrate:
- component, substrate = decodeFun(substrate, asn1Spec)
+ component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True)
if eoo.endOfOctets.isSameTypeWith(component) and \
component == eoo.endOfOctets:
break
@@ -521,13 +543,14 @@ class BMPStringDecoder(OctetStringDecoder):
protoComponent = char.BMPString()
# "useful" types
+class ObjectDescriptorDecoder(OctetStringDecoder):
+ protoComponent = useful.ObjectDescriptor()
class GeneralizedTimeDecoder(OctetStringDecoder):
protoComponent = useful.GeneralizedTime()
class UTCTimeDecoder(OctetStringDecoder):
protoComponent = useful.UTCTime()
tagMap = {
- eoo.endOfOctets.tagSet: EndOfOctetsDecoder(),
univ.Integer.tagSet: IntegerDecoder(),
univ.Boolean.tagSet: BooleanDecoder(),
univ.BitString.tagSet: BitStringDecoder(),
@@ -552,9 +575,10 @@ tagMap = {
char.UniversalString.tagSet: UniversalStringDecoder(),
char.BMPString.tagSet: BMPStringDecoder(),
# useful types
+ useful.ObjectDescriptor.tagSet: ObjectDescriptorDecoder(),
useful.GeneralizedTime.tagSet: GeneralizedTimeDecoder(),
useful.UTCTime.tagSet: UTCTimeDecoder()
- }
+}
# Type-to-codec map for ambiguous ASN.1 types
typeMap = {
@@ -564,7 +588,7 @@ typeMap = {
univ.SequenceOf.typeId: SequenceOfDecoder(),
univ.Choice.typeId: ChoiceDecoder(),
univ.Any.typeId: AnyDecoder()
- }
+}
( stDecodeTag, stDecodeLength, stGetValueDecoder, stGetValueDecoderByAsn1Spec,
stGetValueDecoderByTag, stTryAsExplicitTag, stDecodeValue,
@@ -574,23 +598,22 @@ class Decoder:
defaultErrorState = stErrorCondition
# defaultErrorState = stDumpRawValue
defaultRawDecoder = AnyDecoder()
+ supportIndefLength = True
def __init__(self, tagMap, typeMap={}):
self.__tagMap = tagMap
self.__typeMap = typeMap
- self.__endOfOctetsTagSet = eoo.endOfOctets.getTagSet()
# Tag & TagSet objects caches
self.__tagCache = {}
self.__tagSetCache = {}
def __call__(self, substrate, asn1Spec=None, tagSet=None,
length=None, state=stDecodeTag, recursiveFlag=1,
- substrateFun=None):
+ substrateFun=None, allowEoo=False):
if debug.logger & debug.flagDecoder:
debug.logger('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate)))
fullSubstrate = substrate
while state != stStop:
if state == stDecodeTag:
- # Decode tag
if not substrate:
raise error.SubstrateUnderrunError(
'Short octet stream on tag decoding'
@@ -598,13 +621,25 @@ class Decoder:
if not isOctetsType(substrate) and \
not isinstance(substrate, univ.OctetString):
raise error.PyAsn1Error('Bad octet stream type')
-
+ # Decode tag
firstOctet = substrate[0]
substrate = substrate[1:]
if firstOctet in self.__tagCache:
lastTag = self.__tagCache[firstOctet]
else:
t = oct2int(firstOctet)
+ # Look for end-of-octets sentinel
+ if t == 0:
+ if substrate and oct2int(substrate[0]) == 0:
+ if allowEoo and self.supportIndefLength:
+ debug.logger and debug.logger & debug.flagDecoder and debug.logger('end-of-octets sentinel found')
+ value, substrate = eoo.endOfOctets, substrate[1:]
+ state = stStop
+ continue
+ else:
+ raise error.PyAsn1Error('Unexpected end-of-contents sentinel')
+ else:
+ raise error.PyAsn1Error('Zero tag encountered')
tagClass = t&0xC0
tagFormat = t&0x20
tagId = t&0x1F
@@ -622,7 +657,7 @@ class Decoder:
break
lastTag = tag.Tag(
tagClass=tagClass, tagFormat=tagFormat, tagId=tagId
- )
+ )
if tagId < 31:
# cache short tags
self.__tagCache[firstOctet] = lastTag
@@ -637,13 +672,13 @@ class Decoder:
else:
tagSet = lastTag + tagSet
state = stDecodeLength
- debug.logger and debug.logger & debug.flagDecoder and debug.logger('tag decoded into %r, decoding length' % tagSet)
+ debug.logger and debug.logger & debug.flagDecoder and debug.logger('tag decoded into %s, decoding length' % tagSet)
if state == stDecodeLength:
# Decode length
if not substrate:
- raise error.SubstrateUnderrunError(
- 'Short octet stream on length decoding'
- )
+ raise error.SubstrateUnderrunError(
+ 'Short octet stream on length decoding'
+ )
firstOctet = oct2int(substrate[0])
if firstOctet == 128:
size = 1
@@ -670,6 +705,8 @@ class Decoder:
raise error.SubstrateUnderrunError(
'%d-octet short' % (length - len(substrate))
)
+ if length == -1 and not self.supportIndefLength:
+                    raise error.PyAsn1Error('Indefinite length encoding not supported by this codec')
state = stGetValueDecoder
debug.logger and debug.logger & debug.flagDecoder and debug.logger('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length])))
if state == stGetValueDecoder:
@@ -722,12 +759,12 @@ class Decoder:
if debug.logger and debug.logger & debug.flagDecoder:
debug.logger('candidate ASN.1 spec is a map of:')
for t, v in asn1Spec.getPosMap().items():
- debug.logger(' %r -> %s' % (t, v.__class__.__name__))
+ debug.logger(' %s -> %s' % (t, v.__class__.__name__))
if asn1Spec.getNegMap():
debug.logger('but neither of: ')
- for i in asn1Spec.getNegMap().items():
- debug.logger(' %r -> %s' % (t, v.__class__.__name__))
- debug.logger('new candidate ASN.1 spec is %s, chosen by %r' % (__chosenSpec is None and '' or __chosenSpec.__class__.__name__, tagSet))
+ for t, v in asn1Spec.getNegMap().items():
+ debug.logger(' %s -> %s' % (t, v.__class__.__name__))
+ debug.logger('new candidate ASN.1 spec is %s, chosen by %s' % (__chosenSpec is None and '' or __chosenSpec.prettyPrintType(), tagSet))
else:
__chosenSpec = asn1Spec
debug.logger and debug.logger & debug.flagDecoder and debug.logger('candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__)
@@ -745,7 +782,7 @@ class Decoder:
elif baseTagSet in self.__tagMap:
# base type or tagged subtype
concreteDecoder = self.__tagMap[baseTagSet]
- debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen by base %r' % (baseTagSet,))
+ debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen by base %s' % (baseTagSet,))
else:
concreteDecoder = None
if concreteDecoder:
@@ -753,10 +790,6 @@ class Decoder:
state = stDecodeValue
else:
state = stTryAsExplicitTag
- elif tagSet == self.__endOfOctetsTagSet:
- concreteDecoder = self.__tagMap[tagSet]
- state = stDecodeValue
- debug.logger and debug.logger & debug.flagDecoder and debug.logger('end-of-octets found')
else:
concreteDecoder = None
state = stTryAsExplicitTag
@@ -795,8 +828,8 @@ class Decoder:
debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, value.prettyPrint(), substrate and debug.hexdump(substrate) or ''))
if state == stErrorCondition:
raise error.PyAsn1Error(
- '%r not in asn1Spec: %r' % (tagSet, asn1Spec)
- )
+ '%s not in asn1Spec: %s' % (tagSet, asn1Spec)
+ )
if debug.logger and debug.logger & debug.flagDecoder:
debug.scope.pop()
debug.logger('decoder left scope %s, call completed' % debug.scope)
diff --git a/libs/pyasn1/codec/ber/encoder.py b/libs/pyasn1/codec/ber/encoder.py
index 173949d..0fb4ae7 100644
--- a/libs/pyasn1/codec/ber/encoder.py
+++ b/libs/pyasn1/codec/ber/encoder.py
@@ -114,13 +114,17 @@ class IntegerEncoder(AbstractItemEncoder):
class BitStringEncoder(AbstractItemEncoder):
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
if not maxChunkSize or len(value) <= maxChunkSize*8:
- r = {}; l = len(value); p = 0; j = 7
- while p < l:
- i, j = divmod(p, 8)
- r[i] = r.get(i,0) | value[p]<<(7-j)
- p = p + 1
- keys = list(r); keys.sort()
- return int2oct(7-j) + ints2octs([r[k] for k in keys]), 0
+ out_len = (len(value) + 7) // 8
+ out_list = out_len * [0]
+ j = 7
+ i = -1
+ for val in value:
+ j += 1
+ if j == 8:
+ i += 1
+ j = 0
+ out_list[i] = out_list[i] | val << (7-j)
+ return int2oct(7-j) + ints2octs(out_list), 0
else:
pos = 0; substrate = null
while 1:
@@ -156,47 +160,98 @@ class ObjectIdentifierEncoder(AbstractItemEncoder):
precomputedValues = {
(1, 3, 6, 1, 2): (43, 6, 1, 2),
(1, 3, 6, 1, 4): (43, 6, 1, 4)
- }
+ }
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
oid = value.asTuple()
if oid[:5] in self.precomputedValues:
octets = self.precomputedValues[oid[:5]]
- index = 5
+ oid = oid[5:]
else:
if len(oid) < 2:
raise error.PyAsn1Error('Short OID %s' % (value,))
+ octets = ()
+
# Build the first twos
- if oid[0] > 6 or oid[1] > 39 or oid[0] == 6 and oid[1] > 15:
+ if oid[0] == 0 and 0 <= oid[1] <= 39:
+ oid = (oid[1],) + oid[2:]
+ elif oid[0] == 1 and 0 <= oid[1] <= 39:
+ oid = (oid[1] + 40,) + oid[2:]
+ elif oid[0] == 2:
+ oid = (oid[1] + 80,) + oid[2:]
+ else:
raise error.PyAsn1Error(
- 'Initial sub-ID overflow %s in OID %s' % (oid[:2], value)
+ 'Impossible initial arcs %s at %s' % (oid[:2], value)
)
- octets = (oid[0] * 40 + oid[1],)
- index = 2
- # Cycle through subids
- for subid in oid[index:]:
- if subid > -1 and subid < 128:
+ # Cycle through subIds
+ for subId in oid:
+ if subId > -1 and subId < 128:
# Optimize for the common case
- octets = octets + (subid & 0x7f,)
- elif subid < 0 or subid > 0xFFFFFFFF:
+ octets = octets + (subId & 0x7f,)
+ elif subId < 0:
raise error.PyAsn1Error(
- 'SubId overflow %s in %s' % (subid, value)
- )
+ 'Negative OID arc %s at %s' % (subId, value)
+ )
else:
# Pack large Sub-Object IDs
- res = (subid & 0x7f,)
- subid = subid >> 7
- while subid > 0:
- res = (0x80 | (subid & 0x7f),) + res
- subid = subid >> 7
+ res = (subId & 0x7f,)
+ subId = subId >> 7
+ while subId > 0:
+ res = (0x80 | (subId & 0x7f),) + res
+ subId = subId >> 7
# Add packed Sub-Object ID to resulted Object ID
octets += res
-
+
return ints2octs(octets), 0
class RealEncoder(AbstractItemEncoder):
supportIndefLenMode = 0
+ binEncBase = 2 # set to None to choose encoding base automatically
+ def _dropFloatingPoint(self, m, encbase, e):
+ ms, es = 1, 1
+ if m < 0:
+ ms = -1 # mantissa sign
+ if e < 0:
+            es = -1 # exponent sign
+ m *= ms
+ if encbase == 8:
+ m = m*2**(abs(e) % 3 * es)
+ e = abs(e) // 3 * es
+ elif encbase == 16:
+ m = m*2**(abs(e) % 4 * es)
+ e = abs(e) // 4 * es
+
+ while 1:
+ if int(m) != m:
+ m *= encbase
+ e -= 1
+ continue
+ break
+ return ms, int(m), encbase, e
+
+ def _chooseEncBase(self, value):
+ m, b, e = value
+ base = [2, 8, 16]
+ if value.binEncBase in base:
+ return self._dropFloatingPoint(m, value.binEncBase, e)
+ elif self.binEncBase in base:
+ return self._dropFloatingPoint(m, self.binEncBase, e)
+ # auto choosing base 2/8/16
+ mantissa = [m, m, m]
+ exponenta = [e, e, e]
+ encbase = 2
+ e = float('inf')
+ for i in range(3):
+ sign, mantissa[i], base[i], exponenta[i] = \
+ self._dropFloatingPoint(mantissa[i], base[i], exponenta[i])
+ if abs(exponenta[i]) < abs(e) or \
+ (abs(exponenta[i]) == abs(e) and mantissa[i] < m):
+ e = exponenta[i]
+ m = int(mantissa[i])
+ encbase = base[i]
+ return sign, m, encbase, e
+
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
if value.isPlusInfinity():
return int2oct(0x40), 0
@@ -208,22 +263,43 @@ class RealEncoder(AbstractItemEncoder):
if b == 10:
return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), 0
elif b == 2:
- fo = 0x80 # binary enoding
- if m < 0:
- fo = fo | 0x40 # sign bit
- m = -m
- while int(m) != m: # drop floating point
- m *= 2
- e -= 1
- while m & 0x1 == 0: # mantissa normalization
+ fo = 0x80 # binary encoding
+ ms, m, encbase, e = self._chooseEncBase(value)
+ if ms < 0: # mantissa sign
+ fo = fo | 0x40 # sign bit
+ # exponenta & mantissa normalization
+ if encbase == 2:
+ while m & 0x1 == 0:
+ m >>= 1
+ e += 1
+ elif encbase == 8:
+ while m & 0x7 == 0:
+ m >>= 3
+ e += 1
+ fo |= 0x10
+ else: # encbase = 16
+ while m & 0xf == 0:
+ m >>= 4
+ e += 1
+ fo |= 0x20
+ sf = 0 # scale factor
+ while m & 0x1 == 0:
m >>= 1
- e += 1
+ sf += 1
+ if sf > 3:
+ raise error.PyAsn1Error('Scale factor overflow') # bug if raised
+ fo |= sf << 2
eo = null
- while e not in (0, -1):
- eo = int2oct(e&0xff) + eo
- e >>= 8
- if e == 0 and eo and oct2int(eo[0]) & 0x80:
- eo = int2oct(0) + eo
+ if e == 0 or e == -1:
+ eo = int2oct(e&0xff)
+ else:
+ while e not in (0, -1):
+ eo = int2oct(e&0xff) + eo
+ e >>= 8
+ if e == 0 and eo and oct2int(eo[0]) & 0x80:
+ eo = int2oct(0) + eo
+ if e == -1 and eo and not (oct2int(eo[0]) & 0x80):
+ eo = int2oct(0xff) + eo
n = len(eo)
if n > 0xff:
raise error.PyAsn1Error('Real exponent overflow')
@@ -235,7 +311,7 @@ class RealEncoder(AbstractItemEncoder):
fo |= 2
else:
fo |= 3
- eo = int2oct(n//0xff+1) + eo
+ eo = int2oct(n&0xff) + eo
po = null
while m:
po = int2oct(m&0xff) + po
@@ -308,6 +384,7 @@ tagMap = {
char.UniversalString.tagSet: OctetStringEncoder(),
char.BMPString.tagSet: OctetStringEncoder(),
# useful types
+ useful.ObjectDescriptor.tagSet: OctetStringEncoder(),
useful.GeneralizedTime.tagSet: OctetStringEncoder(),
useful.UTCTime.tagSet: OctetStringEncoder()
}
@@ -323,12 +400,15 @@ typeMap = {
}
class Encoder:
+ supportIndefLength = True
def __init__(self, tagMap, typeMap={}):
self.__tagMap = tagMap
self.__typeMap = typeMap
- def __call__(self, value, defMode=1, maxChunkSize=0):
- debug.logger & debug.flagEncoder and debug.logger('encoder called in %sdef mode, chunk size %s for type %s, value:\n%s' % (not defMode and 'in' or '', maxChunkSize, value.__class__.__name__, value.prettyPrint()))
+ def __call__(self, value, defMode=True, maxChunkSize=0):
+ if not defMode and not self.supportIndefLength:
+ raise error.PyAsn1Error('Indefinite length encoding not supported by this codec')
+ debug.logger & debug.flagEncoder and debug.logger('encoder called in %sdef mode, chunk size %s for type %s, value:\n%s' % (not defMode and 'in' or '', maxChunkSize, value.prettyPrintType(), value.prettyPrint()))
tagSet = value.getTagSet()
if len(tagSet) > 1:
concreteEncoder = explicitlyTaggedItemEncoder
@@ -343,7 +423,7 @@ class Encoder:
concreteEncoder = self.__tagMap[tagSet]
else:
raise Error('No encoder for %s' % (value,))
- debug.logger & debug.flagEncoder and debug.logger('using value codec %s chosen by %r' % (concreteEncoder.__class__.__name__, tagSet))
+ debug.logger & debug.flagEncoder and debug.logger('using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet))
substrate = concreteEncoder.encode(
self, value, defMode, maxChunkSize
)
diff --git a/libs/pyasn1/codec/cer/encoder.py b/libs/pyasn1/codec/cer/encoder.py
index 4c05130..61ce8a1 100644
--- a/libs/pyasn1/codec/cer/encoder.py
+++ b/libs/pyasn1/codec/cer/encoder.py
@@ -1,7 +1,9 @@
# CER encoder
from pyasn1.type import univ
+from pyasn1.type import useful
from pyasn1.codec.ber import encoder
-from pyasn1.compat.octets import int2oct, null
+from pyasn1.compat.octets import int2oct, str2octs, null
+from pyasn1 import error
class BooleanEncoder(encoder.IntegerEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
@@ -15,18 +17,56 @@ class BitStringEncoder(encoder.BitStringEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
return encoder.BitStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
- )
+ )
class OctetStringEncoder(encoder.OctetStringEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
return encoder.OctetStringEncoder.encodeValue(
self, encodeFun, client, defMode, 1000
- )
+ )
+
+class RealEncoder(encoder.RealEncoder):
+ def _chooseEncBase(self, value):
+ m, b, e = value
+ return self._dropFloatingPoint(m, b, e)
-# specialized RealEncoder here
# specialized GeneralStringEncoder here
-# specialized GeneralizedTimeEncoder here
-# specialized UTCTimeEncoder here
+
+class GeneralizedTimeEncoder(OctetStringEncoder):
+ zchar = str2octs('Z')
+ pluschar = str2octs('+')
+ minuschar = str2octs('-')
+ zero = str2octs('0')
+ def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
+ octets = client.asOctets()
+# This breaks too many existing data items
+# if '.' not in octets:
+# raise error.PyAsn1Error('Format must include fraction of second: %r' % octets)
+ if len(octets) < 15:
+ raise error.PyAsn1Error('Bad UTC time length: %r' % octets)
+ if self.pluschar in octets or self.minuschar in octets:
+ raise error.PyAsn1Error('Must be UTC time: %r' % octets)
+ if octets[-1] != self.zchar[0]:
+ raise error.PyAsn1Error('Missing timezone specifier: %r' % octets)
+ return encoder.OctetStringEncoder.encodeValue(
+ self, encodeFun, client, defMode, 1000
+ )
+
+class UTCTimeEncoder(encoder.OctetStringEncoder):
+ zchar = str2octs('Z')
+ pluschar = str2octs('+')
+ minuschar = str2octs('-')
+ def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
+ octets = client.asOctets()
+ if self.pluschar in octets or self.minuschar in octets:
+ raise error.PyAsn1Error('Must be UTC time: %r' % octets)
+ if octets and octets[-1] != self.zchar[0]:
+ client = client.clone(octets + self.zchar)
+ if len(client) != 13:
+ raise error.PyAsn1Error('Bad UTC time length: %r' % client)
+ return encoder.OctetStringEncoder.encodeValue(
+ self, encodeFun, client, defMode, 1000
+ )
class SetOfEncoder(encoder.SequenceOfEncoder):
def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
@@ -69,17 +109,20 @@ tagMap.update({
univ.Boolean.tagSet: BooleanEncoder(),
univ.BitString.tagSet: BitStringEncoder(),
univ.OctetString.tagSet: OctetStringEncoder(),
+ univ.Real.tagSet: RealEncoder(),
+ useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(),
+ useful.UTCTime.tagSet: UTCTimeEncoder(),
univ.SetOf().tagSet: SetOfEncoder() # conflcts with Set
- })
+})
typeMap = encoder.typeMap.copy()
typeMap.update({
univ.Set.typeId: SetOfEncoder(),
univ.SetOf.typeId: SetOfEncoder()
- })
+})
class Encoder(encoder.Encoder):
- def __call__(self, client, defMode=0, maxChunkSize=0):
+ def __call__(self, client, defMode=False, maxChunkSize=0):
return encoder.Encoder.__call__(self, client, defMode, maxChunkSize)
encode = Encoder(tagMap, typeMap)
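
For illustration, the new CER time encoders above are expected to accept only UTC ('Z'-terminated) timestamps; the sample values below are made up:

from pyasn1.type import useful
from pyasn1.codec.cer import encoder
from pyasn1 import error

encoder.encode(useful.GeneralizedTime('20150501120112.0Z'))      # accepted
try:
    encoder.encode(useful.GeneralizedTime('20150501120112+0200'))
except error.PyAsn1Error:
    pass  # rejected: value carries a numeric offset instead of 'Z'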
diff --git a/libs/pyasn1/codec/der/decoder.py b/libs/pyasn1/codec/der/decoder.py
index 604abec..ea58d6d 100644
--- a/libs/pyasn1/codec/der/decoder.py
+++ b/libs/pyasn1/codec/der/decoder.py
@@ -1,9 +1,9 @@
# DER decoder
-from pyasn1.type import univ
from pyasn1.codec.cer import decoder
tagMap = decoder.tagMap
typeMap = decoder.typeMap
-Decoder = decoder.Decoder
+class Decoder(decoder.Decoder):
+ supportIndefLength = False
decode = Decoder(tagMap, typeMap)
diff --git a/libs/pyasn1/codec/der/encoder.py b/libs/pyasn1/codec/der/encoder.py
index 4e5faef..7f55eeb 100644
--- a/libs/pyasn1/codec/der/encoder.py
+++ b/libs/pyasn1/codec/der/encoder.py
@@ -1,6 +1,7 @@
# DER encoder
from pyasn1.type import univ
from pyasn1.codec.cer import encoder
+from pyasn1 import error
class SetOfEncoder(encoder.SetOfEncoder):
def _cmpSetComponents(self, c1, c2):
@@ -12,17 +13,20 @@ class SetOfEncoder(encoder.SetOfEncoder):
tagMap = encoder.tagMap.copy()
tagMap.update({
- # Overload CER encodrs with BER ones (a bit hackerish XXX)
+ # Overload CER encoders with BER ones (a bit hackerish XXX)
univ.BitString.tagSet: encoder.encoder.BitStringEncoder(),
univ.OctetString.tagSet: encoder.encoder.OctetStringEncoder(),
# Set & SetOf have same tags
univ.SetOf().tagSet: SetOfEncoder()
- })
+})
typeMap = encoder.typeMap
class Encoder(encoder.Encoder):
- def __call__(self, client, defMode=1, maxChunkSize=0):
+ supportIndefLength = False
+ def __call__(self, client, defMode=True, maxChunkSize=0):
+ if not defMode:
+ raise error.PyAsn1Error('DER forbids indefinite length mode')
return encoder.Encoder.__call__(self, client, defMode, maxChunkSize)
-
+
encode = Encoder(tagMap, typeMap)
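
A quick sketch of the DER restriction introduced above (DER is always definite-length):

from pyasn1.type import univ
from pyasn1.codec.der import encoder
from pyasn1 import error

encoder.encode(univ.Integer(12))                    # fine: definite-length form
try:
    encoder.encode(univ.Integer(12), defMode=False)
except error.PyAsn1Error:
    pass  # 'DER forbids indefinite length mode'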
diff --git a/libs/pyasn1/compat/binary.py b/libs/pyasn1/compat/binary.py
new file mode 100644
index 0000000..b38932a
--- /dev/null
+++ b/libs/pyasn1/compat/binary.py
@@ -0,0 +1,10 @@
+from sys import version_info
+
+if version_info[0:2] < (2, 6):
+ def bin(x):
+ if x <= 1:
+ return '0b'+str(x)
+ else:
+ return bin(x>>1) + str(x&1)
+else:
+ bin = bin
diff --git a/libs/pyasn1/compat/iterfunc.py b/libs/pyasn1/compat/iterfunc.py
new file mode 100644
index 0000000..0720bde
--- /dev/null
+++ b/libs/pyasn1/compat/iterfunc.py
@@ -0,0 +1,10 @@
+from sys import version_info
+
+if version_info[0] <= 2 and version_info[1] <= 4:
+ def all(iterable):
+ for element in iterable:
+ if not element:
+ return False
+ return True
+else:
+ all = all
diff --git a/libs/pyasn1/debug.py b/libs/pyasn1/debug.py
index c27cb1d..9b69886 100644
--- a/libs/pyasn1/debug.py
+++ b/libs/pyasn1/debug.py
@@ -1,4 +1,5 @@
-import sys
+import time
+import logging
from pyasn1.compat.octets import octs2ints
from pyasn1 import error
from pyasn1 import __version__
@@ -14,23 +15,67 @@ flagMap = {
'all': flagAll
}
+class Printer:
+ def __init__(self, logger=None, handler=None, formatter=None):
+ if logger is None:
+ logger = logging.getLogger('pyasn1')
+ logger.setLevel(logging.DEBUG)
+ if handler is None:
+ handler = logging.StreamHandler()
+ if formatter is None:
+ formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s')
+ handler.setFormatter(formatter)
+ handler.setLevel(logging.DEBUG)
+ logger.addHandler(handler)
+ self.__logger = logger
+
+ def __call__(self, msg): self.__logger.debug(msg)
+ def __str__(self): return ''
+
+if hasattr(logging, 'NullHandler'):
+ NullHandler = logging.NullHandler
+else:
+ # Python 2.6 and older
+ class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
class Debug:
- defaultPrinter = sys.stderr.write
- def __init__(self, *flags):
+ defaultPrinter = None
+ def __init__(self, *flags, **options):
self._flags = flagNone
- self._printer = self.defaultPrinter
+ if options.get('printer') is not None:
+ self._printer = options.get('printer')
+ elif self.defaultPrinter is not None:
+ self._printer = self.defaultPrinter
+ if 'loggerName' in options:
+ # route our logs to parent logger
+ self._printer = Printer(
+ logger=logging.getLogger(options['loggerName']),
+ handler=NullHandler()
+ )
+ else:
+ self._printer = Printer()
self('running pyasn1 version %s' % __version__)
for f in flags:
- if f not in flagMap:
- raise error.PyAsn1Error('bad debug flag %s' % (f,))
- self._flags = self._flags | flagMap[f]
- self('debug category \'%s\' enabled' % f)
-
+ inverse = f and f[0] in ('!', '~')
+ if inverse:
+ f = f[1:]
+ try:
+ if inverse:
+ self._flags &= ~flagMap[f]
+ else:
+ self._flags |= flagMap[f]
+ except KeyError:
+ raise error.PyAsn1Error('bad debug flag %s' % f)
+
+ self('debug category \'%s\' %s' % (f, inverse and 'disabled' or 'enabled'))
+
def __str__(self):
return 'logger %s, flags %x' % (self._printer, self._flags)
def __call__(self, msg):
- self._printer('DBG: %s\n' % msg)
+ self._printer(msg)
def __and__(self, flag):
return self._flags & flag
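
One plausible way to wire up the new logging-based printer and the '!'-prefixed inverse flags; debug.setLogger() and the 'myapp.pyasn1' logger name are assumptions, not shown in this patch:

import logging
from pyasn1 import debug

logging.basicConfig(level=logging.DEBUG)
# enable everything except decoder tracing, routed to a parent logger (assumed name)
debug.setLogger(debug.Debug('all', '!decoder', loggerName='myapp.pyasn1'))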
diff --git a/libs/pyasn1/type/base.py b/libs/pyasn1/type/base.py
index 4087371..155ed74 100644
--- a/libs/pyasn1/type/base.py
+++ b/libs/pyasn1/type/base.py
@@ -1,13 +1,13 @@
# Base classes for ASN.1 types
import sys
-from pyasn1.type import constraint, tagmap
+from pyasn1.type import constraint, tagmap, tag
from pyasn1 import error
class Asn1Item: pass
class Asn1ItemBase(Asn1Item):
# Set of tags for this ASN.1 type
- tagSet = ()
+ tagSet = tag.TagSet()
# A list of constraint.Constraint instances for checking values
subtypeSpec = constraint.ConstraintsIntersection()
@@ -38,22 +38,28 @@ class Asn1ItemBase(Asn1Item):
def getEffectiveTagSet(self): return self._tagSet # used by untagged types
def getTagMap(self): return tagmap.TagMap({self._tagSet: self})
- def isSameTypeWith(self, other):
+ def isSameTypeWith(self, other, matchTags=True, matchConstraints=True):
return self is other or \
- self._tagSet == other.getTagSet() and \
- self._subtypeSpec == other.getSubtypeSpec()
- def isSuperTypeOf(self, other):
+ (not matchTags or \
+ self._tagSet == other.getTagSet()) and \
+ (not matchConstraints or \
+ self._subtypeSpec==other.getSubtypeSpec())
+
+ def isSuperTypeOf(self, other, matchTags=True, matchConstraints=True):
"""Returns true if argument is a ASN1 subtype of ourselves"""
- return self._tagSet.isSuperTagSetOf(other.getTagSet()) and \
- self._subtypeSpec.isSuperTypeOf(other.getSubtypeSpec())
+ return (not matchTags or \
+ self._tagSet.isSuperTagSetOf(other.getTagSet())) and \
+ (not matchConstraints or \
+ (self._subtypeSpec.isSuperTypeOf(other.getSubtypeSpec())))
-class __NoValue:
+class NoValue:
def __getattr__(self, attr):
raise error.PyAsn1Error('No value for %s()' % attr)
def __getitem__(self, i):
raise error.PyAsn1Error('No value')
+ def __repr__(self): return '%s()' % self.__class__.__name__
-noValue = __NoValue()
+noValue = NoValue()
# Base class for "simple" ASN.1 objects. These are immutable.
class AbstractSimpleAsn1Item(Asn1ItemBase):
@@ -72,10 +78,15 @@ class AbstractSimpleAsn1Item(Asn1ItemBase):
self._len = None
def __repr__(self):
- if self._value is noValue:
- return self.__class__.__name__ + '()'
- else:
- return self.__class__.__name__ + '(%s)' % (self.prettyOut(self._value),)
+ r = []
+ if self._value is not self.defaultValue:
+ r.append(self.prettyOut(self._value))
+ if self._tagSet is not self.tagSet:
+ r.append('tagSet=%r' % (self._tagSet,))
+ if self._subtypeSpec is not self.subtypeSpec:
+ r.append('subtypeSpec=%r' % (self._subtypeSpec,))
+ return '%s(%s)' % (self.__class__.__name__, ', '.join(r))
+
def __str__(self): return str(self._value)
def __eq__(self, other):
return self is other and True or self._value == other
@@ -90,6 +101,9 @@ class AbstractSimpleAsn1Item(Asn1ItemBase):
def __bool__(self): return bool(self._value)
def __hash__(self): return self.__hashedValue
+ def hasValue(self):
+ return not isinstance(self._value, NoValue)
+
def clone(self, value=None, tagSet=None, subtypeSpec=None):
if value is None and tagSet is None and subtypeSpec is None:
return self
@@ -121,14 +135,17 @@ class AbstractSimpleAsn1Item(Asn1ItemBase):
def prettyOut(self, value): return str(value)
def prettyPrint(self, scope=0):
- if self._value is noValue:
- return ''
- else:
+ if self.hasValue():
return self.prettyOut(self._value)
+ else:
+ return ''
# XXX Compatibility stub
def prettyPrinter(self, scope=0): return self.prettyPrint(scope)
+ def prettyPrintType(self, scope=0):
+ return '%s -> %s' % (self.getTagSet(), self.__class__.__name__)
+
#
# Constructed types:
# * There are five of them: Sequence, SequenceOf/SetOf, Set and Choice
@@ -166,13 +183,16 @@ class AbstractConstructedAsn1Item(Asn1ItemBase):
self._componentValuesSet = 0
def __repr__(self):
- r = self.__class__.__name__ + '()'
- for idx in range(len(self._componentValues)):
- if self._componentValues[idx] is None:
- continue
- r = r + '.setComponentByPosition(%s, %r)' % (
- idx, self._componentValues[idx]
- )
+ r = []
+ if self._componentType is not self.componentType:
+ r.append('componentType=%r' % (self._componentType,))
+ if self._tagSet is not self.tagSet:
+ r.append('tagSet=%r' % (self._tagSet,))
+ if self._subtypeSpec is not self.subtypeSpec:
+ r.append('subtypeSpec=%r' % (self._subtypeSpec,))
+ r = '%s(%s)' % (self.__class__.__name__, ', '.join(r))
+ if self._componentValues:
+ r += '.setComponents(%s)' % ', '.join([repr(x) for x in self._componentValues])
return r
def __eq__(self, other):
@@ -235,8 +255,17 @@ class AbstractConstructedAsn1Item(Asn1ItemBase):
def setComponentByPosition(self, idx, value, verifyConstraints=True):
raise error.PyAsn1Error('Method not implemented')
+ def setComponents(self, *args, **kwargs):
+ for idx in range(len(args)):
+ self[idx] = args[idx]
+ for k in kwargs:
+ self[k] = kwargs[k]
+ return self
+
def getComponentType(self): return self._componentType
+ def setDefaultComponents(self): pass
+
def __getitem__(self, idx): return self.getComponentByPosition(idx)
def __setitem__(self, idx, value): self.setComponentByPosition(idx, value)
@@ -246,4 +275,3 @@ class AbstractConstructedAsn1Item(Asn1ItemBase):
self._componentValues = []
self._componentValuesSet = 0
- def setDefaultComponents(self): pass
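
A small illustration of the relaxed type matching and the new hasValue() helper added above; the value-range constraint is just an example:

from pyasn1.type import univ, constraint

plain = univ.Integer()
bounded = univ.Integer().subtype(
    subtypeSpec=constraint.ValueRangeConstraint(0, 10))

plain.isSameTypeWith(bounded)                          # False: constraints differ
plain.isSameTypeWith(bounded, matchConstraints=False)  # True: tags alone are compared

univ.Integer().hasValue()    # False until a value is assigned
univ.Integer(5).hasValue()   # True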
diff --git a/libs/pyasn1/type/char.py b/libs/pyasn1/type/char.py
index ae112f8..af49ab3 100644
--- a/libs/pyasn1/type/char.py
+++ b/libs/pyasn1/type/char.py
@@ -1,12 +1,6 @@
# ASN.1 "character string" types
from pyasn1.type import univ, tag
-class UTF8String(univ.OctetString):
- tagSet = univ.OctetString.tagSet.tagImplicitly(
- tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12)
- )
- encoding = "utf-8"
-
class NumericString(univ.OctetString):
tagSet = univ.OctetString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 18)
@@ -21,7 +15,8 @@ class TeletexString(univ.OctetString):
tagSet = univ.OctetString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 20)
)
-
+
+class T61String(TeletexString): pass
class VideotexString(univ.OctetString):
tagSet = univ.OctetString.tagSet.tagImplicitly(
@@ -43,6 +38,8 @@ class VisibleString(univ.OctetString):
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 26)
)
+class ISO646String(VisibleString): pass
+
class GeneralString(univ.OctetString):
tagSet = univ.OctetString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 27)
@@ -59,3 +56,9 @@ class BMPString(univ.OctetString):
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 30)
)
encoding = "utf-16-be"
+
+class UTF8String(univ.OctetString):
+ tagSet = univ.OctetString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12)
+ )
+ encoding = "utf-8"
diff --git a/libs/pyasn1/type/namedtype.py b/libs/pyasn1/type/namedtype.py
index 48967a5..aca4282 100644
--- a/libs/pyasn1/type/namedtype.py
+++ b/libs/pyasn1/type/namedtype.py
@@ -8,9 +8,17 @@ class NamedType:
isDefaulted = 0
def __init__(self, name, t):
self.__name = name; self.__type = t
- def __repr__(self): return '%s(%s, %s)' % (
+ def __repr__(self): return '%s(%r, %r)' % (
self.__class__.__name__, self.__name, self.__type
)
+ def __eq__(self, other): return tuple(self) == tuple(other)
+ def __ne__(self, other): return tuple(self) != tuple(other)
+ def __lt__(self, other): return tuple(self) < tuple(other)
+ def __le__(self, other): return tuple(self) <= tuple(other)
+ def __gt__(self, other): return tuple(self) > tuple(other)
+ def __ge__(self, other): return tuple(self) >= tuple(other)
+ def __hash__(self): return hash(tuple(self))
+
def getType(self): return self.__type
def getName(self): return self.__name
def __getitem__(self, idx):
@@ -33,11 +41,18 @@ class NamedTypes:
self.__ambigiousTypes = {}
def __repr__(self):
- r = '%s(' % self.__class__.__name__
- for n in self.__namedTypes:
- r = r + '%r, ' % (n,)
- return r + ')'
-
+ return '%s(%s)' % (
+ self.__class__.__name__,
+ ', '.join([ repr(x) for x in self.__namedTypes ])
+ )
+ def __eq__(self, other): return tuple(self) == tuple(other)
+ def __ne__(self, other): return tuple(self) != tuple(other)
+ def __lt__(self, other): return tuple(self) < tuple(other)
+ def __le__(self, other): return tuple(self) <= tuple(other)
+ def __gt__(self, other): return tuple(self) > tuple(other)
+ def __ge__(self, other): return tuple(self) >= tuple(other)
+ def __hash__(self): return hash(tuple(self))
+
def __getitem__(self, idx): return self.__namedTypes[idx]
if sys.version_info[0] <= 2:
@@ -45,7 +60,9 @@ class NamedTypes:
else:
def __bool__(self): return bool(self.__namedTypesLen)
def __len__(self): return self.__namedTypesLen
-
+
+ def clone(self): return self.__class__(*self.__namedTypes)
+
def getTypeByPosition(self, idx):
if idx < 0 or idx >= self.__namedTypesLen:
raise error.PyAsn1Error('Type position out of range')
diff --git a/libs/pyasn1/type/namedval.py b/libs/pyasn1/type/namedval.py
index d0fea7c..676cb93 100644
--- a/libs/pyasn1/type/namedval.py
+++ b/libs/pyasn1/type/namedval.py
@@ -22,7 +22,19 @@ class NamedValues:
self.valToNameIdx[val] = name
self.namedValues = self.namedValues + ((name, val),)
automaticVal = automaticVal + 1
+
+ def __repr__(self):
+ return '%s(%s)' % (self.__class__.__name__, ', '.join([repr(x) for x in self.namedValues]))
+
def __str__(self): return str(self.namedValues)
+
+ def __eq__(self, other): return tuple(self) == tuple(other)
+ def __ne__(self, other): return tuple(self) != tuple(other)
+ def __lt__(self, other): return tuple(self) < tuple(other)
+ def __le__(self, other): return tuple(self) <= tuple(other)
+ def __gt__(self, other): return tuple(self) > tuple(other)
+ def __ge__(self, other): return tuple(self) >= tuple(other)
+ def __hash__(self): return hash(tuple(self))
def getName(self, value):
if value in self.valToNameIdx:
diff --git a/libs/pyasn1/type/tag.py b/libs/pyasn1/type/tag.py
index 1144907..7471a9b 100644
--- a/libs/pyasn1/type/tag.py
+++ b/libs/pyasn1/type/tag.py
@@ -24,6 +24,9 @@ class Tag:
self.uniq = (tagClass, tagId)
self.__hashedUniqTag = hash(self.uniq)
+ def __str__(self):
+ return '[%s:%s:%s]' % self.__tag
+
def __repr__(self):
return '%s(tagClass=%s, tagFormat=%s, tagId=%s)' % (
(self.__class__.__name__,) + self.__tag
@@ -62,11 +65,14 @@ class TagSet:
_uniq = _uniq + t.uniq
self.uniq = _uniq
self.__lenOfSuperTags = len(superTags)
-
+
+ def __str__(self):
+ return self.__superTags and '+'.join([str(x) for x in self.__superTags]) or '[untagged]'
+
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__,
- ', '.join([repr(x) for x in self.__superTags])
+ '(), ' + ', '.join([repr(x) for x in self.__superTags])
)
def __add__(self, superTag):
diff --git a/libs/pyasn1/type/tagmap.py b/libs/pyasn1/type/tagmap.py
index 7cec3a1..feb91ae 100644
--- a/libs/pyasn1/type/tagmap.py
+++ b/libs/pyasn1/type/tagmap.py
@@ -21,9 +21,23 @@ class TagMap:
raise KeyError()
def __repr__(self):
- s = '%r/%r' % (self.__posMap, self.__negMap)
+ s = self.__class__.__name__ + '('
+ if self.__posMap:
+ s = s + 'posMap=%r, ' % (self.__posMap,)
+ if self.__negMap:
+ s = s + 'negMap=%r, ' % (self.__negMap,)
if self.__defType is not None:
- s = s + '/%r' % (self.__defType,)
+ s = s + 'defType=%r' % (self.__defType,)
+ return s + ')'
+
+ def __str__(self):
+ s = self.__class__.__name__ + ':\n'
+ if self.__posMap:
+ s = s + 'posMap:\n%s, ' % ',\n '.join([ x.prettyPrintType() for x in self.__posMap.values()])
+ if self.__negMap:
+ s = s + 'negMap:\n%s, ' % ',\n '.join([ x.prettyPrintType() for x in self.__negMap.values()])
+ if self.__defType is not None:
+ s = s + 'defType:\n%s, ' % self.__defType.prettyPrintType()
return s
def clone(self, parentType, tagMap, uniq=False):
diff --git a/libs/pyasn1/type/univ.py b/libs/pyasn1/type/univ.py
index 9cd16f8..f4bff81 100644
--- a/libs/pyasn1/type/univ.py
+++ b/libs/pyasn1/type/univ.py
@@ -1,5 +1,5 @@
# ASN.1 "universal" data types
-import operator, sys
+import operator, sys, math
from pyasn1.type import base, tag, constraint, namedtype, namedval, tagmap
from pyasn1.codec.ber import eoo
from pyasn1.compat import octets
@@ -22,6 +22,12 @@ class Integer(base.AbstractSimpleAsn1Item):
self, value, tagSet, subtypeSpec
)
+ def __repr__(self):
+ if self.__namedValues is not self.namedValues:
+ return '%s, %r)' % (base.AbstractSimpleAsn1Item.__repr__(self)[:-1], self.__namedValues)
+ else:
+ return base.AbstractSimpleAsn1Item.__repr__(self)
+
def __and__(self, value): return self.clone(self._value & value)
def __rand__(self, value): return self.clone(value & self._value)
def __or__(self, value): return self.clone(self._value | value)
@@ -57,8 +63,21 @@ class Integer(base.AbstractSimpleAsn1Item):
if sys.version_info[0] <= 2:
def __long__(self): return long(self._value)
def __float__(self): return float(self._value)
- def __abs__(self): return abs(self._value)
+ def __abs__(self): return self.clone(abs(self._value))
def __index__(self): return int(self._value)
+ def __pos__(self): return self.clone(+self._value)
+ def __neg__(self): return self.clone(-self._value)
+ def __invert__(self): return self.clone(~self._value)
+ def __round__(self, n=0):
+ r = round(self._value, n)
+ if n:
+ return self.clone(r)
+ else:
+ return r
+ def __floor__(self): return math.floor(self._value)
+ def __ceil__(self): return math.ceil(self._value)
+ if sys.version_info[0:2] > (2, 5):
+ def __trunc__(self): return self.clone(math.trunc(self._value))
def __lt__(self, value): return self._value < value
def __le__(self, value): return self._value <= value
@@ -73,7 +92,7 @@ class Integer(base.AbstractSimpleAsn1Item):
return int(value)
except:
raise error.PyAsn1Error(
- 'Can\'t coerce %s into integer: %s' % (value, sys.exc_info()[1])
+ 'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1])
)
r = self.__namedValues.getValue(value)
if r is not None:
@@ -82,7 +101,7 @@ class Integer(base.AbstractSimpleAsn1Item):
return int(value)
except:
raise error.PyAsn1Error(
- 'Can\'t coerce %s into integer: %s' % (value, sys.exc_info()[1])
+ 'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1])
)
def prettyOut(self, value):
@@ -260,6 +279,15 @@ class BitString(base.AbstractSimpleAsn1Item):
def prettyOut(self, value):
return '\"\'%s\'B\"' % ''.join([str(x) for x in value])
+try:
+ all
+except NameError: # Python 2.4
+ def all(iterable):
+ for element in iterable:
+ if not element:
+ return False
+ return True
+
class OctetString(base.AbstractSimpleAsn1Item):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x04)
@@ -280,7 +308,7 @@ class OctetString(base.AbstractSimpleAsn1Item):
value = self.defaultHexValue
if value is None or value is base.noValue:
value = self.defaultBinValue
- self.__intValue = None
+ self.__asNumbersCache = None
base.AbstractSimpleAsn1Item.__init__(self, value, tagSet, subtypeSpec)
def clone(self, value=None, tagSet=None, subtypeSpec=None,
@@ -304,19 +332,33 @@ class OctetString(base.AbstractSimpleAsn1Item):
def prettyIn(self, value):
if isinstance(value, str):
return value
+ elif isinstance(value, unicode):
+ try:
+ return value.encode(self._encoding)
+ except (LookupError, UnicodeEncodeError):
+ raise error.PyAsn1Error(
+ 'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding)
+ )
elif isinstance(value, (tuple, list)):
try:
return ''.join([ chr(x) for x in value ])
except ValueError:
raise error.PyAsn1Error(
'Bad OctetString initializer \'%s\'' % (value,)
- )
+ )
else:
return str(value)
else:
def prettyIn(self, value):
if isinstance(value, bytes):
return value
+ elif isinstance(value, str):
+ try:
+ return value.encode(self._encoding)
+ except UnicodeEncodeError:
+ raise error.PyAsn1Error(
+ 'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding)
+ )
elif isinstance(value, OctetString):
return value.asOctets()
elif isinstance(value, (tuple, list, map)):
@@ -325,14 +367,14 @@ class OctetString(base.AbstractSimpleAsn1Item):
except ValueError:
raise error.PyAsn1Error(
'Bad OctetString initializer \'%s\'' % (value,)
- )
+ )
else:
try:
return str(value).encode(self._encoding)
except UnicodeEncodeError:
raise error.PyAsn1Error(
'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding)
- )
+ )
def fromBinaryString(self, value):
@@ -369,21 +411,33 @@ class OctetString(base.AbstractSimpleAsn1Item):
def prettyOut(self, value):
if sys.version_info[0] <= 2:
- numbers = tuple([ ord(x) for x in value ])
+ numbers = tuple(( ord(x) for x in value ))
else:
numbers = tuple(value)
- if [ x for x in numbers if x < 32 or x > 126 ]:
- return '0x' + ''.join([ '%.2x' % x for x in numbers ])
- else:
+ if all(x >= 32 and x <= 126 for x in numbers):
return str(value)
+ else:
+ return '0x' + ''.join(( '%.2x' % x for x in numbers ))
def __repr__(self):
- if self._value is base.noValue:
- return self.__class__.__name__ + '()'
- if [ x for x in self.asNumbers() if x < 32 or x > 126 ]:
- return self.__class__.__name__ + '(hexValue=\'' + ''.join([ '%.2x' % x for x in self.asNumbers() ])+'\')'
- else:
- return self.__class__.__name__ + '(\'' + self.prettyOut(self._value) + '\')'
+ r = []
+ doHex = False
+ if self._value is not self.defaultValue:
+ for x in self.asNumbers():
+ if x < 32 or x > 126:
+ doHex = True
+ break
+ if not doHex:
+ r.append('%r' % (self._value,))
+ if self._tagSet is not self.tagSet:
+ r.append('tagSet=%r' % (self._tagSet,))
+ if self._subtypeSpec is not self.subtypeSpec:
+ r.append('subtypeSpec=%r' % (self._subtypeSpec,))
+ if self.encoding is not self._encoding:
+ r.append('encoding=%r' % (self._encoding,))
+ if doHex:
+ r.append('hexValue=%r' % ''.join([ '%.2x' % x for x in self.asNumbers() ]))
+ return '%s(%s)' % (self.__class__.__name__, ', '.join(r))
if sys.version_info[0] <= 2:
def __str__(self): return str(self._value)
@@ -391,17 +445,17 @@ class OctetString(base.AbstractSimpleAsn1Item):
return self._value.decode(self._encoding, 'ignore')
def asOctets(self): return self._value
def asNumbers(self):
- if self.__intValue is None:
- self.__intValue = tuple([ ord(x) for x in self._value ])
- return self.__intValue
+ if self.__asNumbersCache is None:
+ self.__asNumbersCache = tuple([ ord(x) for x in self._value ])
+ return self.__asNumbersCache
else:
def __str__(self): return self._value.decode(self._encoding, 'ignore')
def __bytes__(self): return self._value
def asOctets(self): return self._value
def asNumbers(self):
- if self.__intValue is None:
- self.__intValue = tuple(self._value)
- return self.__intValue
+ if self.__asNumbersCache is None:
+ self.__asNumbersCache = tuple(self._value)
+ return self.__asNumbersCache
# Immutable sequence object protocol
@@ -419,7 +473,9 @@ class OctetString(base.AbstractSimpleAsn1Item):
def __radd__(self, value): return self.clone(self.prettyIn(value) + self._value)
def __mul__(self, value): return self.clone(self._value * value)
def __rmul__(self, value): return self * value
-
+ def __int__(self): return int(self._value)
+ def __float__(self): return float(self._value)
+
class Null(OctetString):
defaultValue = ''.encode() # This is tightly constrained
tagSet = baseTagSet = tag.initTagSet(
@@ -430,7 +486,9 @@ class Null(OctetString):
if sys.version_info[0] <= 2:
intTypes = (int, long)
else:
- intTypes = int
+ intTypes = (int,)
+
+numericTypes = intTypes + (float,)
class ObjectIdentifier(base.AbstractSimpleAsn1Item):
tagSet = baseTagSet = tag.initTagSet(
@@ -456,7 +514,9 @@ class ObjectIdentifier(base.AbstractSimpleAsn1Item):
return self._value[i]
def __str__(self): return self.prettyPrint()
-
+ def __repr__(self):
+ return '%s(%r)' % (self.__class__.__name__, self.prettyPrint())
+
def index(self, suboid): return self._value.index(suboid)
def isPrefixOf(self, value):
@@ -504,6 +564,7 @@ class ObjectIdentifier(base.AbstractSimpleAsn1Item):
def prettyOut(self, value): return '.'.join([ str(x) for x in value ])
class Real(base.AbstractSimpleAsn1Item):
+ binEncBase = None # binEncBase = 16 is recommended for large numbers
try:
_plusInf = float('inf')
_minusInf = float('-inf')
@@ -526,11 +587,13 @@ class Real(base.AbstractSimpleAsn1Item):
def prettyIn(self, value):
if isinstance(value, tuple) and len(value) == 3:
- for d in value:
- if not isinstance(d, intTypes):
- raise error.PyAsn1Error(
- 'Lame Real value syntax: %s' % (value,)
- )
+ if not isinstance(value[0], numericTypes) or \
+ not isinstance(value[1], intTypes) or \
+ not isinstance(value[2], intTypes):
+ raise error.PyAsn1Error('Lame Real value syntax: %s' % (value,))
+ if isinstance(value[0], float) and \
+ self._inf and value[0] in self._inf:
+ return value[0]
if value[1] not in (2, 10):
raise error.PyAsn1Error(
'Prohibited base for Real value: %s' % (value[1],)
@@ -540,7 +603,14 @@ class Real(base.AbstractSimpleAsn1Item):
return value
elif isinstance(value, intTypes):
return self.__normalizeBase10((value, 10, 0))
- elif isinstance(value, float):
+ elif isinstance(value, (str, float)):
+ if isinstance(value, str):
+ try:
+ value = float(value)
+ except ValueError:
+ raise error.PyAsn1Error(
+ 'Bad real value syntax: %s' % (value,)
+ )
if self._inf and value in self._inf:
return value
else:
@@ -551,11 +621,6 @@ class Real(base.AbstractSimpleAsn1Item):
return self.__normalizeBase10((int(value), 10, e))
elif isinstance(value, Real):
return tuple(value)
- elif isinstance(value, str): # handle infinite literal
- try:
- return float(value)
- except ValueError:
- pass
raise error.PyAsn1Error(
'Bad real value syntax: %s' % (value,)
)
@@ -566,6 +631,12 @@ class Real(base.AbstractSimpleAsn1Item):
else:
return str(value)
+ def prettyPrint(self, scope=0):
+ if self.isInfinity():
+ return self.prettyOut(self._value)
+ else:
+ return str(float(self))
+
def isPlusInfinity(self): return self._value == self._plusInf
def isMinusInfinity(self): return self._value == self._minusInf
def isInfinity(self): return self._value in self._inf
@@ -601,8 +672,20 @@ class Real(base.AbstractSimpleAsn1Item):
else:
return float(
self._value[0] * pow(self._value[1], self._value[2])
- )
- def __abs__(self): return abs(float(self))
+ )
+ def __abs__(self): return self.clone(abs(float(self)))
+ def __pos__(self): return self.clone(+float(self))
+ def __neg__(self): return self.clone(-float(self))
+ def __round__(self, n=0):
+ r = round(float(self), n)
+ if n:
+ return self.clone(r)
+ else:
+ return r
+ def __floor__(self): return self.clone(math.floor(float(self)))
+ def __ceil__(self): return self.clone(math.ceil(float(self)))
+ if sys.version_info[0:2] > (2, 5):
+ def __trunc__(self): return self.clone(math.trunc(float(self)))
def __lt__(self, value): return float(self) < value
def __le__(self, value): return float(self) <= value
@@ -636,6 +719,7 @@ class SetOf(base.AbstractConstructedAsn1Item):
tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11)
)
typeId = 1
+ strictConstraints = False
def _cloneComponentValues(self, myClone, cloneValueFlag):
idx = 0; l = len(self._componentValues)
@@ -651,9 +735,14 @@ class SetOf(base.AbstractConstructedAsn1Item):
idx = idx + 1
def _verifyComponent(self, idx, value):
- if self._componentType is not None and \
- not self._componentType.isSuperTypeOf(value):
- raise error.PyAsn1Error('Component type error %s' % (value,))
+ t = self._componentType
+ if t is None:
+ return
+ if not t.isSameTypeWith(value,matchConstraints=self.strictConstraints):
+ raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, t))
+ if self.strictConstraints and \
+ not t.isSuperTypeOf(value, matchTags=False):
+ raise error.PyAsn1Error('Component value is constraints-incompatible: %r vs %r' % (value, t))
def getComponentByPosition(self, idx): return self._componentValues[idx]
def setComponentByPosition(self, idx, value=None, verifyConstraints=True):
@@ -698,6 +787,14 @@ class SetOf(base.AbstractConstructedAsn1Item):
r = r + self._componentValues[idx].prettyPrint(scope)
return r
+ def prettyPrintType(self, scope=0):
+ scope = scope + 1
+ r = '%s -> %s {\n' % (self.getTagSet(), self.__class__.__name__)
+ if self._componentType is not None:
+ r = r + ' '*scope
+ r = r + self._componentType.prettyPrintType(scope)
+ return r + '\n' + ' '*(scope-1) + '}'
+
class SequenceOf(SetOf):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10)
@@ -706,15 +803,15 @@ class SequenceOf(SetOf):
class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
componentType = namedtype.NamedTypes()
+ strictConstraints = False
def __init__(self, componentType=None, tagSet=None,
subtypeSpec=None, sizeSpec=None):
+ if componentType is None:
+ componentType = self.componentType
base.AbstractConstructedAsn1Item.__init__(
- self, componentType, tagSet, subtypeSpec, sizeSpec
- )
- if self._componentType is None:
- self._componentTypeLen = 0
- else:
- self._componentTypeLen = len(self._componentType)
+ self, componentType.clone(), tagSet, subtypeSpec, sizeSpec
+ )
+ self._componentTypeLen = len(self._componentType)
def __getitem__(self, idx):
if isinstance(idx, str):
@@ -747,8 +844,11 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
'Component type error out of range'
)
t = self._componentType[idx].getType()
- if not t.isSuperTypeOf(value):
- raise error.PyAsn1Error('Component type error %r vs %r' % (t, value))
+ if not t.isSameTypeWith(value,matchConstraints=self.strictConstraints):
+ raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, t))
+ if self.strictConstraints and \
+ not t.isSuperTypeOf(value, matchTags=False):
+ raise error.PyAsn1Error('Component value is constraints-incompatible: %r vs %r' % (value, t))
def getComponentByName(self, name):
return self.getComponentByPosition(
@@ -756,9 +856,8 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
)
def setComponentByName(self, name, value=None, verifyConstraints=True):
return self.setComponentByPosition(
- self._componentType.getPositionByName(name), value,
- verifyConstraints
- )
+ self._componentType.getPositionByName(name),value,verifyConstraints
+ )
def getComponentByPosition(self, idx):
try:
@@ -767,7 +866,11 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
if idx < self._componentTypeLen:
return
raise
- def setComponentByPosition(self, idx, value=None, verifyConstraints=True):
+ def setComponentByPosition(self, idx, value=None,
+ verifyConstraints=True,
+ exactTypes=False,
+ matchTags=True,
+ matchConstraints=True):
l = len(self._componentValues)
if idx >= l:
self._componentValues = self._componentValues + (idx-l+1)*[None]
@@ -834,6 +937,17 @@ class SequenceAndSetBase(base.AbstractConstructedAsn1Item):
)
return r
+ def prettyPrintType(self, scope=0):
+ scope = scope + 1
+ r = '%s -> %s {\n' % (self.getTagSet(), self.__class__.__name__)
+ for idx in range(len(self.componentType)):
+ r = r + ' '*scope
+ r = r + '"%s"' % self.componentType.getNameByPosition(idx)
+ r = '%s = %s\n' % (
+ r, self._componentType.getTypeByPosition(idx).prettyPrintType(scope)
+ )
+ return r + '\n' + ' '*(scope-1) + '}'
+
class Sequence(SequenceAndSetBase):
tagSet = baseTagSet = tag.initTagSet(
tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10)
@@ -877,16 +991,16 @@ class Set(SequenceAndSetBase):
if t.getTagSet():
return self.setComponentByPosition(
idx, value, verifyConstraints
- )
+ )
else:
t = self.setComponentByPosition(idx).getComponentByPosition(idx)
return t.setComponentByType(
tagSet, value, innerFlag, verifyConstraints
- )
+ )
else: # set outer component by inner tagSet
return self.setComponentByPosition(
idx, value, verifyConstraints
- )
+ )
def getComponentTagMap(self):
if self._componentType:
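
A short sketch of the arithmetic behaviour these univ.py hunks introduce: unary operators on Integer/Real now return pyasn1 clones, and Real.prettyPrint() renders the float form:

from pyasn1.type import univ

n = univ.Integer(7)
abs(-n)                        # Integer(7), still a pyasn1 object rather than a plain int
univ.Real(3.14).prettyPrint()  # '3.14' instead of the raw (mantissa, base, exponent) tuple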
diff --git a/libs/pyasn1/type/useful.py b/libs/pyasn1/type/useful.py
index a7139c2..1766534 100644
--- a/libs/pyasn1/type/useful.py
+++ b/libs/pyasn1/type/useful.py
@@ -1,6 +1,11 @@
# ASN.1 "useful" types
from pyasn1.type import char, tag
+class ObjectDescriptor(char.GraphicString):
+ tagSet = char.GraphicString.tagSet.tagImplicitly(
+ tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 7)
+ )
+
class GeneralizedTime(char.VisibleString):
tagSet = char.VisibleString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 24)
diff --git a/libs/pynma/__init__.py b/libs/pynma/__init__.py
index f90424e..a75b428 100644
--- a/libs/pynma/__init__.py
+++ b/libs/pynma/__init__.py
@@ -1,4 +1,4 @@
#!/usr/bin/python
-from pynma import PyNMA
+from .pynma import PyNMA
diff --git a/libs/pynma/pynma.py b/libs/pynma/pynma.py
index 5a1b7eb..4ce1e49 100644
--- a/libs/pynma/pynma.py
+++ b/libs/pynma/pynma.py
@@ -1,12 +1,20 @@
#!/usr/bin/python
from xml.dom.minidom import parseString
-from httplib import HTTPSConnection
-from urllib import urlencode
-__version__ = "0.1"
+try:
+ from http.client import HTTPSConnection
+except ImportError:
+ from httplib import HTTPSConnection
-API_SERVER = 'nma.usk.bz'
+try:
+ from urllib.parse import urlencode
+except ImportError:
+ from urllib import urlencode
+
+__version__ = "1.0"
+
+API_SERVER = 'www.notifymyandroid.com'
ADD_PATH = '/publicapi/notify'
USER_AGENT="PyNMA/v%s"%__version__
@@ -18,7 +26,7 @@ def uniq_preserve(seq): # Dave Kirby
def uniq(seq):
# Not order preserving
- return {}.fromkeys(seq).keys()
+ return list({}.fromkeys(seq).keys())
class PyNMA(object):
"""PyNMA(apikey=[], developerkey=None)
@@ -60,16 +68,17 @@ takes 2 optional arguments:
if type(developerkey) == str and len(developerkey) == 48:
self._developerkey = developerkey
- def push(self, application="", event="", description="", url="", priority=0, batch_mode=False):
+ def push(self, application="", event="", description="", url="", contenttype=None, priority=0, batch_mode=False, html=False):
"""Pushes a message on the registered API keys.
takes 5 arguments:
- (req) application: application name [256]
- (req) event: event name [1000]
- (req) description: description [10000]
- (opt) url: url [512]
+ - (opt) contenttype: Content Type (act: None (plain text) or text/html)
- (opt) priority: from -2 (lowest) to 2 (highest) (def:0)
- - (opt) batch_mode: call API 5 by 5 (def:False)
-
+ - (opt) batch_mode: push to all keys at once (def:False)
+ - (opt) html: shortcut for contenttype=text/html
Warning: using batch_mode will return error only if all API keys are bad
cf: http://nma.usk.bz/api.php
"""
@@ -83,6 +92,9 @@ Warning: using batch_mode will return error only if all API keys are bad
if url:
datas['url'] = url[:512]
+ if contenttype == "text/html" or html == True: # Currently only accepted content type
+ datas['content-type'] = "text/html"
+
if self._developerkey:
datas['developerkey'] = self._developerkey
@@ -94,12 +106,11 @@ Warning: using batch_mode will return error only if all API keys are bad
res = self.callapi('POST', ADD_PATH, datas)
results[key] = res
else:
- for i in range(0, len(self._apikey), 5):
- datas['apikey'] = ",".join(self._apikey[i:i+5])
- res = self.callapi('POST', ADD_PATH, datas)
- results[datas['apikey']] = res
+ datas['apikey'] = ",".join(self._apikey)
+ res = self.callapi('POST', ADD_PATH, datas)
+ results[datas['apikey']] = res
return results
-
+
def callapi(self, method, path, args):
headers = { 'User-Agent': USER_AGENT }
if method == "POST":
@@ -110,13 +121,13 @@ Warning: using batch_mode will return error only if all API keys are bad
try:
res = self._parse_reponse(resp.read())
- except Exception, e:
+ except Exception as e:
res = {'type': "pynmaerror",
'code': 600,
'message': str(e)
- }
+ }
pass
-
+
return res
def _parse_reponse(self, response):
@@ -124,12 +135,12 @@ Warning: using batch_mode will return error only if all API keys are bad
for elem in root.childNodes:
if elem.nodeType == elem.TEXT_NODE: continue
if elem.tagName == 'success':
- res = dict(elem.attributes.items())
+ res = dict(list(elem.attributes.items()))
res['message'] = ""
res['type'] = elem.tagName
return res
if elem.tagName == 'error':
- res = dict(elem.attributes.items())
+ res = dict(list(elem.attributes.items()))
res['message'] = elem.firstChild.nodeValue
res['type'] = elem.tagName
return res
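
Hypothetical use of the updated PyNMA.push() signature shown above (the API key is a placeholder):

from pynma import PyNMA

nma = PyNMA('0123456789abcdef0123456789abcdef0123456789abcdef')  # 48-char key (placeholder)
nma.push(application='CouchPotato',
         event='Download complete',
         description='<b>Movie</b> is ready',
         html=True,         # shortcut for contenttype='text/html'
         batch_mode=True)   # now pushes to all registered keys in one call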
diff --git a/libs/requests/__init__.py b/libs/requests/__init__.py
index ac2b06c..d247128 100644
--- a/libs/requests/__init__.py
+++ b/libs/requests/__init__.py
@@ -6,7 +6,7 @@
# /
"""
-requests HTTP library
+Requests HTTP library
~~~~~~~~~~~~~~~~~~~~~
Requests is an HTTP library, written in Python, for human beings. Basic GET
@@ -36,17 +36,17 @@ usage:
The other HTTP methods are supported - see `requests.api`. Full documentation
is at <http://python-requests.org>.
-:copyright: (c) 2014 by Kenneth Reitz.
+:copyright: (c) 2015 by Kenneth Reitz.
:license: Apache 2.0, see LICENSE for more details.
"""
__title__ = 'requests'
-__version__ = '2.5.1'
-__build__ = 0x020501
+__version__ = '2.7.0'
+__build__ = 0x020700
__author__ = 'Kenneth Reitz'
__license__ = 'Apache 2.0'
-__copyright__ = 'Copyright 2014 Kenneth Reitz'
+__copyright__ = 'Copyright 2015 Kenneth Reitz'
# Attempt to enable urllib3's SNI support, if possible
try:
diff --git a/libs/requests/adapters.py b/libs/requests/adapters.py
index c892853..cdc5744 100644
--- a/libs/requests/adapters.py
+++ b/libs/requests/adapters.py
@@ -11,10 +11,10 @@ and maintain connections.
import socket
from .models import Response
-from .packages.urllib3 import Retry
from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.util import Timeout as TimeoutSauce
+from .packages.urllib3.util.retry import Retry
from .compat import urlparse, basestring
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
prepend_scheme_if_needed, get_auth_from_url, urldefragauth)
@@ -35,6 +35,7 @@ from .auth import _basic_auth_str
DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0
+DEFAULT_POOL_TIMEOUT = None
class BaseAdapter(object):
@@ -326,8 +327,8 @@ class HTTPAdapter(BaseAdapter):
:param request: The :class:`PreparedRequest ` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
- data before giving up, as a float, or a (`connect timeout, read
- timeout `_) tuple.
+ data before giving up, as a float, or a :ref:`(connect timeout,
+ read timeout) ` tuple.
:type timeout: float or tuple
:param verify: (optional) Whether to verify SSL certificates.
:param cert: (optional) Any user-provided SSL certificate to be trusted.
@@ -375,7 +376,7 @@ class HTTPAdapter(BaseAdapter):
if hasattr(conn, 'proxy_pool'):
conn = conn.proxy_pool
- low_conn = conn._get_conn(timeout=timeout)
+ low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
try:
low_conn.putrequest(request.method,
@@ -407,9 +408,6 @@ class HTTPAdapter(BaseAdapter):
# Then, reraise so that we can handle the actual exception.
low_conn.close()
raise
- else:
- # All is well, return the connection to the pool.
- conn._put_conn(low_conn)
except (ProtocolError, socket.error) as err:
raise ConnectionError(err, request=request)
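
The (connect timeout, read timeout) tuple the adapter docstring now points at, in ordinary Requests usage (URL is illustrative):

import requests

# wait at most ~3s to establish the connection and 27s for the server to start responding
requests.get('https://example.org', timeout=(3.05, 27))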
diff --git a/libs/requests/api.py b/libs/requests/api.py
index 1469b05..72a777b 100644
--- a/libs/requests/api.py
+++ b/libs/requests/api.py
@@ -16,7 +16,6 @@ from . import sessions
def request(method, url, **kwargs):
"""Constructs and sends a :class:`Request `.
- Returns :class:`Response ` object.
:param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object.
@@ -28,8 +27,8 @@ def request(method, url, **kwargs):
:param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': ('filename', fileobj)}``) for multipart encoding upload.
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) How long to wait for the server to send data
- before giving up, as a float, or a (`connect timeout, read timeout
- `_) tuple.
+ before giving up, as a float, or a :ref:`(connect timeout, read
+ timeout) ` tuple.
:type timeout: float or tuple
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
:type allow_redirects: bool
@@ -37,6 +36,8 @@ def request(method, url, **kwargs):
:param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
:param stream: (optional) if ``False``, the response content will be immediately downloaded.
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
+ :return: :class:`Response ` object
+ :rtype: requests.Response
Usage::
@@ -54,22 +55,27 @@ def request(method, url, **kwargs):
return response
-def get(url, **kwargs):
- """Sends a GET request. Returns :class:`Response` object.
+def get(url, params=None, **kwargs):
+ """Sends a GET request.
:param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response ` object
+ :rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', True)
- return request('get', url, **kwargs)
+ return request('get', url, params=params, **kwargs)
def options(url, **kwargs):
- """Sends a OPTIONS request. Returns :class:`Response` object.
+ """Sends a OPTIONS request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response ` object
+ :rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', True)
@@ -77,10 +83,12 @@ def options(url, **kwargs):
def head(url, **kwargs):
- """Sends a HEAD request. Returns :class:`Response` object.
+ """Sends a HEAD request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response ` object
+ :rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', False)
@@ -88,44 +96,52 @@ def head(url, **kwargs):
def post(url, data=None, json=None, **kwargs):
- """Sends a POST request. Returns :class:`Response` object.
+ """Sends a POST request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response ` object
+ :rtype: requests.Response
"""
return request('post', url, data=data, json=json, **kwargs)
def put(url, data=None, **kwargs):
- """Sends a PUT request. Returns :class:`Response` object.
+ """Sends a PUT request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response ` object
+ :rtype: requests.Response
"""
return request('put', url, data=data, **kwargs)
def patch(url, data=None, **kwargs):
- """Sends a PATCH request. Returns :class:`Response` object.
+ """Sends a PATCH request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response ` object
+ :rtype: requests.Response
"""
return request('patch', url, data=data, **kwargs)
def delete(url, **kwargs):
- """Sends a DELETE request. Returns :class:`Response` object.
+ """Sends a DELETE request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
+ :return: :class:`Response ` object
+ :rtype: requests.Response
"""
return request('delete', url, **kwargs)
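
The new explicit params argument on requests.get() in action (endpoint is illustrative):

import requests

r = requests.get('https://httpbin.org/get', params={'q': 'couchpotato'})
r.url           # 'https://httpbin.org/get?q=couchpotato'
r.status_code   # 200 when the service is reachable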
diff --git a/libs/requests/auth.py b/libs/requests/auth.py
index b950181..03c3302 100644
--- a/libs/requests/auth.py
+++ b/libs/requests/auth.py
@@ -103,7 +103,8 @@ class HTTPDigestAuth(AuthBase):
# XXX not implemented yet
entdig = None
p_parsed = urlparse(url)
- path = p_parsed.path
+ #: path is request-uri defined in RFC 2616 which should not be empty
+ path = p_parsed.path or "/"
if p_parsed.query:
path += '?' + p_parsed.query
@@ -124,13 +125,15 @@ class HTTPDigestAuth(AuthBase):
s += os.urandom(8)
cnonce = (hashlib.sha1(s).hexdigest()[:16])
- noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, HA2)
if _algorithm == 'MD5-SESS':
HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
if qop is None:
respdig = KD(HA1, "%s:%s" % (nonce, HA2))
elif qop == 'auth' or 'auth' in qop.split(','):
+ noncebit = "%s:%s:%s:%s:%s" % (
+ nonce, ncvalue, cnonce, 'auth', HA2
+ )
respdig = KD(HA1, noncebit)
else:
# XXX handle auth-int.
@@ -176,7 +179,7 @@ class HTTPDigestAuth(AuthBase):
# Consume content and release the original connection
# to allow our new request to reuse the same one.
r.content
- r.raw.release_conn()
+ r.close()
prep = r.request.copy()
extract_cookies_to_jar(prep._cookies, r.request, r.raw)
prep.prepare_cookies(prep._cookies)
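
Digest authentication as exercised by the code above, in standard Requests form (credentials and URL are illustrative):

import requests
from requests.auth import HTTPDigestAuth

requests.get('https://httpbin.org/digest-auth/auth/user/pass',
             auth=HTTPDigestAuth('user', 'pass'))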
diff --git a/libs/requests/compat.py b/libs/requests/compat.py
index a294a32..2d8ef02 100644
--- a/libs/requests/compat.py
+++ b/libs/requests/compat.py
@@ -21,58 +21,6 @@ is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
-#: Python 3.0.x
-is_py30 = (is_py3 and _ver[1] == 0)
-
-#: Python 3.1.x
-is_py31 = (is_py3 and _ver[1] == 1)
-
-#: Python 3.2.x
-is_py32 = (is_py3 and _ver[1] == 2)
-
-#: Python 3.3.x
-is_py33 = (is_py3 and _ver[1] == 3)
-
-#: Python 3.4.x
-is_py34 = (is_py3 and _ver[1] == 4)
-
-#: Python 2.7.x
-is_py27 = (is_py2 and _ver[1] == 7)
-
-#: Python 2.6.x
-is_py26 = (is_py2 and _ver[1] == 6)
-
-#: Python 2.5.x
-is_py25 = (is_py2 and _ver[1] == 5)
-
-#: Python 2.4.x
-is_py24 = (is_py2 and _ver[1] == 4) # I'm assuming this is not by choice.
-
-
-# ---------
-# Platforms
-# ---------
-
-
-# Syntax sugar.
-_ver = sys.version.lower()
-
-is_pypy = ('pypy' in _ver)
-is_jython = ('jython' in _ver)
-is_ironpython = ('iron' in _ver)
-
-# Assume CPython, if nothing else.
-is_cpython = not any((is_pypy, is_jython, is_ironpython))
-
-# Windows-based system.
-is_windows = 'win32' in str(sys.platform).lower()
-
-# Standard Linux 2+ system.
-is_linux = ('linux' in str(sys.platform).lower())
-is_osx = ('darwin' in str(sys.platform).lower())
-is_hpux = ('hpux' in str(sys.platform).lower()) # Complete guess.
-is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess.
-
try:
import simplejson as json
except (ImportError, SyntaxError):
@@ -99,7 +47,6 @@ if is_py2:
basestring = basestring
numeric_types = (int, long, float)
-
elif is_py3:
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
from urllib.request import parse_http_list, getproxies, proxy_bypass
diff --git a/libs/requests/cookies.py b/libs/requests/cookies.py
index 831c49c..88b478c 100644
--- a/libs/requests/cookies.py
+++ b/libs/requests/cookies.py
@@ -6,6 +6,7 @@ Compatibility code to be able to use `cookielib.CookieJar` with requests.
requests.utils imports from here, so be careful with imports.
"""
+import copy
import time
import collections
from .compat import cookielib, urlparse, urlunparse, Morsel
@@ -157,26 +158,28 @@ class CookieConflictError(RuntimeError):
class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
- """Compatibility class; is a cookielib.CookieJar, but exposes a dict interface.
+ """Compatibility class; is a cookielib.CookieJar, but exposes a dict
+ interface.
This is the CookieJar we create by default for requests and sessions that
don't specify one, since some clients may expect response.cookies and
session.cookies to support dict operations.
- Don't use the dict interface internally; it's just for compatibility with
- with external client code. All `requests` code should work out of the box
- with externally provided instances of CookieJar, e.g., LWPCookieJar and
- FileCookieJar.
-
- Caution: dictionary operations that are normally O(1) may be O(n).
+ Requests does not use the dict interface internally; it's just for
+ compatibility with external client code. All requests code should work
+ out of the box with externally provided instances of ``CookieJar``, e.g.
+ ``LWPCookieJar`` and ``FileCookieJar``.
Unlike a regular CookieJar, this class is pickleable.
- """
+ .. warning:: dictionary operations that are normally O(1) may be O(n).
+ """
def get(self, name, default=None, domain=None, path=None):
"""Dict-like get() that also supports optional domain and path args in
order to resolve naming collisions from using one cookie jar over
- multiple domains. Caution: operation is O(n), not O(1)."""
+ multiple domains.
+
+ .. warning:: operation is O(n), not O(1)."""
try:
return self._find_no_duplicates(name, domain, path)
except KeyError:
@@ -199,37 +202,38 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
return c
def iterkeys(self):
- """Dict-like iterkeys() that returns an iterator of names of cookies from the jar.
- See itervalues() and iteritems()."""
+ """Dict-like iterkeys() that returns an iterator of names of cookies
+ from the jar. See itervalues() and iteritems()."""
for cookie in iter(self):
yield cookie.name
def keys(self):
- """Dict-like keys() that returns a list of names of cookies from the jar.
- See values() and items()."""
+ """Dict-like keys() that returns a list of names of cookies from the
+ jar. See values() and items()."""
return list(self.iterkeys())
def itervalues(self):
- """Dict-like itervalues() that returns an iterator of values of cookies from the jar.
- See iterkeys() and iteritems()."""
+ """Dict-like itervalues() that returns an iterator of values of cookies
+ from the jar. See iterkeys() and iteritems()."""
for cookie in iter(self):
yield cookie.value
def values(self):
- """Dict-like values() that returns a list of values of cookies from the jar.
- See keys() and items()."""
+ """Dict-like values() that returns a list of values of cookies from the
+ jar. See keys() and items()."""
return list(self.itervalues())
def iteritems(self):
- """Dict-like iteritems() that returns an iterator of name-value tuples from the jar.
- See iterkeys() and itervalues()."""
+ """Dict-like iteritems() that returns an iterator of name-value tuples
+ from the jar. See iterkeys() and itervalues()."""
for cookie in iter(self):
yield cookie.name, cookie.value
def items(self):
- """Dict-like items() that returns a list of name-value tuples from the jar.
- See keys() and values(). Allows client-code to call "dict(RequestsCookieJar)
- and get a vanilla python dict of key value pairs."""
+ """Dict-like items() that returns a list of name-value tuples from the
+ jar. See keys() and values(). Allows client-code to call
+ ``dict(RequestsCookieJar)`` and get a vanilla python dict of key value
+ pairs."""
return list(self.iteritems())
def list_domains(self):
@@ -259,8 +263,9 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
return False # there is only one domain in jar
def get_dict(self, domain=None, path=None):
- """Takes as an argument an optional domain and path and returns a plain old
- Python dict of name-value pairs of cookies that meet the requirements."""
+ """Takes as an argument an optional domain and path and returns a plain
+ old Python dict of name-value pairs of cookies that meet the
+ requirements."""
dictionary = {}
for cookie in iter(self):
if (domain is None or cookie.domain == domain) and (path is None
@@ -269,21 +274,24 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
return dictionary
def __getitem__(self, name):
- """Dict-like __getitem__() for compatibility with client code. Throws exception
- if there are more than one cookie with name. In that case, use the more
- explicit get() method instead. Caution: operation is O(n), not O(1)."""
+ """Dict-like __getitem__() for compatibility with client code. Throws
+ exception if there are more than one cookie with name. In that case,
+ use the more explicit get() method instead.
+
+ .. warning:: operation is O(n), not O(1)."""
return self._find_no_duplicates(name)
def __setitem__(self, name, value):
- """Dict-like __setitem__ for compatibility with client code. Throws exception
- if there is already a cookie of that name in the jar. In that case, use the more
- explicit set() method instead."""
+ """Dict-like __setitem__ for compatibility with client code. Throws
+ exception if there is already a cookie of that name in the jar. In that
+ case, use the more explicit set() method instead."""
self.set(name, value)
def __delitem__(self, name):
- """Deletes a cookie given a name. Wraps cookielib.CookieJar's remove_cookie_by_name()."""
+ """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
+ ``remove_cookie_by_name()``."""
remove_cookie_by_name(self, name)
def set_cookie(self, cookie, *args, **kwargs):
@@ -295,15 +303,16 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
"""Updates this jar with cookies from another CookieJar or dict-like"""
if isinstance(other, cookielib.CookieJar):
for cookie in other:
- self.set_cookie(cookie)
+ self.set_cookie(copy.copy(cookie))
else:
super(RequestsCookieJar, self).update(other)
def _find(self, name, domain=None, path=None):
- """Requests uses this method internally to get cookie values. Takes as args name
- and optional domain and path. Returns a cookie.value. If there are conflicting cookies,
- _find arbitrarily chooses one. See _find_no_duplicates if you want an exception thrown
- if there are conflicting cookies."""
+ """Requests uses this method internally to get cookie values. Takes as
+ args name and optional domain and path. Returns a cookie.value. If
+ there are conflicting cookies, _find arbitrarily chooses one. See
+ _find_no_duplicates if you want an exception thrown if there are
+ conflicting cookies."""
for cookie in iter(self):
if cookie.name == name:
if domain is None or cookie.domain == domain:
@@ -313,10 +322,11 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
def _find_no_duplicates(self, name, domain=None, path=None):
- """__get_item__ and get call _find_no_duplicates -- never used in Requests internally.
- Takes as args name and optional domain and path. Returns a cookie.value.
- Throws KeyError if cookie is not found and CookieConflictError if there are
- multiple cookies that match name and optionally domain and path."""
+ """Both ``__get_item__`` and ``get`` call this function: it's never
+ used elsewhere in Requests. Takes as args name and optional domain and
+ path. Returns a cookie.value. Throws KeyError if cookie is not found
+ and CookieConflictError if there are multiple cookies that match name
+ and optionally domain and path."""
toReturn = None
for cookie in iter(self):
if cookie.name == name:
@@ -350,6 +360,21 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
return new_cj
+def _copy_cookie_jar(jar):
+ if jar is None:
+ return None
+
+ if hasattr(jar, 'copy'):
+        # We're dealing with an instance of RequestsCookieJar
+ return jar.copy()
+ # We're dealing with a generic CookieJar instance
+ new_jar = copy.copy(jar)
+ new_jar.clear()
+ for cookie in jar:
+ new_jar.set_cookie(copy.copy(cookie))
+ return new_jar
+
+
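The new ``_copy_cookie_jar`` helper exists so that copying a prepared request also detaches its cookie state. A minimal sketch of the effect, assuming the vendored ``requests`` package is importable and using httpbin.org purely as an illustrative URL:

    import requests

    original = requests.Request('GET', 'http://httpbin.org/cookies',
                                cookies={'a': '1'}).prepare()
    clone = original.copy()                  # copy() now goes through _copy_cookie_jar()
    clone._cookies.set('b', '2')             # mutate only the clone's jar
    print(sorted(clone._cookies.keys()))     # ['a', 'b']
    print(sorted(original._cookies.keys()))  # ['a'] -- the original jar is untouched
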
def create_cookie(name, value, **kwargs):
"""Make a cookie from underspecified parameters.
@@ -390,11 +415,14 @@ def morsel_to_cookie(morsel):
expires = None
if morsel['max-age']:
- expires = time.time() + morsel['max-age']
+ try:
+ expires = int(time.time() + int(morsel['max-age']))
+ except ValueError:
+ raise TypeError('max-age: %s must be integer' % morsel['max-age'])
elif morsel['expires']:
time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
- expires = time.mktime(
- time.strptime(morsel['expires'], time_template)) - time.timezone
+ expires = int(time.mktime(
+ time.strptime(morsel['expires'], time_template)) - time.timezone)
return create_cookie(
comment=morsel['comment'],
comment_url=bool(morsel['comment']),
@@ -440,7 +468,7 @@ def merge_cookies(cookiejar, cookies):
"""
if not isinstance(cookiejar, cookielib.CookieJar):
raise ValueError('You can only merge into CookieJar')
-
+
if isinstance(cookies, dict):
cookiejar = cookiejar_from_dict(
cookies, cookiejar=cookiejar, overwrite=False)
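The ``morsel_to_cookie`` change above coerces ``max-age`` to an integer and raises ``TypeError`` for non-numeric values. A small sketch of both paths, assuming the standard-library ``SimpleCookie`` as the source of the morsel:

    try:
        from http.cookies import SimpleCookie   # Python 3
    except ImportError:
        from Cookie import SimpleCookie         # Python 2
    from requests.cookies import morsel_to_cookie

    c = SimpleCookie()
    c['session'] = 'abc123'
    c['session']['max-age'] = '3600'
    cookie = morsel_to_cookie(c['session'])
    print(type(cookie.expires))              # int: time.time() + 3600, truncated

    c['session']['max-age'] = 'never'
    # morsel_to_cookie(c['session'])         # raises TypeError: max-age: never must be integer
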
diff --git a/libs/requests/models.py b/libs/requests/models.py
index b728c84..4270c64 100644
--- a/libs/requests/models.py
+++ b/libs/requests/models.py
@@ -15,7 +15,7 @@ from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
-from .cookies import cookiejar_from_dict, get_cookie_header
+from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .packages.urllib3.fields import RequestField
from .packages.urllib3.filepost import encode_multipart_formdata
from .packages.urllib3.util import parse_url
@@ -30,7 +30,8 @@ from .utils import (
iter_slices, guess_json_utf, super_len, to_native_string)
from .compat import (
cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
- is_py2, chardet, json, builtin_str, basestring)
+ is_py2, chardet, builtin_str, basestring)
+from .compat import json as complexjson
from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically
@@ -42,12 +43,11 @@ REDIRECT_STATI = (
codes.temporary_redirect, # 307
codes.permanent_redirect, # 308
)
+
DEFAULT_REDIRECT_LIMIT = 30
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512
-json_dumps = json.dumps
-
class RequestEncodingMixin(object):
@property
@@ -143,13 +143,13 @@ class RequestEncodingMixin(object):
else:
fn = guess_filename(v) or k
fp = v
- if isinstance(fp, str):
- fp = StringIO(fp)
- if isinstance(fp, bytes):
- fp = BytesIO(fp)
- rf = RequestField(name=k, data=fp.read(),
- filename=fn, headers=fh)
+ if isinstance(fp, (str, bytes, bytearray)):
+ fdata = fp
+ else:
+ fdata = fp.read()
+
+ rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
rf.make_multipart(content_type=ft)
new_fields.append(rf)
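With the multipart-encoding change above, a file body given as ``str``/``bytes`` is used directly instead of being wrapped in a ``StringIO``/``BytesIO``. A hedged usage sketch (httpbin.org is only an illustrative endpoint):

    import requests

    files = {
        # bytes body, no file object needed
        'report': ('report.csv', b'id,value\n1,42\n', 'text/csv'),
    }
    resp = requests.post('http://httpbin.org/post', files=files)
    print(resp.status_code)
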
@@ -206,17 +206,8 @@ class Request(RequestHooksMixin):
"""
- def __init__(self,
- method=None,
- url=None,
- headers=None,
- files=None,
- data=None,
- params=None,
- auth=None,
- cookies=None,
- hooks=None,
- json=None):
+ def __init__(self, method=None, url=None, headers=None, files=None,
+ data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
# Default empty dicts for dict params.
data = [] if data is None else data
@@ -295,8 +286,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.hooks = default_hooks()
def prepare(self, method=None, url=None, headers=None, files=None,
- data=None, params=None, auth=None, cookies=None, hooks=None,
- json=None):
+ data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
"""Prepares the entire request with the given parameters."""
self.prepare_method(method)
@@ -305,6 +295,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.prepare_cookies(cookies)
self.prepare_body(data, files, json)
self.prepare_auth(auth, url)
+
# Note that prepare_auth must be last to enable authentication schemes
# such as OAuth to work on a fully prepared request.
@@ -319,7 +310,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
p.method = self.method
p.url = self.url
p.headers = self.headers.copy() if self.headers is not None else None
- p._cookies = self._cookies.copy() if self._cookies is not None else None
+ p._cookies = _copy_cookie_jar(self._cookies)
p.body = self.body
p.hooks = self.hooks
return p
@@ -356,8 +347,10 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
raise InvalidURL(*e.args)
if not scheme:
- raise MissingSchema("Invalid URL {0!r}: No schema supplied. "
- "Perhaps you meant http://{0}?".format(url))
+ error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
+ error = error.format(to_native_string(url, 'utf8'))
+
+ raise MissingSchema(error)
if not host:
raise InvalidURL("Invalid URL %r: No host supplied" % url)
@@ -423,7 +416,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
if json is not None:
content_type = 'application/json'
- body = json_dumps(json)
+ body = complexjson.dumps(json)
is_stream = all([
hasattr(data, '__iter__'),
@@ -500,7 +493,15 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.prepare_content_length(self.body)
def prepare_cookies(self, cookies):
- """Prepares the given HTTP cookie data."""
+ """Prepares the given HTTP cookie data.
+
+ This function eventually generates a ``Cookie`` header from the
+ given cookies using cookielib. Due to cookielib's design, the header
+ will not be regenerated if it already exists, meaning this function
+ can only be called once for the life of the
+        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
+ to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
+ header is removed beforehand."""
if isinstance(cookies, cookielib.CookieJar):
self._cookies = cookies
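A sketch of the once-only behaviour described in the new docstring, assuming the vendored ``requests`` package; the second ``prepare_cookies`` call is a no-op until the existing header is deleted:

    import requests

    prepared = requests.Request('GET', 'http://httpbin.org/cookies',
                                cookies={'a': '1'}).prepare()
    print(prepared.headers.get('Cookie'))    # 'a=1'

    prepared.prepare_cookies({'b': '2'})     # header already present: no effect
    print(prepared.headers.get('Cookie'))    # still 'a=1'

    del prepared.headers['Cookie']
    prepared.prepare_cookies({'b': '2'})     # header is regenerated now
    print(prepared.headers.get('Cookie'))    # 'b=2'
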
@@ -513,6 +514,10 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
def prepare_hooks(self, hooks):
"""Prepares the given hooks."""
+ # hooks can be passed as None to the prepare method and to this
+ # method. To prevent iterating over None, simply use an empty list
+ # if hooks is False-y
+ hooks = hooks or []
for event in hooks:
self.register_hook(event, hooks[event])
@@ -523,16 +528,8 @@ class Response(object):
"""
__attrs__ = [
- '_content',
- 'status_code',
- 'headers',
- 'url',
- 'history',
- 'encoding',
- 'reason',
- 'cookies',
- 'elapsed',
- 'request',
+ '_content', 'status_code', 'headers', 'url', 'history',
+ 'encoding', 'reason', 'cookies', 'elapsed', 'request'
]
def __init__(self):
@@ -572,7 +569,11 @@ class Response(object):
self.cookies = cookiejar_from_dict({})
#: The amount of time elapsed between sending the request
- #: and the arrival of the response (as a timedelta)
+ #: and the arrival of the response (as a timedelta).
+ #: This property specifically measures the time taken between sending
+ #: the first byte of the request and finishing parsing the headers. It
+ #: is therefore unaffected by consuming the response content or the
+ #: value of the ``stream`` keyword argument.
self.elapsed = datetime.timedelta(0)
     #: The :class:`PreparedRequest <PreparedRequest>` object to which this
@@ -648,9 +649,10 @@ class Response(object):
If decode_unicode is True, content will be decoded using the best
available encoding based on the response.
"""
+
def generate():
- try:
- # Special case for urllib3.
+ # Special case for urllib3.
+ if hasattr(self.raw, 'stream'):
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
@@ -660,7 +662,7 @@ class Response(object):
raise ContentDecodingError(e)
except ReadTimeoutError as e:
raise ConnectionError(e)
- except AttributeError:
+ else:
# Standard file-like object.
while True:
chunk = self.raw.read(chunk_size)
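The rewritten ``generate()`` prefers urllib3's ``stream()`` when the raw object has one and only falls back to plain ``read()`` otherwise. A minimal streaming sketch (illustrative URL, chunk size chosen arbitrarily):

    import requests

    resp = requests.get('http://httpbin.org/stream-bytes/1024', stream=True)
    for chunk in resp.iter_content(chunk_size=256):
        print(len(chunk))
    resp.close()
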
@@ -688,6 +690,8 @@ class Response(object):
"""Iterates over the response data, one line at a time. When
stream=True is set on the request, this avoids reading the
content at once into memory for large responses.
+
+ .. note:: This method is not reentrant safe.
"""
pending = None
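The new "not reentrant safe" note refers to the shared underlying stream: two interleaved ``iter_lines()`` generators read from the same raw response, so lines can be lost or split. A sketch of the caveat, not a recommended pattern:

    import requests

    resp = requests.get('http://httpbin.org/stream/20', stream=True)
    first = resp.iter_lines()
    second = resp.iter_lines()
    print(next(first))   # consumes data from the shared raw stream
    print(next(second))  # does NOT resume where `first` left off
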
@@ -789,14 +793,16 @@ class Response(object):
encoding = guess_json_utf(self.content)
if encoding is not None:
try:
- return json.loads(self.content.decode(encoding), **kwargs)
+ return complexjson.loads(
+ self.content.decode(encoding), **kwargs
+ )
except UnicodeDecodeError:
# Wrong UTF codec detected; usually because it's not UTF-8
# but some other 8-bit codec. This is an RFC violation,
# and the server didn't bother to tell us what codec *was*
# used.
pass
- return json.loads(self.text, **kwargs)
+ return complexjson.loads(self.text, **kwargs)
@property
def links(self):
@@ -822,10 +828,10 @@ class Response(object):
http_error_msg = ''
if 400 <= self.status_code < 500:
- http_error_msg = '%s Client Error: %s' % (self.status_code, self.reason)
+ http_error_msg = '%s Client Error: %s for url: %s' % (self.status_code, self.reason, self.url)
elif 500 <= self.status_code < 600:
- http_error_msg = '%s Server Error: %s' % (self.status_code, self.reason)
+ http_error_msg = '%s Server Error: %s for url: %s' % (self.status_code, self.reason, self.url)
if http_error_msg:
raise HTTPError(http_error_msg, response=self)
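With the change above, ``raise_for_status()`` now includes the request URL in the error text, which makes failures easier to attribute in logs. A short sketch using an illustrative endpoint:

    import requests

    resp = requests.get('http://httpbin.org/status/404')
    try:
        resp.raise_for_status()
    except requests.exceptions.HTTPError as err:
        print(err)  # e.g. "404 Client Error: NOT FOUND for url: http://httpbin.org/status/404"
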
@@ -836,4 +842,7 @@ class Response(object):
*Note: Should not normally need to be called explicitly.*
"""
+ if not self._content_consumed:
+ return self.raw.close()
+
return self.raw.release_conn()
diff --git a/libs/requests/packages/urllib3/__init__.py b/libs/requests/packages/urllib3/__init__.py
index dfc82d0..f48ac4a 100644
--- a/libs/requests/packages/urllib3/__init__.py
+++ b/libs/requests/packages/urllib3/__init__.py
@@ -4,7 +4,7 @@ urllib3 - Thread-safe connection pooling and re-using.
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
-__version__ = 'dev'
+__version__ = '1.10.4'
from .connectionpool import (
@@ -55,9 +55,12 @@ def add_stderr_logger(level=logging.DEBUG):
del NullHandler
-# Set security warning to only go off once by default.
import warnings
-warnings.simplefilter('always', exceptions.SecurityWarning)
+# SecurityWarning's always go off by default.
+warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
+# InsecurePlatformWarning's don't vary between requests, so we keep it default.
+warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
+ append=True)
def disable_warnings(category=exceptions.HTTPWarning):
"""
diff --git a/libs/requests/packages/urllib3/_collections.py b/libs/requests/packages/urllib3/_collections.py
index 784342a..279416c 100644
--- a/libs/requests/packages/urllib3/_collections.py
+++ b/libs/requests/packages/urllib3/_collections.py
@@ -1,7 +1,7 @@
from collections import Mapping, MutableMapping
try:
from threading import RLock
-except ImportError: # Platform-specific: No threads available
+except ImportError: # Platform-specific: No threads available
class RLock:
def __enter__(self):
pass
@@ -10,11 +10,11 @@ except ImportError: # Platform-specific: No threads available
pass
-try: # Python 2.7+
+try: # Python 2.7+
from collections import OrderedDict
except ImportError:
from .packages.ordered_dict import OrderedDict
-from .packages.six import iterkeys, itervalues
+from .packages.six import iterkeys, itervalues, PY3
__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']
@@ -97,7 +97,14 @@ class RecentlyUsedContainer(MutableMapping):
return list(iterkeys(self._container))
-class HTTPHeaderDict(MutableMapping):
+_dict_setitem = dict.__setitem__
+_dict_getitem = dict.__getitem__
+_dict_delitem = dict.__delitem__
+_dict_contains = dict.__contains__
+_dict_setdefault = dict.setdefault
+
+
+class HTTPHeaderDict(dict):
"""
:param headers:
An iterable of field-value pairs. Must not contain multiple field names
@@ -129,25 +136,75 @@ class HTTPHeaderDict(MutableMapping):
'foo=bar, baz=quxx'
>>> headers['Content-Length']
'7'
-
- If you want to access the raw headers with their original casing
- for debugging purposes you can access the private ``._data`` attribute
- which is a normal python ``dict`` that maps the case-insensitive key to a
- list of tuples stored as (case-sensitive-original-name, value). Using the
- structure from above as our example:
-
- >>> headers._data
- {'set-cookie': [('Set-Cookie', 'foo=bar'), ('set-cookie', 'baz=quxx')],
- 'content-length': [('content-length', '7')]}
"""
def __init__(self, headers=None, **kwargs):
- self._data = {}
- if headers is None:
- headers = {}
- self.update(headers, **kwargs)
+ dict.__init__(self)
+ if headers is not None:
+ if isinstance(headers, HTTPHeaderDict):
+ self._copy_from(headers)
+ else:
+ self.extend(headers)
+ if kwargs:
+ self.extend(kwargs)
+
+ def __setitem__(self, key, val):
+ return _dict_setitem(self, key.lower(), (key, val))
+
+ def __getitem__(self, key):
+ val = _dict_getitem(self, key.lower())
+ return ', '.join(val[1:])
+
+ def __delitem__(self, key):
+ return _dict_delitem(self, key.lower())
- def add(self, key, value):
+ def __contains__(self, key):
+ return _dict_contains(self, key.lower())
+
+ def __eq__(self, other):
+ if not isinstance(other, Mapping) and not hasattr(other, 'keys'):
+ return False
+ if not isinstance(other, type(self)):
+ other = type(self)(other)
+ return dict((k1, self[k1]) for k1 in self) == dict((k2, other[k2]) for k2 in other)
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+ values = MutableMapping.values
+ get = MutableMapping.get
+ update = MutableMapping.update
+
+ if not PY3: # Python 2
+ iterkeys = MutableMapping.iterkeys
+ itervalues = MutableMapping.itervalues
+
+ __marker = object()
+
+ def pop(self, key, default=__marker):
+ '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+ If key is not found, d is returned if given, otherwise KeyError is raised.
+ '''
+ # Using the MutableMapping function directly fails due to the private marker.
+ # Using ordinary dict.pop would expose the internal structures.
+ # So let's reinvent the wheel.
+ try:
+ value = self[key]
+ except KeyError:
+ if default is self.__marker:
+ raise
+ return default
+ else:
+ del self[key]
+ return value
+
+ def discard(self, key):
+ try:
+ del self[key]
+ except KeyError:
+ pass
+
+ def add(self, key, val):
"""Adds a (name, value) pair, doesn't overwrite the value if it already
exists.
@@ -156,43 +213,111 @@ class HTTPHeaderDict(MutableMapping):
>>> headers['foo']
'bar, baz'
"""
- self._data.setdefault(key.lower(), []).append((key, value))
+ key_lower = key.lower()
+ new_vals = key, val
+ # Keep the common case aka no item present as fast as possible
+ vals = _dict_setdefault(self, key_lower, new_vals)
+ if new_vals is not vals:
+ # new_vals was not inserted, as there was a previous one
+ if isinstance(vals, list):
+ # If already several items got inserted, we have a list
+ vals.append(val)
+ else:
+ # vals should be a tuple then, i.e. only one item so far
+ # Need to convert the tuple to list for further extension
+ _dict_setitem(self, key_lower, [vals[0], vals[1], val])
+
+ def extend(self, *args, **kwargs):
+ """Generic import function for any type of header-like object.
+ Adapted version of MutableMapping.update in order to insert items
+ with self.add instead of self.__setitem__
+ """
+ if len(args) > 1:
+ raise TypeError("extend() takes at most 1 positional "
+ "arguments ({} given)".format(len(args)))
+ other = args[0] if len(args) >= 1 else ()
+
+ if isinstance(other, HTTPHeaderDict):
+ for key, val in other.iteritems():
+ self.add(key, val)
+ elif isinstance(other, Mapping):
+ for key in other:
+ self.add(key, other[key])
+ elif hasattr(other, "keys"):
+ for key in other.keys():
+ self.add(key, other[key])
+ else:
+ for key, value in other:
+ self.add(key, value)
+
+ for key, value in kwargs.items():
+ self.add(key, value)
def getlist(self, key):
"""Returns a list of all the values for the named field. Returns an
empty list if the key doesn't exist."""
- return self[key].split(', ') if key in self else []
-
- def copy(self):
- h = HTTPHeaderDict()
- for key in self._data:
- for rawkey, value in self._data[key]:
- h.add(rawkey, value)
- return h
-
- def __eq__(self, other):
- if not isinstance(other, Mapping):
- return False
- other = HTTPHeaderDict(other)
- return dict((k1, self[k1]) for k1 in self._data) == \
- dict((k2, other[k2]) for k2 in other._data)
-
- def __getitem__(self, key):
- values = self._data[key.lower()]
- return ', '.join(value[1] for value in values)
-
- def __setitem__(self, key, value):
- self._data[key.lower()] = [(key, value)]
+ try:
+ vals = _dict_getitem(self, key.lower())
+ except KeyError:
+ return []
+ else:
+ if isinstance(vals, tuple):
+ return [vals[1]]
+ else:
+ return vals[1:]
+
+ # Backwards compatibility for httplib
+ getheaders = getlist
+ getallmatchingheaders = getlist
+ iget = getlist
- def __delitem__(self, key):
- del self._data[key.lower()]
+ def __repr__(self):
+ return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
- def __len__(self):
- return len(self._data)
+ def _copy_from(self, other):
+ for key in other:
+ val = _dict_getitem(other, key)
+ if isinstance(val, list):
+ # Don't need to convert tuples
+ val = list(val)
+ _dict_setitem(self, key, val)
- def __iter__(self):
- for headers in itervalues(self._data):
- yield headers[0][0]
-
- def __repr__(self):
- return '%s(%r)' % (self.__class__.__name__, dict(self.items()))
+ def copy(self):
+ clone = type(self)()
+ clone._copy_from(self)
+ return clone
+
+ def iteritems(self):
+ """Iterate over all header lines, including duplicate ones."""
+ for key in self:
+ vals = _dict_getitem(self, key)
+ for val in vals[1:]:
+ yield vals[0], val
+
+ def itermerged(self):
+ """Iterate over all headers, merging duplicate ones together."""
+ for key in self:
+ val = _dict_getitem(self, key)
+ yield val[0], ', '.join(val[1:])
+
+ def items(self):
+ return list(self.iteritems())
+
+ @classmethod
+ def from_httplib(cls, message): # Python 2
+ """Read headers from a Python 2 httplib message object."""
+ # python2.7 does not expose a proper API for exporting multiheaders
+ # efficiently. This function re-reads raw lines from the message
+ # object and extracts the multiheaders properly.
+ headers = []
+
+ for line in message.headers:
+ if line.startswith((' ', '\t')):
+ key, value = headers[-1]
+ headers[-1] = (key, value + '\r\n' + line.rstrip())
+ continue
+
+ key, value = line.split(':', 1)
+ headers.append((key, value.strip()))
+
+ return cls(headers)
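The rewritten ``HTTPHeaderDict`` stores duplicate field values in a single dict entry: ``add()`` accumulates, ``__getitem__`` joins with ``', '``, and ``getlist()`` returns the raw values. A small sketch against the vendored module path:

    from requests.packages.urllib3._collections import HTTPHeaderDict

    headers = HTTPHeaderDict()
    headers['Content-Length'] = '7'
    headers.add('Set-Cookie', 'foo=bar')
    headers.add('set-cookie', 'baz=quxx')

    print(headers['set-cookie'])          # 'foo=bar, baz=quxx'
    print(headers.getlist('Set-Cookie'))  # ['foo=bar', 'baz=quxx']
    print('SET-COOKIE' in headers)        # True (lookups are case-insensitive)
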
diff --git a/libs/requests/packages/urllib3/connection.py b/libs/requests/packages/urllib3/connection.py
index e5de769..2a8c359 100644
--- a/libs/requests/packages/urllib3/connection.py
+++ b/libs/requests/packages/urllib3/connection.py
@@ -260,3 +260,5 @@ if ssl:
# Make a copy for testing.
UnverifiedHTTPSConnection = HTTPSConnection
HTTPSConnection = VerifiedHTTPSConnection
+else:
+ HTTPSConnection = DummyConnection
diff --git a/libs/requests/packages/urllib3/connectionpool.py b/libs/requests/packages/urllib3/connectionpool.py
index 70ee4ee..117269a 100644
--- a/libs/requests/packages/urllib3/connectionpool.py
+++ b/libs/requests/packages/urllib3/connectionpool.py
@@ -72,6 +72,21 @@ class ConnectionPool(object):
return '%s(host=%r, port=%r)' % (type(self).__name__,
self.host, self.port)
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.close()
+ # Return False to re-raise any potential exceptions
+ return False
+
+ def close():
+ """
+ Close all pooled connections and disable the pool.
+ """
+ pass
+
+
# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])
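The new ``__enter__``/``__exit__`` pair lets a pool be used as a context manager, closing its connections on exit. A minimal sketch (host and sizes are illustrative):

    from requests.packages.urllib3 import HTTPConnectionPool

    with HTTPConnectionPool('httpbin.org', port=80, maxsize=1) as pool:
        resp = pool.request('GET', '/get')
        print(resp.status)
    # leaving the with-block calls pool.close()
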
@@ -266,6 +281,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
"""
pass
+ def _prepare_proxy(self, conn):
+ # Nothing to do for HTTP connections.
+ pass
+
def _get_timeout(self, timeout):
""" Helper that always returns a :class:`urllib3.util.Timeout` """
if timeout is _Default:
@@ -349,7 +368,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# Receive the response from the server
try:
- try: # Python 2.7+, use buffering of HTTP responses
+ try: # Python 2.7, use buffering of HTTP responses
httplib_response = conn.getresponse(buffering=True)
except TypeError: # Python 2.6 and older
httplib_response = conn.getresponse()
@@ -510,11 +529,18 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
try:
# Request a connection from the queue.
+ timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
+ conn.timeout = timeout_obj.connect_timeout
+
+ is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
+ if is_new_proxy_conn:
+ self._prepare_proxy(conn)
+
# Make the request on the httplib connection object.
httplib_response = self._make_request(conn, method, url,
- timeout=timeout,
+ timeout=timeout_obj,
body=body, headers=headers)
# If we're going to release the connection in ``finally:``, then
@@ -547,6 +573,14 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
conn = None
raise SSLError(e)
+ except SSLError:
+ # Treat SSLError separately from BaseSSLError to preserve
+ # traceback.
+ if conn:
+ conn.close()
+ conn = None
+ raise
+
except (TimeoutError, HTTPException, SocketError, ConnectionError) as e:
if conn:
# Discard the connection for these exceptions. It will be
@@ -554,14 +588,13 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
conn.close()
conn = None
- stacktrace = sys.exc_info()[2]
if isinstance(e, SocketError) and self.proxy:
e = ProxyError('Cannot connect to proxy.', e)
elif isinstance(e, (SocketError, HTTPException)):
e = ProtocolError('Connection aborted.', e)
- retries = retries.increment(method, url, error=e,
- _pool=self, _stacktrace=stacktrace)
+ retries = retries.increment(method, url, error=e, _pool=self,
+ _stacktrace=sys.exc_info()[2])
retries.sleep()
# Keep track of the error for the retry warning.
@@ -673,23 +706,25 @@ class HTTPSConnectionPool(HTTPConnectionPool):
assert_fingerprint=self.assert_fingerprint)
conn.ssl_version = self.ssl_version
- if self.proxy is not None:
- # Python 2.7+
- try:
- set_tunnel = conn.set_tunnel
- except AttributeError: # Platform-specific: Python 2.6
- set_tunnel = conn._set_tunnel
+ return conn
- if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older
- set_tunnel(self.host, self.port)
- else:
- set_tunnel(self.host, self.port, self.proxy_headers)
+ def _prepare_proxy(self, conn):
+ """
+ Establish tunnel connection early, because otherwise httplib
+ would improperly set Host: header to proxy's IP:port.
+ """
+ # Python 2.7+
+ try:
+ set_tunnel = conn.set_tunnel
+ except AttributeError: # Platform-specific: Python 2.6
+ set_tunnel = conn._set_tunnel
- # Establish tunnel connection early, because otherwise httplib
- # would improperly set Host: header to proxy's IP:port.
- conn.connect()
+ if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older
+ set_tunnel(self.host, self.port)
+ else:
+ set_tunnel(self.host, self.port, self.proxy_headers)
- return conn
+ conn.connect()
def _new_conn(self):
"""
@@ -700,7 +735,6 @@ class HTTPSConnectionPool(HTTPConnectionPool):
% (self.num_connections, self.host))
if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
- # Platform-specific: Python without ssl
raise SSLError("Can't connect to HTTPS URL because the SSL "
"module is not available.")
diff --git a/libs/requests/packages/urllib3/contrib/pyopenssl.py b/libs/requests/packages/urllib3/contrib/pyopenssl.py
index 8229090..b2c34a8 100644
--- a/libs/requests/packages/urllib3/contrib/pyopenssl.py
+++ b/libs/requests/packages/urllib3/contrib/pyopenssl.py
@@ -38,8 +38,6 @@ Module Variables
----------------
:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.
- Default: ``ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:
- ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS``
.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
@@ -85,22 +83,7 @@ _openssl_verify = {
+ OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}
-# A secure default.
-# Sources for more information on TLS ciphers:
-#
-# - https://wiki.mozilla.org/Security/Server_Side_TLS
-# - https://www.ssllabs.com/projects/best-practices/index.html
-# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
-#
-# The general intent is:
-# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
-# - prefer ECDHE over DHE for better performance,
-# - prefer any AES-GCM over any AES-CBC for better performance and security,
-# - use 3DES as fallback which is secure but slow,
-# - disable NULL authentication, MD5 MACs and DSS for security reasons.
-DEFAULT_SSL_CIPHER_LIST = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:" + \
- "ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:" + \
- "!aNULL:!MD5:!DSS"
+DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS
orig_util_HAS_SNI = util.HAS_SNI
@@ -191,6 +174,11 @@ class WrappedSocket(object):
return b''
else:
raise
+ except OpenSSL.SSL.ZeroReturnError as e:
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
+ return b''
+ else:
+ raise
except OpenSSL.SSL.WantReadError:
rd, wd, ed = select.select(
[self.socket], [], [], self.socket.gettimeout())
@@ -294,7 +282,9 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
try:
cnx.do_handshake()
except OpenSSL.SSL.WantReadError:
- select.select([sock], [], [])
+ rd, _, _ = select.select([sock], [], [], sock.gettimeout())
+ if not rd:
+ raise timeout('select timed out')
continue
except OpenSSL.SSL.Error as e:
raise ssl.SSLError('bad handshake', e)
diff --git a/libs/requests/packages/urllib3/exceptions.py b/libs/requests/packages/urllib3/exceptions.py
index 0c6fd3c..31bda1c 100644
--- a/libs/requests/packages/urllib3/exceptions.py
+++ b/libs/requests/packages/urllib3/exceptions.py
@@ -157,3 +157,13 @@ class InsecureRequestWarning(SecurityWarning):
class SystemTimeWarning(SecurityWarning):
"Warned when system time is suspected to be wrong"
pass
+
+
+class InsecurePlatformWarning(SecurityWarning):
+ "Warned when certain SSL configuration is not available on a platform."
+ pass
+
+
+class ResponseNotChunked(ProtocolError, ValueError):
+ "Response needs to be chunked in order to read it as chunks."
+ pass
diff --git a/libs/requests/packages/urllib3/poolmanager.py b/libs/requests/packages/urllib3/poolmanager.py
index 515dc96..b8d1e74 100644
--- a/libs/requests/packages/urllib3/poolmanager.py
+++ b/libs/requests/packages/urllib3/poolmanager.py
@@ -8,7 +8,7 @@ except ImportError:
from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import port_by_scheme
-from .exceptions import LocationValueError
+from .exceptions import LocationValueError, MaxRetryError
from .request import RequestMethods
from .util.url import parse_url
from .util.retry import Retry
@@ -64,6 +64,14 @@ class PoolManager(RequestMethods):
self.pools = RecentlyUsedContainer(num_pools,
dispose_func=lambda p: p.close())
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.clear()
+ # Return False to re-raise any potential exceptions
+ return False
+
def _new_pool(self, scheme, host, port):
"""
Create a new :class:`ConnectionPool` based on host, port and scheme.
@@ -167,7 +175,14 @@ class PoolManager(RequestMethods):
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect)
- kw['retries'] = retries.increment(method, redirect_location)
+ try:
+ retries = retries.increment(method, url, response=response, _pool=conn)
+ except MaxRetryError:
+ if retries.raise_on_redirect:
+ raise
+ return response
+
+ kw['retries'] = retries
kw['redirect'] = redirect
log.info("Redirecting %s -> %s" % (url, redirect_location))
diff --git a/libs/requests/packages/urllib3/response.py b/libs/requests/packages/urllib3/response.py
index e69de95..24140c4 100644
--- a/libs/requests/packages/urllib3/response.py
+++ b/libs/requests/packages/urllib3/response.py
@@ -1,15 +1,20 @@
+try:
+ import http.client as httplib
+except ImportError:
+ import httplib
import zlib
import io
from socket import timeout as SocketTimeout
from ._collections import HTTPHeaderDict
-from .exceptions import ProtocolError, DecodeError, ReadTimeoutError
-from .packages.six import string_types as basestring, binary_type
+from .exceptions import (
+ ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
+)
+from .packages.six import string_types as basestring, binary_type, PY3
from .connection import HTTPException, BaseSSLError
from .util.response import is_fp_closed
-
class DeflateDecoder(object):
def __init__(self):
@@ -21,6 +26,9 @@ class DeflateDecoder(object):
return getattr(self._obj, name)
def decompress(self, data):
+ if not data:
+ return data
+
if not self._first_try:
return self._obj.decompress(data)
@@ -36,9 +44,23 @@ class DeflateDecoder(object):
self._data = None
+class GzipDecoder(object):
+
+ def __init__(self):
+ self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
+
+ def __getattr__(self, name):
+ return getattr(self._obj, name)
+
+ def decompress(self, data):
+ if not data:
+ return data
+ return self._obj.decompress(data)
+
+
def _get_decoder(mode):
if mode == 'gzip':
- return zlib.decompressobj(16 + zlib.MAX_WBITS)
+ return GzipDecoder()
return DeflateDecoder()
@@ -76,9 +98,10 @@ class HTTPResponse(io.IOBase):
strict=0, preload_content=True, decode_content=True,
original_response=None, pool=None, connection=None):
- self.headers = HTTPHeaderDict()
- if headers:
- self.headers.update(headers)
+ if isinstance(headers, HTTPHeaderDict):
+ self.headers = headers
+ else:
+ self.headers = HTTPHeaderDict(headers)
self.status = status
self.version = version
self.reason = reason
@@ -100,7 +123,17 @@ class HTTPResponse(io.IOBase):
if hasattr(body, 'read'):
self._fp = body
- if preload_content and not self._body:
+ # Are we using the chunked-style of transfer encoding?
+ self.chunked = False
+ self.chunk_left = None
+ tr_enc = self.headers.get('transfer-encoding', '').lower()
+ # Don't incur the penalty of creating a list and then discarding it
+ encodings = (enc.strip() for enc in tr_enc.split(","))
+ if "chunked" in encodings:
+ self.chunked = True
+
+ # We certainly don't want to preload content when the response is chunked.
+ if not self.chunked and preload_content and not self._body:
self._body = self.read(decode_content=decode_content)
def get_redirect_location(self):
@@ -140,6 +173,35 @@ class HTTPResponse(io.IOBase):
"""
return self._fp_bytes_read
+ def _init_decoder(self):
+ """
+        Set up the _decoder attribute if necessary.
+ """
+ # Note: content-encoding value should be case-insensitive, per RFC 7230
+ # Section 3.2
+ content_encoding = self.headers.get('content-encoding', '').lower()
+ if self._decoder is None and content_encoding in self.CONTENT_DECODERS:
+ self._decoder = _get_decoder(content_encoding)
+
+ def _decode(self, data, decode_content, flush_decoder):
+ """
+ Decode the data passed in and potentially flush the decoder.
+ """
+ try:
+ if decode_content and self._decoder:
+ data = self._decoder.decompress(data)
+ except (IOError, zlib.error) as e:
+ content_encoding = self.headers.get('content-encoding', '').lower()
+ raise DecodeError(
+ "Received response with content-encoding: %s, but "
+ "failed to decode it." % content_encoding, e)
+
+ if flush_decoder and decode_content and self._decoder:
+ buf = self._decoder.decompress(binary_type())
+ data += buf + self._decoder.flush()
+
+ return data
+
def read(self, amt=None, decode_content=None, cache_content=False):
"""
Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
@@ -161,12 +223,7 @@ class HTTPResponse(io.IOBase):
after having ``.read()`` the file object. (Overridden if ``amt`` is
set.)
"""
- # Note: content-encoding value should be case-insensitive, per RFC 7230
- # Section 3.2
- content_encoding = self.headers.get('content-encoding', '').lower()
- if self._decoder is None:
- if content_encoding in self.CONTENT_DECODERS:
- self._decoder = _get_decoder(content_encoding)
+ self._init_decoder()
if decode_content is None:
decode_content = self.decode_content
@@ -202,7 +259,7 @@ class HTTPResponse(io.IOBase):
except BaseSSLError as e:
# FIXME: Is there a better way to differentiate between SSLErrors?
- if not 'read operation timed out' in str(e): # Defensive:
+ if 'read operation timed out' not in str(e): # Defensive:
# This shouldn't happen but just in case we're missing an edge
# case, let's avoid swallowing SSL errors.
raise
@@ -215,17 +272,7 @@ class HTTPResponse(io.IOBase):
self._fp_bytes_read += len(data)
- try:
- if decode_content and self._decoder:
- data = self._decoder.decompress(data)
- except (IOError, zlib.error) as e:
- raise DecodeError(
- "Received response with content-encoding: %s, but "
- "failed to decode it." % content_encoding, e)
-
- if flush_decoder and decode_content and self._decoder:
- buf = self._decoder.decompress(binary_type())
- data += buf + self._decoder.flush()
+ data = self._decode(data, decode_content, flush_decoder)
if cache_content:
self._body = data
@@ -252,11 +299,15 @@ class HTTPResponse(io.IOBase):
If True, will attempt to decode the body based on the
'content-encoding' header.
"""
- while not is_fp_closed(self._fp):
- data = self.read(amt=amt, decode_content=decode_content)
+ if self.chunked:
+ for line in self.read_chunked(amt, decode_content=decode_content):
+ yield line
+ else:
+ while not is_fp_closed(self._fp):
+ data = self.read(amt=amt, decode_content=decode_content)
- if data:
- yield data
+ if data:
+ yield data
@classmethod
def from_httplib(ResponseCls, r, **response_kw):
@@ -267,14 +318,16 @@ class HTTPResponse(io.IOBase):
Remaining parameters are passed to the HTTPResponse constructor, along
with ``original_response=r``.
"""
-
- headers = HTTPHeaderDict()
- for k, v in r.getheaders():
- headers.add(k, v)
+ headers = r.msg
+ if not isinstance(headers, HTTPHeaderDict):
+ if PY3: # Python 3
+ headers = HTTPHeaderDict(headers.items())
+ else: # Python 2
+ headers = HTTPHeaderDict.from_httplib(headers)
# HTTPResponse objects in Python 3 don't have a .strict attribute
strict = getattr(r, 'strict', 0)
- return ResponseCls(body=r,
+ resp = ResponseCls(body=r,
headers=headers,
status=r.status,
version=r.version,
@@ -282,6 +335,7 @@ class HTTPResponse(io.IOBase):
strict=strict,
original_response=r,
**response_kw)
+ return resp
# Backwards-compatibility methods for httplib.HTTPResponse
def getheaders(self):
@@ -331,3 +385,82 @@ class HTTPResponse(io.IOBase):
else:
b[:len(temp)] = temp
return len(temp)
+
+ def _update_chunk_length(self):
+ # First, we'll figure out length of a chunk and then
+ # we'll try to read it from socket.
+ if self.chunk_left is not None:
+ return
+ line = self._fp.fp.readline()
+ line = line.split(b';', 1)[0]
+ try:
+ self.chunk_left = int(line, 16)
+ except ValueError:
+ # Invalid chunked protocol response, abort.
+ self.close()
+ raise httplib.IncompleteRead(line)
+
+ def _handle_chunk(self, amt):
+ returned_chunk = None
+ if amt is None:
+ chunk = self._fp._safe_read(self.chunk_left)
+ returned_chunk = chunk
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ elif amt < self.chunk_left:
+ value = self._fp._safe_read(amt)
+ self.chunk_left = self.chunk_left - amt
+ returned_chunk = value
+ elif amt == self.chunk_left:
+ value = self._fp._safe_read(amt)
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ returned_chunk = value
+ else: # amt > self.chunk_left
+ returned_chunk = self._fp._safe_read(self.chunk_left)
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
+ self.chunk_left = None
+ return returned_chunk
+
+ def read_chunked(self, amt=None, decode_content=None):
+ """
+ Similar to :meth:`HTTPResponse.read`, but with an additional
+ parameter: ``decode_content``.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
+ """
+ self._init_decoder()
+ # FIXME: Rewrite this method and make it a class with a better structured logic.
+ if not self.chunked:
+ raise ResponseNotChunked("Response is not chunked. "
+ "Header 'transfer-encoding: chunked' is missing.")
+
+ if self._original_response and self._original_response._method.upper() == 'HEAD':
+ # Don't bother reading the body of a HEAD request.
+ # FIXME: Can we do this somehow without accessing private httplib _method?
+ self._original_response.close()
+ return
+
+ while True:
+ self._update_chunk_length()
+ if self.chunk_left == 0:
+ break
+ chunk = self._handle_chunk(amt)
+ yield self._decode(chunk, decode_content=decode_content,
+ flush_decoder=True)
+
+ # Chunk content ends with \r\n: discard it.
+ while True:
+ line = self._fp.fp.readline()
+ if not line:
+ # Some sites may not end with '\r\n'.
+ break
+ if line == b'\r\n':
+ break
+
+ # We read everything; close the "file".
+ if self._original_response:
+ self._original_response.close()
+ self.release_conn()
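The chunked-transfer support added above is exercised through ``stream()``: when ``self.chunked`` is set, iteration delegates to ``read_chunked()``. A hedged end-to-end sketch, assuming the illustrative endpoint responds with ``Transfer-Encoding: chunked``:

    from requests.packages import urllib3

    http = urllib3.PoolManager()
    resp = http.request('GET', 'http://httpbin.org/stream/5',
                        preload_content=False)
    for chunk in resp.stream(1024):
        print(len(chunk))
    resp.release_conn()
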
diff --git a/libs/requests/packages/urllib3/util/connection.py b/libs/requests/packages/urllib3/util/connection.py
index 2156993..859aec6 100644
--- a/libs/requests/packages/urllib3/util/connection.py
+++ b/libs/requests/packages/urllib3/util/connection.py
@@ -82,6 +82,7 @@ def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
err = _
if sock is not None:
sock.close()
+ sock = None
if err is not None:
raise err
diff --git a/libs/requests/packages/urllib3/util/retry.py b/libs/requests/packages/urllib3/util/retry.py
index aeaf8a0..7e0959d 100644
--- a/libs/requests/packages/urllib3/util/retry.py
+++ b/libs/requests/packages/urllib3/util/retry.py
@@ -190,7 +190,7 @@ class Retry(object):
return isinstance(err, (ReadTimeoutError, ProtocolError))
def is_forced_retry(self, method, status_code):
- """ Is this method/response retryable? (Based on method/codes whitelists)
+ """ Is this method/status code retryable? (Based on method/codes whitelists)
"""
if self.method_whitelist and method.upper() not in self.method_whitelist:
return False
diff --git a/libs/requests/packages/urllib3/util/ssl_.py b/libs/requests/packages/urllib3/util/ssl_.py
index a788b1b..b846d42 100644
--- a/libs/requests/packages/urllib3/util/ssl_.py
+++ b/libs/requests/packages/urllib3/util/ssl_.py
@@ -1,7 +1,7 @@
from binascii import hexlify, unhexlify
-from hashlib import md5, sha1
+from hashlib import md5, sha1, sha256
-from ..exceptions import SSLError
+from ..exceptions import SSLError, InsecurePlatformWarning
SSLContext = None
@@ -9,9 +9,10 @@ HAS_SNI = False
create_default_context = None
import errno
-import ssl
+import warnings
try: # Test for SSL features
+ import ssl
from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
from ssl import HAS_SNI # Has SNI?
except ImportError:
@@ -24,14 +25,24 @@ except ImportError:
OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
OP_NO_COMPRESSION = 0x20000
-try:
- from ssl import _DEFAULT_CIPHERS
-except ImportError:
- _DEFAULT_CIPHERS = (
- 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
- 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:ECDH+RC4:'
- 'DH+RC4:RSA+RC4:!aNULL:!eNULL:!MD5'
- )
+# A secure default.
+# Sources for more information on TLS ciphers:
+#
+# - https://wiki.mozilla.org/Security/Server_Side_TLS
+# - https://www.ssllabs.com/projects/best-practices/index.html
+# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
+#
+# The general intent is:
+# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
+# - prefer ECDHE over DHE for better performance,
+# - prefer any AES-GCM over any AES-CBC for better performance and security,
+# - use 3DES as fallback which is secure but slow,
+# - disable NULL authentication, MD5 MACs and DSS for security reasons.
+DEFAULT_CIPHERS = (
+ 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
+ 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
+ '!eNULL:!MD5'
+)
try:
from ssl import SSLContext # Modern SSL?
@@ -39,7 +50,8 @@ except ImportError:
import sys
class SSLContext(object): # Platform-specific: Python 2 & 3.1
- supports_set_ciphers = sys.version_info >= (2, 7)
+ supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or
+ (3, 2) <= sys.version_info)
def __init__(self, protocol_version):
self.protocol = protocol_version
@@ -69,6 +81,14 @@ except ImportError:
self.ciphers = cipher_suite
def wrap_socket(self, socket, server_hostname=None):
+ warnings.warn(
+ 'A true SSLContext object is not available. This prevents '
+ 'urllib3 from configuring SSL appropriately and may cause '
+ 'certain SSL connections to fail. For more information, see '
+ 'https://urllib3.readthedocs.org/en/latest/security.html'
+ '#insecureplatformwarning.',
+ InsecurePlatformWarning
+ )
kwargs = {
'keyfile': self.keyfile,
'certfile': self.certfile,
@@ -96,7 +116,8 @@ def assert_fingerprint(cert, fingerprint):
# this digest.
hashfunc_map = {
16: md5,
- 20: sha1
+ 20: sha1,
+ 32: sha256,
}
fingerprint = fingerprint.replace(':', '').lower()
@@ -157,7 +178,7 @@ def resolve_ssl_version(candidate):
return candidate
-def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED,
+def create_urllib3_context(ssl_version=None, cert_reqs=None,
options=None, ciphers=None):
"""All arguments have the same meaning as ``ssl_wrap_socket``.
@@ -194,6 +215,9 @@ def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED,
"""
context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)
+ # Setting the default here, as we may have no ssl module on import
+ cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
+
if options is None:
options = 0
# SSLv2 is easily broken and is considered harmful and dangerous
@@ -207,11 +231,13 @@ def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED,
context.options |= options
if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6
- context.set_ciphers(ciphers or _DEFAULT_CIPHERS)
+ context.set_ciphers(ciphers or DEFAULT_CIPHERS)
context.verify_mode = cert_reqs
if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2
- context.check_hostname = (context.verify_mode == ssl.CERT_REQUIRED)
+ # We do our own verification, including fingerprints and alternative
+ # hostnames. So disable it here
+ context.check_hostname = False
return context
diff --git a/libs/requests/packages/urllib3/util/url.py b/libs/requests/packages/urllib3/util/url.py
index b2ec834..e58050c 100644
--- a/libs/requests/packages/urllib3/util/url.py
+++ b/libs/requests/packages/urllib3/util/url.py
@@ -15,6 +15,8 @@ class Url(namedtuple('Url', url_attrs)):
def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
query=None, fragment=None):
+ if path and not path.startswith('/'):
+ path = '/' + path
return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
query, fragment)
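The ``Url.__new__`` change above normalizes relative paths by prefixing a slash. A small sketch:

    from requests.packages.urllib3.util.url import Url, parse_url

    u = Url(scheme='http', host='example.com', path='api/v1')
    print(u.path)                                        # '/api/v1' -- leading slash added
    print(parse_url('http://example.com/api/v1?q=2').query)  # 'q=2'
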
diff --git a/libs/requests/sessions.py b/libs/requests/sessions.py
index 4f30696..7c75460 100644
--- a/libs/requests/sessions.py
+++ b/libs/requests/sessions.py
@@ -63,12 +63,10 @@ def merge_setting(request_setting, session_setting, dict_class=OrderedDict):
merged_setting.update(to_key_val_list(request_setting))
# Remove keys that are set to None.
- for (k, v) in request_setting.items():
+ for (k, v) in merged_setting.items():
if v is None:
del merged_setting[k]
- merged_setting = dict((k, v) for (k, v) in merged_setting.items() if v is not None)
-
return merged_setting
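The fix above removes ``None``-valued keys from the *merged* mapping rather than from the request-level one, so a per-request ``None`` now reliably strips the session default. A sketch under this Python 2-era vendored code:

    from requests.sessions import merge_setting

    session_headers = {'Accept': 'application/json', 'X-Token': 'abc'}
    request_headers = {'X-Token': None, 'X-Extra': '1'}
    print(merge_setting(request_headers, session_headers))
    # contains 'Accept' and 'X-Extra' only; 'X-Token' was dropped
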
@@ -90,7 +88,7 @@ def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
class SessionRedirectMixin(object):
def resolve_redirects(self, resp, req, stream=False, timeout=None,
- verify=True, cert=None, proxies=None):
+ verify=True, cert=None, proxies=None, **adapter_kwargs):
"""Receives a Response. Returns a generator of Responses."""
i = 0
@@ -171,7 +169,10 @@ class SessionRedirectMixin(object):
except KeyError:
pass
- extract_cookies_to_jar(prepared_request._cookies, prepared_request, resp.raw)
+ # Extract any cookies sent on the response to the cookiejar
+ # in the new request. Because we've mutated our copied prepared
+ # request, use the old one that we haven't yet touched.
+ extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
prepared_request._cookies.update(self.cookies)
prepared_request.prepare_cookies(prepared_request._cookies)
@@ -190,6 +191,7 @@ class SessionRedirectMixin(object):
cert=cert,
proxies=proxies,
allow_redirects=False,
+ **adapter_kwargs
)
extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
@@ -271,6 +273,12 @@ class Session(SessionRedirectMixin):
>>> s = requests.Session()
>>> s.get('http://httpbin.org/get')
200
+
+ Or as a context manager::
+
+ >>> with requests.Session() as s:
+ >>> s.get('http://httpbin.org/get')
+ 200
"""
__attrs__ = [
@@ -414,8 +422,8 @@ class Session(SessionRedirectMixin):
:param auth: (optional) Auth tuple or callable to enable
Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) How long to wait for the server to send
- data before giving up, as a float, or a (`connect timeout, read
- timeout `_) tuple.
+ data before giving up, as a float, or a :ref:`(connect timeout,
+ read timeout) ` tuple.
:type timeout: float or tuple
:param allow_redirects: (optional) Set to True by default.
:type allow_redirects: bool
@@ -557,10 +565,6 @@ class Session(SessionRedirectMixin):
# Set up variables needed for resolve_redirects and dispatching of hooks
allow_redirects = kwargs.pop('allow_redirects', True)
stream = kwargs.get('stream')
- timeout = kwargs.get('timeout')
- verify = kwargs.get('verify')
- cert = kwargs.get('cert')
- proxies = kwargs.get('proxies')
hooks = request.hooks
# Get the appropriate adapter to use
@@ -588,12 +592,7 @@ class Session(SessionRedirectMixin):
extract_cookies_to_jar(self.cookies, request, r.raw)
# Redirect resolving generator.
- gen = self.resolve_redirects(r, request,
- stream=stream,
- timeout=timeout,
- verify=verify,
- cert=cert,
- proxies=proxies)
+ gen = self.resolve_redirects(r, request, **kwargs)
# Resolve redirects if allowed.
history = [resp for resp in gen] if allow_redirects else []
diff --git a/libs/requests/utils.py b/libs/requests/utils.py
index 7467941..3fd0e41 100644
--- a/libs/requests/utils.py
+++ b/libs/requests/utils.py
@@ -25,7 +25,8 @@ from . import __version__
from . import certs
from .compat import parse_http_list as _parse_list_header
from .compat import (quote, urlparse, bytes, str, OrderedDict, unquote, is_py2,
- builtin_str, getproxies, proxy_bypass, urlunparse)
+ builtin_str, getproxies, proxy_bypass, urlunparse,
+ basestring)
from .cookies import RequestsCookieJar, cookiejar_from_dict
from .structures import CaseInsensitiveDict
from .exceptions import InvalidURL
@@ -66,7 +67,7 @@ def super_len(o):
return len(o.getvalue())
-def get_netrc_auth(url):
+def get_netrc_auth(url, raise_errors=False):
"""Returns the Requests tuple auth for a given url from netrc."""
try:
@@ -104,8 +105,9 @@ def get_netrc_auth(url):
return (_netrc[login_i], _netrc[2])
except (NetrcParseError, IOError):
# If there was a parsing error or a permissions issue reading the file,
- # we'll just skip netrc auth
- pass
+ # we'll just skip netrc auth unless explicitly asked to raise errors.
+ if raise_errors:
+ raise
# AppEngine hackiness.
except (ImportError, AttributeError):
@@ -115,7 +117,8 @@ def get_netrc_auth(url):
def guess_filename(obj):
"""Tries to guess the filename of the given object."""
name = getattr(obj, 'name', None)
- if name and isinstance(name, builtin_str) and name[0] != '<' and name[-1] != '>':
+ if (name and isinstance(name, basestring) and name[0] != '<' and
+ name[-1] != '>'):
return os.path.basename(name)
@@ -418,10 +421,18 @@ def requote_uri(uri):
This function passes the given URI through an unquote/quote cycle to
ensure that it is fully and consistently quoted.
"""
- # Unquote only the unreserved characters
- # Then quote only illegal characters (do not quote reserved, unreserved,
- # or '%')
- return quote(unquote_unreserved(uri), safe="!#$%&'()*+,/:;=?@[]~")
+ safe_with_percent = "!#$%&'()*+,/:;=?@[]~"
+ safe_without_percent = "!#$&'()*+,/:;=?@[]~"
+ try:
+ # Unquote only the unreserved characters
+ # Then quote only illegal characters (do not quote reserved,
+ # unreserved, or '%')
+ return quote(unquote_unreserved(uri), safe=safe_with_percent)
+ except InvalidURL:
+ # We couldn't unquote the given URI, so let's try quoting it, but
+ # there may be unquoted '%'s in the URI. We need to make sure they're
+ # properly quoted so they do not cause issues elsewhere.
+ return quote(uri, safe=safe_without_percent)
def address_in_network(ip, net):
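With the fallback added to ``requote_uri``, a URI containing a bare ``%`` that is not a valid percent-escape no longer raises ``InvalidURL``; it is re-quoted instead. A short sketch:

    from requests.utils import requote_uri

    print(requote_uri('http://example.com/a%20b'))    # valid escapes survive unchanged
    print(requote_uri('http://example.com/path%zz'))  # bare '%' becomes '%25': .../path%25zz
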
diff --git a/libs/tornado/__init__.py b/libs/tornado/__init__.py
index 0e39f84..6f4f47d 100755
--- a/libs/tornado/__init__.py
+++ b/libs/tornado/__init__.py
@@ -25,5 +25,5 @@ from __future__ import absolute_import, division, print_function, with_statement
# is zero for an official release, positive for a development branch,
# or negative for a release candidate or beta (after the base version
# number has been incremented)
-version = "4.1.dev1"
-version_info = (4, 1, 0, -100)
+version = "4.1"
+version_info = (4, 1, 0, 0)
diff --git a/libs/tornado/autoreload.py b/libs/tornado/autoreload.py
index 3982579..a548cf0 100755
--- a/libs/tornado/autoreload.py
+++ b/libs/tornado/autoreload.py
@@ -108,7 +108,11 @@ _io_loops = weakref.WeakKeyDictionary()
def start(io_loop=None, check_time=500):
- """Begins watching source files for changes using the given `.IOLoop`. """
+ """Begins watching source files for changes.
+
+ .. versionchanged:: 4.1
+ The ``io_loop`` argument is deprecated.
+ """
io_loop = io_loop or ioloop.IOLoop.current()
if io_loop in _io_loops:
return
diff --git a/libs/tornado/concurrent.py b/libs/tornado/concurrent.py
index 6bab5d2..acfbcd8 100755
--- a/libs/tornado/concurrent.py
+++ b/libs/tornado/concurrent.py
@@ -25,11 +25,13 @@ module.
from __future__ import absolute_import, division, print_function, with_statement
import functools
+import platform
+import traceback
import sys
+from tornado.log import app_log
from tornado.stack_context import ExceptionStackContext, wrap
from tornado.util import raise_exc_info, ArgReplacer
-from tornado.log import app_log
try:
from concurrent import futures
@@ -37,9 +39,88 @@ except ImportError:
futures = None
+# Can the garbage collector handle cycles that include __del__ methods?
+# This is true in cpython beginning with version 3.4 (PEP 442).
+_GC_CYCLE_FINALIZERS = (platform.python_implementation() == 'CPython' and
+ sys.version_info >= (3, 4))
+
class ReturnValueIgnoredError(Exception):
pass
+# This class and associated code in the future object is derived
+# from the Trollius project, a backport of asyncio to Python 2.x - 3.x
+
+class _TracebackLogger(object):
+ """Helper to log a traceback upon destruction if not cleared.
+
+ This solves a nasty problem with Futures and Tasks that have an
+ exception set: if nobody asks for the exception, the exception is
+ never logged. This violates the Zen of Python: 'Errors should
+ never pass silently. Unless explicitly silenced.'
+
+ However, we don't want to log the exception as soon as
+ set_exception() is called: if the calling code is written
+ properly, it will get the exception and handle it properly. But
+ we *do* want to log it if result() or exception() was never called
+ -- otherwise developers waste a lot of time wondering why their
+ buggy code fails silently.
+
+ An earlier attempt added a __del__() method to the Future class
+ itself, but this backfired because the presence of __del__()
+ prevents garbage collection from breaking cycles. A way out of
+ this catch-22 is to avoid having a __del__() method on the Future
+ class itself, but instead to have a reference to a helper object
+ with a __del__() method that logs the traceback, where we ensure
+ that the helper object doesn't participate in cycles, and only the
+ Future has a reference to it.
+
+ The helper object is added when set_exception() is called. When
+ the Future is collected, and the helper is present, the helper
+ object is also collected, and its __del__() method will log the
+ traceback. When the Future's result() or exception() method is
+    called (and a helper object is present), it removes the helper
+ object, after calling its clear() method to prevent it from
+ logging.
+
+ One downside is that we do a fair amount of work to extract the
+ traceback from the exception, even when it is never logged. It
+ would seem cheaper to just store the exception object, but that
+ references the traceback, which references stack frames, which may
+ reference the Future, which references the _TracebackLogger, and
+ then the _TracebackLogger would be included in a cycle, which is
+ what we're trying to avoid! As an optimization, we don't
+ immediately format the exception; we only do the work when
+ activate() is called, which call is delayed until after all the
+ Future's callbacks have run. Since usually a Future has at least
+ one callback (typically set by 'yield From') and usually that
+ callback extracts the callback, thereby removing the need to
+ format the exception.
+
+ PS. I don't claim credit for this solution. I first heard of it
+ in a discussion about closing files when they are collected.
+ """
+
+ __slots__ = ('exc_info', 'formatted_tb')
+
+ def __init__(self, exc_info):
+ self.exc_info = exc_info
+ self.formatted_tb = None
+
+ def activate(self):
+ exc_info = self.exc_info
+ if exc_info is not None:
+ self.exc_info = None
+ self.formatted_tb = traceback.format_exception(*exc_info)
+
+ def clear(self):
+ self.exc_info = None
+ self.formatted_tb = None
+
+ def __del__(self):
+ if self.formatted_tb:
+ app_log.error('Future exception was never retrieved: %s',
+ ''.join(self.formatted_tb).rstrip())
+
class Future(object):
"""Placeholder for an asynchronous result.
@@ -68,12 +149,23 @@ class Future(object):
if that package was available and fall back to the thread-unsafe
implementation if it was not.
+ .. versionchanged:: 4.1
+ If a `.Future` contains an error but that error is never observed
+ (by calling ``result()``, ``exception()``, or ``exc_info()``),
+ a stack trace will be logged when the `.Future` is garbage collected.
+ This normally indicates an error in the application, but in cases
+ where it results in undesired logging it may be necessary to
+ suppress the logging by ensuring that the exception is observed:
+ ``f.add_done_callback(lambda f: f.exception())``.
"""
def __init__(self):
self._done = False
self._result = None
- self._exception = None
self._exc_info = None
+
+ self._log_traceback = False # Used for Python >= 3.4
+ self._tb_logger = None # Used for Python <= 3.3
+
self._callbacks = []
def cancel(self):
@@ -100,16 +192,21 @@ class Future(object):
"""Returns True if the future has finished running."""
return self._done
+ def _clear_tb_log(self):
+ self._log_traceback = False
+ if self._tb_logger is not None:
+ self._tb_logger.clear()
+ self._tb_logger = None
+
def result(self, timeout=None):
"""If the operation succeeded, return its result. If it failed,
re-raise its exception.
"""
+ self._clear_tb_log()
if self._result is not None:
return self._result
if self._exc_info is not None:
raise_exc_info(self._exc_info)
- elif self._exception is not None:
- raise self._exception
self._check_done()
return self._result
@@ -117,8 +214,9 @@ class Future(object):
"""If the operation raised an exception, return the `Exception`
object. Otherwise returns None.
"""
- if self._exception is not None:
- return self._exception
+ self._clear_tb_log()
+ if self._exc_info is not None:
+ return self._exc_info[1]
else:
self._check_done()
return None
@@ -147,14 +245,17 @@ class Future(object):
def set_exception(self, exception):
"""Sets the exception of a ``Future.``"""
- self._exception = exception
- self._set_done()
+ self.set_exc_info(
+ (exception.__class__,
+ exception,
+ getattr(exception, '__traceback__', None)))
def exc_info(self):
"""Returns a tuple in the same format as `sys.exc_info` or None.
.. versionadded:: 4.0
"""
+ self._clear_tb_log()
return self._exc_info
def set_exc_info(self, exc_info):
@@ -165,7 +266,18 @@ class Future(object):
.. versionadded:: 4.0
"""
self._exc_info = exc_info
- self.set_exception(exc_info[1])
+ self._log_traceback = True
+ if not _GC_CYCLE_FINALIZERS:
+ self._tb_logger = _TracebackLogger(exc_info)
+
+ try:
+ self._set_done()
+ finally:
+ # Activate the logger after all callbacks have had a
+ # chance to call result() or exception().
+ if self._log_traceback and self._tb_logger is not None:
+ self._tb_logger.activate()
+ self._exc_info = exc_info
def _check_done(self):
if not self._done:
@@ -181,6 +293,21 @@ class Future(object):
cb, self)
self._callbacks = None
+ # On Python 3.3 or older, objects with a destructor part of a reference
+ # cycle are never destroyed. It's no longer the case on Python 3.4 thanks to
+ # the PEP 442.
+ if _GC_CYCLE_FINALIZERS:
+ def __del__(self):
+ if not self._log_traceback:
+ # set_exception() was not called, or result() or exception()
+ # has consumed the exception
+ return
+
+ tb = traceback.format_exception(*self._exc_info)
+
+ app_log.error('Future %r exception was never retrieved: %s',
+ self, ''.join(tb).rstrip())
+
TracebackFuture = Future
if futures is None:
@@ -293,7 +420,7 @@ def return_future(f):
# If the initial synchronous part of f() raised an exception,
# go ahead and raise it to the caller directly without waiting
# for them to inspect the Future.
- raise_exc_info(exc_info)
+ future.result()
# If the caller passed in a callback, schedule it to be called
# when the future resolves. It is important that this happens
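Reviewer note: a minimal sketch (not part of the patch) of the new unobserved-exception behaviour described in the versionchanged note above; the suppression idiom ``f.add_done_callback(lambda f: f.exception())`` comes straight from that note::

    from tornado.concurrent import Future

    def start_background_task():
        f = Future()
        f.set_exception(RuntimeError("boom"))
        # Marking the exception as observed prevents the
        # "Future exception was never retrieved" log entry when the
        # Future is garbage collected.
        f.add_done_callback(lambda f: f.exception())
        return f

    fut = start_background_task()
    del fut  # nothing is logged; without the callback, app_log.error would fire
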
diff --git a/libs/tornado/curl_httpclient.py b/libs/tornado/curl_httpclient.py
index 68047cc..ebbe0e8 100755
--- a/libs/tornado/curl_httpclient.py
+++ b/libs/tornado/curl_httpclient.py
@@ -28,12 +28,12 @@ from io import BytesIO
from tornado import httputil
from tornado import ioloop
-from tornado.log import gen_log
from tornado import stack_context
from tornado.escape import utf8, native_str
from tornado.httpclient import HTTPResponse, HTTPError, AsyncHTTPClient, main
+curl_log = logging.getLogger('tornado.curl_httpclient')
class CurlAsyncHTTPClient(AsyncHTTPClient):
def initialize(self, io_loop, max_clients=10, defaults=None):
@@ -257,7 +257,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
def _curl_create(self):
curl = pycurl.Curl()
- if gen_log.isEnabledFor(logging.DEBUG):
+ if curl_log.isEnabledFor(logging.DEBUG):
curl.setopt(pycurl.VERBOSE, 1)
curl.setopt(pycurl.DEBUGFUNCTION, self._curl_debug)
return curl
@@ -403,11 +403,11 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
raise ValueError("Unsupported auth_mode %s" % request.auth_mode)
curl.setopt(pycurl.USERPWD, native_str(userpwd))
- gen_log.debug("%s %s (username: %r)", request.method, request.url,
+ curl_log.debug("%s %s (username: %r)", request.method, request.url,
request.auth_username)
else:
curl.unsetopt(pycurl.USERPWD)
- gen_log.debug("%s %s", request.method, request.url)
+ curl_log.debug("%s %s", request.method, request.url)
if request.client_cert is not None:
curl.setopt(pycurl.SSLCERT, request.client_cert)
@@ -448,12 +448,12 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
def _curl_debug(self, debug_type, debug_msg):
debug_types = ('I', '<', '>', '<', '>')
if debug_type == 0:
- gen_log.debug('%s', debug_msg.strip())
+ curl_log.debug('%s', debug_msg.strip())
elif debug_type in (1, 2):
for line in debug_msg.splitlines():
- gen_log.debug('%s %s', debug_types[debug_type], line)
+ curl_log.debug('%s %s', debug_types[debug_type], line)
elif debug_type == 4:
- gen_log.debug('%s %r', debug_types[debug_type], debug_msg)
+ curl_log.debug('%s %r', debug_types[debug_type], debug_msg)
class CurlError(HTTPError):
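Reviewer note: since the curl client now logs through its own ``tornado.curl_httpclient`` logger, verbose libcurl tracing can be enabled selectively instead of turning on all of ``tornado.general``. A sketch assuming pycurl is installed; the URL is a placeholder::

    import logging

    from tornado.httpclient import AsyncHTTPClient
    from tornado.ioloop import IOLoop

    logging.basicConfig(level=logging.INFO)
    # DEBUG on this logger makes _curl_create() set pycurl.VERBOSE.
    logging.getLogger('tornado.curl_httpclient').setLevel(logging.DEBUG)

    AsyncHTTPClient.configure('tornado.curl_httpclient.CurlAsyncHTTPClient')

    client = AsyncHTTPClient()
    client.fetch('http://example.com/',
                 callback=lambda response: IOLoop.current().stop())
    IOLoop.current().start()
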
diff --git a/libs/tornado/gen.py b/libs/tornado/gen.py
index 2fc9b0c..86fe2f1 100755
--- a/libs/tornado/gen.py
+++ b/libs/tornado/gen.py
@@ -43,8 +43,21 @@ be returned when they are all finished::
response3 = response_dict['response3']
response4 = response_dict['response4']
+If the `~functools.singledispatch` library is available (standard in
+Python 3.4, available via the `singledispatch
+<https://pypi.python.org/pypi/singledispatch>`_ package on older
+versions), additional types of objects may be yielded. Tornado includes
+support for ``asyncio.Future`` and Twisted's ``Deferred`` class when
+``tornado.platform.asyncio`` and ``tornado.platform.twisted`` are imported.
+See the `convert_yielded` function to extend this mechanism.
+
.. versionchanged:: 3.2
Dict support added.
+
+.. versionchanged:: 4.1
+ Support added for yielding ``asyncio`` Futures and Twisted Deferreds
+ via ``singledispatch``.
+
"""
from __future__ import absolute_import, division, print_function, with_statement
@@ -53,11 +66,21 @@ import functools
import itertools
import sys
import types
+import weakref
from tornado.concurrent import Future, TracebackFuture, is_future, chain_future
from tornado.ioloop import IOLoop
+from tornado.log import app_log
from tornado import stack_context
+try:
+ from functools import singledispatch # py34+
+except ImportError as e:
+ try:
+ from singledispatch import singledispatch # backport
+ except ImportError:
+ singledispatch = None
+
class KeyReuseError(Exception):
pass
@@ -240,6 +263,106 @@ class Return(Exception):
super(Return, self).__init__()
self.value = value
+class WaitIterator(object):
+ """Provides an iterator to yield the results of futures as they finish.
+
+ Yielding a set of futures like this:
+
+ ``results = yield [future1, future2]``
+
+ pauses the coroutine until both ``future1`` and ``future2``
+ return, and then restarts the coroutine with the results of both
+ futures. If either future is an exception, the expression will
+ raise that exception and all the results will be lost.
+
+ If you need to get the result of each future as soon as possible,
+ or if you need the result of some futures even if others produce
+ errors, you can use ``WaitIterator``:
+
+ ::
+
+ wait_iterator = gen.WaitIterator(future1, future2)
+ while not wait_iterator.done():
+ try:
+ result = yield wait_iterator.next()
+ except Exception as e:
+ print "Error {} from {}".format(e, wait_iterator.current_future)
+ else:
+ print "Result {} recieved from {} at {}".format(
+ result, wait_iterator.current_future,
+ wait_iterator.current_index)
+
+ Because results are returned as soon as they are available the
+ output from the iterator *will not be in the same order as the
+ input arguments*. If you need to know which future produced the
+ current result, you can use the attributes
+ ``WaitIterator.current_future``, or ``WaitIterator.current_index``
+ to get the index of the future from the input list. (if keyword
+ arguments were used in the construction of the `WaitIterator`,
+ ``current_index`` will use the corresponding keyword).
+
+ .. versionadded:: 4.1
+ """
+ def __init__(self, *args, **kwargs):
+ if args and kwargs:
+ raise ValueError(
+ "You must provide args or kwargs, not both")
+
+ if kwargs:
+ self._unfinished = dict((f, k) for (k, f) in kwargs.items())
+ futures = list(kwargs.values())
+ else:
+ self._unfinished = dict((f, i) for (i, f) in enumerate(args))
+ futures = args
+
+ self._finished = collections.deque()
+ self.current_index = self.current_future = None
+ self._running_future = None
+
+ self_ref = weakref.ref(self)
+ for future in futures:
+ future.add_done_callback(functools.partial(
+ self._done_callback, self_ref))
+
+ def done(self):
+ """Returns True if this iterator has no more results."""
+ if self._finished or self._unfinished:
+ return False
+ # Clear the 'current' values when iteration is done.
+ self.current_index = self.current_future = None
+ return True
+
+ def next(self):
+ """Returns a `.Future` that will yield the next available result.
+
+ Note that this `.Future` will not be the same object as any of
+ the inputs.
+ """
+ self._running_future = TracebackFuture()
+
+ if self._finished:
+ self._return_result(self._finished.popleft())
+
+ return self._running_future
+
+ @staticmethod
+ def _done_callback(self_ref, done):
+ self = self_ref()
+ if self is not None:
+ if self._running_future and not self._running_future.done():
+ self._return_result(done)
+ else:
+ self._finished.append(done)
+
+ def _return_result(self, done):
+ """Called set the returned future's state that of the future
+ we yielded, and set the current future for the iterator.
+ """
+ chain_future(done, self._running_future)
+
+ self.current_future = done
+ self.current_index = self._unfinished.pop(done)
+
class YieldPoint(object):
"""Base class for objects that may be yielded from the generator.
@@ -371,6 +494,11 @@ def Task(func, *args, **kwargs):
class YieldFuture(YieldPoint):
def __init__(self, future, io_loop=None):
+ """Adapts a `.Future` to the `YieldPoint` interface.
+
+ .. versionchanged:: 4.1
+ The ``io_loop`` argument is deprecated.
+ """
self.future = future
self.io_loop = io_loop or IOLoop.current()
@@ -504,7 +632,7 @@ def maybe_future(x):
return fut
-def with_timeout(timeout, future, io_loop=None):
+def with_timeout(timeout, future, io_loop=None, quiet_exceptions=()):
"""Wraps a `.Future` in a timeout.
Raises `TimeoutError` if the input future does not complete before
@@ -512,9 +640,17 @@ def with_timeout(timeout, future, io_loop=None):
`.IOLoop.add_timeout` (i.e. a `datetime.timedelta` or an absolute time
relative to `.IOLoop.time`)
+ If the wrapped `.Future` fails after it has timed out, the exception
+ will be logged unless it is of a type contained in ``quiet_exceptions``
+ (which may be an exception type or a sequence of types).
+
Currently only supports Futures, not other `YieldPoint` classes.
.. versionadded:: 4.0
+
+ .. versionchanged:: 4.1
+ Added the ``quiet_exceptions`` argument and the logging of unhandled
+ exceptions.
"""
# TODO: allow yield points in addition to futures?
# Tricky to do with stack_context semantics.
@@ -528,9 +664,19 @@ def with_timeout(timeout, future, io_loop=None):
chain_future(future, result)
if io_loop is None:
io_loop = IOLoop.current()
+ def error_callback(future):
+ try:
+ future.result()
+ except Exception as e:
+ if not isinstance(e, quiet_exceptions):
+ app_log.error("Exception in Future %r after timeout",
+ future, exc_info=True)
+ def timeout_callback():
+ result.set_exception(TimeoutError("Timeout"))
+ # In case the wrapped future goes on to fail, log it.
+ future.add_done_callback(error_callback)
timeout_handle = io_loop.add_timeout(
- timeout,
- lambda: result.set_exception(TimeoutError("Timeout")))
+ timeout, timeout_callback)
if isinstance(future, Future):
# We know this future will resolve on the IOLoop, so we don't
# need the extra thread-safety of IOLoop.add_future (and we also
@@ -545,6 +691,25 @@ def with_timeout(timeout, future, io_loop=None):
return result
+def sleep(duration):
+ """Return a `.Future` that resolves after the given number of seconds.
+
+ When used with ``yield`` in a coroutine, this is a non-blocking
+ analogue to `time.sleep` (which should not be used in coroutines
+ because it is blocking)::
+
+ yield gen.sleep(0.5)
+
+ Note that calling this function on its own does nothing; you must
+ wait on the `.Future` it returns (usually by yielding it).
+
+ .. versionadded:: 4.1
+ """
+ f = Future()
+ IOLoop.current().call_later(duration, lambda: f.set_result(None))
+ return f
+
+
_null_future = Future()
_null_future.set_result(None)
@@ -678,18 +843,18 @@ class Runner(object):
self.running = False
def handle_yield(self, yielded):
- if isinstance(yielded, list):
- if all(is_future(f) for f in yielded):
- yielded = multi_future(yielded)
- else:
- yielded = Multi(yielded)
- elif isinstance(yielded, dict):
- if all(is_future(f) for f in yielded.values()):
- yielded = multi_future(yielded)
- else:
- yielded = Multi(yielded)
+ # Lists containing YieldPoints require stack contexts;
+ # other lists are handled via multi_future in convert_yielded.
+ if (isinstance(yielded, list) and
+ any(isinstance(f, YieldPoint) for f in yielded)):
+ yielded = Multi(yielded)
+ elif (isinstance(yielded, dict) and
+ any(isinstance(f, YieldPoint) for f in yielded.values())):
+ yielded = Multi(yielded)
if isinstance(yielded, YieldPoint):
+ # YieldPoints are too closely coupled to the Runner to go
+ # through the generic convert_yielded mechanism.
self.future = TracebackFuture()
def start_yield_point():
try:
@@ -702,6 +867,7 @@ class Runner(object):
except Exception:
self.future = TracebackFuture()
self.future.set_exc_info(sys.exc_info())
+
if self.stack_context_deactivate is None:
# Start a stack context if this is the first
# YieldPoint we've seen.
@@ -715,16 +881,17 @@ class Runner(object):
return False
else:
start_yield_point()
- elif is_future(yielded):
- self.future = yielded
- if not self.future.done() or self.future is moment:
- self.io_loop.add_future(
- self.future, lambda f: self.run())
- return False
else:
- self.future = TracebackFuture()
- self.future.set_exception(BadYieldError(
- "yielded unknown object %r" % (yielded,)))
+ try:
+ self.future = convert_yielded(yielded)
+ except BadYieldError:
+ self.future = TracebackFuture()
+ self.future.set_exc_info(sys.exc_info())
+
+ if not self.future.done() or self.future is moment:
+ self.io_loop.add_future(
+ self.future, lambda f: self.run())
+ return False
return True
def result_callback(self, key):
@@ -763,3 +930,30 @@ def _argument_adapter(callback):
else:
callback(None)
return wrapper
+
+
+def convert_yielded(yielded):
+ """Convert a yielded object into a `.Future`.
+
+ The default implementation accepts lists, dictionaries, and Futures.
+
+ If the `~functools.singledispatch` library is available, this function
+ may be extended to support additional types. For example::
+
+ @convert_yielded.register(asyncio.Future)
+ def _(asyncio_future):
+ return tornado.platform.asyncio.to_tornado_future(asyncio_future)
+
+ .. versionadded:: 4.1
+ """
+ # Lists and dicts containing YieldPoints were handled separately
+ # via Multi().
+ if isinstance(yielded, (list, dict)):
+ return multi_future(yielded)
+ elif is_future(yielded):
+ return yielded
+ else:
+ raise BadYieldError("yielded unknown object %r" % (yielded,))
+
+if singledispatch is not None:
+ convert_yielded = singledispatch(convert_yielded)
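Reviewer note: a short coroutine sketch (assuming Tornado 4.1 with these changes applied) exercising two of the additions above, ``gen.sleep`` and the ``quiet_exceptions`` argument of ``gen.with_timeout``::

    import datetime

    from tornado import gen
    from tornado.ioloop import IOLoop

    @gen.coroutine
    def delayed_value():
        yield gen.sleep(0.1)   # non-blocking analogue of time.sleep
        raise gen.Return(42)

    @gen.coroutine
    def main():
        try:
            result = yield gen.with_timeout(
                datetime.timedelta(seconds=1), delayed_value(),
                # late failures of these types are not logged after a timeout
                quiet_exceptions=(IOError,))
        except gen.TimeoutError:
            result = None
        raise gen.Return(result)

    print(IOLoop.current().run_sync(main))  # prints 42
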
diff --git a/libs/tornado/http1connection.py b/libs/tornado/http1connection.py
index 90895cc..181319c 100644
--- a/libs/tornado/http1connection.py
+++ b/libs/tornado/http1connection.py
@@ -162,7 +162,8 @@ class HTTP1Connection(httputil.HTTPConnection):
header_data = yield gen.with_timeout(
self.stream.io_loop.time() + self.params.header_timeout,
header_future,
- io_loop=self.stream.io_loop)
+ io_loop=self.stream.io_loop,
+ quiet_exceptions=iostream.StreamClosedError)
except gen.TimeoutError:
self.close()
raise gen.Return(False)
@@ -221,7 +222,8 @@ class HTTP1Connection(httputil.HTTPConnection):
try:
yield gen.with_timeout(
self.stream.io_loop.time() + self._body_timeout,
- body_future, self.stream.io_loop)
+ body_future, self.stream.io_loop,
+ quiet_exceptions=iostream.StreamClosedError)
except gen.TimeoutError:
gen_log.info("Timeout reading body from %s",
self.context)
@@ -326,8 +328,10 @@ class HTTP1Connection(httputil.HTTPConnection):
def write_headers(self, start_line, headers, chunk=None, callback=None):
"""Implements `.HTTPConnection.write_headers`."""
+ lines = []
if self.is_client:
self._request_start_line = start_line
+ lines.append(utf8('%s %s HTTP/1.1' % (start_line[0], start_line[1])))
# Client requests with a non-empty body must have either a
# Content-Length or a Transfer-Encoding.
self._chunking_output = (
@@ -336,6 +340,7 @@ class HTTP1Connection(httputil.HTTPConnection):
'Transfer-Encoding' not in headers)
else:
self._response_start_line = start_line
+ lines.append(utf8('HTTP/1.1 %s %s' % (start_line[1], start_line[2])))
self._chunking_output = (
# TODO: should this use
# self._request_start_line.version or
@@ -365,7 +370,6 @@ class HTTP1Connection(httputil.HTTPConnection):
self._expected_content_remaining = int(headers['Content-Length'])
else:
self._expected_content_remaining = None
- lines = [utf8("%s %s %s" % start_line)]
lines.extend([utf8(n) + b": " + utf8(v) for n, v in headers.get_all()])
for line in lines:
if b'\n' in line:
@@ -374,6 +378,7 @@ class HTTP1Connection(httputil.HTTPConnection):
if self.stream.closed():
future = self._write_future = Future()
future.set_exception(iostream.StreamClosedError())
+ future.exception()
else:
if callback is not None:
self._write_callback = stack_context.wrap(callback)
@@ -412,6 +417,7 @@ class HTTP1Connection(httputil.HTTPConnection):
if self.stream.closed():
future = self._write_future = Future()
self._write_future.set_exception(iostream.StreamClosedError())
+ self._write_future.exception()
else:
if callback is not None:
self._write_callback = stack_context.wrap(callback)
@@ -451,6 +457,9 @@ class HTTP1Connection(httputil.HTTPConnection):
self._pending_write.add_done_callback(self._finish_request)
def _on_write_complete(self, future):
+ exc = future.exception()
+ if exc is not None and not isinstance(exc, iostream.StreamClosedError):
+ future.result()
if self._write_callback is not None:
callback = self._write_callback
self._write_callback = None
@@ -491,8 +500,9 @@ class HTTP1Connection(httputil.HTTPConnection):
# we SHOULD ignore at least one empty line before the request.
# http://tools.ietf.org/html/rfc7230#section-3.5
data = native_str(data.decode('latin1')).lstrip("\r\n")
- eol = data.find("\r\n")
- start_line = data[:eol]
+ # RFC 7230 section 3.5 allows for both CRLF and bare LF.
+ eol = data.find("\n")
+ start_line = data[:eol].rstrip("\r")
try:
headers = httputil.HTTPHeaders.parse(data[eol:])
except ValueError:
diff --git a/libs/tornado/httpclient.py b/libs/tornado/httpclient.py
index 6ea872d..0ae9e48 100755
--- a/libs/tornado/httpclient.py
+++ b/libs/tornado/httpclient.py
@@ -137,6 +137,9 @@ class AsyncHTTPClient(Configurable):
# or with force_instance:
client = AsyncHTTPClient(force_instance=True,
defaults=dict(user_agent="MyUserAgent"))
+
+ .. versionchanged:: 4.1
+ The ``io_loop`` argument is deprecated.
"""
@classmethod
def configurable_base(cls):
diff --git a/libs/tornado/httpserver.py b/libs/tornado/httpserver.py
index 47c7472..e470e0e 100755
--- a/libs/tornado/httpserver.py
+++ b/libs/tornado/httpserver.py
@@ -114,6 +114,11 @@ class HTTPServer(TCPServer, httputil.HTTPServerConnectionDelegate):
``idle_connection_timeout``, ``body_timeout``, ``max_body_size``
arguments. Added support for `.HTTPServerConnectionDelegate`
instances as ``request_callback``.
+
+ .. versionchanged:: 4.1
+ `.HTTPServerConnectionDelegate.start_request` is now called with
+ two arguments ``(server_conn, request_conn)`` (in accordance with the
+ documentation) instead of one ``(request_conn)``.
"""
def __init__(self, request_callback, no_keep_alive=False, io_loop=None,
xheaders=False, ssl_options=None, protocol=None,
@@ -153,7 +158,7 @@ class HTTPServer(TCPServer, httputil.HTTPServerConnectionDelegate):
conn.start_serving(self)
def start_request(self, server_conn, request_conn):
- return _ServerRequestAdapter(self, request_conn)
+ return _ServerRequestAdapter(self, server_conn, request_conn)
def on_close(self, server_conn):
self._connections.remove(server_conn)
@@ -226,13 +231,14 @@ class _ServerRequestAdapter(httputil.HTTPMessageDelegate):
"""Adapts the `HTTPMessageDelegate` interface to the interface expected
by our clients.
"""
- def __init__(self, server, connection):
+ def __init__(self, server, server_conn, request_conn):
self.server = server
- self.connection = connection
+ self.connection = request_conn
self.request = None
if isinstance(server.request_callback,
httputil.HTTPServerConnectionDelegate):
- self.delegate = server.request_callback.start_request(connection)
+ self.delegate = server.request_callback.start_request(
+ server_conn, request_conn)
self._chunks = None
else:
self.delegate = None
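Reviewer note: an illustrative sketch of the corrected ``start_request(server_conn, request_conn)`` signature noted above; the delegate, class names, and port are hypothetical::

    from tornado import httputil
    from tornado.httpserver import HTTPServer
    from tornado.ioloop import IOLoop

    class EchoDelegate(httputil.HTTPMessageDelegate):
        def __init__(self, request_conn):
            self.connection = request_conn

        def finish(self):
            body = b"hello\n"
            self.connection.write_headers(
                httputil.ResponseStartLine("HTTP/1.1", 200, "OK"),
                httputil.HTTPHeaders({"Content-Length": str(len(body))}))
            self.connection.write(body)
            self.connection.finish()

    class EchoServer(httputil.HTTPServerConnectionDelegate):
        def start_request(self, server_conn, request_conn):
            # Both connection objects are now passed, per the note above.
            return EchoDelegate(request_conn)

    HTTPServer(EchoServer()).listen(8888)
    IOLoop.current().start()
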
diff --git a/libs/tornado/httputil.py b/libs/tornado/httputil.py
index 88389fe..9c99b3e 100755
--- a/libs/tornado/httputil.py
+++ b/libs/tornado/httputil.py
@@ -62,6 +62,11 @@ except ImportError:
pass
+# RFC 7230 section 3.5: a recipient MAY recognize a single LF as a line
+# terminator and ignore any preceding CR.
+_CRLF_RE = re.compile(r'\r?\n')
+
+
class _NormalizedHeaderCache(dict):
"""Dynamic cached mapping of header names to Http-Header-Case.
@@ -193,7 +198,7 @@ class HTTPHeaders(dict):
[('Content-Length', '42'), ('Content-Type', 'text/html')]
"""
h = cls()
- for line in headers.splitlines():
+ for line in _CRLF_RE.split(headers):
if line:
h.parse_line(line)
return h
@@ -543,6 +548,8 @@ class HTTPConnection(object):
headers.
:arg callback: a callback to be run when the write is complete.
+ The ``version`` field of ``start_line`` is ignored.
+
Returns a `.Future` if no callback is given.
"""
raise NotImplementedError()
@@ -689,14 +696,17 @@ def parse_body_arguments(content_type, body, arguments, files, headers=None):
if values:
arguments.setdefault(name, []).extend(values)
elif content_type.startswith("multipart/form-data"):
- fields = content_type.split(";")
- for field in fields:
- k, sep, v = field.strip().partition("=")
- if k == "boundary" and v:
- parse_multipart_form_data(utf8(v), body, arguments, files)
- break
- else:
- gen_log.warning("Invalid multipart/form-data")
+ try:
+ fields = content_type.split(";")
+ for field in fields:
+ k, sep, v = field.strip().partition("=")
+ if k == "boundary" and v:
+ parse_multipart_form_data(utf8(v), body, arguments, files)
+ break
+ else:
+ raise ValueError("multipart boundary not found")
+ except Exception as e:
+ gen_log.warning("Invalid multipart/form-data: %s", e)
def parse_multipart_form_data(boundary, data, arguments, files):
@@ -782,7 +792,7 @@ def parse_request_start_line(line):
method, path, version = line.split(" ")
except ValueError:
raise HTTPInputError("Malformed HTTP request line")
- if not version.startswith("HTTP/"):
+ if not re.match(r"^HTTP/1\.[0-9]$", version):
raise HTTPInputError(
"Malformed HTTP version in HTTP Request-Line: %r" % version)
return RequestStartLine(method, path, version)
@@ -801,7 +811,7 @@ def parse_response_start_line(line):
ResponseStartLine(version='HTTP/1.1', code=200, reason='OK')
"""
line = native_str(line)
- match = re.match("(HTTP/1.[01]) ([0-9]+) ([^\r]*)", line)
+ match = re.match("(HTTP/1.[0-9]) ([0-9]+) ([^\r]*)", line)
if not match:
raise HTTPInputError("Error parsing response start line")
return ResponseStartLine(match.group(1), int(match.group(2)),
@@ -878,6 +888,8 @@ def split_host_and_port(netloc):
"""Returns ``(host, port)`` tuple from ``netloc``.
Returned ``port`` will be ``None`` if not present.
+
+ .. versionadded:: 4.1
"""
match = re.match(r'^(.+):(\d+)$', netloc)
if match:
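Reviewer note: behaviour of the newly documented ``split_host_and_port`` helper, per its docstring above (a quick illustrative check)::

    from tornado.httputil import split_host_and_port

    assert split_host_and_port("example.com:8080") == ("example.com", 8080)
    assert split_host_and_port("example.com") == ("example.com", None)
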
diff --git a/libs/tornado/ioloop.py b/libs/tornado/ioloop.py
index 0319386..680dc40 100755
--- a/libs/tornado/ioloop.py
+++ b/libs/tornado/ioloop.py
@@ -167,28 +167,26 @@ class IOLoop(Configurable):
del IOLoop._instance
@staticmethod
- def current():
+ def current(instance=True):
"""Returns the current thread's `IOLoop`.
- If an `IOLoop` is currently running or has been marked as current
- by `make_current`, returns that instance. Otherwise returns
- `IOLoop.instance()`, i.e. the main thread's `IOLoop`.
-
- A common pattern for classes that depend on ``IOLoops`` is to use
- a default argument to enable programs with multiple ``IOLoops``
- but not require the argument for simpler applications::
-
- class MyClass(object):
- def __init__(self, io_loop=None):
- self.io_loop = io_loop or IOLoop.current()
+ If an `IOLoop` is currently running or has been marked as
+ current by `make_current`, returns that instance. If there is
+ no current `IOLoop`, returns `IOLoop.instance()` (i.e. the
+ main thread's `IOLoop`, creating one if necessary) if ``instance``
+ is true.
In general you should use `IOLoop.current` as the default when
constructing an asynchronous object, and use `IOLoop.instance`
when you mean to communicate to the main thread from a different
one.
+
+ .. versionchanged:: 4.1
+ Added ``instance`` argument to control the fallback to
+ `IOLoop.instance()`.
+
"""
current = getattr(IOLoop._current, "instance", None)
- if current is None:
+ if current is None and instance:
return IOLoop.instance()
return current
@@ -200,6 +198,10 @@ class IOLoop(Configurable):
`make_current` explicitly before starting the `IOLoop`,
so that code run at startup time can find the right
instance.
+
+ .. versionchanged:: 4.1
+ An `IOLoop` created while there is no current `IOLoop`
+ will automatically become current.
"""
IOLoop._current.instance = self
@@ -224,7 +226,8 @@ class IOLoop(Configurable):
return SelectIOLoop
def initialize(self):
- pass
+ if IOLoop.current(instance=False) is None:
+ self.make_current()
def close(self, all_fds=False):
"""Closes the `IOLoop`, freeing any resources used.
@@ -946,6 +949,9 @@ class PeriodicCallback(object):
The callback is called every ``callback_time`` milliseconds.
`start` must be called after the `PeriodicCallback` is created.
+
+ .. versionchanged:: 4.1
+ The ``io_loop`` argument is deprecated.
"""
def __init__(self, callback, callback_time, io_loop=None):
self.callback = callback
@@ -969,6 +975,13 @@ class PeriodicCallback(object):
self.io_loop.remove_timeout(self._timeout)
self._timeout = None
+ def is_running(self):
+ """Return True if this `.PeriodicCallback` has been started.
+
+ .. versionadded:: 4.1
+ """
+ return self._running
+
def _run(self):
if not self._running:
return
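Reviewer note: a sketch of the two IOLoop-related additions above, ``IOLoop.current(instance=False)`` and ``PeriodicCallback.is_running()``; it assumes no other IOLoop has been created or made current in the process yet::

    from tornado.ioloop import IOLoop, PeriodicCallback

    # With no loop created or made current yet, instance=False returns None
    # instead of falling back to the IOLoop.instance() singleton.
    assert IOLoop.current(instance=False) is None

    loop = IOLoop()   # becomes current automatically (new in 4.1)
    assert IOLoop.current(instance=False) is loop

    pc = PeriodicCallback(lambda: None, callback_time=1000)
    assert not pc.is_running()
    pc.start()
    assert pc.is_running()
    pc.stop()
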
diff --git a/libs/tornado/iostream.py b/libs/tornado/iostream.py
index 2d5df99..cdb6250 100755
--- a/libs/tornado/iostream.py
+++ b/libs/tornado/iostream.py
@@ -68,6 +68,14 @@ _ERRNO_CONNRESET = (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE,
if hasattr(errno, "WSAECONNRESET"):
_ERRNO_CONNRESET += (errno.WSAECONNRESET, errno.WSAECONNABORTED, errno.WSAETIMEDOUT)
+if sys.platform == 'darwin':
+ # OSX appears to have a race condition that causes send(2) to return
+ # EPROTOTYPE if called while a socket is being torn down:
+ # http://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
+ # Since the socket is being closed anyway, treat this as an ECONNRESET
+ # instead of an unexpected error.
+ _ERRNO_CONNRESET += (errno.EPROTOTYPE,)
+
# More non-portable errnos:
_ERRNO_INPROGRESS = (errno.EINPROGRESS,)
@@ -122,6 +130,7 @@ class BaseIOStream(object):
"""`BaseIOStream` constructor.
:arg io_loop: The `.IOLoop` to use; defaults to `.IOLoop.current`.
+ Deprecated since Tornado 4.1.
:arg max_buffer_size: Maximum amount of incoming data to buffer;
defaults to 100MB.
:arg read_chunk_size: Amount of data to read at one time from the
@@ -230,6 +239,12 @@ class BaseIOStream(object):
gen_log.info("Unsatisfiable read, closing connection: %s" % e)
self.close(exc_info=True)
return future
+ except:
+ if future is not None:
+ # Ensure that the future doesn't log an error because its
+ # failure was never examined.
+ future.add_done_callback(lambda f: f.exception())
+ raise
return future
def read_until(self, delimiter, callback=None, max_bytes=None):
@@ -257,6 +272,10 @@ class BaseIOStream(object):
gen_log.info("Unsatisfiable read, closing connection: %s" % e)
self.close(exc_info=True)
return future
+ except:
+ if future is not None:
+ future.add_done_callback(lambda f: f.exception())
+ raise
return future
def read_bytes(self, num_bytes, callback=None, streaming_callback=None,
@@ -281,7 +300,12 @@ class BaseIOStream(object):
self._read_bytes = num_bytes
self._read_partial = partial
self._streaming_callback = stack_context.wrap(streaming_callback)
- self._try_inline_read()
+ try:
+ self._try_inline_read()
+ except:
+ if future is not None:
+ future.add_done_callback(lambda f: f.exception())
+ raise
return future
def read_until_close(self, callback=None, streaming_callback=None):
@@ -305,7 +329,11 @@ class BaseIOStream(object):
self._run_read_callback(self._read_buffer_size, False)
return future
self._read_until_close = True
- self._try_inline_read()
+ try:
+ self._try_inline_read()
+ except:
+ future.add_done_callback(lambda f: f.exception())
+ raise
return future
def write(self, data, callback=None):
@@ -344,6 +372,7 @@ class BaseIOStream(object):
future = None
else:
future = self._write_future = TracebackFuture()
+ future.add_done_callback(lambda f: f.exception())
if not self._connecting:
self._handle_write()
if self._write_buffer:
@@ -1010,8 +1039,9 @@ class IOStream(BaseIOStream):
# reported later in _handle_connect.
if (errno_from_exception(e) not in _ERRNO_INPROGRESS and
errno_from_exception(e) not in _ERRNO_WOULDBLOCK):
- gen_log.warning("Connect error on fd %s: %s",
- self.socket.fileno(), e)
+ if future is None:
+ gen_log.warning("Connect error on fd %s: %s",
+ self.socket.fileno(), e)
self.close(exc_info=True)
return future
self._add_io_state(self.io_loop.WRITE)
@@ -1058,7 +1088,9 @@ class IOStream(BaseIOStream):
socket = self.socket
self.io_loop.remove_handler(socket)
self.socket = None
- socket = ssl_wrap_socket(socket, ssl_options, server_side=server_side,
+ socket = ssl_wrap_socket(socket, ssl_options,
+ server_hostname=server_hostname,
+ server_side=server_side,
do_handshake_on_connect=False)
orig_close_callback = self._close_callback
self._close_callback = None
diff --git a/libs/tornado/netutil.py b/libs/tornado/netutil.py
index e85f62b..17e9580 100755
--- a/libs/tornado/netutil.py
+++ b/libs/tornado/netutil.py
@@ -187,6 +187,9 @@ def add_accept_handler(sock, callback, io_loop=None):
address of the other end of the connection). Note that this signature
is different from the ``callback(fd, events)`` signature used for
`.IOLoop` handlers.
+
+ .. versionchanged:: 4.1
+ The ``io_loop`` argument is deprecated.
"""
if io_loop is None:
io_loop = IOLoop.current()
@@ -301,6 +304,9 @@ class ExecutorResolver(Resolver):
The executor will be shut down when the resolver is closed unless
``close_resolver=False``; use this if you want to reuse the same
executor elsewhere.
+
+ .. versionchanged:: 4.1
+ The ``io_loop`` argument is deprecated.
"""
def initialize(self, io_loop=None, executor=None, close_executor=True):
self.io_loop = io_loop or IOLoop.current()
diff --git a/libs/tornado/platform/asyncio.py b/libs/tornado/platform/asyncio.py
index dd6722a..bc68517 100644
--- a/libs/tornado/platform/asyncio.py
+++ b/libs/tornado/platform/asyncio.py
@@ -12,6 +12,8 @@ unfinished callbacks on the event loop that fail when it resumes)
from __future__ import absolute_import, division, print_function, with_statement
import functools
+import tornado.concurrent
+from tornado.gen import convert_yielded
from tornado.ioloop import IOLoop
from tornado import stack_context
@@ -138,3 +140,18 @@ class AsyncIOLoop(BaseAsyncIOLoop):
def initialize(self):
super(AsyncIOLoop, self).initialize(asyncio.new_event_loop(),
close_loop=True)
+
+def to_tornado_future(asyncio_future):
+ """Convert an ``asyncio.Future`` to a `tornado.concurrent.Future`."""
+ tf = tornado.concurrent.Future()
+ tornado.concurrent.chain_future(asyncio_future, tf)
+ return tf
+
+def to_asyncio_future(tornado_future):
+ """Convert a `tornado.concurrent.Future` to an ``asyncio.Future``."""
+ af = asyncio.Future()
+ tornado.concurrent.chain_future(tornado_future, af)
+ return af
+
+if hasattr(convert_yielded, 'register'):
+ convert_yielded.register(asyncio.Future, to_tornado_future)
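Reviewer note: a sketch of the asyncio bridge added above (Python 3.4+; assumes ``AsyncIOMainLoop`` drives the shared event loop). Importing ``tornado.platform.asyncio`` registers ``asyncio.Future`` with ``convert_yielded``, so such futures can be yielded directly from Tornado coroutines::

    import asyncio

    from tornado import gen
    from tornado.ioloop import IOLoop
    from tornado.platform.asyncio import AsyncIOMainLoop

    AsyncIOMainLoop().install()

    @gen.coroutine
    def main():
        afut = asyncio.Future()
        asyncio.get_event_loop().call_later(0.1, afut.set_result, 'done')
        result = yield afut   # converted via to_tornado_future()
        raise gen.Return(result)

    print(IOLoop.current().run_sync(main))  # prints 'done'
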
diff --git a/libs/tornado/platform/caresresolver.py b/libs/tornado/platform/caresresolver.py
index c4648c2..5559614 100755
--- a/libs/tornado/platform/caresresolver.py
+++ b/libs/tornado/platform/caresresolver.py
@@ -18,6 +18,9 @@ class CaresResolver(Resolver):
so it is only recommended for use in ``AF_INET`` (i.e. IPv4). This is
the default for ``tornado.simple_httpclient``, but other libraries
may default to ``AF_UNSPEC``.
+
+ .. versionchanged:: 4.1
+ The ``io_loop`` argument is deprecated.
"""
def initialize(self, io_loop=None):
self.io_loop = io_loop or IOLoop.current()
diff --git a/libs/tornado/platform/twisted.py b/libs/tornado/platform/twisted.py
index 27d991c..09b3283 100755
--- a/libs/tornado/platform/twisted.py
+++ b/libs/tornado/platform/twisted.py
@@ -70,8 +70,10 @@ import datetime
import functools
import numbers
import socket
+import sys
import twisted.internet.abstract
+from twisted.internet.defer import Deferred
from twisted.internet.posixbase import PosixReactorBase
from twisted.internet.interfaces import \
IReactorFDSet, IDelayedCall, IReactorTime, IReadDescriptor, IWriteDescriptor
@@ -84,6 +86,7 @@ import twisted.names.resolve
from zope.interface import implementer
+from tornado.concurrent import Future
from tornado.escape import utf8
from tornado import gen
import tornado.ioloop
@@ -147,6 +150,9 @@ class TornadoReactor(PosixReactorBase):
We override `mainLoop` instead of `doIteration` and must implement
timed call functionality on top of `IOLoop.add_timeout` rather than
using the implementation in `PosixReactorBase`.
+
+ .. versionchanged:: 4.1
+ The ``io_loop`` argument is deprecated.
"""
def __init__(self, io_loop=None):
if not io_loop:
@@ -356,7 +362,11 @@ class _TestReactor(TornadoReactor):
def install(io_loop=None):
- """Install this package as the default Twisted reactor."""
+ """Install this package as the default Twisted reactor.
+
+ .. versionchanged:: 4.1
+ The ``io_loop`` argument is deprecated.
+ """
if not io_loop:
io_loop = tornado.ioloop.IOLoop.current()
reactor = TornadoReactor(io_loop)
@@ -512,6 +522,9 @@ class TwistedResolver(Resolver):
``socket.AF_UNSPEC``.
Requires Twisted 12.1 or newer.
+
+ .. versionchanged:: 4.1
+ The ``io_loop`` argument is deprecated.
"""
def initialize(self, io_loop=None):
self.io_loop = io_loop or IOLoop.current()
@@ -554,3 +567,17 @@ class TwistedResolver(Resolver):
(resolved_family, (resolved, port)),
]
raise gen.Return(result)
+
+if hasattr(gen.convert_yielded, 'register'):
+ @gen.convert_yielded.register(Deferred)
+ def _(d):
+ f = Future()
+ def errback(failure):
+ try:
+ failure.raiseException()
+ # Should never happen, but just in case
+ raise Exception("errback called without error")
+ except:
+ f.set_exc_info(sys.exc_info())
+ d.addCallbacks(f.set_result, errback)
+ return f
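Reviewer note: similarly, with Twisted and ``singledispatch`` available, the registration above lets a coroutine yield a ``Deferred`` directly; a sketch with an arbitrary delay and value::

    from twisted.internet.defer import Deferred

    import tornado.platform.twisted  # noqa: registers Deferred with convert_yielded
    from tornado import gen
    from tornado.ioloop import IOLoop

    @gen.coroutine
    def main():
        d = Deferred()
        IOLoop.current().call_later(0.1, d.callback, 'fired')
        result = yield d   # converted to a Future behind the scenes
        raise gen.Return(result)

    print(IOLoop.current().run_sync(main))  # prints 'fired'
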
diff --git a/libs/tornado/process.py b/libs/tornado/process.py
index cea3dbd..3790ca0 100755
--- a/libs/tornado/process.py
+++ b/libs/tornado/process.py
@@ -191,6 +191,9 @@ class Subprocess(object):
``tornado.process.Subprocess.STREAM``, which will make the corresponding
attribute of the resulting Subprocess a `.PipeIOStream`.
* A new keyword argument ``io_loop`` may be used to pass in an IOLoop.
+
+ .. versionchanged:: 4.1
+ The ``io_loop`` argument is deprecated.
"""
STREAM = object()
@@ -263,6 +266,9 @@ class Subprocess(object):
Note that the `.IOLoop` used for signal handling need not be the
same one used by individual Subprocess objects (as long as the
``IOLoops`` are each running in separate threads).
+
+ .. versionchanged:: 4.1
+ The ``io_loop`` argument is deprecated.
"""
if cls._initialized:
return
diff --git a/libs/tornado/simple_httpclient.py b/libs/tornado/simple_httpclient.py
index 7c915e9..31d076e 100755
--- a/libs/tornado/simple_httpclient.py
+++ b/libs/tornado/simple_httpclient.py
@@ -345,7 +345,7 @@ class _HTTPConnection(httputil.HTTPMessageDelegate):
decompress=self.request.decompress_response),
self._sockaddr)
start_line = httputil.RequestStartLine(self.request.method,
- req_path, 'HTTP/1.1')
+ req_path, '')
self.connection.write_headers(start_line, self.request.headers)
if self.request.expect_100_continue:
self._read_response()
diff --git a/libs/tornado/tcpclient.py b/libs/tornado/tcpclient.py
index 0abbea2..f594d91 100644
--- a/libs/tornado/tcpclient.py
+++ b/libs/tornado/tcpclient.py
@@ -111,6 +111,7 @@ class _Connector(object):
if self.timeout is not None:
# If the first attempt failed, don't wait for the
# timeout to try an address from the secondary queue.
+ self.io_loop.remove_timeout(self.timeout)
self.on_timeout()
return
self.clear_timeout()
@@ -135,6 +136,9 @@ class _Connector(object):
class TCPClient(object):
"""A non-blocking TCP connection factory.
+
+ .. versionchanged:: 4.1
+ The ``io_loop`` argument is deprecated.
"""
def __init__(self, resolver=None, io_loop=None):
self.io_loop = io_loop or IOLoop.current()
diff --git a/libs/tornado/tcpserver.py b/libs/tornado/tcpserver.py
index 427acec..a02b36f 100755
--- a/libs/tornado/tcpserver.py
+++ b/libs/tornado/tcpserver.py
@@ -95,7 +95,7 @@ class TCPServer(object):
self._pending_sockets = []
self._started = False
self.max_buffer_size = max_buffer_size
- self.read_chunk_size = None
+ self.read_chunk_size = read_chunk_size
# Verify the SSL options. Otherwise we don't get errors until clients
# connect. This doesn't verify that the keys are legitimate, but
diff --git a/libs/tornado/testing.py b/libs/tornado/testing.py
index 4511863..3d3bcf7 100755
--- a/libs/tornado/testing.py
+++ b/libs/tornado/testing.py
@@ -543,6 +543,9 @@ class LogTrapTestCase(unittest.TestCase):
`logging.basicConfig` and the "pretty logging" configured by
`tornado.options`. It is not compatible with other log buffering
mechanisms, such as those provided by some test runners.
+
+ .. deprecated:: 4.1
+ Use the unittest module's ``--buffer`` option instead, or `.ExpectLog`.
"""
def run(self, result=None):
logger = logging.getLogger()
diff --git a/libs/tornado/web.py b/libs/tornado/web.py
index 2d1dac0..52bfce3 100755
--- a/libs/tornado/web.py
+++ b/libs/tornado/web.py
@@ -268,6 +268,7 @@ class RequestHandler(object):
if _has_stream_request_body(self.__class__):
if not self.request.body.done():
self.request.body.set_exception(iostream.StreamClosedError())
+ self.request.body.exception()
def clear(self):
"""Resets all headers and content for this response."""
@@ -840,7 +841,7 @@ class RequestHandler(object):
for cookie in self._new_cookie.values():
self.add_header("Set-Cookie", cookie.OutputString(None))
- start_line = httputil.ResponseStartLine(self.request.version,
+ start_line = httputil.ResponseStartLine('',
self._status_code,
self._reason)
return self.request.connection.write_headers(
@@ -1120,28 +1121,36 @@ class RequestHandler(object):
"""Convert a cookie string into a the tuple form returned by
_get_raw_xsrf_token.
"""
- m = _signed_value_version_re.match(utf8(cookie))
- if m:
- version = int(m.group(1))
- if version == 2:
- _, mask, masked_token, timestamp = cookie.split("|")
- mask = binascii.a2b_hex(utf8(mask))
- token = _websocket_mask(
- mask, binascii.a2b_hex(utf8(masked_token)))
- timestamp = int(timestamp)
- return version, token, timestamp
+
+ try:
+ m = _signed_value_version_re.match(utf8(cookie))
+
+ if m:
+ version = int(m.group(1))
+ if version == 2:
+ _, mask, masked_token, timestamp = cookie.split("|")
+
+ mask = binascii.a2b_hex(utf8(mask))
+ token = _websocket_mask(
+ mask, binascii.a2b_hex(utf8(masked_token)))
+ timestamp = int(timestamp)
+ return version, token, timestamp
+ else:
+ # Treat unknown versions as not present instead of failing.
+ raise Exception("Unknown xsrf cookie version")
else:
- # Treat unknown versions as not present instead of failing.
- return None, None, None
- else:
- version = 1
- try:
- token = binascii.a2b_hex(utf8(cookie))
- except (binascii.Error, TypeError):
- token = utf8(cookie)
- # We don't have a usable timestamp in older versions.
- timestamp = int(time.time())
- return (version, token, timestamp)
+ version = 1
+ try:
+ token = binascii.a2b_hex(utf8(cookie))
+ except (binascii.Error, TypeError):
+ token = utf8(cookie)
+ # We don't have a usable timestamp in older versions.
+ timestamp = int(time.time())
+ return (version, token, timestamp)
+ except Exception:
+ # Catch exceptions and return nothing instead of failing.
+ gen_log.debug("Uncaught exception in _decode_xsrf_token", exc_info=True)
+ return None, None, None
def check_xsrf_cookie(self):
"""Verifies that the ``_xsrf`` cookie matches the ``_xsrf`` argument.
@@ -1771,9 +1780,9 @@ class Application(httputil.HTTPServerConnectionDelegate):
except TypeError:
pass
- def start_request(self, connection):
+ def start_request(self, server_conn, request_conn):
# Modern HTTPServer interface
- return _RequestDispatcher(self, connection)
+ return _RequestDispatcher(self, request_conn)
def __call__(self, request):
# Legacy HTTPServer interface
@@ -1915,8 +1924,10 @@ class _RequestDispatcher(httputil.HTTPMessageDelegate):
# trapped in the Future it returns (which we are ignoring here).
# However, that shouldn't happen because _execute has a blanket
# except handler, and we cannot easily access the IOLoop here to
- # call add_future.
- self.handler._execute(transforms, *self.path_args, **self.path_kwargs)
+ # call add_future (because of the requirement to remain compatible
+ # with WSGI)
+ f = self.handler._execute(transforms, *self.path_args, **self.path_kwargs)
+ f.add_done_callback(lambda f: f.exception())
# If we are streaming the request body, then execute() is finished
# when the handler has prepared to receive the body. If not,
# it doesn't matter when execute() finishes (so we return None)
@@ -2622,6 +2633,8 @@ class UIModule(object):
UI modules often execute additional queries, and they can include
additional CSS and JavaScript that will be included in the output
page, which is automatically inserted on page render.
+
+ Subclasses of UIModule must override the `render` method.
"""
def __init__(self, handler):
self.handler = handler
@@ -2634,31 +2647,43 @@ class UIModule(object):
return self.handler.current_user
def render(self, *args, **kwargs):
- """Overridden in subclasses to return this module's output."""
+ """Override in subclasses to return this module's output."""
raise NotImplementedError()
def embedded_javascript(self):
- """Returns a JavaScript string that will be embedded in the page."""
+ """Override to return a JavaScript string to be embedded in the page."""
return None
def javascript_files(self):
- """Returns a list of JavaScript files required by this module."""
+ """Override to return a list of JavaScript files needed by this module.
+
+ If the return values are relative paths, they will be passed to
+ `RequestHandler.static_url`; otherwise they will be used as-is.
+ """
return None
def embedded_css(self):
- """Returns a CSS string that will be embedded in the page."""
+ """Override to return a CSS string that will be embedded in the page."""
return None
def css_files(self):
- """Returns a list of CSS files required by this module."""
+ """Override to returns a list of CSS files required by this module.
+
+ If the return values are relative paths, they will be passed to
+ `RequestHandler.static_url`; otherwise they will be used as-is.
+ """
return None
def html_head(self):
- """Returns a CSS string that will be put in the element"""
+ """Override to return an HTML string that will be put in the
+ element.
+ """
return None
def html_body(self):
- """Returns an HTML string that will be put in the element"""
+ """Override to return an HTML string that will be put at the end of
+ the element.
+ """
return None
def render_string(self, path, **kwargs):
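Reviewer note: a minimal ``UIModule`` subclass following the documented contract above; the module, template, and file names are illustrative only::

    import tornado.web

    class RecentPosts(tornado.web.UIModule):
        def render(self, posts):
            # render() must be overridden; everything else is optional.
            return self.render_string("modules/recent_posts.html", posts=posts)

        def javascript_files(self):
            # Relative paths are resolved through RequestHandler.static_url().
            return ["js/recent_posts.js"]

        def embedded_css(self):
            return ".recent-posts li { list-style: none; }"
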
diff --git a/libs/tornado/websocket.py b/libs/tornado/websocket.py
index 5c762ad..c009225 100755
--- a/libs/tornado/websocket.py
+++ b/libs/tornado/websocket.py
@@ -129,6 +129,7 @@ class WebSocketHandler(tornado.web.RequestHandler):
self.close_code = None
self.close_reason = None
self.stream = None
+ self._on_close_called = False
@tornado.web.asynchronous
def get(self, *args, **kwargs):
@@ -171,18 +172,16 @@ class WebSocketHandler(tornado.web.RequestHandler):
self.stream = self.request.connection.detach()
self.stream.set_close_callback(self.on_connection_close)
- if self.request.headers.get("Sec-WebSocket-Version") in ("7", "8", "13"):
- self.ws_connection = WebSocketProtocol13(
- self, compression_options=self.get_compression_options())
+ self.ws_connection = self.get_websocket_protocol()
+ if self.ws_connection:
self.ws_connection.accept_connection()
else:
if not self.stream.closed():
self.stream.write(tornado.escape.utf8(
"HTTP/1.1 426 Upgrade Required\r\n"
- "Sec-WebSocket-Version: 8\r\n\r\n"))
+ "Sec-WebSocket-Version: 7, 8, 13\r\n\r\n"))
self.stream.close()
-
def write_message(self, message, binary=False):
"""Sends the given message to the client of this Web Socket.
@@ -350,6 +349,8 @@ class WebSocketHandler(tornado.web.RequestHandler):
if self.ws_connection:
self.ws_connection.on_connection_close()
self.ws_connection = None
+ if not self._on_close_called:
+ self._on_close_called = True
self.on_close()
def send_error(self, *args, **kwargs):
@@ -362,6 +363,13 @@ class WebSocketHandler(tornado.web.RequestHandler):
# we can close the connection more gracefully.
self.stream.close()
+ def get_websocket_protocol(self):
+ websocket_version = self.request.headers.get("Sec-WebSocket-Version")
+ if websocket_version in ("7", "8", "13"):
+ return WebSocketProtocol13(
+ self, compression_options=self.get_compression_options())
+
+
def _wrap_method(method):
def _disallow_for_websocket(self, *args, **kwargs):
if self.stream is None:
@@ -852,12 +860,15 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
This class should not be instantiated directly; use the
`websocket_connect` function instead.
"""
- def __init__(self, io_loop, request, compression_options=None):
+ def __init__(self, io_loop, request, on_message_callback=None,
+ compression_options=None):
self.compression_options = compression_options
self.connect_future = TracebackFuture()
+ self.protocol = None
self.read_future = None
self.read_queue = collections.deque()
self.key = base64.b64encode(os.urandom(16))
+ self._on_message_callback = on_message_callback
scheme, sep, rest = request.url.partition(':')
scheme = {'ws': 'http', 'wss': 'https'}[scheme]
@@ -919,9 +930,7 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
start_line, headers)
self.headers = headers
- self.protocol = WebSocketProtocol13(
- self, mask_outgoing=True,
- compression_options=self.compression_options)
+ self.protocol = self.get_websocket_protocol()
self.protocol._process_server_headers(self.key, self.headers)
self.protocol._receive_frame()
@@ -946,6 +955,9 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
def read_message(self, callback=None):
"""Reads a message from the WebSocket server.
+ If on_message_callback was specified at WebSocket
+ initialization, this function will never return messages
+
Returns a future whose result is the message, or None
if the connection is closed. If a callback argument
is given it will be called with the future when it is
@@ -962,7 +974,9 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
return future
def on_message(self, message):
- if self.read_future is not None:
+ if self._on_message_callback:
+ self._on_message_callback(message)
+ elif self.read_future is not None:
self.read_future.set_result(message)
self.read_future = None
else:
@@ -971,9 +985,13 @@ class WebSocketClientConnection(simple_httpclient._HTTPConnection):
def on_pong(self, data):
pass
+ def get_websocket_protocol(self):
+ return WebSocketProtocol13(self, mask_outgoing=True,
+ compression_options=self.compression_options)
+
def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None,
- compression_options=None):
+ on_message_callback=None, compression_options=None):
"""Client-side websocket support.
Takes a url and returns a Future whose result is a
@@ -982,11 +1000,26 @@ def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None,
``compression_options`` is interpreted in the same way as the
return value of `.WebSocketHandler.get_compression_options`.
+ The connection supports two styles of operation. In the coroutine
+ style, the application typically calls
+ `~.WebSocketClientConnection.read_message` in a loop::
+
+ conn = yield websocket_connect(url)
+ while True:
+ msg = yield conn.read_message()
+ if msg is None: break
+ # Do something with msg
+
+ In the callback style, pass an ``on_message_callback`` to
+ ``websocket_connect``. In both styles, a message of ``None``
+ indicates that the connection has been closed.
+
.. versionchanged:: 3.2
Also accepts ``HTTPRequest`` objects in place of urls.
.. versionchanged:: 4.1
- Added ``compression_options``.
+ Added ``compression_options`` and ``on_message_callback``.
+ The ``io_loop`` argument is deprecated.
"""
if io_loop is None:
io_loop = IOLoop.current()
@@ -1000,7 +1033,9 @@ def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None,
request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout)
request = httpclient._RequestProxy(
request, httpclient.HTTPRequest._DEFAULTS)
- conn = WebSocketClientConnection(io_loop, request, compression_options)
+ conn = WebSocketClientConnection(io_loop, request,
+ on_message_callback=on_message_callback,
+ compression_options=compression_options)
if callback is not None:
io_loop.add_future(conn.connect_future, callback)
return conn.connect_future
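Reviewer note: a sketch of the new callback style described above: pass ``on_message_callback`` to ``websocket_connect`` and treat a ``None`` message as the close signal; the URL and port are placeholders::

    from tornado import gen
    from tornado.ioloop import IOLoop
    from tornado.websocket import websocket_connect

    def on_message(msg):
        if msg is None:
            print("connection closed")
            IOLoop.current().stop()
        else:
            print("received: %s" % msg)

    @gen.coroutine
    def connect():
        conn = yield websocket_connect("ws://localhost:8888/ws",
                                       on_message_callback=on_message)
        conn.write_message("hello")

    IOLoop.current().add_callback(connect)
    IOLoop.current().start()
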
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..0a7bc02
--- /dev/null
+++ b/package.json
@@ -0,0 +1,26 @@
+{
+ "name": "couchpotato_develop",
+ "repository": {
+ "type": "git",
+ "url": ""
+ },
+ "scripts": {
+ "start": "grunt"
+ },
+ "dependencies": {},
+ "devDependencies": {
+ "grunt": "~0.4.5",
+ "grunt-autoprefixer": "^3.0.3",
+ "grunt-concurrent": "~2.0.1",
+ "grunt-contrib-clean": "^0.6.0",
+ "grunt-contrib-cssmin": "~0.13.0",
+ "grunt-contrib-jshint": "~0.11.2",
+ "grunt-contrib-sass": "^0.9.2",
+ "grunt-contrib-uglify": "~0.9.1",
+ "grunt-contrib-watch": "~0.6.1",
+ "grunt-shell-spawn": "^0.3.8",
+ "jit-grunt": "^0.9.1",
+ "jshint-stylish": "^2.0.1",
+ "time-grunt": "^1.2.1"
+ }
+}