diff --git a/CouchPotato.py b/CouchPotato.py index b1620c0..c36757f 100755 --- a/CouchPotato.py +++ b/CouchPotato.py @@ -100,7 +100,7 @@ class Loader(object): logging.shutdown() time.sleep(3) - args = [sys.executable] + [os.path.join(base_path, __file__)] + sys.argv[1:] + args = [sys.executable] + [os.path.join(base_path, os.path.basename(__file__))] + sys.argv[1:] subprocess.Popen(args) except: self.log.critical(traceback.format_exc()) diff --git a/README.md b/README.md index 91223f1..8d1e5b8 100644 --- a/README.md +++ b/README.md @@ -17,6 +17,7 @@ Windows, see [the CP forum](http://couchpota.to/forum/showthread.php?tid=14) for * Open up `Git Bash` (or CMD) and go to the folder you want to install CP. Something like Program Files. * Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git`. * You can now start CP via `CouchPotatoServer\CouchPotato.py` to start +* Your browser should open up, but if it doesn't go to: `http://localhost:5050/` OSx: @@ -26,6 +27,7 @@ OSx: * Go to your App folder `cd /Applications` * Run `git clone https://github.com/RuudBurger/CouchPotatoServer.git` * Then do `python CouchPotatoServer/CouchPotato.py` +* Your browser should open up, but if it doesn't go to: `http://localhost:5050/` Linux (ubuntu / debian): @@ -37,3 +39,4 @@ Linux (ubuntu / debian): * Change the paths inside the init script. `sudo nano /etc/init.d/couchpotato` * Make it executable. `sudo chmod +x /etc/init.d/couchpotato` * Add it to defaults. `sudo update-rc.d couchpotato defaults` +* Open your browser and go to: `http://localhost:5050/` diff --git a/couchpotato/core/_base/_core/main.py b/couchpotato/core/_base/_core/main.py index 0e178de..e0aaa2d 100644 --- a/couchpotato/core/_base/_core/main.py +++ b/couchpotato/core/_base/_core/main.py @@ -9,6 +9,7 @@ from tornado.ioloop import IOLoop from uuid import uuid4 import os import platform +import signal import time import traceback import webbrowser @@ -51,6 +52,8 @@ class Core(Plugin): addEvent('setting.save.core.password', self.md5Password) addEvent('setting.save.core.api_key', self.checkApikey) + # Make sure we can close-down with ctrl+c properly + self.signalHandler() def md5Password(self, value): return md5(value.encode(Env.get('encoding'))) if value else '' @@ -170,3 +173,10 @@ class Core(Plugin): return jsonified({ 'version': self.version() }) + + def signalHandler(self): + + def signal_handler(signal, frame): + fireEvent('app.shutdown') + + signal.signal(signal.SIGINT, signal_handler) diff --git a/couchpotato/core/_base/updater/main.py b/couchpotato/core/_base/updater/main.py index dab8077..18d2c30 100644 --- a/couchpotato/core/_base/updater/main.py +++ b/couchpotato/core/_base/updater/main.py @@ -106,6 +106,10 @@ class Updater(Plugin): if success: fireEventAsync('app.restart') + # Assume the updater handles things + if not success: + success = True + return jsonified({ 'success': success }) @@ -396,6 +400,7 @@ class DesktopUpdater(BaseUpdater): self.update_failed = True self.desktop._esky.auto_update(callback = do_restart) + return except: self.update_failed = True diff --git a/couchpotato/core/downloaders/base.py b/couchpotato/core/downloaders/base.py index 776976d..a309598 100644 --- a/couchpotato/core/downloaders/base.py +++ b/couchpotato/core/downloaders/base.py @@ -18,6 +18,20 @@ class Downloader(Plugin): 'http://torcache.net/torrent/%s.torrent', ] + torrent_trackers = [ + 'http://tracker.publicbt.com/announce', + 'udp://tracker.istole.it:80/announce', + 'udp://fr33domtracker.h33t.com:3310/announce', + 
'http://tracker.istole.it/announce', + 'http://tracker.ccc.de/announce', + 'udp://tracker.publicbt.com:80/announce', + 'udp://tracker.ccc.de:80/announce', + 'http://exodus.desync.com/announce', + 'http://exodus.desync.com:6969/announce', + 'http://tracker.publichd.eu/announce', + 'http://tracker.openbittorrent.com/announce', + ] + def __init__(self): addEvent('download', self.download) addEvent('download.status', self.getAllDownloadStatus) diff --git a/couchpotato/core/downloaders/blackhole/__init__.py b/couchpotato/core/downloaders/blackhole/__init__.py index c287538..71649df 100644 --- a/couchpotato/core/downloaders/blackhole/__init__.py +++ b/couchpotato/core/downloaders/blackhole/__init__.py @@ -1,4 +1,5 @@ from .main import Blackhole +from couchpotato.core.helpers.variable import getDownloadDir def start(): return Blackhole() @@ -16,7 +17,7 @@ config = [{ 'options': [ { 'name': 'enabled', - 'default': 0, + 'default': True, 'type': 'enabler', 'radio_group': 'nzb,torrent', }, @@ -24,6 +25,7 @@ config = [{ 'name': 'directory', 'type': 'directory', 'description': 'Directory where the .nzb (or .torrent) file is saved to.', + 'default': getDownloadDir() }, { 'name': 'use_for', diff --git a/couchpotato/core/downloaders/sabnzbd/main.py b/couchpotato/core/downloaders/sabnzbd/main.py index 8b7383b..9bd6922 100644 --- a/couchpotato/core/downloaders/sabnzbd/main.py +++ b/couchpotato/core/downloaders/sabnzbd/main.py @@ -76,7 +76,7 @@ class Sabnzbd(Downloader): 'mode': 'queue', }) except: - log.error('Failed getting queue: %s', traceback.format_exc(0)) + log.error('Failed getting queue: %s', traceback.format_exc(1)) return False # Go through history items @@ -86,7 +86,7 @@ class Sabnzbd(Downloader): 'limit': 15, }) except: - log.error('Failed getting history json: %s', traceback.format_exc(0)) + log.error('Failed getting history json: %s', traceback.format_exc(1)) return False statuses = [] diff --git a/couchpotato/core/downloaders/transmission/main.py b/couchpotato/core/downloaders/transmission/main.py index 5bdd65b..63c4c0e 100644 --- a/couchpotato/core/downloaders/transmission/main.py +++ b/couchpotato/core/downloaders/transmission/main.py @@ -30,9 +30,10 @@ class Transmission(Downloader): return False # Set parameters for Transmission + folder_name = self.createFileName(data, filedata, movie)[:-len(data.get('type')) - 1] params = { 'paused': self.conf('paused', default = 0), - 'download-dir': self.conf('directory', default = '').rstrip(os.path.sep) + 'download-dir': os.path.join(self.conf('directory', default = ''), folder_name).rstrip(os.path.sep) } torrent_params = { @@ -49,6 +50,7 @@ class Transmission(Downloader): trpc = TransmissionRPC(host[0], port = host[1], username = self.conf('username'), password = self.conf('password')) if data.get('type') == 'torrent_magnet': remote_torrent = trpc.add_torrent_uri(data.get('url'), arguments = params) + torrent_params['trackerAdd'] = self.torrent_trackers else: remote_torrent = trpc.add_torrent_file(b64encode(filedata), arguments = params) diff --git a/couchpotato/core/downloaders/utorrent/__init__.py b/couchpotato/core/downloaders/utorrent/__init__.py new file mode 100644 index 0000000..88ceaf7 --- /dev/null +++ b/couchpotato/core/downloaders/utorrent/__init__.py @@ -0,0 +1,54 @@ +from .main import uTorrent + +def start(): + return uTorrent() + +config = [{ + 'name': 'utorrent', + 'groups': [ + { + 'tab': 'downloaders', + 'name': 'utorrent', + 'label': 'uTorrent', + 'description': 'Send torrents to uTorrent.', + 'wizard': True, + 'options': [ + { + 
'name': 'enabled', + 'default': 0, + 'type': 'enabler', + 'radio_group': 'torrent', + }, + { + 'name': 'host', + 'default': 'localhost:8000', + 'description': 'Hostname with port. Usually localhost:8000', + }, + { + 'name': 'username', + }, + { + 'name': 'password', + 'type': 'password', + }, + { + 'name': 'label', + 'description': 'Label to add torrent as.', + }, + { + 'name': 'paused', + 'type': 'bool', + 'default': False, + 'description': 'Add the torrent paused.', + }, + { + 'name': 'manual', + 'default': 0, + 'type': 'bool', + 'advanced': True, + 'description': 'Disable this downloader for automated searches, but use it when I manually send a release.', + }, + ], + } + ], +}] diff --git a/couchpotato/core/downloaders/utorrent/main.py b/couchpotato/core/downloaders/utorrent/main.py new file mode 100644 index 0000000..983afbf --- /dev/null +++ b/couchpotato/core/downloaders/utorrent/main.py @@ -0,0 +1,138 @@ +from bencode import bencode, bdecode +from couchpotato.core.downloaders.base import Downloader +from couchpotato.core.helpers.encoding import isInt +from couchpotato.core.logger import CPLog +from hashlib import sha1 +from multipartpost import MultipartPostHandler +import cookielib +import httplib +import re +import time +import urllib +import urllib2 + + +log = CPLog(__name__) + + +class uTorrent(Downloader): + + type = ['torrent', 'torrent_magnet'] + utorrent_api = None + + def download(self, data, movie, manual = False, filedata = None): + + if self.isDisabled(manual) or not self.isCorrectType(data.get('type')): + return + + log.debug('Sending "%s" (%s) to uTorrent.', (data.get('name'), data.get('type'))) + + # Load host from config and split out port. + host = self.conf('host').split(':') + if not isInt(host[1]): + log.error('Config properties are not filled in correctly, port is missing.') + return False + + torrent_params = {} + if self.conf('label'): + torrent_params['label'] = self.conf('label') + + if not filedata and data.get('type') == 'torrent': + log.error('Failed sending torrent, no data') + return False + if data.get('type') == 'torrent_magnet': + torrent_hash = re.findall('urn:btih:([\w]{32,40})', data.get('url'))[0].upper() + torrent_params['trackers'] = '%0D%0A%0D%0A'.join(self.torrent_trackers) + else: + info = bdecode(filedata)["info"] + torrent_hash = sha1(bencode(info)).hexdigest().upper() + torrent_filename = self.createFileName(data, filedata, movie) + + # Send request to uTorrent + try: + if not self.utorrent_api: + self.utorrent_api = uTorrentAPI(host[0], port = host[1], username = self.conf('username'), password = self.conf('password')) + + if data.get('type') == 'torrent_magnet': + self.utorrent_api.add_torrent_uri(data.get('url')) + else: + self.utorrent_api.add_torrent_file(torrent_filename, filedata) + + # Change settings of added torrents + self.utorrent_api.set_torrent(torrent_hash, torrent_params) + if self.conf('paused', default = 0): + self.utorrent_api.pause_torrent(torrent_hash) + return True + except Exception, err: + log.error('Failed to send torrent to uTorrent: %s', err) + return False + + +class uTorrentAPI(object): + + def __init__(self, host = 'localhost', port = 8000, username = None, password = None): + + super(uTorrentAPI, self).__init__() + + self.url = 'http://' + str(host) + ':' + str(port) + '/gui/' + self.token = '' + self.last_time = time.time() + cookies = cookielib.CookieJar() + self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookies), MultipartPostHandler) + self.opener.addheaders = [('User-agent', 
'couchpotato-utorrent-client/1.0')] + if username and password: + password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm() + password_manager.add_password(realm = None, uri = self.url, user = username, passwd = password) + self.opener.add_handler(urllib2.HTTPBasicAuthHandler(password_manager)) + self.opener.add_handler(urllib2.HTTPDigestAuthHandler(password_manager)) + elif username or password: + log.debug('User or password missing, not using authentication.') + self.token = self.get_token() + + def _request(self, action, data = None): + if time.time() > self.last_time + 1800: + self.last_time = time.time() + self.token = self.get_token() + request = urllib2.Request(self.url + "?token=" + self.token + "&" + action, data) + try: + open_request = self.opener.open(request) + response = open_request.read() + log.debug('response: %s', response) + if response: + log.debug('uTorrent action successfull') + return response + else: + log.debug('Unknown failure sending command to uTorrent. Return text is: %s', response) + except httplib.InvalidURL, err: + log.error('Invalid uTorrent host, check your config %s', err) + except urllib2.HTTPError, err: + if err.code == 401: + log.error('Invalid uTorrent Username or Password, check your config') + else: + log.error('uTorrent HTTPError: %s', err) + except urllib2.URLError, err: + log.error('Unable to connect to uTorrent %s', err) + return False + + def get_token(self): + request = self.opener.open(self.url + "token.html") + token = re.findall("(.*?) 0 else [] + return ids[0] except IndexError: pass diff --git a/couchpotato/core/logger.py b/couchpotato/core/logger.py index 7a357b3..09875a6 100644 --- a/couchpotato/core/logger.py +++ b/couchpotato/core/logger.py @@ -17,6 +17,9 @@ class CPLog(object): def info(self, msg, replace_tuple = ()): self.logger.info(self.addContext(msg, replace_tuple)) + def info2(self, msg, replace_tuple = ()): + self.logger.log(19, self.addContext(msg, replace_tuple)) + def debug(self, msg, replace_tuple = ()): self.logger.debug(self.addContext(msg, replace_tuple)) diff --git a/couchpotato/core/notifications/xbmc/main.py b/couchpotato/core/notifications/xbmc/main.py index a2a81d7..96bb2cf 100755 --- a/couchpotato/core/notifications/xbmc/main.py +++ b/couchpotato/core/notifications/xbmc/main.py @@ -3,6 +3,7 @@ from couchpotato.core.logger import CPLog from couchpotato.core.notifications.base import Notification from flask.helpers import json import base64 +import traceback log = CPLog(__name__) @@ -22,9 +23,12 @@ class XBMC(Notification): ('VideoLibrary.Scan', {}), ]) - for result in response: - if result['result'] == "OK": - successful += 1 + try: + for result in response: + if result['result'] == "OK": + successful += 1 + except: + log.error('Failed parsing results: %s', traceback.format_exc()) return successful == len(hosts) * 2 @@ -50,10 +54,14 @@ class XBMC(Notification): base64string = base64.encodestring('%s:%s' % (self.conf('username'), self.conf('password'))).replace('\n', '') headers['Authorization'] = 'Basic %s' % base64string - log.debug('Sending request to %s: %s', (host, data)) - rdata = self.urlopen(server, headers = headers, params = data, multipart = True) - response = json.loads(rdata) - log.debug('Returned from request %s: %s', (host, response)) + try: + log.debug('Sending request to %s: %s', (host, data)) + rdata = self.urlopen(server, headers = headers, params = data, multipart = True) + response = json.loads(rdata) + log.debug('Returned from request %s: %s', (host, response)) - return response + return 
response + except: + log.error('Failed sending request to XBMC: %s', traceback.format_exc()) + return [] diff --git a/couchpotato/core/plugins/browser/main.py b/couchpotato/core/plugins/browser/main.py index b84284b..b5839e7 100644 --- a/couchpotato/core/plugins/browser/main.py +++ b/couchpotato/core/plugins/browser/main.py @@ -1,5 +1,6 @@ from couchpotato.api import addApiView from couchpotato.core.helpers.request import getParam, jsonified +from couchpotato.core.helpers.variable import getUserDir from couchpotato.core.plugins.base import Plugin import ctypes import os @@ -65,15 +66,7 @@ class FileBrowser(Plugin): def view(self): path = getParam('path', '/') - - # Set proper home dir for some systems - try: - import pwd - os.environ['HOME'] = pwd.getpwuid(os.geteuid()).pw_dir - except: - pass - - home = os.path.expanduser('~') + home = getUserDir() if not path: path = home diff --git a/couchpotato/core/plugins/renamer/__init__.py b/couchpotato/core/plugins/renamer/__init__.py index c3afbc8..e2c65ed 100644 --- a/couchpotato/core/plugins/renamer/__init__.py +++ b/couchpotato/core/plugins/renamer/__init__.py @@ -84,6 +84,15 @@ config = [{ }, { 'advanced': True, + 'name': 'force_every', + 'label': 'Force every', + 'default': 2, + 'type': 'int', + 'unit': 'hour(s)', + 'description': 'Forces the renamer to scan every X hours', + }, + { + 'advanced': True, 'name': 'next_on_failed', 'default': True, 'type': 'bool', diff --git a/couchpotato/core/plugins/renamer/main.py b/couchpotato/core/plugins/renamer/main.py index 207af6b..acb98ae 100644 --- a/couchpotato/core/plugins/renamer/main.py +++ b/couchpotato/core/plugins/renamer/main.py @@ -35,7 +35,7 @@ class Renamer(Plugin): addEvent('app.load', self.scan) fireEvent('schedule.interval', 'renamer.check_snatched', self.checkSnatched, minutes = self.conf('run_every')) - fireEvent('schedule.interval', 'renamer.check_snatched_forced', self.scan, hours = 2) + fireEvent('schedule.interval', 'renamer.check_snatched_forced', self.scan, hours = self.conf('force_every')) def scanView(self): @@ -383,7 +383,10 @@ class Renamer(Plugin): # Notify on download, search for trailers etc download_message = 'Downloaded %s (%s)' % (movie_title, replacements['quality']) - fireEvent('renamer.after', message = download_message, group = group, in_order = True) + try: + fireEvent('renamer.after', message = download_message, group = group, in_order = True) + except: + log.error('Failed firing (some) of the renamer.after events: %s', traceback.format_exc()) # Break if CP wants to shut down if self.shuttingDown(): diff --git a/couchpotato/core/plugins/scanner/main.py b/couchpotato/core/plugins/scanner/main.py index d83d87d..78feca8 100644 --- a/couchpotato/core/plugins/scanner/main.py +++ b/couchpotato/core/plugins/scanner/main.py @@ -23,7 +23,7 @@ class Scanner(Plugin): 'media': 314572800, # 300MB 'trailer': 1048576, # 1MB } - ignored_in_path = ['_unpack', '_failed_', '_unknown_', '_exists_', '_failed_remove_', '_failed_rename_', '.appledouble', '.appledb', '.appledesktop', os.path.sep + '._', '.ds_store', 'cp.cpnfo'] #unpacking, smb-crap, hidden files + ignored_in_path = ['extracting', '_unpack', '_failed_', '_unknown_', '_exists_', '_failed_remove_', '_failed_rename_', '.appledouble', '.appledb', '.appledesktop', os.path.sep + '._', '.ds_store', 'cp.cpnfo'] #unpacking, smb-crap, hidden files ignore_names = ['extract', 'extracting', 'extracted', 'movie', 'movies', 'film', 'films', 'download', 'downloads', 'video_ts', 'audio_ts', 'bdmv', 'certificate'] extensions = { 'movie': 
['mkv', 'wmv', 'avi', 'mpg', 'mpeg', 'mp4', 'm2ts', 'iso', 'img', 'mdf', 'ts', 'm4v'], @@ -53,6 +53,20 @@ class Scanner(Plugin): 'video': ['x264', 'h264', 'divx', 'xvid'] } + audio_codec_map = { + 0x2000: 'ac3', + 0x2001: 'dts', + 0x0055: 'mp3', + 0x0050: 'mp2', + 0x0001: 'pcm', + 0x003: 'pcm', + 0x77a1: 'tta1', + 0x5756: 'wav', + 0x6750: 'vorbis', + 0xF1AC: 'flac', + 0x00ff: 'aac', + } + source_media = { 'bluray': ['bluray', 'blu-ray', 'brrip', 'br-rip'], 'hddvd': ['hddvd', 'hd-dvd'], @@ -331,7 +345,7 @@ class Scanner(Plugin): continue log.debug('Getting metadata for %s', identifier) - group['meta_data'] = self.getMetaData(group) + group['meta_data'] = self.getMetaData(group, folder = folder) # Subtitle meta group['subtitle_language'] = self.getSubtitleLanguage(group) if not simple else {} @@ -381,7 +395,7 @@ class Scanner(Plugin): return processed_movies - def getMetaData(self, group): + def getMetaData(self, group, folder = ''): data = {} files = list(group['files']['movie']) @@ -410,7 +424,7 @@ class Scanner(Plugin): data['quality_type'] = 'HD' if data.get('resolution_width', 0) >= 1280 else 'SD' filename = re.sub('(.cp\(tt[0-9{7}]+\))', '', files[0]) - data['group'] = self.getGroup(filename) + data['group'] = self.getGroup(filename[len(folder):]) data['source'] = self.getSourceMedia(filename) return data @@ -419,9 +433,18 @@ class Scanner(Plugin): try: p = enzyme.parse(filename) + + # Video codec + vc = ('h264' if p.video[0].codec == 'AVC1' else p.video[0].codec).lower() + + # Audio codec + ac = p.audio[0].codec + try: ac = self.audio_codec_map.get(p.audio[0].codec) + except: pass + return { - 'video': p.video[0].codec, - 'audio': p.audio[0].codec, + 'video': vc, + 'audio': ac, 'resolution_width': tryInt(p.video[0].width), 'resolution_height': tryInt(p.video[0].height), } @@ -738,8 +761,8 @@ class Scanner(Plugin): def getGroup(self, file): try: - match = re.search('-(?P[A-Z0-9]+).', file, re.I) - return match.group('group') or '' + match = re.findall('\-([A-Z0-9]+)[\.\/]', file, re.I) + return match[-1] or '' except: return '' diff --git a/couchpotato/core/plugins/searcher/__init__.py b/couchpotato/core/plugins/searcher/__init__.py index f499e2b..5855c4f 100644 --- a/couchpotato/core/plugins/searcher/__init__.py +++ b/couchpotato/core/plugins/searcher/__init__.py @@ -29,13 +29,13 @@ config = [{ { 'name': 'ignored_words', 'label': 'Ignored words', - 'default': 'german, dutch, french, truefrench, danish, swedish, spanish, italian, korean, dubbed, swesub, korsub', + 'default': 'german, dutch, french, truefrench, danish, swedish, spanish, italian, korean, dubbed, swesub, korsub, dksubs', }, { 'name': 'preferred_method', 'label': 'First search', 'description': 'Which of the methods do you prefer', - 'default': 'nzb', + 'default': 'both', 'type': 'dropdown', 'values': [('usenet & torrents', 'both'), ('usenet', 'nzb'), ('torrents', 'torrent')], }, diff --git a/couchpotato/core/plugins/searcher/main.py b/couchpotato/core/plugins/searcher/main.py index 7e3aefd..312d238 100644 --- a/couchpotato/core/plugins/searcher/main.py +++ b/couchpotato/core/plugins/searcher/main.py @@ -294,7 +294,7 @@ class Searcher(Plugin): retention = Env.setting('retention', section = 'nzb') if nzb.get('seeds') is None and 0 < retention < nzb.get('age', 0): - log.info('Wrong: Outside retention, age is %s, needs %s or lower: %s', (nzb['age'], retention, nzb['name'])) + log.info2('Wrong: Outside retention, age is %s, needs %s or lower: %s', (nzb['age'], retention, nzb['name'])) return False movie_name = 
getTitle(movie['library']) @@ -304,13 +304,13 @@ class Searcher(Plugin): required_words = [x.strip().lower() for x in self.conf('required_words').lower().split(',')] if self.conf('required_words') and not list(set(nzb_words) & set(required_words)): - log.info("Wrong: Required word missing: %s" % nzb['name']) + log.info2("Wrong: Required word missing: %s" % nzb['name']) return False ignored_words = [x.strip().lower() for x in self.conf('ignored_words').split(',')] blacklisted = list(set(nzb_words) & set(ignored_words)) if self.conf('ignored_words') and blacklisted: - log.info("Wrong: '%s' blacklisted words: %s" % (nzb['name'], ", ".join(blacklisted))) + log.info2("Wrong: '%s' blacklisted words: %s" % (nzb['name'], ", ".join(blacklisted))) return False pron_tags = ['xxx', 'sex', 'anal', 'tits', 'fuck', 'porn', 'orgy', 'milf', 'boobs', 'erotica', 'erotic'] @@ -324,18 +324,18 @@ class Searcher(Plugin): # Contains lower quality string if self.containsOtherQuality(nzb, movie_year = movie['library']['year'], preferred_quality = preferred_quality): - log.info('Wrong: %s, looking for %s', (nzb['name'], quality['label'])) + log.info2('Wrong: %s, looking for %s', (nzb['name'], quality['label'])) return False # File to small if nzb['size'] and preferred_quality['size_min'] > nzb['size']: - log.info('"%s" is too small to be %s. %sMB instead of the minimal of %sMB.', (nzb['name'], preferred_quality['label'], nzb['size'], preferred_quality['size_min'])) + log.info2('Wrong: "%s" is too small to be %s. %sMB instead of the minimal of %sMB.', (nzb['name'], preferred_quality['label'], nzb['size'], preferred_quality['size_min'])) return False # File to large if nzb['size'] and preferred_quality.get('size_max') < nzb['size']: - log.info('"%s" is too large to be %s. %sMB instead of the maximum of %sMB.', (nzb['name'], preferred_quality['label'], nzb['size'], preferred_quality['size_max'])) + log.info2('Wrong: "%s" is too large to be %s. %sMB instead of the maximum of %sMB.', (nzb['name'], preferred_quality['label'], nzb['size'], preferred_quality['size_max'])) return False @@ -391,7 +391,7 @@ class Searcher(Plugin): # Hack for older movies that don't contain quality tag year_name = fireEvent('scanner.name_year', name, single = True) - if movie_year < datetime.datetime.now().year - 3 and not year_name.get('year', None): + if len(found) == 0 and movie_year < datetime.datetime.now().year - 3 and not year_name.get('year', None): if size > 3000: # Assume dvdr log.info('Quality was missing in name, assuming it\'s a DVD-R based on the size: %s', (size)) found['dvdr'] = True diff --git a/couchpotato/core/plugins/status/main.py b/couchpotato/core/plugins/status/main.py index 91c2858..338749d 100644 --- a/couchpotato/core/plugins/status/main.py +++ b/couchpotato/core/plugins/status/main.py @@ -22,6 +22,7 @@ class StatusPlugin(Plugin): 'failed': 'Failed', 'deleted': 'Deleted', 'ignored': 'Ignored', + 'available': 'Available', } def __init__(self): diff --git a/couchpotato/core/plugins/wizard/static/wizard.js b/couchpotato/core/plugins/wizard/static/wizard.js index 5d087ad..fd6eb14 100644 --- a/couchpotato/core/plugins/wizard/static/wizard.js +++ b/couchpotato/core/plugins/wizard/static/wizard.js @@ -9,7 +9,7 @@ Page.Wizard = new Class({ headers: { 'welcome': { 'title': 'Welcome to the new CouchPotato', - 'description': 'To get started, fill in each of the following settings as much as your can.
Maybe first start with importing your movies from the previous CouchPotato', + 'description': 'To get started, fill in each of the following settings as much as you can.
Maybe first start with importing your movies from the previous CouchPotato', 'content': new Element('div', { 'styles': { 'margin': '0 0 0 30px' @@ -37,7 +37,7 @@ Page.Wizard = new Class({ }, 'downloaders': { 'title': 'What download apps are you using?', - 'description': 'CP needs an external download app to work with. Choose one below. For more downloaders check settings after you have filled in the wizard. If your download app isn\'t in the list, use Blackhole.' + 'description': 'CP needs an external download app to work with. Choose one below. For more downloaders check settings after you have filled in the wizard. If your download app isn\'t in the list, use the default Blackhole.' }, 'providers': { 'title': 'Are you registered at any of these sites?', diff --git a/couchpotato/core/providers/automation/imdb/main.py b/couchpotato/core/providers/automation/imdb/main.py index a4511b4..c232485 100644 --- a/couchpotato/core/providers/automation/imdb/main.py +++ b/couchpotato/core/providers/automation/imdb/main.py @@ -1,9 +1,8 @@ from couchpotato.core.helpers.rss import RSS -from couchpotato.core.helpers.variable import md5, getImdb +from couchpotato.core.helpers.variable import md5, getImdb, splitString, tryInt from couchpotato.core.logger import CPLog from couchpotato.core.providers.automation.base import Automation import traceback -import xml.etree.ElementTree as XMLTree log = CPLog(__name__) @@ -19,30 +18,25 @@ class IMDB(Automation, RSS): movies = [] - enablers = self.conf('automation_urls_use').split(',') + enablers = [tryInt(x) for x in splitString(self.conf('automation_urls_use'))] + urls = splitString(self.conf('automation_urls')) index = -1 - for rss_url in self.conf('automation_urls').split(','): + for url in urls: index += 1 if not enablers[index]: continue - elif 'rss.imdb' not in rss_url: - log.error('This isn\'t the correct url.: %s', rss_url) - continue try: - cache_key = 'imdb.rss.%s' % md5(rss_url) - - rss_data = self.getCache(cache_key, rss_url) - data = XMLTree.fromstring(rss_data) - rss_movies = self.getElements(data, 'channel/item') + cache_key = 'imdb.rss.%s' % md5(url) + rss_data = self.getCache(cache_key, url) + imdbs = getImdb(rss_data, multiple = True) - for movie in rss_movies: - imdb = getImdb(self.getTextElement(movie, "link")) + for imdb in imdbs: movies.append(imdb) except: - log.error('Failed loading IMDB watchlist: %s %s', (rss_url, traceback.format_exc())) + log.error('Failed loading IMDB watchlist: %s %s', (url, traceback.format_exc())) return movies diff --git a/couchpotato/core/providers/base.py b/couchpotato/core/providers/base.py index 3e9ef26..17ccb2a 100644 --- a/couchpotato/core/providers/base.py +++ b/couchpotato/core/providers/base.py @@ -111,4 +111,9 @@ class YarrProvider(Provider): return [self.cat_backup_id] def found(self, new): - log.info('Found: score(%(score)s) on %(provider)s: %(name)s', new) + if not new.get('provider_extra'): + new['provider_extra'] = '' + else: + new['provider_extra'] = ', %s' % new['provider_extra'] + + log.info('Found: score(%(score)s) on %(provider)s%(provider_extra)s: %(name)s', new) diff --git a/couchpotato/core/providers/nzb/mysterbin/__init__.py b/couchpotato/core/providers/nzb/mysterbin/__init__.py index fd4de65..a28d9a8 100644 --- a/couchpotato/core/providers/nzb/mysterbin/__init__.py +++ b/couchpotato/core/providers/nzb/mysterbin/__init__.py @@ -15,7 +15,7 @@ config = [{ { 'name': 'enabled', 'type': 'enabler', - 'default': False, + 'default': True, }, ], }, diff --git 
a/couchpotato/core/providers/nzb/newznab/main.py b/couchpotato/core/providers/nzb/newznab/main.py index e86c79b..2ea36a3 100644 --- a/couchpotato/core/providers/nzb/newznab/main.py +++ b/couchpotato/core/providers/nzb/newznab/main.py @@ -132,6 +132,7 @@ class Newznab(NZBProvider, RSS): new = { 'id': id, 'provider': self.getName(), + 'provider_extra': host['host'], 'type': 'nzb', 'name': self.getTextElement(nzb, "title"), 'age': self.calculateAge(int(time.mktime(parse(date).timetuple()))), diff --git a/couchpotato/core/providers/nzb/nzbclub/__init__.py b/couchpotato/core/providers/nzb/nzbclub/__init__.py index fc7b7ef..c7cf8d9 100644 --- a/couchpotato/core/providers/nzb/nzbclub/__init__.py +++ b/couchpotato/core/providers/nzb/nzbclub/__init__.py @@ -15,7 +15,6 @@ config = [{ { 'name': 'enabled', 'type': 'enabler', - 'default': True, }, ], }, diff --git a/couchpotato/core/providers/nzb/nzbclub/main.py b/couchpotato/core/providers/nzb/nzbclub/main.py index d047a8a..47d6c85 100644 --- a/couchpotato/core/providers/nzb/nzbclub/main.py +++ b/couchpotato/core/providers/nzb/nzbclub/main.py @@ -17,7 +17,7 @@ log = CPLog(__name__) class NZBClub(NZBProvider, RSS): urls = { - 'search': 'https://www.nzbclub.com/nzbfeed.aspx?%s', + 'search': 'http://www.nzbclub.com/nzbfeed.aspx?%s', } http_time_between_calls = 4 #seconds diff --git a/couchpotato/core/providers/nzb/nzbindex/__init__.py b/couchpotato/core/providers/nzb/nzbindex/__init__.py index 51ee6d9..04d5022 100644 --- a/couchpotato/core/providers/nzb/nzbindex/__init__.py +++ b/couchpotato/core/providers/nzb/nzbindex/__init__.py @@ -15,6 +15,7 @@ config = [{ { 'name': 'enabled', 'type': 'enabler', + 'default': True, }, ], }, diff --git a/couchpotato/core/providers/torrent/kickasstorrents/__init__.py b/couchpotato/core/providers/torrent/kickasstorrents/__init__.py index ffa3934..d31250f 100644 --- a/couchpotato/core/providers/torrent/kickasstorrents/__init__.py +++ b/couchpotato/core/providers/torrent/kickasstorrents/__init__.py @@ -16,7 +16,7 @@ config = [{ { 'name': 'enabled', 'type': 'enabler', - 'default': False, + 'default': True, }, ], }, diff --git a/couchpotato/core/providers/torrent/kickasstorrents/main.py b/couchpotato/core/providers/torrent/kickasstorrents/main.py index f59d8df..e8c3cd1 100644 --- a/couchpotato/core/providers/torrent/kickasstorrents/main.py +++ b/couchpotato/core/providers/torrent/kickasstorrents/main.py @@ -94,6 +94,7 @@ class KickAssTorrents(TorrentProvider): is_correct_movie = fireEvent('searcher.correct_movie', nzb = new, movie = movie, quality = quality, imdb_results = True, single = True) + if is_correct_movie: results.append(new) self.found(new) diff --git a/couchpotato/core/providers/torrent/publichd/__init__.py b/couchpotato/core/providers/torrent/publichd/__init__.py index 648c676..49a9d96 100644 --- a/couchpotato/core/providers/torrent/publichd/__init__.py +++ b/couchpotato/core/providers/torrent/publichd/__init__.py @@ -15,7 +15,7 @@ config = [{ { 'name': 'enabled', 'type': 'enabler', - 'default': False, + 'default': True, }, ], }, diff --git a/couchpotato/core/providers/torrent/thepiratebay/__init__.py b/couchpotato/core/providers/torrent/thepiratebay/__init__.py index 9c56eb8..f890ca0 100644 --- a/couchpotato/core/providers/torrent/thepiratebay/__init__.py +++ b/couchpotato/core/providers/torrent/thepiratebay/__init__.py @@ -16,7 +16,7 @@ config = [{ { 'name': 'enabled', 'type': 'enabler', - 'default': False + 'default': True }, { 'name': 'domain', diff --git a/couchpotato/core/settings/__init__.py 
b/couchpotato/core/settings/__init__.py index 366a101..00f77a6 100644 --- a/couchpotato/core/settings/__init__.py +++ b/couchpotato/core/settings/__init__.py @@ -90,7 +90,7 @@ class Settings(object): def set(self, section, option, value): return self.p.set(section, option, value) - def get(self, option = '', section = 'core', default = '', type = None): + def get(self, option = '', section = 'core', default = None, type = None): try: try: type = self.types[section][option] @@ -111,7 +111,7 @@ class Settings(object): try: return self.p.getboolean(section, option) except: - return self.p.get(section, option) + return self.p.get(section, option) == 1 def getInt(self, section, option): try: diff --git a/couchpotato/runner.py b/couchpotato/runner.py index 569ff57..973ac88 100644 --- a/couchpotato/runner.py +++ b/couchpotato/runner.py @@ -148,6 +148,7 @@ def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, En formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', '%m-%d %H:%M:%S') level = logging.DEBUG if debug else logging.INFO logger.setLevel(level) + logging.addLevelName(19, 'INFO') # To screen if (debug or options.console_log) and not options.quiet and not options.daemon: diff --git a/couchpotato/static/scripts/page/manage.js b/couchpotato/static/scripts/page/manage.js index 7ec7b4c..c06c655 100644 --- a/couchpotato/static/scripts/page/manage.js +++ b/couchpotato/static/scripts/page/manage.js @@ -46,7 +46,7 @@ Page.Manage = new Class({ 'text': 'When you\'ve done that, hit this button → ' }).adopt( new Element('a.button.green', { - 'text': 'Hit me, but not to hard', + 'text': 'Hit me, but not too hard', 'events':{ 'click': self.refresh.bind(self, true) } diff --git a/init/fedora b/init/fedora index 0adefd0..c9ebbf2 100644 --- a/init/fedora +++ b/init/fedora @@ -28,7 +28,7 @@ pidfile=${CP_PIDFILE-/var/run/couchpotato/couchpotato.pid} ## pidpath=`dirname ${pidfile}` -options=" --daemon --pid_file=${pidfile} --datadir=${datadir}" +options=" --daemon --pid_file=${pidfile} --data_dir=${datadir}" # create PID directory if not exist and ensure the couchpotato user can write to it if [ ! -d $pidpath ]; then diff --git a/init/ffpstick b/init/ffpstick new file mode 100644 index 0000000..0d2f3ac --- /dev/null +++ b/init/ffpstick @@ -0,0 +1,85 @@ +#!/ffp/bin/sh + +# PROVIDE: Couchpotato +# REQUIRE: LOGIN + +. /ffp/etc/ffp.subr + +# script name +NAME=couchpotato +APP_PATH=/ffp/usr/local/CouchPotatoServer +APP_CONFIG_DIF=/ffp/root/.couchpotato + +# path to python bin +DAEMON=/ffp/bin/python + +# Path to store PID file +PID_FILE=/ffp/var/run/couchpotato.pid +PID_PATH=$(dirname $PID_FILE) + +# startup args +DAEMON_OPTS=" ${APP_PATH}/CouchPotato.py --daemon --pid_file=${PID_FILE}" + +command=$NAME +start_cmd="couchpotato_start" +stop_cmd="couchpotato_stop" +status_cmd="couchpotato_status" + +wait_for_status() +{ + counter=$2 + while [ ${counter} -gt 0 ]; do + daemon_status + [ $? 
-eq $1 ] && break + let counter=counter-1 + sleep 1 + done +} + +daemon_status() +{ + if [ -f ${PID_FILE} ] && [ -d /proc/`cat ${PID_FILE}` ]; then + return 0 + fi + return 1 +} + +couchpotato_start() +{ + if [ -f ${PID_FILE} ] && [ -d /proc/`cat ${PID_FILE}` ]; then + echo "$NAME already running" + return 1 + fi + + echo "Starting $NAME" + $DAEMON $DAEMON_OPTS +} + +couchpotato_stop() +{ + if [ -f $PID_FILE ] ; then + echo "Stopping $NAME" + kill `cat ${PID_FILE}` + wait_for_status 1 20 + rm -f ${PID_FILE} + else + echo "Cannot find $PID_FILE" + fi +} + +couchpotato_status() +{ + local pid=` cat ${PID_FILE} 2>/dev/null ` + if test -n "$pid"; then + if cat /proc/$pid/cmdline | grep ${APP_PATH} >/dev/null ; then + [ "$1" == "silent" ] || echo "$NAME is running" + return 0 + fi + rm $pidfile + fi + + [ "$1" == "silent" ] || echo "$NAME not running" + return 1 +} + +run_rc_command "$1" \ No newline at end of file diff --git a/libs/axl/axel.py b/libs/axl/axel.py index 8e9b607..46940da 100644 --- a/libs/axl/axel.py +++ b/libs/axl/axel.py @@ -95,7 +95,6 @@ class Event(object): (None, None, handler), ... # asynchronous execution ) """ - self.in_order = False self.name = name self.asynchronous = asynch self.exc_info = exc_info @@ -142,11 +141,16 @@ class Event(object): def fire(self, *args, **kwargs): """ Stores all registered handlers in a queue for processing """ self.queue = Queue.Queue() - self.result = {} + result = {} if self.handlers: - max_threads = self._threads() + max_threads = 1 if kwargs.get('event_order_lock') else self._threads() + + # Set global result + def add_to(key, value): + result[key] = value + kwargs['event_add_to_result'] = add_to for i in range(max_threads): t = threading.Thread(target = self._execute, @@ -159,17 +163,12 @@ class Event(object): if self.asynchronous: handler_, memoize, timeout = self.handlers[handler] - self.result[handler] = (None, None, handler_) + result[handler] = (None, None, handler_) if not self.asynchronous: self.queue.join() - res = self.result or None - - # Cleanup - self.result = {} - - return res + return result def count(self): """ Returns the count of registered handlers """ @@ -181,24 +180,51 @@ class Event(object): self.memoize.clear() def _execute(self, *args, **kwargs): + + # Remove get and set from kwargs + add_to_result = kwargs.get('event_add_to_result') + del kwargs['event_add_to_result'] + + # Get and remove order lock + order_lock = kwargs.get('event_order_lock') + try: del kwargs['event_order_lock'] + except: pass + + # Get and remove return on first + return_on_result = kwargs.get('event_return_on_result') + try: del kwargs['event_return_on_result'] + except: pass + + got_results = False + """ Executes all handlers stored in the queue """ while True: + try: h_ = self.queue.get(timeout = 2) handler, memoize, timeout = self.handlers[h_] - if self.lock and self.in_order: - self.lock.acquire() + if return_on_result and got_results: + + if not self.asynchronous: + self.queue.task_done() + + continue + + if order_lock: + order_lock.acquire() try: r = self._memoize(memoize, timeout, handler, *args, **kwargs) if not self.asynchronous: - self.result[h_] = tuple(r) + if not return_on_result or (return_on_result and r[1] is not None): + add_to_result(h_, tuple(r)) + got_results = True except Exception: if not self.asynchronous: - self.result[h_] = (False, self._error(sys.exc_info()), - handler) + add_to_result(h_, (False, self._error(sys.exc_info()), + handler)) else: self.error_handler(sys.exc_info()) finally: @@ -206,8 +232,8 @@ class 
Event(object): if not self.asynchronous: self.queue.task_done() - if self.lock and self.in_order: - self.lock.release() + if order_lock: + order_lock.release() if self.queue.empty(): raise Queue.Empty @@ -257,7 +283,7 @@ class Event(object): args.insert(0, self.sender) if not memoize: - if timeout <= 0: #no time restriction + if timeout <= 0: #no time restriction result = [True, handler(*args, **kwargs), handler] return result @@ -273,7 +299,7 @@ class Event(object): if args_ == args and kwargs_ == kwargs: return [True, result, handler] - if timeout <= 0: #no time restriction + if timeout <= 0: #no time restriction result = handler(*args, **kwargs) else: result = self._timeout(timeout, handler, *args, **kwargs) diff --git a/libs/bencode/BTL.py b/libs/bencode/BTL.py new file mode 100644 index 0000000..58b0d6d --- /dev/null +++ b/libs/bencode/BTL.py @@ -0,0 +1,2 @@ +class BTFailure(Exception): + pass diff --git a/libs/bencode/LICENSE.txt b/libs/bencode/LICENSE.txt new file mode 100644 index 0000000..4b7a674 --- /dev/null +++ b/libs/bencode/LICENSE.txt @@ -0,0 +1,143 @@ +BitTorrent Open Source License + +Version 1.1 + +This BitTorrent Open Source License (the "License") applies to the BitTorrent client and related software products as well as any updates or maintenance releases of that software ("BitTorrent Products") that are distributed by BitTorrent, Inc. ("Licensor"). Any BitTorrent Product licensed pursuant to this License is a Licensed Product. Licensed Product, in its entirety, is protected by U.S. copyright law. This License identifies the terms under which you may use, copy, distribute or modify Licensed Product. + +Preamble + +This Preamble is intended to describe, in plain English, the nature and scope of this License. However, this Preamble is not a part of this license. The legal effect of this License is dependent only upon the terms of the License and not this Preamble. + +This License complies with the Open Source Definition and is derived from the Jabber Open Source License 1.0 (the "JOSL"), which has been approved by Open Source Initiative. Sections 4(c) and 4(f)(iii) from the JOSL have been deleted. + +This License provides that: + +1. You may use or give away the Licensed Product, alone or as a component of an aggregate software distribution containing programs from several different sources. No royalty or other fee is required. + +2. Both Source Code and executable versions of the Licensed Product, including Modifications made by previous Contributors, are available for your use. (The terms "Licensed Product," "Modifications," "Contributors" and "Source Code" are defined in the License.) + +3. You are allowed to make Modifications to the Licensed Product, and you can create Derivative Works from it. (The term "Derivative Works" is defined in the License.) + +4. By accepting the Licensed Product under the provisions of this License, you agree that any Modifications you make to the Licensed Product and then distribute are governed by the provisions of this License. In particular, you must make the Source Code of your Modifications available to others free of charge and without a royalty. + +5. You may sell, accept donations or otherwise receive compensation for executable versions of a Licensed Product, without paying a royalty or other fee to the Licensor or any Contributor, provided that such executable versions contain your or another Contributor?s material Modifications. 
For the avoidance of doubt, to the extent your executable version of a Licensed Product does not contain your or another Contributor?s material Modifications, you may not sell, accept donations or otherwise receive compensation for such executable. + +You may use the Licensed Product for any purpose, but the Licensor is not providing you any warranty whatsoever, nor is the Licensor accepting any liability in the event that the Licensed Product doesn't work properly or causes you any injury or damages. + +6. If you sublicense the Licensed Product or Derivative Works, you may charge fees for warranty or support, or for accepting indemnity or liability obligations to your customers. You cannot charge for, sell, accept donations or otherwise receive compensation for the Source Code. + +7. If you assert any patent claims against the Licensor relating to the Licensed Product, or if you breach any terms of the License, your rights to the Licensed Product under this License automatically terminate. + +You may use this License to distribute your own Derivative Works, in which case the provisions of this License will apply to your Derivative Works just as they do to the original Licensed Product. + +Alternatively, you may distribute your Derivative Works under any other OSI-approved Open Source license, or under a proprietary license of your choice. If you use any license other than this License, however, you must continue to fulfill the requirements of this License (including the provisions relating to publishing the Source Code) for those portions of your Derivative Works that consist of the Licensed Product, including the files containing Modifications. + +New versions of this License may be published from time to time in connection with new versions of a Licensed Product or otherwise. You may choose to continue to use the license terms in this version of the License for the Licensed Product that was originally licensed hereunder, however, the new versions of this License will at all times apply to new versions of the Licensed Product released by Licensor after the release of the new version of this License. Only the Licensor has the right to change the License terms as they apply to the Licensed Product. + +This License relies on precise definitions for certain terms. Those terms are defined when they are first used, and the definitions are repeated for your convenience in a Glossary at the end of the License. + +License Terms + +1. Grant of License From Licensor. Subject to the terms and conditions of this License, Licensor hereby grants you a world-wide, royalty-free, non-exclusive license, subject to third party intellectual property claims, to do the following: + +a. Use, reproduce, modify, display, perform, sublicense and distribute any Modifications created by a Contributor or portions thereof, in both Source Code or as an executable program, either on an unmodified basis or as part of Derivative Works. + +b. Under claims of patents now or hereafter owned or controlled by Contributor, to make, use, sell, offer for sale, have made, and/or otherwise dispose of Modifications or portions thereof, but solely to the extent that any such claim is necessary to enable you to make, use, sell, offer for sale, have made, and/or otherwise dispose of Modifications or portions thereof or Derivative Works thereof. + +2. Grant of License to Modifications From Contributor. 
"Modifications" means any additions to or deletions from the substance or structure of (i) a file containing a Licensed Product, or (ii) any new file that contains any part of a Licensed Product. Hereinafter in this License, the term "Licensed Product" shall include all previous Modifications that you receive from any Contributor. Subject to the terms and conditions of this License, By application of the provisions in Section 4(a) below, each person or entity who created or contributed to the creation of, and distributed, a Modification (a "Contributor") hereby grants you a world-wide, royalty-free, non-exclusive license, subject to third party intellectual property claims, to do the following: + +a. Use, reproduce, modify, display, perform, sublicense and distribute any Modifications created by such Contributor or portions thereof, in both Source Code or as an executable program, either on an unmodified basis or as part of Derivative Works. + +b. Under claims of patents now or hereafter owned or controlled by Contributor, to make, use, sell, offer for sale, have made, and/or otherwise dispose of Modifications or portions thereof, but solely to the extent that any such claim is necessary to enable you to make, use, sell, offer for sale, have made, and/or otherwise dispose of Modifications or portions thereof or Derivative Works thereof. + +3. Exclusions From License Grant. Nothing in this License shall be deemed to grant any rights to trademarks, copyrights, patents, trade secrets or any other intellectual property of Licensor or any Contributor except as expressly stated herein. No patent license is granted separate from the Licensed Product, for code that you delete from the Licensed Product, or for combinations of the Licensed Product with other software or hardware. No right is granted to the trademarks of Licensor or any Contributor even if such marks are included in the Licensed Product. Nothing in this License shall be interpreted to prohibit Licensor from licensing under different terms from this License any code that Licensor otherwise would have a right to license. As an express condition for your use of the Licensed Product, you hereby agree that you will not, without the prior written consent of Licensor, use any trademarks, copyrights, patents, trade secrets or any other intellectual property of Licensor or any Contributor except as expressly stated herein. For the avoidance of doubt and without limiting the foregoing, you hereby agree that you will not use or display any trademark of Licensor or any Contributor in any domain name, directory filepath, advertisement, link or other reference to you in any manner or in any media. + +4. Your Obligations Regarding Distribution. + +a. Application of This License to Your Modifications. As an express condition for your use of the Licensed Product, you hereby agree that any Modifications that you create or to which you contribute, and which you distribute, are governed by the terms of this License including, without limitation, Section 2. Any Modifications that you create or to which you contribute may be distributed only under the terms of this License or a future version of this License released under Section 7. You must include a copy of this License with every copy of the Modifications you distribute. You agree not to offer or impose any terms on any Source Code or executable version of the Licensed Product or Modifications that alter or restrict the applicable version of this License or the recipients' rights hereunder. 
However, you may include an additional document offering the additional rights described in Section 4(d). + +b. Availability of Source Code. You must make available, without charge, under the terms of this License, the Source Code of the Licensed Product and any Modifications that you distribute, either on the same media as you distribute any executable or other form of the Licensed Product, or via a mechanism generally accepted in the software development community for the electronic transfer of data (an "Electronic Distribution Mechanism"). The Source Code for any version of Licensed Product or Modifications that you distribute must remain available for as long as any executable or other form of the Licensed Product is distributed by you. You are responsible for ensuring that the Source Code version remains available even if the Electronic Distribution Mechanism is maintained by a third party. + +c. Intellectual Property Matters. + + i. Third Party Claims. If you have knowledge that a license to a third party's intellectual property right is required to exercise the rights granted by this License, you must include a text file with the Source Code distribution titled "LEGAL" that describes the claim and the party making the claim in sufficient detail that a recipient will know whom to contact. If you obtain such knowledge after you make any Modifications available as described in Section 4(b), you shall promptly modify the LEGAL file in all copies you make available thereafter and shall take other steps (such as notifying appropriate mailing lists or newsgroups) reasonably calculated to inform those who received the Licensed Product from you that new knowledge has been obtained. + + ii. Contributor APIs. If your Modifications include an application programming interface ("API") and you have knowledge of patent licenses that are reasonably necessary to implement that API, you must also include this information in the LEGAL file. + + iii. Representations. You represent that, except as disclosed pursuant to 4(c)(i) above, you believe that any Modifications you distribute are your original creations and that you have sufficient rights to grant the rights conveyed by this License. + +d. Required Notices. You must duplicate this License in any documentation you provide along with the Source Code of any Modifications you create or to which you contribute, and which you distribute, wherever you describe recipients' rights relating to Licensed Product. You must duplicate the notice contained in Exhibit A (the "Notice") in each file of the Source Code of any copy you distribute of the Licensed Product. If you created a Modification, you may add your name as a Contributor to the Notice. If it is not possible to put the Notice in a particular Source Code file due to its structure, then you must include such Notice in a location (such as a relevant directory file) where a user would be likely to look for such a notice. You may choose to offer, and charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Licensed Product. However, you may do so only on your own behalf, and not on behalf of the Licensor or any Contributor. You must make it clear that any such warranty, support, indemnity or liability obligation is offered by you alone, and you hereby agree to indemnify the Licensor and every Contributor for any liability incurred by the Licensor or such Contributor as a result of warranty, support, indemnity or liability terms you offer. + +e. 
Distribution of Executable Versions. You may distribute Licensed Product as an executable program under a license of your choice that may contain terms different from this License provided (i) you have satisfied the requirements of Sections 4(a) through 4(d) for that distribution, (ii) you include a conspicuous notice in the executable version, related documentation and collateral materials stating that the Source Code version of the +Licensed Product is available under the terms of this License, including a description of how and where you have fulfilled the obligations of Section 4(b), and (iii) you make it clear that any terms that differ from this License are offered by you alone, not by Licensor or any Contributor. You hereby agree to indemnify the Licensor and every Contributor for any liability incurred by Licensor or such Contributor as a result of any terms you offer. + +f. Distribution of Derivative Works. You may create Derivative Works (e.g., combinations of some or all of the Licensed Product with other code) and distribute the Derivative Works as products under any other license you select, with the proviso that the requirements of this License are fulfilled for those portions of the Derivative Works that consist of the Licensed Product or any Modifications thereto. + +g. Compensation for Distribution of Executable Versions of Licensed Products, Modifications or Derivative Works. Notwithstanding any provision of this License to the contrary, by distributing, selling, licensing, sublicensing or otherwise making available any Licensed Product, or Modification or Derivative Work thereof, you and Licensor hereby acknowledge and agree that you may sell, license or sublicense for a fee, accept donations or otherwise receive compensation for executable versions of a Licensed Product, without paying a royalty or other fee to the Licensor or any other Contributor, provided that such executable versions (i) contain your or another Contributor?s material Modifications, or (ii) are otherwise material Derivative Works. For purposes of this License, an executable version of the Licensed Product will be deemed to contain a material Modification, or will otherwise be deemed a material Derivative Work, if (a) the Licensed Product is modified with your own or a third party?s software programs or other code, and/or the Licensed Product is combined with a number of your own or a third party?s software programs or code, respectively, and (b) such software programs or code add or contribute material value, functionality or features to the License Product. For the avoidance of doubt, to the extent your executable version of a Licensed Product does not contain your or another Contributor?s material Modifications or is otherwise not a material Derivative Work, in each case as contemplated herein, you may not sell, license or sublicense for a fee, accept donations or otherwise receive compensation for such executable. Additionally, without limitation of the foregoing and notwithstanding any provision of this License to the contrary, you cannot charge for, sell, license or sublicense for a fee, accept donations or otherwise receive compensation for the Source Code. + +5. Inability to Comply Due to Statute or Regulation. 
If it is impossible for you to comply with any of the terms of this License with respect to some or all of the Licensed Product due to statute, judicial order, or regulation, then you must (i) comply with the terms of this License to the maximum extent possible, (ii) cite the statute or regulation that prohibits you from adhering to the License, and (iii) describe the limitations and the code they affect. Such description must be included in the LEGAL file described in Section 4(d), and must be included with all distributions of the Source Code. Except to the extent prohibited by statute or regulation, such description must be sufficiently detailed for a recipient of ordinary skill at computer programming to be able to understand it. + +6. Application of This License. This License applies to code to which Licensor or Contributor has attached the Notice in Exhibit A, which is incorporated herein by this reference. + +7. Versions of This License. + +a. New Versions. Licensor may publish from time to time revised and/or new versions of the License. + +b. Effect of New Versions. Once Licensed Product has been published under a particular version of the License, you may always continue to use it under the terms of that version, provided that any such license be in full force and effect at the time, and has not been revoked or otherwise terminated. You may also choose to use such Licensed Product under the terms of any subsequent version (but not any prior version) of the License published by Licensor. No one other than Licensor has the right to modify the terms applicable to Licensed Product created under this License. + +c. Derivative Works of this License. If you create or use a modified version of this License, which you may do only in order to apply it to software that is not already a Licensed Product under this License, you must rename your license so that it is not confusingly similar to this License, and must make it clear that your license contains terms that differ from this License. In so naming your license, you may not use any trademark of Licensor or any Contributor. + +8. Disclaimer of Warranty. LICENSED PRODUCT IS PROVIDED UNDER THIS LICENSE ON AN AS IS BASIS, WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE LICENSED PRODUCT IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LICENSED PRODUCT IS WITH YOU. SHOULD LICENSED PRODUCT PROVE DEFECTIVE IN ANY RESPECT, YOU (AND NOT THE LICENSOR OR ANY OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING, REPAIR OR CORRECTION. THIS +DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF LICENSED PRODUCT IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER. + +9. Termination. + +a. Automatic Termination Upon Breach. This license and the rights granted hereunder will terminate automatically if you fail to comply with the terms herein and fail to cure such breach within ten (10) days of being notified of the breach by the Licensor. For purposes of this provision, proof of delivery via email to the address listed in the ?WHOIS? database of the registrar for any website through which you distribute or market any Licensed Product, or to any alternate email address which you designate in writing to the Licensor, shall constitute sufficient notification. 
All sublicenses to the Licensed Product that are properly granted shall survive any termination of this license so long as they continue to comply with the terms of this License. Provisions that, by their nature, must remain in effect beyond the termination of this License, shall survive.

b. Termination Upon Assertion of Patent Infringement. If you initiate litigation by asserting a patent infringement claim (excluding declaratory judgment actions) against Licensor or a Contributor (Licensor or Contributor against whom you file such an action is referred to herein as Respondent) alleging that Licensed Product directly or indirectly infringes any patent, then any and all rights granted by such Respondent to you under Sections 1 or 2 of this License shall terminate prospectively upon sixty (60) days notice from Respondent (the "Notice Period") unless within that Notice Period you either agree in writing (i) to pay Respondent a mutually agreeable reasonable royalty for your past or future use of Licensed Product made by such Respondent, or (ii) withdraw your litigation claim with respect to Licensed Product against such Respondent. If within said Notice Period a reasonable royalty and payment arrangement are not mutually agreed upon in writing by the parties or the litigation claim is not withdrawn, the rights granted by Licensor to you under Sections 1 and 2 automatically terminate at the expiration of said Notice Period.

c. Reasonable Value of This License. If you assert a patent infringement claim against Respondent alleging that Licensed Product directly or indirectly infringes any patent where such claim is resolved (such as by license or settlement) prior to the initiation of patent infringement litigation, then the reasonable value of the licenses granted by said Respondent under Sections 1 and 2 shall be taken into account in determining the amount or value of any payment or license.

d. No Retroactive Effect of Termination. In the event of termination under Sections 9(a) or 9(b) above, all end user license agreements (excluding licenses to distributors and resellers) that have been validly granted by you or any distributor hereunder prior to termination shall survive termination.

10. Limitation of Liability. UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL THE LICENSOR, ANY CONTRIBUTOR, OR ANY DISTRIBUTOR OF LICENSED PRODUCT, OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE TO ANY PERSON FOR ANY INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF GOODWILL, WORK STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OR LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION AND LIMITATION MAY NOT APPLY TO YOU.

11. Responsibility for Claims. As between Licensor and Contributors, each party is responsible for claims and damages arising, directly or indirectly, out of its utilization of rights under this License. You agree to work with Licensor and Contributors to distribute such responsibility on an equitable basis.
Nothing herein is intended or shall be deemed to constitute any admission of liability.

12. U.S. Government End Users. The Licensed Product is a commercial item, as that term is defined in 48 C.F.R. 2.101 (Oct. 1995), consisting of commercial computer software and commercial computer software documentation, as such terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4 (June 1995), all U.S. Government End Users acquire Licensed Product with only those rights set forth herein.

13. Miscellaneous. This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. This License shall be governed by California law provisions (except to the extent applicable law, if any, provides otherwise), excluding its conflict-of-law provisions. You expressly agree that in any litigation relating to this license the losing party shall be responsible for costs including, without limitation, court costs and reasonable attorneys fees and expenses. The application of the United Nations Convention on Contracts for the International Sale of Goods is expressly excluded. Any law or regulation that provides that the language of a contract shall be construed against the drafter shall not apply to this License.

14. Definition of You in This License. You throughout this License, whether in upper or lower case, means an individual or a legal entity exercising rights under, and complying with all of the terms of, this License or a future version of this License issued under Section 7. For legal entities, you includes any entity that controls, is controlled by, is under common control with, or affiliated with, you. For purposes of this definition, control means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. You are responsible for advising any affiliated entity of the terms of this License, and that any rights or privileges derived from or obtained by way of this License are subject to the restrictions outlined herein.

15. Glossary. All defined terms in this License that are used in more than one Section of this License are repeated here, in alphabetical order, for the convenience of the reader. The Section of this License in which each defined term is first used is shown in parentheses.

Contributor: Each person or entity who created or contributed to the creation of, and distributed, a Modification. (See Section 2)

Derivative Works: That term as used in this License is defined under U.S. copyright law. (See Section 1(b))

License: This BitTorrent Open Source License. (See first paragraph of License)

Licensed Product: Any BitTorrent Product licensed pursuant to this License. The term "Licensed Product" includes all previous Modifications from any Contributor that you receive. (See first paragraph of License and Section 2)

Licensor: BitTorrent, Inc. (See first paragraph of License)

Modifications: Any additions to or deletions from the substance or structure of (i) a file containing Licensed Product, or (ii) any new file that contains any part of Licensed Product. (See Section 2)

Notice: The notice contained in Exhibit A.
(See Section 4(e))

Source Code: The preferred form for making modifications to the Licensed Product, including all modules contained therein, plus any associated interface definition files, scripts used to control compilation and installation of an executable program, or a list of differential comparisons against the Source Code of the Licensed Product. (See Section 1(a))

You: This term is defined in Section 14 of this License.

EXHIBIT A

The Notice below must appear in each file of the Source Code of any copy you distribute of the Licensed Product or any hereto. Contributors to any Modifications may add their own copyright notices to identify their own contributions.

License:

The contents of this file are subject to the BitTorrent Open Source License Version 1.0 (the License). You may not copy or use this file, in either source code or executable form, except in compliance with the License. You may obtain a copy of the License at http://www.bittorrent.com/license/.

Software distributed under the License is distributed on an AS IS basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for the specific language governing rights and limitations under the License.

diff --git a/libs/bencode/__init__.py b/libs/bencode/__init__.py new file mode 100644 index 0000000..4424fc7 --- /dev/null +++ b/libs/bencode/__init__.py @@ -0,0 +1 @@ +from bencode import * \ No newline at end of file diff --git a/libs/bencode/bencode.py b/libs/bencode/bencode.py new file mode 100644 index 0000000..7a2af17 --- /dev/null +++ b/libs/bencode/bencode.py @@ -0,0 +1,131 @@ +# The contents of this file are subject to the BitTorrent Open Source License +# Version 1.1 (the License). You may not copy or use this file, in either +# source code or executable form, except in compliance with the License. You +# may obtain a copy of the License at http://www.bittorrent.com/license/. +# +# Software distributed under the License is distributed on an AS IS basis, +# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License +# for the specific language governing rights and limitations under the +# License.
+ +# Written by Petru Paler + +from BTL import BTFailure + + +def decode_int(x, f): + f += 1 + newf = x.index('e', f) + n = int(x[f:newf]) + if x[f] == '-': + if x[f + 1] == '0': + raise ValueError + elif x[f] == '0' and newf != f+1: + raise ValueError + return (n, newf+1) + +def decode_string(x, f): + colon = x.index(':', f) + n = int(x[f:colon]) + if x[f] == '0' and colon != f+1: + raise ValueError + colon += 1 + return (x[colon:colon+n], colon+n) + +def decode_list(x, f): + r, f = [], f+1 + while x[f] != 'e': + v, f = decode_func[x[f]](x, f) + r.append(v) + return (r, f + 1) + +def decode_dict(x, f): + r, f = {}, f+1 + while x[f] != 'e': + k, f = decode_string(x, f) + r[k], f = decode_func[x[f]](x, f) + return (r, f + 1) + +decode_func = {} +decode_func['l'] = decode_list +decode_func['d'] = decode_dict +decode_func['i'] = decode_int +decode_func['0'] = decode_string +decode_func['1'] = decode_string +decode_func['2'] = decode_string +decode_func['3'] = decode_string +decode_func['4'] = decode_string +decode_func['5'] = decode_string +decode_func['6'] = decode_string +decode_func['7'] = decode_string +decode_func['8'] = decode_string +decode_func['9'] = decode_string + +def bdecode(x): + try: + r, l = decode_func[x[0]](x, 0) + except (IndexError, KeyError, ValueError): + raise BTFailure("not a valid bencoded string") + if l != len(x): + raise BTFailure("invalid bencoded value (data after valid prefix)") + return r + +from types import StringType, IntType, LongType, DictType, ListType, TupleType + + +class Bencached(object): + + __slots__ = ['bencoded'] + + def __init__(self, s): + self.bencoded = s + +def encode_bencached(x,r): + r.append(x.bencoded) + +def encode_int(x, r): + r.extend(('i', str(x), 'e')) + +def encode_bool(x, r): + if x: + encode_int(1, r) + else: + encode_int(0, r) + +def encode_string(x, r): + r.extend((str(len(x)), ':', x)) + +def encode_list(x, r): + r.append('l') + for i in x: + encode_func[type(i)](i, r) + r.append('e') + +def encode_dict(x,r): + r.append('d') + ilist = x.items() + ilist.sort() + for k, v in ilist: + r.extend((str(len(k)), ':', k)) + encode_func[type(v)](v, r) + r.append('e') + +encode_func = {} +encode_func[Bencached] = encode_bencached +encode_func[IntType] = encode_int +encode_func[LongType] = encode_int +encode_func[StringType] = encode_string +encode_func[ListType] = encode_list +encode_func[TupleType] = encode_list +encode_func[DictType] = encode_dict + +try: + from types import BooleanType + encode_func[BooleanType] = encode_bool +except ImportError: + pass + +def bencode(x): + r = [] + encode_func[type(x)](x, r) + return ''.join(r) diff --git a/libs/flask/__init__.py b/libs/flask/__init__.py old mode 100755 new mode 100644 index 54bfedd..b170ba5 --- a/libs/flask/__init__.py +++ b/libs/flask/__init__.py @@ -10,7 +10,7 @@ :license: BSD, see LICENSE for more details. """ -__version__ = '0.9-dev' +__version__ = '0.9' # utilities we import from Werkzeug and Jinja2 that are unused # in the module but are exported as public interface. 
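The bencode module added above (libs/bencode/__init__.py simply re-exports libs/bencode/bencode.py) gives CouchPotato a way to read and write .torrent metadata. A rough sketch of how it is used, assuming the bundled libs directory is on sys.path; the dictionary contents here are invented for illustration:

    # Illustrative only: round-trip a dictionary through the bundled bencode module.
    from bencode import bencode, bdecode

    meta = {'announce': 'http://tracker.publicbt.com/announce',
            'info': {'name': 'example', 'piece length': 262144}}

    encoded = bencode(meta)     # compact bencoded string, e.g. 'd8:announce...e'
    decoded = bdecode(encoded)  # parses it back into plain Python types
    assert decoded == meta

Note that bdecode() raises BTFailure (imported from BTL at the top of the module) on malformed input rather than returning a partial result.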
@@ -22,9 +22,12 @@ from .app import Flask, Request, Response from .config import Config from .helpers import url_for, jsonify, json_available, flash, \ send_file, send_from_directory, get_flashed_messages, \ - get_template_attribute, make_response, safe_join -from .globals import current_app, g, request, session, _request_ctx_stack -from .ctx import has_request_context + get_template_attribute, make_response, safe_join, \ + stream_with_context +from .globals import current_app, g, request, session, _request_ctx_stack, \ + _app_ctx_stack +from .ctx import has_request_context, has_app_context, \ + after_this_request from .module import Module from .blueprints import Blueprint from .templating import render_template, render_template_string diff --git a/libs/flask/app.py b/libs/flask/app.py old mode 100755 new mode 100644 index 15e432d..d30d380 --- a/libs/flask/app.py +++ b/libs/flask/app.py @@ -19,7 +19,7 @@ from itertools import chain from functools import update_wrapper from werkzeug.datastructures import ImmutableDict -from werkzeug.routing import Map, Rule, RequestRedirect +from werkzeug.routing import Map, Rule, RequestRedirect, BuildError from werkzeug.exceptions import HTTPException, InternalServerError, \ MethodNotAllowed, BadRequest @@ -28,14 +28,14 @@ from .helpers import _PackageBoundObject, url_for, get_flashed_messages, \ find_package from .wrappers import Request, Response from .config import ConfigAttribute, Config -from .ctx import RequestContext +from .ctx import RequestContext, AppContext, _RequestGlobals from .globals import _request_ctx_stack, request from .sessions import SecureCookieSessionInterface from .module import blueprint_is_module from .templating import DispatchingJinjaLoader, Environment, \ _default_template_ctx_processor from .signals import request_started, request_finished, got_request_exception, \ - request_tearing_down + request_tearing_down, appcontext_tearing_down # a lock used for logger initialization _logger_lock = Lock() @@ -148,6 +148,18 @@ class Flask(_PackageBoundObject): #: :class:`~flask.Response` for more information. response_class = Response + #: The class that is used for the :data:`~flask.g` instance. + #: + #: Example use cases for a custom class: + #: + #: 1. Store arbitrary attributes on flask.g. + #: 2. Add a property for lazy per-request database connectors. + #: 3. Return None instead of AttributeError on expected attributes. + #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g. + #: + #: .. versionadded:: 0.9 + request_globals_class = _RequestGlobals + #: The debug flag. Set this to `True` to enable debugging of the #: application. In debug mode the debugger will kick in when an unhandled #: exception ocurrs and the integrated server will automatically reload @@ -249,8 +261,10 @@ class Flask(_PackageBoundObject): 'SESSION_COOKIE_HTTPONLY': True, 'SESSION_COOKIE_SECURE': False, 'MAX_CONTENT_LENGTH': None, + 'SEND_FILE_MAX_AGE_DEFAULT': 12 * 60 * 60, # 12 hours 'TRAP_BAD_REQUEST_ERRORS': False, - 'TRAP_HTTP_EXCEPTIONS': False + 'TRAP_HTTP_EXCEPTIONS': False, + 'PREFERRED_URL_SCHEME': 'http' }) #: The rule object to use for URL rules created. This is used by @@ -327,6 +341,15 @@ class Flask(_PackageBoundObject): #: decorator. self.error_handler_spec = {None: self._error_handlers} + #: A list of functions that are called when :meth:`url_for` raises a + #: :exc:`~werkzeug.routing.BuildError`. Each function registered here + #: is called with `error`, `endpoint` and `values`. 
If a function + #: returns `None` or raises a `BuildError` the next function is + #: tried. + #: + #: .. versionadded:: 0.9 + self.url_build_error_handlers = [] + #: A dictionary with lists of functions that should be called at the #: beginning of the request. The key of the dictionary is the name of #: the blueprint this function is active for, `None` for all requests. @@ -362,6 +385,14 @@ class Flask(_PackageBoundObject): #: .. versionadded:: 0.7 self.teardown_request_funcs = {} + #: A list of functions that are called when the application context + #: is destroyed. Since the application context is also torn down + #: if the request ends this is the place to store code that disconnects + #: from databases. + #: + #: .. versionadded:: 0.9 + self.teardown_appcontext_funcs = [] + #: A dictionary with lists of functions that can be used as URL #: value processor functions. Whenever a URL is built these functions #: are called to modify the dictionary of values in place. The key @@ -884,6 +915,10 @@ class Flask(_PackageBoundObject): # a tuple of only `GET` as default. if methods is None: methods = getattr(view_func, 'methods', None) or ('GET',) + methods = set(methods) + + # Methods that should always be added + required_methods = set(getattr(view_func, 'required_methods', ())) # starting with Flask 0.8 the view_func object can disable and # force-enable the automatic options handling. @@ -892,11 +927,14 @@ class Flask(_PackageBoundObject): if provide_automatic_options is None: if 'OPTIONS' not in methods: - methods = tuple(methods) + ('OPTIONS',) provide_automatic_options = True + required_methods.add('OPTIONS') else: provide_automatic_options = False + # Add the required methods now. + methods |= required_methods + # due to a werkzeug bug we need to make sure that the defaults are # None if they are an empty dictionary. This should not be necessary # with Werkzeug 0.7 @@ -1098,11 +1136,43 @@ class Flask(_PackageBoundObject): that they will fail. If they do execute code that might fail they will have to surround the execution of these code by try/except statements and log ocurring errors. + + When a teardown function was called because of a exception it will + be passed an error object. """ self.teardown_request_funcs.setdefault(None, []).append(f) return f @setupmethod + def teardown_appcontext(self, f): + """Registers a function to be called when the application context + ends. These functions are typically also called when the request + context is popped. + + Example:: + + ctx = app.app_context() + ctx.push() + ... + ctx.pop() + + When ``ctx.pop()`` is executed in the above example, the teardown + functions are called just before the app context moves from the + stack of active contexts. This becomes relevant if you are using + such constructs in tests. + + Since a request context typically also manages an application + context it would also be called when you pop a request context. + + When a teardown function was called because of an exception it will + be passed an error object. + + .. versionadded:: 0.9 + """ + self.teardown_appcontext_funcs.append(f) + return f + + @setupmethod def context_processor(self, f): """Registers a template context processor function.""" self.template_context_processors[None].append(f) @@ -1312,7 +1382,7 @@ class Flask(_PackageBoundObject): def make_default_options_response(self): """This method is called to create the default `OPTIONS` response. This can be changed through subclassing to change the default - behaviour of `OPTIONS` responses. 
+ behavior of `OPTIONS` responses. .. versionadded:: 0.7 """ @@ -1346,23 +1416,48 @@ class Flask(_PackageBoundObject): string as body :class:`unicode` a response object is created with the string encoded to utf-8 as body - :class:`tuple` the response object is created with the - contents of the tuple as arguments a WSGI function the function is called as WSGI application and buffered as response object + :class:`tuple` A tuple in the form ``(response, status, + headers)`` where `response` is any of the + types defined here, `status` is a string + or an integer and `headers` is a list of + a dictionary with header values. ======================= =========================================== :param rv: the return value from the view function + + .. versionchanged:: 0.9 + Previously a tuple was interpreted as the arguments for the + response object. """ + status = headers = None + if isinstance(rv, tuple): + rv, status, headers = rv + (None,) * (3 - len(rv)) + if rv is None: raise ValueError('View function did not return a response') - if isinstance(rv, self.response_class): - return rv - if isinstance(rv, basestring): - return self.response_class(rv) - if isinstance(rv, tuple): - return self.response_class(*rv) - return self.response_class.force_type(rv, request.environ) + + if not isinstance(rv, self.response_class): + # When we create a response object directly, we let the constructor + # set the headers and status. We do this because there can be + # some extra logic involved when creating these objects with + # specific values (like defualt content type selection). + if isinstance(rv, basestring): + rv = self.response_class(rv, headers=headers, status=status) + headers = status = None + else: + rv = self.response_class.force_type(rv, request.environ) + + if status is not None: + if isinstance(status, basestring): + rv.status = status + else: + rv.status_code = status + if headers: + rv.headers.extend(headers) + + return rv def create_url_adapter(self, request): """Creates a URL adapter for the given request. The URL adapter @@ -1370,9 +1465,21 @@ class Flask(_PackageBoundObject): so the request is passed explicitly. .. versionadded:: 0.6 + + .. versionchanged:: 0.9 + This can now also be called without a request object when the + UR adapter is created for the application context. """ - return self.url_map.bind_to_environ(request.environ, - server_name=self.config['SERVER_NAME']) + if request is not None: + return self.url_map.bind_to_environ(request.environ, + server_name=self.config['SERVER_NAME']) + # We need at the very least the server name to be set for this + # to work. + if self.config['SERVER_NAME'] is not None: + return self.url_map.bind( + self.config['SERVER_NAME'], + script_name=self.config['APPLICATION_ROOT'] or '/', + url_scheme=self.config['PREFERRED_URL_SCHEME']) def inject_url_defaults(self, endpoint, values): """Injects the URL defaults for the given endpoint directly into @@ -1383,11 +1490,30 @@ class Flask(_PackageBoundObject): """ funcs = self.url_default_functions.get(None, ()) if '.' in endpoint: - bp = endpoint.split('.', 1)[0] + bp = endpoint.rsplit('.', 1)[0] funcs = chain(funcs, self.url_default_functions.get(bp, ())) for func in funcs: func(endpoint, values) + def handle_url_build_error(self, error, endpoint, values): + """Handle :class:`~werkzeug.routing.BuildError` on :meth:`url_for`. 
+ """ + exc_type, exc_value, tb = sys.exc_info() + for handler in self.url_build_error_handlers: + try: + rv = handler(error, endpoint, values) + if rv is not None: + return rv + except BuildError, error: + pass + + # At this point we want to reraise the exception. If the error is + # still the same one we can reraise it with the original traceback, + # otherwise we raise it from here. + if error is exc_value: + raise exc_type, exc_value, tb + raise error + def preprocess_request(self): """Called before the actual request dispatching and will call every as :meth:`before_request` decorated function. @@ -1429,7 +1555,7 @@ class Flask(_PackageBoundObject): """ ctx = _request_ctx_stack.top bp = ctx.request.blueprint - funcs = () + funcs = ctx._after_request_functions if bp is not None and bp in self.after_request_funcs: funcs = reversed(self.after_request_funcs[bp]) if None in self.after_request_funcs: @@ -1440,23 +1566,54 @@ class Flask(_PackageBoundObject): self.save_session(ctx.session, response) return response - def do_teardown_request(self): + def do_teardown_request(self, exc=None): """Called after the actual request dispatching and will call every as :meth:`teardown_request` decorated function. This is not actually called by the :class:`Flask` object itself but is always triggered when the request context is popped. That way we have a tighter control over certain resources under testing environments. + + .. versionchanged:: 0.9 + Added the `exc` argument. Previously this was always using the + current exception information. """ + if exc is None: + exc = sys.exc_info()[1] funcs = reversed(self.teardown_request_funcs.get(None, ())) bp = _request_ctx_stack.top.request.blueprint if bp is not None and bp in self.teardown_request_funcs: funcs = chain(funcs, reversed(self.teardown_request_funcs[bp])) - exc = sys.exc_info()[1] for func in funcs: rv = func(exc) - if rv is not None: - return rv - request_tearing_down.send(self) + request_tearing_down.send(self, exc=exc) + + def do_teardown_appcontext(self, exc=None): + """Called when an application context is popped. This works pretty + much the same as :meth:`do_teardown_request` but for the application + context. + + .. versionadded:: 0.9 + """ + if exc is None: + exc = sys.exc_info()[1] + for func in reversed(self.teardown_appcontext_funcs): + func(exc) + appcontext_tearing_down.send(self, exc=exc) + + def app_context(self): + """Binds the application only. For as long as the application is bound + to the current context the :data:`flask.current_app` points to that + application. An application context is automatically created when a + request context is pushed if necessary. + + Example usage:: + + with app.app_context(): + ... + + .. versionadded:: 0.9 + """ + return AppContext(self) def request_context(self, environ): """Creates a :class:`~flask.ctx.RequestContext` from the given diff --git a/libs/flask/blueprints.py b/libs/flask/blueprints.py old mode 100755 new mode 100644 index d81d3c7..9c55702 --- a/libs/flask/blueprints.py +++ b/libs/flask/blueprints.py @@ -25,7 +25,7 @@ class BlueprintSetupState(object): #: a reference to the current application self.app = app - #: a reference to the blurprint that created this setup state. + #: a reference to the blueprint that created this setup state. 
self.blueprint = blueprint #: a dictionary with all options that were passed to the diff --git a/libs/flask/config.py b/libs/flask/config.py old mode 100755 new mode 100644 index 67dbf9b..759fd48 --- a/libs/flask/config.py +++ b/libs/flask/config.py @@ -106,8 +106,7 @@ class Config(dict): 'loaded. Set this variable and make it ' 'point to a configuration file' % variable_name) - self.from_pyfile(rv) - return True + return self.from_pyfile(rv, silent=silent) def from_pyfile(self, filename, silent=False): """Updates the values in the config from a Python file. This function diff --git a/libs/flask/ctx.py b/libs/flask/ctx.py old mode 100755 new mode 100644 index 47ac0cc..3ea42a2 --- a/libs/flask/ctx.py +++ b/libs/flask/ctx.py @@ -9,16 +9,44 @@ :license: BSD, see LICENSE for more details. """ +import sys + from werkzeug.exceptions import HTTPException -from .globals import _request_ctx_stack +from .globals import _request_ctx_stack, _app_ctx_stack from .module import blueprint_is_module class _RequestGlobals(object): + """A plain object.""" pass +def after_this_request(f): + """Executes a function after this request. This is useful to modify + response objects. The function is passed the response object and has + to return the same or a new one. + + Example:: + + @app.route('/') + def index(): + @after_this_request + def add_header(response): + response.headers['X-Foo'] = 'Parachute' + return response + return 'Hello World!' + + This is more useful if a function other than the view function wants to + modify a response. For instance think of a decorator that wants to add + some headers without converting the return value into a response object. + + .. versionadded:: 0.9 + """ + _request_ctx_stack.top._after_request_functions.append(f) + return f + + def has_request_context(): """If you have code that wants to test if a request context is there or not this function can be used. For instance, you may want to take advantage @@ -51,6 +79,57 @@ def has_request_context(): return _request_ctx_stack.top is not None +def has_app_context(): + """Works like :func:`has_request_context` but for the application + context. You can also just do a boolean check on the + :data:`current_app` object instead. + + .. versionadded:: 0.9 + """ + return _app_ctx_stack.top is not None + + +class AppContext(object): + """The application context binds an application object implicitly + to the current thread or greenlet, similar to how the + :class:`RequestContext` binds request information. The application + context is also implicitly created if a request context is created + but the application is not on top of the individual application + context. + """ + + def __init__(self, app): + self.app = app + self.url_adapter = app.create_url_adapter(None) + + # Like request context, app contexts can be pushed multiple times + # but there a basic "refcount" is enough to track them. + self._refcnt = 0 + + def push(self): + """Binds the app context to the current context.""" + self._refcnt += 1 + _app_ctx_stack.push(self) + + def pop(self, exc=None): + """Pops the app context.""" + self._refcnt -= 1 + if self._refcnt <= 0: + if exc is None: + exc = sys.exc_info()[1] + self.app.do_teardown_appcontext(exc) + rv = _app_ctx_stack.pop() + assert rv is self, 'Popped wrong app context. 
(%r instead of %r)' \ + % (rv, self) + + def __enter__(self): + self.push() + return self + + def __exit__(self, exc_type, exc_value, tb): + self.pop(exc_value) + + class RequestContext(object): """The request context contains all request relevant information. It is created at the beginning of the request and pushed to the @@ -85,17 +164,28 @@ class RequestContext(object): self.app = app self.request = app.request_class(environ) self.url_adapter = app.create_url_adapter(self.request) - self.g = _RequestGlobals() + self.g = app.request_globals_class() self.flashes = None self.session = None + # Request contexts can be pushed multiple times and interleaved with + # other request contexts. Now only if the last level is popped we + # get rid of them. Additionally if an application context is missing + # one is created implicitly so for each level we add this information + self._implicit_app_ctx_stack = [] + # indicator if the context was preserved. Next time another context # is pushed the preserved context is popped. self.preserved = False + # Functions that should be executed after the request on the response + # object. These will be called before the regular "after_request" + # functions. + self._after_request_functions = [] + self.match_request() - # XXX: Support for deprecated functionality. This is doing away with + # XXX: Support for deprecated functionality. This is going away with # Flask 1.0 blueprint = self.request.blueprint if blueprint is not None: @@ -130,6 +220,16 @@ class RequestContext(object): if top is not None and top.preserved: top.pop() + # Before we push the request context we have to ensure that there + # is an application context. + app_ctx = _app_ctx_stack.top + if app_ctx is None or app_ctx.app != self.app: + app_ctx = self.app.app_context() + app_ctx.push() + self._implicit_app_ctx_stack.append(app_ctx) + else: + self._implicit_app_ctx_stack.append(None) + _request_ctx_stack.push(self) # Open the session at the moment that the request context is @@ -139,20 +239,36 @@ class RequestContext(object): if self.session is None: self.session = self.app.make_null_session() - def pop(self): + def pop(self, exc=None): """Pops the request context and unbinds it by doing that. This will also trigger the execution of functions registered by the :meth:`~flask.Flask.teardown_request` decorator. + + .. versionchanged:: 0.9 + Added the `exc` argument. """ - self.preserved = False - self.app.do_teardown_request() + app_ctx = self._implicit_app_ctx_stack.pop() + + clear_request = False + if not self._implicit_app_ctx_stack: + self.preserved = False + if exc is None: + exc = sys.exc_info()[1] + self.app.do_teardown_request(exc) + clear_request = True + rv = _request_ctx_stack.pop() assert rv is self, 'Popped wrong request context. (%r instead of %r)' \ % (rv, self) # get rid of circular dependencies at the end of the request # so that we don't require the GC to be active. - rv.request.environ['werkzeug.request'] = None + if clear_request: + rv.request.environ['werkzeug.request'] = None + + # Get rid of the app as well if necessary. 
+ if app_ctx is not None: + app_ctx.pop(exc) def __enter__(self): self.push() @@ -168,7 +284,7 @@ class RequestContext(object): (tb is not None and self.app.preserve_context_on_exception): self.preserved = True else: - self.pop() + self.pop(exc_value) def __repr__(self): return '<%s \'%s\' [%s] of %s>' % ( diff --git a/libs/flask/debughelpers.py b/libs/flask/debughelpers.py old mode 100755 new mode 100644 diff --git a/libs/flask/exceptions.py b/libs/flask/exceptions.py new file mode 100644 index 0000000..9ccdeda --- /dev/null +++ b/libs/flask/exceptions.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +""" + flask.exceptions + ~~~~~~~~~~~~ + + Flask specific additions to :class:`~werkzeug.exceptions.HTTPException` + + :copyright: (c) 2011 by Armin Ronacher. + :license: BSD, see LICENSE for more details. +""" +from werkzeug.exceptions import HTTPException, BadRequest +from .helpers import json + + +class JSONHTTPException(HTTPException): + """A base class for HTTP exceptions with ``Content-Type: + application/json``. + + The ``description`` attribute of this class must set to a string (*not* an + HTML string) which describes the error. + + """ + + def get_body(self, environ): + """Overrides :meth:`werkzeug.exceptions.HTTPException.get_body` to + return the description of this error in JSON format instead of HTML. + + """ + return json.dumps(dict(description=self.get_description(environ))) + + def get_headers(self, environ): + """Returns a list of headers including ``Content-Type: + application/json``. + + """ + return [('Content-Type', 'application/json')] + + +class JSONBadRequest(JSONHTTPException, BadRequest): + """Represents an HTTP ``400 Bad Request`` error whose body contains an + error message in JSON format instead of HTML format (as in the superclass). + + """ + + #: The description of the error which occurred as a string. + description = ( + 'The browser (or proxy) sent a request that this server could not ' + 'understand.' + ) diff --git a/libs/flask/ext/__init__.py b/libs/flask/ext/__init__.py old mode 100755 new mode 100644 diff --git a/libs/flask/exthook.py b/libs/flask/exthook.py old mode 100755 new mode 100644 diff --git a/libs/flask/globals.py b/libs/flask/globals.py old mode 100755 new mode 100644 index 16580d1..f6d6248 --- a/libs/flask/globals.py +++ b/libs/flask/globals.py @@ -20,9 +20,17 @@ def _lookup_object(name): return getattr(top, name) +def _find_app(): + top = _app_ctx_stack.top + if top is None: + raise RuntimeError('working outside of application context') + return top.app + + # context locals _request_ctx_stack = LocalStack() -current_app = LocalProxy(partial(_lookup_object, 'app')) +_app_ctx_stack = LocalStack() +current_app = LocalProxy(_find_app) request = LocalProxy(partial(_lookup_object, 'request')) session = LocalProxy(partial(_lookup_object, 'session')) g = LocalProxy(partial(_lookup_object, 'g')) diff --git a/libs/flask/helpers.py b/libs/flask/helpers.py old mode 100755 new mode 100644 index 25250d2..501a2f8 --- a/libs/flask/helpers.py +++ b/libs/flask/helpers.py @@ -11,7 +11,6 @@ from __future__ import with_statement -import imp import os import sys import pkgutil @@ -20,7 +19,9 @@ import mimetypes from time import time from zlib import adler32 from threading import RLock +from werkzeug.routing import BuildError from werkzeug.urls import url_quote +from functools import update_wrapper # try to load the best simplejson implementation available. If JSON # is not installed, we add a failing class. 
@@ -50,7 +51,8 @@ except ImportError: from jinja2 import FileSystemLoader -from .globals import session, _request_ctx_stack, current_app, request +from .globals import session, _request_ctx_stack, _app_ctx_stack, \ + current_app, request def _assert_have_json(): @@ -59,7 +61,7 @@ def _assert_have_json(): raise RuntimeError('simplejson not installed') -# figure out if simplejson escapes slashes. This behaviour was changed +# figure out if simplejson escapes slashes. This behavior was changed # from one version to another without reason. if not json_available or '\\/' not in json.dumps('/'): @@ -91,6 +93,78 @@ def _endpoint_from_view_func(view_func): return view_func.__name__ +def stream_with_context(generator_or_function): + """Request contexts disappear when the response is started on the server. + This is done for efficiency reasons and to make it less likely to encounter + memory leaks with badly written WSGI middlewares. The downside is that if + you are using streamed responses, the generator cannot access request bound + information any more. + + This function however can help you keep the context around for longer:: + + from flask import stream_with_context, request, Response + + @app.route('/stream') + def streamed_response(): + @stream_with_context + def generate(): + yield 'Hello ' + yield request.args['name'] + yield '!' + return Response(generate()) + + Alternatively it can also be used around a specific generator: + + from flask import stream_with_context, request, Response + + @app.route('/stream') + def streamed_response(): + def generate(): + yield 'Hello ' + yield request.args['name'] + yield '!' + return Response(stream_with_context(generate())) + + .. versionadded:: 0.9 + """ + try: + gen = iter(generator_or_function) + except TypeError: + def decorator(*args, **kwargs): + gen = generator_or_function() + return stream_with_context(gen) + return update_wrapper(decorator, generator_or_function) + + def generator(): + ctx = _request_ctx_stack.top + if ctx is None: + raise RuntimeError('Attempted to stream with context but ' + 'there was no context in the first place to keep around.') + with ctx: + # Dummy sentinel. Has to be inside the context block or we're + # not actually keeping the context around. + yield None + + # The try/finally is here so that if someone passes a WSGI level + # iterator in we're still running the cleanup logic. Generators + # don't need that because they are closed on their destruction + # automatically. + try: + for item in gen: + yield item + finally: + if hasattr(gen, 'close'): + gen.close() + + # The trick is to start the generator. Then the code execution runs until + # the first dummy None is yielded at which point the context was already + # pushed. This item is discarded. Then when the iteration continues the + # real generator is executed. + wrapped_g = generator() + wrapped_g.next() + return wrapped_g + + def jsonify(*args, **kwargs): """Creates a :class:`~flask.Response` with the JSON representation of the given arguments with an `application/json` mimetype. The arguments @@ -188,36 +262,106 @@ def url_for(endpoint, **values): For more information, head over to the :ref:`Quickstart `. + To integrate applications, :class:`Flask` has a hook to intercept URL build + errors through :attr:`Flask.build_error_handler`. The `url_for` function + results in a :exc:`~werkzeug.routing.BuildError` when the current app does + not have a URL for the given endpoint and values. 
When it does, the + :data:`~flask.current_app` calls its :attr:`~Flask.build_error_handler` if + it is not `None`, which can return a string to use as the result of + `url_for` (instead of `url_for`'s default to raise the + :exc:`~werkzeug.routing.BuildError` exception) or re-raise the exception. + An example:: + + def external_url_handler(error, endpoint, **values): + "Looks up an external URL when `url_for` cannot build a URL." + # This is an example of hooking the build_error_handler. + # Here, lookup_url is some utility function you've built + # which looks up the endpoint in some external URL registry. + url = lookup_url(endpoint, **values) + if url is None: + # External lookup did not have a URL. + # Re-raise the BuildError, in context of original traceback. + exc_type, exc_value, tb = sys.exc_info() + if exc_value is error: + raise exc_type, exc_value, tb + else: + raise error + # url_for will use this result, instead of raising BuildError. + return url + + app.build_error_handler = external_url_handler + + Here, `error` is the instance of :exc:`~werkzeug.routing.BuildError`, and + `endpoint` and `**values` are the arguments passed into `url_for`. Note + that this is for building URLs outside the current application, and not for + handling 404 NotFound errors. + .. versionadded:: 0.9 The `_anchor` and `_method` parameters were added. + .. versionadded:: 0.9 + Calls :meth:`Flask.handle_build_error` on + :exc:`~werkzeug.routing.BuildError`. + :param endpoint: the endpoint of the URL (name of the function) :param values: the variable arguments of the URL rule :param _external: if set to `True`, an absolute URL is generated. :param _anchor: if provided this is added as anchor to the URL. :param _method: if provided this explicitly specifies an HTTP method. """ - ctx = _request_ctx_stack.top - blueprint_name = request.blueprint - if not ctx.request._is_old_module: - if endpoint[:1] == '.': - if blueprint_name is not None: - endpoint = blueprint_name + endpoint - else: + appctx = _app_ctx_stack.top + reqctx = _request_ctx_stack.top + if appctx is None: + raise RuntimeError('Attempted to generate a URL with the application ' + 'context being pushed. This has to be executed ') + + # If request specific information is available we have some extra + # features that support "relative" urls. + if reqctx is not None: + url_adapter = reqctx.url_adapter + blueprint_name = request.blueprint + if not reqctx.request._is_old_module: + if endpoint[:1] == '.': + if blueprint_name is not None: + endpoint = blueprint_name + endpoint + else: + endpoint = endpoint[1:] + else: + # TODO: get rid of this deprecated functionality in 1.0 + if '.' not in endpoint: + if blueprint_name is not None: + endpoint = blueprint_name + '.' + endpoint + elif endpoint.startswith('.'): endpoint = endpoint[1:] + external = values.pop('_external', False) + + # Otherwise go with the url adapter from the appctx and make + # the urls external by default. else: - # TODO: get rid of this deprecated functionality in 1.0 - if '.' not in endpoint: - if blueprint_name is not None: - endpoint = blueprint_name + '.' + endpoint - elif endpoint.startswith('.'): - endpoint = endpoint[1:] - external = values.pop('_external', False) + url_adapter = appctx.url_adapter + if url_adapter is None: + raise RuntimeError('Application was not able to create a URL ' + 'adapter for request independent URL generation. 
' + 'You might be able to fix this by setting ' + 'the SERVER_NAME config variable.') + external = values.pop('_external', True) + anchor = values.pop('_anchor', None) method = values.pop('_method', None) - ctx.app.inject_url_defaults(endpoint, values) - rv = ctx.url_adapter.build(endpoint, values, method=method, + appctx.app.inject_url_defaults(endpoint, values) + try: + rv = url_adapter.build(endpoint, values, method=method, force_external=external) + except BuildError, error: + # We need to inject the values again so that the app callback can + # deal with that sort of stuff. + values['_external'] = external + values['_anchor'] = anchor + values['_method'] = method + return appctx.app.handle_url_build_error(error, endpoint, values) + + rv = url_adapter.build(endpoint, values, method=method, + force_external=external) if anchor is not None: rv += '#' + url_quote(anchor) return rv @@ -261,7 +405,16 @@ def flash(message, category='message'): messages and ``'warning'`` for warnings. However any kind of string can be used as category. """ - session.setdefault('_flashes', []).append((category, message)) + # Original implementation: + # + # session.setdefault('_flashes', []).append((category, message)) + # + # This assumed that changes made to mutable structures in the session are + # are always in sync with the sess on object, which is not true for session + # implementations that use external storage for keeping their keys/values. + flashes = session.get('_flashes', []) + flashes.append((category, message)) + session['_flashes'] = flashes def get_flashed_messages(with_categories=False, category_filter=[]): @@ -305,7 +458,7 @@ def get_flashed_messages(with_categories=False, category_filter=[]): def send_file(filename_or_fp, mimetype=None, as_attachment=False, attachment_filename=None, add_etags=True, - cache_timeout=60 * 60 * 12, conditional=False): + cache_timeout=None, conditional=False): """Sends the contents of a file to the client. This will use the most efficient method available and configured. By default it will try to use the WSGI server's file_wrapper support. Alternatively @@ -330,7 +483,7 @@ def send_file(filename_or_fp, mimetype=None, as_attachment=False, .. versionadded:: 0.5 The `add_etags`, `cache_timeout` and `conditional` parameters were - added. The default behaviour is now to attach etags. + added. The default behavior is now to attach etags. .. versionchanged:: 0.7 mimetype guessing and etag support for file objects was @@ -338,6 +491,9 @@ def send_file(filename_or_fp, mimetype=None, as_attachment=False, able to, otherwise attach an etag yourself. This functionality will be removed in Flask 1.0 + .. versionchanged:: 0.9 + cache_timeout pulls its default from application config, when None. + :param filename_or_fp: the filename of the file to send. This is relative to the :attr:`~Flask.root_path` if a relative path is specified. @@ -354,7 +510,11 @@ def send_file(filename_or_fp, mimetype=None, as_attachment=False, differs from the file's filename. :param add_etags: set to `False` to disable attaching of etags. :param conditional: set to `True` to enable conditional responses. - :param cache_timeout: the timeout in seconds for the headers. + + :param cache_timeout: the timeout in seconds for the headers. When `None` + (default), this value is set by + :meth:`~Flask.get_send_file_max_age` of + :data:`~flask.current_app`. 
""" mtime = None if isinstance(filename_or_fp, basestring): @@ -365,7 +525,7 @@ def send_file(filename_or_fp, mimetype=None, as_attachment=False, file = filename_or_fp filename = getattr(file, 'name', None) - # XXX: this behaviour is now deprecated because it was unreliable. + # XXX: this behavior is now deprecated because it was unreliable. # removed in Flask 1.0 if not attachment_filename and not mimetype \ and isinstance(filename, basestring): @@ -376,7 +536,7 @@ def send_file(filename_or_fp, mimetype=None, as_attachment=False, if add_etags: warn(DeprecationWarning('In future flask releases etags will no ' 'longer be generated for file objects passed to the send_file ' - 'function because this behaviour was unreliable. Pass ' + 'function because this behavior was unreliable. Pass ' 'filenames instead if possible, otherwise attach an etag ' 'yourself based on another value'), stacklevel=2) @@ -418,7 +578,9 @@ def send_file(filename_or_fp, mimetype=None, as_attachment=False, rv.last_modified = int(mtime) rv.cache_control.public = True - if cache_timeout: + if cache_timeout is None: + cache_timeout = current_app.get_send_file_max_age(filename) + if cache_timeout is not None: rv.cache_control.max_age = cache_timeout rv.expires = int(time() + cache_timeout) @@ -495,7 +657,8 @@ def send_from_directory(directory, filename, **options): filename = safe_join(directory, filename) if not os.path.isfile(filename): raise NotFound() - return send_file(filename, conditional=True, **options) + options.setdefault('conditional', True) + return send_file(filename, **options) def get_root_path(import_name): @@ -504,17 +667,29 @@ def get_root_path(import_name): Not to be confused with the package path returned by :func:`find_package`. """ + # Module already imported and has a file attribute. Use that first. + mod = sys.modules.get(import_name) + if mod is not None and hasattr(mod, '__file__'): + return os.path.dirname(os.path.abspath(mod.__file__)) + + # Next attempt: check the loader. loader = pkgutil.get_loader(import_name) + + # Loader does not exist or we're referring to an unloaded main module + # or a main module without path (interactive sessions), go with the + # current working directory. if loader is None or import_name == '__main__': - # import name is not found, or interactive/main module return os.getcwd() + # For .egg, zipimporter does not have get_filename until Python 2.7. + # Some other loaders might exhibit the same behavior. if hasattr(loader, 'get_filename'): filepath = loader.get_filename(import_name) else: # Fall back to imports. __import__(import_name) filepath = sys.modules[import_name].__file__ + # filepath is import_name.py for a module, or __init__.py for a package. return os.path.dirname(os.path.abspath(filepath)) @@ -651,6 +826,32 @@ class _PackageBoundObject(object): return FileSystemLoader(os.path.join(self.root_path, self.template_folder)) + def get_send_file_max_age(self, filename): + """Provides default cache_timeout for the :func:`send_file` functions. + + By default, this function returns ``SEND_FILE_MAX_AGE_DEFAULT`` from + the configuration of :data:`~flask.current_app`. + + Static file functions such as :func:`send_from_directory` use this + function, and :func:`send_file` calls this function on + :data:`~flask.current_app` when the given cache_timeout is `None`. If a + cache_timeout is given in :func:`send_file`, that timeout is used; + otherwise, this method is called. + + This allows subclasses to change the behavior when sending files based + on the filename. 
For example, to set the cache timeout for .js files + to 60 seconds:: + + class MyFlask(flask.Flask): + def get_send_file_max_age(self, name): + if name.lower().endswith('.js'): + return 60 + return flask.Flask.get_send_file_max_age(self, name) + + .. versionadded:: 0.9 + """ + return current_app.config['SEND_FILE_MAX_AGE_DEFAULT'] + def send_static_file(self, filename): """Function used internally to send static files from the static folder to the browser. @@ -659,7 +860,11 @@ class _PackageBoundObject(object): """ if not self.has_static_folder: raise RuntimeError('No static folder for this object') - return send_from_directory(self.static_folder, filename) + # Ensure get_send_file_max_age is called in all cases. + # Here, we ensure get_send_file_max_age is called for Blueprints. + cache_timeout = self.get_send_file_max_age(filename) + return send_from_directory(self.static_folder, filename, + cache_timeout=cache_timeout) def open_resource(self, resource, mode='rb'): """Opens a resource from the application's resource folder. To see diff --git a/libs/flask/logging.py b/libs/flask/logging.py old mode 100755 new mode 100644 diff --git a/libs/flask/module.py b/libs/flask/module.py old mode 100755 new mode 100644 diff --git a/libs/flask/session.py b/libs/flask/session.py old mode 100755 new mode 100644 diff --git a/libs/flask/sessions.py b/libs/flask/sessions.py old mode 100755 new mode 100644 diff --git a/libs/flask/signals.py b/libs/flask/signals.py old mode 100755 new mode 100644 index eeb763d..78a77bd --- a/libs/flask/signals.py +++ b/libs/flask/signals.py @@ -49,3 +49,4 @@ request_started = _signals.signal('request-started') request_finished = _signals.signal('request-finished') request_tearing_down = _signals.signal('request-tearing-down') got_request_exception = _signals.signal('got-request-exception') +appcontext_tearing_down = _signals.signal('appcontext-tearing-down') diff --git a/libs/flask/templating.py b/libs/flask/templating.py old mode 100755 new mode 100644 index 90e8772..c809a63 --- a/libs/flask/templating.py +++ b/libs/flask/templating.py @@ -109,17 +109,19 @@ def _render(template, context, app): return rv -def render_template(template_name, **context): +def render_template(template_name_or_list, **context): """Renders a template from the template folder with the given context. - :param template_name: the name of the template to be rendered + :param template_name_or_list: the name of the template to be + rendered, or an iterable with template names + the first one existing will be rendered :param context: the variables that should be available in the context of the template. 
""" ctx = _request_ctx_stack.top ctx.app.update_template_context(context) - return _render(ctx.app.jinja_env.get_template(template_name), + return _render(ctx.app.jinja_env.get_or_select_template(template_name_or_list), context, ctx.app) diff --git a/libs/flask/testing.py b/libs/flask/testing.py old mode 100755 new mode 100644 diff --git a/libs/flask/views.py b/libs/flask/views.py old mode 100755 new mode 100644 index 79d6299..5192c1c --- a/libs/flask/views.py +++ b/libs/flask/views.py @@ -107,7 +107,7 @@ class MethodViewType(type): rv = type.__new__(cls, name, bases, d) if 'methods' not in d: methods = set(rv.methods or []) - for key, value in d.iteritems(): + for key in d: if key in http_method_funcs: methods.add(key.upper()) # if we have no method at all in there we don't want to diff --git a/libs/flask/wrappers.py b/libs/flask/wrappers.py old mode 100755 new mode 100644 index f6ec278..3ee718f --- a/libs/flask/wrappers.py +++ b/libs/flask/wrappers.py @@ -10,9 +10,9 @@ """ from werkzeug.wrappers import Request as RequestBase, Response as ResponseBase -from werkzeug.exceptions import BadRequest from werkzeug.utils import cached_property +from .exceptions import JSONBadRequest from .debughelpers import attach_enctype_error_multidict from .helpers import json, _assert_have_json from .globals import _request_ctx_stack @@ -108,12 +108,22 @@ class Request(RequestBase): def on_json_loading_failed(self, e): """Called if decoding of the JSON data failed. The return value of - this method is used by :attr:`json` when an error ocurred. The - default implementation raises a :class:`~werkzeug.exceptions.BadRequest`. + this method is used by :attr:`json` when an error ocurred. The default + implementation raises a :class:`JSONBadRequest`, which is a subclass of + :class:`~werkzeug.exceptions.BadRequest` which sets the + ``Content-Type`` to ``application/json`` and provides a JSON-formatted + error description:: + + {"description": "The browser (or proxy) sent a request that \ + this server could not understand."} + + .. versionchanged:: 0.9 + Return a :class:`JSONBadRequest` instead of a + :class:`~werkzeug.exceptions.BadRequest` by default. .. versionadded:: 0.8 """ - raise BadRequest() + raise JSONBadRequest() def _load_form_data(self): RequestBase._load_form_data(self) diff --git a/libs/jinja2/__init__.py b/libs/jinja2/__init__.py old mode 100755 new mode 100644 index 5b349d6..0cf967d --- a/libs/jinja2/__init__.py +++ b/libs/jinja2/__init__.py @@ -27,7 +27,7 @@ :license: BSD, see LICENSE for more details. 
""" __docformat__ = 'restructuredtext en' -__version__ = '2.7-dev' +__version__ = '2.6' # high level interface from jinja2.environment import Environment, Template diff --git a/libs/jinja2/_debugsupport.c b/libs/jinja2/_debugsupport.c old mode 100755 new mode 100644 diff --git a/libs/jinja2/_markupsafe/__init__.py b/libs/jinja2/_markupsafe/__init__.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/_markupsafe/_bundle.py b/libs/jinja2/_markupsafe/_bundle.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/_markupsafe/_constants.py b/libs/jinja2/_markupsafe/_constants.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/_markupsafe/_native.py b/libs/jinja2/_markupsafe/_native.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/_markupsafe/tests.py b/libs/jinja2/_markupsafe/tests.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/_stringdefs.py b/libs/jinja2/_stringdefs.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/bccache.py b/libs/jinja2/bccache.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/compiler.py b/libs/jinja2/compiler.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/constants.py b/libs/jinja2/constants.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/debug.py b/libs/jinja2/debug.py old mode 100755 new mode 100644 index 3ac4041..2af2222 --- a/libs/jinja2/debug.py +++ b/libs/jinja2/debug.py @@ -77,7 +77,7 @@ def make_frame_proxy(frame): class ProcessedTraceback(object): - """Holds a Jinja preprocessed traceback for printing or reraising.""" + """Holds a Jinja preprocessed traceback for priting or reraising.""" def __init__(self, exc_type, exc_value, frames): assert frames, 'no frames for this traceback?' diff --git a/libs/jinja2/defaults.py b/libs/jinja2/defaults.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/environment.py b/libs/jinja2/environment.py old mode 100755 new mode 100644 index ebb5454..7a9a59f --- a/libs/jinja2/environment.py +++ b/libs/jinja2/environment.py @@ -67,7 +67,7 @@ def copy_cache(cache): def load_extensions(environment, extensions): """Load the extensions from the list and bind it to the environment. - Returns a dict of instantiated environments. + Returns a dict of instanciated environments. """ result = {} for extension in extensions: @@ -239,7 +239,7 @@ class Environment(object): # passed by keyword rather than position. However it's important to # not change the order of arguments because it's used at least # internally in those cases: - # - spontaneous environments (i18n extension and Template) + # - spontaneus environments (i18n extension and Template) # - unittests # If parameter changes are required only add parameters at the end # and don't change the arguments (or the defaults!) of the arguments @@ -435,7 +435,7 @@ class Environment(object): return stream def _generate(self, source, name, filename, defer_init=False): - """Internal hook that can be overridden to hook a different generate + """Internal hook that can be overriden to hook a different generate method in. .. versionadded:: 2.5 @@ -443,7 +443,7 @@ class Environment(object): return generate(source, self, name, filename, defer_init=defer_init) def _compile(self, source, filename): - """Internal hook that can be overridden to hook a different compile + """Internal hook that can be overriden to hook a different compile method in. .. 
versionadded:: 2.5 @@ -1053,7 +1053,7 @@ class TemplateStream(object): def dump(self, fp, encoding=None, errors='strict'): """Dump the complete stream into a file or file-like object. Per default unicode strings are written, if you want to encode - before writing specify an `encoding`. + before writing specifiy an `encoding`. Example usage:: diff --git a/libs/jinja2/exceptions.py b/libs/jinja2/exceptions.py old mode 100755 new mode 100644 index 841aabb..771f6a8 --- a/libs/jinja2/exceptions.py +++ b/libs/jinja2/exceptions.py @@ -62,7 +62,7 @@ class TemplatesNotFound(TemplateNotFound): def __init__(self, names=(), message=None): if message is None: - message = u'none of the templates given were found: ' + \ + message = u'non of the templates given were found: ' + \ u', '.join(map(unicode, names)) TemplateNotFound.__init__(self, names and names[-1] or None, message) self.templates = list(names) diff --git a/libs/jinja2/ext.py b/libs/jinja2/ext.py old mode 100755 new mode 100644 index 206756f..5ba6efd --- a/libs/jinja2/ext.py +++ b/libs/jinja2/ext.py @@ -552,10 +552,6 @@ def babel_extract(fileobj, keywords, comment_tags, options): The `newstyle_gettext` flag can be set to `True` to enable newstyle gettext calls. - .. versionchanged:: 2.7 - A `silent` option can now be provided. If set to `False` template - syntax errors are propagated instead of being ignored. - :param fileobj: the file-like object the messages should be extracted from :param keywords: a list of keywords (i.e. function names) that should be recognized as translation functions @@ -575,10 +571,8 @@ def babel_extract(fileobj, keywords, comment_tags, options): extensions.add(InternationalizationExtension) def getbool(options, key, default=False): - return options.get(key, str(default)).lower() in \ - ('1', 'on', 'yes', 'true') + options.get(key, str(default)).lower() in ('1', 'on', 'yes', 'true') - silent = getbool(options, 'silent', True) environment = Environment( options.get('block_start_string', BLOCK_START_STRING), options.get('block_end_string', BLOCK_END_STRING), @@ -602,8 +596,6 @@ def babel_extract(fileobj, keywords, comment_tags, options): node = environment.parse(source) tokens = list(environment.lex(environment.preprocess(source))) except TemplateSyntaxError, e: - if not silent: - raise # skip templates with syntax errors return diff --git a/libs/jinja2/filters.py b/libs/jinja2/filters.py old mode 100755 new mode 100644 index 8fef6ea..1ef47f9 --- a/libs/jinja2/filters.py +++ b/libs/jinja2/filters.py @@ -13,10 +13,9 @@ import math from random import choice from operator import itemgetter from itertools import imap, groupby -from jinja2.utils import Markup, escape, pformat, urlize, soft_unicode, \ - unicode_urlencode +from jinja2.utils import Markup, escape, pformat, urlize, soft_unicode from jinja2.runtime import Undefined -from jinja2.exceptions import FilterArgumentError +from jinja2.exceptions import FilterArgumentError, SecurityError _word_re = re.compile(r'\w+(?u)') @@ -71,26 +70,6 @@ def do_forceescape(value): return escape(unicode(value)) -def do_urlencode(value): - """Escape strings for use in URLs (uses UTF-8 encoding). It accepts both - dictionaries and regular strings as well as pairwise iterables. - - .. 
versionadded:: 2.7 - """ - itemiter = None - if isinstance(value, dict): - itemiter = value.iteritems() - elif not isinstance(value, basestring): - try: - itemiter = iter(value) - except TypeError: - pass - if itemiter is None: - return unicode_urlencode(value) - return u'&'.join(unicode_urlencode(k) + '=' + - unicode_urlencode(v) for k, v in itemiter) - - @evalcontextfilter def do_replace(eval_ctx, s, old, new, count=None): """Return a copy of the value with all occurrences of a substring @@ -176,12 +155,7 @@ def do_title(s): """Return a titlecased version of the value. I.e. words will start with uppercase letters, all remaining characters are lowercase. """ - rv = [] - for item in re.compile(r'([-\s]+)(?u)').split(s): - if not item: - continue - rv.append(item[0].upper() + item[1:]) - return ''.join(rv) + return soft_unicode(s).title() def do_dictsort(value, case_sensitive=False, by='key'): @@ -372,25 +346,25 @@ def do_filesizeformat(value, binary=False): bytes = float(value) base = binary and 1024 or 1000 prefixes = [ - (binary and 'KiB' or 'kB'), - (binary and 'MiB' or 'MB'), - (binary and 'GiB' or 'GB'), - (binary and 'TiB' or 'TB'), - (binary and 'PiB' or 'PB'), - (binary and 'EiB' or 'EB'), - (binary and 'ZiB' or 'ZB'), - (binary and 'YiB' or 'YB') + (binary and "KiB" or "kB"), + (binary and "MiB" or "MB"), + (binary and "GiB" or "GB"), + (binary and "TiB" or "TB"), + (binary and "PiB" or "PB"), + (binary and "EiB" or "EB"), + (binary and "ZiB" or "ZB"), + (binary and "YiB" or "YB") ] if bytes == 1: - return '1 Byte' + return "1 Byte" elif bytes < base: - return '%d Bytes' % bytes + return "%d Bytes" % bytes else: for i, prefix in enumerate(prefixes): - unit = base ** (i + 2) + unit = base * base ** (i + 1) if bytes < unit: - return '%.1f %s' % ((base * bytes / unit), prefix) - return '%.1f %s' % ((base * bytes / unit), prefix) + return "%.1f %s" % ((bytes / unit), prefix) + return "%.1f %s" % ((bytes / unit), prefix) def do_pprint(value, verbose=False): @@ -583,7 +557,7 @@ def do_batch(value, linecount, fill_with=None): A filter that batches items. It works pretty much like `slice` just the other way round. It returns a list of lists with the given number of items. If you provide a second parameter this - is used to fill up missing items. See this example: + is used to fill missing items. See this example: .. sourcecode:: html+jinja @@ -823,6 +797,5 @@ FILTERS = { 'round': do_round, 'groupby': do_groupby, 'safe': do_mark_safe, - 'xmlattr': do_xmlattr, - 'urlencode': do_urlencode + 'xmlattr': do_xmlattr } diff --git a/libs/jinja2/lexer.py b/libs/jinja2/lexer.py old mode 100755 new mode 100644 index 69865d0..0d3f696 --- a/libs/jinja2/lexer.py +++ b/libs/jinja2/lexer.py @@ -414,7 +414,7 @@ class Lexer(object): (operator_re, TOKEN_OPERATOR, None) ] - # assemble the root lexing rule. because "|" is ungreedy + # assamble the root lexing rule. because "|" is ungreedy # we have to sort by length so that the lexer continues working # as expected when we have parsing rules like <% for block and # <%= for variables. 
(if someone wants asp like syntax) @@ -491,7 +491,7 @@ class Lexer(object): } def _normalize_newlines(self, value): - """Called for strings and template data to normalize it to unicode.""" + """Called for strings and template data to normlize it to unicode.""" return newline_re.sub(self.newline_sequence, value) def tokenize(self, source, name=None, filename=None, state=None): @@ -571,7 +571,7 @@ class Lexer(object): if m is None: continue - # we only match blocks and variables if braces / parentheses + # we only match blocks and variables if brances / parentheses # are balanced. continue parsing with the lower rule which # is the operator rule. do this only if the end tags look # like operators @@ -669,7 +669,7 @@ class Lexer(object): # publish new function and start again pos = pos2 break - # if loop terminated without break we haven't found a single match + # if loop terminated without break we havn't found a single match # either we are at the end of the file or we have a problem else: # end of text diff --git a/libs/jinja2/loaders.py b/libs/jinja2/loaders.py old mode 100755 new mode 100644 index c90bbe7..419a9c8 --- a/libs/jinja2/loaders.py +++ b/libs/jinja2/loaders.py @@ -330,16 +330,12 @@ class PrefixLoader(BaseLoader): self.mapping = mapping self.delimiter = delimiter - def get_loader(self, template): + def get_source(self, environment, template): try: prefix, name = template.split(self.delimiter, 1) loader = self.mapping[prefix] except (ValueError, KeyError): raise TemplateNotFound(template) - return loader, name - - def get_source(self, environment, template): - loader, name = self.get_loader(template) try: return loader.get_source(environment, name) except TemplateNotFound: @@ -347,16 +343,6 @@ class PrefixLoader(BaseLoader): # (the one that includes the prefix) raise TemplateNotFound(template) - @internalcode - def load(self, environment, name, globals=None): - loader, local_name = self.get_loader(name) - try: - return loader.load(environment, local_name) - except TemplateNotFound: - # re-raise the exception with the correct fileame here. - # (the one that includes the prefix) - raise TemplateNotFound(name) - def list_templates(self): result = [] for prefix, loader in self.mapping.iteritems(): @@ -390,15 +376,6 @@ class ChoiceLoader(BaseLoader): pass raise TemplateNotFound(template) - @internalcode - def load(self, environment, name, globals=None): - for loader in self.loaders: - try: - return loader.load(environment, name, globals) - except TemplateNotFound: - pass - raise TemplateNotFound(name) - def list_templates(self): found = set() for loader in self.loaders: diff --git a/libs/jinja2/meta.py b/libs/jinja2/meta.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/nodes.py b/libs/jinja2/nodes.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/optimizer.py b/libs/jinja2/optimizer.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/parser.py b/libs/jinja2/parser.py old mode 100755 new mode 100644 index 2125338..d44229a --- a/libs/jinja2/parser.py +++ b/libs/jinja2/parser.py @@ -223,7 +223,7 @@ class Parser(object): # raise a nicer error message in that case. 
if self.stream.current.type == 'sub': self.fail('Block names in Jinja have to be valid Python ' - 'identifiers and may not contain hyphens, use an ' + 'identifiers and may not contain hypens, use an ' 'underscore instead.') node.body = self.parse_statements(('name:endblock',), drop_needle=True) @@ -698,6 +698,7 @@ class Parser(object): arg = nodes.Const(attr_token.value, lineno=attr_token.lineno) return nodes.Getitem(node, arg, 'load', lineno=token.lineno) if token.type == 'lbracket': + priority_on_attribute = False args = [] while self.stream.current.type != 'rbracket': if args: diff --git a/libs/jinja2/runtime.py b/libs/jinja2/runtime.py old mode 100755 new mode 100644 index 5c39984..a4a47a2 --- a/libs/jinja2/runtime.py +++ b/libs/jinja2/runtime.py @@ -30,8 +30,6 @@ to_string = unicode #: the identity function. Useful for certain things in the environment identity = lambda x: x -_last_iteration = object() - def markup_join(seq): """Concatenation that escapes if necessary and converts to unicode.""" @@ -272,7 +270,6 @@ class LoopContext(object): def __init__(self, iterable, recurse=None): self._iterator = iter(iterable) self._recurse = recurse - self._after = self._safe_next() self.index0 = -1 # try to get the length of the iterable early. This must be done @@ -291,7 +288,7 @@ class LoopContext(object): return args[self.index0 % len(args)] first = property(lambda x: x.index0 == 0) - last = property(lambda x: x._after is _last_iteration) + last = property(lambda x: x.index0 + 1 == x.length) index = property(lambda x: x.index0 + 1) revindex = property(lambda x: x.length - x.index0) revindex0 = property(lambda x: x.length - x.index) @@ -302,12 +299,6 @@ class LoopContext(object): def __iter__(self): return LoopContextIterator(self) - def _safe_next(self): - try: - return next(self._iterator) - except StopIteration: - return _last_iteration - @internalcode def loop(self, iterable): if self._recurse is None: @@ -353,11 +344,7 @@ class LoopContextIterator(object): def next(self): ctx = self.context ctx.index0 += 1 - if ctx._after is _last_iteration: - raise StopIteration() - next_elem = ctx._after - ctx._after = ctx._safe_next() - return next_elem, ctx + return next(ctx._iterator), ctx class Macro(object): diff --git a/libs/jinja2/sandbox.py b/libs/jinja2/sandbox.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/tests.py b/libs/jinja2/tests.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/__init__.py b/libs/jinja2/testsuite/__init__.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/api.py b/libs/jinja2/testsuite/api.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/core_tags.py b/libs/jinja2/testsuite/core_tags.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/debug.py b/libs/jinja2/testsuite/debug.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/doctests.py b/libs/jinja2/testsuite/doctests.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/ext.py b/libs/jinja2/testsuite/ext.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/filters.py b/libs/jinja2/testsuite/filters.py old mode 100755 new mode 100644 index b037e24..aefe768 --- a/libs/jinja2/testsuite/filters.py +++ b/libs/jinja2/testsuite/filters.py @@ -84,28 +84,10 @@ class FilterTestCase(JinjaTestCase): '{{ 1000000000000|filesizeformat(true) }}' ) out = tmpl.render() - self.assert_equal(out, ( - '100 Bytes|1.0 kB|1.0 MB|1.0 GB|1.0 TB|100 Bytes|' - '1000 Bytes|976.6 KiB|953.7 MiB|931.3 GiB' - )) 
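The runtime.py hunk above reverts `loop.last` from a one-item look-ahead to a check against `loop.length`. Not taken from this patch, but as a rough sketch of the template-level behaviour that property drives, assuming the vendored Jinja2 2.6:

```python
# Sketch only (assumes the vendored Jinja2 2.6): loop.last, as restored above,
# is derived from loop.length rather than from a look-ahead on the iterator.
from jinja2 import Environment

env = Environment()
tmpl = env.from_string(
    '{% for item in items %}'
    '{{ item }}{% if not loop.last %}, {% endif %}'
    '{% endfor %}'
)
assert tmpl.render(items=['a', 'b', 'c']) == 'a, b, c'
```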
- - def test_filesizeformat_issue59(self): - tmpl = env.from_string( - '{{ 300|filesizeformat }}|' - '{{ 3000|filesizeformat }}|' - '{{ 3000000|filesizeformat }}|' - '{{ 3000000000|filesizeformat }}|' - '{{ 3000000000000|filesizeformat }}|' - '{{ 300|filesizeformat(true) }}|' - '{{ 3000|filesizeformat(true) }}|' - '{{ 3000000|filesizeformat(true) }}' + assert out == ( + '100 Bytes|0.0 kB|0.0 MB|0.0 GB|0.0 TB|100 Bytes|' + '1000 Bytes|1.0 KiB|0.9 MiB|0.9 GiB' ) - out = tmpl.render() - self.assert_equal(out, ( - '300 Bytes|3.0 kB|3.0 MB|3.0 GB|3.0 TB|300 Bytes|' - '2.9 KiB|2.9 MiB' - )) - def test_first(self): tmpl = env.from_string('{{ foo|first }}') @@ -193,16 +175,6 @@ class FilterTestCase(JinjaTestCase): def test_title(self): tmpl = env.from_string('''{{ "foo bar"|title }}''') assert tmpl.render() == "Foo Bar" - tmpl = env.from_string('''{{ "foo's bar"|title }}''') - assert tmpl.render() == "Foo's Bar" - tmpl = env.from_string('''{{ "foo bar"|title }}''') - assert tmpl.render() == "Foo Bar" - tmpl = env.from_string('''{{ "f bar f"|title }}''') - assert tmpl.render() == "F Bar F" - tmpl = env.from_string('''{{ "foo-bar"|title }}''') - assert tmpl.render() == "Foo-Bar" - tmpl = env.from_string('''{{ "foo\tbar"|title }}''') - assert tmpl.render() == "Foo\tBar" def test_truncate(self): tmpl = env.from_string( @@ -377,18 +349,6 @@ class FilterTestCase(JinjaTestCase): tmpl = env.from_string('{{ "
<div>foo</div>
" }}') assert tmpl.render() == '<div>foo</div>' - def test_urlencode(self): - env = Environment(autoescape=True) - tmpl = env.from_string('{{ "Hello, world!"|urlencode }}') - assert tmpl.render() == 'Hello%2C%20world%21' - tmpl = env.from_string('{{ o|urlencode }}') - assert tmpl.render(o=u"Hello, world\u203d") == "Hello%2C%20world%E2%80%BD" - assert tmpl.render(o=(("f", 1),)) == "f=1" - assert tmpl.render(o=(('f', 1), ("z", 2))) == "f=1&z=2" - assert tmpl.render(o=((u"\u203d", 1),)) == "%E2%80%BD=1" - assert tmpl.render(o={u"\u203d": 1}) == "%E2%80%BD=1" - assert tmpl.render(o={0: 1}) == "0=1" - def suite(): suite = unittest.TestSuite() diff --git a/libs/jinja2/testsuite/imports.py b/libs/jinja2/testsuite/imports.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/inheritance.py b/libs/jinja2/testsuite/inheritance.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/lexnparse.py b/libs/jinja2/testsuite/lexnparse.py old mode 100755 new mode 100644 index 77b76ec..562df62 --- a/libs/jinja2/testsuite/lexnparse.py +++ b/libs/jinja2/testsuite/lexnparse.py @@ -169,7 +169,7 @@ and bar comment #} except TemplateSyntaxError, e: assert str(e) == expected, 'unexpected error message' else: - assert False, 'that was supposed to be an error' + assert False, 'that was suposed to be an error' assert_error('{% for item in seq %}...{% endif %}', "Encountered unknown tag 'endif'. Jinja was looking " @@ -189,7 +189,7 @@ and bar comment #} "that needs to be closed is 'for'.") assert_error('{% block foo-bar-baz %}', "Block names in Jinja have to be valid Python identifiers " - "and may not contain hyphens, use an underscore instead.") + "and may not contain hypens, use an underscore instead.") assert_error('{% unknown_tag %}', "Encountered unknown tag 'unknown_tag'.") @@ -317,7 +317,7 @@ class SyntaxTestCase(JinjaTestCase): self.assert_raises(TemplateSyntaxError, env.from_string, '{% block x %}{% endblock y %}') - def test_constant_casing(self): + def test_contant_casing(self): for const in True, False, None: tmpl = env.from_string('{{ %s }}|{{ %s }}|{{ %s }}' % ( str(const), str(const).lower(), str(const).upper() @@ -327,7 +327,7 @@ class SyntaxTestCase(JinjaTestCase): def test_test_chaining(self): self.assert_raises(TemplateSyntaxError, env.from_string, '{{ foo is string is sequence }}') - assert env.from_string('{{ 42 is string or 42 is number }}' + env.from_string('{{ 42 is string or 42 is number }}' ).render() == 'True' def test_string_concatenation(self): diff --git a/libs/jinja2/testsuite/loader.py b/libs/jinja2/testsuite/loader.py old mode 100755 new mode 100644 index f62ec92..fb1e53d --- a/libs/jinja2/testsuite/loader.py +++ b/libs/jinja2/testsuite/loader.py @@ -182,34 +182,6 @@ class ModuleLoaderTestCase(JinjaTestCase): tmpl_3c4ddf650c1a73df961a6d3d2ce2752f1b8fd490 assert mod.__file__.endswith('.pyc') - def test_choice_loader(self): - log = self.compile_down(py_compile=True) - assert 'Byte-compiled "a/test.html"' in log - - self.mod_env.loader = loaders.ChoiceLoader([ - self.mod_env.loader, - loaders.DictLoader({'DICT_SOURCE': 'DICT_TEMPLATE'}) - ]) - - tmpl1 = self.mod_env.get_template('a/test.html') - self.assert_equal(tmpl1.render(), 'BAR') - tmpl2 = self.mod_env.get_template('DICT_SOURCE') - self.assert_equal(tmpl2.render(), 'DICT_TEMPLATE') - - def test_prefix_loader(self): - log = self.compile_down(py_compile=True) - assert 'Byte-compiled "a/test.html"' in log - - self.mod_env.loader = loaders.PrefixLoader({ - 'MOD': self.mod_env.loader, - 'DICT': 
loaders.DictLoader({'test.html': 'DICT_TEMPLATE'}) - }) - - tmpl1 = self.mod_env.get_template('MOD/a/test.html') - self.assert_equal(tmpl1.render(), 'BAR') - tmpl2 = self.mod_env.get_template('DICT/test.html') - self.assert_equal(tmpl2.render(), 'DICT_TEMPLATE') - def suite(): suite = unittest.TestSuite() diff --git a/libs/jinja2/testsuite/regression.py b/libs/jinja2/testsuite/regression.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/res/__init__.py b/libs/jinja2/testsuite/res/__init__.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/res/templates/broken.html b/libs/jinja2/testsuite/res/templates/broken.html old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/res/templates/foo/test.html b/libs/jinja2/testsuite/res/templates/foo/test.html old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/res/templates/syntaxerror.html b/libs/jinja2/testsuite/res/templates/syntaxerror.html old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/res/templates/test.html b/libs/jinja2/testsuite/res/templates/test.html old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/security.py b/libs/jinja2/testsuite/security.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/tests.py b/libs/jinja2/testsuite/tests.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/testsuite/utils.py b/libs/jinja2/testsuite/utils.py old mode 100755 new mode 100644 diff --git a/libs/jinja2/utils.py b/libs/jinja2/utils.py old mode 100755 new mode 100644 index 568c63f..49e9e9a --- a/libs/jinja2/utils.py +++ b/libs/jinja2/utils.py @@ -12,10 +12,6 @@ import re import sys import errno try: - from urllib.parse import quote_from_bytes as url_quote -except ImportError: - from urllib import quote as url_quote -try: from thread import allocate_lock except ImportError: from dummy_thread import allocate_lock @@ -67,7 +63,7 @@ except TypeError, _error: del _test_gen_bug, _error -# for python 2.x we create ourselves a next() function that does the +# for python 2.x we create outselves a next() function that does the # basics without exception catching. try: next = next @@ -132,7 +128,7 @@ def contextfunction(f): def evalcontextfunction(f): - """This decorator can be used to mark a function or method as an eval + """This decoraotr can be used to mark a function or method as an eval context callable. This is similar to the :func:`contextfunction` but instead of passing the context, an evaluation context object is passed. For more information about the eval context, see @@ -195,7 +191,7 @@ def clear_caches(): def import_string(import_name, silent=False): - """Imports an object based on a string. This is useful if you want to + """Imports an object based on a string. This use useful if you want to use import paths as endpoints or something similar. An import path can be specified either in dotted notation (``xml.sax.saxutils.escape``) or with a colon as object delimiter (``xml.sax.saxutils:escape``). @@ -353,21 +349,6 @@ def generate_lorem_ipsum(n=5, html=True, min=20, max=100): return Markup(u'\n'.join(u'

<p>%s</p>
' % escape(x) for x in result)) -def unicode_urlencode(obj, charset='utf-8'): - """URL escapes a single bytestring or unicode string with the - given charset if applicable to URL safe quoting under all rules - that need to be considered under all supported Python versions. - - If non strings are provided they are converted to their unicode - representation first. - """ - if not isinstance(obj, basestring): - obj = unicode(obj) - if isinstance(obj, unicode): - obj = obj.encode(charset) - return unicode(url_quote(obj)) - - class LRUCache(object): """A simple LRU Cache implementation.""" @@ -412,7 +393,7 @@ class LRUCache(object): return (self.capacity,) def copy(self): - """Return a shallow copy of the instance.""" + """Return an shallow copy of the instance.""" rv = self.__class__(self.capacity) rv._mapping.update(self._mapping) rv._queue = deque(self._queue) @@ -462,7 +443,7 @@ class LRUCache(object): """Get an item from the cache. Moves the item up so that it has the highest priority then. - Raise a `KeyError` if it does not exist. + Raise an `KeyError` if it does not exist. """ rv = self._mapping[key] if self._queue[-1] != key: @@ -497,7 +478,7 @@ class LRUCache(object): def __delitem__(self, key): """Remove an item from the cache dict. - Raise a `KeyError` if it does not exist. + Raise an `KeyError` if it does not exist. """ self._wlock.acquire() try: @@ -598,7 +579,7 @@ class Joiner(object): # try markupsafe first, if that fails go with Jinja2's bundled version # of markupsafe. Markupsafe was previously Jinja2's implementation of -# the Markup object but was moved into a separate package in a patchlevel +# the Markup object but was moved into a separate package in a patchleve # release try: from markupsafe import Markup, escape, soft_unicode diff --git a/libs/jinja2/visitor.py b/libs/jinja2/visitor.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/__init__.py b/libs/werkzeug/__init__.py old mode 100755 new mode 100644 index efe0eb7..edccb26 --- a/libs/werkzeug/__init__.py +++ b/libs/werkzeug/__init__.py @@ -19,7 +19,7 @@ import sys # the version. Usually set automatically by a script. 
-__version__ = '0.9-dev' +__version__ = '0.8.3' # This import magic raises concerns quite often which is why the implementation diff --git a/libs/werkzeug/_internal.py b/libs/werkzeug/_internal.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/contrib/__init__.py b/libs/werkzeug/contrib/__init__.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/contrib/atom.py b/libs/werkzeug/contrib/atom.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/contrib/cache.py b/libs/werkzeug/contrib/cache.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/contrib/fixers.py b/libs/werkzeug/contrib/fixers.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/contrib/iterio.py b/libs/werkzeug/contrib/iterio.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/contrib/jsrouting.py b/libs/werkzeug/contrib/jsrouting.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/contrib/kickstart.py b/libs/werkzeug/contrib/kickstart.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/contrib/limiter.py b/libs/werkzeug/contrib/limiter.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/contrib/lint.py b/libs/werkzeug/contrib/lint.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/contrib/profiler.py b/libs/werkzeug/contrib/profiler.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/contrib/securecookie.py b/libs/werkzeug/contrib/securecookie.py old mode 100755 new mode 100644 index 733b053..9e6feeb --- a/libs/werkzeug/contrib/securecookie.py +++ b/libs/werkzeug/contrib/securecookie.py @@ -88,6 +88,7 @@ r""" :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ +import sys import cPickle as pickle from hmac import new as hmac from time import time @@ -97,7 +98,21 @@ from werkzeug.contrib.sessions import ModificationTrackingDict from werkzeug.security import safe_str_cmp -from hashlib import sha1 as _default_hash +# rather ugly way to import the correct hash method. Because +# hmac either accepts modules with a new method (sha, md5 etc.) +# or a hashlib factory function we have to figure out what to +# pass to it. If we have 2.5 or higher (so not 2.4 with a +# custom hashlib) we import from hashlib and fail if it does +# not exist (have seen that in old OS X versions). +# in all other cases the now deprecated sha module is used. 
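The securecookie.py hunk above swaps the plain `hashlib` import for a fallback chain so the module still loads on Python builds without `hashlib`. Not part of the patch, but a minimal usage sketch of the class this module provides, assuming the vendored Werkzeug 0.8:

```python
# Sketch only (assumes the vendored Werkzeug 0.8): whichever _default_hash the
# block above selects is used to sign the serialized cookie payload.
from werkzeug.contrib.securecookie import SecureCookie

cookie = SecureCookie({'user_id': 42}, secret_key='change-me')
payload = cookie.serialize()                       # pickled and HMAC-signed
restored = SecureCookie.unserialize(payload, 'change-me')
assert restored['user_id'] == 42
```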
+_default_hash = None +if sys.version_info >= (2, 5): + try: + from hashlib import sha1 as _default_hash + except ImportError: + pass +if _default_hash is None: + import sha as _default_hash class UnquoteError(Exception): diff --git a/libs/werkzeug/contrib/sessions.py b/libs/werkzeug/contrib/sessions.py old mode 100755 new mode 100644 index 7f6a543..b81351a --- a/libs/werkzeug/contrib/sessions.py +++ b/libs/werkzeug/contrib/sessions.py @@ -58,7 +58,10 @@ import tempfile from os import path from time import time from random import random -from hashlib import sha1 +try: + from hashlib import sha1 +except ImportError: + from sha import new as sha1 from cPickle import dump, load, HIGHEST_PROTOCOL from werkzeug.datastructures import CallbackDict diff --git a/libs/werkzeug/contrib/testtools.py b/libs/werkzeug/contrib/testtools.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/contrib/wrappers.py b/libs/werkzeug/contrib/wrappers.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/datastructures.py b/libs/werkzeug/datastructures.py old mode 100755 new mode 100644 index 3aaeb5f..09d59e8 --- a/libs/werkzeug/datastructures.py +++ b/libs/werkzeug/datastructures.py @@ -2079,7 +2079,7 @@ class ETags(object): return etag in self._strong def __nonzero__(self): - return bool(self.star_tag or self._strong or self._weak) + return bool(self.star_tag or self._strong) def __str__(self): return self.to_header() diff --git a/libs/werkzeug/debug/__init__.py b/libs/werkzeug/debug/__init__.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/debug/console.py b/libs/werkzeug/debug/console.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/debug/repr.py b/libs/werkzeug/debug/repr.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/debug/shared/FONT_LICENSE b/libs/werkzeug/debug/shared/FONT_LICENSE old mode 100755 new mode 100644 diff --git a/libs/werkzeug/debug/shared/debugger.js b/libs/werkzeug/debug/shared/debugger.js old mode 100755 new mode 100644 diff --git a/libs/werkzeug/debug/shared/jquery.js b/libs/werkzeug/debug/shared/jquery.js old mode 100755 new mode 100644 diff --git a/libs/werkzeug/debug/shared/style.css b/libs/werkzeug/debug/shared/style.css old mode 100755 new mode 100644 diff --git a/libs/werkzeug/debug/shared/ubuntu.ttf b/libs/werkzeug/debug/shared/ubuntu.ttf old mode 100755 new mode 100644 diff --git a/libs/werkzeug/debug/tbtools.py b/libs/werkzeug/debug/tbtools.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/exceptions.py b/libs/werkzeug/exceptions.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/formparser.py b/libs/werkzeug/formparser.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/http.py b/libs/werkzeug/http.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/local.py b/libs/werkzeug/local.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/posixemulation.py b/libs/werkzeug/posixemulation.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/routing.py b/libs/werkzeug/routing.py old mode 100755 new mode 100644 index 381ce2b..7b11b14 --- a/libs/werkzeug/routing.py +++ b/libs/werkzeug/routing.py @@ -100,7 +100,7 @@ import posixpath from pprint import pformat from urlparse import urljoin -from werkzeug.urls import url_encode, url_quote +from werkzeug.urls import url_encode, url_decode, url_quote from werkzeug.utils import redirect, format_string from werkzeug.exceptions import HTTPException, NotFound, MethodNotAllowed from werkzeug._internal import _get_environ @@ -715,7 +715,7 
@@ class Rule(RuleFactory): return processed.add(data) else: - add(url_quote(data, self.map.charset, safe='/:|+')) + add(url_quote(data, self.map.charset, safe='/:|')) domain_part, url = (u''.join(tmp)).split('|', 1) if append_unknown: @@ -1503,8 +1503,7 @@ class MapAdapter(object): self.url_scheme, self.get_host(domain_part), posixpath.join(self.script_name[:-1].lstrip('/'), - url_quote(path_info.lstrip('/'), self.map.charset, - safe='/:|+')), + url_quote(path_info.lstrip('/'), self.map.charset)), suffix )) diff --git a/libs/werkzeug/script.py b/libs/werkzeug/script.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/security.py b/libs/werkzeug/security.py old mode 100755 new mode 100644 index 4552753..5f1d7d4 --- a/libs/werkzeug/security.py +++ b/libs/werkzeug/security.py @@ -14,6 +14,20 @@ import posixpath from itertools import izip from random import SystemRandom +# because the API of hmac changed with the introduction of the +# new hashlib module, we have to support both. This sets up a +# mapping to the digest factory functions and the digest modules +# (or factory functions with changed API) +try: + from hashlib import sha1, md5 + _hash_funcs = _hash_mods = {'sha1': sha1, 'md5': md5} + _sha1_mod = sha1 + _md5_mod = md5 +except ImportError: + import sha as _sha1_mod, md5 as _md5_mod + _hash_mods = {'sha1': _sha1_mod, 'md5': _md5_mod} + _hash_funcs = {'sha1': _sha1_mod.new, 'md5': _md5_mod.new} + SALT_CHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' @@ -23,20 +37,6 @@ _os_alt_seps = list(sep for sep in [os.path.sep, os.path.altsep] if sep not in (None, '/')) -def _find_hashlib_algorithms(): - import hashlib - algos = getattr(hashlib, 'algorithms', None) - if algos is None: - algos = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512') - rv = {} - for algo in algos: - func = getattr(hashlib, algo, None) - if func is not None: - rv[algo] = func - return rv -_hash_funcs = _find_hashlib_algorithms() - - def safe_str_cmp(a, b): """This function compares strings in somewhat constant time. This requires that the length of at least one string is known in advance. @@ -68,11 +68,11 @@ def _hash_internal(method, salt, password): if method == 'plain': return password if salt: - if method not in _hash_funcs: + if method not in _hash_mods: return None if isinstance(salt, unicode): salt = salt.encode('utf-8') - h = hmac.new(salt, None, _hash_funcs[method]) + h = hmac.new(salt, None, _hash_mods[method]) else: if method not in _hash_funcs: return None @@ -97,7 +97,7 @@ def generate_password_hash(password, method='sha1', salt_length=8): is used, hmac is used internally to salt the password. :param password: the password to hash - :param method: the hash method to use (one that hashlib supports) + :param method: the hash method to use (``'md5'`` or ``'sha1'``) :param salt_length: the lengt of the salt in letters """ salt = method != 'plain' and gen_salt(salt_length) or '' diff --git a/libs/werkzeug/serving.py b/libs/werkzeug/serving.py old mode 100755 new mode 100644 index fcca3b5..abb9927 --- a/libs/werkzeug/serving.py +++ b/libs/werkzeug/serving.py @@ -35,8 +35,6 @@ :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. 
""" -from __future__ import with_statement - import os import socket import sys @@ -252,13 +250,10 @@ class WSGIRequestHandler(BaseHTTPRequestHandler, object): BaseRequestHandler = WSGIRequestHandler -def generate_adhoc_ssl_pair(cn=None): +def generate_adhoc_ssl_context(): + """Generates an adhoc SSL context for the development server.""" from random import random - from OpenSSL import crypto - - # pretty damn sure that this is not actually accepted by anyone - if cn is None: - cn = '*' + from OpenSSL import crypto, SSL cert = crypto.X509() cert.set_serial_number(int(random() * sys.maxint)) @@ -266,7 +261,7 @@ def generate_adhoc_ssl_pair(cn=None): cert.gmtime_adj_notAfter(60 * 60 * 24 * 365) subject = cert.get_subject() - subject.CN = cn + subject.CN = '*' subject.O = 'Dummy Certificate' issuer = cert.get_issuer() @@ -278,59 +273,10 @@ def generate_adhoc_ssl_pair(cn=None): cert.set_pubkey(pkey) cert.sign(pkey, 'md5') - return cert, pkey - - -def make_ssl_devcert(base_path, host=None, cn=None): - """Creates an SSL key for development. This should be used instead of - the ``'adhoc'`` key which generates a new cert on each server start. - It accepts a path for where it should store the key and cert and - either a host or CN. If a host is given it will use the CN - ``*.host/CN=host``. - - For more information see :func:`run_simple`. - - .. versionadded:: 0.9 - - :param base_path: the path to the certificate and key. The extension - ``.crt`` is added for the certificate, ``.key`` is - added for the key. - :param host: the name of the host. This can be used as an alternative - for the `cn`. - :param cn: the `CN` to use. - """ - from OpenSSL import crypto - if host is not None: - cn = '*.%s/CN=%s' % (host, host) - cert, pkey = generate_adhoc_ssl_pair(cn=cn) - - cert_file = base_path + '.crt' - pkey_file = base_path + '.key' - - with open(cert_file, 'w') as f: - f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert)) - with open(pkey_file, 'w') as f: - f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey)) - - return cert_file, pkey_file - - -def generate_adhoc_ssl_context(): - """Generates an adhoc SSL context for the development server.""" - from OpenSSL import SSL - pkey, cert = generate_adhoc_ssl_pair() ctx = SSL.Context(SSL.SSLv23_METHOD) ctx.use_privatekey(pkey) ctx.use_certificate(cert) - return ctx - -def load_ssl_context(cert_file, pkey_file): - """Loads an SSL context from a certificate and private key file.""" - from OpenSSL import SSL - ctx = SSL.Context(SSL.SSLv23_METHOD) - ctx.use_certificate_file(cert_file) - ctx.use_privatekey_file(pkey_file) return ctx @@ -354,9 +300,6 @@ class _SSLConnectionFix(object): def __getattr__(self, attrib): return getattr(self._con, attrib) - def shutdown(self, arg=None): - self._con.shutdown() - def select_ip_version(host, port): """Returns AF_INET4 or AF_INET6 depending on where to connect to.""" @@ -399,8 +342,6 @@ class BaseWSGIServer(HTTPServer, object): except ImportError: raise TypeError('SSL is not available if the OpenSSL ' 'library is not installed.') - if isinstance(ssl_context, tuple): - ssl_context = load_ssl_context(*ssl_context) if ssl_context == 'adhoc': ssl_context = generate_adhoc_ssl_context() self.socket = tsafe.Connection(ssl_context, self.socket) @@ -614,10 +555,6 @@ def run_simple(hostname, port, application, use_reloader=False, .. versionadded:: 0.6 support for SSL was added. - .. versionadded:: 0.8 - Added support for automatically loading a SSL context from certificate - file and private key. 
- :param hostname: The host for the application. eg: ``'localhost'`` :param port: The port for the server. eg: ``8080`` :param application: the WSGI application to execute @@ -645,8 +582,7 @@ def run_simple(hostname, port, application, use_reloader=False, This means that the server will die on errors but it can be useful to hook debuggers in (pdb etc.) :param ssl_context: an SSL context for the connection. Either an OpenSSL - context, a tuple in the form ``(cert_file, pkey_file)``, - the string ``'adhoc'`` if the server should + context, the string ``'adhoc'`` if the server should automatically create one, or `None` to disable SSL (which is the default). """ diff --git a/libs/werkzeug/templates.py b/libs/werkzeug/templates.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/test.py b/libs/werkzeug/test.py old mode 100755 new mode 100644 index 314b7e5..62153d4 --- a/libs/werkzeug/test.py +++ b/libs/werkzeug/test.py @@ -474,10 +474,7 @@ class EnvironBuilder(object): return 80 def __del__(self): - try: - self.close() - except Exception: - pass + self.close() def close(self): """Closes all files. If you put real :class:`file` objects into the @@ -603,11 +600,14 @@ class Client(object): def __init__(self, application, response_wrapper=None, use_cookies=True, allow_subdomain_redirects=False): self.application = application + if response_wrapper is None: + response_wrapper = lambda a, s, h: (a, s, h) self.response_wrapper = response_wrapper if use_cookies: self.cookie_jar = _TestCookieJar() else: self.cookie_jar = None + self.redirect_client = None self.allow_subdomain_redirects = allow_subdomain_redirects def set_cookie(self, server_name, key, value='', max_age=None, @@ -629,46 +629,6 @@ class Client(object): self.set_cookie(server_name, key, expires=0, max_age=0, path=path, domain=domain) - def run_wsgi_app(self, environ, buffered=False): - """Runs the wrapped WSGI app with the given environment.""" - if self.cookie_jar is not None: - self.cookie_jar.inject_wsgi(environ) - rv = run_wsgi_app(self.application, environ, buffered=buffered) - if self.cookie_jar is not None: - self.cookie_jar.extract_wsgi(environ, rv[2]) - return rv - - def resolve_redirect(self, response, new_location, environ, buffered=False): - """Resolves a single redirect and triggers the request again - directly on this redirect client. - """ - scheme, netloc, script_root, qs, anchor = urlparse.urlsplit(new_location) - base_url = urlparse.urlunsplit((scheme, netloc, '', '', '')).rstrip('/') + '/' - - cur_server_name = netloc.split(':', 1)[0].split('.') - real_server_name = get_host(environ).rsplit(':', 1)[0].split('.') - - if self.allow_subdomain_redirects: - allowed = cur_server_name[-len(real_server_name):] == real_server_name - else: - allowed = cur_server_name == real_server_name - - if not allowed: - raise RuntimeError('%r does not support redirect to ' - 'external targets' % self.__class__) - - # For redirect handling we temporarily disable the response - # wrapper. This is not threadsafe but not a real concern - # since the test client must not be shared anyways. 
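The serving.py hunks above drop `make_ssl_devcert`, `load_ssl_context` and the `(cert_file, pkey_file)` tuple form, so only an OpenSSL context object or the string `'adhoc'` remains valid for `ssl_context`. A hedged sketch of that remaining usage, not from the patch and requiring pyOpenSSL:

```python
# Sketch only (assumes the vendored Werkzeug 0.8 plus pyOpenSSL): 'adhoc'
# generates a throw-away self-signed certificate on every server start.
from werkzeug.serving import run_simple
from werkzeug.wrappers import Response

def hello_app(environ, start_response):
    return Response('hello over https')(environ, start_response)

if __name__ == '__main__':
    run_simple('localhost', 8443, hello_app, ssl_context='adhoc')
```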
- old_response_wrapper = self.response_wrapper - self.response_wrapper = None - try: - return self.open(path=script_root, base_url=base_url, - query_string=qs, as_tuple=True, - buffered=buffered) - finally: - self.response_wrapper = old_response_wrapper - def open(self, *args, **kwargs): """Takes the same arguments as the :class:`EnvironBuilder` class with some additions: You can provide a :class:`EnvironBuilder` or a WSGI @@ -710,25 +670,61 @@ class Client(object): finally: builder.close() - response = self.run_wsgi_app(environ, buffered=buffered) + if self.cookie_jar is not None: + self.cookie_jar.inject_wsgi(environ) + rv = run_wsgi_app(self.application, environ, buffered=buffered) + if self.cookie_jar is not None: + self.cookie_jar.extract_wsgi(environ, rv[2]) # handle redirects redirect_chain = [] - while 1: - status_code = int(response[1].split(None, 1)[0]) - if status_code not in (301, 302, 303, 305, 307) \ - or not follow_redirects: - break - new_location = Headers.linked(response[2])['location'] - new_redirect_entry = (new_location, status_code) - if new_redirect_entry in redirect_chain: - raise ClientRedirectError('loop detected') - redirect_chain.append(new_redirect_entry) - environ, response = self.resolve_redirect(response, new_location, - environ, buffered=buffered) - - if self.response_wrapper is not None: - response = self.response_wrapper(*response) + status_code = int(rv[1].split(None, 1)[0]) + while status_code in (301, 302, 303, 305, 307) and follow_redirects: + if not self.redirect_client: + # assume that we're not using the user defined response wrapper + # so that we don't need any ugly hacks to get the status + # code from the response. + self.redirect_client = Client(self.application) + self.redirect_client.cookie_jar = self.cookie_jar + + redirect = dict(rv[2])['Location'] + + scheme, netloc, script_root, qs, anchor = urlparse.urlsplit(redirect) + base_url = urlparse.urlunsplit((scheme, netloc, '', '', '')).rstrip('/') + '/' + + cur_server_name = netloc.split(':', 1)[0].split('.') + real_server_name = get_host(environ).split(':', 1)[0].split('.') + + if self.allow_subdomain_redirects: + allowed = cur_server_name[-len(real_server_name):] == real_server_name + else: + allowed = cur_server_name == real_server_name + + if not allowed: + raise RuntimeError('%r does not support redirect to ' + 'external targets' % self.__class__) + + redirect_chain.append((redirect, status_code)) + + # the redirect request should be a new request, and not be based on + # the old request + + redirect_kwargs = { + 'path': script_root, + 'base_url': base_url, + 'query_string': qs, + 'as_tuple': True, + 'buffered': buffered, + 'follow_redirects': False, + } + environ, rv = self.redirect_client.open(**redirect_kwargs) + status_code = int(rv[1].split(None, 1)[0]) + + # Prevent loops + if redirect_chain[-1] in redirect_chain[:-1]: + raise ClientRedirectError("loop detected") + + response = self.response_wrapper(*rv) if as_tuple: return environ, response return response diff --git a/libs/werkzeug/testapp.py b/libs/werkzeug/testapp.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/urls.py b/libs/werkzeug/urls.py old mode 100755 new mode 100644 index a96d8dc..267e8cd --- a/libs/werkzeug/urls.py +++ b/libs/werkzeug/urls.py @@ -141,7 +141,7 @@ def iri_to_uri(iri, charset='utf-8'): if port: hostname += ':' + port - path = _quote(path.encode(charset), safe="/:~+%") + path = _quote(path.encode(charset), safe="/:~+") query = _quote(query.encode(charset), safe="=%&[]:;$()+,!?*/") # this 
absolutely always must return a string. Otherwise some parts of diff --git a/libs/werkzeug/useragents.py b/libs/werkzeug/useragents.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/utils.py b/libs/werkzeug/utils.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/wrappers.py b/libs/werkzeug/wrappers.py old mode 100755 new mode 100644 diff --git a/libs/werkzeug/wsgi.py b/libs/werkzeug/wsgi.py old mode 100755 new mode 100644 index 7eb0ab5..9d52e26 --- a/libs/werkzeug/wsgi.py +++ b/libs/werkzeug/wsgi.py @@ -14,11 +14,10 @@ import urllib import urlparse import posixpath import mimetypes -from itertools import chain, repeat +from itertools import chain from zlib import adler32 from time import time, mktime from datetime import datetime -from functools import partial from werkzeug._internal import _patch_wrapper from werkzeug.http import is_resource_modified, http_date @@ -582,13 +581,6 @@ def make_limited_stream(stream, limit): return stream -def make_chunk_iter_func(stream, limit, buffer_size): - """Helper for the line and chunk iter functions.""" - if hasattr(stream, 'read'): - return partial(make_limited_stream(stream, limit).read, buffer_size) - return iter(chain(stream, repeat(''))).next - - def make_line_iter(stream, limit=None, buffer_size=10 * 1024): """Safely iterates line-based over an input stream. If the input stream is not a :class:`LimitedStream` the `limit` parameter is mandatory. @@ -605,31 +597,46 @@ def make_line_iter(stream, limit=None, buffer_size=10 * 1024): .. versionchanged:: 0.8 This function now ensures that the limit was reached. - .. versionadded:: 0.9 - added support for iterators as input stream. - - :param stream: the stream or iterate to iterate over. + :param stream: the stream to iterate over. :param limit: the limit in bytes for the stream. (Usually content length. Not necessary if the `stream` is a :class:`LimitedStream`. :param buffer_size: The optional buffer size. """ + stream = make_limited_stream(stream, limit) def _iter_basic_lines(): - _read = make_chunk_iter_func(stream, limit, buffer_size) + _read = stream.read buffer = [] while 1: - new_data = _read() - if not new_data: + if len(buffer) > 1: + yield buffer.pop() + continue + + # we reverse the chunks because popping from the last + # position of the list is O(1) and the number of chunks + # read will be quite large for binary files. + chunks = _read(buffer_size).splitlines(True) + chunks.reverse() + + first_chunk = buffer and buffer[0] or '' + if chunks: + if first_chunk and first_chunk[-1] in '\r\n': + yield first_chunk + first_chunk = '' + first_chunk += chunks.pop() + else: + yield first_chunk break - new_buf = [] - for item in chain(buffer, new_data.splitlines(True)): - new_buf.append(item) - if item and item[-1:] in '\r\n': - yield ''.join(new_buf) - new_buf = [] - buffer = new_buf - if buffer: - yield ''.join(buffer) + + buffer = chunks + + # in case the line is longer than the buffer size we + # can't yield yet. This will only happen if the buffer + # is empty. + if not buffer and first_chunk[-1] not in '\r\n': + buffer = [first_chunk] + else: + yield first_chunk # This hackery is necessary to merge 'foo\r' and '\n' into one item # of 'foo\r\n' if we were unlucky and we hit a chunk boundary. @@ -648,26 +655,24 @@ def make_line_iter(stream, limit=None, buffer_size=10 * 1024): def make_chunk_iter(stream, separator, limit=None, buffer_size=10 * 1024): """Works like :func:`make_line_iter` but accepts a separator which divides chunks. 
If you want newline based processing - you should use :func:`make_limited_stream` instead as it + you shuold use :func:`make_limited_stream` instead as it supports arbitrary newline markers. .. versionadded:: 0.8 - .. versionadded:: 0.9 - added support for iterators as input stream. - - :param stream: the stream or iterate to iterate over. + :param stream: the stream to iterate over. :param separator: the separator that divides chunks. :param limit: the limit in bytes for the stream. (Usually content length. Not necessary if the `stream` is a :class:`LimitedStream`. :param buffer_size: The optional buffer size. """ - _read = make_chunk_iter_func(stream, limit, buffer_size) + stream = make_limited_stream(stream, limit) + _read = stream.read _split = re.compile(r'(%s)' % re.escape(separator)).split buffer = [] while 1: - new_data = _read() + new_data = _read(buffer_size) if not new_data: break chunks = _split(new_data) @@ -840,13 +845,6 @@ class LimitedStream(object): last_pos = self._pos return result - def tell(self): - """Returns the position of the stream. - - .. versionadded:: 0.9 - """ - return self._pos - def next(self): line = self.readline() if line is None: diff --git a/version.py b/version.py index 0c7bd68..6c7094b 100644 --- a/version.py +++ b/version.py @@ -1,2 +1,2 @@ -VERSION = '2.0.0.pre2' +VERSION = '2.0.1.1' BRANCH = 'master'
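For reference on the wsgi.py hunk above: with the 0.8-style `make_line_iter` restored, plain file-like streams need an explicit `limit`. A small sketch, not taken from the patch:

```python
# Sketch only (assumes the vendored Werkzeug 0.8): make_line_iter wraps the
# stream in a LimitedStream, so `limit` is mandatory for ordinary file objects.
from StringIO import StringIO
from werkzeug.wsgi import make_line_iter

body = 'first line\nsecond line\nlast line'
lines = list(make_line_iter(StringIO(body), limit=len(body)))
assert ''.join(lines) == body
```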