Browse Source

Merge branch 'develop'

pull/1029/merge
Ruud 8 years ago
parent
commit
b538f9a08d
  1. 4
      .gitignore
  2. 3
      README.md
  3. 6
      couchpotato/core/downloaders/putio/__init__.py
  4. 8
      couchpotato/core/downloaders/putio/main.py
  5. 20
      couchpotato/core/downloaders/utorrent.py
  6. 2
      couchpotato/core/media/_base/providers/torrent/bithdtv.py
  7. 14
      couchpotato/core/media/_base/providers/torrent/passthepopcorn.py
  8. 2
      couchpotato/core/media/_base/providers/torrent/torrentleech.py
  9. 1
      couchpotato/core/media/movie/providers/info/themoviedb.py
  10. 2
      couchpotato/core/media/movie/providers/torrent/torrentpotato.py
  11. 2
      couchpotato/core/notifications/emby.py
  12. 84
      couchpotato/core/notifications/join.py
  13. 12
      couchpotato/core/plugins/profile/main.py
  14. 11
      couchpotato/core/plugins/profile/static/profile.js
  15. 11
      couchpotato/static/scripts/combined.plugins.min.js
  16. 2
      couchpotato/static/scripts/couchpotato.js
  17. 8
      libs/guessit/fileutils.py
  18. 214
      libs/pio/api.py
  19. 190
      libs/tus/__init__.py

4
.gitignore

@ -10,3 +10,7 @@
.coverage
coverage.xml
nosetests.xml
# Visual Studio
/.vs

3
README.md

@ -56,9 +56,6 @@ Linux:
Docker:
* You can use [linuxserver.io](https://github.com/linuxserver/docker-couchpotato) or [razorgirl's](https://github.com/razorgirl/docker-couchpotato) to quickly build your own isolated app container. It's based on the Linux instructions above. For more info about Docker check out the [official website](https://www.docker.com).
Ansible:
* You can use [peerster's](https://github.com/peerster/ansible-couchpotato) [Ansible](http://www.ansible.com) role to deploy CouchPotato.
FreeBSD:
* Become root with `su`

6
couchpotato/core/downloaders/putio/__init__.py

@ -34,6 +34,12 @@ config = [{
'default': 0,
},
{
'name': 'https',
'description': 'Set to true if your callback host accepts https instead of http',
'type': 'bool',
'default': 0,
},
{
'name': 'callback_host',
'description': 'External reachable url to CP so put.io can do it\'s thing',
},

8
couchpotato/core/downloaders/putio/main.py

@ -61,9 +61,13 @@ class PutIO(DownloaderBase):
# Note callback_host is NOT our address, it's the internet host that putio can call too
callbackurl = None
if self.conf('download'):
callbackurl = 'http://' + self.conf('callback_host') + '%sdownloader.putio.getfrom/' %Env.get('api_base'.strip('/'))
pre = 'http://'
if self.conf('https'):
pre = 'https://'
callbackurl = pre + self.conf('callback_host') + '%sdownloader.putio.getfrom/' %Env.get('api_base'.strip('/'))
log.debug('callbackurl is %s', callbackurl)
resp = client.Transfer.add_url(url, callback_url = callbackurl, parent_id = putioFolder)
log.debug('resp is %s', resp.id);
log.debug('resp is %s', resp.id)
return self.downloadReturnId(resp.id)
def test(self):

20
couchpotato/core/downloaders/utorrent.py

@ -1,4 +1,4 @@
from base64 import b16encode, b32decode
from base64 import b16encode, b32decode
from datetime import timedelta
from hashlib import sha1
import cookielib
@ -74,24 +74,6 @@ class uTorrent(DownloaderBase):
if not self.connect():
return False
settings = self.utorrent_api.get_settings()
if not settings:
return False
#Fix settings in case they are not set for CPS compatibility
new_settings = {}
if not (settings.get('seed_prio_limitul') == 0 and settings['seed_prio_limitul_flag']):
new_settings['seed_prio_limitul'] = 0
new_settings['seed_prio_limitul_flag'] = True
log.info('Updated uTorrent settings to set a torrent to complete after it the seeding requirements are met.')
if settings.get('bt.read_only_on_complete'): #This doesn't work as this option seems to be not available through the api. Mitigated with removeReadOnly function
new_settings['bt.read_only_on_complete'] = False
log.info('Updated uTorrent settings to not set the files to read only after completing.')
if new_settings:
self.utorrent_api.set_settings(new_settings)
torrent_params = {}
if self.conf('label'):
torrent_params['label'] = self.conf('label')

2
couchpotato/core/media/_base/providers/torrent/bithdtv.py

@ -39,7 +39,7 @@ class Base(TorrentProvider):
if '## SELECT COUNT(' in split_data[0]:
data = split_data[2]
html = BeautifulSoup(data)
html = BeautifulSoup(data, 'html.parser')
try:
result_table = html.find('table', attrs = {'width': '750', 'class': ''})

14
couchpotato/core/media/_base/providers/torrent/passthepopcorn.py

@ -18,12 +18,12 @@ log = CPLog(__name__)
class Base(TorrentProvider):
urls = {
'domain': 'https://tls.passthepopcorn.me',
'detail': 'https://tls.passthepopcorn.me/torrents.php?torrentid=%s',
'torrent': 'https://tls.passthepopcorn.me/torrents.php',
'login': 'https://tls.passthepopcorn.me/ajax.php?action=login',
'login_check': 'https://tls.passthepopcorn.me/ajax.php?action=login',
'search': 'https://tls.passthepopcorn.me/search/%s/0/7/%d'
'domain': 'https://passthepopcorn.me',
'detail': 'https://passthepopcorn.me/torrents.php?torrentid=%s',
'torrent': 'https://passthepopcorn.me/torrents.php',
'login': 'https://passthepopcorn.me/ajax.php?action=login',
'login_check': 'https://passthepopcorn.me/ajax.php?action=login',
'search': 'https://passthepopcorn.me/search/%s/0/7/%d'
}
login_errors = 0
@ -218,7 +218,7 @@ config = [{
'name': 'domain',
'advanced': True,
'label': 'Proxy server',
'description': 'Domain for requests (HTTPS only!), keep empty to use default (tls.passthepopcorn.me).',
'description': 'Domain for requests (HTTPS only!), keep empty to use default (passthepopcorn.me).',
},
{
'name': 'username',

2
couchpotato/core/media/_base/providers/torrent/torrentleech.py

@ -50,7 +50,7 @@ class Base(TorrentProvider):
results.append({
'id': link['href'].replace('/torrent/', ''),
'name': six.text_type(link.string),
'url': self.urls['download'] % url['href'],
'url': url['href'],
'detail_url': self.urls['download'] % details['href'],
'size': self.parseSize(result.find_all('td')[4].string),
'seeders': tryInt(result.find('td', attrs = {'class': 'seeders'}).string),

1
couchpotato/core/media/movie/providers/info/themoviedb.py

@ -54,6 +54,7 @@ class TheMovieDb(MovieProvider):
languages.remove('en')
# default language has a special management
if self.default_language in languages:
languages.remove(self.default_language)
self.languages = languages

2
couchpotato/core/media/movie/providers/torrent/torrentpotato.py

@ -17,6 +17,6 @@ class TorrentPotato(MovieProvider, Base):
'user': host['name'],
'passkey': host['pass_key'],
'imdbid': getIdentifier(media),
'search' : getTitle(media),
'search' : getTitle(media) + ' ' + str(media['info']['year']),
})
return '%s?%s' % (host['host'], arguments)

2
couchpotato/core/notifications/emby.py

@ -18,7 +18,7 @@ class Emby(Notification):
apikey = self.conf('apikey')
host = cleanHost(host)
url = '%semby/Library/Series/Updated' % (host)
url = '%semby/Library/Movies/Updated' % (host)
values = {}
data = urllib.urlencode(values)

84
couchpotato/core/notifications/join.py

@ -0,0 +1,84 @@
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.encoding import tryUrlencode
from couchpotato.core.helpers.variable import splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
log = CPLog(__name__)
autoload = 'Join'
class Join(Notification):
    """Push notifications through the Join (joaoapps) messaging API."""

    # Push endpoint; title/text/device/icon are filled per request,
    # and the apikey parameter is appended when configured.
    url = 'https://joinjoaomgcd.appspot.com/_ah/api/messaging/v1/sendPush?title=%s&text=%s&deviceId=%s&icon=%s'

    # URL for notification icon
    icon = tryUrlencode('https://raw.githubusercontent.com/CouchPotato/CouchPotatoServer/master/couchpotato/static/images/icons/android.png')

    def notify(self, message = '', data = None, listener = None):
        """Send `message` to every configured device.

        Returns True only when the push succeeded for all target devices.
        """
        if not data: data = {}

        # default for devices
        device_default = [None]

        # Build the request URL locally: the original assigned back to
        # self.url, so each call to notify() appended the apikey again.
        url = self.url

        apikey = self.conf('apikey')
        # The config default is '' (not None), so test truthiness: an empty
        # key must not switch the default target to group.all.
        if apikey:
            # Add apikey to request url
            url = url + '&apikey=' + apikey
            # If api key is present, default to sending to all devices
            device_default = ['group.all']

        devices = self.getDevices() or device_default
        successful = 0
        for device in devices:
            response = self.urlopen(url % (self.default_title, tryUrlencode(toUnicode(message)), device, self.icon))
            if response:
                successful += 1
            else:
                log.error('Unable to push notification to Join device with ID %s' % device)

        return successful == len(devices)

    def getDevices(self):
        # 'devices' setting is a comma-separated list of device ids
        return splitString(self.conf('devices'))
# Settings exposed in the CouchPotato web UI for the Join notifier
# (devices list, optional API key, and snatch notifications toggle).
config = [{
    'name': 'join',
    'groups': [
        {
            'tab': 'notifications',
            'list': 'notification_providers',
            'name': 'join',
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                },
                {
                    'name': 'devices',
                    'default': '',
                    'description': 'IDs of devices to notify, or group to send to if API key is specified (ex: group.all)'
                },
                {
                    'name': 'apikey',
                    'default': '',
                    'advanced': True,
                    'description': 'API Key for sending to all devices, or group'
                },
                {
                    'name': 'on_snatch',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Also send message when movie is snatched.',
                },
            ],
        }
    ],
}]

12
couchpotato/core/plugins/profile/main.py

@ -62,6 +62,18 @@ class ProfilePlugin(Plugin):
except:
log.error('Failed: %s', traceback.format_exc())
# Cleanup profiles that have empty qualites
profiles = self.all()
for profile in profiles:
try:
if '' in profile.get('qualities') or '-1' in profile.get('qualities'):
log.warning('Found profile with empty qualities, cleaning it up')
p = db.get('id', profile.get('_id'))
p['qualities'] = [x for x in p['qualities'] if (x != '' and x != '-1')]
db.update(p)
except:
log.error('Failed: %s', traceback.format_exc())
def allView(self, **kwargs):
return {

11
couchpotato/core/plugins/profile/static/profile.js

@ -140,7 +140,7 @@ var Profile = new Class({
};
Array.each(self.type_container.getElements('.type'), function(type){
if(!type.hasClass('deleted') && type.getElement('select').get('value') != -1)
if(!type.hasClass('deleted') && type.getElement('select').get('value') != -1 && type.getElement('select').get('value') != "")
data.types.include({
'quality': type.getElement('select').get('value'),
'finish': +type.getElement('input.finish[type=checkbox]').checked,
@ -258,9 +258,10 @@ Profile.Type = new Class({
self.create();
self.addEvent('change', function(){
self.el[self.qualities.get('value') == '-1' ? 'addClass' : 'removeClass']('is_empty');
self.el[Quality.getQuality(self.qualities.get('value')).allow_3d ? 'addClass': 'removeClass']('allow_3d');
self.deleted = self.qualities.get('value') == '-1';
var has_quality = !(self.qualities.get('value') == '-1' || self.qualities.get('value') == '');
self.el[!has_quality ? 'addClass' : 'removeClass']('is_empty');
self.el[has_quality && Quality.getQuality(self.qualities.get('value')).allow_3d ? 'addClass': 'removeClass']('allow_3d');
self.deleted = !has_quality;
});
},
@ -337,7 +338,7 @@ Profile.Type = new Class({
}).inject(self.qualities);
});
self.qualities.set('value', self.data.quality);
self.qualities.set('value', self.data.quality || -1);
return self.qualities;

11
couchpotato/static/scripts/combined.plugins.min.js

@ -3223,7 +3223,7 @@ var Profile = new Class({
types: []
};
Array.each(self.type_container.getElements(".type"), function(type) {
if (!type.hasClass("deleted") && type.getElement("select").get("value") != -1) data.types.include({
if (!type.hasClass("deleted") && type.getElement("select").get("value") != -1 && type.getElement("select").get("value") != "") data.types.include({
quality: type.getElement("select").get("value"),
finish: +type.getElement("input.finish[type=checkbox]").checked,
"3d": +type.getElement("input.3d[type=checkbox]").checked
@ -3313,9 +3313,10 @@ Profile.Type = new Class({
self.data = data || {};
self.create();
self.addEvent("change", function() {
self.el[self.qualities.get("value") == "-1" ? "addClass" : "removeClass"]("is_empty");
self.el[Quality.getQuality(self.qualities.get("value")).allow_3d ? "addClass" : "removeClass"]("allow_3d");
self.deleted = self.qualities.get("value") == "-1";
var has_quality = !(self.qualities.get("value") == "-1" || self.qualities.get("value") == "");
self.el[!has_quality ? "addClass" : "removeClass"]("is_empty");
self.el[has_quality && Quality.getQuality(self.qualities.get("value")).allow_3d ? "addClass" : "removeClass"]("allow_3d");
self.deleted = !has_quality;
});
},
create: function() {
@ -3364,7 +3365,7 @@ Profile.Type = new Class({
"data-allow_3d": q.allow_3d
}).inject(self.qualities);
});
self.qualities.set("value", self.data.quality);
self.qualities.set("value", self.data.quality || -1);
return self.qualities;
},
getData: function() {

2
couchpotato/static/scripts/couchpotato.js

@ -182,7 +182,7 @@
'click': self.checkForUpdate.bind(self, null)
}
}));
};
}
setting_links.each(function(a){
self.block.more.addLink(a);

8
libs/guessit/fileutils.py

@ -23,6 +23,7 @@ from guessit import s, u
import os.path
import zipfile
import io
import re
def split_path(path):
@ -46,6 +47,13 @@ def split_path(path):
head, tail = os.path.split(path)
headlen = len(head)
# if a string has a : in position 1 it gets splitted in everycase, also if
# there is not a valid drive letter and also if : is not followed by \
if headlen >= 2 and headlen <= 3 and head[1] == ':' and ( head + tail == path ) and ( head[1:] != ':\\' or not re.match("^[a-zA-Z]:\\\\", head) ):
tail = path
head = ''
headlen = 0
# on Unix systems, the root folder is '/'
if head and head == '/'*headlen and tail == '':
return ['/'] + result

214
libs/pio/api.py

@ -1,26 +1,53 @@
# -*- coding: utf-8 -*-
# Changed
# Removed iso8601 library requirement
# Added CP logging
import os
import re
import json
import binascii
import webbrowser
try:
from urllib import urlencode
from couchpotato import CPLog
from dateutil.parser import parse
except ImportError:
from urllib.parse import urlencode
from datetime import datetime
import tus
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
from couchpotato import CPLog
KB = 1024
MB = 1024 * KB
# Read and write operations are limited to this chunk size.
# This can make a big difference when dealing with large files.
CHUNK_SIZE = 256 * KB
BASE_URL = 'https://api.put.io/v2'
UPLOAD_URL = 'https://upload.put.io/v2/files/upload'
TUS_UPLOAD_URL = 'https://upload.put.io/files/'
ACCESS_TOKEN_URL = 'https://api.put.io/v2/oauth2/access_token'
AUTHENTICATION_URL = 'https://api.put.io/v2/oauth2/authenticate'
log = CPLog(__name__)
class APIError(Exception):
pass
class ClientError(APIError):
pass
class ServerError(APIError):
pass
class AuthHelper(object):
def __init__(self, client_id, client_secret, redirect_uri, type='code'):
@ -58,10 +85,21 @@ class AuthHelper(object):
class Client(object):
def __init__(self, access_token):
def __init__(self, access_token, use_retry=False):
self.access_token = access_token
self.session = requests.session()
if use_retry:
# Retry maximum 10 times, backoff on each retry
# Sleeps 1s, 2s, 4s, 8s, etc to a maximum of 120s between retries
# Retries on HTTP status codes 500, 502, 503, 504
retries = Retry(total=10,
backoff_factor=1,
status_forcelist=[500, 502, 503, 504])
# Use the retry strategy for all HTTPS requests
self.session.mount('https://', HTTPAdapter(max_retries=retries))
# Keep resource classes as attributes of client.
# Pass client to resource classes so resource object
# can use the client.
@ -71,7 +109,7 @@ class Client(object):
self.Account = type('Account', (_Account,), attributes)
def request(self, path, method='GET', params=None, data=None, files=None,
headers=None, raw=False, stream=False):
headers=None, raw=False, allow_redirects=True, stream=False):
"""
Wrapper around requests.request()
@ -91,27 +129,31 @@ class Client(object):
headers['Accept'] = 'application/json'
if path.startswith('https://'):
url = path
else:
url = BASE_URL + path
log.debug('url: %s', url)
response = self.session.request(
method, url, params=params, data=data, files=files,
headers=headers, allow_redirects=True, stream=stream)
headers=headers, allow_redirects=allow_redirects, stream=stream)
log.debug('response: %s', response)
if raw:
return response
log.debug('content: %s', response.content)
try:
response = json.loads(response.content)
body = json.loads(response.content.decode())
except ValueError:
raise Exception('Server didn\'t send valid JSON:\n%s\n%s' % (
response, response.content))
raise ServerError('InvalidJSON', response.content)
if response['status'] == 'ERROR':
raise Exception(response['error_type'])
if body['status'] == 'ERROR':
log.error("API returned error: %s", body)
exception_class = {'4': ClientError, '5': ServerError}[str(response.status_code)[0]]
raise exception_class(body['error_type'], body['error_message'])
return response
return body
class _BaseResource(object):
@ -125,8 +167,8 @@ class _BaseResource(object):
self.name = None
self.__dict__.update(resource_dict)
try:
self.created_at = parse(self.created_at)
except AttributeError:
self.created_at = strptime(self.created_at)
except Exception:
self.created_at = None
def __str__(self):
@ -135,7 +177,7 @@ class _BaseResource(object):
def __repr__(self):
# shorten name for display
name = self.name[:17] + '...' if len(self.name) > 20 else self.name
return '<%s id=%r, name="%r">' % (
return '<%s id=%r, name=%r>' % (
self.__class__.__name__, self.id, name)
@ -160,59 +202,113 @@ class _File(_BaseResource):
files = {'file': (name, f)}
else:
files = {'file': f}
d = cls.client.request('/files/upload', method='POST',
d = cls.client.request(UPLOAD_URL, method='POST',
data={'parent_id': parent_id}, files=files)
f = d['file']
return cls(f)
@classmethod
def upload_tus(cls, path, name=None, parent_id=0):
headers = {'Authorization': 'token %s' % cls.client.access_token}
metadata = {'parent_id': str(parent_id)}
if name:
metadata['name'] = name
with open(path) as f:
tus.upload(f, TUS_UPLOAD_URL, file_name=name, headers=headers, metadata=metadata)
def dir(self):
"""List the files under directory."""
return self.list(parent_id=self.id)
def download(self, dest='.', delete_after_download=False):
def download(self, dest='.', delete_after_download=False, chunk_size=CHUNK_SIZE):
if self.content_type == 'application/x-directory':
self._download_directory(dest, delete_after_download)
self._download_directory(dest, delete_after_download, chunk_size)
else:
self._download_file(dest, delete_after_download)
self._download_file(dest, delete_after_download, chunk_size)
def _download_directory(self, dest='.', delete_after_download=False):
name = self.name
if isinstance(name, unicode):
name = name.encode('utf-8', 'replace')
def _download_directory(self, dest, delete_after_download, chunk_size):
name = _str(self.name)
dest = os.path.join(dest, name)
if not os.path.exists(dest):
os.mkdir(dest)
for sub_file in self.dir():
sub_file.download(dest, delete_after_download)
sub_file.download(dest, delete_after_download, chunk_size)
if delete_after_download:
self.delete()
def _download_file(self, dest='.', delete_after_download=False):
response = self.client.request(
'/files/%s/download' % self.id, raw=True, stream=True)
def _verify_file(self, filepath):
log.info('verifying crc32...')
filesize = os.path.getsize(filepath)
if self.size != filesize:
logging.error('file %s is %d bytes, should be %s bytes' % (filepath, filesize, self.size))
return False
crcbin = 0
with open(filepath, 'rb') as f:
while True:
chunk = f.read(CHUNK_SIZE)
if not chunk:
break
crcbin = binascii.crc32(chunk, crcbin) & 0xffffffff
filename = re.match(
'attachment; filename=(.*)',
response.headers['content-disposition']).groups()[0]
# If file name has spaces, it must have quotes around.
filename = filename.strip('"')
crc32 = '%08x' % crcbin
with open(os.path.join(dest, filename), 'wb') as f:
for chunk in response.iter_content(chunk_size=1024):
if crc32 != self.crc32:
logging.error('file %s CRC32 is %s, should be %s' % (filepath, crc32, self.crc32))
return False
return True
def _download_file(self, dest, delete_after_download, chunk_size):
name = _str(self.name)
filepath = os.path.join(dest, name)
if os.path.exists(filepath):
first_byte = os.path.getsize(filepath)
if first_byte == self.size:
log.warning('file %s exists and is the correct size %d' % (filepath, self.size))
else:
first_byte = 0
log.debug('file %s is currently %d, should be %d' % (filepath, first_byte, self.size))
if self.size == 0:
# Create an empty file
open(filepath, 'w').close()
log.debug('created empty file %s' % filepath)
else:
if first_byte < self.size:
with open(filepath, 'ab') as f:
headers = {'Range': 'bytes=%d-' % first_byte}
log.debug('request range: bytes=%d-' % first_byte)
response = self.client.request('/files/%s/download' % self.id,
headers=headers,
raw=True,
stream=True)
for chunk in response.iter_content(chunk_size=chunk_size):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
if self._verify_file(filepath):
if delete_after_download:
self.delete()
def delete(self):
return self.client.request('/files/delete', method='POST',
data={'file_ids': str(self.id)})
data={'file_id': str(self.id)})
@classmethod
def delete_multi(cls, ids):
return cls.client.request('/files/delete', method='POST',
data={'file_ids': ','.join(map(str, ids))})
def move(self, parent_id):
return self.client.request('/files/move', method='POST',
@ -239,6 +335,7 @@ class _Transfer(_BaseResource):
@classmethod
def add_url(cls, url, parent_id=0, extract=False, callback_url=None):
log.debug('callback_url is %s', callback_url)
d = cls.client.request('/transfers/add', method='POST', data=dict(
url=url, save_parent_id=parent_id, extract=extract,
callback_url=callback_url))
@ -247,10 +344,10 @@ class _Transfer(_BaseResource):
@classmethod
def add_torrent(cls, path, parent_id=0, extract=False, callback_url=None):
with open(path) as f:
with open(path, 'rb') as f:
files = {'file': f}
d = cls.client.request('/files/upload', method='POST', files=files,
data=dict(save_parent_id=parent_id,
data=dict(parent_id=parent_id,
extract=extract,
callback_url=callback_url))
t = d['transfer']
@ -260,6 +357,17 @@ class _Transfer(_BaseResource):
def clean(cls):
return cls.client.request('/transfers/clean', method='POST')
def cancel(self):
return self.client.request('/transfers/cancel',
method='POST',
data={'transfer_ids': self.id})
@classmethod
def cancel_multi(cls, ids):
return cls.client.request('/transfers/cancel',
method='POST',
data={'transfer_ids': ','.join(map(str, ids))})
class _Account(_BaseResource):
@ -270,3 +378,31 @@ class _Account(_BaseResource):
@classmethod
def settings(cls):
return cls.client.request('/account/settings', method='GET')
# Due to a nasty bug in datetime module, datetime.strptime calls
# are not thread-safe and can throw a TypeError. Details: https://bugs.python.org/issue7980
# Here we are implementing simple RFC3339 parser which is used in Put.io APIv2.
def strptime(date):
    """Return a datetime parsed from *date* in put.io APIv2's RFC3339-like
    format: YYYY-MM-ddTHH:mm:ss.

    Hand-rolled because datetime.strptime is not thread-safe and can raise
    TypeError under concurrency (https://bugs.python.org/issue7980).
    """
    d = {
        'year': date[0:4],
        'month': date[5:7],
        'day': date[8:10],
        'hour': date[11:13],
        'minute': date[14:16],
        'second': date[17:],
    }
    # items() instead of the py2-only iteritems(), matching this module's
    # other py2/py3 compatibility shims.
    d = dict((k, int(v)) for k, v in d.items())
    return datetime(**d)
def _str(s):
    """Python 3 compatibility helper: on Python 2, encode unicode objects to
    UTF-8 byte strings; everything else (and all Python 3 values) is returned
    unchanged."""
    try:
        text_type = unicode  # NameError on Python 3
    except NameError:
        return s
    if isinstance(s, text_type):
        return s.encode('utf-8', 'replace')
    return s

190
libs/tus/__init__.py

@ -0,0 +1,190 @@
import os
import base64
import logging
import argparse
import requests
LOG_LEVEL = logging.INFO
DEFAULT_CHUNK_SIZE = 4 * 1024 * 1024
TUS_VERSION = '1.0.0'
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.NullHandler())
class TusError(Exception):
    """Raised when the tus server responds with an unexpected status code."""
    pass
def _init():
    """Attach a console handler to the module logger (CLI entry points only)."""
    handler = logging.StreamHandler()
    handler.setLevel(LOG_LEVEL)
    handler.setFormatter(logging.Formatter("[%(asctime)s] %(levelname)s %(message)s"))
    logger.addHandler(handler)
def _create_parser():
    """Build the argument parser shared by the upload and resume commands."""
    parser = argparse.ArgumentParser()
    parser.add_argument('file', type=argparse.FileType('rb'))
    parser.add_argument('--chunk-size', type=int, default=DEFAULT_CHUNK_SIZE)
    header_help = ("A single key/value pair"
                   " to be sent with all requests as HTTP header."
                   " Can be specified multiple times to send more then one header."
                   " Key and value must be separated with \":\".")
    parser.add_argument('--header', action='append', help=header_help)
    return parser
def _cmd_upload():
    """CLI entry point: create a tus upload and send the whole file."""
    _init()
    parser = _create_parser()
    parser.add_argument('tus_endpoint')
    parser.add_argument('--file_name')
    parser.add_argument(
        '--metadata',
        action='append',
        help="A single key/value pair to be sent in Upload-Metadata header."
             " Can be specified multiple times to send more than one pair."
             " Key and value must be separated with space.")
    args = parser.parse_args()

    # args.header / args.metadata are None when the options were not given;
    # the original crashed iterating None here. maxsplit=1 lets values
    # themselves contain the separator (e.g. "Authorization: token a:b").
    headers = dict(x.split(':', 1) for x in (args.header or []))
    metadata = dict(x.split(' ', 1) for x in (args.metadata or []))

    upload(
        args.file,
        args.tus_endpoint,
        chunk_size=args.chunk_size,
        file_name=args.file_name,
        headers=headers,
        metadata=metadata)
def _cmd_resume():
    """CLI entry point: resume an interrupted upload to an existing endpoint."""
    _init()
    parser = _create_parser()
    parser.add_argument('file_endpoint')
    args = parser.parse_args()

    # args.header is None when --header was not given; the original crashed
    # iterating None. maxsplit=1 allows ':' inside header values.
    headers = dict(x.split(':', 1) for x in (args.header or []))

    resume(
        args.file,
        args.file_endpoint,
        chunk_size=args.chunk_size,
        headers=headers)
def upload(file_obj,
           tus_endpoint,
           chunk_size=DEFAULT_CHUNK_SIZE,
           file_name=None,
           headers=None,
           metadata=None):
    """Create an upload resource on the tus server and send file_obj to it.

    file_name: name to register on the server. The original unconditionally
    overwrote this parameter with the basename of file_obj.name, silently
    ignoring the caller's value; now the basename is only a fallback.
    """
    if file_name is None:
        file_name = os.path.basename(file_obj.name)
    file_size = _get_file_size(file_obj)
    location = _create_file(
        tus_endpoint,
        file_name,
        file_size,
        extra_headers=headers,
        metadata=metadata)
    resume(
        file_obj, location, chunk_size=chunk_size, headers=headers, offset=0)
def _get_file_size(f):
    """Return the total size of *f* in bytes without disturbing its position."""
    saved = f.tell()
    try:
        f.seek(0, 2)  # seek to end of file
        return f.tell()
    finally:
        f.seek(saved)
def _create_file(tus_endpoint,
                 file_name,
                 file_size,
                 extra_headers=None,
                 metadata=None):
    """POST to the tus endpoint to create an upload resource; return its URL.

    Raises TusError unless the server answers 201 Created.
    """
    logger.info("Creating file endpoint")

    headers = {
        "Tus-Resumable": TUS_VERSION,
        "Upload-Length": str(file_size),
    }

    if extra_headers:
        headers.update(extra_headers)

    if metadata:
        pairs = []
        for key, value in metadata.items():
            # Per the tus spec, metadata values are base64-encoded. b64encode
            # needs bytes on py3 (on py2, str is bytes so this is a no-op);
            # decode back so the header value is a text string.
            if not isinstance(value, bytes):
                value = value.encode('utf-8')
            pairs.append(key + ' ' + base64.b64encode(value).decode('ascii'))
        headers["Upload-Metadata"] = ','.join(pairs)

    response = requests.post(tus_endpoint, headers=headers)
    if response.status_code != 201:
        raise TusError("Create failed: %s" % response)

    location = response.headers["Location"]
    logger.info("Created: %s", location)
    return location
def resume(file_obj,
           file_endpoint,
           chunk_size=DEFAULT_CHUNK_SIZE,
           headers=None,
           offset=None):
    """Send the remainder of file_obj to an existing upload endpoint.

    When offset is None the server is asked how much it already holds.
    """
    if offset is None:
        offset = _get_offset(file_endpoint, extra_headers=headers)

    total_sent = 0
    file_size = _get_file_size(file_obj)
    while offset < file_size:
        file_obj.seek(offset)
        chunk = file_obj.read(chunk_size)
        # server replies with the new offset after each PATCH
        offset = _upload_chunk(chunk, offset, file_endpoint, extra_headers=headers)
        total_sent += len(chunk)
        logger.info("Total bytes sent: %i", total_sent)
def _get_offset(file_endpoint, extra_headers=None):
    """HEAD the upload endpoint and return the server-side byte offset."""
    logger.info("Getting offset")
    headers = {"Tus-Resumable": TUS_VERSION}
    if extra_headers:
        headers.update(extra_headers)
    resp = requests.head(file_endpoint, headers=headers)
    resp.raise_for_status()
    offset = int(resp.headers["Upload-Offset"])
    logger.info("offset=%i", offset)
    return offset
def _upload_chunk(data, offset, file_endpoint, extra_headers=None):
    """PATCH one chunk of *data* at *offset*; return the server's new offset.

    Raises TusError unless the server answers 204 No Content.
    """
    logger.info("Uploading chunk from offset: %i", offset)
    headers = {
        'Content-Type': 'application/offset+octet-stream',
        'Upload-Offset': str(offset),
        'Tus-Resumable': TUS_VERSION,
    }
    if extra_headers:
        headers.update(extra_headers)
    resp = requests.patch(file_endpoint, headers=headers, data=data)
    if resp.status_code != 204:
        raise TusError("Upload chunk failed: %s" % resp)
    return int(resp.headers["Upload-Offset"])
Loading…
Cancel
Save