19 changed files with 547 additions and 121 deletions
@@ -0,0 +1,84 @@
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.encoding import tryUrlencode
from couchpotato.core.helpers.variable import splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification

log = CPLog(__name__)

autoload = 'Join'


class Join(Notification):

    # URL for request
    url = 'https://joinjoaomgcd.appspot.com/_ah/api/messaging/v1/sendPush?title=%s&text=%s&deviceId=%s&icon=%s'

    # URL for notification icon
    icon = tryUrlencode('https://raw.githubusercontent.com/CouchPotato/CouchPotatoServer/master/couchpotato/static/images/icons/android.png')

    def notify(self, message = '', data = None, listener = None):
        if not data: data = {}

        # Default device list when none are configured
        device_default = [None]

        # Build the request URL in a local variable so the class attribute
        # is not mutated with a duplicate apikey on every call
        url = self.url

        apikey = self.conf('apikey')
        if apikey is not None:
            # Add apikey to request URL
            url += '&apikey=' + apikey
            # If an API key is present, default to sending to all devices
            device_default = ['group.all']

        devices = self.getDevices() or device_default
        successful = 0
        for device in devices:
            response = self.urlopen(url % (self.default_title, tryUrlencode(toUnicode(message)), device, self.icon))

            if response:
                successful += 1
            else:
                log.error('Unable to push notification to Join device with ID %s' % device)

        return successful == len(devices)

    def getDevices(self):
        return splitString(self.conf('devices'))


config = [{
    'name': 'join',
    'groups': [
        {
            'tab': 'notifications',
            'list': 'notification_providers',
            'name': 'join',
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                },
                {
                    'name': 'devices',
                    'default': '',
                    'description': 'IDs of devices to notify, or group to send to if API key is specified (ex: group.all)'
                },
                {
                    'name': 'apikey',
                    'default': '',
                    'advanced': True,
                    'description': 'API key for sending to all devices, or to a group'
                },
                {
                    'name': 'on_snatch',
                    'default': 0,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Also send message when movie is snatched.',
                },
            ],
        }
    ],
}]
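
For reference, notify() boils down to a single GET against Join's sendPush endpoint (self.urlopen defaults to a GET in CouchPotato). A minimal standalone sketch of the same request, assuming requests is available and that DEVICE_ID and APIKEY are placeholders for real values from the Join app:

import requests

try:
    from urllib.parse import quote  # Python 3
except ImportError:
    from urllib import quote  # Python 2

DEVICE_ID = 'group.all'        # placeholder: a device ID or a group
APIKEY = 'your-join-api-key'   # placeholder: key shown in the Join app

url = ('https://joinjoaomgcd.appspot.com/_ah/api/messaging/v1/sendPush'
       '?title=%s&text=%s&deviceId=%s&apikey=%s'
       % (quote('CouchPotato'), quote('Movie available'), DEVICE_ID, APIKEY))
response = requests.get(url)
print(response.status_code)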
@@ -0,0 +1,190 @@
import os
import base64
import logging
import argparse

import requests

LOG_LEVEL = logging.INFO
DEFAULT_CHUNK_SIZE = 4 * 1024 * 1024
TUS_VERSION = '1.0.0'

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.NullHandler())


class TusError(Exception):
    pass


def _init():
    fmt = "[%(asctime)s] %(levelname)s %(message)s"
    h = logging.StreamHandler()
    h.setLevel(LOG_LEVEL)
    h.setFormatter(logging.Formatter(fmt))
    logger.addHandler(h)


def _create_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument('file', type=argparse.FileType('rb'))
    parser.add_argument('--chunk-size', type=int, default=DEFAULT_CHUNK_SIZE)
    parser.add_argument(
        '--header',
        action='append',
        help="A single key/value pair"
             " to be sent with all requests as HTTP header."
             " Can be specified multiple times to send more than one header."
             " Key and value must be separated with \":\".")
    return parser


def _cmd_upload():
    _init()

    parser = _create_parser()
    parser.add_argument('tus_endpoint')
    parser.add_argument('--file_name')
    parser.add_argument(
        '--metadata',
        action='append',
        help="A single key/value pair to be sent in Upload-Metadata header."
             " Can be specified multiple times to send more than one pair."
             " Key and value must be separated with space.")
    args = parser.parse_args()

    # Guard against missing --header/--metadata (argparse leaves them None),
    # and split headers on the first ":" only so values may contain colons
    headers = dict(x.split(':', 1) for x in args.header or [])
    metadata = dict(x.split(' ', 1) for x in args.metadata or [])

    upload(
        args.file,
        args.tus_endpoint,
        chunk_size=args.chunk_size,
        file_name=args.file_name,
        headers=headers,
        metadata=metadata)


def _cmd_resume():
    _init()

    parser = _create_parser()
    parser.add_argument('file_endpoint')
    args = parser.parse_args()

    headers = dict(x.split(':', 1) for x in args.header or [])

    resume(
        args.file,
        args.file_endpoint,
        chunk_size=args.chunk_size,
        headers=headers)
|
|||
|
|||
def upload(file_obj, |
|||
tus_endpoint, |
|||
chunk_size=DEFAULT_CHUNK_SIZE, |
|||
file_name=None, |
|||
headers=None, |
|||
metadata=None): |
|||
file_name = os.path.basename(file_obj.name) |
|||
file_size = _get_file_size(file_obj) |
|||
location = _create_file( |
|||
tus_endpoint, |
|||
file_name, |
|||
file_size, |
|||
extra_headers=headers, |
|||
metadata=metadata) |
|||
resume( |
|||
file_obj, location, chunk_size=chunk_size, headers=headers, offset=0) |
|||
|
|||
|
|||
def _get_file_size(f): |
|||
pos = f.tell() |
|||
f.seek(0, 2) |
|||
size = f.tell() |
|||
f.seek(pos) |
|||
return size |
|||


def _create_file(tus_endpoint,
                 file_name,
                 file_size,
                 extra_headers=None,
                 metadata=None):
    logger.info("Creating file endpoint")

    headers = {
        "Tus-Resumable": TUS_VERSION,
        "Upload-Length": str(file_size),
    }

    if extra_headers:
        headers.update(extra_headers)

    if metadata:
        # Upload-Metadata values must be base64-encoded; encode to bytes and
        # decode the result so this also works on Python 3
        pairs = [k + ' ' + base64.b64encode(v.encode('utf-8')).decode('ascii')
                 for k, v in metadata.items()]
        headers["Upload-Metadata"] = ','.join(pairs)

    response = requests.post(tus_endpoint, headers=headers)
    if response.status_code != 201:
        raise TusError("Create failed: %s" % response)

    location = response.headers["Location"]
    logger.info("Created: %s", location)
    return location


def resume(file_obj,
           file_endpoint,
           chunk_size=DEFAULT_CHUNK_SIZE,
           headers=None,
           offset=None):
    if offset is None:
        offset = _get_offset(file_endpoint, extra_headers=headers)

    total_sent = 0
    file_size = _get_file_size(file_obj)
    while offset < file_size:
        file_obj.seek(offset)
        data = file_obj.read(chunk_size)
        offset = _upload_chunk(
            data, offset, file_endpoint, extra_headers=headers)
        total_sent += len(data)
        logger.info("Total bytes sent: %i", total_sent)


def _get_offset(file_endpoint, extra_headers=None):
    logger.info("Getting offset")

    headers = {"Tus-Resumable": TUS_VERSION}

    if extra_headers:
        headers.update(extra_headers)

    response = requests.head(file_endpoint, headers=headers)
    response.raise_for_status()

    offset = int(response.headers["Upload-Offset"])
    logger.info("offset=%i", offset)
    return offset


def _upload_chunk(data, offset, file_endpoint, extra_headers=None):
    logger.info("Uploading chunk from offset: %i", offset)

    headers = {
        'Content-Type': 'application/offset+octet-stream',
        'Upload-Offset': str(offset),
        'Tus-Resumable': TUS_VERSION,
    }

    if extra_headers:
        headers.update(extra_headers)

    response = requests.patch(file_endpoint, headers=headers, data=data)
    if response.status_code != 204:
        raise TusError("Upload chunk failed: %s" % response)

    return int(response.headers["Upload-Offset"])
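
Taken together, upload() performs a fresh tus create-then-upload, while resume() asks the server for the current Upload-Offset and continues from there. A minimal usage sketch, assuming this module is saved as tus.py and that the URLs and file name below are placeholders for a real tus server:

import tus

# Fresh upload: POST creates the upload, then PATCH requests stream chunks
with open('movie.mkv', 'rb') as f:
    tus.upload(f, 'https://example.com/files/',
               metadata={'filename': 'movie.mkv'})

# Resume: HEAD fetches the current Upload-Offset, then uploading continues
# from that offset against the file URL returned by the earlier create call
with open('movie.mkv', 'rb') as f:
    tus.resume(f, 'https://example.com/files/abc123')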