
Refactor the adding of NZBs (#1502)

- All adding of NZBs is now handled by sabnzbd.add_nzbfile (see the usage sketch right after this list)
- Moved the actual file processing to nzbparser
- URLGrabber now always supports gzip
- Removed upload.py; everything is handled by add_nzbfile
- Reworked the dirscanner and urlgrabber to use the new code path
- Fixed: retrying a job was broken when a new file was added
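
With this change every entry point (API upload, watched folder, URL grabber, file association, tray icons) funnels into the same function. A minimal usage sketch, assuming a running SABnzbd process; the path and job name are placeholders and the keyword names follow the new signature shown in the sabnzbd/__init__.py diff below:

    import sabnzbd

    # add_nzbfile() accepts either a disk path (str) or a CherryPy file-upload
    # object and returns (status, nzo_ids); status 0 means the job(s) were queued.
    res, nzo_ids = sabnzbd.add_nzbfile("/path/to/example.nzb", nzbname="Example job", keep=True)
    if res == 0:
        print("Queued as", nzo_ids)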
tags/3.0.0Beta3
Safihre committed 5 years ago (committed by GitHub)
parent · commit 15ad60f8a9
16 changed files:
  1. SABnzbd.py (11 lines changed)
  2. sabnzbd/__init__.py (99 lines changed)
  3. sabnzbd/api.py (46 lines changed)
  4. sabnzbd/dirscanner.py (291 lines changed)
  5. sabnzbd/emailer.py (1 line changed)
  6. sabnzbd/filesystem.py (36 lines changed)
  7. sabnzbd/misc.py (33 lines changed)
  8. sabnzbd/notifier.py (2 lines changed)
  9. sabnzbd/nzbparser.py (219 lines changed)
  10. sabnzbd/nzbqueue.py (39 lines changed)
  11. sabnzbd/nzbstuff.py (17 lines changed)
  12. sabnzbd/osxmenu.py (20 lines changed)
  13. sabnzbd/postproc.py (25 lines changed)
  14. sabnzbd/sabtraylinux.py (3 lines changed)
  15. sabnzbd/urlgrabber.py (73 lines changed)
  16. sabnzbd/utils/upload.py (67 lines changed)

11
SABnzbd.py

@@ -70,6 +70,7 @@ from sabnzbd.misc import (
     set_serv_parms,
     get_serv_parms,
     get_from_url,
+    upload_file_to_sabnzbd,
 )
 from sabnzbd.filesystem import get_ext, real_path, long_path, globber_full, remove_file
 from sabnzbd.panic import panic_tmpl, panic_port, panic_host, panic, launch_a_browser
@@ -681,11 +682,9 @@ def check_for_sabnzbd(url, upload_nzbs, allow_browser=True):
     if is_sabnzbd_running(url):
         # Upload any specified nzb files to the running instance
         if upload_nzbs:
-            from sabnzbd.utils.upload import upload_file
-
             prev = sabnzbd.set_https_verification(False)
             for f in upload_nzbs:
-                upload_file(url, f)
+                upload_file_to_sabnzbd(url, f)
             sabnzbd.set_https_verification(prev)
         else:
             # Launch the web browser and quit since sabnzbd is already running
@@ -1449,10 +1448,8 @@ def main():
     # Upload any nzb/zip/rar/nzb.gz/nzb.bz2 files from file association
     if upload_nzbs:
-        from sabnzbd.utils.upload import add_local
-
-        for f in upload_nzbs:
-            add_local(f)
+        for upload_nzb in upload_nzbs:
+            sabnzbd.add_nzbfile(upload_nzb)
 
     # Set URL for browser
     if enable_https:

99
sabnzbd/__init__.py

@@ -84,7 +84,7 @@ from sabnzbd.rating import Rating
 import sabnzbd.misc as misc
 import sabnzbd.filesystem as filesystem
 import sabnzbd.powersup as powersup
-from sabnzbd.dirscanner import DirScanner, process_nzb_archive_file, process_single_nzb
+from sabnzbd.dirscanner import DirScanner
 from sabnzbd.urlgrabber import URLGrabber
 import sabnzbd.scheduler as scheduler
 import sabnzbd.rss as rss
@@ -98,6 +98,7 @@ import sabnzbd.cfg as cfg
 import sabnzbd.database
 import sabnzbd.lang as lang
 import sabnzbd.par2file as par2file
+import sabnzbd.nzbparser as nzbparser
 import sabnzbd.api
 import sabnzbd.interface
 import sabnzbd.nzbstuff as nzbstuff
@@ -634,12 +635,24 @@ def save_compressed(folder, filename, data):
 def add_nzbfile(
-    nzbfile, pp=None, script=None, cat=None, priority=NORMAL_PRIORITY, nzbname=None, reuse=False, password=None
+    nzbfile,
+    pp=None,
+    script=None,
+    cat=None,
+    catdir=None,
+    priority=NORMAL_PRIORITY,
+    nzbname=None,
+    nzo_info=None,
+    url=None,
+    keep=None,
+    reuse=False,
+    password=None,
+    nzo_id=None,
 ):
-    """ Add disk-based NZB file, optional attributes,
+    """ Add file
         'reuse' flag will suppress duplicate detection
     """
-    if pp and pp == "-1":
+    if pp == "-1":
         pp = None
     if script and script.lower() == "default":
         script = None
@@ -648,56 +661,68 @@ def add_nzbfile(
     if isinstance(nzbfile, str):
         # File coming from queue repair
-        filename = nzbfile
-        keep = True
-    else:
-        # TODO: CherryPy mangles unicode-filenames!
-        # See https://github.com/cherrypy/cherrypy/issues/1766
-        filename = encoding.correct_unknown_encoding(nzbfile.filename)
-        keep = False
-
-    if not sabnzbd.WIN32:
-        # If windows client sends file to Unix server backslashes may
-        # be included, so convert these
-        filename = filename.replace("\\", "/")
-
-    filename = os.path.basename(filename)
-    ext = os.path.splitext(filename)[1]
-    if ext.lower() in VALID_ARCHIVES:
-        suffix = ext.lower()
-    else:
-        suffix = ".nzb"
-
-    logging.info("Adding %s", filename)
-
-    if isinstance(nzbfile, str):
         path = nzbfile
+        filename = os.path.basename(path)
+        keep_default = True
+        if not sabnzbd.WIN32:
+            # If windows client sends file to Unix server backslashes may
+            # be included, so convert these
+            path = path.replace("\\", "/")
+        logging.info("Attempting to add %s [%s]", filename, path)
     else:
+        # File from file-upload object
+        # CherryPy mangles unicode-filenames: https://github.com/cherrypy/cherrypy/issues/1766
+        filename = encoding.correct_unknown_encoding(nzbfile.filename)
+        logging.info("Attempting to add %s", filename)
+        keep_default = False
         try:
-            nzb_file, path = tempfile.mkstemp(suffix=suffix)
-            os.write(nzb_file, nzbfile.value)
-            os.close(nzb_file)
+            # We have to create a copy, because we can't re-use the CherryPy temp-file
+            # Just to be sure we add the extension to detect file type later on
+            nzb_temp_file, path = tempfile.mkstemp(suffix=filesystem.get_ext(filename))
+            os.write(nzb_temp_file, nzbfile.file.read())
+            os.close(nzb_temp_file)
         except OSError:
             logging.error(T("Cannot create temp file for %s"), filename)
             logging.info("Traceback: ", exc_info=True)
             return None
 
-    if ext.lower() in VALID_ARCHIVES:
-        return process_nzb_archive_file(
-            filename, path, pp, script, cat, priority=priority, nzbname=nzbname, password=password
+    # Externally defined if we should keep the file?
+    if keep is None:
+        keep = keep_default
+
+    if filesystem.get_ext(filename) in VALID_ARCHIVES:
+        return nzbparser.process_nzb_archive_file(
+            filename,
+            path=path,
+            pp=pp,
+            script=script,
+            cat=cat,
+            catdir=catdir,
+            priority=priority,
+            nzbname=nzbname,
+            keep=keep,
+            reuse=reuse,
+            nzo_info=nzo_info,
+            url=url,
+            password=password,
+            nzo_id=nzo_id,
         )
     else:
-        return process_single_nzb(
+        return nzbparser.process_single_nzb(
             filename,
-            path,
-            pp,
-            script,
-            cat,
+            path=path,
+            pp=pp,
+            script=script,
+            cat=cat,
+            catdir=catdir,
            priority=priority,
            nzbname=nzbname,
            keep=keep,
            reuse=reuse,
+            nzo_info=nzo_info,
+            url=url,
            password=password,
+            nzo_id=nzo_id,
         )
@@ -872,7 +897,7 @@ def get_new_id(prefix, folder, check_list=None):
     """ Return unique prefixed admin identifier within folder
         optionally making sure that id is not in the check_list.
     """
-    for n in range(10000):
+    for n in range(100):
         try:
             if not os.path.exists(folder):
                 os.makedirs(folder)
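
The upload branch above copies the CherryPy payload into a temp file that keeps the original extension, so the later archive/NZB detection can run on a plain path. A standalone sketch of that idea (the helper name is made up for illustration):

    import os
    import tempfile

    def copy_upload_to_temp(upload):
        # Hypothetical helper mirroring the upload branch of add_nzbfile():
        # write the uploaded bytes to a temp file whose suffix preserves the
        # original extension, then hand the resulting path to the parsers.
        suffix = os.path.splitext(upload.filename)[1]
        handle, path = tempfile.mkstemp(suffix=suffix)
        os.write(handle, upload.file.read())
        os.close(handle)
        return path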

46
sabnzbd/api.py

@@ -68,7 +68,7 @@ from sabnzbd.misc import (
     calc_age,
     opts_to_pp,
 )
-from sabnzbd.filesystem import diskspace, get_ext, get_filename, globber_full, clip_path, remove_all
+from sabnzbd.filesystem import diskspace, get_ext, globber_full, clip_path, remove_all
 from sabnzbd.encoding import xml_name
 from sabnzbd.postproc import PostProcessor
 from sabnzbd.articlecache import ArticleCache
@@ -349,34 +349,18 @@ def _api_translate(name, output, kwargs):
 def _api_addfile(name, output, kwargs):
     """ API: accepts name, output, pp, script, cat, priority, nzbname """
-    # Normal upload will send the nzb in a kw arg called nzbfile
-    if name is None or isinstance(name, str):
-        name = kwargs.get("nzbfile")
-    if hasattr(name, "getvalue"):
-        # Side effect of next line is that attribute .value is created
-        # which is needed to make add_nzbfile() work
-        size = name.length
-    elif hasattr(name, "file") and hasattr(name, "filename") and name.filename:
-        # CherryPy 3.2.2 object
-        if hasattr(name.file, "file"):
-            name.value = name.file.file.read()
-        else:
-            name.value = name.file.read()
-        size = len(name.value)
-    elif hasattr(name, "value"):
-        size = len(name.value)
-    else:
-        size = 0
-    if name is not None and size and name.filename:
+    # Normal upload will send the nzb in a kw arg called name
+    if hasattr(name, "file") and hasattr(name, "filename") and name.filename:
         cat = kwargs.get("cat")
         xcat = kwargs.get("xcat")
         if not cat and xcat:
             # Indexer category, so do mapping
             cat = cat_convert(xcat)
-        res = sabnzbd.add_nzbfile(
+        # Add the NZB-file
+        res, nzo_ids = sabnzbd.add_nzbfile(
             name, kwargs.get("pp"), kwargs.get("script"), cat, kwargs.get("priority"), kwargs.get("nzbname")
         )
-        return report(output, keyword="", data={"status": res[0] == 0, "nzo_ids": res[1]})
+        return report(output, keyword="", data={"status": res == 0, "nzo_ids": nzo_ids})
     else:
         return report(output, _MSG_NO_VALUE)
@@ -392,8 +376,6 @@ def _api_retry(name, output, kwargs):
     nzo_id = retry_job(value, name, password)
     if nzo_id:
-        if isinstance(nzo_id, list):
-            nzo_id = nzo_id[0]
         return report(output, keyword="", data={"status": True, "nzo_id": nzo_id})
     else:
         return report(output, _MSG_NO_ITEM)
@@ -412,8 +394,6 @@ def _api_addlocalfile(name, output, kwargs):
     """ API: accepts name, output, pp, script, cat, priority, nzbname """
     if name:
         if os.path.exists(name):
-            fn = get_filename(name)
-            if fn:
             pp = kwargs.get("pp")
             script = kwargs.get("script")
             cat = kwargs.get("cat")
@@ -424,21 +404,17 @@ def _api_addlocalfile(name, output, kwargs):
             priority = kwargs.get("priority")
             nzbname = kwargs.get("nzbname")
 
-            if get_ext(name) in VALID_ARCHIVES:
-                res = sabnzbd.dirscanner.process_nzb_archive_file(
-                    fn, name, pp=pp, script=script, cat=cat, priority=priority, keep=True, nzbname=nzbname
-                )
-            elif get_ext(name) in VALID_NZB_FILES:
-                res = sabnzbd.dirscanner.process_single_nzb(
-                    fn, name, pp=pp, script=script, cat=cat, priority=priority, keep=True, nzbname=nzbname
+            if get_ext(name) in VALID_ARCHIVES + VALID_NZB_FILES:
+                res, nzo_ids = sabnzbd.add_nzbfile(
+                    name, pp=pp, script=script, cat=cat, priority=priority, keep=True, nzbname=nzbname
                 )
+                return report(output, keyword="", data={"status": res == 0, "nzo_ids": nzo_ids})
             else:
-                logging.info('API-call addlocalfile: "%s" not a proper file name', name)
+                logging.info('API-call addlocalfile: "%s" is not a supported file', name)
                 return report(output, _MSG_NO_FILE)
         else:
             logging.info('API-call addlocalfile: file "%s" not found', name)
             return report(output, _MSG_NO_PATH)
-        return report(output, keyword="", data={"status": res[0] == 0, "nzo_ids": res[1]})
     else:
         logging.info("API-call addlocalfile: no file name given")
         return report(output, _MSG_NO_VALUE)
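
For reference, a hedged example of driving the simplified addlocalfile call from the outside, using only parameters visible in this diff; host, port, API key, output format and path are placeholders:

    import urllib.parse
    import urllib.request

    params = urllib.parse.urlencode({
        "mode": "addlocalfile",
        "name": "/path/to/example.nzb",  # any file with a VALID_ARCHIVES/VALID_NZB_FILES extension
        "apikey": "APIKEY",
        "output": "json",
    })
    with urllib.request.urlopen("http://127.0.0.1:8080/api?" + params) as response:
        print(response.read().decode("utf-8"))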

291
sabnzbd/dirscanner.py

@@ -22,35 +22,15 @@ sabnzbd.dirscanner - Scanner for Watched Folder
 import os
 import time
 import logging
-import zipfile
-import gzip
-import bz2
 import threading
 
 import sabnzbd
 from sabnzbd.constants import SCAN_FILE_NAME, VALID_ARCHIVES, VALID_NZB_FILES
-import sabnzbd.utils.rarfile as rarfile
-from sabnzbd.decorators import NzbQueueLocker
-from sabnzbd.encoding import correct_unknown_encoding
-from sabnzbd.newsunpack import is_sevenfile, SevenZip
-import sabnzbd.nzbstuff as nzbstuff
 import sabnzbd.filesystem as filesystem
 import sabnzbd.config as config
 import sabnzbd.cfg as cfg
 
 
-def name_to_cat(fname, cat=None):
-    """ Retrieve category from file name, but only if "cat" is None. """
-    if cat is None and fname.startswith("{{"):
-        n = fname.find("}}")
-        if n > 2:
-            cat = fname[2:n].strip()
-            fname = fname[n + 2 :].strip()
-        logging.debug("Job %s has category %s", fname, cat)
-
-    return fname, cat
 def compare_stat_tuple(tup1, tup2):
     """ Test equality of two stat-tuples, content-related parts only """
     if tup1.st_ino != tup2.st_ino:
@@ -64,40 +44,6 @@ def compare_stat_tuple(tup1, tup2):
     return True
def is_archive(path):
""" Check if file in path is an ZIP, RAR or 7z file
:param path: path to file
:return: (zf, status, expected_extension)
status: -1==Error/Retry, 0==OK, 1==Ignore
"""
if zipfile.is_zipfile(path):
try:
zf = zipfile.ZipFile(path)
return 0, zf, ".zip"
except:
logging.info(T("Cannot read %s"), path, exc_info=True)
return -1, None, ""
elif rarfile.is_rarfile(path):
try:
# Set path to tool to open it
rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND
zf = rarfile.RarFile(path)
return 0, zf, ".rar"
except:
logging.info(T("Cannot read %s"), path, exc_info=True)
return -1, None, ""
elif is_sevenfile(path):
try:
zf = SevenZip(path)
return 0, zf, ".7z"
except:
logging.info(T("Cannot read %s"), path, exc_info=True)
return -1, None, ""
else:
logging.info("Archive %s is not a real archive!", os.path.basename(path))
return 1, None, ""
 def clean_file_list(inp_list, folder, files):
     """ Remove elements of "inp_list" not found in "files" """
     for path in sorted(inp_list.keys()):
@@ -112,205 +58,6 @@ def clean_file_list(inp_list, folder, files):
             del inp_list[path]
@NzbQueueLocker
def process_nzb_archive_file(
filename,
path,
pp=None,
script=None,
cat=None,
catdir=None,
keep=False,
priority=None,
url="",
nzbname=None,
password=None,
nzo_id=None,
):
""" Analyse ZIP file and create job(s).
Accepts ZIP files with ONLY nzb/nfo/folder files in it.
returns (status, nzo_ids)
status: -1==Error/Retry, 0==OK, 1==Ignore
"""
nzo_ids = []
if catdir is None:
catdir = cat
filename, cat = name_to_cat(filename, catdir)
status, zf, extension = is_archive(path)
if status != 0:
return status, []
status = 1
names = zf.namelist()
nzbcount = 0
for name in names:
name = name.lower()
if name.endswith(".nzb"):
status = 0
nzbcount += 1
if status == 0:
if nzbcount != 1:
nzbname = None
for name in names:
if name.lower().endswith(".nzb"):
try:
data = correct_unknown_encoding(zf.read(name))
except OSError:
logging.error(T("Cannot read %s"), name, exc_info=True)
zf.close()
return -1, []
name = filesystem.setname_from_path(name)
if data:
nzo = None
try:
nzo = nzbstuff.NzbObject(
name, pp, script, data, cat=cat, url=url, priority=priority, nzbname=nzbname
)
if not nzo.password:
nzo.password = password
except (TypeError, ValueError):
# Duplicate or empty, ignore
pass
except:
# Something else is wrong, show error
logging.error(T("Error while adding %s, removing"), name, exc_info=True)
if nzo:
if nzo_id:
# Re-use existing nzo_id, when a "future" job gets it payload
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False, delete_all_data=False)
nzo.nzo_id = nzo_id
nzo_id = None
nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo))
nzo.update_rating()
zf.close()
try:
if not keep:
filesystem.remove_file(path)
except OSError:
logging.error(T("Error removing %s"), filesystem.clip_path(path))
logging.info("Traceback: ", exc_info=True)
status = 1
else:
zf.close()
status = 1
return status, nzo_ids
@NzbQueueLocker
def process_single_nzb(
filename,
path,
pp=None,
script=None,
cat=None,
catdir=None,
keep=False,
priority=None,
nzbname=None,
reuse=False,
nzo_info=None,
dup_check=True,
url="",
password=None,
nzo_id=None,
):
""" Analyze file and create a job from it
Supports NZB, NZB.BZ2, NZB.GZ and GZ.NZB-in-disguise
returns (status, nzo_ids)
status: -2==Error/retry, -1==Error, 0==OK, 1==OK-but-ignorecannot-delete
"""
nzo_ids = []
if catdir is None:
catdir = cat
try:
with open(path, "rb") as nzb_file:
check_bytes = nzb_file.read(2)
if check_bytes == b"\x1f\x8b":
# gzip file or gzip in disguise
name = filename.replace(".nzb.gz", ".nzb")
nzb_reader_handler = gzip.GzipFile
elif check_bytes == b"BZ":
# bz2 file or bz2 in disguise
name = filename.replace(".nzb.bz2", ".nzb")
nzb_reader_handler = bz2.BZ2File
else:
name = filename
nzb_reader_handler = open
# Let's get some data and hope we can decode it
with nzb_reader_handler(path, "rb") as nzb_file:
data = correct_unknown_encoding(nzb_file.read())
except:
logging.warning(T("Cannot read %s"), filesystem.clip_path(path))
logging.info("Traceback: ", exc_info=True)
return -2, nzo_ids
if name:
name, cat = name_to_cat(name, catdir)
# The name is used as the name of the folder, so sanitize it using folder specific santization
if not nzbname:
# Prevent embedded password from being damaged by sanitize and trimming
nzbname = os.path.split(name)[1]
try:
nzo = nzbstuff.NzbObject(
name,
pp,
script,
data,
cat=cat,
priority=priority,
nzbname=nzbname,
nzo_info=nzo_info,
url=url,
reuse=reuse,
dup_check=dup_check,
)
if not nzo.password:
nzo.password = password
except TypeError:
# Duplicate, ignore
if nzo_id:
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False)
nzo = None
except ValueError:
# Empty, but correct file
return -1, nzo_ids
except:
if data.find("<nzb") >= 0 > data.find("</nzb"):
# Looks like an incomplete file, retry
return -2, nzo_ids
else:
# Something else is wrong, show error
logging.error(T("Error while adding %s, removing"), name, exc_info=True)
return -1, nzo_ids
if nzo:
if nzo_id:
# Re-use existing nzo_id, when a "future" job gets it payload
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False, delete_all_data=False)
nzo.nzo_id = nzo_id
nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo, quiet=reuse))
nzo.update_rating()
try:
if not keep:
filesystem.remove_file(path)
except OSError:
logging.error(T("Error removing %s"), filesystem.clip_path(path))
logging.info("Traceback: ", exc_info=True)
return 1, nzo_ids
return 0, nzo_ids
 class DirScanner(threading.Thread):
     """ Thread that periodically scans a given directory and picks up any
         valid NZB, NZB.GZ ZIP-with-only-NZB and even NZB.GZ named as .NZB
@@ -398,15 +145,14 @@ class DirScanner(threading.Thread):
                     if os.path.isdir(path) or path in self.ignored or filename[0] == ".":
                         continue
 
-                    ext = os.path.splitext(path)[1].lower()
-                    candidate = ext in VALID_NZB_FILES + VALID_ARCHIVES
-                    if candidate:
+                    if filesystem.get_ext(path) in VALID_NZB_FILES + VALID_ARCHIVES:
                         try:
                             stat_tuple = os.stat(path)
                         except OSError:
                             continue
                     else:
                         self.ignored[path] = 1
+                        continue
 
                     if path in self.suspected:
                         if compare_stat_tuple(self.suspected[path], stat_tuple):
@@ -415,12 +161,11 @@ class DirScanner(threading.Thread):
                         else:
                             del self.suspected[path]
 
-                    if candidate and stat_tuple.st_size > 0:
+                    if stat_tuple.st_size > 0:
                         logging.info("Trying to import %s", path)
 
-                        # Wait until the attributes are stable for 1 second
-                        # but give up after 3 sec
-                        stable = False
+                        # Wait until the attributes are stable for 1 second, but give up after 3 sec
+                        # This indicates that the file is fully written to disk
                         for n in range(3):
                             time.sleep(1.0)
                             try:
@@ -428,37 +173,23 @@ class DirScanner(threading.Thread):
                             except OSError:
                                 continue
                             if compare_stat_tuple(stat_tuple, stat_tuple_tmp):
-                                stable = True
                                 break
-                            stat_tuple = stat_tuple_tmp
-
-                        if not stable:
-                            continue
-
-                        # Handle archive files, but only when containing just NZB files
-                        if ext in VALID_ARCHIVES:
-                            res, nzo_ids = process_nzb_archive_file(filename, path, catdir=catdir, url=path)
-                            if res == -1:
-                                self.suspected[path] = stat_tuple
-                            elif res == 0:
-                                self.error_reported = False
-                            else:
-                                self.ignored[path] = 1
-
-                        # Handle .nzb, .nzb.gz or gzip-disguised-as-nzb or .bz2
-                        elif ext == ".nzb" or filename.lower().endswith(".nzb.gz") or filename.lower().endswith(".nzb.bz2"):
-                            res, nzo_id = process_single_nzb(filename, path, catdir=catdir, url=path)
-                            if res < 0:
-                                self.suspected[path] = stat_tuple
-                            elif res == 0:
-                                self.error_reported = False
-                            else:
-                                self.ignored[path] = 1
-
-                        else:
-                            self.ignored[path] = 1
-
+                            else:
+                                stat_tuple = stat_tuple_tmp
+                        else:
+                            # Not stable
+                            continue
+
+                        # Add the NZB's
+                        res, _ = sabnzbd.add_nzbfile(path, catdir=catdir, keep=False)
+                        if res < 0:
+                            # Retry later, for example when we can't read the file
+                            self.suspected[path] = stat_tuple
+                        elif res == 0:
+                            self.error_reported = False
+                        else:
+                            self.ignored[path] = 1
+
+                # Remove files from the bookkeeping that are no longer on the disk
                 clean_file_list(self.ignored, folder, files)
                 clean_file_list(self.suspected, folder, files)
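
The scanner now leaves all type handling to add_nzbfile, so what remains here is bookkeeping plus the stability check. A self-contained sketch of that stability test, using the same 1-second/3-attempt pattern (illustration only):

    import os
    import time

    def wait_until_stable(path, checks=3, interval=1.0):
        # A file is considered fully written once its inode, size and mtime
        # stop changing between two consecutive looks.
        last = os.stat(path)
        for _ in range(checks):
            time.sleep(interval)
            current = os.stat(path)
            if (current.st_ino, current.st_size, current.st_mtime) == (last.st_ino, last.st_size, last.st_mtime):
                return True
            last = current
        return False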

1
sabnzbd/emailer.py

@@ -20,7 +20,6 @@ sabnzbd.emailer - Send notification emails
 """
 import smtplib
-import os
 import logging
 import re
 import time

36
sabnzbd/filesystem.py

@@ -28,11 +28,13 @@ import threading
 import time
 import fnmatch
 import stat
+import zipfile
 
 import sabnzbd
 from sabnzbd.decorators import synchronized
 from sabnzbd.constants import FUTURE_Q_FOLDER, JOB_ADMIN, GIGI
 from sabnzbd.encoding import correct_unknown_encoding
+from sabnzbd.utils import rarfile
 
 
 def get_ext(filename):
@@ -346,6 +348,40 @@ def same_file(a, b):
     return is_subfolder
 
 
+def is_archive(path):
+    """ Check if file in path is a ZIP, RAR or 7z file
+    :param path: path to file
+    :return: (status, zf, expected_extension)
+             status: -1==Error/Retry, 0==OK, 1==Ignore
+    """
+    if zipfile.is_zipfile(path):
+        try:
+            zf = zipfile.ZipFile(path)
+            return 0, zf, ".zip"
+        except:
+            logging.info(T("Cannot read %s"), path, exc_info=True)
+            return -1, None, ""
+    elif rarfile.is_rarfile(path):
+        try:
+            # Set path to tool to open it
+            rarfile.UNRAR_TOOL = sabnzbd.newsunpack.RAR_COMMAND
+            zf = rarfile.RarFile(path)
+            return 0, zf, ".rar"
+        except:
+            logging.info(T("Cannot read %s"), path, exc_info=True)
+            return -1, None, ""
+    elif sabnzbd.newsunpack.is_sevenfile(path):
+        try:
+            zf = sabnzbd.newsunpack.SevenZip(path)
+            return 0, zf, ".7z"
+        except:
+            logging.info(T("Cannot read %s"), path, exc_info=True)
+            return -1, None, ""
+    else:
+        logging.info("Archive %s is not a real archive!", os.path.basename(path))
+        return 1, None, ""
+
+
 def check_mount(path):
     """ Return False if volume isn't mounted on Linux or OSX
         Retry 6 times with an interval of 1 sec.
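
is_archive() moved here so that both the NZB parser and other callers can probe files without importing the dir scanner. A small usage sketch; the path is a placeholder:

    from sabnzbd.filesystem import is_archive

    # Returns (status, archive_object, expected_extension);
    # status -1 means error/retry, 0 means OK, 1 means "not an archive".
    status, archive, extension = is_archive("/path/to/upload.zip")
    if status == 0:
        print("Entries:", archive.namelist())
        archive.close()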

33
sabnzbd/misc.py

@@ -118,6 +118,18 @@ def cmp(x, y):
     return (x > y) - (x < y)
def name_to_cat(fname, cat=None):
""" Retrieve category from file name, but only if "cat" is None. """
if cat is None and fname.startswith("{{"):
n = fname.find("}}")
if n > 2:
cat = fname[2:n].strip()
fname = fname[n + 2 :].strip()
logging.debug("Job %s has category %s", fname, cat)
return fname, cat
 def cat_to_opts(cat, pp=None, script=None, priority=None):
     """ Derive options from category, if options not already defined.
         Specified options have priority over category-options.
@@ -428,6 +440,27 @@ def check_latest_version():
     sabnzbd.NEW_VERSION = (latest_testlabel, url_beta)
def upload_file_to_sabnzbd(url, fp):
""" Function for uploading nzbs to a running SABnzbd instance """
try:
fp = urllib.parse.quote_plus(fp)
url = "%s&mode=addlocalfile&name=%s" % (url, fp)
# Add local API-key if it wasn't already in the registered URL
apikey = cfg.api_key()
if apikey and "apikey" not in url:
url = "%s&apikey=%s" % (url, apikey)
if "apikey" not in url:
# Use alternative login method
username = cfg.username()
password = cfg.password()
if username and password:
url = "%s&ma_username=%s&ma_password=%s" % (url, username, password)
get_from_url(url)
except:
logging.error("Failed to upload file: %s", fp)
logging.info("Traceback: ", exc_info=True)
 def from_units(val):
     """ Convert K/M/G/T/P notation to float """
     val = str(val).strip().upper()
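
name_to_cat() now lives in misc so that both the parser and the scanner can share it. Its contract in one example:

    from sabnzbd.misc import name_to_cat

    # A "{{category}}" prefix selects the category, but only when no explicit
    # category was passed in; the prefix is stripped from the returned name.
    name, cat = name_to_cat("{{tv}}Example.Show.S01E01.nzb")
    # name == "Example.Show.S01E01.nzb", cat == "tv"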

2
sabnzbd/notifier.py

@@ -25,13 +25,11 @@ import os.path
 import logging
 import urllib.request, urllib.error, urllib.parse
 import http.client
-import subprocess
 import json
 from threading import Thread
 
 import sabnzbd
 import sabnzbd.cfg
-from sabnzbd.encoding import platform_btou
 from sabnzbd.filesystem import make_script_path
 from sabnzbd.newsunpack import external_script

219
sabnzbd/nzbparser.py

@@ -18,7 +18,8 @@
 """
 sabnzbd.nzbparser - Parse and import NZB files
 """
+import bz2
+import gzip
 import time
 import logging
 import hashlib
@@ -26,7 +27,10 @@ import xml.etree.ElementTree
 import datetime
 
 import sabnzbd
-from sabnzbd.encoding import utob
+from sabnzbd import filesystem, nzbstuff
+from sabnzbd.encoding import utob, correct_unknown_encoding
+from sabnzbd.filesystem import is_archive, get_filename
+from sabnzbd.misc import name_to_cat
 
 
 def nzbfile_parser(raw_data, nzo):
@@ -146,3 +150,214 @@ def nzbfile_parser(raw_data, nzo):
     if skipped_files:
         logging.warning(T("Failed to import %s files from %s"), skipped_files, nzo.filename)
def process_nzb_archive_file(
filename,
path,
pp=None,
script=None,
cat=None,
catdir=None,
keep=False,
priority=None,
nzbname=None,
reuse=False,
nzo_info=None,
dup_check=True,
url=None,
password=None,
nzo_id=None,
):
""" Analyse ZIP file and create job(s).
Accepts ZIP files with ONLY nzb/nfo/folder files in it.
returns (status, nzo_ids)
status: -1==Error, 0==OK, 1==Ignore
"""
nzo_ids = []
if catdir is None:
catdir = cat
filename, cat = name_to_cat(filename, catdir)
# Returns -1==Error/Retry, 0==OK, 1==Ignore
status, zf, extension = is_archive(path)
if status != 0:
return status, []
status = 1
names = zf.namelist()
nzbcount = 0
for name in names:
name = name.lower()
if name.endswith(".nzb"):
status = 0
nzbcount += 1
if status == 0:
if nzbcount != 1:
nzbname = None
for name in names:
if name.lower().endswith(".nzb"):
try:
data = correct_unknown_encoding(zf.read(name))
except OSError:
logging.error(T("Cannot read %s"), name, exc_info=True)
zf.close()
return -1, []
name = filesystem.setname_from_path(name)
if data:
nzo = None
try:
nzo = nzbstuff.NzbObject(
name,
pp=pp,
script=script,
nzb=data,
cat=cat,
url=url,
priority=priority,
nzbname=nzbname,
nzo_info=nzo_info,
reuse=reuse,
dup_check=dup_check,
)
if not nzo.password:
nzo.password = password
except (TypeError, ValueError):
# Duplicate or empty, ignore
pass
except:
# Something else is wrong, show error
logging.error(T("Error while adding %s, removing"), name, exc_info=True)
if nzo:
if nzo_id:
# Re-use existing nzo_id, when a "future" job gets it payload
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False, delete_all_data=False)
nzo.nzo_id = nzo_id
nzo_id = None
nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo))
nzo.update_rating()
zf.close()
try:
if not keep:
filesystem.remove_file(path)
except OSError:
logging.error(T("Error removing %s"), filesystem.clip_path(path))
logging.info("Traceback: ", exc_info=True)
else:
zf.close()
status = 1
return status, nzo_ids
def process_single_nzb(
filename,
path,
pp=None,
script=None,
cat=None,
catdir=None,
keep=False,
priority=None,
nzbname=None,
reuse=False,
nzo_info=None,
dup_check=True,
url=None,
password=None,
nzo_id=None,
):
""" Analyze file and create a job from it
Supports NZB, NZB.BZ2, NZB.GZ and GZ.NZB-in-disguise
returns (status, nzo_ids)
status: -2==Error/retry, -1==Error, 0==OK
"""
nzo_ids = []
if catdir is None:
catdir = cat
try:
with open(path, "rb") as nzb_file:
check_bytes = nzb_file.read(2)
if check_bytes == b"\x1f\x8b":
# gzip file or gzip in disguise
name = filename.replace(".nzb.gz", ".nzb")
nzb_reader_handler = gzip.GzipFile
elif check_bytes == b"BZ":
# bz2 file or bz2 in disguise
name = filename.replace(".nzb.bz2", ".nzb")
nzb_reader_handler = bz2.BZ2File
else:
name = filename
nzb_reader_handler = open
# Let's get some data and hope we can decode it
with nzb_reader_handler(path, "rb") as nzb_file:
data = correct_unknown_encoding(nzb_file.read())
except:
logging.warning(T("Cannot read %s"), filesystem.clip_path(path))
logging.info("Traceback: ", exc_info=True)
return -2, nzo_ids
if name:
name, cat = name_to_cat(name, catdir)
# The name is used as the name of the folder, so sanitize it using folder specific santization
if not nzbname:
# Prevent embedded password from being damaged by sanitize and trimming
nzbname = get_filename(name)
try:
nzo = nzbstuff.NzbObject(
name,
pp=pp,
script=script,
nzb=data,
cat=cat,
url=url,
priority=priority,
nzbname=nzbname,
nzo_info=nzo_info,
reuse=reuse,
dup_check=dup_check,
)
if not nzo.password:
nzo.password = password
except TypeError:
# Duplicate, ignore
if nzo_id:
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False)
nzo = None
except ValueError:
# Empty
return 1, nzo_ids
except:
if data.find("<nzb") >= 0 > data.find("</nzb"):
# Looks like an incomplete file, retry
return -2, nzo_ids
else:
# Something else is wrong, show error
logging.error(T("Error while adding %s, removing"), name, exc_info=True)
return -1, nzo_ids
if nzo:
if nzo_id:
# Re-use existing nzo_id, when a "future" job gets it payload
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False, delete_all_data=False)
nzo.nzo_id = nzo_id
nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo, quiet=reuse))
nzo.update_rating()
try:
if not keep:
filesystem.remove_file(path)
except OSError:
# Job was still added to the queue, so throw error but don't report failed add
logging.error(T("Error removing %s"), filesystem.clip_path(path))
logging.info("Traceback: ", exc_info=True)
return 0, nzo_ids
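
process_single_nzb() keeps the old trick of sniffing the first two bytes instead of trusting the extension, so gzip or bz2 files that are merely named .nzb still import. The detection in isolation (illustration only):

    import bz2
    import gzip

    def open_nzb_payload(path):
        # Standalone sketch of the reader selection above: the magic bytes
        # decide between gzip, bz2 and a plain NZB file.
        with open(path, "rb") as nzb_file:
            check_bytes = nzb_file.read(2)
        if check_bytes == b"\x1f\x8b":
            return gzip.GzipFile(path, "rb")
        if check_bytes == b"BZ":
            return bz2.BZ2File(path, "rb")
        return open(path, "rb")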

39
sabnzbd/nzbqueue.py

@@ -29,6 +29,7 @@ import sabnzbd
 from sabnzbd.nzbstuff import NzbObject
 from sabnzbd.misc import exit_sab, cat_to_opts, int_conv, caller_name, cmp, safe_lower
 from sabnzbd.filesystem import get_admin_path, remove_all, globber_full, remove_file
+from sabnzbd.nzbparser import process_single_nzb
 from sabnzbd.panic import panic_queue
 import sabnzbd.database as database
 from sabnzbd.decorators import NzbQueueLocker
@@ -55,7 +56,6 @@ import sabnzbd.downloader
 from sabnzbd.assembler import Assembler, file_has_articles
 import sabnzbd.notifier as notifier
 from sabnzbd.bpsmeter import BPSMeter
-from sabnzbd.dirscanner import process_single_nzb
 
 
 class NzbQueue:
@@ -175,40 +175,31 @@ class NzbQueue:
         name = os.path.basename(folder)
         path = os.path.join(folder, JOB_ADMIN)
-        if hasattr(new_nzb, "filename"):
-            filename = new_nzb.filename
+
+        # If Retry was used and a new NZB was uploaded
+        if getattr(new_nzb, "filename", None):
+            remove_all(path, "*.gz", keep_folder=True)
+            logging.debug("Repair job %s with new NZB (%s)", name, new_nzb.filename)
+            _, nzo_ids = sabnzbd.add_nzbfile(new_nzb, nzbname=name, reuse=True, password=password)
+            nzo_id = nzo_ids[0]
         else:
-            filename = ""
-
-        if not filename:
             # Was this file already post-processed?
             verified = sabnzbd.load_data(VERIFIED_FILE, path, remove=False)
-
+            filenames = []
             if not verified or not all(verified[x] for x in verified):
-                filename = globber_full(path, "*.gz")
-                if len(filename) > 0:
-                    logging.debug("Repair job %s by re-parsing stored NZB", name)
-                    nzo_id = sabnzbd.add_nzbfile(
-                        filename[0],
-                        pp=None,
-                        script=None,
-                        cat=None,
-                        priority=None,
-                        nzbname=name,
-                        reuse=True,
-                        password=password,
-                    )[1]
-                else:
-                    logging.debug("Repair job %s without stored NZB", name)
-                    nzo = NzbObject(name, pp=None, script=None, nzb="", cat=None, priority=None, nzbname=name, reuse=True)
-                    nzo.password = password
-                    self.add(nzo)
-                    nzo_id = nzo.nzo_id
-        else:
-            remove_all(path, "*.gz")
-            logging.debug("Repair job %s with new NZB (%s)", name, filename)
-            nzo_id = sabnzbd.add_nzbfile(
-                new_nzb, pp=None, script=None, cat=None, priority=None, nzbname=name, reuse=True, password=password
-            )[1]
+                filenames = globber_full(path, "*.gz")
+
+            if filenames:
+                logging.debug("Repair job %s by re-parsing stored NZB", name)
+                _, nzo_ids = sabnzbd.add_nzbfile(filenames[0], nzbname=name, reuse=True, password=password)
+                nzo_id = nzo_ids[0]
+            else:
+                logging.debug("Repair job %s without stored NZB", name)
+                nzo = NzbObject(name, nzbname=name, reuse=True)
+                nzo.password = password
+                self.add(nzo)
+                nzo_id = nzo.nzo_id
 
         return nzo_id
 
 
 @NzbQueueLocker
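
This is where the "Retry with a new NZB" bug was fixed: only an upload that actually carries a filename replaces the stored copy. The decision tree, sketched with illustrative names:

    def choose_repair_source(new_nzb, stored_nzbs):
        # Illustration of the branching above: a freshly uploaded NZB wins only
        # when the upload object really has a filename; otherwise the stored
        # *.gz copy is re-parsed, and as a last resort the job is rebuilt from
        # the files that are already on disk.
        if getattr(new_nzb, "filename", None):
            return "use uploaded NZB"
        if stored_nzbs:
            return "re-parse stored NZB"
        return "rebuild from existing files"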

17
sabnzbd/nzbstuff.py

@@ -563,8 +563,8 @@ class NzbObject(TryList):
     def __init__(
         self,
         filename,
-        pp,
-        script,
+        pp=None,
+        script=None,
         nzb=None,
         futuretype=False,
         cat=None,
@@ -1197,7 +1197,7 @@ class NzbObject(TryList):
         self.renames = renames
 
         # Looking for the longest name first, minimizes the chance on a mismatch
-        files.sort(key=lambda x: len(x))
+        files.sort(key=len)
 
         # The NZFs should be tried shortest first, to improve the chance on a proper match
         nzfs = self.files[:]
@@ -1227,7 +1227,7 @@ class NzbObject(TryList):
         # Create an NZF for each remaining existing file
         try:
             for filename in files:
-                # Create NZB's using basic information
+                # Create NZO's using basic information
                 filepath = os.path.join(wdir, filename)
                 if os.path.exists(filepath):
                     tup = os.stat(filepath)
@@ -1248,7 +1248,7 @@ class NzbObject(TryList):
                     self.handle_par2(nzf, filepath)
                     logging.info("Existing file %s added to job", filename)
         except:
-            logging.debug("Bad NZB handling")
+            logging.error(T("Error importing %s"), self.final_name)
             logging.info("Traceback: ", exc_info=True)
 
     @property
@@ -1778,8 +1778,11 @@ class NzbObject(TryList):
         # Remove all cached files
         ArticleCache.do.purge_articles(self.saved_articles)
 
-        # Delete all, or just basic?
-        if delete_all_data:
+        # Delete all, or just basic files
+        if self.futuretype:
+            # Remove temporary file left from URL-fetches
+            sabnzbd.remove_data(self.nzo_id, self.workpath)
+        elif delete_all_data:
             remove_all(self.downpath, recursive=True)
         else:
             # We remove any saved articles and save the renames file
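
Because pp and script now default to None, a bare repair object can be built from just a name. Sketch only; this is meaningful inside a running SABnzbd, and the name and password are placeholders:

    from sabnzbd.nzbstuff import NzbObject

    nzo = NzbObject("Example job", nzbname="Example job", reuse=True)
    nzo.password = "secret"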

20
sabnzbd/osxmenu.py

@@ -34,7 +34,7 @@ import cherrypy
 
 import sabnzbd
 import sabnzbd.cfg
-from sabnzbd.filesystem import get_filename, get_ext, diskspace
+from sabnzbd.filesystem import diskspace
 from sabnzbd.misc import to_units
 from sabnzbd.constants import VALID_ARCHIVES, VALID_NZB_FILES, MEBI, Status
 from sabnzbd.panic import launch_a_browser
@@ -45,7 +45,6 @@ from sabnzbd.nzbqueue import NzbQueue
 import sabnzbd.config as config
 import sabnzbd.scheduler as scheduler
 import sabnzbd.downloader
-import sabnzbd.dirscanner as dirscanner
 from sabnzbd.bpsmeter import BPSMeter
 
 status_icons = {
@@ -802,18 +801,11 @@ class SABnzbdDelegate(NSObject):
     def application_openFiles_(self, nsapp, filenames):
         # logging.info('[osx] file open')
         # logging.info('[osx] file : %s' % (filenames))
-        for name in filenames:
-            logging.info("[osx] receiving from OSX : %s", name)
-            if os.path.exists(name):
-                fn = get_filename(name)
-                # logging.info('[osx] filename : %s' % (fn))
-                if fn:
-                    if get_ext(name) in VALID_ARCHIVES:
-                        # logging.info('[osx] archive')
-                        dirscanner.process_nzb_archive_file(fn, name, keep=True)
-                    elif get_ext(name) in VALID_NZB_FILES:
-                        # logging.info('[osx] nzb')
-                        dirscanner.process_single_nzb(fn, name, keep=True)
+        for filename in filenames:
+            logging.info("[osx] receiving from OSX : %s", filename)
+            if os.path.exists(filename):
+                if sabnzbd.filesystem.get_ext(filename) in VALID_ARCHIVES + VALID_NZB_FILES:
+                    sabnzbd.add_nzbfile(filename, keep=True)
         # logging.info('opening done')
 
     def applicationShouldTerminate_(self, sender):

25
sabnzbd/postproc.py

@@ -59,6 +59,8 @@ from sabnzbd.filesystem import (
     setname_from_path,
     create_all_dirs,
     get_unique_filename,
+    get_ext,
+    get_filename,
 )
 from sabnzbd.sorting import Sorter
 from sabnzbd.constants import (
@@ -71,9 +73,9 @@ from sabnzbd.constants import (
     Status,
     VERIFIED_FILE,
 )
+from sabnzbd.nzbparser import process_single_nzb
 from sabnzbd.rating import Rating
 import sabnzbd.emailer as emailer
-import sabnzbd.dirscanner as dirscanner
 import sabnzbd.downloader
 import sabnzbd.config as config
 import sabnzbd.cfg as cfg
@@ -464,9 +466,7 @@ def process_job(nzo):
         # Check if this is an NZB-only download, if so redirect to queue
         # except when PP was Download-only
         if flag_repair:
-            nzb_list = nzb_redirect(
-                tmp_workdir_complete, nzo.final_name, nzo.pp, script, nzo.cat, priority=nzo.priority
-            )
+            nzb_list = nzb_redirect(tmp_workdir_complete, nzo.final_name, nzo.pp, script, nzo.cat, nzo.priority)
         else:
             nzb_list = None
         if nzb_list:
@@ -1040,8 +1040,8 @@ def nzb_redirect(wdir, nzbname, pp, script, cat, priority):
     """
     files = recursive_listdir(wdir)
-    for file_ in files:
-        if os.path.splitext(file_)[1].lower() != ".nzb":
+    for nzb_file in files:
+        if get_ext(nzb_file) != ".nzb":
             return None
 
     # For multiple NZBs, cannot use the current job name
@@ -1050,14 +1050,13 @@ def nzb_redirect(wdir, nzbname, pp, script, cat, priority):
     # Process all NZB files
     for nzb_file in files:
-        dirscanner.process_single_nzb(
-            os.path.split(nzb_file)[1],
-            file_,
-            pp,
-            script,
-            cat,
+        process_single_nzb(
+            get_filename(nzb_file),
+            nzb_file,
+            pp=pp,
+            script=script,
+            cat=cat,
             priority=priority,
-            keep=False,
             dup_check=False,
             nzbname=nzbname,
         )
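
nzb_redirect() still only kicks in when the finished job contains nothing but NZB files; the guard in isolation (helper name is illustrative):

    from sabnzbd.filesystem import get_ext

    def contains_only_nzbs(files):
        # Mirrors the loop above: one non-.nzb file disables the redirect.
        return all(get_ext(nzb_file) == ".nzb" for nzb_file in files)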

3
sabnzbd/sabtraylinux.py

@@ -46,7 +46,6 @@ import sabnzbd.scheduler as scheduler
 from sabnzbd.downloader import Downloader
 import sabnzbd.cfg as cfg
 from sabnzbd.misc import to_units
-from sabnzbd.utils.upload import add_local
 
 
 class StatusIcon(Thread):
@@ -171,7 +170,7 @@ class StatusIcon(Thread):
         response = dialog.run()
         if response == Gtk.ResponseType.OK:
             for filename in dialog.get_filenames():
-                add_local(filename)
+                sabnzbd.add_nzbfile(filename)
             dialog.destroy()
 
     def opencomplete(self, icon):

73
sabnzbd/urlgrabber.py

@@ -32,10 +32,9 @@ from threading import Thread
 import base64
 
 import sabnzbd
-from sabnzbd.constants import DEF_TIMEOUT, FUTURE_Q_FOLDER, VALID_NZB_FILES, Status
+from sabnzbd.constants import DEF_TIMEOUT, FUTURE_Q_FOLDER, VALID_NZB_FILES, Status, VALID_ARCHIVES
 import sabnzbd.misc as misc
 import sabnzbd.filesystem
-import sabnzbd.dirscanner as dirscanner
 from sabnzbd.nzbqueue import NzbQueue
 from sabnzbd.postproc import PostProcessor
 import sabnzbd.cfg as cfg
@@ -44,7 +43,6 @@ import sabnzbd.notifier as notifier
 from sabnzbd.encoding import ubtou, utob
 
-_BAD_GZ_HOSTS = (".zip", "nzbsa.co.za", "newshost.za.net")
 _RARTING_FIELDS = (
     "x-rating-id",
     "x-rating-url",
@@ -132,7 +130,6 @@ class URLGrabber(Thread):
                 filename = None
                 category = None
-                gzipped = False
                 nzo_info = {}
                 wait = 0
                 retry = True
@@ -170,8 +167,6 @@
                             value = fetch_request.headers[hdr]
                         except:
                             continue
-                        if item in ("content-encoding",) and value == "gzip":
-                            gzipped = True
                         if item in ("category_id", "x-dnzb-category"):
                             category = value
                         elif item in ("x-dnzb-moreinfo",):
@@ -224,7 +219,10 @@
                     # URL was redirected, maybe the redirect has better filename?
                     # Check if the original URL has extension
-                    if url != fetch_request.url and sabnzbd.filesystem.get_ext(filename) not in VALID_NZB_FILES:
+                    if (
+                        url != fetch_request.url
+                        and sabnzbd.filesystem.get_ext(filename) not in VALID_NZB_FILES + VALID_ARCHIVES
+                    ):
                         filename = os.path.basename(urllib.parse.unquote(fetch_request.url))
                 elif "&nzbname=" in filename:
                     # Sometimes the filename contains the full URL, duh!
@@ -239,8 +237,6 @@
                     nzbname = future_nzo.custom_name
 
                 # process data
-                if gzipped:
-                    filename += ".gz"
                 if not data:
                     try:
                         data = fetch_request.read()
@@ -256,15 +252,18 @@
                     # Sanitize filename first (also removing forbidden Windows-names)
                     filename = sabnzbd.filesystem.sanitize_filename(filename)
 
+                    # If no filename, make one
+                    if not filename:
+                        filename = sabnzbd.get_new_id("url", os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER))
+
                     # Write data to temp file
                     path = os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER, filename)
                     with open(path, "wb") as temp_nzb:
                         temp_nzb.write(data)
 
                     # Check if nzb file
-                    if sabnzbd.filesystem.get_ext(filename) in VALID_NZB_FILES:
-                        res = dirscanner.process_single_nzb(
-                            filename,
+                    if sabnzbd.filesystem.get_ext(filename) in VALID_ARCHIVES + VALID_NZB_FILES:
+                        res, _ = sabnzbd.add_nzbfile(
                             path,
                             pp=pp,
                             script=script,
@@ -275,49 +274,26 @@
                             url=future_nzo.url,
                             keep=False,
                             nzo_id=future_nzo.nzo_id,
-                        )[0]
-                        if res:
-                            if res == -2:
-                                logging.info("Incomplete NZB, retry after 5 min %s", url)
-                                when = 300
-                            elif res == -1:
-                                # Error, but no reason to retry. Warning is already given
-                                NzbQueue.do.remove(future_nzo.nzo_id, add_to_history=False)
-                                continue
-                            else:
-                                logging.info("Unknown error fetching NZB, retry after 2 min %s", url)
-                                when = 120
-                            self.add(url, future_nzo, when)
-                    else:
-                        # Check if a supported archive
-                        status, zf, exp_ext = dirscanner.is_archive(path)
-                        if status == 0:
-                            if sabnzbd.filesystem.get_ext(filename) not in (".rar", ".zip", ".7z"):
-                                filename = filename + exp_ext
-                                os.rename(path, path + exp_ext)
-                                path = path + exp_ext
-
-                            dirscanner.process_nzb_archive_file(
-                                filename,
-                                path,
-                                pp,
-                                script,
-                                cat,
-                                priority=priority,
-                                nzbname=nzbname,
-                                url=future_nzo.url,
-                                keep=False,
-                                nzo_id=future_nzo.nzo_id,
-                            )
-                        else:
-                            # Not a supported filetype, not an nzb (text/html ect)
-                            try:
-                                os.remove(fetch_request)
-                            except:
-                                pass
-                            logging.info("Unknown filetype when fetching NZB, retry after 30s %s", url)
-                            self.add(url, future_nzo, 30)
+                        )
+
+                        # -2==Error/retry, -1==Error, 0==OK, 1==Empty
+                        if res == -2:
+                            logging.info("Incomplete NZB, retry after 5 min %s", url)
+                            self.add(url, future_nzo, when=300)
+                        elif res == -1:
+                            # Error already thrown
+                            self.fail_to_history(future_nzo, url)
+                        elif res == 1:
+                            # No NZB-files inside archive
+                            self.fail_to_history(future_nzo, url, T("Empty NZB file %s") % filename)
+                    else:
+                        logging.info("Unknown filetype when fetching NZB, retry after 30s %s", url)
+                        self.add(url, future_nzo, 30)
+
+                    # Always clean up what we wrote to disk
+                    try:
+                        sabnzbd.filesystem.remove_file(path)
+                    except:
+                        pass
             except:
                 logging.error(T("URLGRABBER CRASHED"), exc_info=True)
                 logging.debug("URLGRABBER Traceback: ", exc_info=True)
@@ -351,7 +327,7 @@
         nzo.cat, _, nzo.script, _ = misc.cat_to_opts(nzo.cat, script=nzo.script)
 
         # Add to history and run script if desired
-        NzbQueue.do.remove(nzo.nzo_id, add_to_history=False, delete_all_data=False)
+        NzbQueue.do.remove(nzo.nzo_id, add_to_history=False)
         PostProcessor.do.process(nzo)
@@ -373,7 +349,6 @@ def _build_request(url):
     # Add headers
     req.add_header("User-Agent", "SABnzbd+/%s" % sabnzbd.version.__version__)
-    if not any(item in url for item in _BAD_GZ_HOSTS):
     req.add_header("Accept-encoding", "gzip")
     if user_passwd:
         req.add_header("Authorization", "Basic " + ubtou(base64.b64encode(utob(user_passwd))).strip())

67
sabnzbd/utils/upload.py

@ -1,67 +0,0 @@
#!/usr/bin/python3 -OO
# Copyright 2009-2020 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
sabnzbd.utils.upload - File association functions for adding nzb files to sabnzbd
"""
import os
import logging
import urllib.request
import urllib.parse
import urllib.error
import sabnzbd.cfg as cfg
from sabnzbd.filesystem import get_ext, get_filename
from sabnzbd.constants import VALID_ARCHIVES, VALID_NZB_FILES
from sabnzbd.dirscanner import process_nzb_archive_file, process_single_nzb
from sabnzbd.misc import get_from_url
def upload_file(url, fp):
""" Function for uploading nzbs to a running SABnzbd instance """
try:
fp = urllib.parse.quote_plus(fp)
url = "%s&mode=addlocalfile&name=%s" % (url, fp)
# Add local API-key if it wasn't already in the registered URL
apikey = cfg.api_key()
if apikey and "apikey" not in url:
url = "%s&apikey=%s" % (url, apikey)
if "apikey" not in url:
# Use alternative login method
username = cfg.username()
password = cfg.password()
if username and password:
url = "%s&ma_username=%s&ma_password=%s" % (url, username, password)
get_from_url(url)
except:
logging.error("Failed to upload file: %s", fp)
logging.info("Traceback: ", exc_info=True)
def add_local(f):
""" Function for easily adding nzb/zip/rar/nzb.gz to SABnzbd """
if os.path.exists(f):
fn = get_filename(f)
if fn:
if get_ext(fn) in VALID_ARCHIVES:
process_nzb_archive_file(fn, f, keep=True)
elif get_ext(fn) in VALID_NZB_FILES:
process_single_nzb(fn, f, keep=True)
else:
logging.error("Filename not found: %s", f)
else:
logging.error("File not found: %s", f)