Browse Source

Py3: Reworking of removal of files and other tweaks

pull/1278/head
Safihre 6 years ago
parent
commit
d2e0ebebc9
  1. 17
      sabnzbd/api.py
  2. 4
      sabnzbd/dirscanner.py
  3. 116
      sabnzbd/filesystem.py
  4. 2
      sabnzbd/interface.py
  5. 2
      sabnzbd/nzbparser.py
  6. 111
      sabnzbd/nzbqueue.py
  7. 61
      sabnzbd/nzbstuff.py
  8. 27
      sabnzbd/postproc.py
  9. 11
      sabnzbd/urlgrabber.py

17
sabnzbd/api.py

@ -163,8 +163,8 @@ def _api_queue_delete(output, value, kwargs):
return report(output, keyword='', data={'status': bool(removed), 'nzo_ids': removed})
elif value:
items = value.split(',')
del_files = int_conv(kwargs.get('del_files'))
removed = NzbQueue.do.remove_multiple(items, del_files)
delete_all_data = int_conv(kwargs.get('del_files'))
removed = NzbQueue.do.remove_multiple(items, delete_all_data=delete_all_data)
return report(output, keyword='', data={'status': bool(removed), 'nzo_ids': removed})
else:
return report(output, _MSG_NO_VALUE)
@ -1677,7 +1677,6 @@ def build_history(start=None, limit=None,search=None, failed_only=0, categories=
# Unreverse the queue
items.reverse()
retry_folders = []
for item in items:
item['size'] = format_bytes(item['bytes'])
@ -1686,19 +1685,11 @@ def build_history(start=None, limit=None,search=None, failed_only=0, categories=
path = item.get('path', '')
item['retry'] = int(bool(item.get('status') == Status.FAILED and
path and
path not in retry_folders and
same_file(cfg.download_dir.get_path(), path) and
os.path.exists(path)) and
not bool(globber(os.path.join(path, JOB_ADMIN), 'SABnzbd_n*'))
)
item['retry'] = int_conv(item.get('status') == Status.FAILED and path and os.path.exists(path))
# Retry of failed URL-fetch
if item['report'] == 'future':
item['retry'] = True
if item['retry']:
retry_folders.append(path)
if Rating.do:
rating = Rating.do.get_rating_by_nzo(item['nzo_id'])
else:

4
sabnzbd/dirscanner.py

@ -181,7 +181,7 @@ def process_nzb_archive_file(
if nzo:
if nzo_id:
# Re-use existing nzo_id, when a "future" job gets it payload
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False)
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False, delete_all_data=False)
nzo.nzo_id = nzo_id
nzo_id = None
nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo))
@ -296,7 +296,7 @@ def process_single_nzb(
if nzo:
if nzo_id:
# Re-use existing nzo_id, when a "future" job gets it payload
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False)
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False, delete_all_data=False)
nzo.nzo_id = nzo_id
nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo, quiet=reuse))
nzo.update_rating()

116
sabnzbd/filesystem.py

@ -255,46 +255,13 @@ def is_obfuscated_filename(filename):
return os.path.splitext(filename)[1] == ""
def create_all_dirs(path, umask=False):
    """ Create all required path elements and set umask on all
        Return True if the last element could be made or already exists.

        The old implementation hand-split the path on "/" (broken for
        Windows drive letters) and returned False on Windows when the
        path already existed; os.makedirs(exist_ok=True) handles both
        platforms and the pre-existing case correctly.
    """
    try:
        os.makedirs(path, exist_ok=True)
    except OSError:
        # Creation failed (permissions, invalid path, file in the way)
        logging.info("Failed to create directory %s", path, exc_info=True)
        return False
    if umask:
        # Apply the user-configured umask, but always keep owner rwx
        # so SABnzbd itself can still use the directory
        mask = sabnzbd.cfg.umask()
        if mask:
            try:
                os.chmod(path, int(mask, 8) | 0o700)
            except OSError:
                # Best-effort only, same as before: ignore chmod failures
                pass
    return True
def real_path(loc, path):
""" When 'path' is relative, return normalized join of 'loc' and 'path'
When 'path' is absolute, return normalized path
A path starting with ~ will be located in the user's Home folder
"""
# The Windows part is a bit convoluted because
# os.path.join() doesn't behave the same for all Python versions
# C: and C:\ are 2 different things
if path:
path = path.strip()
else:
@ -316,13 +283,10 @@ def real_path(loc, path):
path = os.path.join(loc, path)
elif path[0] != "/":
path = os.path.join(loc, path)
# Always use long-path notation
path = long_path(path)
else:
path = loc
return os.path.normpath(os.path.abspath(path))
return long_path(os.path.normpath(os.path.abspath(path)))
def create_real_path(name, loc, path, umask=False, writable=True):
@ -544,6 +508,27 @@ DIR_LOCK = threading.RLock()
@synchronized(DIR_LOCK)
def create_all_dirs(path, umask=False):
    """ Create all required path elements and set umask on all
        The umask argument is ignored on Windows
        Return path if elements could be made or exists, False otherwise
    """
    try:
        # Use custom mask if desired
        mask = 0o700
        if umask and sabnzbd.cfg.umask():
            mask = int(sabnzbd.cfg.umask(), 8)
        # Bug fix: the format string has two placeholders, so path and
        # mask must be passed as two separate lazy arguments, not as a
        # single tuple (which made logging raise a formatting error)
        logging.info("Creating directories: %s (mask=%s)", path, mask)
        # exist_ok: an already-present directory counts as success
        os.makedirs(path, mode=mask, exist_ok=True)
        return path
    except OSError:
        logging.error(T("Failed making (%s)"), clip_path(path), exc_info=True)
        return False
@synchronized(DIR_LOCK)
def get_unique_path(dirpath, n=0, create_dir=True):
""" Determine a unique folder or filename """
@ -556,7 +541,7 @@ def get_unique_path(dirpath, n=0, create_dir=True):
if not os.path.exists(path):
if create_dir:
return create_dirs(path)
return create_all_dirs(path, umask=True)
else:
return path
else:
@ -579,17 +564,6 @@ def get_unique_filename(path):
@synchronized(DIR_LOCK)
def create_dirs(dirpath):
    """ Create directory tree, obeying permissions """
    # Nothing to do when the tree is already there
    if os.path.exists(dirpath):
        return dirpath
    logging.info("Creating directories: %s", dirpath)
    if create_all_dirs(dirpath, True):
        return dirpath
    logging.error(T("Failed making (%s)"), clip_path(dirpath))
    return None
@synchronized(DIR_LOCK)
def recursive_listdir(dir):
""" List all files in dirs and sub-dirs """
filelist = []
@ -630,8 +604,7 @@ def move_to_path(path, new_path):
# Cannot rename, try copying
logging.debug("File could not be renamed, trying copying: %s", path)
try:
if not os.path.exists(os.path.dirname(new_path)):
create_dirs(os.path.dirname(new_path))
create_all_dirs(os.path.dirname(new_path), umask=True)
shutil.copyfile(path, new_path)
os.remove(path)
except:
@ -780,25 +753,32 @@ def remove_dir(path):
def remove_all(path, pattern="*", keep_folder=False, recursive=False):
""" Remove folder and all its content (optionally recursive) """
if os.path.exists(path):
files = globber_full(path, pattern)
if pattern == "*" and not sabnzbd.WIN32:
files.extend(globber_full(path, ".*"))
# Fast-remove the whole tree if recursive
if pattern == "*" and not keep_folder and recursive:
logging.debug("Removing dir recursively %s", path)
try:
shutil.rmtree(path)
except:
logging.info("Cannot remove folder %s", path, exc_info=True)
else:
# Get files based on pattern
files = globber_full(path, pattern)
if pattern == "*" and not sabnzbd.WIN32:
files.extend(globber_full(path, ".*"))
for f in files:
if os.path.isfile(f):
for f in files:
if os.path.isfile(f):
try:
remove_file(f)
except:
logging.info("Cannot remove file %s", f, exc_info=True)
elif recursive:
remove_all(f, pattern, False, True)
if not keep_folder:
try:
logging.debug("Removing file %s", f)
os.remove(f)
remove_dir(path)
except:
logging.info("Cannot remove file %s", f)
elif recursive:
remove_all(f, pattern, False, True)
if not keep_folder:
try:
logging.debug("Removing dir %s", path)
os.rmdir(path)
except:
logging.info("Cannot remove folder %s", path)
logging.info("Cannot remove folder %s", path, exc_info=True)
##############################################################################

2
sabnzbd/interface.py

@ -911,7 +911,7 @@ class QueuePage:
uid = kwargs.get('uid')
del_files = int_conv(kwargs.get('del_files'))
if uid:
NzbQueue.do.remove(uid, False, keep_basic=not del_files, del_files=del_files)
NzbQueue.do.remove(uid, add_to_history=False, delete_all_data=del_files)
raise queueRaiser(self.__root, kwargs)
@secured_expose(check_session_key=True)

2
sabnzbd/nzbparser.py

@ -84,7 +84,7 @@ def nzbfile_parser(raw_data, nzo):
# Update hash
md5sum.update(utob(article_id))
# Dubplicate parts?
# Duplicate parts?
if partnum in article_db:
if article_id != article_db[partnum][0]:
logging.info(

111
sabnzbd/nzbqueue.py

@ -27,7 +27,7 @@ import functools
import sabnzbd
from sabnzbd.nzbstuff import NzbObject
from sabnzbd.misc import exit_sab, cat_to_opts, int_conv, caller_name, cmp
from sabnzbd.misc import exit_sab, cat_to_opts, int_conv, caller_name, cmp, safe_lower
from sabnzbd.filesystem import get_admin_path, remove_all, globber_full, remove_file
from sabnzbd.panic import panic_queue
import sabnzbd.database as database
@ -149,10 +149,9 @@ class NzbQueue:
def repair_job(self, folder, new_nzb=None, password=None):
""" Reconstruct admin for a single job folder, optionally with new NZB """
def all_verified(path):
""" Return True when all sets have been successfully verified """
verified = sabnzbd.load_data(VERIFIED_FILE, path, remove=False) or {'x': False}
return all(verified[x] for x in verified)
# Check if folder exists
if not folder or not os.path.exists(folder):
return None
name = os.path.basename(folder)
path = os.path.join(folder, JOB_ADMIN)
@ -161,10 +160,13 @@ class NzbQueue:
else:
filename = ''
if not filename:
if not all_verified(path):
# Was this file already post-processed?
verified = sabnzbd.load_data(VERIFIED_FILE, path, remove=False)
if not verified or not all(verified[x] for x in verified):
filename = globber_full(path, '*.gz')
if len(filename) > 0:
logging.debug('Repair job %s by reparsing stored NZB', name)
logging.debug('Repair job %s by re-parsing stored NZB', name)
nzo_id = sabnzbd.add_nzbfile(filename[0], pp=None, script=None, cat=None, priority=None, nzbname=name,
reuse=True, password=password)[1]
else:
@ -178,9 +180,9 @@ class NzbQueue:
logging.debug('Repair job %s with new NZB (%s)', name, filename)
nzo_id = sabnzbd.add_nzbfile(new_nzb, pp=None, script=None, cat=None, priority=None, nzbname=name,
reuse=True, password=password)[1]
return nzo_id
@NzbQueueLocker
def send_back(self, nzo):
""" Send back job to queue after successful pre-check """
try:
@ -189,36 +191,12 @@ class NzbQueue:
logging.debug('Failed to find NZB file after pre-check (%s)', nzo.nzo_id)
return
res, nzo_ids = process_single_nzb(nzo.work_name + '.nzb', nzb_path, keep=True, reuse=True)
# Need to remove it first, otherwise it might still be downloading
self.remove(nzo, add_to_history=False, cleanup=False)
res, nzo_ids = process_single_nzb(nzo.work_name, nzb_path, keep=True, reuse=True, nzo_id=nzo.nzo_id)
if res == 0 and nzo_ids:
nzo = self.replace_in_q(nzo, nzo_ids[0])
# Reset reuse flag to make pause/abort on encryption possible
nzo.reuse = False
@NzbQueueLocker
def replace_in_q(self, nzo, nzo_id):
""" Replace nzo by new in at the same spot in the queue, destroy nzo """
# Must be a separate function from "send_back()", due to the required queue-lock
try:
old_id = nzo.nzo_id
new_nzo = self.get_nzo(nzo_id)
pos = self.__nzo_list.index(new_nzo)
targetpos = self.__nzo_list.index(nzo)
self.__nzo_list[targetpos] = new_nzo
self.__nzo_list.pop(pos)
# Reuse the old nzo_id
new_nzo.nzo_id = old_id
# Therefore: remove the new nzo_id
del self.__nzo_table[nzo_id]
# And attach the new nzo to the old nzo_id
self.__nzo_table[old_id] = new_nzo
logging.info('Replacing in queue %s by %s', nzo.final_name, new_nzo.final_name)
del nzo
return new_nzo
except:
logging.error(T('Failed to restart NZB after pre-check (%s)'), nzo.nzo_id)
logging.info("Traceback: ", exc_info=True)
return nzo
self.__nzo_table[nzo_ids[0]].reuse = False
@NzbQueueLocker
def save(self, save_nzo=None):
@ -245,7 +223,7 @@ class NzbQueue:
def generate_future(self, msg, pp=None, script=None, cat=None, url=None, priority=NORMAL_PRIORITY, nzbname=None):
""" Create and return a placeholder nzo object """
logging.debug('Creating placeholder NZO')
future_nzo = NzbObject(msg, pp, script, None, True, cat=cat, url=url, priority=priority, nzbname=nzbname, status=Status.GRABBING)
future_nzo = NzbObject(msg, pp, script, None, futuretype=True, cat=cat, url=url, priority=priority, nzbname=nzbname, status=Status.GRABBING)
self.add(future_nzo)
return future_nzo
@ -360,9 +338,16 @@ class NzbQueue:
return nzo.nzo_id
@NzbQueueLocker
def remove(self, nzo_id, add_to_history=True, save=True, cleanup=True, keep_basic=False, del_files=False):
def remove(self, nzo_id, add_to_history=True, cleanup=True, delete_all_data=True):
""" Remove NZO from queue.
It can be added to history directly.
Or, we do some clean-up, sometimes leaving some data.
"""
if nzo_id in self.__nzo_table:
nzo = self.__nzo_table.pop(nzo_id)
logging.info('[%s] Removing job %s', caller_name(), nzo.final_name)
# Set statuses
nzo.deleted = True
if cleanup and not nzo.is_gone():
nzo.status = Status.DELETED
@ -373,28 +358,23 @@ class NzbQueue:
history_db = database.HistoryDB()
# Add the nzo to the database. Only the path, script and time taken is passed
# Other information is obtained from the nzo
history_db.add_history_db(nzo, '', '', 0, '', '')
history_db.add_history_db(nzo)
history_db.close()
sabnzbd.history_updated()
elif cleanup:
self.cleanup_nzo(nzo, keep_basic, del_files)
sabnzbd.remove_data(nzo_id, nzo.workpath)
logging.info('[%s] Removed job %s', caller_name(), nzo.final_name)
if save:
self.save(nzo)
else:
nzo_id = None
return nzo_id
nzo.purge_data(delete_all_data=delete_all_data)
self.save(False)
return nzo_id
return None
def remove_multiple(self, nzo_ids, del_files=False):
@NzbQueueLocker
def remove_multiple(self, nzo_ids, delete_all_data=True):
removed = []
for nzo_id in nzo_ids:
if self.remove(nzo_id, add_to_history=False, save=False, keep_basic=not del_files, del_files=del_files):
if self.remove(nzo_id, add_to_history=False, delete_all_data=delete_all_data):
removed.append(nzo_id)
# Save with invalid nzo_id, to that only queue file is saved
self.save('x')
self.save(False)
# Any files left? Otherwise let's disconnect
if self.actives(grabs=False) == 0 and cfg.autodisconnect():
@ -405,19 +385,13 @@ class NzbQueue:
@NzbQueueLocker
def remove_all(self, search=None):
if search:
search = search.lower()
removed = []
for nzo_id in self.__nzo_table.keys():
if (not search) or search in self.__nzo_table[nzo_id].final_name_pw_clean.lower():
nzo = self.__nzo_table.pop(nzo_id)
nzo.deleted = True
self.__nzo_list.remove(nzo)
sabnzbd.remove_data(nzo_id, nzo.workpath)
self.cleanup_nzo(nzo)
removed.append(nzo_id)
self.save()
return removed
""" Remove NZO's that match the search-pattern """
nzo_ids = []
search = safe_lower(search)
for nzo_id, nzo in self.__nzo_table.items():
if not search or search in nzo.final_name_pw_clean.lower():
nzo_ids.append(nzo_id)
return self.remove_multiple(nzo_ids)
def remove_nzf(self, nzo_id, nzf_id, force_delete=False):
removed = []
@ -756,7 +730,6 @@ class NzbQueue:
enough, _ratio = nzo.check_availability_ratio()
if enough:
# Enough data present, do real download
self.cleanup_nzo(nzo, keep_basic=True)
self.send_back(nzo)
return
else:
@ -828,12 +801,6 @@ class NzbQueue:
break
return empty
def cleanup_nzo(self, nzo, keep_basic=False, del_files=False):
# Abort DirectUnpack and let it remove files
nzo.abort_direct_unpacker()
nzo.purge_data(keep_basic, del_files)
ArticleCache.do.purge_articles(nzo.saved_articles)
def stop_idle_jobs(self):
""" Detect jobs that have zero files left and send them to post processing """
empty = []

61
sabnzbd/nzbstuff.py

@ -407,8 +407,12 @@ class NzbObject(TryList):
if not password:
_, password = scan_password(os.path.splitext(filename)[0])
# Remove trailing .nzb and .par(2)
self.final_name = self.work_name = create_work_name(self.work_name)
# For future-slots we keep the name given by URLGrabber
if nzb is None:
self.final_name = self.work_name = filename
else:
# Remove trailing .nzb and .par(2)
self.final_name = self.work_name = create_work_name(self.work_name)
# Determine category and find pp/script values based on input
# Later will be re-evaluated based on import steps
@ -416,6 +420,7 @@ class NzbObject(TryList):
r = u = d = None
else:
r, u, d = sabnzbd.pp_to_opts(pp)
self.set_priority(priority) # Parse priority of input
self.repair = r # True if we want to repair this set
self.unpack = u # True if we want to unpack this set
@ -523,8 +528,8 @@ class NzbObject(TryList):
duplicate = series = 0
if reuse:
remove_all(adir, 'SABnzbd_nz?_*', True)
remove_all(adir, 'SABnzbd_article_*', True)
remove_all(adir, 'SABnzbd_nz?_*', keep_folder=True)
remove_all(adir, 'SABnzbd_article_*', keep_folder=True)
else:
wdir = trim_win_path(wdir)
wdir = get_unique_path(wdir, create_dir=True)
@ -536,9 +541,6 @@ class NzbObject(TryList):
_, self.work_name = os.path.split(wdir)
self.created = True
# Must create a lower level XML parser because we must
# disable the reading of the DTD file from an external website
# by setting "feature_external_ges" to 0.
if nzb and '<nzb' in nzb:
try:
sabnzbd.nzbparser.nzbfile_parser(nzb, self)
@ -560,7 +562,7 @@ class NzbObject(TryList):
sabnzbd.save_compressed(adir, filename, nzb)
if not self.files and not reuse:
self.purge_data(keep_basic=False)
self.purge_data()
if cfg.warn_empty_nzb():
mylog = logging.warning
else:
@ -648,7 +650,7 @@ class NzbObject(TryList):
if duplicate and ((not series and cfg.no_dupes() == 1) or (series and cfg.no_series_dupes() == 1)):
if cfg.warn_dupl_jobs():
logging.warning(T('Ignoring duplicate NZB "%s"'), filename)
self.purge_data(keep_basic=False)
self.purge_data()
raise TypeError
if duplicate and ((not series and cfg.no_dupes() == 3) or (series and cfg.no_series_dupes() == 3)):
@ -1531,36 +1533,21 @@ class NzbObject(TryList):
return self.bytes - self.bytes_tried
@synchronized(NZO_LOCK)
def purge_data(self, keep_basic=False, del_files=False):
""" Remove all admin info, 'keep_basic' preserves attribs and nzb """
logging.info('[%s] Purging data for job %s (keep_basic=%s, del_files=%s)', caller_name(), self.final_name, keep_basic, del_files)
def purge_data(self, delete_all_data=True):
""" Remove (all) job data """
logging.info('[%s] Purging data for job %s (delete_all_data=%s)', caller_name(), self.final_name, delete_all_data)
wpath = self.workpath
for nzf in self.files:
sabnzbd.remove_data(nzf.nzf_id, wpath)
# Abort DirectUnpack and let it remove files
self.abort_direct_unpacker()
for _set in self.extrapars:
for nzf in self.extrapars[_set]:
sabnzbd.remove_data(nzf.nzf_id, wpath)
for nzf in self.finished_files:
sabnzbd.remove_data(nzf.nzf_id, wpath)
if not self.futuretype:
if keep_basic:
remove_all(wpath, 'SABnzbd_nz?_*', keep_folder=True)
remove_all(wpath, 'SABnzbd_article_*', keep_folder=True)
# We save the renames file
sabnzbd.save_data(self.renames, RENAMES_FILE, self.workpath, silent=True)
else:
remove_all(wpath, recursive=True)
if del_files:
remove_all(self.downpath, recursive=True)
else:
try:
remove_dir(self.downpath)
except:
logging.info('Folder cannot be removed: %s', self.downpath, exc_info=True)
# Delete all, or just basic?
if delete_all_data:
remove_all(self.downpath, recursive=True)
else:
# We remove any saved articles and save the renames file
remove_all(self.downpath, 'SABnzbd_nz?_*', keep_folder=True)
remove_all(self.downpath, 'SABnzbd_article_*', keep_folder=True)
sabnzbd.save_data(self.renames, RENAMES_FILE, self.workpath, silent=True)
def gather_info(self, full=False):
queued_files = []

27
sabnzbd/postproc.py

@ -32,10 +32,10 @@ from sabnzbd.newsunpack import unpack_magic, par2_repair, external_processing, \
sfv_check, build_filelists, rar_sort
from threading import Thread
from sabnzbd.misc import on_cleanup_list
from sabnzbd.filesystem import real_path, get_unique_path, create_dirs, move_to_path, \
make_script_path, long_path, clip_path, renamer, remove_dir, remove_all, globber, \
from sabnzbd.filesystem import real_path, get_unique_path, move_to_path, \
make_script_path, long_path, clip_path, renamer, remove_dir, globber, \
globber_full, set_permissions, cleanup_empty_directories, fix_unix_encoding, \
sanitize_and_trim_path, sanitize_files_in_folder, remove_file, recursive_listdir, setname_from_path
sanitize_and_trim_path, sanitize_files_in_folder, remove_file, recursive_listdir, setname_from_path, create_all_dirs
from sabnzbd.sorting import Sorter
from sabnzbd.constants import REPAIR_PRIORITY, TOP_PRIORITY, POSTPROC_QUEUE_FILE_NAME, \
POSTPROC_QUEUE_VERSION, sample_match, JOB_ADMIN, Status, VERIFIED_FILE
@ -119,7 +119,7 @@ class PostProcessor(Thread):
nzo.to_be_removed = True
elif nzo.status in (Status.DOWNLOADING, Status.QUEUED):
self.remove(nzo)
nzo.purge_data(keep_basic=False, del_files=del_files)
nzo.purge_data(delete_all_data=del_files)
logging.info('Removed job %s from postproc queue', nzo.final_name)
nzo.work_name = '' # Mark as deleted job
break
@ -242,7 +242,7 @@ class PostProcessor(Thread):
history_db = database.HistoryDB()
history_db.remove_history(nzo.nzo_id)
history_db.close()
nzo.purge_data(keep_basic=False, del_files=True)
nzo.purge_data()
# Processing done
nzo.pp_active = False
@ -541,24 +541,13 @@ def process_job(nzo):
workdir_complete = one_file_or_folder(workdir_complete)
workdir_complete = os.path.normpath(workdir_complete)
# Clean up the NZO
# Clean up the NZO data
try:
logging.info('Cleaning up %s (keep_basic=%s)', filename, str(not all_ok))
sabnzbd.nzbqueue.NzbQueue.do.cleanup_nzo(nzo, keep_basic=not all_ok)
nzo.purge_data(delete_all_data=all_ok)
except:
logging.error(T('Cleanup of %s failed.'), nzo.final_name)
logging.info("Traceback: ", exc_info=True)
# Remove download folder
if all_ok:
try:
if os.path.exists(workdir):
logging.debug('Removing workdir %s', workdir)
remove_all(workdir, recursive=True)
except:
logging.error(T('Error removing workdir (%s)'), clip_path(workdir))
logging.info("Traceback: ", exc_info=True)
# Use automatic retry link on par2 errors and encrypted/bad RARs
if par_error or unpack_error in (2, 3):
try_alt_nzb(nzo)
@ -614,7 +603,7 @@ def prepare_extraction_path(nzo):
complete_dir = sanitize_and_trim_path(complete_dir)
if one_folder:
workdir_complete = create_dirs(complete_dir)
workdir_complete = create_all_dirs(complete_dir, umask=True)
else:
workdir_complete = get_unique_path(os.path.join(complete_dir, nzo.final_name), create_dir=True)
marker_file = set_marker(workdir_complete)

11
sabnzbd/urlgrabber.py

@ -242,12 +242,9 @@ class URLGrabber(Thread):
filename = sabnzbd.filesystem.sanitize_filename(filename)
# Write data to temp file
path = os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER)
path = os.path.join(path, filename)
f = open(path, 'wb')
f.write(data)
f.close()
del data
path = os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER, filename)
with open(path, 'wb') as temp_nzb:
temp_nzb.write(data)
# Check if nzb file
if sabnzbd.filesystem.get_ext(filename) in VALID_NZB_FILES:
@ -320,7 +317,7 @@ class URLGrabber(Thread):
nzo.cat, _, nzo.script, _ = misc.cat_to_opts(nzo.cat, script=nzo.script)
# Add to history and run script if desired
NzbQueue.do.remove(nzo.nzo_id, add_to_history=False)
NzbQueue.do.remove(nzo.nzo_id, add_to_history=False, delete_all_data=False)
PostProcessor.do.process(nzo)

Loading…
Cancel
Save