Browse Source

Py3: Reworking of removal of files and other tweaks

pull/1278/head
Safihre 6 years ago
parent
commit
d2e0ebebc9
  1. 17
      sabnzbd/api.py
  2. 4
      sabnzbd/dirscanner.py
  3. 116
      sabnzbd/filesystem.py
  4. 2
      sabnzbd/interface.py
  5. 2
      sabnzbd/nzbparser.py
  6. 111
      sabnzbd/nzbqueue.py
  7. 61
      sabnzbd/nzbstuff.py
  8. 27
      sabnzbd/postproc.py
  9. 11
      sabnzbd/urlgrabber.py

17
sabnzbd/api.py

@ -163,8 +163,8 @@ def _api_queue_delete(output, value, kwargs):
return report(output, keyword='', data={'status': bool(removed), 'nzo_ids': removed}) return report(output, keyword='', data={'status': bool(removed), 'nzo_ids': removed})
elif value: elif value:
items = value.split(',') items = value.split(',')
del_files = int_conv(kwargs.get('del_files')) delete_all_data = int_conv(kwargs.get('del_files'))
removed = NzbQueue.do.remove_multiple(items, del_files) removed = NzbQueue.do.remove_multiple(items, delete_all_data=delete_all_data)
return report(output, keyword='', data={'status': bool(removed), 'nzo_ids': removed}) return report(output, keyword='', data={'status': bool(removed), 'nzo_ids': removed})
else: else:
return report(output, _MSG_NO_VALUE) return report(output, _MSG_NO_VALUE)
@ -1677,7 +1677,6 @@ def build_history(start=None, limit=None,search=None, failed_only=0, categories=
# Unreverse the queue # Unreverse the queue
items.reverse() items.reverse()
retry_folders = []
for item in items: for item in items:
item['size'] = format_bytes(item['bytes']) item['size'] = format_bytes(item['bytes'])
@ -1686,19 +1685,11 @@ def build_history(start=None, limit=None,search=None, failed_only=0, categories=
path = item.get('path', '') path = item.get('path', '')
item['retry'] = int(bool(item.get('status') == Status.FAILED and item['retry'] = int_conv(item.get('status') == Status.FAILED and path and os.path.exists(path))
path and # Retry of failed URL-fetch
path not in retry_folders and
same_file(cfg.download_dir.get_path(), path) and
os.path.exists(path)) and
not bool(globber(os.path.join(path, JOB_ADMIN), 'SABnzbd_n*'))
)
if item['report'] == 'future': if item['report'] == 'future':
item['retry'] = True item['retry'] = True
if item['retry']:
retry_folders.append(path)
if Rating.do: if Rating.do:
rating = Rating.do.get_rating_by_nzo(item['nzo_id']) rating = Rating.do.get_rating_by_nzo(item['nzo_id'])
else: else:

4
sabnzbd/dirscanner.py

@ -181,7 +181,7 @@ def process_nzb_archive_file(
if nzo: if nzo:
if nzo_id: if nzo_id:
# Re-use existing nzo_id, when a "future" job gets its payload # Re-use existing nzo_id, when a "future" job gets its payload
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False) sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False, delete_all_data=False)
nzo.nzo_id = nzo_id nzo.nzo_id = nzo_id
nzo_id = None nzo_id = None
nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo)) nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo))
@ -296,7 +296,7 @@ def process_single_nzb(
if nzo: if nzo:
if nzo_id: if nzo_id:
# Re-use existing nzo_id, when a "future" job gets its payload # Re-use existing nzo_id, when a "future" job gets its payload
sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False) sabnzbd.nzbqueue.NzbQueue.do.remove(nzo_id, add_to_history=False, delete_all_data=False)
nzo.nzo_id = nzo_id nzo.nzo_id = nzo_id
nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo, quiet=reuse)) nzo_ids.append(sabnzbd.nzbqueue.NzbQueue.do.add(nzo, quiet=reuse))
nzo.update_rating() nzo.update_rating()

116
sabnzbd/filesystem.py

@ -255,46 +255,13 @@ def is_obfuscated_filename(filename):
return os.path.splitext(filename)[1] == "" return os.path.splitext(filename)[1] == ""
def create_all_dirs(path, umask=False):
""" Create all required path elements and set umask on all
Return True if last element could be made or exists
"""
result = True
if sabnzbd.WIN32:
try:
os.makedirs(path)
except:
result = False
else:
lst = []
lst.extend(path.split("/"))
path = ""
for d in lst:
if d:
path += "/" + d
if not os.path.exists(path):
try:
os.mkdir(path)
result = True
except:
result = False
if umask:
mask = sabnzbd.cfg.umask()
if mask:
try:
os.chmod(path, int(mask, 8) | 0o700)
except:
pass
return result
def real_path(loc, path): def real_path(loc, path):
""" When 'path' is relative, return normalized join of 'loc' and 'path' """ When 'path' is relative, return normalized join of 'loc' and 'path'
When 'path' is absolute, return normalized path When 'path' is absolute, return normalized path
A path starting with ~ will be located in the user's Home folder A path starting with ~ will be located in the user's Home folder
""" """
# The Windows part is a bit convoluted because # The Windows part is a bit convoluted because
# os.path.join() doesn't behave the same for all Python versions # C: and C:\ are 2 different things
if path: if path:
path = path.strip() path = path.strip()
else: else:
@ -316,13 +283,10 @@ def real_path(loc, path):
path = os.path.join(loc, path) path = os.path.join(loc, path)
elif path[0] != "/": elif path[0] != "/":
path = os.path.join(loc, path) path = os.path.join(loc, path)
# Always use long-path notation
path = long_path(path)
else: else:
path = loc path = loc
return os.path.normpath(os.path.abspath(path)) return long_path(os.path.normpath(os.path.abspath(path)))
def create_real_path(name, loc, path, umask=False, writable=True): def create_real_path(name, loc, path, umask=False, writable=True):
@ -544,6 +508,27 @@ DIR_LOCK = threading.RLock()
@synchronized(DIR_LOCK) @synchronized(DIR_LOCK)
def create_all_dirs(path, umask=False):
""" Create all required path elements and set umask on all
The umask argument is ignored on Windows
Return path if elements could be made or exists
"""
try:
# Use custom mask if desired
mask = 0o700
if umask and sabnzbd.cfg.umask():
mask = int(sabnzbd.cfg.umask(), 8)
# Use python functions to create the directory
logging.info("Creating directories: %s (mask=%s)", path, mask)
os.makedirs(path, mode=mask, exist_ok=True)
return path
except OSError:
logging.error(T("Failed making (%s)"), clip_path(path), exc_info=True)
return False
@synchronized(DIR_LOCK)
def get_unique_path(dirpath, n=0, create_dir=True): def get_unique_path(dirpath, n=0, create_dir=True):
""" Determine a unique folder or filename """ """ Determine a unique folder or filename """
@ -556,7 +541,7 @@ def get_unique_path(dirpath, n=0, create_dir=True):
if not os.path.exists(path): if not os.path.exists(path):
if create_dir: if create_dir:
return create_dirs(path) return create_all_dirs(path, umask=True)
else: else:
return path return path
else: else:
@ -579,17 +564,6 @@ def get_unique_filename(path):
@synchronized(DIR_LOCK) @synchronized(DIR_LOCK)
def create_dirs(dirpath):
""" Create directory tree, obeying permissions """
if not os.path.exists(dirpath):
logging.info("Creating directories: %s", dirpath)
if not create_all_dirs(dirpath, True):
logging.error(T("Failed making (%s)"), clip_path(dirpath))
return None
return dirpath
@synchronized(DIR_LOCK)
def recursive_listdir(dir): def recursive_listdir(dir):
""" List all files in dirs and sub-dirs """ """ List all files in dirs and sub-dirs """
filelist = [] filelist = []
@ -630,8 +604,7 @@ def move_to_path(path, new_path):
# Cannot rename, try copying # Cannot rename, try copying
logging.debug("File could not be renamed, trying copying: %s", path) logging.debug("File could not be renamed, trying copying: %s", path)
try: try:
if not os.path.exists(os.path.dirname(new_path)): create_all_dirs(os.path.dirname(new_path), umask=True)
create_dirs(os.path.dirname(new_path))
shutil.copyfile(path, new_path) shutil.copyfile(path, new_path)
os.remove(path) os.remove(path)
except: except:
@ -780,25 +753,32 @@ def remove_dir(path):
def remove_all(path, pattern="*", keep_folder=False, recursive=False): def remove_all(path, pattern="*", keep_folder=False, recursive=False):
""" Remove folder and all its content (optionally recursive) """ """ Remove folder and all its content (optionally recursive) """
if os.path.exists(path): if os.path.exists(path):
files = globber_full(path, pattern) # Fast-remove the whole tree if recursive
if pattern == "*" and not sabnzbd.WIN32: if pattern == "*" and not keep_folder and recursive:
files.extend(globber_full(path, ".*")) logging.debug("Removing dir recursively %s", path)
try:
shutil.rmtree(path)
except:
logging.info("Cannot remove folder %s", path, exc_info=True)
else:
# Get files based on pattern
files = globber_full(path, pattern)
if pattern == "*" and not sabnzbd.WIN32:
files.extend(globber_full(path, ".*"))
for f in files: for f in files:
if os.path.isfile(f): if os.path.isfile(f):
try:
remove_file(f)
except:
logging.info("Cannot remove file %s", f, exc_info=True)
elif recursive:
remove_all(f, pattern, False, True)
if not keep_folder:
try: try:
logging.debug("Removing file %s", f) remove_dir(path)
os.remove(f)
except: except:
logging.info("Cannot remove file %s", f) logging.info("Cannot remove folder %s", path, exc_info=True)
elif recursive:
remove_all(f, pattern, False, True)
if not keep_folder:
try:
logging.debug("Removing dir %s", path)
os.rmdir(path)
except:
logging.info("Cannot remove folder %s", path)
############################################################################## ##############################################################################

2
sabnzbd/interface.py

@ -911,7 +911,7 @@ class QueuePage:
uid = kwargs.get('uid') uid = kwargs.get('uid')
del_files = int_conv(kwargs.get('del_files')) del_files = int_conv(kwargs.get('del_files'))
if uid: if uid:
NzbQueue.do.remove(uid, False, keep_basic=not del_files, del_files=del_files) NzbQueue.do.remove(uid, add_to_history=False, delete_all_data=del_files)
raise queueRaiser(self.__root, kwargs) raise queueRaiser(self.__root, kwargs)
@secured_expose(check_session_key=True) @secured_expose(check_session_key=True)

2
sabnzbd/nzbparser.py

@ -84,7 +84,7 @@ def nzbfile_parser(raw_data, nzo):
# Update hash # Update hash
md5sum.update(utob(article_id)) md5sum.update(utob(article_id))
# Dubplicate parts? # Duplicate parts?
if partnum in article_db: if partnum in article_db:
if article_id != article_db[partnum][0]: if article_id != article_db[partnum][0]:
logging.info( logging.info(

111
sabnzbd/nzbqueue.py

@ -27,7 +27,7 @@ import functools
import sabnzbd import sabnzbd
from sabnzbd.nzbstuff import NzbObject from sabnzbd.nzbstuff import NzbObject
from sabnzbd.misc import exit_sab, cat_to_opts, int_conv, caller_name, cmp from sabnzbd.misc import exit_sab, cat_to_opts, int_conv, caller_name, cmp, safe_lower
from sabnzbd.filesystem import get_admin_path, remove_all, globber_full, remove_file from sabnzbd.filesystem import get_admin_path, remove_all, globber_full, remove_file
from sabnzbd.panic import panic_queue from sabnzbd.panic import panic_queue
import sabnzbd.database as database import sabnzbd.database as database
@ -149,10 +149,9 @@ class NzbQueue:
def repair_job(self, folder, new_nzb=None, password=None): def repair_job(self, folder, new_nzb=None, password=None):
""" Reconstruct admin for a single job folder, optionally with new NZB """ """ Reconstruct admin for a single job folder, optionally with new NZB """
def all_verified(path): # Check if folder exists
""" Return True when all sets have been successfully verified """ if not folder or not os.path.exists(folder):
verified = sabnzbd.load_data(VERIFIED_FILE, path, remove=False) or {'x': False} return None
return all(verified[x] for x in verified)
name = os.path.basename(folder) name = os.path.basename(folder)
path = os.path.join(folder, JOB_ADMIN) path = os.path.join(folder, JOB_ADMIN)
@ -161,10 +160,13 @@ class NzbQueue:
else: else:
filename = '' filename = ''
if not filename: if not filename:
if not all_verified(path): # Was this file already post-processed?
verified = sabnzbd.load_data(VERIFIED_FILE, path, remove=False)
if not verified or not all(verified[x] for x in verified):
filename = globber_full(path, '*.gz') filename = globber_full(path, '*.gz')
if len(filename) > 0: if len(filename) > 0:
logging.debug('Repair job %s by reparsing stored NZB', name) logging.debug('Repair job %s by re-parsing stored NZB', name)
nzo_id = sabnzbd.add_nzbfile(filename[0], pp=None, script=None, cat=None, priority=None, nzbname=name, nzo_id = sabnzbd.add_nzbfile(filename[0], pp=None, script=None, cat=None, priority=None, nzbname=name,
reuse=True, password=password)[1] reuse=True, password=password)[1]
else: else:
@ -178,9 +180,9 @@ class NzbQueue:
logging.debug('Repair job %s with new NZB (%s)', name, filename) logging.debug('Repair job %s with new NZB (%s)', name, filename)
nzo_id = sabnzbd.add_nzbfile(new_nzb, pp=None, script=None, cat=None, priority=None, nzbname=name, nzo_id = sabnzbd.add_nzbfile(new_nzb, pp=None, script=None, cat=None, priority=None, nzbname=name,
reuse=True, password=password)[1] reuse=True, password=password)[1]
return nzo_id return nzo_id
@NzbQueueLocker
def send_back(self, nzo): def send_back(self, nzo):
""" Send back job to queue after successful pre-check """ """ Send back job to queue after successful pre-check """
try: try:
@ -189,36 +191,12 @@ class NzbQueue:
logging.debug('Failed to find NZB file after pre-check (%s)', nzo.nzo_id) logging.debug('Failed to find NZB file after pre-check (%s)', nzo.nzo_id)
return return
res, nzo_ids = process_single_nzb(nzo.work_name + '.nzb', nzb_path, keep=True, reuse=True) # Need to remove it first, otherwise it might still be downloading
self.remove(nzo, add_to_history=False, cleanup=False)
res, nzo_ids = process_single_nzb(nzo.work_name, nzb_path, keep=True, reuse=True, nzo_id=nzo.nzo_id)
if res == 0 and nzo_ids: if res == 0 and nzo_ids:
nzo = self.replace_in_q(nzo, nzo_ids[0])
# Reset reuse flag to make pause/abort on encryption possible # Reset reuse flag to make pause/abort on encryption possible
nzo.reuse = False self.__nzo_table[nzo_ids[0]].reuse = False
@NzbQueueLocker
def replace_in_q(self, nzo, nzo_id):
""" Replace nzo by new in at the same spot in the queue, destroy nzo """
# Must be a separate function from "send_back()", due to the required queue-lock
try:
old_id = nzo.nzo_id
new_nzo = self.get_nzo(nzo_id)
pos = self.__nzo_list.index(new_nzo)
targetpos = self.__nzo_list.index(nzo)
self.__nzo_list[targetpos] = new_nzo
self.__nzo_list.pop(pos)
# Reuse the old nzo_id
new_nzo.nzo_id = old_id
# Therefore: remove the new nzo_id
del self.__nzo_table[nzo_id]
# And attach the new nzo to the old nzo_id
self.__nzo_table[old_id] = new_nzo
logging.info('Replacing in queue %s by %s', nzo.final_name, new_nzo.final_name)
del nzo
return new_nzo
except:
logging.error(T('Failed to restart NZB after pre-check (%s)'), nzo.nzo_id)
logging.info("Traceback: ", exc_info=True)
return nzo
@NzbQueueLocker @NzbQueueLocker
def save(self, save_nzo=None): def save(self, save_nzo=None):
@ -245,7 +223,7 @@ class NzbQueue:
def generate_future(self, msg, pp=None, script=None, cat=None, url=None, priority=NORMAL_PRIORITY, nzbname=None): def generate_future(self, msg, pp=None, script=None, cat=None, url=None, priority=NORMAL_PRIORITY, nzbname=None):
""" Create and return a placeholder nzo object """ """ Create and return a placeholder nzo object """
logging.debug('Creating placeholder NZO') logging.debug('Creating placeholder NZO')
future_nzo = NzbObject(msg, pp, script, None, True, cat=cat, url=url, priority=priority, nzbname=nzbname, status=Status.GRABBING) future_nzo = NzbObject(msg, pp, script, None, futuretype=True, cat=cat, url=url, priority=priority, nzbname=nzbname, status=Status.GRABBING)
self.add(future_nzo) self.add(future_nzo)
return future_nzo return future_nzo
@ -360,9 +338,16 @@ class NzbQueue:
return nzo.nzo_id return nzo.nzo_id
@NzbQueueLocker @NzbQueueLocker
def remove(self, nzo_id, add_to_history=True, save=True, cleanup=True, keep_basic=False, del_files=False): def remove(self, nzo_id, add_to_history=True, cleanup=True, delete_all_data=True):
""" Remove NZO from queue.
It can be added to history directly.
Or, we do some clean-up, sometimes leaving some data.
"""
if nzo_id in self.__nzo_table: if nzo_id in self.__nzo_table:
nzo = self.__nzo_table.pop(nzo_id) nzo = self.__nzo_table.pop(nzo_id)
logging.info('[%s] Removing job %s', caller_name(), nzo.final_name)
# Set statuses
nzo.deleted = True nzo.deleted = True
if cleanup and not nzo.is_gone(): if cleanup and not nzo.is_gone():
nzo.status = Status.DELETED nzo.status = Status.DELETED
@ -373,28 +358,23 @@ class NzbQueue:
history_db = database.HistoryDB() history_db = database.HistoryDB()
# Add the nzo to the database. Only the path, script and time taken is passed # Add the nzo to the database. Only the path, script and time taken is passed
# Other information is obtained from the nzo # Other information is obtained from the nzo
history_db.add_history_db(nzo, '', '', 0, '', '') history_db.add_history_db(nzo)
history_db.close() history_db.close()
sabnzbd.history_updated() sabnzbd.history_updated()
elif cleanup: elif cleanup:
self.cleanup_nzo(nzo, keep_basic, del_files) nzo.purge_data(delete_all_data=delete_all_data)
self.save(False)
sabnzbd.remove_data(nzo_id, nzo.workpath) return nzo_id
logging.info('[%s] Removed job %s', caller_name(), nzo.final_name) return None
if save:
self.save(nzo)
else:
nzo_id = None
return nzo_id
def remove_multiple(self, nzo_ids, del_files=False): @NzbQueueLocker
def remove_multiple(self, nzo_ids, delete_all_data=True):
removed = [] removed = []
for nzo_id in nzo_ids: for nzo_id in nzo_ids:
if self.remove(nzo_id, add_to_history=False, save=False, keep_basic=not del_files, del_files=del_files): if self.remove(nzo_id, add_to_history=False, delete_all_data=delete_all_data):
removed.append(nzo_id) removed.append(nzo_id)
# Save with invalid nzo_id, to that only queue file is saved # Save with invalid nzo_id, to that only queue file is saved
self.save('x') self.save(False)
# Any files left? Otherwise let's disconnect # Any files left? Otherwise let's disconnect
if self.actives(grabs=False) == 0 and cfg.autodisconnect(): if self.actives(grabs=False) == 0 and cfg.autodisconnect():
@ -405,19 +385,13 @@ class NzbQueue:
@NzbQueueLocker @NzbQueueLocker
def remove_all(self, search=None): def remove_all(self, search=None):
if search: """ Remove NZO's that match the search-pattern """
search = search.lower() nzo_ids = []
removed = [] search = safe_lower(search)
for nzo_id in self.__nzo_table.keys(): for nzo_id, nzo in self.__nzo_table.items():
if (not search) or search in self.__nzo_table[nzo_id].final_name_pw_clean.lower(): if not search or search in nzo.final_name_pw_clean.lower():
nzo = self.__nzo_table.pop(nzo_id) nzo_ids.append(nzo_id)
nzo.deleted = True return self.remove_multiple(nzo_ids)
self.__nzo_list.remove(nzo)
sabnzbd.remove_data(nzo_id, nzo.workpath)
self.cleanup_nzo(nzo)
removed.append(nzo_id)
self.save()
return removed
def remove_nzf(self, nzo_id, nzf_id, force_delete=False): def remove_nzf(self, nzo_id, nzf_id, force_delete=False):
removed = [] removed = []
@ -756,7 +730,6 @@ class NzbQueue:
enough, _ratio = nzo.check_availability_ratio() enough, _ratio = nzo.check_availability_ratio()
if enough: if enough:
# Enough data present, do real download # Enough data present, do real download
self.cleanup_nzo(nzo, keep_basic=True)
self.send_back(nzo) self.send_back(nzo)
return return
else: else:
@ -828,12 +801,6 @@ class NzbQueue:
break break
return empty return empty
def cleanup_nzo(self, nzo, keep_basic=False, del_files=False):
# Abort DirectUnpack and let it remove files
nzo.abort_direct_unpacker()
nzo.purge_data(keep_basic, del_files)
ArticleCache.do.purge_articles(nzo.saved_articles)
def stop_idle_jobs(self): def stop_idle_jobs(self):
""" Detect jobs that have zero files left and send them to post processing """ """ Detect jobs that have zero files left and send them to post processing """
empty = [] empty = []

61
sabnzbd/nzbstuff.py

@ -407,8 +407,12 @@ class NzbObject(TryList):
if not password: if not password:
_, password = scan_password(os.path.splitext(filename)[0]) _, password = scan_password(os.path.splitext(filename)[0])
# Remove trailing .nzb and .par(2) # For future-slots we keep the name given by URLGrabber
self.final_name = self.work_name = create_work_name(self.work_name) if nzb is None:
self.final_name = self.work_name = filename
else:
# Remove trailing .nzb and .par(2)
self.final_name = self.work_name = create_work_name(self.work_name)
# Determine category and find pp/script values based on input # Determine category and find pp/script values based on input
# Later will be re-evaluated based on import steps # Later will be re-evaluated based on import steps
@ -416,6 +420,7 @@ class NzbObject(TryList):
r = u = d = None r = u = d = None
else: else:
r, u, d = sabnzbd.pp_to_opts(pp) r, u, d = sabnzbd.pp_to_opts(pp)
self.set_priority(priority) # Parse priority of input self.set_priority(priority) # Parse priority of input
self.repair = r # True if we want to repair this set self.repair = r # True if we want to repair this set
self.unpack = u # True if we want to unpack this set self.unpack = u # True if we want to unpack this set
@ -523,8 +528,8 @@ class NzbObject(TryList):
duplicate = series = 0 duplicate = series = 0
if reuse: if reuse:
remove_all(adir, 'SABnzbd_nz?_*', True) remove_all(adir, 'SABnzbd_nz?_*', keep_folder=True)
remove_all(adir, 'SABnzbd_article_*', True) remove_all(adir, 'SABnzbd_article_*', keep_folder=True)
else: else:
wdir = trim_win_path(wdir) wdir = trim_win_path(wdir)
wdir = get_unique_path(wdir, create_dir=True) wdir = get_unique_path(wdir, create_dir=True)
@ -536,9 +541,6 @@ class NzbObject(TryList):
_, self.work_name = os.path.split(wdir) _, self.work_name = os.path.split(wdir)
self.created = True self.created = True
# Must create a lower level XML parser because we must
# disable the reading of the DTD file from an external website
# by setting "feature_external_ges" to 0.
if nzb and '<nzb' in nzb: if nzb and '<nzb' in nzb:
try: try:
sabnzbd.nzbparser.nzbfile_parser(nzb, self) sabnzbd.nzbparser.nzbfile_parser(nzb, self)
@ -560,7 +562,7 @@ class NzbObject(TryList):
sabnzbd.save_compressed(adir, filename, nzb) sabnzbd.save_compressed(adir, filename, nzb)
if not self.files and not reuse: if not self.files and not reuse:
self.purge_data(keep_basic=False) self.purge_data()
if cfg.warn_empty_nzb(): if cfg.warn_empty_nzb():
mylog = logging.warning mylog = logging.warning
else: else:
@ -648,7 +650,7 @@ class NzbObject(TryList):
if duplicate and ((not series and cfg.no_dupes() == 1) or (series and cfg.no_series_dupes() == 1)): if duplicate and ((not series and cfg.no_dupes() == 1) or (series and cfg.no_series_dupes() == 1)):
if cfg.warn_dupl_jobs(): if cfg.warn_dupl_jobs():
logging.warning(T('Ignoring duplicate NZB "%s"'), filename) logging.warning(T('Ignoring duplicate NZB "%s"'), filename)
self.purge_data(keep_basic=False) self.purge_data()
raise TypeError raise TypeError
if duplicate and ((not series and cfg.no_dupes() == 3) or (series and cfg.no_series_dupes() == 3)): if duplicate and ((not series and cfg.no_dupes() == 3) or (series and cfg.no_series_dupes() == 3)):
@ -1531,36 +1533,21 @@ class NzbObject(TryList):
return self.bytes - self.bytes_tried return self.bytes - self.bytes_tried
@synchronized(NZO_LOCK) @synchronized(NZO_LOCK)
def purge_data(self, keep_basic=False, del_files=False): def purge_data(self, delete_all_data=True):
""" Remove all admin info, 'keep_basic' preserves attribs and nzb """ """ Remove (all) job data """
logging.info('[%s] Purging data for job %s (keep_basic=%s, del_files=%s)', caller_name(), self.final_name, keep_basic, del_files) logging.info('[%s] Purging data for job %s (delete_all_data=%s)', caller_name(), self.final_name, delete_all_data)
wpath = self.workpath # Abort DirectUnpack and let it remove files
for nzf in self.files: self.abort_direct_unpacker()
sabnzbd.remove_data(nzf.nzf_id, wpath)
for _set in self.extrapars: # Delete all, or just basic?
for nzf in self.extrapars[_set]: if delete_all_data:
sabnzbd.remove_data(nzf.nzf_id, wpath) remove_all(self.downpath, recursive=True)
else:
for nzf in self.finished_files: # We remove any saved articles and save the renames file
sabnzbd.remove_data(nzf.nzf_id, wpath) remove_all(self.downpath, 'SABnzbd_nz?_*', keep_folder=True)
remove_all(self.downpath, 'SABnzbd_article_*', keep_folder=True)
if not self.futuretype: sabnzbd.save_data(self.renames, RENAMES_FILE, self.workpath, silent=True)
if keep_basic:
remove_all(wpath, 'SABnzbd_nz?_*', keep_folder=True)
remove_all(wpath, 'SABnzbd_article_*', keep_folder=True)
# We save the renames file
sabnzbd.save_data(self.renames, RENAMES_FILE, self.workpath, silent=True)
else:
remove_all(wpath, recursive=True)
if del_files:
remove_all(self.downpath, recursive=True)
else:
try:
remove_dir(self.downpath)
except:
logging.info('Folder cannot be removed: %s', self.downpath, exc_info=True)
def gather_info(self, full=False): def gather_info(self, full=False):
queued_files = [] queued_files = []

27
sabnzbd/postproc.py

@ -32,10 +32,10 @@ from sabnzbd.newsunpack import unpack_magic, par2_repair, external_processing, \
sfv_check, build_filelists, rar_sort sfv_check, build_filelists, rar_sort
from threading import Thread from threading import Thread
from sabnzbd.misc import on_cleanup_list from sabnzbd.misc import on_cleanup_list
from sabnzbd.filesystem import real_path, get_unique_path, create_dirs, move_to_path, \ from sabnzbd.filesystem import real_path, get_unique_path, move_to_path, \
make_script_path, long_path, clip_path, renamer, remove_dir, remove_all, globber, \ make_script_path, long_path, clip_path, renamer, remove_dir, globber, \
globber_full, set_permissions, cleanup_empty_directories, fix_unix_encoding, \ globber_full, set_permissions, cleanup_empty_directories, fix_unix_encoding, \
sanitize_and_trim_path, sanitize_files_in_folder, remove_file, recursive_listdir, setname_from_path sanitize_and_trim_path, sanitize_files_in_folder, remove_file, recursive_listdir, setname_from_path, create_all_dirs
from sabnzbd.sorting import Sorter from sabnzbd.sorting import Sorter
from sabnzbd.constants import REPAIR_PRIORITY, TOP_PRIORITY, POSTPROC_QUEUE_FILE_NAME, \ from sabnzbd.constants import REPAIR_PRIORITY, TOP_PRIORITY, POSTPROC_QUEUE_FILE_NAME, \
POSTPROC_QUEUE_VERSION, sample_match, JOB_ADMIN, Status, VERIFIED_FILE POSTPROC_QUEUE_VERSION, sample_match, JOB_ADMIN, Status, VERIFIED_FILE
@ -119,7 +119,7 @@ class PostProcessor(Thread):
nzo.to_be_removed = True nzo.to_be_removed = True
elif nzo.status in (Status.DOWNLOADING, Status.QUEUED): elif nzo.status in (Status.DOWNLOADING, Status.QUEUED):
self.remove(nzo) self.remove(nzo)
nzo.purge_data(keep_basic=False, del_files=del_files) nzo.purge_data(delete_all_data=del_files)
logging.info('Removed job %s from postproc queue', nzo.final_name) logging.info('Removed job %s from postproc queue', nzo.final_name)
nzo.work_name = '' # Mark as deleted job nzo.work_name = '' # Mark as deleted job
break break
@ -242,7 +242,7 @@ class PostProcessor(Thread):
history_db = database.HistoryDB() history_db = database.HistoryDB()
history_db.remove_history(nzo.nzo_id) history_db.remove_history(nzo.nzo_id)
history_db.close() history_db.close()
nzo.purge_data(keep_basic=False, del_files=True) nzo.purge_data()
# Processing done # Processing done
nzo.pp_active = False nzo.pp_active = False
@ -541,24 +541,13 @@ def process_job(nzo):
workdir_complete = one_file_or_folder(workdir_complete) workdir_complete = one_file_or_folder(workdir_complete)
workdir_complete = os.path.normpath(workdir_complete) workdir_complete = os.path.normpath(workdir_complete)
# Clean up the NZO # Clean up the NZO data
try: try:
logging.info('Cleaning up %s (keep_basic=%s)', filename, str(not all_ok)) nzo.purge_data(delete_all_data=all_ok)
sabnzbd.nzbqueue.NzbQueue.do.cleanup_nzo(nzo, keep_basic=not all_ok)
except: except:
logging.error(T('Cleanup of %s failed.'), nzo.final_name) logging.error(T('Cleanup of %s failed.'), nzo.final_name)
logging.info("Traceback: ", exc_info=True) logging.info("Traceback: ", exc_info=True)
# Remove download folder
if all_ok:
try:
if os.path.exists(workdir):
logging.debug('Removing workdir %s', workdir)
remove_all(workdir, recursive=True)
except:
logging.error(T('Error removing workdir (%s)'), clip_path(workdir))
logging.info("Traceback: ", exc_info=True)
# Use automatic retry link on par2 errors and encrypted/bad RARs # Use automatic retry link on par2 errors and encrypted/bad RARs
if par_error or unpack_error in (2, 3): if par_error or unpack_error in (2, 3):
try_alt_nzb(nzo) try_alt_nzb(nzo)
@ -614,7 +603,7 @@ def prepare_extraction_path(nzo):
complete_dir = sanitize_and_trim_path(complete_dir) complete_dir = sanitize_and_trim_path(complete_dir)
if one_folder: if one_folder:
workdir_complete = create_dirs(complete_dir) workdir_complete = create_all_dirs(complete_dir, umask=True)
else: else:
workdir_complete = get_unique_path(os.path.join(complete_dir, nzo.final_name), create_dir=True) workdir_complete = get_unique_path(os.path.join(complete_dir, nzo.final_name), create_dir=True)
marker_file = set_marker(workdir_complete) marker_file = set_marker(workdir_complete)

11
sabnzbd/urlgrabber.py

@ -242,12 +242,9 @@ class URLGrabber(Thread):
filename = sabnzbd.filesystem.sanitize_filename(filename) filename = sabnzbd.filesystem.sanitize_filename(filename)
# Write data to temp file # Write data to temp file
path = os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER) path = os.path.join(cfg.admin_dir.get_path(), FUTURE_Q_FOLDER, filename)
path = os.path.join(path, filename) with open(path, 'wb') as temp_nzb:
f = open(path, 'wb') temp_nzb.write(data)
f.write(data)
f.close()
del data
# Check if nzb file # Check if nzb file
if sabnzbd.filesystem.get_ext(filename) in VALID_NZB_FILES: if sabnzbd.filesystem.get_ext(filename) in VALID_NZB_FILES:
@ -320,7 +317,7 @@ class URLGrabber(Thread):
nzo.cat, _, nzo.script, _ = misc.cat_to_opts(nzo.cat, script=nzo.script) nzo.cat, _, nzo.script, _ = misc.cat_to_opts(nzo.cat, script=nzo.script)
# Add to history and run script if desired # Add to history and run script if desired
NzbQueue.do.remove(nzo.nzo_id, add_to_history=False) NzbQueue.do.remove(nzo.nzo_id, add_to_history=False, delete_all_data=False)
PostProcessor.do.process(nzo) PostProcessor.do.process(nzo)

Loading…
Cancel
Save