#!/usr/bin/python -OO
# Copyright 2008-2017 The SABnzbd-Team <team@sabnzbd.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
sabnzbd.articlecache - Article cache handling
"""

import sys
import logging
import threading

import sabnzbd
from sabnzbd.decorators import synchronized
from sabnzbd.constants import GIGI, ANFO

ARTICLE_LOCK = threading.Lock()


class ArticleCache(object):
""" Operations on lists/dicts are atomic enough that we
do not have to put locks. Only the cache-size needs
a lock since the integer needs to stay synced.
With less locking, the decoder and assembler do not
have to wait on each other.
"""
    do = None  # Holds the singleton instance, set by __init__()

    def __init__(self):
        self.__cache_limit_org = 0
        self.__cache_limit = 0
        self.__cache_size = 0
        self.__article_list = []    # List of buffered articles
        self.__article_table = {}   # Dict of buffered articles
        ArticleCache.do = self

    def cache_info(self):
        return ANFO(len(self.__article_list), abs(self.__cache_size), self.__cache_limit_org)
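
    # Note: ANFO (imported from sabnzbd.constants) appears to be a simple
    # record type holding (number of cached articles, current cache size,
    # configured limit), consumed by the status/API layer.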

    def new_limit(self, limit):
        """ Called when the cache limit changes """
        self.__cache_limit_org = limit
        if limit < 0:
            self.__cache_limit = GIGI
        else:
            self.__cache_limit = min(limit, GIGI)
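
    # For example (illustrative):
    #   new_limit(-1)        -> "no limit set", internally capped at GIGI (1 GiB)
    #   new_limit(2 * GIGI)  -> clamped down to GIGI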

    @synchronized(ARTICLE_LOCK)
    def increase_cache_size(self, value):
        self.__cache_size += value

    @synchronized(ARTICLE_LOCK)
    def decrease_cache_size(self, value):
        self.__cache_size -= value

    def reserve_space(self, data):
        """ Is there space left in the set limit? """
        # sys.getsizeof() measures only the list object itself, not the line
        # strings it references; the factor of 64 looks like a rough per-item
        # estimate of the true memory footprint.
        data_size = sys.getsizeof(data) * 64
        self.increase_cache_size(data_size)
        if (self.__cache_size + data_size) > self.__cache_limit:
            return False
        else:
            return True

    def free_reserve_space(self, data):
        """ Remove previously reserved space """
        data_size = sys.getsizeof(data) * 64
        self.decrease_cache_size(data_size)
        return (self.__cache_size + data_size) < self.__cache_limit
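
    # Illustrative pairing (hypothetical caller code, not part of this module):
    # a downloader would reserve space before buffering article lines and
    # release the reservation after handing the data over:
    #
    #     if not ArticleCache.do.reserve_space(lines):
    #         pass  # e.g. throttle fetching until the cache drains
    #     ...
    #     ArticleCache.do.free_reserve_space(lines)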

    def save_article(self, article, data):
        nzf = article.nzf
        nzo = nzf.nzo

        if nzo.is_gone():
            # Do not discard this article because the
            # file might still be processed at this moment!!
            return

        saved_articles = nzo.saved_articles

        if article not in saved_articles:
            saved_articles.append(article)

        if self.__cache_limit:
            if self.__cache_limit < 0:
                # A negative limit means "no limit": always cache the article
                self.__add_to_cache(article, data)
            else:
                data_size = len(data)

                while (self.__cache_size > (self.__cache_limit - data_size)) \
                        and self.__article_list:
                    # Flush oldest article in cache
                    old_article = self.__article_list.pop(0)
                    old_data = self.__article_table.pop(old_article)
                    self.decrease_cache_size(len(old_data))
                    # No need to flush if this is a refreshment article
                    if old_article != article:
                        self.__flush_article(old_article, old_data)

                # Does our article fit into our limit now?
                if (self.__cache_size + data_size) <= self.__cache_limit:
                    self.__add_to_cache(article, data)
                else:
                    self.__flush_article(article, data)
        else:
            self.__flush_article(article, data)

    def load_article(self, article):
        data = None
        nzo = article.nzf.nzo

        if article in self.__article_list:
            data = self.__article_table.pop(article)
            self.__article_list.remove(article)
            self.decrease_cache_size(len(data))
        elif article.art_id:
            data = sabnzbd.load_data(article.art_id, nzo.workpath, remove=True,
                                     do_pickle=False, silent=True)

        if article in nzo.saved_articles:
            nzo.remove_saved_article(article)

        return data

    def flush_articles(self):
        while self.__article_list:
            article = self.__article_list.pop(0)
            data = self.__article_table.pop(article)
            self.__flush_article(article, data)
        self.__cache_size = 0

    def purge_articles(self, articles):
        for article in articles:
            if article in self.__article_list:
                self.__article_list.remove(article)
                data = self.__article_table.pop(article)
                self.decrease_cache_size(len(data))
            if article.art_id:
                sabnzbd.remove_data(article.art_id, article.nzf.nzo.workpath)

    def __flush_article(self, article, data):
        nzf = article.nzf
        nzo = nzf.nzo

        if nzo.is_gone():
            # Do not discard this article because the
            # file might still be processed at this moment!!
            return

        art_id = article.get_art_id()
        if art_id:
            # Save data, but don't complain when the destination folder is missing,
            # because this flush may come after completion of the NZO.
            sabnzbd.save_data(data, art_id, nzo.workpath, do_pickle=False, silent=True)
        else:
            logging.warning("Flushing %s failed -> no art_id", article)

    def __add_to_cache(self, article, data):
        if article in self.__article_table:
            # Article is already cached: subtract the old size before replacing
            self.decrease_cache_size(len(self.__article_table[article]))
        else:
            self.__article_list.append(article)
        self.__article_table[article] = data
        self.increase_cache_size(len(data))


# Create the instance
ArticleCache()
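

# Illustrative end-to-end flow (hypothetical, for documentation only):
# the decoder stores a decoded article; the assembler fetches it back,
# transparently falling back to the on-disk copy if it was flushed.
#
#     cache = ArticleCache.do                 # singleton created above
#     cache.new_limit(32 * 1024 * 1024)       # 32 MiB memory cache
#     cache.save_article(article, data)       # decoder side
#     data = cache.load_article(article)      # assembler side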