From 29c727319d4375763164a44bfd423f6e3c5fcb73 Mon Sep 17 00:00:00 2001 From: jcfp Date: Tue, 2 Feb 2021 22:58:20 +0100 Subject: [PATCH] Test adding nzbs (#1760) * add tests for adding nzbs * restore clean_cache_dir fixture, unbreak utils tests * include tests for partial and malformed nzbs * test handling of prio from nzb metadata category * update params of test_adding_nzbs_malformed * add metadata to sabnews nzb creator * also test with size_limit * test prio with dupe detection * remove leftover todo entry * move pause and cleanup to fixture; rename functions --- sabnzbd/newsunpack.py | 14 +- sabnzbd/nzbstuff.py | 35 +- tests/conftest.py | 96 ++--- tests/sabnews.py | 8 +- tests/test_functional_adding_nzbs.py | 625 +++++++++++++++++++++++++++++++++ tests/test_functional_api.py | 8 +- tests/test_utils/test_cert_gen.py | 1 + tests/test_utils/test_diskspeed.py | 2 + tests/test_utils/test_internetspeed.py | 2 + tests/testhelper.py | 6 +- 10 files changed, 739 insertions(+), 58 deletions(-) create mode 100644 tests/test_functional_adding_nzbs.py diff --git a/sabnzbd/newsunpack.py b/sabnzbd/newsunpack.py index abf1b0d..3633f34 100644 --- a/sabnzbd/newsunpack.py +++ b/sabnzbd/newsunpack.py @@ -2350,11 +2350,21 @@ def pre_queue(nzo: NzbObject, pp, cat): ret = p.wait() logging.info("Pre-queue script returns %s and output=\n%s", ret, output) if ret == 0: + try: + # Extract category line from pre-queue output + pre_queue_category = output.splitlines()[3].strip("\r\n '\"") + except IndexError: + pre_queue_category = None n = 0 for line in output.split("\n"): line = line.strip("\r\n '\"") - if n < len(values) and line: - values[n] = line + if n < len(values): + if line: + values[n] = line + elif pre_queue_category and n in (2, 4, 5): + # Preserve empty pp, script, and priority lines to prevent + # pre-existing values from overriding cat.-based settings + values[n] = "" n += 1 accept = int_conv(values[0]) if accept < 1: diff --git a/sabnzbd/nzbstuff.py b/sabnzbd/nzbstuff.py index be7f2cb..272a556 100644 --- a/sabnzbd/nzbstuff.py +++ b/sabnzbd/nzbstuff.py @@ -613,6 +613,11 @@ class NzbObject(TryList): if not self.password: _, self.password = scan_password(os.path.splitext(filename)[0]) + # Create a record of the input for pp, script, and priority + input_pp = pp + input_script = script + input_priority = priority if priority != DEFAULT_PRIORITY else None + # Determine category and find pp/script values based on input # Later will be re-evaluated based on import steps if pp is None: @@ -800,6 +805,16 @@ class NzbObject(TryList): # Call the script accept, name, pp, cat_pp, script_pp, priority, group = sabnzbd.newsunpack.pre_queue(self, pp, cat) + if cat_pp: + # An explicit pp/script/priority set upon adding the job takes precedence + # over an implicit setting based on the category set by pre-queue + if input_priority and not priority: + priority = input_priority + if input_pp and not pp: + pp = input_pp + if input_script and not script_pp: + script_pp = input_script + # Accept or reject accept = int_conv(accept) if accept < 1: @@ -838,7 +853,7 @@ class NzbObject(TryList): # Pause if requested by the NZB-adding or the pre-queue script if self.priority == PAUSED_PRIORITY: self.pause() - self.priority = NORMAL_PRIORITY + self.priority = self.find_stateless_priority(self.cat) # Pause job when above size limit limit = cfg.size_limit.get_int() @@ -874,7 +889,7 @@ class NzbObject(TryList): # Only change priority it's currently set to duplicate, otherwise keep original one if self.priority == DUP_PRIORITY: - 
self.priority = NORMAL_PRIORITY + self.priority = self.find_stateless_priority(self.cat) # Check if there is any unwanted extension in plain sight in the NZB itself for nzf in self.files: @@ -1313,6 +1328,22 @@ class NzbObject(TryList): # Invalid value, set to normal priority self.priority = NORMAL_PRIORITY + def find_stateless_priority(self, category: str) -> int: + """Find a priority that doesn't set a job state, starting from the given category, + for jobs to fall back to after their priority was set to PAUSED or DUP. The fallback + priority cannot be another state-setting priority or FORCE; the latter could override + the job state immediately after it was set.""" + cat_options = [category] + if category != "*": + cat_options.append("default") + + for cat in cat_options: + prio = cat_to_opts(cat)[3] + if prio not in (DUP_PRIORITY, PAUSED_PRIORITY, FORCE_PRIORITY): + return prio + + return NORMAL_PRIORITY + @property def labels(self): """ Return (translated) labels of job """ diff --git a/tests/conftest.py b/tests/conftest.py index 56772b6..ac002da 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -30,11 +30,16 @@ from warnings import warn from tests.testhelper import * -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def clean_cache_dir(request): # Remove cache if already there - if os.path.isdir(SAB_CACHE_DIR): - shutil.rmtree(SAB_CACHE_DIR) + try: + if os.path.isdir(SAB_CACHE_DIR): + shutil.rmtree(SAB_CACHE_DIR) + # Create an empty placeholder + os.makedirs(SAB_CACHE_DIR) + except Exception: + pytest.fail("Failed to freshen up cache dir %s" % SAB_CACHE_DIR) yield request @@ -48,32 +53,13 @@ def clean_cache_dir(request): time.sleep(1) -@pytest.fixture(scope="session") -def run_sabnzbd_sabnews_and_selenium(clean_cache_dir): - """ - Start SABnzbd (with translations), SABNews and Selenium/Chromedriver, shared - among all testcases of the pytest session. A number of key configuration - parameters are defined in testhelper.py (SAB_* variables). - """ - - # Define a shutdown routine; directory cleanup is handled by clean_cache_dir() - def shutdown_all(): - """ Shutdown all services """ - # Shutdown SABNews - try: - sabnews_process.kill() - sabnews_process.communicate(timeout=10) - except: - warn("Failed to shutdown the sabnews process") - - # Shutdown Selenium/Chrome - try: - driver.close() - driver.quit() - except: - # If something else fails, this can cause very non-informative long tracebacks - warn("Failed to shutdown the selenium/chromedriver process") +@pytest.fixture(scope="module") +def run_sabnzbd(clean_cache_dir): + """Start SABnzbd (with translations). A number of key configuration parameters are defined + in testhelper.py (SAB_* variables). Scope is set to 'module' to prevent configuration + changes made during functional tests from causing failures in unrelated tests.""" + def shutdown_sabnzbd(): # Shutdown SABnzbd try: get_url_result("shutdown", SAB_HOST, SAB_PORT) @@ -84,7 +70,6 @@ def run_sabnzbd_sabnews_and_selenium(clean_cache_dir): warn("Failed to shutdown the sabnzbd process") # Copy basic config file with API key - os.makedirs(SAB_CACHE_DIR, exist_ok=True) shutil.copyfile(os.path.join(SAB_DATA_DIR, "sabnzbd.basic.ini"), os.path.join(SAB_CACHE_DIR, "sabnzbd.ini")) # Check if we have language files @@ -118,7 +103,27 @@ def run_sabnzbd_sabnews_and_selenium(clean_cache_dir): ] ) - # In the meantime, start Selenium and Chrome; + # Wait for SAB to respond + for _ in range(10): + try: + get_url_result() + # Woohoo, we're up! 
+            break
+        except requests.ConnectionError:
+            time.sleep(1)
+    else:
+        # Make sure we clean up
+        shutdown_sabnzbd()
+        raise requests.ConnectionError()
+
+    yield
+
+    shutdown_sabnzbd()
+
+
+@pytest.fixture(scope="session")
+def run_sabnews_and_selenium(request):
+    """ Start SABNews and Selenium/Chromedriver, shared across the pytest session. """

     # We only try Chrome for consistent results
     driver_options = ChromeOptions()
@@ -138,31 +143,30 @@ def run_sabnzbd_sabnews_and_selenium(clean_cache_dir):
     # Start the driver and pass it on to all the classes
     driver = webdriver.Chrome(options=driver_options)

-    for item in clean_cache_dir.node.items:
+    for item in request.node.items:
         parent_class = item.getparent(pytest.Class)
         parent_class.obj.driver = driver

     # Start SABNews
     sabnews_process = subprocess.Popen([sys.executable, os.path.join(SAB_BASE_DIR, "sabnews.py")])

-    # Wait for SAB to respond
-    for _ in range(10):
-        try:
-            get_url_result()
-            # Woohoo, we're up!
-            break
-        except requests.ConnectionError:
-            time.sleep(1)
-    else:
-        # Make sure we clean up
-        shutdown_all()
-        raise requests.ConnectionError()
-
     # Now we run the tests
     yield

-    # Shutdown gracefully
-    shutdown_all()
+    # Shutdown SABNews
+    try:
+        sabnews_process.kill()
+        sabnews_process.communicate(timeout=10)
+    except:
+        warn("Failed to shutdown the sabnews process")
+
+    # Shutdown Selenium/Chrome
+    try:
+        driver.close()
+        driver.quit()
+    except:
+        # If something else fails, this can cause very non-informative long tracebacks
+        warn("Failed to shutdown the selenium/chromedriver process")


 @pytest.fixture(scope="class")
diff --git a/tests/sabnews.py b/tests/sabnews.py
index 534e1b7..ed0400f 100644
--- a/tests/sabnews.py
+++ b/tests/sabnews.py
@@ -138,7 +138,7 @@ async def serve_sabnews(hostname, port):
     return server


-def create_nzb(nzb_file=None, nzb_dir=None):
+def create_nzb(nzb_file=None, nzb_dir=None, metadata=None):
     article_size = 500000
     files_for_nzb = []
     output_file = ""
@@ -168,6 +168,12 @@ def create_nzb(nzb_file=None, nzb_dir=None):
     nzb.write('\n')
     nzb.write('\n')

+    if metadata:
+        nzb.write("<head>\n")
+        for meta_name, meta_value in metadata.items():
+            nzb.write('<meta type="%s">%s</meta>\n' % (meta_name, meta_value))
+        nzb.write("</head>\n")
+
     nzb_time = time.time() - randint(0, int(time.time() - 746863566))

     for fl in files_for_nzb:
diff --git a/tests/test_functional_adding_nzbs.py b/tests/test_functional_adding_nzbs.py
new file mode 100644
index 0000000..7fe306c
--- /dev/null
+++ b/tests/test_functional_adding_nzbs.py
@@ -0,0 +1,625 @@
+#!/usr/bin/python3 -OO
+# Copyright 2007-2020 The SABnzbd-Team <team@sabnzbd.org>
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ +""" +tests.test_functional_adding_nzbs - Tests for settings interaction when adding NZBs +""" + +import os +import shutil +import stat +import sys +from random import choice, randint, sample +from string import ascii_lowercase, digits + +import sabnzbd.config +from sabnzbd.constants import ( + DUP_PRIORITY, + PAUSED_PRIORITY, + DEFAULT_PRIORITY, + LOW_PRIORITY, + NORMAL_PRIORITY, + HIGH_PRIORITY, + FORCE_PRIORITY, + REPAIR_PRIORITY, +) +from sabnzbd.database import _PP_LOOKUP + +from tests.testhelper import * + + +# Repair priority is out of scope for the purpose of these tests: it cannot be +# set as a default, upon adding a job, or from a pre-queue script. +# "None" is used to *not* set any particular priority at a given stage. + +# Define valid options for various stages +PRIO_OPTS_ADD = [ + DEFAULT_PRIORITY, + DUP_PRIORITY, + PAUSED_PRIORITY, + LOW_PRIORITY, + NORMAL_PRIORITY, + HIGH_PRIORITY, + FORCE_PRIORITY, + None, +] +PRIO_OPTS_PREQ = [ + DEFAULT_PRIORITY, + DUP_PRIORITY, + PAUSED_PRIORITY, + LOW_PRIORITY, + NORMAL_PRIORITY, + HIGH_PRIORITY, + FORCE_PRIORITY, + None, +] +PRIO_OPTS_ADD_CAT = [ + DEFAULT_PRIORITY, + PAUSED_PRIORITY, + LOW_PRIORITY, + NORMAL_PRIORITY, + HIGH_PRIORITY, + FORCE_PRIORITY, + None, +] +PRIO_OPTS_PREQ_CAT = [ + DEFAULT_PRIORITY, + PAUSED_PRIORITY, + LOW_PRIORITY, + NORMAL_PRIORITY, + HIGH_PRIORITY, + FORCE_PRIORITY, + None, +] +PRIO_OPTS_META_CAT = [ + DEFAULT_PRIORITY, + PAUSED_PRIORITY, + LOW_PRIORITY, + NORMAL_PRIORITY, + HIGH_PRIORITY, + FORCE_PRIORITY, + None, +] +# Valid priority values for the Default category (as determined by their availability from the interface) +VALID_DEFAULT_PRIORITIES = [PAUSED_PRIORITY, LOW_PRIORITY, NORMAL_PRIORITY, HIGH_PRIORITY, FORCE_PRIORITY] + +# Priorities that do *not* set a job state +REGULAR_PRIOS = [LOW_PRIORITY, NORMAL_PRIORITY, HIGH_PRIORITY, FORCE_PRIORITY] +# Priorities that set job states +STATE_PRIOS = [DUP_PRIORITY, PAUSED_PRIORITY] + +# Needed for translating priority values to names +ALL_PRIOS = { + DEFAULT_PRIORITY: "Default", + DUP_PRIORITY: "Duplicate", + PAUSED_PRIORITY: "Paused", + LOW_PRIORITY: "Low", + NORMAL_PRIORITY: "Normal", + HIGH_PRIORITY: "High", + FORCE_PRIORITY: "Force", + REPAIR_PRIORITY: "Repair", +} + +# Min/max size for random files used in generated NZBs (bytes) +MIN_FILESIZE = 128 +MAX_FILESIZE = 1024 + +# Tags to randomise category/script/nzb name +CAT_RANDOM = os.urandom(4).hex() +SCRIPT_RANDOM = os.urandom(4).hex() +NZB_RANDOM = os.urandom(4).hex() + + +class ModuleVars: + # Full path to script directory resp. 
nzb files, once in place/generated + SCRIPT_DIR = None + NZB_FILE = None + META_NZB_FILE = None + # Pre-queue script setup marker + PRE_QUEUE_SETUP_DONE = False + + +# Shared variables at module-level +VAR = ModuleVars() + + +@pytest.fixture(scope="function") +def pause_and_clear(): + # Pause the queue + assert get_api_result(mode="pause")["status"] is True + + yield + + # Delete all jobs from queue and history + for mode in ("queue", "history"): + get_api_result(mode=mode, extra_arguments={"name": "delete", "value": "all", "del_files": 1}) + + # Unpause the queue + assert get_api_result(mode="resume")["status"] is True + + +@pytest.mark.usefixtures("run_sabnzbd", "pause_and_clear") +class TestAddingNZBs: + def _setup_script_dir(self): + VAR.SCRIPT_DIR = os.path.join(SAB_CACHE_DIR, "scripts" + SCRIPT_RANDOM) + try: + os.makedirs(VAR.SCRIPT_DIR, exist_ok=True) + except Exception: + pytest.fail("Cannot create script_dir %s" % VAR.SCRIPT_DIR) + + json = get_api_result( + mode="set_config", + extra_arguments={ + "section": "misc", + "keyword": "script_dir", + "value": VAR.SCRIPT_DIR, + }, + ) + assert VAR.SCRIPT_DIR in json["config"]["misc"]["script_dir"] + + def _customize_pre_queue_script(self, priority, category): + """ Add a script that accepts the job and sets priority & category """ + script_name = "SCRIPT%s.py" % SCRIPT_RANDOM + try: + script_path = os.path.join(VAR.SCRIPT_DIR, script_name) + with open(script_path, "w") as f: + # line 1 = accept; 4 = category; 6 = priority + f.write( + "#!%s\n\nprint('1\\n\\n\\n%s\\n\\n%s\\n')" + % ( + sys.executable, + (category if category else ""), + (str(priority) if priority != None else ""), + ) + ) + if not sys.platform.startswith("win"): + os.chmod(script_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) + except Exception: + pytest.fail("Cannot add script %s to script_dir %s" % (script_name, VAR.SCRIPT_DIR)) + + if not VAR.PRE_QUEUE_SETUP_DONE: + # Set as pre-queue script + json = get_api_result( + mode="set_config", + extra_arguments={ + "section": "misc", + "keyword": "pre_script", + "value": script_name, + }, + ) + assert script_name in json["config"]["misc"]["pre_script"] + VAR.PRE_QUEUE_SETUP_DONE = True + + def _configure_cat(self, priority, tag): + category_name = "cat" + tag + CAT_RANDOM + category_config = { + "section": "categories", + "name": category_name, + "pp": choice(list(_PP_LOOKUP.keys())), + "script": "None", + "priority": priority if priority != None else DEFAULT_PRIORITY, + } + + # Add the category + json = get_api_result(mode="set_config", extra_arguments=category_config) + assert json["config"]["categories"][0]["name"] == category_name + if priority != None: + assert json["config"]["categories"][0]["priority"] == priority + + return category_name + + def _configure_default_category_priority(self, priority): + if priority not in VALID_DEFAULT_PRIORITIES: + priority = DEFAULT_PRIORITY + json = get_api_result( + mode="set_config", + extra_arguments={ + "section": "categories", + "name": "*", + "priority": priority, + }, + ) + assert ("*", priority) == (json["config"]["categories"][0]["name"], json["config"]["categories"][0]["priority"]) + + def _create_random_nzb(self, metadata=None): + # Create some simple, unique nzb + job_dir = os.path.join(SAB_CACHE_DIR, "NZB" + os.urandom(8).hex()) + try: + os.mkdir(job_dir) + job_file = "%s.%s" % ( + "".join(choice(ascii_lowercase + digits) for i in range(randint(6, 18))), + "".join(sample(ascii_lowercase, 3)), + ) + with open(os.path.join(job_dir, job_file), "wb") as f: + 
f.write(os.urandom(randint(MIN_FILESIZE, MAX_FILESIZE))) + except Exception: + pytest.fail("Failed to create random nzb") + + return create_nzb(job_dir, metadata=metadata) + + def _create_meta_nzb(self, cat_meta): + return self._create_random_nzb(metadata={"category": cat_meta}) + + def _expected_results(self, STAGES, return_state=None): + """ Figure out what priority and state the job should end up with """ + # Define a bunch of helpers + def sanitize_stages(hit_stage, STAGES): + # Fallback is always category-based, so nix any explicit priorities (stages 1, 3). + # This is conditional only because explicit priority-upon-adding takes precedence + # over implicit-from-pre-queue, as discussed in #1703. + if not (hit_stage == 4 and STAGES[1] != None): + STAGES[1] = None + STAGES[3] = None + + # If the category was set from pre-queue, it replaces any category set earlier + if hit_stage == 4: + STAGES[2] = None + STAGES[5] = None + if hit_stage == 2: + STAGES[5] = None + + return STAGES + + def handle_state_prio(hit_stage, STAGES, return_state): + """ Find the priority that should to be set after changing the job state """ + # Keep record of the priority that caused the initial hit (for verification of the job state later on) + if not return_state: + return_state = STAGES[hit_stage] + + # No point in trying to find a fallback + if hit_stage == 0: + return NORMAL_PRIORITY, return_state + + STAGES = sanitize_stages(hit_stage, STAGES) + + # Work forward to find the priority prior to the hit_stage + pre_state_prio = None + pre_state_stage = None + # default cat -> implicit meta -> implicit on add -> explicit on add -> implicit pre-q -> explicit pre-q + for stage in (0, 5, 2, 1, 4, 3): + if stage == hit_stage: + if hit_stage == 1 and STAGES[4] != None: + # An explicit state-setting priority still has to deal with the category from pre-queue + # for fallback purposes, unlike non-state-setting priorities-on-add that override it. + continue + else: + break + if STAGES[stage] != None: + pre_state_prio = STAGES[stage] + pre_state_stage = stage + + if pre_state_prio != None and LOW_PRIORITY <= pre_state_prio <= HIGH_PRIORITY: + return pre_state_prio, return_state + else: + # The next-in-line prio is unsuitable; recurse with relevant stages zero'ed out + STAGES[hit_stage] = None + if pre_state_stage: + if pre_state_prio == DEFAULT_PRIORITY: + handle_default_cat(pre_state_stage, STAGES, return_state) + else: + STAGES[pre_state_stage] = None + # Sanitize again, with 'pre_state_stage' as the new hit_stage. This is needed again + # in cases such as hit_stage 3 setting a job state, with a fallback from stage 4. 
+ sanitize_stages(pre_state_stage, STAGES) + return self._expected_results(STAGES, return_state) + + def handle_default_cat(hit_stage, STAGES, return_state): + """ Figure out the (category) default priority """ + STAGES = sanitize_stages(hit_stage, STAGES) + + # Strip the current -100 hit before recursing + STAGES[hit_stage] = None + + return self._expected_results(STAGES, return_state) + + # Work backwards through all stages: + # explicit pre-q -> implicit pre-q -> explicit on add -> implicit on add -> implicit meta + for stage in (3, 4, 1, 2, 5): + if STAGES[stage] != None: + if stage == 4 and STAGES[1] != None: + # Explicit priority on add takes precedence over implicit-from-pre-queue + continue + if STAGES[stage] in REGULAR_PRIOS: + return STAGES[stage], return_state + if STAGES[stage] in STATE_PRIOS: + return handle_state_prio(stage, STAGES, return_state) + if STAGES[stage] == DEFAULT_PRIORITY: + return handle_default_cat(stage, STAGES, return_state) + + # # ...and finally the Default category (stage 0) + if STAGES[0] not in (None, DEFAULT_PRIORITY): + if STAGES[0] in REGULAR_PRIOS: + # Avoid falling back to priority Force after setting a job state + if not (return_state in STATE_PRIOS and STAGES[0] == FORCE_PRIORITY): + return STAGES[0], return_state + else: + return NORMAL_PRIORITY, return_state + if STAGES[0] in STATE_PRIOS: + return handle_state_prio(0, STAGES, return_state) + + # The default of defaults... + return NORMAL_PRIORITY, return_state + + def _prep_priority_tester(self, prio_def_cat, prio_add, prio_add_cat, prio_preq, prio_preq_cat, prio_meta_cat): + if not VAR.SCRIPT_DIR: + self._setup_script_dir() + if not VAR.NZB_FILE: + VAR.NZB_FILE = self._create_random_nzb() + + # Set the priority for the Default category + self._configure_default_category_priority(prio_def_cat) + + # Setup categories + cat_meta = None + if prio_meta_cat != None: + cat_meta = self._configure_cat(prio_meta_cat, "meta") + if not VAR.META_NZB_FILE: + VAR.META_NZB_FILE = self._create_meta_nzb(cat_meta) + cat_add = None + if prio_add_cat != None: + cat_add = self._configure_cat(prio_add_cat, "add") + + cat_preq = None + if prio_preq_cat != None: + cat_preq = self._configure_cat(prio_preq_cat, "pre") + + # Setup the pre-queue script + self._customize_pre_queue_script(prio_preq, cat_preq) + + # Queue the job, store the nzo_id + extra = {"name": VAR.META_NZB_FILE if cat_meta else VAR.NZB_FILE} + if cat_add: + extra["cat"] = cat_add + if prio_add != None: + extra["priority"] = prio_add + nzo_id = ",".join(get_api_result(mode="addlocalfile", extra_arguments=extra)["nzo_ids"]) + + # Fetch the queue output for the current job + return get_api_result(mode="queue", extra_arguments={"nzo_ids": nzo_id})["queue"]["slots"][0] + + def _priority_tester(self, prio_def_cat, prio_add, prio_add_cat, prio_preq, prio_preq_cat, prio_meta_cat): + # Setup the current test job, and fetch its queue output + job = self._prep_priority_tester(prio_def_cat, prio_add, prio_add_cat, prio_preq, prio_preq_cat, prio_meta_cat) + + # Determine the expected results + expected_prio, expected_state = self._expected_results( + [prio_def_cat, prio_add, prio_add_cat, prio_preq, prio_preq_cat, prio_meta_cat] + ) + + # Verify the results; queue output uses a string representation for the priority + assert ALL_PRIOS.get(expected_prio) == job["priority"] + if expected_state: + # Also check the correct state or label was set + if expected_state == DUP_PRIORITY: + assert "DUPLICATE" in job["labels"] + if expected_state == PAUSED_PRIORITY: + assert 
"Paused" == job["status"] + + # Caution: a full run is good for 90k+ tests + # @pytest.mark.parametrize("prio_meta_cat", PRIO_OPTS_META_CAT) + # @pytest.mark.parametrize("prio_def_cat", VALID_DEFAULT_PRIORITIES) + # @pytest.mark.parametrize("prio_add", PRIO_OPTS_ADD) + # @pytest.mark.parametrize("prio_add_cat", PRIO_OPTS_ADD_CAT) + # @pytest.mark.parametrize("prio_preq", PRIO_OPTS_PREQ) + # @pytest.mark.parametrize("prio_preq_cat", PRIO_OPTS_PREQ_CAT) + + @pytest.mark.parametrize("prio_meta_cat", sample(PRIO_OPTS_META_CAT, 2)) + @pytest.mark.parametrize("prio_def_cat", sample(VALID_DEFAULT_PRIORITIES, 2)) + @pytest.mark.parametrize("prio_add", sample(PRIO_OPTS_ADD, 3)) + @pytest.mark.parametrize("prio_add_cat", sample(PRIO_OPTS_ADD_CAT, 2)) + @pytest.mark.parametrize("prio_preq", sample(PRIO_OPTS_PREQ, 2)) + @pytest.mark.parametrize("prio_preq_cat", sample(PRIO_OPTS_PREQ_CAT, 2)) + def test_adding_nzbs_priority_sample( + self, prio_def_cat, prio_add, prio_add_cat, prio_preq, prio_preq_cat, prio_meta_cat + ): + self._priority_tester(prio_def_cat, prio_add, prio_add_cat, prio_preq, prio_preq_cat, prio_meta_cat) + + @pytest.mark.parametrize( + "prio_def_cat, prio_add, prio_add_cat, prio_preq, prio_preq_cat, prio_meta_cat", + [ + # Specific triggers for fixed bugs + (-1, -2, None, None, None, None), # State-setting priorities always fell back to Normal + (-1, -3, None, None, None, None), + (1, None, -2, None, None, None), + (2, None, None, -2, None, None), + (2, None, None, -3, None, None), + (2, -2, None, -3, None, None), + (0, -3, None, None, None, None), + (0, 2, None, None, 1, None), # Explicit priority on add was bested by implicit from pre-queue + (1, None, None, None, -1, None), # Category-based values from pre-queue didn't work at all + # Checks for test code regressions + (-2, -100, 2, None, None, None), + (-2, 0, 2, -100, None, None), + (1, 2, 0, -100, None, None), + (-2, None, -2, None, 2, None), + (-2, None, -1, None, 1, None), + (2, None, -1, None, -2, None), + (-2, -3, 1, None, None, None), + (2, 2, None, -2, None, None), + (2, 1, None, -2, None, None), + (1, -2, 0, None, None, None), + (0, -3, None, None, 1, None), + (0, -1, -1, -3, 2, None), + (0, 2, None, -2, None, -1), + (1, -2, -100, None, None, -1), + (1, None, None, None, None, -1), + (-1, None, None, None, None, 1), + (0, None, None, None, None, None), + ], + ) + def test_adding_nzbs_priority_triggers( + self, prio_def_cat, prio_add, prio_add_cat, prio_preq, prio_preq_cat, prio_meta_cat + ): + self._priority_tester(prio_def_cat, prio_add, prio_add_cat, prio_preq, prio_preq_cat, prio_meta_cat) + + def test_adding_nzbs_partial(self): + """Test adding parts of an NZB file, cut off somewhere in the middle to simulate + the effects of an interrupted download or bad hardware. 
Should fail, of course.""" + if not VAR.NZB_FILE: + VAR.NZB_FILE = self._create_random_nzb() + + nzb_basedir, nzb_basename = os.path.split(VAR.NZB_FILE) + nzb_size = os.stat(VAR.NZB_FILE).st_size + part_size = round(randint(20, 80) / 100 * nzb_size) + first_part = os.path.join(nzb_basedir, "part1_of_" + nzb_basename) + second_part = os.path.join(nzb_basedir, "part2_of_" + nzb_basename) + + with open(VAR.NZB_FILE, "rb") as nzb_in: + for nzb_part, chunk in (first_part, part_size), (second_part, -1): + with open(nzb_part, "wb") as nzb_out: + nzb_out.write(nzb_in.read(chunk)) + + for nzb_part in first_part, second_part: + json = get_api_result(mode="addlocalfile", extra_arguments={"name": nzb_part}) + assert json["status"] is False + assert json["nzo_ids"] == [] + os.remove(nzb_part) + + @pytest.mark.parametrize( + "keep_first, keep_last, strip_first, strip_last, should_work", + [ + # Keep parts + (6, 3, 0, 0, False), # Remove all segments content + (6, 0, 0, 0, False), + (5, 2, 0, 0, False), # Remove all segments + (5, 0, 0, 0, False), + (4, 2, 0, 0, False), # Remove all groups + (3, 1, 0, 0, False), # Remove all files + # Strip parts + (0, 0, 1, 0, True), # Strip '?xml' line (survivable) + (0, 0, 2, 0, True), # Also strip 'doctype' line (survivable) + (0, 0, 3, 0, False), # Also strip 'nzb xmlns' line + (0, 0, 0, 1, False), # Forget the 'nzb' closing tag + (0, 0, 0, 2, False), # Also forget the (last) 'file' closing tag + (0, 0, 0, 3, False), # Also forget the (last) 'segment' closing tag + ], + ) + def test_adding_nzbs_malformed(self, keep_first, keep_last, strip_first, strip_last, should_work): + """ Test adding broken, empty, or otherwise malformed NZB file """ + if not VAR.NZB_FILE: + VAR.NZB_FILE = self._create_random_nzb() + + with open(VAR.NZB_FILE, "rt") as nzb_in: + nzb_lines = nzb_in.readlines() + assert len(nzb_lines) >= 9 + + broken_nzb_basename = "broken_" + os.urandom(4).hex() + ".nzb" + broken_nzb = os.path.join(SAB_CACHE_DIR, broken_nzb_basename) + with open(broken_nzb, "wt") as nzb_out: + # Keep only first x, last y lines + if keep_first: + nzb_out.write("".join(nzb_lines[:keep_first])) + elif strip_first: + nzb_out.write("".join(nzb_lines[strip_first:])) + if keep_last: + nzb_out.write("".join(nzb_lines[(-1 * keep_last) :])) + elif strip_last: + nzb_out.write("".join(nzb_lines[: (-1 * strip_last)])) + + json = get_api_result(mode="warnings", extra_arguments={"name": "clear"}) + json = get_api_result(mode="addlocalfile", extra_arguments={"name": broken_nzb}) + assert json["status"] is should_work + assert len(json["nzo_ids"]) == int(should_work) + + json = get_api_result(mode="warnings") + assert (len(json["warnings"]) == 0) is should_work + if not should_work: + for warning in range(0, len(json["warnings"])): + assert (("Empty NZB file" or "Failed to import") and broken_nzb_basename) in json["warnings"][warning][ + "text" + ] + + os.remove(broken_nzb) + + @pytest.mark.parametrize("prio_meta_cat", sample(PRIO_OPTS_META_CAT, 1)) + @pytest.mark.parametrize("prio_def_cat", sample(VALID_DEFAULT_PRIORITIES, 1)) + @pytest.mark.parametrize("prio_add", PRIO_OPTS_ADD) + def test_adding_nzbs_size_limit(self, prio_meta_cat, prio_def_cat, prio_add): + """ Verify state and priority of a job exceeding the size_limit """ + # Set size limit + json = get_api_result( + mode="set_config", extra_arguments={"section": "misc", "keyword": "size_limit", "value": MIN_FILESIZE - 1} + ) + assert int(json["config"]["misc"]["size_limit"]) < MIN_FILESIZE + + job = self._prep_priority_tester(prio_def_cat, 
prio_add, None, None, None, prio_meta_cat)
+
+        # Verify job is paused and low priority, and correctly labeled
+        assert job["status"] == "Paused"
+        assert job["priority"] == ALL_PRIOS.get(-1)
+        assert "TOO LARGE" in job["labels"]
+
+        # Unset size limit
+        json = get_api_result(
+            mode="set_config", extra_arguments={"section": "misc", "keyword": "size_limit", "value": ""}
+        )
+
+    @pytest.mark.parametrize("prio_def_cat", sample(VALID_DEFAULT_PRIORITIES, 2))
+    @pytest.mark.parametrize("prio_add", PRIO_OPTS_ADD)
+    @pytest.mark.parametrize("prio_add_cat", sample(PRIO_OPTS_ADD_CAT, 1))
+    @pytest.mark.parametrize("prio_preq", sample(PRIO_OPTS_PREQ, 1))
+    @pytest.mark.parametrize("prio_preq_cat", sample(PRIO_OPTS_PREQ_CAT, 2))
+    def test_adding_nzbs_duplicate_pausing(self, prio_def_cat, prio_add, prio_add_cat, prio_preq, prio_preq_cat):
+        # Set an nzb backup directory
+        try:
+            backup_dir = os.path.join(SAB_CACHE_DIR, "nzb_backup_dir" + os.urandom(4).hex())
+            assert (
+                get_api_result(
+                    mode="set_config",
+                    extra_arguments={"section": "misc", "keyword": "nzb_backup_dir", "value": backup_dir},
+                )["config"]["misc"]["nzb_backup_dir"]
+                == backup_dir
+            )
+        except Exception:
+            pytest.fail("Cannot create nzb_backup_dir %s" % backup_dir)
+
+        # Add the job a first time
+        job = self._prep_priority_tester(None, None, None, None, None, None)
+        assert job["status"] == "Queued"
+
+        # Set duplicate handling to 2 (Pause)
+        assert (
+            get_api_result(mode="set_config", extra_arguments={"section": "misc", "keyword": "no_dupes", "value": 2})[
+                "config"
+            ]["misc"]["no_dupes"]
+            == 2
+        )
+
+        expected_prio, _ = self._expected_results(
+            [prio_def_cat, prio_add, prio_add_cat, prio_preq, prio_preq_cat, None]
+        )
+
+        job = self._prep_priority_tester(prio_def_cat, prio_add, prio_add_cat, prio_preq, prio_preq_cat, None)
+
+        # Verify job is paused and correctly labeled, and given the right (fallback) priority
+        assert "DUPLICATE" in job["labels"]
+        assert job["priority"] == ALL_PRIOS.get(expected_prio)
+        # Priority Force overrules the duplicate pause
+        assert job["status"] == ("Paused" if expected_prio != FORCE_PRIORITY else "Downloading")
+
+        # Reset duplicate handling (0) and nzb_backup_dir ("") to their defaults
+        get_api_result(mode="set_config_default", extra_arguments={"keyword": "no_dupes"})
+        get_api_result(mode="set_config_default", extra_arguments={"keyword": "nzb_backup_dir"})
+
+        # Remove backup_dir
+        for timer in range(0, 5):
+            try:
+                shutil.rmtree(backup_dir)
+                break
+            except OSError:
+                time.sleep(1)
+        else:
+            pytest.fail("Failed to erase nzb_backup_dir %s" % backup_dir)
diff --git a/tests/test_functional_api.py b/tests/test_functional_api.py
index 61afa4e..bb0c800 100644
--- a/tests/test_functional_api.py
+++ b/tests/test_functional_api.py
@@ -181,7 +181,7 @@ class ApiTestFunctions:
         assert len(self._get_api_json("queue")["queue"]["slots"]) == 0


-@pytest.mark.usefixtures("run_sabnzbd_sabnews_and_selenium")
+@pytest.mark.usefixtures("run_sabnzbd")
 class TestOtherApi(ApiTestFunctions):
     """ Test API function not directly involving either history or queue """

@@ -404,7 +404,7 @@ class TestOtherApi(ApiTestFunctions):
         )


-@pytest.mark.usefixtures("run_sabnzbd_sabnews_and_selenium")
+@pytest.mark.usefixtures("run_sabnzbd")
 class TestQueueApi(ApiTestFunctions):
     """ Test queue-related API responses """

@@ -897,7 +897,7 @@ class TestQueueApi(ApiTestFunctions):
         assert json["nzf_ids"] == []


-@pytest.mark.usefixtures("run_sabnzbd_sabnews_and_selenium", "generate_fake_history", "update_history_specs")
+@pytest.mark.usefixtures("run_sabnzbd", "generate_fake_history", "update_history_specs")
 class
TestHistoryApi(ApiTestFunctions): """ Test history-related API responses """ @@ -1087,7 +1087,7 @@ class TestHistoryApi(ApiTestFunctions): assert slot["status"] != Status.COMPLETED -@pytest.mark.usefixtures("run_sabnzbd_sabnews_and_selenium", "generate_fake_history", "update_history_specs") +@pytest.mark.usefixtures("run_sabnzbd", "generate_fake_history", "update_history_specs") class TestHistoryApiPart2(ApiTestFunctions): """Test history-related API responses, part 2. A separate testcase is needed because the previous one ran out of history entries to delete.""" diff --git a/tests/test_utils/test_cert_gen.py b/tests/test_utils/test_cert_gen.py index e1d9904..53622d1 100644 --- a/tests/test_utils/test_cert_gen.py +++ b/tests/test_utils/test_cert_gen.py @@ -29,6 +29,7 @@ from sabnzbd.utils.certgen import generate_key, generate_local_cert from tests.testhelper import * +@pytest.mark.usefixtures("clean_cache_dir") class TestCertGen: def test_generate_key_default(self): # Generate private key with default key_size and file name diff --git a/tests/test_utils/test_diskspeed.py b/tests/test_utils/test_diskspeed.py index f41a43c..e42ab6e 100644 --- a/tests/test_utils/test_diskspeed.py +++ b/tests/test_utils/test_diskspeed.py @@ -20,11 +20,13 @@ tests.test_utils.test_diskspeed - Testing SABnzbd diskspeed """ import os +import pytest import tempfile from sabnzbd.utils.diskspeed import diskspeedmeasure from tests.testhelper import SAB_CACHE_DIR +@pytest.mark.usefixtures("clean_cache_dir") class TestDiskSpeed: """ test sabnzbd.utils.diskspeed """ diff --git a/tests/test_utils/test_internetspeed.py b/tests/test_utils/test_internetspeed.py index 80fec49..0f6279a 100644 --- a/tests/test_utils/test_internetspeed.py +++ b/tests/test_utils/test_internetspeed.py @@ -18,10 +18,12 @@ """ tests.test_utils.test_internetspeed - Testing SABnzbd internetspeed """ +import pytest from sabnzbd.utils.internetspeed import internetspeed, measurespeed, SizeUrlList +@pytest.mark.usefixtures("clean_cache_dir") class TestInternetSpeed: """This class contains tests to measure internet speed with an active and inactive connection diff --git a/tests/testhelper.py b/tests/testhelper.py index 4f04c03..81db5c0 100644 --- a/tests/testhelper.py +++ b/tests/testhelper.py @@ -138,10 +138,10 @@ def get_api_result(mode, host=SAB_HOST, port=SAB_PORT, extra_arguments={}): return r.json() -def create_nzb(nzb_dir): +def create_nzb(nzb_dir, metadata=None): """ Create NZB from directory using SABNews """ nzb_dir_full = os.path.join(SAB_DATA_DIR, nzb_dir) - return tests.sabnews.create_nzb(nzb_dir=nzb_dir_full) + return tests.sabnews.create_nzb(nzb_dir=nzb_dir_full, metadata=metadata) def create_and_read_nzb(nzbdir): @@ -220,7 +220,7 @@ class FakeHistoryDB(db.HistoryDB): ) -@pytest.mark.usefixtures("run_sabnzbd_sabnews_and_selenium") +@pytest.mark.usefixtures("run_sabnzbd", "run_sabnews_and_selenium") class SABnzbdBaseTest: def no_page_crash(self): # Do a base test if CherryPy did not report test
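
Note on the pre-queue convention relied on above: pre_queue() reads the script's stdout as
seven lines (accept, name, pp, category, script, priority, group), which is why the
newsunpack.py hunk takes output.splitlines()[3] as the category and the test's
_customize_pre_queue_script prints "1\n\n\n<category>\n\n<priority>\n". A minimal sketch of
such a script, with hypothetical category/priority values and all other fields left empty so
SABnzbd keeps its own settings, could look like this:

#!/usr/bin/env python3
# Illustrative pre-queue script (hypothetical values), emitting the seven lines
# read back by sabnzbd.newsunpack.pre_queue(), in order:
# accept, name, pp, category, script, priority, group.
print("1")       # 1: accept the job (values below 1 reject it)
print("")        # 2: keep the original job name
print("")        # 3: pp: no override
print("movies")  # 4: category (hypothetical)
print("")        # 5: script: no override
print("-1")      # 6: priority (-1 = Low); empty means no override
print("")        # 7: group: no override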