From: Jason Ish
Date: Wed, 28 Jun 2023 21:22:58 +0000 (-0600)
Subject: runner: allow a test to be retried
X-Git-Tag: suricata-6.0.16~19
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=e8dd5f432926be08049ba417df1fe4224cb1d8fd;p=thirdparty%2Fsuricata-verify.git

runner: allow a test to be retried

Add a new parameter, "retry", that takes a count. If the checks fail,
the test will be re-run. This can help deal with failures in tests
that are sensitive to timing.
---

diff --git a/README.md b/README.md
index 169f0128e..cd82a0e5f 100644
--- a/README.md
+++ b/README.md
@@ -96,6 +96,12 @@ command: |
   ${SRCDIR}/src/suricata -T -c ${TEST_DIR}/suricata.yaml -vvv \
       -l ${TEST_DIR}/output --set default-rule-path="${TEST_DIR}"
 
+# Retry a test 3 more times on failure. Some tests are subject to
+# timing errors on CI systems and this can help filter out the noise
+# of tests that fail in such environments. By default, tests are only
+# run once.
+retry: 3
+
 # Execute Suricata with the test parameters this many times. All checks will
 # done after each iteration.
 count: 10
diff --git a/run.py b/run.py
index 7a67738af..600eb5a9d 100755
--- a/run.py
+++ b/run.py
@@ -687,7 +687,6 @@ class TestRunner:
         return env
 
     def run(self, outdir):
-
         if not self.force:
             self.check_requires()
             self.check_skip()
@@ -723,78 +722,86 @@ class TestRunner:
         else:
             expected_exit_code = 0
 
-        for _ in range(count):
+        retries = self.config.get("retry", 1)
 
-            # Cleanup the output directory.
-            if os.path.exists(self.output):
-                shutil.rmtree(self.output)
-            os.makedirs(self.output)
-            self.setup()
+        while True:
+            retries -= 1
+            for _ in range(count):
 
-            stdout = open(os.path.join(self.output, "stdout"), "wb")
-            stderr = open(os.path.join(self.output, "stderr"), "wb")
+                # Cleanup the output directory.
+                if os.path.exists(self.output):
+                    shutil.rmtree(self.output)
+                os.makedirs(self.output)
+                self.setup()
+
+                stdout = open(os.path.join(self.output, "stdout"), "wb")
+                stderr = open(os.path.join(self.output, "stderr"), "wb")
 
-            if shell:
-                template = string.Template(args)
-                cmdline = template.substitute(safe_env)
-            else:
-                for a in range(len(args)):
-                    args[a] = string.Template(args[a]).substitute(safe_env)
-                cmdline = " ".join(args) + "\n"
+                if shell:
+                    template = string.Template(args)
+                    cmdline = template.substitute(safe_env)
+                else:
+                    for a in range(len(args)):
+                        args[a] = string.Template(args[a]).substitute(safe_env)
+                    cmdline = " ".join(args) + "\n"
 
-            open(os.path.join(self.output, "cmdline"), "w").write(cmdline)
+                open(os.path.join(self.output, "cmdline"), "w").write(cmdline)
 
-            if self.verbose:
-                print("Executing: {}".format(cmdline.strip()))
+                if self.verbose:
+                    print("Executing: {}".format(cmdline.strip()))
 
-            p = subprocess.Popen(
-                args, shell=shell, cwd=self.directory, env=env,
-                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+                p = subprocess.Popen(
+                    args, shell=shell, cwd=self.directory, env=env,
+                    stdout=subprocess.PIPE, stderr=subprocess.PIPE)
 
-            # used to get a return value from the threads
-            self.utf8_errors=[]
-            self.start_reader(p.stdout, stdout)
-            self.start_reader(p.stderr, stderr)
-            for r in self.readers:
-                try:
-                    r.join(timeout=PROC_TIMEOUT)
-                except:
-                    print("stdout/stderr reader timed out, terminating")
-                    r.terminate()
+                # used to get a return value from the threads
+                self.utf8_errors=[]
+                self.start_reader(p.stdout, stdout)
+                self.start_reader(p.stderr, stderr)
+                for r in self.readers:
+                    try:
+                        r.join(timeout=PROC_TIMEOUT)
+                    except:
+                        print("stdout/stderr reader timed out, terminating")
+                        r.terminate()
 
-            try:
-                r = p.wait(timeout=PROC_TIMEOUT)
-            except:
-                print("Suricata timed out, terminating")
-                p.terminate()
-                raise TestError("timed out when expected exit code %d" % (
-                    expected_exit_code));
+                try:
+                    r = p.wait(timeout=PROC_TIMEOUT)
+                except:
+                    print("Suricata timed out, terminating")
+                    p.terminate()
+                    raise TestError("timed out when expected exit code %d" % (
+                        expected_exit_code));
 
-            if len(self.utf8_errors) > 0:
-                raise TestError("got utf8 decode errors %s" % self.utf8_errors);
+                if len(self.utf8_errors) > 0:
+                    raise TestError("got utf8 decode errors %s" % self.utf8_errors);
 
-            if r != expected_exit_code:
-                raise TestError("got exit code %d, expected %d" % (
-                    r, expected_exit_code));
+                if r != expected_exit_code:
+                    raise TestError("got exit code %d, expected %d" % (
+                        r, expected_exit_code));
 
-        check_value = self.check()
+            check_value = self.check()
+
+            if check_value["failure"] and retries > 0:
+                print("===> {}: Retrying".format(os.path.basename(self.directory)))
+                continue
 
-        if VALIDATE_EVE:
-            check_output = subprocess.call([os.path.join(TOPDIR, "check-eve.py"), outdir, "-q", "-s", os.path.join(self.cwd, "etc", "schema.json")])
-            if check_output != 0:
-                raise TestError("Invalid JSON schema")
+            if VALIDATE_EVE:
+                check_output = subprocess.call([os.path.join(TOPDIR, "check-eve.py"), outdir, "-q", "-s", os.path.join(self.cwd, "etc", "schema.json")])
+                if check_output != 0:
+                    raise TestError("Invalid JSON schema")
 
-        if not check_value["failure"] and not check_value["skipped"]:
-            if not self.quiet:
-                if os.path.basename(os.path.dirname(self.directory)) != "tests":
-                    path_name = os.path.join(os.path.basename(os.path.dirname(self.directory)), self.name)
-                else:
-                    path_name = (os.path.basename(self.directory))
-                print("===> %s: OK%s" % (path_name, " (%dx)" % count if count > 1 else ""))
-        elif not check_value["failure"]:
-            if not self.quiet:
-                print("===> {}: OK (checks: {}, skipped: {})".format(os.path.basename(self.directory), sum(check_value.values()), check_value["skipped"]))
-        return check_value
+            if not check_value["failure"] and not check_value["skipped"]:
+                if not self.quiet:
+                    if os.path.basename(os.path.dirname(self.directory)) != "tests":
+                        path_name = os.path.join(os.path.basename(os.path.dirname(self.directory)), self.name)
+                    else:
+                        path_name = (os.path.basename(self.directory))
+                    print("===> %s: OK%s" % (path_name, " (%dx)" % count if count > 1 else ""))
+            elif not check_value["failure"]:
+                if not self.quiet:
+                    print("===> {}: OK (checks: {}, skipped: {})".format(os.path.basename(self.directory), sum(check_value.values()), check_value["skipped"]))
+            return check_value
 
     def pre_check(self):
         if "pre-check" in self.config:
diff --git a/tests/threshold/threshold-config-rate-filter-alert-pair/test.yaml b/tests/threshold/threshold-config-rate-filter-alert-pair/test.yaml
index 8e42ac187..98cec2d00 100644
--- a/tests/threshold/threshold-config-rate-filter-alert-pair/test.yaml
+++ b/tests/threshold/threshold-config-rate-filter-alert-pair/test.yaml
@@ -7,6 +7,8 @@ args:
   - --set threshold-file=${TEST_DIR}/threshold.config
   - --simulate-ips
 
+retry: 3
+
 checks:
   - filter:
       count: 19
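For illustration, the retry semantics added by this patch can be summarised with the minimal, self-contained Python sketch below. It is not part of the patch: run_once(), run_with_retry() and the inline config dict are hypothetical stand-ins, and the real TestRunner.run() additionally wraps the "count" loop, EVE schema validation and result reporting shown in the diff above.

# Minimal sketch of the retry pattern added in this patch. "run_once"
# and the inline config are hypothetical stand-ins for one Suricata
# run plus its checks and for the parsed test.yaml.

import random


def run_once() -> bool:
    """Pretend to run Suricata once and evaluate the test's checks."""
    return random.random() > 0.3  # simulate a flaky, timing-sensitive test


def run_with_retry(config: dict) -> bool:
    # Default of 1 keeps the old behaviour of a single attempt.
    retries = config.get("retry", 1)
    while True:
        retries -= 1
        passed = run_once()
        if not passed and retries > 0:
            print("===> example-test: Retrying")
            continue
        return passed


if __name__ == "__main__":
    # Mirrors a test.yaml that sets "retry: 3".
    print("PASS" if run_with_retry({"retry": 3}) else "FAIL")

With this arithmetic the configured value bounds the total number of attempts, and omitting "retry" keeps the previous single-run behaviour.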