Add a function to show the stats, so we can decide when to print them.
This slightly adjusts the output, so that any 'test not found' message
appears on its own line after all other output.
The 'failures' message now appears in lower case, so update pytest
accordingly.
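
For illustration (the counts are made up; the format comes from the new
ut_report() function below), a single-suite run now ends with a summary
like:

   Tests run: 27, skipped: 2, failures: 0

while a multi-suite run (run_count > 1) ends with:

   Suites run: 3, total tests run: 81, failures: 0
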
Signed-off-by: Simon Glass <sjg@chromium.org>
ut_init_state(&uts);
ret = ut_run_list(&uts, "spl", NULL, tests, count,
state->select_unittests, 1, false, NULL);
+ ut_report(&uts.cur, 1);
ut_uninit_state(&uts);
/* continue execution into U-Boot */
}
const char *select_name, int runs_per_test, bool force_run,
const char *test_insert);
+/**
+ * ut_report() - Report stats on a test run
+ *
+ * @stats: Stats to show
+ * @run_count: Number of suites that were run
+ */
+void ut_report(struct ut_stats *stats, int run_count);
+
#endif
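
As a rough usage sketch (hypothetical caller and values; only struct
ut_stats, its test_count/skip_count/fail_count fields and ut_report()
itself come from this patch):

	struct ut_stats stats = { 0 };

	stats.test_count = 12;	/* tests executed */
	stats.skip_count = 1;	/* tests skipped */
	stats.fail_count = 0;	/* tests failed */

	/* Single suite: prints "Tests run: 12, skipped: 1, failures: 0" */
	ut_report(&stats, 1);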
cons = u_boot_console
cons.restart_uboot_with_flags(['-u', '-k', ut_spl_subtest.split()[1]])
output = cons.get_spawn_output().replace('\r', '')
- assert 'Failures: 0' in output
+ assert 'failures: 0' in output
finally:
# Restart afterward in case a non-SPL test is run next. This should not
# happen since SPL tests are run in their own invocation of test.py, but
# Check the FIT offsets look correct
output = cons.run_command('ut upl -f upl_test_info_norun')
- assert 'Failures: 0' in output
+ assert 'failures: 0' in output
assert 'Unknown command \'quux\' - try \'help\'' in output
else:
output = u_boot_console.run_command('ut ' + ut_subtest)
- assert output.endswith('Failures: 0')
+ assert output.endswith('failures: 0')
with cons.log.section('Kernel load'):
output = cons.run_command_list(cmd.splitlines())
- assert 'Failures: 0' in output[-1]
+ assert 'failures: 0' in output[-1]
cons = u_boot_console
cons.restart_uboot_with_flags(['-u', '-k', ut_vpl_subtest.split()[1]])
output = cons.get_spawn_output().replace('\r', '')
- assert 'Failures: 0' in output
+ assert 'failures: 0' in output
finally:
# Restart afterward in case a non-VPL test is run next. This should not
# happen since VPL tests are run in their own invocation of test.py, but
return uts->cur.fail_count ? -EBADF : 0;
}
+void ut_report(struct ut_stats *stats, int run_count)
+{
+ if (run_count > 1)
+ printf("Suites run: %d, total tests", run_count);
+ else
+ printf("Tests");
+ printf(" run: %d, ", stats->test_count);
+ if (stats->skip_count)
+ printf("skipped: %d, ", stats->skip_count);
+ printf("failures: %d\n", stats->fail_count);
+}
+
int ut_run_list(struct unit_test_state *uts, const char *category,
const char *prefix, struct unit_test *tests, int count,
const char *select_name, int runs_per_test, bool force_run,
if (has_dm_tests)
dm_test_restore(uts->of_root);
- printf("Tests run: %d, ", uts->cur.test_count);
- if (uts->cur.skip_count)
- printf("Skipped: %d, ", uts->cur.skip_count);
+ ut_report(&uts->cur, 1);
if (ret == -ENOENT)
printf("Test '%s' not found\n", select_name);
- else
- printf("Failures: %d\n", uts->cur.fail_count);
return ret;
}
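
With this change, when a selected test is not found the stats are still
printed first and the 'not found' message follows on its own line, for
example (illustrative test name):

   Tests run: 0, failures: 0
   Test 'dm_test_nonexistent' not found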