test: Move stat-printing into its own function

Add a function to show the stats, so we can decide when to print them.

This slightly adjusts the output, so that any 'test not found' message
appears on its own line after all other output.

The 'Failures' message now appears in lower case, so update the pytest
assertions accordingly.

Signed-off-by: Simon Glass <sjg@chromium.org>
Simon Glass 2025-01-20 14:26:01 -07:00 committed by Tom Rini
parent 561320beff
commit 15c39587cf
8 changed files with 27 additions and 10 deletions
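
As a quick illustration of the new output format, here is a minimal,
self-contained sketch. It is not U-Boot code: struct ut_stats is reduced
to the three fields the new ut_report() reads, report() simply mirrors
the printf sequence added below, and the counts and the 'wibble' test
name are made up.

#include <stdio.h>

/*
 * Simplified stand-in for U-Boot's struct ut_stats: only the three
 * fields that the new ut_report() reads are modelled here.
 */
struct ut_stats {
	int test_count;
	int skip_count;
	int fail_count;
};

/* Mirrors the printf sequence of the new ut_report(), for illustration */
static void report(const struct ut_stats *stats, int run_count)
{
	if (run_count > 1)
		printf("Suites run: %d, total tests", run_count);
	else
		printf("Tests");
	printf(" run: %d, ", stats->test_count);
	if (stats->skip_count)
		printf("skipped: %d, ", stats->skip_count);
	printf("failures: %d\n", stats->fail_count);
}

int main(void)
{
	/* Made-up counts for a single run and for a three-suite run */
	struct ut_stats one = { .test_count = 12, .skip_count = 0, .fail_count = 0 };
	struct ut_stats all = { .test_count = 47, .skip_count = 2, .fail_count = 1 };

	/* Single run prints: "Tests run: 12, failures: 0" */
	report(&one, 1);

	/* Prints: "Suites run: 3, total tests run: 47, skipped: 2, failures: 1" */
	report(&all, 3);

	/*
	 * When a selected test is missing, the summary is still printed
	 * first and the 'not found' message follows on its own line.
	 */
	report(&one, 1);
	printf("Test '%s' not found\n", "wibble");

	return 0;
}

Both call sites changed by this commit pass run_count = 1, so only the
"Tests run:" form appears in the diffs below; the "Suites run:" form is
shown here only to illustrate the run_count > 1 branch.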


@@ -153,6 +153,7 @@ void spl_board_init(void)
 	ut_init_state(&uts);
 	ret = ut_run_list(&uts, "spl", NULL, tests, count,
 			  state->select_unittests, 1, false, NULL);
+	ut_report(&uts.cur, 1);
 	ut_uninit_state(&uts);
 	/* continue execution into U-Boot */
 }


@@ -530,4 +530,12 @@ int ut_run_list(struct unit_test_state *uts, const char *category,
 		const char *select_name, int runs_per_test, bool force_run,
 		const char *test_insert);

+/**
+ * ut_report() - Report stats on a test run
+ *
+ * @stats: Stats to show
+ * @run_count: Number of suites that were run
+ */
+void ut_report(struct ut_stats *stats, int run_count);
+
 #endif


@@ -36,7 +36,7 @@ def test_spl(u_boot_console, ut_spl_subtest):
         cons = u_boot_console
         cons.restart_uboot_with_flags(['-u', '-k', ut_spl_subtest.split()[1]])
         output = cons.get_spawn_output().replace('\r', '')
-        assert 'Failures: 0' in output
+        assert 'failures: 0' in output
     finally:
         # Restart afterward in case a non-SPL test is run next. This should not
         # happen since SPL tests are run in their own invocation of test.py, but


@@ -35,4 +35,4 @@ def test_upl_handoff(u_boot_console):

     # Check the FIT offsets look correct
     output = cons.run_command('ut upl -f upl_test_info_norun')
-    assert 'Failures: 0' in output
+    assert 'failures: 0' in output


@@ -607,4 +607,4 @@ def test_ut(u_boot_console, ut_subtest):
         assert 'Unknown command \'quux\' - try \'help\'' in output
     else:
         output = u_boot_console.run_command('ut ' + ut_subtest)
-        assert output.endswith('Failures: 0')
+        assert output.endswith('failures: 0')


@@ -117,4 +117,4 @@ def test_vbe(u_boot_console):

     with cons.log.section('Kernel load'):
         output = cons.run_command_list(cmd.splitlines())
-        assert 'Failures: 0' in output[-1]
+        assert 'failures: 0' in output[-1]


@@ -26,7 +26,7 @@ def test_vpl(u_boot_console, ut_vpl_subtest):
         cons = u_boot_console
         cons.restart_uboot_with_flags(['-u', '-k', ut_vpl_subtest.split()[1]])
         output = cons.get_spawn_output().replace('\r', '')
-        assert 'Failures: 0' in output
+        assert 'failures: 0' in output
     finally:
         # Restart afterward in case a non-VPL test is run next. This should not
         # happen since VPL tests are run in their own invocation of test.py, but


@@ -673,6 +673,18 @@ static int ut_run_tests(struct unit_test_state *uts, const char *prefix,
 	return uts->cur.fail_count ? -EBADF : 0;
 }

+void ut_report(struct ut_stats *stats, int run_count)
+{
+	if (run_count > 1)
+		printf("Suites run: %d, total tests", run_count);
+	else
+		printf("Tests");
+	printf(" run: %d, ", stats->test_count);
+	if (stats->skip_count)
+		printf("skipped: %d, ", stats->skip_count);
+	printf("failures: %d\n", stats->fail_count);
+}
+
 int ut_run_list(struct unit_test_state *uts, const char *category,
 		const char *prefix, struct unit_test *tests, int count,
 		const char *select_name, int runs_per_test, bool force_run,
@@ -718,13 +730,9 @@ int ut_run_list(struct unit_test_state *uts, const char *category,
 	if (has_dm_tests)
 		dm_test_restore(uts->of_root);

-	printf("Tests run: %d, ", uts->cur.test_count);
-	if (uts->cur.skip_count)
-		printf("Skipped: %d, ", uts->cur.skip_count);
+	ut_report(&uts->cur, 1);
 	if (ret == -ENOENT)
 		printf("Test '%s' not found\n", select_name);
-	else
-		printf("Failures: %d\n", uts->cur.fail_count);

 	return ret;
 }