int ret;
ret = ut_run_list("spl", NULL, tests, count,
- state->select_unittests, 1);
+ state->select_unittests, 1, false);
/* continue execution into U-Boot */
}
}
board supported by U-Boot.
+Mixing Python and C
+-------------------
+
+The best of both worlds is sometimes to have a Python test set things up and
+perform some operations, with a 'checker' C unit test doing the checks
+afterwards. This can be achieved with these steps:
+
+- Add the `UT_TESTF_MANUAL` flag to the checker test so that the `ut` command
+ does not run it by default
+- Add a `_norun` suffix to the name so that pytest knows to skip it too
+
+In your Python test, use the `-f` flag with the `ut` command to force the
+checker test to run, e.g.::
+
+ # Do the Python part
+ host load ...
+ bootm ...
+
+ # Run the checker to make sure that everything worked
+ ut -f bootstd vbe_test_fixup_norun
+
+Note that apart from the `UT_TESTF_MANUAL` flag, the code in a 'manual' C test
+is just like that in any other C test. It still uses `ut_assert...()` and other
+such constructs, in this case to check that the expected things happened in the
+Python test.
+
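+For reference, here is a minimal sketch of such a checker test. The test name,
+suite and environment variable are hypothetical; `UNIT_TEST` is the generic
+declaration macro::
+
+ #include <env.h>
+ #include <test/test.h>
+ #include <test/ut.h>
+
+ /* Checker for the Python steps above; only runs with 'ut -f' */
+ static int demo_test_check_norun(struct unit_test_state *uts)
+ {
+ /* Verify that the Python part left the expected state behind */
+ ut_assertnonnull(env_get("demo_result"));
+
+ return 0;
+ }
+ UNIT_TEST(demo_test_check_norun, UT_TESTF_MANUAL, demo_test);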
+
How slow are Python tests?
--------------------------
* @other_fdt_size: Size of the other FDT (UT_TESTF_OTHER_FDT)
* @of_other: Live tree for the other FDT
* @runs_per_test: Number of times to run each test (typically 1)
+ * @force_run: true to run tests marked with the UT_TESTF_MANUAL flag
* @expect_str: Temporary string used to hold expected string value
* @actual_str: Temporary string used to hold actual string value
*/
int other_fdt_size;
struct device_node *of_other;
int runs_per_test;
+ bool force_run;
char expect_str[512];
char actual_str[512];
};
/* do extra driver model init and uninit */
UT_TESTF_DM = BIT(6),
UT_TESTF_OTHER_FDT = BIT(7), /* read in other device tree */
+ /*
+ * Only run if explicitly requested with 'ut -f <suite> <test>'. The
+ * test name must end in "_norun" so that pytest detects this also,
+ * since it cannot access the flags.
+ */
+ UT_TESTF_MANUAL = BIT(8),
};
/**
* @select_name: Name of a single test to run (from the list provided). If NULL
* then all tests are run
* @runs_per_test: Number of times to run each test (typically 1)
+ * @force_run: true to run tests marked as manual-only (UT_TESTF_MANUAL)
* Return: 0 if all tests passed, -1 if any failed
*/
int ut_run_list(const char *name, const char *prefix, struct unit_test *tests,
- int count, const char *select_name, int runs_per_test);
+ int count, const char *select_name, int runs_per_test,
+ bool force_run);
#endif
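/*
 * An illustrative call (suite name, prefix and test list are hypothetical):
 * run each test once, forcing any UT_TESTF_MANUAL tests to run too:
 *
 *   ret = ut_run_list("demo", "demo_test_", tests, n_ents, NULL, 1, true);
 */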
int argc, char *const argv[])
{
int runs_per_text = 1;
+ bool force_run = false;
int ret;
- if (argc > 1 && !strncmp("-r", argv[1], 2)) {
- runs_per_text = dectoul(argv[1] + 2, NULL);
+ while (argc > 1 && *argv[1] == '-') {
+ const char *str = argv[1];
+
+ switch (str[1]) {
+ case 'r':
+ runs_per_text = dectoul(str + 2, NULL);
+ break;
+ case 'f':
+ force_run = true;
+ break;
+ }
argv++;
argc--;
}
ret = ut_run_list(name, prefix, tests, n_ents,
- argc > 1 ? argv[1] : NULL, runs_per_text);
+ argc > 1 ? argv[1] : NULL, runs_per_text, force_run);
return ret ? CMD_RET_FAILURE : 0;
}
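/*
 * Illustrative invocations of the command (flag placement follows the doc
 * example earlier; the -r value and suite names are hypothetical):
 *
 *   ut -r2 dm                            run each driver model test twice
 *   ut -f bootstd vbe_test_fixup_norun   force-run a manual test
 */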
int ret;
ret = ut_run_list("driver model", "dm_test_", tests, n_ents, test_name,
- runs_per_text);
+ runs_per_text, false);
return ret ? CMD_RET_FAILURE : 0;
}
m = re_ut_test_list.search(l)
if not m:
continue
- vals.append(m.group(1) + ' ' + m.group(2))
+ suite, name = m.groups()
+
+ # Tests marked with _norun should only be run manually using 'ut -f'
+ if name.endswith('_norun'):
+ continue
+
+ vals.append(f'{suite} {name}')
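+
+ # For illustration (hypothetical names): suite 'dm' with test 'dm_test_foo'
+ # gives the val 'dm dm_test_foo' and, below, the id 'ut_dm_dm_test_foo'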
ids = ['ut_' + s.replace(' ', '_') for s in vals]
metafunc.parametrize(fixture_name, vals, ids=ids)
if (!test_matches(prefix, test_name, select_name))
continue;
+
+ if (test->flags & UT_TESTF_MANUAL) {
+ int len;
+
+ /*
+ * Manual tests must have a name ending "_norun", as this
+ * is how pytest knows to skip them. See
+ * generate_ut_subtest() for the pytest side of this check.
+ */
+ len = strlen(test_name);
+ if (len < 6 || strcmp(test_name + len - 6, "_norun")) {
+ printf("Test %s is manual so must have a name ending in _norun\n",
+ test_name);
+ uts->fail_count++;
+ return -EBADF;
+ }
+ if (!uts->force_run) {
+ if (select_name) {
+ printf("Test %s skipped as it is manual (use -f to run it)\n",
+ test_name);
+ }
+ continue;
+ }
+ }
old_fail_count = uts->fail_count;
for (i = 0; i < uts->runs_per_test; i++)
ret = ut_run_test_live_flat(uts, test, select_name);
int ut_run_list(const char *category, const char *prefix,
struct unit_test *tests, int count, const char *select_name,
- int runs_per_test)
+ int runs_per_test, bool force_run)
{
struct unit_test_state uts = { .fail_count = 0 };
bool has_dm_tests = false;
}
memcpy(uts.fdt_copy, gd->fdt_blob, uts.fdt_size);
}
+ uts.force_run = force_run;
ret = ut_run_tests(&uts, prefix, tests, count, select_name);
/* Best efforts only...ignore errors */