
From: Stephen Warren <swarren@nvidia.com>
A custom fixture named ut_subtest is implemented which is parametrized with the names of all unit tests that the U-Boot binary supports. This causes each U-Boot unit test to be exposed as a separate pytest. In turn, this allows more fine-grained pass/fail counts and test selection, e.g.:
test.py --bd sandbox -k ut_dm_usb
... will run about 8 tests at present.
Signed-off-by: Stephen Warren <swarren@nvidia.com>
---
This depends on at least my recently sent "test/py: run C-based unit tests".
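As a reviewer aid, here is a rough standalone sketch of the symbol-scanning
approach that generate_ut_subtest() in the conftest.py change below uses. The
binary path and the example return values are illustrative only; the real
code derives the path from console.config.build_dir.

    # Standalone sketch (Python 2, like test/py); names here are assumptions.
    import re
    import subprocess

    # Same pattern as in the patch: linker-list symbols for dm/env unit tests.
    re_ut_test_list = re.compile(
        r'_u_boot_list_2_(dm|env)_test_2_\1_test_(.*)\s*$')

    def list_ut_subtests(u_boot_binary):
        # Dump the symbol table and keep only the unit-test linker-list
        # entries; sorting matches the order a plain "ut dm" would use.
        out = subprocess.check_output(
            'objdump -t "%s" | sort' % u_boot_binary, shell=True)
        subtests = []
        for line in out.splitlines():
            m = re_ut_test_list.search(line)
            if m:
                subtests.append(m.group(1) + ' ' + m.group(2))
        return subtests

    # e.g. list_ut_subtests('/path/to/build-sandbox/u-boot') might return
    # something like ['dm usb_base', 'env attrs', ...]; each entry is then
    # passed to "ut <entry>" as one pytest.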
 test/py/conftest.py      | 105 ++++++++++++++++++++++++++++++++++++-----------
 test/py/tests/test_ut.py |  14 +++----
 2 files changed, 86 insertions(+), 33 deletions(-)
diff --git a/test/py/conftest.py b/test/py/conftest.py
index 3e162cafcc4a..05491a2453c0 100644
--- a/test/py/conftest.py
+++ b/test/py/conftest.py
@@ -21,7 +21,9 @@ import pexpect
 import pytest
 from _pytest.runner import runtestprotocol
 import ConfigParser
+import re
 import StringIO
+import subprocess
 import sys
 
 # Globals: The HTML log file, and the connection to the U-Boot console.
@@ -189,8 +191,43 @@ def pytest_configure(config):
         import u_boot_console_exec_attach
         console = u_boot_console_exec_attach.ConsoleExecAttach(log, ubconfig)
 
-def pytest_generate_tests(metafunc):
-    """pytest hook: parameterize test functions based on custom rules.
+re_ut_test_list = re.compile(r'_u_boot_list_2_(dm|env)_test_2_\1_test_(.*)\s*$')
+def generate_ut_subtest(metafunc, fixture_name):
+    """Provide parametrization for a ut_subtest fixture.
+
+    Determines the set of unit tests built into a U-Boot binary by parsing the
+    list of symbols present in the U-Boot binary. Provides this information to
+    test functions by parameterizing their ut_subtest fixture parameter.
+
+    Args:
+        metafunc: The pytest test function.
+        fixture_name: The fixture name to test.
+
+    Returns:
+        Nothing.
+    """
+
+    # This does rely on an objdump binary, but that's quite likely to be
+    # present. This approach trivially takes care of any source or Makefile-
+    # level conditional compilation which may occur, and matches the test
+    # execution order of a plain "ut dm" command. A source-scanning approach
+    # would do neither. This approach also doesn't require access to the
+    # U-Boot source tree when running tests.
+
+    cmd = 'objdump -t "%s" | sort' % (console.config.build_dir + '/u-boot')
+    out = subprocess.check_output(cmd, shell=True)
+    vals = []
+    for l in out.splitlines():
+        m = re_ut_test_list.search(l)
+        if not m:
+            continue
+        vals.append(m.group(1) + ' ' + m.group(2))
+
+    ids = ['ut_' + s.replace(' ', '_') for s in vals]
+    metafunc.parametrize(fixture_name, vals, ids=ids)
+
+def generate_config(metafunc, fixture_name):
+    """Provide parametrization for {env,brd}__ fixtures.
 
     If a test function takes parameter(s) (fixture names) of the form brd__xxx
     or env__xxx, the brd and env configuration dictionaries are consulted to
@@ -199,6 +236,7 @@ def pytest_generate_tests(metafunc):
 
     Args:
         metafunc: The pytest test function.
+        fixture_name: The fixture name to test.
 
     Returns:
         Nothing.
@@ -208,30 +246,49 @@ def pytest_generate_tests(metafunc):
         'brd': console.config.brd,
         'env': console.config.env,
     }
+    parts = fixture_name.split('__')
+    if len(parts) < 2:
+        return
+    if parts[0] not in subconfigs:
+        return
+    subconfig = subconfigs[parts[0]]
+    vals = []
+    val = subconfig.get(fixture_name, [])
+    # If that exact name is a key in the data source:
+    if val:
+        # ... use the dict value as a single parameter value.
+        vals = (val, )
+    else:
+        # ... otherwise, see if there's a key that contains a list of
+        # values to use instead.
+        vals = subconfig.get(fixture_name + 's', [])
+    def fixture_id(index, val):
+        try:
+            return val['fixture_id']
+        except:
+            return fixture_name + str(index)
+    ids = [fixture_id(index, val) for (index, val) in enumerate(vals)]
+    metafunc.parametrize(fixture_name, vals, ids=ids)
+
+def pytest_generate_tests(metafunc):
+    """pytest hook: parameterize test functions based on custom rules.
+
+    Check each test function parameter (fixture name) to see if it is one of
+    our custom names, and if so, provide the correct parametrization for that
+    parameter.
+
+    Args:
+        metafunc: The pytest test function.
+
+    Returns:
+        Nothing.
+    """
+
     for fn in metafunc.fixturenames:
-        parts = fn.split('__')
-        if len(parts) < 2:
+        if fn == 'ut_subtest':
+            generate_ut_subtest(metafunc, fn)
             continue
-        if parts[0] not in subconfigs:
-            continue
-        subconfig = subconfigs[parts[0]]
-        vals = []
-        val = subconfig.get(fn, [])
-        # If that exact name is a key in the data source:
-        if val:
-            # ... use the dict value as a single parameter value.
-            vals = (val, )
-        else:
-            # ... otherwise, see if there's a key that contains a list of
-            # values to use instead.
-            vals = subconfig.get(fn + 's', [])
-        def fixture_id(index, val):
-            try:
-                return val["fixture_id"]
-            except:
-                return fn + str(index)
-        ids = [fixture_id(index, val) for (index, val) in enumerate(vals)]
-        metafunc.parametrize(fn, vals, ids=ids)
+        generate_config(metafunc, fn)
 
 @pytest.fixture(scope='function')
 def u_boot_console(request):
diff --git a/test/py/tests/test_ut.py b/test/py/tests/test_ut.py
index b033ca54d756..cd85b3ddc0ce 100644
--- a/test/py/tests/test_ut.py
+++ b/test/py/tests/test_ut.py
@@ -6,8 +6,8 @@ import os.path
 import pytest
 
 @pytest.mark.buildconfigspec('ut_dm')
-def test_ut_dm(u_boot_console):
-    """Execute the "ut dm" command."""
+def test_ut_dm_init(u_boot_console):
+    """Initialize data for ut dm tests."""
 
     fn = u_boot_console.config.source_dir + '/testflash.bin'
     if not os.path.exists(fn):
@@ -16,14 +16,10 @@ def test_ut_dm(u_boot_console):
         with open(fn, 'wb') as fh:
             fh.write(data)
 
-    output = u_boot_console.run_command('ut dm')
-    assert output.endswith('Failures: 0')
-
-@pytest.mark.buildconfigspec('ut_env')
-def test_ut_env(u_boot_console):
-    """Execute the "ut env" command."""
+def test_ut(u_boot_console, ut_subtest):
+    """Execute a "ut" subtest."""
 
-    output = u_boot_console.run_command('ut env')
+    output = u_boot_console.run_command('ut ' + ut_subtest)
     assert output.endswith('Failures: 0')
 
 @pytest.mark.buildconfigspec('ut_time')