Files
linux-kernel-module-cheat/test-user-mode
Ciro Santilli 六四事件 法轮功 36490d9293 test-boot: run in parallel
--quit-after-boot: fix for gem5, update path to gem5.sh

Improve the printing of results and errors:

- remove newlines from IDs at the end for ./test-boot
- remove newlines from progress for __call__ commands and don't print executed commands at all,
  otherwise there are too many lines per test and it is hard to tell what is going on
- print backtraces for any exception in the threads (bugs while developing this code)

Tests across different archs and emulators are still not running in parallel,
which is a huge loss. TODO.

thread_pool: introduce with API. This was motivated by test-boot, I've had enough
of doing separate error handling for each loop type! Greatly dries up the code, awesome.

common: make --all-emulators work properly with native hopefully for the last time,
./test-baremetal was still failing.

gem5: don't pass --command-line for baremetal. Maybe later we can use it to actually
pass command line arguments to main()? To be seen.
2019-05-19 00:00:00 +00:00

75 lines
3.1 KiB
Python
Executable File

#!/usr/bin/env python3
import os
import sys
import common
import lkmc.import_path
import path_properties
import thread_pool
class Main(common.TestCliFunction):
    """Run the userland (user mode) test suite.

    Walks the userland source tree, selects source files whose extension is
    in ``build_in_exts`` and whose ``path_properties`` say they should be
    tested for the current environment, and submits one test per file to a
    thread pool for parallel execution.

    https://github.com/cirosantilli/linux-kernel-module-cheat#user-mode-tests
    """

    def __init__(self, *args, **kwargs):
        # Fill in defaults only when the caller did not provide them, so
        # embedders can override the CLI description / defaults.
        if 'description' not in kwargs:
            kwargs['description'] = '''\
https://github.com/cirosantilli/linux-kernel-module-cheat#user-mode-tests
TODO: expose all userland relevant ./run args here as well somehow.
'''
        if 'defaults' not in kwargs:
            kwargs['defaults'] = {}
        super().__init__(*args, **kwargs)
        self.add_argument(
            'tests',
            nargs='*',
            help='''\
If given, run only the given tests. Otherwise, run all tests.
'''
        )

    def setup_one(self):
        # Expand the user-given test names (possibly empty, meaning "all
        # tests") into concrete paths under the userland source directory.
        self.env['tests'] = self.resolve_targets(
            self.env['userland_source_dir'],
            self.env['tests']
        )

    def timed_main(self):
        run_args = self.get_common_args()
        if self.env['emulator'] == 'gem5':
            # gem5 syscall emulation needs statically linked userland
            # executables, so force the static build variant.
            run_args['userland_build_id'] = 'static'
        rootdir_abs_len = len(self.env['root_dir'])
        with thread_pool.ThreadPool(
            self.run_test,
            handle_output=self.handle_output_function,
            nthreads=self.env['nproc'],
            thread_id_arg='thread_id',
            submit_raise_exit=self.env['quit_on_fail'],
        ) as my_thread_pool:
            for test in self.env['tests']:
                for path, in_dirnames, in_filenames in self.sh.walk(test):
                    path_abs = os.path.abspath(path)
                    # Path relative to the repository root; doubles as the
                    # key into path_properties and as the test ID.
                    dirpath_relative_root = path_abs[rootdir_abs_len + 1:]
                    for in_filename in in_filenames:
                        if os.path.splitext(in_filename)[1] in self.env['build_in_exts']:
                            path_relative_root = os.path.join(dirpath_relative_root, in_filename)
                            my_path_properties = path_properties.get(path_relative_root)
                            if my_path_properties.should_be_tested(self.env):
                                cur_run_args = run_args.copy()
                                cur_run_args.update({
                                    'userland': os.path.relpath(os.path.join(path_abs, in_filename), os.getcwd()),
                                })
                                cur_run_args.update(my_path_properties['test_run_args'])
                                run_test_args = {
                                    'expected_exit_status': my_path_properties['exit_status'],
                                    'run_args': cur_run_args,
                                    'run_obj': lkmc.import_path.import_path_main('run'),
                                    'test_id': path_relative_root,
                                }
                                if my_path_properties['receives_signal']:
                                    # NOTE(review): shells report signal death as
                                    # 128 + signum; this computes 128 - exit_status,
                                    # which matches only if exit_status stores the
                                    # negated signal number — confirm the
                                    # path_properties convention.
                                    run_test_args['expected_exit_status'] = 128 - my_path_properties['exit_status']
                                my_thread_pool.submit(run_test_args)
        # The pool has been joined by the context manager exit; collect and
        # report any per-thread errors as the overall exit status.
        return self._handle_thread_pool_errors(my_thread_pool)
if __name__ == '__main__':
    # Script entry point: construct the CLI front-end and hand control to it.
    main = Main()
    main.cli()