mirror of https://github.com/espressif/esp-idf.git
feat(tiny_test_fw): unify all junit report test case names
New format: <target>.<config>.<case_name>; the default value of "config" is "default".
commit 85d4bca81a
parent 7518393ee8
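
For context, a minimal sketch (not part of the diff below) of the naming helper this commit adds to tiny_test_fw's Utility module; the case name and the "psram" config in the usage lines are hypothetical illustration values:

# Helper added by this commit: builds JUnit case names as "<target>.<config>.<case_name>".
def format_case_id(case_name, target='esp32', config='default'):
    return '{}.{}.{}'.format(target, config, case_name)

# Hypothetical usage:
print(format_case_id('UT_001_01', config='psram'))  # -> esp32.psram.UT_001_01
print(format_case_id('UT_001_01'))                  # -> esp32.default.UT_001_01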
@@ -22,6 +22,7 @@ from datetime import datetime
 import junit_xml
 
 from . import DUT, App, Env, Utility
+from .Utility import format_case_id
 
 
 class TestCaseFailed(AssertionError):
@@ -98,7 +99,7 @@ class JunitReport(object):
     def output_report(cls, junit_file_path):
         """ Output current test result to file. """
         with open(os.path.join(junit_file_path, cls.JUNIT_FILE_NAME), 'w') as f:
-            cls.JUNIT_TEST_SUITE.to_file(f, [cls.JUNIT_TEST_SUITE], prettyprint=False)
+            junit_xml.to_xml_report_file(f, [cls.JUNIT_TEST_SUITE], prettyprint=False)
 
     @classmethod
     def get_current_test_case(cls):
@@ -195,9 +196,9 @@ def test_method(**kwargs):
 
             # prepare for xunit test results
             junit_file_path = env_inst.app_cls.get_log_folder(env_config['test_suite_name'])
-            junit_test_case = JunitReport.create_test_case(case_info['ID'])
+            junit_test_case = JunitReport.create_test_case(format_case_id(case_info['ID'],
+                                                                          target=env_inst.default_dut_cls.TARGET))
             result = False
 
             try:
                 Utility.console_log('starting running test: ' + test_func.__name__, color='green')
                 # execute test function
@@ -110,3 +110,7 @@ def handle_unexpected_exception(junit_test_case, exception):
     # AssertionError caused by an 'assert' statement has an empty string as its 'str' form
     e_str = str(exception) if str(exception) else repr(exception)
     junit_test_case.add_failure_info('Unexpected exception: {}\n{}'.format(e_str, traceback.format_exc()))
+
+
+def format_case_id(case_name, target='esp32', config='default'):
+    return '{}.{}.{}'.format(target, config, case_name)
@@ -34,10 +34,6 @@ TARGET_DUT_CLS_DICT = {
 }
 
 
-def format_case_id(target, case_name):
-    return '{}.{}'.format(target, case_name)
-
-
 try:
     string_type = basestring
 except NameError:
@@ -128,7 +124,6 @@ def test_func_generator(func, app, target, ci_target, module, execution_time, le
         dut_dict=dut_classes, **kwargs
     )
     test_func = original_method(func)
-    test_func.case_info['ID'] = format_case_id(target, test_func.case_info['name'])
     return test_func
 
 
@@ -26,7 +26,7 @@ import time
 import ttfw_idf
 from tiny_test_fw import DUT, Env, TinyFW, Utility
 from tiny_test_fw.TinyFW import TestCaseFailed
-from tiny_test_fw.Utility import handle_unexpected_exception
+from tiny_test_fw.Utility import format_case_id, handle_unexpected_exception
 
 UT_APP_BOOT_UP_DONE = 'Press ENTER to see the list of tests.'
 
@@ -73,7 +73,7 @@ def reset_reason_matches(reported_str, expected_str):
     return False
 
 
-def format_test_case_config(test_case_data):
+def format_test_case_config(test_case_data, target='esp32'):
     """
     convert the test case data to unified format.
     We need to following info to run unit test cases:
@@ -93,6 +93,7 @@ def format_test_case_config(test_case_data):
     If config is not specified for test case, then
 
     :param test_case_data: string, list, or a dictionary list
+    :param target: target
     :return: formatted data
     """
 
@@ -132,6 +133,9 @@ def format_test_case_config(test_case_data):
         if 'config' not in _case:
             _case['config'] = 'default'
 
+        if 'target' not in _case:
+            _case['target'] = target
+
         return _case
 
     if not isinstance(test_case_data, list):
@@ -163,7 +167,7 @@ def format_case_name(case):
     # we should regard those configs like `default` and `default_2` as the same config
     match = STRIP_CONFIG_PATTERN.match(case['config'])
     stripped_config_name = match.group(1)
-    return '[{}] {}'.format(stripped_config_name, case['name'])
+    return format_case_id(case['name'], target=case['target'], config=stripped_config_name)
 
 
 def reset_dut(dut):
@@ -191,8 +195,11 @@ def reset_dut(dut):
 
 
 def log_test_case(description, test_case, ut_config):
-    Utility.console_log("Running {} '{}' (config {})".format(description, test_case['name'], ut_config), color='orange')
-    Utility.console_log('Tags: %s' % ', '.join('%s=%s' % (k, v) for (k, v) in test_case.items() if k != 'name' and v is not None), color='orange')
+    Utility.console_log("Running {} '{}' (config {})".format(description, test_case['name'], ut_config),
+                        color='orange')
+    Utility.console_log('Tags: %s' % ', '.join('%s=%s' % (k, v) for (k, v) in test_case.items()
+                                               if k != 'name' and v is not None),
+                        color='orange')
 
 
 def run_one_normal_case(dut, one_case, junit_test_case):
@@ -289,7 +296,7 @@ def run_unit_test_cases(env, extra_data):
     :return: None
     """
 
-    case_config = format_test_case_config(extra_data)
+    case_config = format_test_case_config(extra_data, env.default_dut_cls.TARGET)
 
     # we don't want stop on failed case (unless some special scenarios we can't handle)
     # this flag is used to log if any of the case failed during executing
@@ -323,13 +330,6 @@ def run_unit_test_cases(env, extra_data):
         # close DUT when finish running all cases for one config
         env.close_dut(dut.name)
 
-    # raise exception if any case fails
-    if failed_cases:
-        Utility.console_log('Failed Cases:', color='red')
-        for _case_name in failed_cases:
-            Utility.console_log('\t' + _case_name, color='red')
-        raise TestCaseFailed(*failed_cases)
-
 
 class Handler(threading.Thread):
     WAIT_SIGNAL_PATTERN = re.compile(r'Waiting for signal: \[(.+)]!')
@@ -503,7 +503,7 @@ def run_multiple_devices_cases(env, extra_data):
 
     """
     failed_cases = []
-    case_config = format_test_case_config(extra_data)
+    case_config = format_test_case_config(extra_data, env.default_dut_cls.TARGET)
    duts = {}
     for ut_config in case_config:
         Utility.console_log('Running unit test for config: ' + ut_config, 'O')
@@ -530,12 +530,6 @@ def run_multiple_devices_cases(env, extra_data):
             env.close_dut(dut)
         duts = {}
 
-    if failed_cases:
-        Utility.console_log('Failed Cases:', color='red')
-        for _case_name in failed_cases:
-            Utility.console_log('\t' + _case_name, color='red')
-        raise TestCaseFailed(*failed_cases)
-
 
 def run_one_multiple_stage_case(dut, one_case, junit_test_case):
     reset_dut(dut)
@@ -644,7 +638,7 @@ def run_one_multiple_stage_case(dut, one_case, junit_test_case):
 def run_multiple_stage_cases(env, extra_data):
     """
     extra_data can be 2 types of value
-    1. as dict: Mandantory keys: "name" and "child case num", optional keys: "reset" and others
+    1. as dict: Mandatory keys: "name" and "child case num", optional keys: "reset" and others
     3. as list of string or dict:
            [case1, case2, case3, {"name": "restart from PRO CPU", "child case num": 2}, ...]
 
@@ -653,7 +647,7 @@ def run_multiple_stage_cases(env, extra_data):
     :return: None
     """
 
-    case_config = format_test_case_config(extra_data)
+    case_config = format_test_case_config(extra_data, env.default_dut_cls.TARGET)
 
     # we don't want stop on failed case (unless some special scenarios we can't handle)
     # this flag is used to log if any of the case failed during executing
@@ -685,16 +679,9 @@ def run_multiple_stage_cases(env, extra_data):
         # close DUT when finish running all cases for one config
         env.close_dut(dut.name)
 
-    # raise exception if any case fails
-    if failed_cases:
-        Utility.console_log('Failed Cases:', color='red')
-        for _case_name in failed_cases:
-            Utility.console_log('\t' + _case_name, color='red')
-        raise TestCaseFailed(*failed_cases)
-
 
 def detect_update_unit_test_info(env, extra_data, app_bin):
-    case_config = format_test_case_config(extra_data)
+    case_config = format_test_case_config(extra_data, env.default_dut_cls.TARGET)
 
     for ut_config in case_config:
         dut = env.get_dut('unit-test-app', app_path=UT_APP_PATH, app_config_name=ut_config)
@@ -766,18 +753,14 @@ if __name__ == '__main__':
                         )
     parser.add_argument('--env_config_file', '-e',
                         help='test env config file',
-                        default=None
-                        )
+                        default=None)
     parser.add_argument('--app_bin', '-b',
                         help='application binary file for flashing the chip',
-                        default=None
-                        )
-    parser.add_argument(
-        'test',
-        help='Comma separated list of <option>:<argument> where option can be "name" (default), "child case num", \
-             "config", "timeout".',
-        nargs='+'
-    )
+                        default=None)
+    parser.add_argument('test',
+                        help='Comma separated list of <option>:<argument> where option can be "name" (default), '
+                             '"child case num", "config", "timeout".',
+                        nargs='+')
     args = parser.parse_args()
     list_of_dicts = []
     for test in args.test:
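
As a follow-up illustration (a simplified sketch, not the actual unit_test.py code): the per-case defaulting added above means every case dict carries a 'config' and a 'target' before its JUnit name is built. The case dict below is hypothetical.

# Sketch of the defaulting logic the diff adds to format_test_case_config:
def apply_case_defaults(case, target='esp32'):
    if 'config' not in case:
        case['config'] = 'default'
    if 'target' not in case:
        case['target'] = target
    return case

case = apply_case_defaults({'name': 'UT_001_01'})
print('{}.{}.{}'.format(case['target'], case['config'], case['name']))  # -> esp32.default.UT_001_01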