#!/usr/bin/env python
#
# Copyright 2018 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Test script for unit test case.
"""

import re
import time
import argparse
import threading

from tiny_test_fw import TinyFW, Utility, Env, DUT
import ttfw_idf
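# tiny_test_fw provides the generic test framework pieces used below (TinyFW for configuration
# and JUnit reporting, Env for DUT management, DUT for the expect API); ttfw_idf supplies the
# IDF-specific app/DUT classes and the idf_unit_test decorator.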
UT_APP_BOOT_UP_DONE = "Press ENTER to see the list of tests."

# matches e.g.: "rst:0xc (SW_CPU_RESET),boot:0x13 (SPI_FAST_FLASH_BOOT)"
RESET_PATTERN = re.compile(r"(rst:0x[0-9a-fA-F]*\s\([\w].*?\),boot:0x[0-9a-fA-F]*\s\([\w].*?\))")

EXCEPTION_PATTERN = re.compile(r"(Guru Meditation Error: Core\s+\d panic'ed \([\w].*?\))")
ABORT_PATTERN = re.compile(r"(abort\(\) was called at PC 0x[a-fA-F\d]{8} on core \d)")
FINISH_PATTERN = re.compile(r"1 Tests (\d) Failures (\d) Ignored")
END_LIST_STR = r'\r?\nEnter test for running'
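# TEST_PATTERN matches one entry of the test menu printed by the unit test app, capturing the
# index, the quoted case name and the trailing description/tags, e.g. (illustrative entry):
#   (1) "some test case" [tag]
# TEST_SUBMENU_PATTERN matches the numbered sub-entries listed under a multi-stage or
# multi-device case.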
TEST_PATTERN = re.compile(r'\((\d+)\)\s+"([^"]+)" ([^\r\n]+)\r?\n(' + END_LIST_STR + r')?')
TEST_SUBMENU_PATTERN = re.compile(r'\s+\((\d+)\)\s+"[^"]+"\r?\n(?=(?=\()|(' + END_LIST_STR + r'))')
UT_APP_PATH = "tools/unit-test-app"

SIMPLE_TEST_ID = 0
MULTI_STAGE_ID = 1
MULTI_DEVICE_ID = 2

DEFAULT_TIMEOUT = 20

DUT_DELAY_AFTER_RESET = 2
DUT_STARTUP_CHECK_RETRY_COUNT = 5
TEST_HISTORY_CHECK_TIMEOUT = 2


class TestCaseFailed(AssertionError):
    pass

def format_test_case_config(test_case_data):
    """
    Convert the test case data to a unified format.
    We need the following info to run unit test cases:

    1. unit test app config
    2. test case name
    3. test case reset info

    The formatted case config is a dict, with ut app config as keys. The value is a list of test cases.
    Each test case is a dict with "name" and "reset" as keys. For example::

        case_config = {
            "default": [{"name": "restart from PRO CPU", "reset": "SW_CPU_RESET"}, {...}],
            "psram": [{"name": "restart from PRO CPU", "reset": "SW_CPU_RESET"}],
        }

    If config is not specified for a test case, the "default" config is used.

    :param test_case_data: string, list, or a dictionary list
    :return: formatted data
    """
    case_config = dict()

    def parse_case(one_case_data):
        """ parse and format one case """

        def process_reset_list(reset_list):
            # strip spaces and remove whitespace-only items
            _output = list()
            for _r in reset_list:
                _data = _r.strip(" ")
                if _data:
                    _output.append(_data)
            return _output

        _case = dict()
        if isinstance(one_case_data, str):
            _temp = one_case_data.split(" [reset=")
            _case["name"] = _temp[0]
            try:
                _case["reset"] = process_reset_list(_temp[1][0:-1].split(","))
            except IndexError:
                _case["reset"] = list()
        elif isinstance(one_case_data, dict):
            _case = one_case_data.copy()
            assert "name" in _case
            if "reset" not in _case:
                _case["reset"] = list()
            else:
                if isinstance(_case["reset"], str):
                    _case["reset"] = process_reset_list(_case["reset"].split(","))
        else:
            raise TypeError("Unsupported type during parsing unit test case")

        if "config" not in _case:
            _case["config"] = "default"

        return _case

    if not isinstance(test_case_data, list):
        test_case_data = [test_case_data]

    for case_data in test_case_data:
        parsed_case = parse_case(case_data)
        try:
            case_config[parsed_case["config"]].append(parsed_case)
        except KeyError:
            case_config[parsed_case["config"]] = [parsed_case]

    return case_config

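# For illustration, format_test_case_config(["case A", "case B [reset=SW_CPU_RESET]"]) (made-up
# case names) returns:
#   {"default": [{"name": "case A", "reset": [], "config": "default"},
#                {"name": "case B", "reset": ["SW_CPU_RESET"], "config": "default"}]}
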
def replace_app_bin(dut, name, new_app_bin):
    if new_app_bin is None:
        return
    search_pattern = '/{}.bin'.format(name)
    for i, config in enumerate(dut.download_config):
        if config.endswith(search_pattern):
            dut.download_config[i] = new_app_bin
            Utility.console_log("The replaced application binary is {}".format(new_app_bin), "O")
            break


def format_case_name(case):
    return "[{}] {}".format(case["config"], case["name"])

def reset_dut(dut):
    dut.reset()
    # The esptool ``run`` command takes quite a long time, and the serial port is closed before
    # the reset finishes, so the DUT may already have booted up before the port is reopened.
    # That makes checking the boot-up banner unreliable.
    # Instead we send the `-` command and check the (empty) test history to confirm the DUT has
    # booted. This step is retried a few times, in case `dut.reset` returns while the DUT is
    # still booting and can't process any command yet.
    #
    # During boot-up the DUT might only receive part of the first `-` command. If it only
    # receives `\n`, it prints the whole case list, which can take more than 5 seconds and make
    # the reset check fail. To avoid this we add a delay between the reset and the `-` command,
    # and also enlarge the expect timeout.
    time.sleep(DUT_DELAY_AFTER_RESET)
    for _ in range(DUT_STARTUP_CHECK_RETRY_COUNT):
        dut.write("-")
        try:
            dut.expect("0 Tests 0 Failures 0 Ignored", timeout=TEST_HISTORY_CHECK_TIMEOUT)
            break
        except DUT.ExpectTimeout:
            pass
    else:
        raise AssertionError("Reset {} ({}) failed!".format(dut.name, dut.port))

def log_test_case(description, test_case, ut_config):
    Utility.console_log("Running {} '{}' (config {})".format(description, test_case["name"], ut_config), color="orange")
    Utility.console_log("Tags: %s" % ", ".join("%s=%s" % (k, v) for (k, v) in test_case.items() if k != "name" and v is not None), color="orange")

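# Runs a single "simple" unit test case: reset the DUT, select the case by its quoted name,
# then watch the output until either the test summary line appears (test finished) or the
# expected reset/exception sequence plays out, failing on timeout.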
def run_one_normal_case(dut, one_case, junit_test_case):

    reset_dut(dut)

    dut.start_capture_raw_data()
    # run test case
    dut.write("\"{}\"".format(one_case["name"]))
    dut.expect("Running " + one_case["name"] + "...")

    exception_reset_list = []

    # we want to set this flag in callbacks (inner functions)
    # use list here so we can use append to set this flag
    test_finish = list()

    # expect callbacks
    def one_case_finish(result):
        """ one test finished, let expect loop break and log result """
        test_finish.append(True)
        output = dut.stop_capture_raw_data()
        if result:
            Utility.console_log("Success: " + format_case_name(one_case), color="green")
        else:
            Utility.console_log("Failed: " + format_case_name(one_case), color="red")
            junit_test_case.add_failure_info(output)
            raise TestCaseFailed()

    def handle_exception_reset(data):
        """
        just append data to exception list.
        exception list will be checked in ``handle_reset_finish``, once reset finished.
        """
        exception_reset_list.append(data[0])

    def handle_test_finish(data):
        """ test finished without reset """
        # in this scenario reset should not happen
        assert not exception_reset_list
        if int(data[1]):
            # case ignored
            Utility.console_log("Ignored: " + format_case_name(one_case), color="orange")
            junit_test_case.add_skipped_info("ignored")
        one_case_finish(not int(data[0]))

    def handle_reset_finish(data):
        """ reset happened and reboot finished """
        assert exception_reset_list  # reboot but no exception/reset logged. should never happen
        result = False
        if len(one_case["reset"]) == len(exception_reset_list):
            for i, exception in enumerate(exception_reset_list):
                if one_case["reset"][i] not in exception:
                    break
            else:
                result = True
        if not result:
            err_msg = "Reset Check Failed: \r\n\tExpected: {}\r\n\tGet: {}".format(one_case["reset"],
                                                                                   exception_reset_list)
            Utility.console_log(err_msg, color="orange")
            junit_test_case.add_failure_info(err_msg)
        one_case_finish(result)

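    # Main expect loop: dispatch DUT output to the callbacks above until the case either
    # reports the test summary line ("1 Tests ... Failures ... Ignored"), resets and boots up
    # again, or times out.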
    while not test_finish:
        try:
            timeout_value = one_case["timeout"]
            dut.expect_any((RESET_PATTERN, handle_exception_reset),
                           (EXCEPTION_PATTERN, handle_exception_reset),
                           (ABORT_PATTERN, handle_exception_reset),
                           (FINISH_PATTERN, handle_test_finish),
                           (UT_APP_BOOT_UP_DONE, handle_reset_finish),
                           timeout=timeout_value)
        except DUT.ExpectTimeout:
            Utility.console_log("Timeout in expect (%s seconds)" % timeout_value, color="orange")
            junit_test_case.add_failure_info("timeout")
            one_case_finish(False)
            break

@ttfw_idf.idf_unit_test(env_tag="UT_T1_1", junit_report_by_case=True)
def run_unit_test_cases(env, extra_data):
    """
    extra_data can be three types of value
    1. as string:
       1. "case_name"
       2. "case_name [reset=RESET_REASON]"
    2. as dict:
       1. with key like {"name": "Intr_alloc test, shared ints"}
       2. with key like {"name": "restart from PRO CPU", "reset": "SW_CPU_RESET", "config": "psram"}
    3. as list of string or dict:
       [case1, case2, case3, {"name": "restart from PRO CPU", "reset": "SW_CPU_RESET"}, ...]

    :param env: test env instance
    :param extra_data: the case name or case list or case dictionary
    :return: None
    """
    case_config = format_test_case_config(extra_data)

    # We don't want to stop on a failed case (except for special scenarios we can't handle).
    # This list collects the names of failed cases while executing; before the test function
    # exits, it is used to report the failures and raise.
    failed_cases = []

    for ut_config in case_config:
        Utility.console_log("Running unit test for config: " + ut_config, "O")
        dut = env.get_dut("unit-test-app", app_path=UT_APP_PATH, app_config_name=ut_config, allow_dut_exception=True)
        if len(case_config[ut_config]) > 0:
            replace_app_bin(dut, "unit-test-app", case_config[ut_config][0].get('app_bin'))
        dut.start_app()
        Utility.console_log("Download finished, start running test cases", "O")

        for one_case in case_config[ut_config]:
            log_test_case("test case", one_case, ut_config)
            performance_items = []
            # create junit report test case
            junit_test_case = TinyFW.JunitReport.create_test_case("[{}] {}".format(ut_config, one_case["name"]))
            try:
                run_one_normal_case(dut, one_case, junit_test_case)
                performance_items = dut.get_performance_items()
            except TestCaseFailed:
                failed_cases.append(format_case_name(one_case))
            except Exception as e:
                junit_test_case.add_failure_info("Unexpected exception: " + str(e))
                failed_cases.append(format_case_name(one_case))
            finally:
                TinyFW.JunitReport.update_performance(performance_items)
                TinyFW.JunitReport.test_case_finish(junit_test_case)
        # close DUT when we finish running all cases for one config
        env.close_dut(dut.name)

    # raise exception if any case fails
    if failed_cases:
        Utility.console_log("Failed Cases:", color="red")
        for _case_name in failed_cases:
            Utility.console_log("\t" + _case_name, color="red")
        raise AssertionError("Unit Test Failed")

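# Handler drives one DUT of a multi-device test case in its own thread. The DUTs synchronise
# through the shared `sent_signal_list`: when a DUT prints "Send signal: [X][param]!" the signal
# is appended to the list, and a DUT that prints "Waiting for signal: [X]!" polls the list and,
# once X shows up, writes the (possibly empty) parameter back to its own serial port.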
class Handler(threading.Thread):

    WAIT_SIGNAL_PATTERN = re.compile(r'Waiting for signal: \[(.+)]!')
    SEND_SIGNAL_PATTERN = re.compile(r'Send signal: \[([^]]+)](\[([^]]+)])?!')
    FINISH_PATTERN = re.compile(r"1 Tests (\d) Failures (\d) Ignored")

    def __init__(self, dut, sent_signal_list, lock, parent_case_name, child_case_index, timeout):
        self.dut = dut
        self.sent_signal_list = sent_signal_list
        self.lock = lock
        self.parent_case_name = parent_case_name
        self.child_case_name = ""
        self.child_case_index = child_case_index + 1
        self.finish = False
        self.result = False
        self.output = ""
        self.fail_name = None
        self.timeout = timeout
        self.force_stop = threading.Event()  # set when the thread is asked to stop

        reset_dut(self.dut)  # reset the board to make it start from the beginning

        threading.Thread.__init__(self, name="{} Handler".format(dut))

    def run(self):

        self.dut.start_capture_raw_data()

        def get_child_case_name(data):
            self.child_case_name = data[0]
            time.sleep(1)
            self.dut.write(str(self.child_case_index))

        def one_device_case_finish(result):
            """ one test finished, let expect loop break and log result """
            self.finish = True
            self.result = result
            self.output = "[{}]\n\n{}\n".format(self.child_case_name,
                                                self.dut.stop_capture_raw_data())
            if not result:
                self.fail_name = self.child_case_name

        def device_wait_action(data):
            start_time = time.time()
            expected_signal = data[0]
            while 1:
                if time.time() > start_time + self.timeout:
                    Utility.console_log("Timeout in device for function: %s" % self.child_case_name, color="orange")
                    break
                with self.lock:
                    for sent_signal in self.sent_signal_list:
                        if expected_signal == sent_signal["name"]:
                            self.dut.write(sent_signal["parameter"])
                            self.sent_signal_list.remove(sent_signal)
                            break
                    else:
                        time.sleep(0.01)
                        continue
                break

        def device_send_action(data):
            with self.lock:
                self.sent_signal_list.append({
                    "name": data[0].encode('utf-8'),
                    "parameter": "" if data[2] is None else data[2].encode('utf-8')
                    # no parameter means we only write EOL to DUT
                })

        def handle_device_test_finish(data):
            """ test finished without reset """
            # in this scenario reset should not happen
            if int(data[1]):
                # case ignored
                Utility.console_log("Ignored: " + self.child_case_name, color="orange")
            one_device_case_finish(not int(data[0]))

        try:
            time.sleep(1)
            self.dut.write("\"{}\"".format(self.parent_case_name))
            self.dut.expect("Running " + self.parent_case_name + "...")
        except DUT.ExpectTimeout:
            Utility.console_log("No case detected!", color="orange")
        while not self.finish and not self.force_stop.is_set():
            try:
                self.dut.expect_any((re.compile(r'\(' + str(self.child_case_index) + r'\)\s"(\w+)"'),
                                     get_child_case_name),
                                    (self.WAIT_SIGNAL_PATTERN, device_wait_action),  # wait signal pattern
                                    (self.SEND_SIGNAL_PATTERN, device_send_action),  # send signal pattern
                                    (self.FINISH_PATTERN, handle_device_test_finish),  # test finish pattern
                                    timeout=self.timeout)
            except DUT.ExpectTimeout:
                Utility.console_log("Timeout in expect (%s seconds)" % self.timeout, color="orange")
                one_device_case_finish(False)
                break

    def stop(self):
        self.force_stop.set()

def get_case_info(one_case):
    parent_case = one_case["name"]
    child_case_num = one_case["child case num"]
    return parent_case, child_case_num


def get_dut(duts, env, name, ut_config, app_bin=None):
    if name in duts:
        dut = duts[name]
    else:
        dut = env.get_dut(name, app_path=UT_APP_PATH, app_config_name=ut_config, allow_dut_exception=True)
        duts[name] = dut
        replace_app_bin(dut, "unit-test-app", app_bin)
        dut.start_app()  # download bin to board
    return dut

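# Runs one multi-device case: one Handler thread per DUT ("dut0", "dut1", ...), all sharing the
# same signal list and lock. The case passes only if every DUT reports success; when any DUT
# fails, the remaining handlers are asked to stop.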
def run_one_multiple_devices_case(duts, ut_config, env, one_case, app_bin, junit_test_case):
    lock = threading.RLock()
    threads = []
    send_signal_list = []
    result = True
    parent_case, case_num = get_case_info(one_case)

    for i in range(case_num):
        dut = get_dut(duts, env, "dut%d" % i, ut_config, app_bin)
        threads.append(Handler(dut, send_signal_list, lock,
                               parent_case, i, one_case["timeout"]))
    for thread in threads:
        thread.daemon = True
        thread.start()
    output = "Multiple Device Failed\n"
    for thread in threads:
        thread.join()
        result = result and thread.result
        output += thread.output
        if not thread.result:
            for thd in threads:
                thd.stop()

    if not result:
        junit_test_case.add_failure_info(output)

    # collect performances from DUTs
    performance_items = []
    for dut_name in duts:
        performance_items.extend(duts[dut_name].get_performance_items())
    TinyFW.JunitReport.update_performance(performance_items)

    return result

@ttfw_idf.idf_unit_test(env_tag="UT_T2_1", junit_report_by_case=True)
def run_multiple_devices_cases(env, extra_data):
    """
    extra_data can be two types of value
    1. as dict:
       e.g.
       {"name": "gpio master/slave test example",
        "child case num": 2,
        "config": "release",
        "env_tag": "UT_T2_1"}
    2. as list of dict:
       e.g.
       [{"name": "gpio master/slave test example1",
         "child case num": 2,
         "config": "release",
         "env_tag": "UT_T2_1"},
        {"name": "gpio master/slave test example2",
         "child case num": 2,
         "config": "release",
         "env_tag": "UT_T2_1"}]

    """
    failed_cases = []
    case_config = format_test_case_config(extra_data)
    duts = {}
    for ut_config in case_config:
        Utility.console_log("Running unit test for config: " + ut_config, "O")
        for one_case in case_config[ut_config]:
            log_test_case("multi-device test", one_case, ut_config)
            result = False
            junit_test_case = TinyFW.JunitReport.create_test_case("[{}] {}".format(ut_config, one_case["name"]))
            try:
                result = run_one_multiple_devices_case(duts, ut_config, env, one_case,
                                                       one_case.get('app_bin'), junit_test_case)
            except Exception as e:
                junit_test_case.add_failure_info("Unexpected exception: " + str(e))
            finally:
                if result:
                    Utility.console_log("Success: " + format_case_name(one_case), color="green")
                else:
                    failed_cases.append(format_case_name(one_case))
                    Utility.console_log("Failed: " + format_case_name(one_case), color="red")
                TinyFW.JunitReport.test_case_finish(junit_test_case)
        # close all DUTs when we finish running all cases for one config
        for dut in duts:
            env.close_dut(dut)
        duts = {}

    if failed_cases:
        Utility.console_log("Failed Cases:", color="red")
        for _case_name in failed_cases:
            Utility.console_log("\t" + _case_name, color="red")
        raise AssertionError("Unit Test Failed")

def run_one_multiple_stage_case(dut, one_case, junit_test_case):
    reset_dut(dut)

    dut.start_capture_raw_data()

    exception_reset_list = []

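    # A multi-stage case is executed stage by stage: for every stage we re-select the case by
    # its quoted name and then send the (1-based) stage index. Between stages the DUT usually
    # resets; the reboot is detected via UT_APP_BOOT_UP_DONE and moves us to the next stage.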
    for test_stage in range(one_case["child case num"]):
        # select the multi stage test case by name
        dut.write("\"{}\"".format(one_case["name"]))
        dut.expect("Running " + one_case["name"] + "...")
        # select the test function for the current stage
        dut.write(str(test_stage + 1))

        # we want to set this flag in callbacks (inner functions)
        # use list here so we can use append to set this flag
        stage_finish = list()

        def last_stage():
            return test_stage == one_case["child case num"] - 1

        def check_reset():
            if one_case["reset"]:
                assert exception_reset_list  # reboot but no exception/reset logged. should never happen
                result = False
                if len(one_case["reset"]) == len(exception_reset_list):
                    for i, exception in enumerate(exception_reset_list):
                        if one_case["reset"][i] not in exception:
                            break
                    else:
                        result = True
                if not result:
                    err_msg = "Reset Check Failed: \r\n\tExpected: {}\r\n\tGet: {}".format(one_case["reset"],
                                                                                           exception_reset_list)
                    Utility.console_log(err_msg, color="orange")
                    junit_test_case.add_failure_info(err_msg)
            else:
                # we allow omitting reset in multi-stage cases
                result = True
            return result

        # expect callbacks
        def one_case_finish(result):
            """ one test finished, let expect loop break and log result """
            # handle test finish
            result = result and check_reset()
            output = dut.stop_capture_raw_data()
            if result:
                Utility.console_log("Success: " + format_case_name(one_case), color="green")
            else:
                Utility.console_log("Failed: " + format_case_name(one_case), color="red")
                junit_test_case.add_failure_info(output)
                raise TestCaseFailed()
            stage_finish.append("break")

        def handle_exception_reset(data):
            """
            just append data to exception list.
            exception list will be checked in ``handle_reset_finish``, once reset finished.
            """
            exception_reset_list.append(data[0])

        def handle_test_finish(data):
            """ test finished without reset """
            # in this scenario reset should not happen
            if int(data[1]):
                # case ignored
                Utility.console_log("Ignored: " + format_case_name(one_case), color="orange")
                junit_test_case.add_skipped_info("ignored")
            # only a pass in the last stage is regarded as a real pass
            if last_stage():
                one_case_finish(not int(data[0]))
            else:
                Utility.console_log("test finished before entering the last stage", color="orange")
                one_case_finish(False)

        def handle_next_stage(data):
            """ reboot finished, go to the next stage """
            if last_stage():
                # already in the last stage, should never go to a next stage
                Utility.console_log("didn't finish at last stage", color="orange")
                one_case_finish(False)
            else:
                stage_finish.append("continue")

        while not stage_finish:
            try:
                timeout_value = one_case["timeout"]
                dut.expect_any((RESET_PATTERN, handle_exception_reset),
                               (EXCEPTION_PATTERN, handle_exception_reset),
                               (ABORT_PATTERN, handle_exception_reset),
                               (FINISH_PATTERN, handle_test_finish),
                               (UT_APP_BOOT_UP_DONE, handle_next_stage),
                               timeout=timeout_value)
            except DUT.ExpectTimeout:
                Utility.console_log("Timeout in expect (%s seconds)" % timeout_value, color="orange")
                one_case_finish(False)
                break
        if stage_finish[0] == "break":
            # the test breaks out at the current stage
            break

@ttfw_idf.idf_unit_test(env_tag="UT_T1_1", junit_report_by_case=True)
def run_multiple_stage_cases(env, extra_data):
    """
    extra_data can be 2 types of value
    1. as dict: mandatory keys: "name" and "child case num", optional keys: "reset" and others
    2. as list of string or dict:
       [case1, case2, case3, {"name": "restart from PRO CPU", "child case num": 2}, ...]

    :param env: test env instance
    :param extra_data: the case name or case list or case dictionary
    :return: None
    """
    case_config = format_test_case_config(extra_data)

    # We don't want to stop on a failed case (except for special scenarios we can't handle).
    # This list collects the names of failed cases while executing; before the test function
    # exits, it is used to report the failures and raise.
    failed_cases = []

    for ut_config in case_config:
        Utility.console_log("Running unit test for config: " + ut_config, "O")
        dut = env.get_dut("unit-test-app", app_path=UT_APP_PATH, app_config_name=ut_config, allow_dut_exception=True)
        if len(case_config[ut_config]) > 0:
            replace_app_bin(dut, "unit-test-app", case_config[ut_config][0].get('app_bin'))
        dut.start_app()

        for one_case in case_config[ut_config]:
            log_test_case("multi-stage test", one_case, ut_config)
            performance_items = []
            junit_test_case = TinyFW.JunitReport.create_test_case("[{}] {}".format(ut_config, one_case["name"]))
            try:
                run_one_multiple_stage_case(dut, one_case, junit_test_case)
                performance_items = dut.get_performance_items()
            except TestCaseFailed:
                failed_cases.append(format_case_name(one_case))
            except Exception as e:
                junit_test_case.add_failure_info("Unexpected exception: " + str(e))
                failed_cases.append(format_case_name(one_case))
            finally:
                TinyFW.JunitReport.update_performance(performance_items)
                TinyFW.JunitReport.test_case_finish(junit_test_case)
        # close DUT when we finish running all cases for one config
        env.close_dut(dut.name)

    # raise exception if any case fails
    if failed_cases:
        Utility.console_log("Failed Cases:", color="red")
        for _case_name in failed_cases:
            Utility.console_log("\t" + _case_name, color="red")
        raise AssertionError("Unit Test Failed")

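# Flashes the unit test app once, prints the test menu and updates each entry of `extra_data`
# in place with the detected "type" (simple / multi-stage / multi-device), its "timeout" and,
# for cases with a submenu, the "child case num".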
def detect_update_unit_test_info(env, extra_data, app_bin):

    case_config = format_test_case_config(extra_data)

    for ut_config in case_config:
        dut = env.get_dut("unit-test-app", app_path=UT_APP_PATH, app_config_name=ut_config)
        replace_app_bin(dut, "unit-test-app", app_bin)
        dut.start_app()

        reset_dut(dut)

        # get the list of test cases
        dut.write("")
        dut.expect("Here's the test menu, pick your combo:", timeout=DEFAULT_TIMEOUT)

        def find_update_dic(name, _t, _timeout, child_case_num=None):
            for _case_data in extra_data:
                if _case_data['name'] == name:
                    _case_data['type'] = _t
                    if 'timeout' not in _case_data:
                        _case_data['timeout'] = _timeout
                    if child_case_num:
                        _case_data['child case num'] = child_case_num

        try:
            while True:
                data = dut.expect(TEST_PATTERN, timeout=DEFAULT_TIMEOUT)
                test_case_name = data[1]
                m = re.search(r'\[timeout=(\d+)\]', data[2])
                if m:
                    timeout = int(m.group(1))
                else:
                    timeout = 30
                m = re.search(r'\[multi_stage\]', data[2])
                if m:
                    test_case_type = MULTI_STAGE_ID
                else:
                    m = re.search(r'\[multi_device\]', data[2])
                    if m:
                        test_case_type = MULTI_DEVICE_ID
                    else:
                        test_case_type = SIMPLE_TEST_ID
                        find_update_dic(test_case_name, test_case_type, timeout)
                        if data[3] and re.search(END_LIST_STR, data[3]):
                            break
                        continue
                # find the last submenu item
                data = dut.expect(TEST_SUBMENU_PATTERN, timeout=DEFAULT_TIMEOUT)
                find_update_dic(test_case_name, test_case_type, timeout, child_case_num=int(data[0]))
                if data[1] and re.search(END_LIST_STR, data[1]):
                    break
            # check if the unit test case names are correct, i.e. they could be found in the device
            for _dic in extra_data:
                if 'type' not in _dic:
                    raise ValueError("Unit test \"{}\" doesn't exist in the flashed device!".format(_dic.get('name')))
        except DUT.ExpectTimeout:
            Utility.console_log("Timeout during getting the test list", color="red")
        finally:
            dut.close()

        # These options are the same for all configs, therefore there is no need to continue
        break

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--repeat', '-r',
        help='Number of repetitions for the test(s). Default is 1.',
        type=int,
        default=1
    )
    parser.add_argument("--env_config_file", "-e",
                        help="test env config file",
                        default=None
                        )
    parser.add_argument("--app_bin", "-b",
                        help="application binary file for flashing the chip",
                        default=None
                        )
    parser.add_argument(
        'test',
        help='Comma separated list of <option>:<argument> where option can be "name" (default), '
             '"child case num", "config", "timeout".',
        nargs='+'
    )
    args = parser.parse_args()

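    # Example invocation (file and case names below are purely illustrative):
    #   python unit_test.py -e env_config.yml -b build/unit-test-app.bin \
    #       "my normal case,timeout:120" "my multi stage case,child case num:3,config:psram"
    # Each positional argument is parsed into a dict such as
    #   {"name": "my multi stage case", "child case num": 3, "config": "psram", "app_bin": <value of --app_bin>}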
    list_of_dicts = []
    for test in args.test:
        test_args = test.split(r',')
        test_dict = dict()
        for test_item in test_args:
            if len(test_item) == 0:
                continue
            pair = test_item.split(r':', 1)
            if len(pair) == 1:
                test_dict['name'] = pair[0]
            elif pair[0] == 'name':
                test_dict['name'] = pair[1]
            elif len(pair) == 2:
                if pair[0] == 'timeout' or pair[0] == 'child case num':
                    test_dict[pair[0]] = int(pair[1])
                else:
                    test_dict[pair[0]] = pair[1]
            else:
                raise ValueError('Error in argument item {} of {}'.format(test_item, test))
        test_dict['app_bin'] = args.app_bin
        list_of_dicts.append(test_dict)

    TinyFW.set_default_config(env_config_file=args.env_config_file)

    env_config = TinyFW.get_default_config()
    env_config['app'] = ttfw_idf.UT
    env_config['dut'] = ttfw_idf.IDFDUT
    env_config['test_suite_name'] = 'unit_test_parsing'
    test_env = Env.Env(**env_config)
    detect_update_unit_test_info(test_env, extra_data=list_of_dicts, app_bin=args.app_bin)

    for index in range(1, args.repeat + 1):
        if args.repeat > 1:
            Utility.console_log("Repetition {}".format(index), color="green")
        for dic in list_of_dicts:
            t = dic.get('type', SIMPLE_TEST_ID)
            if t == SIMPLE_TEST_ID:
                run_unit_test_cases(extra_data=dic)
            elif t == MULTI_STAGE_ID:
                run_multiple_stage_cases(extra_data=dic)
            elif t == MULTI_DEVICE_ID:
                run_multiple_devices_cases(extra_data=dic)
            else:
                raise ValueError('Unknown type {} of {}'.format(t, dic.get('name')))