Merge branch 'feature/bot' into 'master'

CI: support customized tests with @bot

See merge request !1502

Commit: 6e1453e864
@@ -26,8 +26,16 @@ variables:
  # IDF environment
  IDF_PATH: "$CI_PROJECT_DIR"
  APPLY_BOT_FILTER_SCRIPT: "$CI_PROJECT_DIR/tools/ci/apply_bot_filter.py"
  CHECKOUT_REF_SCRIPT: "$CI_PROJECT_DIR/tools/ci/checkout_project_ref.py"

# before each job, we need to check if this job is filtered by bot stage/job filter
.apply_bot_filter: &apply_bot_filter
  python $APPLY_BOT_FILTER_SCRIPT || exit 0

before_script:
  # apply bot filter in before script
  - *apply_bot_filter
  # add gitlab ssh key
  - mkdir -p ~/.ssh
  - chmod 700 ~/.ssh
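The gating works through exit codes: the filter script exits non-zero when the current job is filtered out, and the "|| exit 0" in the anchor turns that into an early, successful job exit. A minimal sketch of what a bot-triggered pipeline might export (the variable names come from tools/ci/apply_bot_filter.py introduced later in this commit; the sample values are hypothetical):

import json
import os

# hypothetical filter values a "@bot" message might translate into;
# JSON lists, with entries wrapped in r"..." treated as regular expressions
os.environ["BOT_STAGE_FILTER"] = json.dumps(["integration_test"])
os.environ["BOT_JOB_FILTER"] = json.dumps(['r"UT_.+"', "example_test"])

# apply_bot_filter.py exits 0 if the current stage and job match both filters,
# non-zero otherwise; "|| exit 0" converts a non-match into a skipped (but green) job.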
@@ -46,11 +54,15 @@ before_script:

.do_nothing_before:
  before_script: &do_nothing_before
    # apply bot filter in before script
    - *apply_bot_filter
    - echo "Not setting up GitLab key, not fetching submodules"
    - source tools/ci/configure_ci_environment.sh

.add_gitlab_key_before:
  before_script: &add_gitlab_key_before
    # apply bot filter in before script
    - *apply_bot_filter
    - echo "Not fetching submodules"
    - source tools/ci/configure_ci_environment.sh
    # add gitlab ssh key
@@ -75,7 +87,7 @@ build_template_app:
    # Try to use the same branch name for esp-idf-template that we're
    # using on esp-idf. If it doesn't exist then just stick to the default
    # branch
    - git checkout ${CI_COMMIT_REF_NAME} || echo "Using esp-idf-template default branch..."
    - python $CHECKOUT_REF_SCRIPT esp-idf-template
    # Test debug build (default)
    - make all V=1
    # Now test release build
@@ -106,7 +118,7 @@ build_ssc:
  script:
    - git clone $SSC_REPOSITORY
    - cd SSC
    - git checkout ${CI_COMMIT_REF_NAME} || echo "Using SSC default branch..."
    - python $CHECKOUT_REF_SCRIPT SSC
    - MAKEFLAGS= ./gen_misc_ng.sh

build_esp_idf_tests:
@@ -281,6 +293,7 @@ test_report:
    # clone test bench
    - git clone $GITLAB_SSH_SERVER/yinling/auto_test_script.git
    - cd auto_test_script
    - python $CHECKOUT_REF_SCRIPT auto_test_script
    # generate report
    - TEST_RESULT=Pass
    - python CITestReport.py -l $LOG_PATH -t $TEST_CASE_FILE_PATH -p $REPORT_PATH -r $RESULT_PATH -a $ARTIFACTS_PATH -m $MODULE_UPDATE_FILE || TEST_RESULT=Fail
@@ -438,7 +451,7 @@ assign_test:
    # clone test script to assign tests
    - git clone $TEST_SCRIPT_REPOSITORY
    - cd auto_test_script
    - git checkout ${CI_COMMIT_REF_NAME} || echo "Using default branch..."
    - python $CHECKOUT_REF_SCRIPT auto_test_script
    # assign unit test cases
    - python CIAssignTestCases.py -t $IDF_PATH/components/idf_test/unit_test -c $IDF_PATH/.gitlab-ci.yml -b $IDF_PATH/test_bins
    # assign integration test cases
@@ -500,10 +513,12 @@ assign_test:
    - test -e $CONFIG_FILE || exit 0
    # clone local test env configs
    - git clone $TEST_ENV_CONFIG_REPOSITORY
    - cd ci-test-runner-configs
    - python $CHECKOUT_REF_SCRIPT ci-test-runner-configs
    # clone test bench
    - git clone $TEST_SCRIPT_REPOSITORY
    - cd auto_test_script
    - git checkout ${CI_COMMIT_REF_NAME} || echo "Using default branch..."
    - python $CHECKOUT_REF_SCRIPT auto_test_script
    # run test
    - python CIRunner.py -l "$LOG_PATH/$CI_JOB_NAME" -c $CONFIG_FILE -e $LOCAL_ENV_CONFIG_PATH -t $TEST_CASE_FILE_PATH -m $MODULE_UPDATE_FILE
@@ -533,6 +548,8 @@ nvs_compatible_test:
  script:
    # clone local test env configs
    - git clone $TEST_ENV_CONFIG_REPOSITORY
    - cd ci-test-runner-configs
    - python $CHECKOUT_REF_SCRIPT ci-test-runner-configs
    # clone test bench
    - git clone $TEST_SCRIPT_REPOSITORY
    - cd auto_test_script
@@ -9,6 +9,7 @@
#include "freertos/semphr.h"
#include "test_utils.h"


TEST_CASE("esp_timer orders timers correctly", "[esp_timer]")
{
    void dummy_cb(void* arg)
@@ -335,8 +336,7 @@ TEST_CASE("esp_timer_get_time call takes less than 1us", "[esp_timer]")
        end = esp_timer_get_time();
    }
    int ns_per_call = (int) ((end - begin) * 1000 / iter_count);
    printf("esp_timer_get_time: %dns per call\n", ns_per_call);
    TEST_ASSERT(ns_per_call < 1000);
    TEST_PERFORMANCE_LESS_THAN(ESP_TIMER_GET_TIME_PER_CALL, "%dns", ns_per_call);
}

/* This test runs for about 10 minutes and is disabled in CI.
@@ -17,6 +17,8 @@
#include "soc/io_mux_reg.h"
#include "soc/cpu.h"

#include "idf_performance.h"

#define REPEAT_OPS 10000

static uint32_t start, end;
@@ -42,6 +44,12 @@ TEST_CASE("portMUX spinlocks (no contention)", "[freertos]")
        portEXIT_CRITICAL(&mux);
    }
    BENCHMARK_END("no contention lock");

#ifdef CONFIG_FREERTOS_UNICORE
    TEST_PERFORMANCE_LESS_THAN(FREERTOS_SPINLOCK_CYCLES_PER_OP_UNICORE, "%d cycles/op", ((end - start)/REPEAT_OPS));
#else
    TEST_PERFORMANCE_LESS_THAN(FREERTOS_SPINLOCK_CYCLES_PER_OP, "%d cycles/op", ((end - start)/REPEAT_OPS));
#endif
}

TEST_CASE("portMUX recursive locks (no contention)", "[freertos]")
components/idf_test/component.mk (new executable file, 5 lines)
@@ -0,0 +1,5 @@
#
# Component Makefile
#
# (Uses default behaviour of compiling all source files in directory, adding 'include' to include path.)
components/idf_test/include/idf_performance.h (new file, 15 lines)
@@ -0,0 +1,15 @@

/* @brief macro to print IDF performance
 * @param item : performance item name. a string pointer.
 * @param value_fmt: print format and unit of the value, for example: "%02fms", "%dKB"
 * @param value : the performance value.
 */
#define IDF_LOG_PERFORMANCE(item, value_fmt, value) \
    printf("[Performance][%s]: "value_fmt"\n", item, value)


/* declare the performance here */
#define IDF_PERFORMANCE_MAX_HTTPS_REQUEST_BIN_SIZE                      610
#define IDF_PERFORMANCE_MAX_FREERTOS_SPINLOCK_CYCLES_PER_OP             200
#define IDF_PERFORMANCE_MAX_FREERTOS_SPINLOCK_CYCLES_PER_OP_UNICORE     130
#define IDF_PERFORMANCE_MAX_ESP_TIMER_GET_TIME_PER_CALL                 1000
@@ -23,6 +23,12 @@ def test_examples_protocol_https_request(env, extra_data):
    3. send http request
    """
    dut1 = env.get_dut("https_request", "examples/protocols/https_request")
    # check and log bin size
    binary_file = os.path.join(dut1.app.binary_path, "https-request.bin")
    bin_size = os.path.getsize(binary_file)
    IDF.log_performance("https_request_bin_size", "{}KB".format(bin_size//1024))
    IDF.check_performance("https_request_bin_size", bin_size//1024)
    # start test
    dut1.start_app()
    dut1.expect("Connecting to www.howsmyssl.com:443", timeout=30)
    dut1.expect("Performing the SSL/TLS handshake")
tools/ci/apply_bot_filter.py (new executable file, 60 lines)
@@ -0,0 +1,60 @@
#!/usr/bin/env python

# internal use only
# called by CI jobs to determine if they need to be executed

import os
import re
import sys
import json


RE_FILTER_PATTERN = re.compile(r'^r"(.+)?"$')

RE_TYPE = type(re.compile("", 0))


def parse_filter(filter_name):
    filter_raw = os.getenv(filter_name)
    filters = []
    if filter_raw:
        filter_data = json.loads(filter_raw)
        for _filter in filter_data:
            match = RE_FILTER_PATTERN.search(_filter)
            if match:
                filters.append(re.compile(match.group(1)))
            else:
                filters.append(_filter)
    return filters


def process_filter(filter_name, ci_name):
    execute = True

    # bot message is case insensitive (processed with lower case). so we also convert ci_name to lower case.
    ci_name = ci_name.lower()

    filter_list = parse_filter(filter_name)

    for _filter in filter_list:
        if isinstance(_filter, RE_TYPE):
            match = _filter.search(ci_name) is not None
        else:
            match = _filter == ci_name

        if match:
            execute = True
            break
        else:
            execute = False
    return execute


if __name__ == "__main__":
    need_to_execute = process_filter("BOT_STAGE_FILTER", os.getenv("CI_JOB_STAGE")) \
                      and process_filter("BOT_JOB_FILTER", os.getenv("CI_JOB_NAME"))
    if need_to_execute:
        sys.exit(0)
    else:
        print("Skip this job as it doesn't fit @bot's filter")
        sys.exit(-1)
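A minimal usage sketch of the filter logic above (the filter values are hypothetical; entries wrapped in r"..." are treated as regular expressions by parse_filter):

import json
import os

from apply_bot_filter import process_filter  # assumes tools/ci is on PYTHONPATH

# hypothetical filters a "@bot test" message might translate into
os.environ["BOT_STAGE_FILTER"] = json.dumps(["unit_test"])
os.environ["BOT_JOB_FILTER"] = json.dumps(['r"ut_\\d+"'])

print(process_filter("BOT_STAGE_FILTER", "unit_test"))   # True: exact match
print(process_filter("BOT_JOB_FILTER", "UT_001"))        # True: regex match (job name is lower-cased first)
print(process_filter("BOT_JOB_FILTER", "example_test"))  # False: the CI job would be skipped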
tools/ci/checkout_project_ref.py (new executable file, 38 lines)
@@ -0,0 +1,38 @@
#!/usr/bin/env python

# internal use only
# called by CI jobs when they use a project related to IDF

import os
import json
import argparse
import subprocess


def checkout_branch(proj_name, customized_revision, default_ref_name):
    try:
        ref_to_use = customized_revision[proj_name.lower()]
    except (KeyError, TypeError):
        ref_to_use = default_ref_name

    try:
        subprocess.check_call(["git", "checkout", ref_to_use])
        print("CI using ref {} for project {}".format(ref_to_use, proj_name))
    except subprocess.CalledProcessError:
        print("using default branch")


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("project",
                        help="the name of project")

    args = parser.parse_args()
    project_name = args.project

    customized_project_revisions = os.getenv("BOT_CUSTOMIZED_REVISION")
    if customized_project_revisions:
        customized_project_revisions = json.loads(customized_project_revisions)
    ci_ref_name = os.getenv("CI_COMMIT_REF_NAME")

    checkout_branch(project_name, customized_project_revisions, ci_ref_name)
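A minimal usage sketch (the BOT_CUSTOMIZED_REVISION mapping is a hypothetical example; keys are matched against the lower-cased project name, as in checkout_branch above):

import json
import os

from checkout_project_ref import checkout_branch  # assumes tools/ci is on PYTHONPATH

# hypothetical mapping the bot might provide: project name -> ref to check out
revisions = json.loads('{"auto_test_script": "feature/bot"}')

# run from inside a clone of auto_test_script; if no customized revision applies it
# tries the pipeline's own ref, and finally falls back to the default branch
checkout_branch("auto_test_script", revisions, os.getenv("CI_COMMIT_REF_NAME"))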
@@ -47,6 +47,8 @@ import functools
import serial
from serial.tools import list_ports

import Utility

if sys.version_info[0] == 2:
    import Queue as _queue
else:
@@ -72,6 +74,17 @@ def _expect_lock(func):
    return handler


def _decode_data(data):
    """ for python3, if the data is bytes, then decode it to string """
    if isinstance(data, bytes):
        # convert bytes to string
        try:
            data = data.decode("utf-8", "ignore")
        except UnicodeDecodeError:
            data = data.decode("iso8859-1",)
    return data


class _DataCache(_queue.Queue):
    """
    Data cache based on Queue. Allow users to process data cache based on bytes instead of Queue."
@@ -94,13 +107,7 @@ class _DataCache(_queue.Queue):

        try:
            data = self.get(timeout=timeout)
            if isinstance(data, bytes):
                # convert bytes to string
                try:
                    data = data.decode("utf-8", "ignore")
                except UnicodeDecodeError:
                    data = data.decode("iso8859-1",)
            self.data_cache += data
            self.data_cache += _decode_data(data)
        except _queue.Empty:
            # don't do anything when there is no update for the cache
            pass
@@ -122,18 +129,48 @@ class _DataCache(_queue.Queue):


class _RecvThread(threading.Thread):

    PERFORMANCE_PATTERN = re.compile(r"\[Performance]\[(\w+)]: ([^\r\n]+)\r?\n")

    def __init__(self, read, data_cache):
        super(_RecvThread, self).__init__()
        self.exit_event = threading.Event()
        self.setDaemon(True)
        self.read = read
        self.data_cache = data_cache
        # cache the last line of recv data for collecting performance
        self._line_cache = str()

    def collect_performance(self, data):
        """ collect performance """
        if data:
            decoded_data = _decode_data(data)

            matches = self.PERFORMANCE_PATTERN.findall(self._line_cache + decoded_data)
            for match in matches:
                Utility.console_log("[Performance][{}]: {}".format(match[0], match[1]),
                                    color="orange")

            # cache incomplete line to later process
            lines = decoded_data.splitlines(True)
            last_line = lines[-1]

            if last_line[-1] != "\n":
                if len(lines) == 1:
                    # only one line and the line is not finished, then append this to cache
                    self._line_cache += lines[-1]
                else:
                    # more than one line and not finished, replace line cache
                    self._line_cache = lines[-1]
            else:
                # line finishes, flush cache
                self._line_cache = str()

    def run(self):
        while not self.exit_event.isSet():
            data = self.read(1000)
            if data:
                self.data_cache.put(data)
                self.collect_performance(data)

    def exit(self):
        self.exit_event.set()
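To illustrate why the line cache is needed: a performance line can arrive split across two reads, and the cached tail lets the pattern match once the rest arrives. A small standalone sketch with made-up chunks:

import re

# same pattern as _RecvThread.PERFORMANCE_PATTERN above
PERFORMANCE_PATTERN = re.compile(r"\[Performance]\[(\w+)]: ([^\r\n]+)\r?\n")

line_cache = ""
for chunk in ["boot ok\r\n[Performance][https_request", "_bin_size]: 587KB\r\nfree heap: 1234\r\n"]:
    for name, value in PERFORMANCE_PATTERN.findall(line_cache + chunk):
        print("[Performance][{}]: {}".format(name, value))  # printed only after the second chunk
    lines = chunk.splitlines(True)
    if lines and lines[-1][-1] != "\n":
        # unfinished trailing line: extend the cache if the chunk had no newline at all,
        # otherwise start the cache over from the new partial line
        line_cache = line_cache + lines[-1] if len(lines) == 1 else lines[-1]
    else:
        # line finished, flush the cache
        line_cache = ""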
@@ -522,11 +559,7 @@ class SerialDUT(BaseDUT):
        timestamp = time.time()
        timestamp = "{}:{}".format(time.strftime("%m-%d %H:%M:%S", time.localtime(timestamp)),
                                   str(timestamp % 1)[2:5])
        try:
            formatted_data = "[{}]:\r\n{}\r\n".format(timestamp, data.decode("utf-8", "ignore"))
        except UnicodeDecodeError:
            # if utf-8 fail, use iso-8859-1 (single char codec with range 0-255)
            formatted_data = "[{}]:\r\n{}\r\n".format(timestamp, data.decode("iso8859-1",))
        formatted_data = "[{}]:\r\n{}\r\n".format(timestamp, _decode_data(data))
        return formatted_data

    def _port_open(self):
@@ -11,9 +11,12 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re

import TinyFW
from IDF.IDFApp import Example, UT
import Utility
from IDF.IDFApp import IDFApp, Example, UT
from IDF.IDFDUT import IDFDUT
@@ -34,3 +37,43 @@ def idf_example_test(app=Example, dut=IDFDUT, chip="ESP32",
    # not using a partial function here: defining it as a function supports auto-generated documentation
    return TinyFW.test_method(app=app, dut=dut, chip=chip, module=module,
                              execution_time=execution_time, **kwargs)


def log_performance(item, value):
    """
    print performance with a pre-defined format to the console

    :param item: performance item name
    :param value: performance value
    """
    Utility.console_log("[Performance][{}]: {}".format(item, value), "orange")


def check_performance(item, value):
    """
    check if the IDF performance value meets the pass standard

    :param item: performance item name
    :param value: performance item value
    :raise: AssertionError: if check fails
    """
    ret = True
    standard_value = 0

    idf_path = IDFApp.get_sdk_path()
    performance_file = os.path.join(idf_path, "components", "idf_test", "include", "idf_performance.h")

    if os.path.exists(performance_file):
        with open(performance_file, "r") as f:
            data = f.read()
        match = re.search(r"#define\s+IDF_PERFORMANCE_(MIN|MAX)_{}\s+([\d.]+)".format(item.upper()), data)
        if match:
            op = match.group(1)
            standard_value = float(match.group(2))
            if op == "MAX":
                ret = value <= standard_value
            else:
                ret = value >= standard_value
    if not ret:
        raise AssertionError("[Performance] {} value is {}, doesn't meet pass standard {}"
                             .format(item, value, standard_value))
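A test script can then log a measurement and gate it against the thresholds in idf_performance.h, as the https_request example above does. A minimal sketch, assuming IDF_PATH is set and the test framework directory is importable (the binary path is hypothetical):

import os
import IDF

# hypothetical binary path; in the real example test it comes from dut1.app.binary_path
bin_size = os.path.getsize("build/https-request.bin")

IDF.log_performance("https_request_bin_size", "{}KB".format(bin_size // 1024))
# raises AssertionError if bin_size//1024 exceeds IDF_PERFORMANCE_MAX_HTTPS_REQUEST_BIN_SIZE (610)
IDF.check_performance("https_request_bin_size", bin_size // 1024)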
@@ -25,6 +25,7 @@ import xunitgen
import Env
import DUT
import App
import Utility


XUNIT_FILE_NAME = "XUNIT_RESULT.xml"
@@ -32,40 +33,6 @@ XUNIT_RECEIVER = xunitgen.EventReceiver()
XUNIT_DEFAULT_TEST_SUITE = "test-suite"


_COLOR_CODES = {
    "white": '\033[0m',
    "red": '\033[31m',
    "green": '\033[32m',
    "orange": '\033[33m',
    "blue": '\033[34m',
    "purple": '\033[35m',
    "W": '\033[0m',
    "R": '\033[31m',
    "G": '\033[32m',
    "O": '\033[33m',
    "B": '\033[34m',
    "P": '\033[35m'
}


def console_log(data, color="white"):
    """
    log data to console.
    (if the console log is not flushed, Gitlab-CI won't update logs during job execution)

    :param data: data content
    :param color: color
    """
    if color not in _COLOR_CODES:
        color = "white"
    color_codes = _COLOR_CODES[color]
    print(color_codes + data)
    if color not in ["white", "W"]:
        # reset color to white for later logs
        print(_COLOR_CODES["white"] + "\r")
    sys.stdout.flush()


class DefaultEnvConfig(object):
    """
    default test configs. There are 3 places to set configs, priority is (high -> low):
@@ -187,7 +154,7 @@ def test_method(**kwargs):
                                           XUNIT_FILE_NAME)
        XUNIT_RECEIVER.begin_case(test_func.__name__, time.time(), test_func_file_name)
        try:
            console_log("starting running test: " + test_func.__name__, color="green")
            Utility.console_log("starting running test: " + test_func.__name__, color="green")
            # execute test function
            test_func(env_inst, extra_data)
            # if finish without exception, test result is True
@@ -208,9 +175,9 @@ def test_method(**kwargs):
                                                           XUNIT_DEFAULT_TEST_SUITE))

        if result:
            console_log("Test Succeed: " + test_func.__name__, color="green")
            Utility.console_log("Test Succeed: " + test_func.__name__, color="green")
        else:
            console_log(("Test Fail: " + test_func.__name__), color="red")
            Utility.console_log(("Test Fail: " + test_func.__name__), color="red")
        TestResult.set_result(result, test_func.__name__)
        return result
@@ -0,0 +1,35 @@
import sys


_COLOR_CODES = {
    "white": '\033[0m',
    "red": '\033[31m',
    "green": '\033[32m',
    "orange": '\033[33m',
    "blue": '\033[34m',
    "purple": '\033[35m',
    "W": '\033[0m',
    "R": '\033[31m',
    "G": '\033[32m',
    "O": '\033[33m',
    "B": '\033[34m',
    "P": '\033[35m'
}


def console_log(data, color="white"):
    """
    log data to console.
    (if the console log is not flushed, Gitlab-CI won't update logs during job execution)

    :param data: data content
    :param color: color
    """
    if color not in _COLOR_CODES:
        color = "white"
    color_codes = _COLOR_CODES[color]
    print(color_codes + data)
    if color not in ["white", "W"]:
        # reset color to white for later logs
        print(_COLOR_CODES["white"] + "\r")
    sys.stdout.flush()
@@ -16,6 +16,9 @@ extern "C"
#define UNITY_INCLUDE_CONFIG_H
#include "unity_internals.h"

/* include performance pass standards header file */
#include "idf_performance.h"

void setUp(void);
void tearDown(void);
@@ -285,6 +288,20 @@ void tearDown(void);
#define TEST_ASSERT_DOUBLE_IS_NOT_NAN_MESSAGE(actual, message)          UNITY_TEST_ASSERT_DOUBLE_IS_NOT_NAN((actual), __LINE__, (message))
#define TEST_ASSERT_DOUBLE_IS_NOT_DETERMINATE_MESSAGE(actual, message)  UNITY_TEST_ASSERT_DOUBLE_IS_NOT_DETERMINATE((actual), __LINE__, (message))

/* For performance checks with unity tests on IDF */
/* These macros should only be used with ESP-IDF.
 * To use a performance check, we need to first define the pass standard in idf_performance.h.
 */
#define TEST_PERFORMANCE_LESS_THAN(name, value_fmt, value) do { \
    printf("[Performance]["#name"]: "value_fmt"\n", value); \
    TEST_ASSERT(value < IDF_PERFORMANCE_MAX_##name); \
} while(0)

#define TEST_PERFORMANCE_GREATER_THAN(name, value_fmt, value) do { \
    printf("[Performance]["#name"]: "value_fmt"\n", value); \
    TEST_ASSERT(value > IDF_PERFORMANCE_MIN_##name); \
} while(0)

/* end of UNITY_FRAMEWORK_H */
#ifdef __cplusplus
}
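The printf format in these macros is what ties the target side to the host side: the printed line is exactly what _RecvThread.PERFORMANCE_PATTERN in the DUT changes above matches, so target-side numbers also reach the CI log collector. A small check of that round trip with a made-up value:

import re

# same pattern as _RecvThread.PERFORMANCE_PATTERN in the DUT changes above
PERFORMANCE_PATTERN = re.compile(r"\[Performance]\[(\w+)]: ([^\r\n]+)\r?\n")

# what TEST_PERFORMANCE_LESS_THAN(ESP_TIMER_GET_TIME_PER_CALL, "%dns", ns_per_call)
# would print for a hypothetical measurement of 680ns
target_output = "[Performance][ESP_TIMER_GET_TIME_PER_CALL]: 680ns\r\n"

print(PERFORMANCE_PATTERN.findall(target_output))
# [('ESP_TIMER_GET_TIME_PER_CALL', '680ns')]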