# SPDX-FileCopyrightText: 2021-2022 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
# pylint: disable=W0621  # redefined-outer-name

# This file is a pytest root configuration file and provides the following functionalities:
# 1. Defines a few fixtures that could be used under the whole project.
# 2. Defines a few hook functions.
#
# IDF is using [pytest](https://github.com/pytest-dev/pytest) and
# [pytest-embedded plugin](https://github.com/espressif/pytest-embedded) as its example test framework.
#
# This is an experimental feature. If you find any bug or have any question, please report it to
# https://github.com/espressif/pytest-embedded/issues
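#
# A minimal, illustrative test case using these fixtures and markers might look
# like the following (hypothetical test, shown only as documentation; the `dut`
# fixture comes from pytest-embedded, the markers from the tables below):
#
#     @pytest.mark.esp32          # target marker: runs on esp32
#     @pytest.mark.generic        # env marker: runs on generic runners
#     def test_hello_world(dut: IdfDut) -> None:
#         dut.expect('Hello world!')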

import glob
import json
import logging
import os
import re
import sys
import xml.etree.ElementTree as ET
from datetime import datetime
from fnmatch import fnmatch
from typing import Callable, List, Optional, Tuple

import pytest
from _pytest.config import Config, ExitCode
from _pytest.fixtures import FixtureRequest
from _pytest.main import Session
from _pytest.nodes import Item
from _pytest.python import Function
from _pytest.reports import TestReport
from _pytest.runner import CallInfo
from _pytest.terminal import TerminalReporter
from pytest_embedded.plugin import multi_dut_argument, multi_dut_fixture
from pytest_embedded.utils import find_by_suffix
from pytest_embedded_idf.dut import IdfDut

try:
    from idf_ci_utils import IDF_PATH, to_list
    from idf_unity_tester import CaseTester
except ImportError:
    sys.path.append(os.path.join(os.path.dirname(__file__), 'tools', 'ci'))

    from idf_ci_utils import IDF_PATH, to_list
    from idf_unity_tester import CaseTester

try:
    import common_test_methods  # noqa: F401
except ImportError:
    sys.path.append(os.path.join(os.path.dirname(__file__), 'tools', 'ci', 'python_packages'))

    import common_test_methods  # noqa: F401

SUPPORTED_TARGETS = ['esp32', 'esp32s2', 'esp32c3', 'esp32s3', 'esp32c2', 'esp32c6', 'esp32h2']
PREVIEW_TARGETS: List[str] = []  # this PREVIEW_TARGETS excludes 'linux' target
DEFAULT_SDKCONFIG = 'default'

TARGET_MARKERS = {
    'esp32': 'support esp32 target',
    'esp32s2': 'support esp32s2 target',
    'esp32s3': 'support esp32s3 target',
    'esp32c3': 'support esp32c3 target',
    'esp32c2': 'support esp32c2 target',
    'esp32c6': 'support esp32c6 target',
    'esp32h2': 'support esp32h2 target',
    'linux': 'support linux target',
}

SPECIAL_MARKERS = {
    'supported_targets': "support all officially announced supported targets ('esp32', 'esp32s2', 'esp32c3', 'esp32s3', 'esp32c2', 'esp32c6', 'esp32h2')",
    'preview_targets': "support all preview targets ('none')",
    'all_targets': 'support all targets, including supported ones and preview ones',
    'temp_skip_ci': 'temp skip tests for specified targets only in ci',
    'temp_skip': 'temp skip tests for specified targets both in ci and locally',
    'nightly_run': 'tests should be executed as part of the nightly trigger pipeline',
    'host_test': 'tests which should not be built at the build stage, and instead built in host_test stage',
    'qemu': 'build and test using qemu-system-xtensa, not real target',
}

ENV_MARKERS = {
    # single-dut markers
    'generic': 'tests should be run on generic runners',
    'flash_suspend': 'support flash suspend feature',
    'ip101': 'connected via wired 10/100M ethernet',
    'lan8720': 'connected via LAN8720 ethernet transceiver',
    'quad_psram': 'runners with quad psram',
    'octal_psram': 'runners with octal psram',
    'usb_host': 'usb host runners',
    'usb_host_flash_disk': 'usb host runners with USB flash disk attached',
    'usb_device': 'usb device runners',
    'ethernet_ota': 'ethernet OTA runners',
    'flash_encryption': 'Flash Encryption runners',
    'flash_encryption_f4r8': 'Flash Encryption runners with 4-line flash and 8-line psram',
    'flash_encryption_f8r8': 'Flash Encryption runners with 8-line flash and 8-line psram',
    'flash_multi': 'Multiple flash chips tests',
    'psram': 'Chip has 4-line psram',
    'ir_transceiver': 'runners with a pair of IR transmitter and receiver',
    'twai_transceiver': 'runners with a TWAI PHY transceiver',
    'flash_encryption_wifi_high_traffic': 'Flash Encryption runners with wifi high traffic support',
    'ethernet': 'ethernet runner',
    'ethernet_flash_8m': 'ethernet runner with 8MB flash',
    'ethernet_router': 'both the runner and dut connect to the same router through ethernet NIC',
    'wifi_ap': 'a wifi AP in the environment',
    'wifi_router': 'both the runner and dut connect to the same wifi router',
    'wifi_high_traffic': 'wifi high traffic runners',
    'wifi_wlan': 'wifi runner with a wireless NIC',
    'Example_ShieldBox_Basic': 'basic configuration of the AP and ESP DUT placed in a shielded box',
    'Example_ShieldBox': 'multiple shielded APs connected to shielded ESP DUT via RF cable with programmable attenuator',
    'xtal_26mhz': 'runner with 26MHz xtal on board',
    'xtal_40mhz': 'runner with 40MHz xtal on board',
    'external_flash': 'external flash memory connected via VSPI (FSPI)',
    'sdcard_sdmode': 'sdcard running in SD mode',
    'sdcard_spimode': 'sdcard running in SPI mode',
    'emmc': 'eMMC card',
    'MSPI_F8R8': 'runner with Octal Flash and Octal PSRAM',
    'MSPI_F4R8': 'runner with Quad Flash and Octal PSRAM',
    'MSPI_F4R4': 'runner with Quad Flash and Quad PSRAM',
    'jtag': 'runner where the chip is accessible through JTAG as well',
    'usb_serial_jtag': 'runner where the chip is accessible through builtin JTAG as well',
    'adc': 'ADC related tests should run on adc runners',
    'xtal32k': 'Runner with external 32k crystal connected',
    'no32kXtal': 'Runner with no external 32k crystal connected',
    'multi_dut_modbus_rs485': 'a pair of runners connected by RS485 bus',
    'psramv0': 'Runner with PSRAM version 0',
    'esp32eco3': 'Runner with esp32 eco3 connected',
    'ecdsa_efuse': 'Runner with test ECDSA private keys programmed in efuse',
    'ccs811': 'Runner with CCS811 connected',
    'eth_w5500': 'SPI Ethernet module with two W5500',
    'nvs_encr_hmac': 'Runner with test HMAC key programmed in efuse',
    'i2c_oled': 'Runner with ssd1306 I2C oled connected',
    # multi-dut markers
    'ieee802154': 'ieee802154 related tests should run on ieee802154 runners.',
    'openthread_br': 'tests should be used for openthread border router.',
    'zigbee_multi_dut': 'zigbee runner which has multiple DUTs.',
    'wifi_two_dut': 'tests should be run on runners which have two wifi DUTs connected.',
    'generic_multi_device': 'generic multiple devices whose corresponding gpio pins are connected to each other.',
    'twai_network': 'multiple runners form a TWAI network.',
    'sdio_master_slave': 'Test sdio multi board.',
}
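
# Illustrative multi-DUT usage (hypothetical test): env markers describe the
# runner, while the number of DUTs is set through pytest-embedded's `count`
# parametrization:
#
#     @pytest.mark.esp32
#     @pytest.mark.wifi_two_dut
#     @pytest.mark.parametrize('count', [2], indirect=True)
#     def test_wifi_two_dut(dut: Tuple[IdfDut, IdfDut]) -> None:
#         ...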


##################
# Help Functions #
##################
def format_case_id(target: Optional[str], config: Optional[str], case: str) -> str:
    return f'{target}.{config}.{case}'
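
# For example (illustrative values):
#     format_case_id('esp32', 'default', 'test_hello_world') -> 'esp32.default.test_hello_world'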


def item_marker_names(item: Item) -> List[str]:
    return [marker.name for marker in item.iter_markers()]


def item_target_marker_names(item: Item) -> List[str]:
    res = set()
    for marker in item.iter_markers():
        if marker.name in TARGET_MARKERS:
            res.add(marker.name)

    return sorted(res)


def item_env_marker_names(item: Item) -> List[str]:
    res = set()
    for marker in item.iter_markers():
        if marker.name in ENV_MARKERS:
            res.add(marker.name)

    return sorted(res)


def item_skip_targets(item: Item) -> List[str]:
    def _get_temp_markers_disabled_targets(marker_name: str) -> List[str]:
        temp_marker = item.get_closest_marker(marker_name)

        if not temp_marker:
            return []

        # temp markers should always use keyword arguments `targets` and `reason`
        if not temp_marker.kwargs.get('targets') or not temp_marker.kwargs.get('reason'):
            raise ValueError(
                f'`{marker_name}` should always use keyword arguments `targets` and `reason`. '
                f'For example: '
                f'`@pytest.mark.{marker_name}(targets=["esp32"], reason="IDF-xxxx, will fix it ASAP")`'
            )

        return to_list(temp_marker.kwargs['targets'])  # type: ignore

    temp_skip_ci_targets = _get_temp_markers_disabled_targets('temp_skip_ci')
    temp_skip_targets = _get_temp_markers_disabled_targets('temp_skip')

    # in CI we skip the union of `temp_skip` and `temp_skip_ci`
    if os.getenv('CI_JOB_ID'):
        skip_targets = list(set(temp_skip_ci_targets).union(set(temp_skip_targets)))
    else:  # we use `temp_skip` locally
        skip_targets = temp_skip_targets

    return skip_targets


def get_target_marker_from_expr(markexpr: str) -> str:
    candidates = set()
    # we use `-m "esp32 and generic"` in our CI to filter the test cases
    # this doesn't cover all use cases, but it fits what we do in CI.
    for marker in markexpr.split('and'):
        marker = marker.strip()
        if marker in TARGET_MARKERS:
            candidates.add(marker)

    if len(candidates) > 1:
        raise ValueError(f'Specified more than one target marker: {candidates}. Please specify no more than one.')
    elif len(candidates) == 1:
        return candidates.pop()
    else:
        raise ValueError('Please specify one target marker via "--target [TARGET]" or via "-m [TARGET]"')
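
# For example (illustrative): with `pytest -m "esp32 and generic"`, the markexpr is
# 'esp32 and generic' and this function returns 'esp32'.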


############
# Fixtures #
############
@pytest.fixture(scope='session')
def idf_path() -> str:
    return os.path.dirname(__file__)


@pytest.fixture(scope='session', autouse=True)
def session_tempdir() -> str:
    _tmpdir = os.path.join(
        os.path.dirname(__file__),
        'pytest_embedded_log',
        datetime.now().strftime('%Y-%m-%d_%H-%M-%S'),
    )
    os.makedirs(_tmpdir, exist_ok=True)
    return _tmpdir


@pytest.fixture
def case_tester(dut: IdfDut, **kwargs):  # type: ignore
    yield CaseTester(dut, **kwargs)


@pytest.fixture
@multi_dut_argument
def config(request: FixtureRequest) -> str:
    return getattr(request, 'param', None) or DEFAULT_SDKCONFIG


@pytest.fixture
def test_func_name(request: FixtureRequest) -> str:
    return request.node.function.__name__  # type: ignore


@pytest.fixture
def test_case_name(request: FixtureRequest, target: str, config: str) -> str:
    return format_case_id(target, config, request.node.originalname)


@pytest.fixture
@multi_dut_fixture
def build_dir(request: FixtureRequest, app_path: str, target: Optional[str], config: Optional[str]) -> str:
    """
    Check local build dir with the following priority:

    1. build_<target>_<config>
    2. build_<target>
    3. build_<config>
    4. build

    Returns:
        valid build directory
    """
    check_dirs = []
    if target is not None and config is not None:
        check_dirs.append(f'build_{target}_{config}')
    if target is not None:
        check_dirs.append(f'build_{target}')
    if config is not None:
        check_dirs.append(f'build_{config}')
    check_dirs.append('build')

    idf_pytest_embedded = request.config.stash[_idf_pytest_embedded_key]

    build_dir = None
    if idf_pytest_embedded.apps_list is not None:
        for check_dir in check_dirs:
            binary_path = os.path.join(app_path, check_dir)
            if binary_path in idf_pytest_embedded.apps_list:
                build_dir = check_dir
                break

        if build_dir is None:
            pytest.skip(
                f'app path {app_path} with target {target} and config {config} is not listed in app info list files'
            )
            return ''  # not reachable, to fool mypy

    if build_dir:
        check_dirs = [build_dir]

    for check_dir in check_dirs:
        binary_path = os.path.join(app_path, check_dir)
        if os.path.isdir(binary_path):
            logging.info(f'found valid binary path: {binary_path}')
            return check_dir

        logging.warning('checking binary path: %s... missing... trying another place', binary_path)

    raise ValueError(
        f'no valid build dir. Please build the binary via "idf.py -B {check_dirs[0]} build" and run pytest again'
    )


@pytest.fixture(autouse=True)
@multi_dut_fixture
def junit_properties(test_case_name: str, record_xml_attribute: Callable[[str, object], None]) -> None:
    """
    This fixture is autoused and will modify the junit report test case name to <target>.<config>.<case_name>
    """
    record_xml_attribute('name', test_case_name)


######################
# Log Util Functions #
######################
@pytest.fixture
def log_performance(record_property: Callable[[str, object], None]) -> Callable[[str, str], None]:
    """
    log a performance item with a pre-defined format to the console
    and record it under the ``properties`` tag in the junit report if available.
    """

    def real_func(item: str, value: str) -> None:
        """
        :param item: performance item name
        :param value: performance value
        """
        logging.info('[Performance][%s]: %s', item, value)
        record_property(item, value)

    return real_func
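
# Illustrative usage inside a test (hypothetical item name and value):
#
#     def test_rsa_sign_perf(dut, log_performance) -> None:
#         log_performance('rsa_2048_sign', '12 ms')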


@pytest.fixture
def check_performance(idf_path: str) -> Callable[[str, float, str], None]:
    """
    check if the given performance item meets the passing standard or not
    """

    def real_func(item: str, value: float, target: str) -> None:
        """
        :param item: performance item name
        :param value: performance item value
        :param target: target chip
        :raise AssertionError: if the check fails
        """

        def _find_perf_item(operator: str, path: str) -> float:
            with open(path, 'r') as f:
                data = f.read()
            match = re.search(r'#define\s+IDF_PERFORMANCE_{}_{}\s+([\d.]+)'.format(operator, item.upper()), data)
            return float(match.group(1))  # type: ignore

        def _check_perf(operator: str, standard_value: float) -> None:
            if operator == 'MAX':
                ret = value <= standard_value
            else:
                ret = value >= standard_value
            if not ret:
                raise AssertionError(
                    "[Performance] {} value is {}, doesn't meet the pass standard {}".format(item, value, standard_value)
                )

        path_prefix = os.path.join(idf_path, 'components', 'idf_test', 'include')
        performance_files = (
            os.path.join(path_prefix, target, 'idf_performance_target.h'),
            os.path.join(path_prefix, 'idf_performance.h'),
        )

        found_item = False
        for op in ['MIN', 'MAX']:
            for performance_file in performance_files:
                try:
                    standard = _find_perf_item(op, performance_file)
                except (IOError, AttributeError):
                    # the performance file doesn't exist or a match is not found in it
                    continue

                _check_perf(op, standard)
                found_item = True
                break

        if not found_item:
            raise AssertionError('Failed to get performance standard for {}'.format(item))

    return real_func
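
# The passing standards parsed above are plain C defines. An illustrative entry in
# components/idf_test/include/idf_performance.h might look like (hypothetical item):
#
#     #define IDF_PERFORMANCE_MAX_RSA_2048_SIGN 20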


@pytest.fixture
def log_minimum_free_heap_size(dut: IdfDut, config: str) -> Callable[..., None]:
    def real_func() -> None:
        res = dut.expect(r'Minimum free heap size: (\d+) bytes')
        logging.info(
            '\n------ heap size info ------\n'
            '[app_name] {}\n'
            '[config_name] {}\n'
            '[target] {}\n'
            '[minimum_free_heap_size] {} Bytes\n'
            '------ heap size end ------'.format(
                os.path.basename(dut.app.app_path),
                config,
                dut.target,
                res.group(1).decode('utf8'),
            )
        )

    return real_func


@pytest.fixture
def dev_password(request: FixtureRequest) -> str:
    return request.config.getoption('dev_passwd') or ''


@pytest.fixture
def dev_user(request: FixtureRequest) -> str:
    return request.config.getoption('dev_user') or ''


##################
# Hook functions #
##################
def pytest_addoption(parser: pytest.Parser) -> None:
    idf_group = parser.getgroup('idf')
    idf_group.addoption(
        '--sdkconfig',
        help='sdkconfig postfix, like sdkconfig.ci.<config>. (Default: None, which would build all found apps)',
    )
    idf_group.addoption('--known-failure-cases-file', help='known failure cases file path')
    idf_group.addoption(
        '--dev-user',
        help='user name associated with some specific device/service used during the test execution',
    )
    idf_group.addoption(
        '--dev-passwd',
        help='password associated with some specific device/service used during the test execution',
    )
    idf_group.addoption(
        '--app-info-basedir',
        default=IDF_PATH,
        help='app info base directory. Specify this value when you\'re building under a '
        'different IDF_PATH. (Default: $IDF_PATH)',
    )
    idf_group.addoption(
        '--app-info-filepattern',
        help='glob pattern to specify the files that include built app info generated by '
        '`idf-build-apps --collect-app-info ...`. If specified, test cases whose binary paths '
        'are not recorded in the app info files will be skipped instead of raising ValueError '
        'when those paths do not exist in the local file system.',
    )
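
# An illustrative local invocation combining these options (hypothetical paths and values):
#
#     pytest examples/get-started/hello_world --target esp32 --sdkconfig release -m generic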


_idf_pytest_embedded_key = pytest.StashKey['IdfPytestEmbedded']()
_item_failed_cases_key = pytest.StashKey[list]()
_item_failed_key = pytest.StashKey[bool]()


def pytest_configure(config: Config) -> None:
    # cli option "--target"
    target = config.getoption('target') or ''

    help_commands = ['--help', '--fixtures', '--markers', '--version']
    for cmd in help_commands:
        if cmd in config.invocation_params.args:
            target = 'unneeded'
            break

    if not target:  # could also be specified through markexpr via "-m"
        target = get_target_marker_from_expr(config.getoption('markexpr') or '')

    apps_list = None
    app_info_basedir = config.getoption('app_info_basedir')
    app_info_filepattern = config.getoption('app_info_filepattern')
    if app_info_filepattern:
        apps_list = []
        for file in glob.glob(os.path.join(IDF_PATH, app_info_filepattern)):
            with open(file) as fr:
                for line in fr.readlines():
                    if not line.strip():
                        continue

                    # each line is a valid json
                    app_info = json.loads(line.strip())
                    if app_info_basedir and app_info['app_dir'].startswith(app_info_basedir):
                        relative_app_dir = os.path.relpath(app_info['app_dir'], app_info_basedir)
                        apps_list.append(os.path.join(IDF_PATH, os.path.join(relative_app_dir, app_info['build_dir'])))
                        print('Detected app: ', apps_list[-1])
                    else:
                        print(
                            f'WARNING: app_info base dir {app_info_basedir} not recognizable in {app_info["app_dir"]}, skipping...'
                        )
                        continue

    config.stash[_idf_pytest_embedded_key] = IdfPytestEmbedded(
        target=target,
        sdkconfig=config.getoption('sdkconfig'),
        known_failure_cases_file=config.getoption('known_failure_cases_file'),
        apps_list=apps_list,
    )
    config.pluginmanager.register(config.stash[_idf_pytest_embedded_key])

    for name, description in {**TARGET_MARKERS, **ENV_MARKERS, **SPECIAL_MARKERS}.items():
        config.addinivalue_line('markers', f'{name}: {description}')


def pytest_unconfigure(config: Config) -> None:
    _pytest_embedded = config.stash.get(_idf_pytest_embedded_key, None)
    if _pytest_embedded:
        del config.stash[_idf_pytest_embedded_key]
        config.pluginmanager.unregister(_pytest_embedded)


class IdfPytestEmbedded:
    def __init__(
        self,
        target: str,
        sdkconfig: Optional[str] = None,
        known_failure_cases_file: Optional[str] = None,
        apps_list: Optional[List[str]] = None,
    ):
        # CLI options to filter the test cases
        self.target = target.lower()
        self.sdkconfig = sdkconfig
        self.known_failure_patterns = self._parse_known_failure_cases_file(known_failure_cases_file)
        self.apps_list = apps_list

        self._failed_cases: List[Tuple[str, bool, bool]] = []  # (test_case_name, is_known_failure_cases, is_xfail)

    @property
    def failed_cases(self) -> List[str]:
        return [case for case, is_known, is_xfail in self._failed_cases if not is_known and not is_xfail]

    @property
    def known_failure_cases(self) -> List[str]:
        return [case for case, is_known, _ in self._failed_cases if is_known]

    @property
    def xfail_cases(self) -> List[str]:
        return [case for case, _, is_xfail in self._failed_cases if is_xfail]

    @staticmethod
    def _parse_known_failure_cases_file(
        known_failure_cases_file: Optional[str] = None,
    ) -> List[str]:
        if not known_failure_cases_file or not os.path.isfile(known_failure_cases_file):
            return []

        patterns = []
        with open(known_failure_cases_file) as fr:
            for line in fr.readlines():
                if not line.strip():
                    continue
                without_comments = line.split('#')[0].strip()
                if without_comments:
                    patterns.append(without_comments)

        return patterns
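
    # The known failure cases file is matched line by line with fnmatch, `#` starting
    # a comment. An illustrative file (hypothetical case ids) could contain:
    #
    #     esp32.default.test_wifi_connect      # IDF-1234
    #     esp32c3.*.test_flash_encryption*     # IDF-5678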

    @pytest.hookimpl(tryfirst=True)
    def pytest_sessionstart(self, session: Session) -> None:
        # same behavior as vanilla pytest-embedded '--target'
        session.config.option.target = self.target

    @pytest.hookimpl(tryfirst=True)
    def pytest_collection_modifyitems(self, items: List[Function]) -> None:
        # Sort by file path and callspec.config.
        # Implemented like this due to a limitation of pytest: fixture values can't be
        # retrieved while collecting. https://github.com/pytest-dev/pytest/discussions/9689
        # After sorting the test apps, the test may use the app cache to reduce the flash times.
        def _get_param_config(_item: Function) -> str:
            if hasattr(_item, 'callspec'):
                return _item.callspec.params.get('config', DEFAULT_SDKCONFIG)  # type: ignore
            return DEFAULT_SDKCONFIG

        items.sort(key=lambda x: (os.path.dirname(x.path), _get_param_config(x)))

        # set a default timeout of 10 minutes for each case
        for item in items:
            if 'timeout' not in item.keywords:
                item.add_marker(pytest.mark.timeout(10 * 60))

        # add markers for special markers
        for item in items:
            if 'supported_targets' in item.keywords:
                for _target in SUPPORTED_TARGETS:
                    item.add_marker(_target)
            if 'preview_targets' in item.keywords:
                for _target in PREVIEW_TARGETS:
                    item.add_marker(_target)
            if 'all_targets' in item.keywords:
                for _target in [*SUPPORTED_TARGETS, *PREVIEW_TARGETS]:
                    item.add_marker(_target)

            # add 'xtal_40mhz' tag as a default tag for the esp32c2 target
            # only add this marker for esp32c2 cases
            if (
                self.target == 'esp32c2'
                and 'esp32c2' in item_marker_names(item)
                and 'xtal_26mhz' not in item_marker_names(item)
            ):
                item.add_marker('xtal_40mhz')

        # filter all the test cases with the "nightly_run" marker
        if os.getenv('INCLUDE_NIGHTLY_RUN') == '1':
            # Do not filter nightly_run cases
            pass
        elif os.getenv('NIGHTLY_RUN') == '1':
            items[:] = [item for item in items if 'nightly_run' in item_marker_names(item)]
        else:
            items[:] = [item for item in items if 'nightly_run' not in item_marker_names(item)]

        # filter all the test cases with target and skip_targets
        items[:] = [
            item
            for item in items
            if self.target in item_marker_names(item) and self.target not in item_skip_targets(item)
        ]

        # filter all the test cases with cli option "config"
        if self.sdkconfig:
            items[:] = [item for item in items if _get_param_config(item) == self.sdkconfig]

    def pytest_runtest_makereport(self, item: Function, call: CallInfo[None]) -> Optional[TestReport]:
        report = TestReport.from_item_and_call(item, call)
        if item.stash.get(_item_failed_key, None) is None:
            item.stash[_item_failed_key] = False

        if report.outcome == 'failed':
            # Mark the failed test cases
            #
            # This hook function is called in 3 phases: setup, call, and teardown.
            # report.outcome is the outcome of the single call of the current phase,
            # which is independent; the call phase outcome is the test result.
            item.stash[_item_failed_key] = True

        if call.when == 'teardown':
            item_failed = item.stash[_item_failed_key]
            if item_failed:
                # unity real test cases
                failed_sub_cases = item.stash.get(_item_failed_cases_key, [])
                if failed_sub_cases:
                    for test_case_name in failed_sub_cases:
                        self._failed_cases.append((test_case_name, self._is_known_failure(test_case_name), False))
                else:  # the case itself is failing
                    test_case_name = item.funcargs.get('test_case_name', '')
                    if test_case_name:
                        self._failed_cases.append(
                            (
                                test_case_name,
                                self._is_known_failure(test_case_name),
                                report.keywords.get('xfail', False),
                            )
                        )

        return report

    def _is_known_failure(self, case_id: str) -> bool:
        for pattern in self.known_failure_patterns:
            if case_id == pattern:
                return True
            if fnmatch(case_id, pattern):
                return True
        return False

    @pytest.hookimpl(trylast=True)
    def pytest_runtest_teardown(self, item: Function) -> None:
        """
        Format the junit reports generated by the test cases
        """
        tempdir = item.funcargs.get('test_case_tempdir')
        if not tempdir:
            return

        junits = find_by_suffix('.xml', tempdir)
        if not junits:
            return

        failed_sub_cases = []
        target = item.funcargs['target']
        config = item.funcargs['config']
        for junit in junits:
            xml = ET.parse(junit)
            testcases = xml.findall('.//testcase')
            for case in testcases:
                # modify the junit files
                new_case_name = format_case_id(target, config, case.attrib['name'])
                case.attrib['name'] = new_case_name
                if 'file' in case.attrib:
                    case.attrib['file'] = case.attrib['file'].replace('/IDF/', '')  # our unity test framework

                # collect real failure cases
                if case.find('failure') is not None:
                    failed_sub_cases.append(new_case_name)

            xml.write(junit)

        item.stash[_item_failed_cases_key] = failed_sub_cases

    def pytest_sessionfinish(self, session: Session, exitstatus: int) -> None:
        if exitstatus != 0:
            if exitstatus == ExitCode.NO_TESTS_COLLECTED:
                session.exitstatus = 0
            elif self.known_failure_cases and not self.failed_cases:
                session.exitstatus = 0

    def pytest_terminal_summary(self, terminalreporter: TerminalReporter) -> None:
        if self.known_failure_cases:
            terminalreporter.section('Known failure cases', bold=True, yellow=True)
            terminalreporter.line('\n'.join(self.known_failure_cases))

        if self.xfail_cases:
            terminalreporter.section('xfail cases', bold=True, yellow=True)
            terminalreporter.line('\n'.join(self.xfail_cases))

        if self.failed_cases:
            terminalreporter.section('Failed cases', bold=True, red=True)
            terminalreporter.line('\n'.join(self.failed_cases))