Merge branch 'ci/standalone_unit_test_app' into 'master'

CI: add standalone unit test app for esp_netif

See merge request espressif/esp-idf!10102
commit 00072fe2e2
Author: Ivan Grokhotkov
Date:   2020-09-11 15:50:54 +08:00
26 changed files with 1081 additions and 235 deletions

.gitignore

@@ -21,6 +21,11 @@ GPATH
 # MacOS directory files
 .DS_Store

+# Components Unit Test Apps files
+components/**/build
+components/**/sdkconfig
+components/**/sdkconfig.old
+
 # Example project files
 examples/**/sdkconfig
 examples/**/sdkconfig.old


@@ -64,22 +64,39 @@ variables:
 .fetch_submodules: &fetch_submodules |
   python $SUBMODULE_FETCH_TOOL -s $SUBMODULES_TO_FETCH

+.add_ssh_keys: &add_ssh_keys |
+  mkdir -p ~/.ssh
+  chmod 700 ~/.ssh
+  echo -n $GITLAB_KEY > ~/.ssh/id_rsa_base64
+  base64 --decode --ignore-garbage ~/.ssh/id_rsa_base64 > ~/.ssh/id_rsa
+  chmod 600 ~/.ssh/id_rsa
+  echo -e "Host gitlab.espressif.cn\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config
+
 before_script:
   - source tools/ci/setup_python.sh
   # apply bot filter in before script
   - *apply_bot_filter
   # add gitlab ssh key
-  - mkdir -p ~/.ssh
-  - chmod 700 ~/.ssh
-  - echo -n $GITLAB_KEY > ~/.ssh/id_rsa_base64
-  - base64 --decode --ignore-garbage ~/.ssh/id_rsa_base64 > ~/.ssh/id_rsa
-  - chmod 600 ~/.ssh/id_rsa
-  - echo -e "Host gitlab.espressif.cn\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config
+  - *add_ssh_keys
   # Set some options and environment for CI
   - source tools/ci/configure_ci_environment.sh
   - *setup_tools_unless_target_test
   - *fetch_submodules

+# used for component-based unit test apps
+.before_script_for_component_ut:
+  variables:
+    COMPONENT_UT_EXCLUDE_LIST_FP: ${CI_PROJECT_DIR}/tools/ci/component_ut_excludes.txt
+  before_script:
+    - source tools/ci/setup_python.sh
+    - *apply_bot_filter
+    - *add_ssh_keys
+    - source tools/ci/configure_ci_environment.sh
+    - *setup_tools_unless_target_test
+    - *fetch_submodules
+    - export COMPONENT_UT_DIRS=`find components/ -name test_apps -type d`
+    - export COMPONENT_UT_EXCLUDES=`[ -r $COMPONENT_UT_EXCLUDE_LIST_FP ] && cat $COMPONENT_UT_EXCLUDE_LIST_FP | xargs`
+
 # used for check scripts which we want to run unconditionally
 .before_script_lesser_nofilter:
   variables:
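The two `export` lines above amount to the following discovery logic, shown as
a minimal Python sketch for clarity (the function name is illustrative, not
part of the commit):

    import os

    def discover_component_ut(components_root='components', exclude_list_path=None):
        # COMPONENT_UT_DIRS: every directory named 'test_apps' under components/
        dirs = []
        for root, subdirs, _files in os.walk(components_root):
            dirs.extend(os.path.join(root, d) for d in subdirs if d == 'test_apps')
        # COMPONENT_UT_EXCLUDES: whitespace-joined entries of the exclude list
        # file; empty when the file is absent or unreadable
        excludes = []
        if exclude_list_path and os.access(exclude_list_path, os.R_OK):
            with open(exclude_list_path) as f:
                excludes = f.read().split()
        return dirs, excludes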


@@ -0,0 +1,7 @@
# This is the project CMakeLists.txt file for the test subproject
cmake_minimum_required(VERSION 3.5)
set(EXTRA_COMPONENT_DIRS "$ENV{IDF_PATH}/tools/unit-test-app/components")
include($ENV{IDF_PATH}/tools/cmake/project.cmake)
project(esp_netif_test)


@@ -0,0 +1,15 @@
from __future__ import print_function
import ttfw_idf


@ttfw_idf.idf_component_unit_test(env_tag='COMPONENT_UT_GENERIC')
def test_component_ut_esp_netif(env, extra_data):
    dut = env.get_dut('esp_netif', 'components/esp_netif/test_apps')
    dut.start_app()
    stdout = dut.expect('Tests finished', full_stdout=True)
    ttfw_idf.ComponentUTResult.parse_result(stdout)


if __name__ == '__main__':
    test_component_ut_esp_netif()
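The same pattern should carry over to any other component that adds a
test_apps subproject; a hypothetical sketch for an imaginary component (all
names and paths below are placeholders, not part of the commit):

    from __future__ import print_function
    import ttfw_idf

    @ttfw_idf.idf_component_unit_test(env_tag='COMPONENT_UT_GENERIC')
    def test_component_ut_my_component(env, extra_data):
        # flash the test app, wait for the Unity fixture summary line,
        # then hand the full output to the result parser
        dut = env.get_dut('my_component', 'components/my_component/test_apps')
        dut.start_app()
        stdout = dut.expect('Tests finished', full_stdout=True)
        ttfw_idf.ComponentUTResult.parse_result(stdout)

    if __name__ == '__main__':
        test_component_ut_my_component()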


@@ -0,0 +1,5 @@
idf_component_register(SRCS "esp_netif_test.c"
                       REQUIRES test_utils
                       INCLUDE_DIRS "."
                       PRIV_INCLUDE_DIRS "$ENV{IDF_PATH}/components/esp_netif/private_include" "."
                       PRIV_REQUIRES unity esp_netif nvs_flash)


@@ -0,0 +1,287 @@
#include <stdio.h>
#include <string.h>
#include "unity.h"
#include "unity_fixture.h"
#include "esp_netif.h"
#include "esp_wifi.h"
#include "nvs_flash.h"
#include "esp_wifi_netif.h"
#include "sdkconfig.h"
#include "lwip/sockets.h"
#include "test_utils.h"

TEST_GROUP(esp_netif);

TEST_SETUP(esp_netif)
{
}

TEST_TEAR_DOWN(esp_netif)
{
}

TEST(esp_netif, init_and_destroy)
{
    esp_netif_config_t cfg = ESP_NETIF_DEFAULT_WIFI_STA();
    esp_netif_t *esp_netif = esp_netif_new(NULL);

    TEST_ASSERT_EQUAL(NULL, esp_netif);
    esp_netif = esp_netif_new(&cfg);
    TEST_ASSERT_NOT_EQUAL(NULL, esp_netif);

    esp_netif_destroy(esp_netif);
}

TEST(esp_netif, get_from_if_key)
{
    // init default netif
    esp_netif_config_t cfg = ESP_NETIF_DEFAULT_WIFI_STA();
    esp_netif_t *esp_netif = esp_netif_new(&cfg);
    TEST_ASSERT_NOT_NULL(esp_netif);

    // check it's accessible by key
    TEST_ASSERT_EQUAL(esp_netif, esp_netif_get_handle_from_ifkey("WIFI_STA_DEF"));

    // destroy it
    esp_netif_destroy(esp_netif);

    // check it's also destroyed in list
    TEST_ASSERT_EQUAL(NULL, esp_netif_get_handle_from_ifkey("WIFI_STA_DEF"));
}

TEST(esp_netif, create_delete_multiple_netifs)
{
    // interface key has to be a unique identifier
    const char* if_keys[] = { "if1", "if2", "if3", "if4", "if5", "if6", "if7", "if8", "if9" };
    const int nr_of_netifs = sizeof(if_keys)/sizeof(char*);
    esp_netif_t *netifs[nr_of_netifs];

    // create a wifi station netif for each key
    for (int i=0; i<nr_of_netifs; ++i) {
        esp_netif_inherent_config_t base_netif_config = { .if_key = if_keys[i] };
        esp_netif_config_t cfg = { .base = &base_netif_config, .stack = ESP_NETIF_NETSTACK_DEFAULT_WIFI_STA };
        netifs[i] = esp_netif_new(&cfg);
        TEST_ASSERT_NOT_NULL(netifs[i]);
    }

    // there's no AP within created stations
    TEST_ASSERT_EQUAL(NULL, esp_netif_get_handle_from_ifkey("WIFI_AP_DEF"));

    // destroy
    for (int i=0; i<nr_of_netifs; ++i) {
        esp_netif_destroy(netifs[i]);
    }
}

TEST(esp_netif, dhcp_client_state_transitions_wifi_sta)
{
    // init default wifi netif
    test_case_uses_tcpip();
    TEST_ESP_OK(nvs_flash_init());
    esp_netif_config_t cfg = ESP_NETIF_DEFAULT_WIFI_STA();
    esp_netif_t *sta = esp_netif_new(&cfg);
    TEST_ASSERT_NOT_NULL(sta);
    esp_netif_attach_wifi_station(sta);
    wifi_init_config_t wifi_cfg = WIFI_INIT_CONFIG_DEFAULT();
    TEST_ESP_OK(esp_wifi_init(&wifi_cfg));

    esp_netif_dhcp_status_t state;

    // testing DHCP states per netif state transitions
    esp_netif_action_start(sta, NULL, 0, NULL);
    TEST_ASSERT_EQUAL(ESP_OK, esp_netif_dhcpc_get_status(sta, &state));
    TEST_ASSERT_EQUAL(ESP_NETIF_DHCP_INIT, state);

    esp_netif_action_connected(sta, NULL, 0, NULL);
    TEST_ASSERT_EQUAL(ESP_OK, esp_netif_dhcpc_get_status(sta, &state));
    TEST_ASSERT_EQUAL(ESP_NETIF_DHCP_STARTED, state);

    esp_netif_action_stop(sta, NULL, 0, NULL);
    TEST_ASSERT_EQUAL(ESP_OK, esp_netif_dhcpc_get_status(sta, &state));
    TEST_ASSERT_EQUAL(ESP_NETIF_DHCP_INIT, state);

    // destroy default wifi netif
    esp_netif_destroy(sta);
    TEST_ASSERT(esp_wifi_stop() == ESP_OK);
    TEST_ASSERT(esp_wifi_deinit() == ESP_OK);
    nvs_flash_deinit();
}

TEST(esp_netif, dhcp_server_state_transitions_wifi_ap)
{
    // init default wifi netif
    test_case_uses_tcpip();
    TEST_ESP_OK(nvs_flash_init());
    esp_netif_config_t cfg = ESP_NETIF_DEFAULT_WIFI_AP();
    esp_netif_t *ap = esp_netif_new(&cfg);
    TEST_ASSERT_NOT_NULL(ap);
    esp_netif_attach_wifi_ap(ap);
    wifi_init_config_t wifi_cfg = WIFI_INIT_CONFIG_DEFAULT();
    TEST_ESP_OK(esp_wifi_init(&wifi_cfg));

    esp_netif_dhcp_status_t state;

    // testing DHCP server states per netif state transitions
    esp_netif_action_start(ap, NULL, 0, NULL);
    TEST_ASSERT_EQUAL(ESP_OK, esp_netif_dhcps_get_status(ap, &state));
    TEST_ASSERT_EQUAL(ESP_NETIF_DHCP_STARTED, state);

    esp_netif_action_stop(ap, NULL, 0, NULL);
    TEST_ASSERT_EQUAL(ESP_OK, esp_netif_dhcps_get_status(ap, &state));
    TEST_ASSERT_EQUAL(ESP_NETIF_DHCP_INIT, state);

    // destroy default wifi netif
    esp_netif_destroy(ap);
    TEST_ASSERT(esp_wifi_stop() == ESP_OK);
    TEST_ASSERT(esp_wifi_deinit() == ESP_OK);
    nvs_flash_deinit();
}

TEST(esp_netif, dhcp_server_state_transitions_mesh)
{
    esp_netif_t *ap = NULL;
    esp_netif_t *sta = NULL;
    esp_netif_dhcp_status_t state;

    // init two mesh network interfaces
    test_case_uses_tcpip();
    TEST_ESP_OK(nvs_flash_init());
    TEST_ESP_OK(esp_event_loop_create_default());
    TEST_ESP_OK(esp_netif_create_default_wifi_mesh_netifs(&sta, &ap));
    TEST_ASSERT_NOT_NULL(sta);
    TEST_ASSERT_NOT_NULL(ap);
    wifi_init_config_t wifi_cfg = WIFI_INIT_CONFIG_DEFAULT();
    TEST_ESP_OK(esp_wifi_init(&wifi_cfg));

    // test both server and client are *not* STARTED after interfaces created
    TEST_ESP_OK(esp_netif_dhcpc_get_status(sta, &state));
    TEST_ASSERT_NOT_EQUAL(ESP_NETIF_DHCP_STARTED, state);
    TEST_ESP_OK(esp_netif_dhcps_get_status(ap, &state));
    TEST_ASSERT_NOT_EQUAL(ESP_NETIF_DHCP_STARTED, state);

    // test both server and client are still *not* STARTED after start
    esp_netif_action_start(ap, NULL, 0, NULL);
    esp_netif_action_start(sta, NULL, 0, NULL);
    TEST_ESP_OK(esp_netif_dhcpc_get_status(sta, &state));
    TEST_ASSERT_NOT_EQUAL(ESP_NETIF_DHCP_STARTED, state);
    TEST_ESP_OK(esp_netif_dhcps_get_status(ap, &state));
    TEST_ASSERT_NOT_EQUAL(ESP_NETIF_DHCP_STARTED, state);

    // test both server and client are still *not* STARTED even after connect
    esp_netif_action_connected(ap, NULL, 0, NULL);
    esp_netif_action_connected(sta, NULL, 0, NULL);
    TEST_ESP_OK(esp_netif_dhcpc_get_status(sta, &state));
    TEST_ASSERT_NOT_EQUAL(ESP_NETIF_DHCP_STARTED, state);
    TEST_ESP_OK(esp_netif_dhcps_get_status(ap, &state));
    TEST_ASSERT_NOT_EQUAL(ESP_NETIF_DHCP_STARTED, state);

    // test station gets promoted to be a root (so DHCP client started manually) and the client is then in STARTED state
    esp_netif_dhcpc_start(sta);
    esp_netif_action_connected(sta, NULL, 0, NULL);
    TEST_ESP_OK(esp_netif_dhcpc_get_status(sta, &state));
    TEST_ASSERT_EQUAL(ESP_NETIF_DHCP_STARTED, state);
    esp_netif_dhcpc_stop(sta);

    // test both server and client are still *not* STARTED even after stop
    esp_netif_action_stop(sta, NULL, 0, NULL);
    esp_netif_action_stop(ap, NULL, 0, NULL);
    TEST_ESP_OK(esp_netif_dhcpc_get_status(sta, &state));
    TEST_ASSERT_NOT_EQUAL(ESP_NETIF_DHCP_STARTED, state);
    TEST_ESP_OK(esp_netif_dhcps_get_status(ap, &state));
    TEST_ASSERT_NOT_EQUAL(ESP_NETIF_DHCP_STARTED, state);

    // destroy event_loop, netifs, wifi, nvs
    TEST_ESP_OK(esp_event_loop_delete_default());
    esp_netif_destroy(ap);
    esp_netif_destroy(sta);
    TEST_ASSERT(esp_wifi_stop() == ESP_OK);
    TEST_ASSERT(esp_wifi_deinit() == ESP_OK);
    nvs_flash_deinit();
}

TEST(esp_netif, create_custom_wifi_interfaces)
{
    esp_netif_t *ap = NULL;
    esp_netif_t *sta = NULL;
    uint8_t configured_mac[6] = {1, 2, 3, 4, 5, 6};
    uint8_t actual_mac[6] = { 0 };

    // create customized station
    esp_netif_inherent_config_t esp_netif_config = ESP_NETIF_INHERENT_DEFAULT_WIFI_STA();
    esp_netif_config.if_desc = "custom wifi station";
    esp_netif_config.route_prio = 1;
    sta = esp_netif_create_wifi(WIFI_IF_STA, &esp_netif_config);
    TEST_ASSERT_NOT_NULL(sta);
    TEST_ASSERT_EQUAL_STRING("custom wifi station", esp_netif_get_desc(sta));
    TEST_ASSERT_EQUAL(1, esp_netif_get_route_prio(sta));

    // create customized access point
    esp_netif_inherent_config_t esp_netif_config2 = ESP_NETIF_INHERENT_DEFAULT_WIFI_AP();
    esp_netif_config2.if_desc = "custom wifi ap";
    esp_netif_config2.route_prio = 10;
    memcpy(esp_netif_config2.mac, configured_mac, 6);
    ap = esp_netif_create_wifi(WIFI_IF_AP, &esp_netif_config2);
    TEST_ASSERT_NOT_NULL(ap);
    TEST_ASSERT_EQUAL_STRING("custom wifi ap", esp_netif_get_desc(ap));
    TEST_ASSERT_EQUAL(10, esp_netif_get_route_prio(ap));
    TEST_ASSERT_EQUAL(ESP_OK, esp_netif_get_mac(ap, actual_mac));
    TEST_ASSERT_EQUAL_HEX8_ARRAY(configured_mac, actual_mac, 6);

    esp_wifi_destroy_if_driver(esp_netif_get_io_driver(ap));
    esp_wifi_destroy_if_driver(esp_netif_get_io_driver(sta));
    esp_netif_destroy(ap);
    esp_netif_destroy(sta);
}

TEST(esp_netif, get_set_hostname)
{
    const char *hostname;
    esp_netif_config_t cfg = ESP_NETIF_DEFAULT_WIFI_STA();

    test_case_uses_tcpip();

    esp_netif_t *esp_netif = esp_netif_new(&cfg);

    // specific hostname not set yet, get_hostname should fail
    TEST_ASSERT_NOT_EQUAL(ESP_OK, esp_netif_get_hostname(esp_netif, &hostname));
    TEST_ASSERT_NOT_NULL(esp_netif);
    esp_netif_attach_wifi_station(esp_netif);
    esp_netif_action_start(esp_netif, NULL, 0, NULL);

    // specific hostname not set yet, but once the netif is started, get_hostname returns the default value from config
    TEST_ASSERT_EQUAL(ESP_OK, esp_netif_get_hostname(esp_netif, &hostname));
    TEST_ASSERT_EQUAL_STRING(hostname, CONFIG_LWIP_LOCAL_HOSTNAME);

    // specific hostname set and get
    TEST_ASSERT_EQUAL(ESP_OK, esp_netif_set_hostname(esp_netif, "new_name"));
    TEST_ASSERT_EQUAL(ESP_OK, esp_netif_get_hostname(esp_netif, &hostname));
    TEST_ASSERT_EQUAL_STRING(hostname, "new_name");

    esp_netif_destroy(esp_netif);
}

TEST_GROUP_RUNNER(esp_netif)
{
    RUN_TEST_CASE(esp_netif, init_and_destroy)
    RUN_TEST_CASE(esp_netif, get_from_if_key)
    RUN_TEST_CASE(esp_netif, create_delete_multiple_netifs)
    RUN_TEST_CASE(esp_netif, dhcp_client_state_transitions_wifi_sta)
    RUN_TEST_CASE(esp_netif, dhcp_server_state_transitions_wifi_ap)
    RUN_TEST_CASE(esp_netif, dhcp_server_state_transitions_mesh)
    RUN_TEST_CASE(esp_netif, create_custom_wifi_interfaces)
    RUN_TEST_CASE(esp_netif, get_set_hostname)
}

void app_main(void)
{
    UNITY_MAIN(esp_netif);
}


@@ -0,0 +1,2 @@
CONFIG_UNITY_ENABLE_FIXTURE=y
CONFIG_UNITY_ENABLE_IDF_TEST_RUNNER=n


@@ -15,7 +15,7 @@ if(CONFIG_UNITY_ENABLE_FIXTURE)
 endif()

 idf_component_register(SRCS "${srcs}"
-                       INCLUDE_DIRS "include" "unity/src")
+                       INCLUDE_DIRS "include" "unity/src" "unity/extras/fixture/src")

 target_compile_definitions(${COMPONENT_LIB} PUBLIC
                            -DUNITY_INCLUDE_CONFIG_H


@@ -47,6 +47,10 @@ uint32_t unity_exec_time_get_ms(void);
 #endif //CONFIG_UNITY_ENABLE_IDF_TEST_RUNNER

+#ifdef CONFIG_UNITY_ENABLE_FIXTURE
+#include "unity_fixture_extras.h"
+#endif // CONFIG_UNITY_ENABLE_FIXTURE
+
 // shorthand to check esp_err_t return code
 #define TEST_ESP_OK(rc) TEST_ASSERT_EQUAL_HEX32(ESP_OK, rc)
 #define TEST_ESP_ERR(err, rc) TEST_ASSERT_EQUAL_HEX32(err, rc)


@@ -0,0 +1,25 @@
/* IDF-specific additions to "Unity Fixture" */
#pragma once
#ifndef CONFIG_IDF_TARGET
/* A shorthand for running one test group from the main function */
#define UNITY_MAIN(group_) do { \
const char* argv[] = { "test", "-v" }; \
const int argc = sizeof(argv)/sizeof(argv[0]); \
int rc = UnityMain(argc, argv, TEST_ ## group_ ## _GROUP_RUNNER); \
printf("\nTests finished, rc=%d\n", rc); \
exit(rc); \
} while(0)
#else // CONFIG_IDF_TARGET
/* A shorthand for running one test group from the main function */
#define UNITY_MAIN(group_) do { \
const char* argv[] = { "test", "-v" }; \
const int argc = sizeof(argv)/sizeof(argv[0]); \
int rc = UnityMain(argc, argv, TEST_ ## group_ ## _GROUP_RUNNER); \
printf("\nTests finished, rc=%d\n", rc); \
} while(0)
#endif // CONFIG_IDF_TARGET


@@ -19,9 +19,13 @@ import os
 import re
 import threading
 import traceback
-import Queue
 import subprocess

+try:
+    import Queue
+except ImportError:
+    import queue as Queue
+
 from tiny_test_fw import Utility
 import ttfw_idf
 from ble import lib_ble_client


@@ -82,6 +82,8 @@ components/libsodium/
 components/spiffs/include/spiffs_config.h
 components/unity/unity/src/unity_internals.h
+components/unity/unity/extras/
+components/unity/include/unity_fixture_extras.h
 components/unity/include/unity_config.h
 components/unity/include/unity_test_runner.h


@@ -1,4 +1,5 @@
 assign_test:
+  extends: .before_script_for_component_ut
   tags:
     - assign_test
   image: $CI_DOCKER_REGISTRY/ubuntu-test-env$BOT_DOCKER_IMAGE_TAG
@@ -11,8 +12,9 @@ assign_test:
     - build_esp_idf_tests_cmake_esp32s2
   variables:
     SUBMODULES_TO_FETCH: "components/esptool_py/esptool"
-    EXAMPLE_CONFIG_OUTPUT_PATH: "$CI_PROJECT_DIR/examples/test_configs"
-    TEST_APP_CONFIG_OUTPUT_PATH: "$CI_PROJECT_DIR/tools/test_apps/test_configs"
+    EXAMPLE_CONFIG_OUTPUT_PATH: "${CI_PROJECT_DIR}/examples/test_configs"
+    TEST_APP_CONFIG_OUTPUT_PATH: "${CI_PROJECT_DIR}/tools/test_apps/test_configs"
+    COMPONENT_UT_CONFIG_OUTPUT_PATH: "${CI_PROJECT_DIR}/component_ut/test_configs"
     UNIT_TEST_CASE_FILE: "${CI_PROJECT_DIR}/components/idf_test/unit_test"
     # auto_test_script is compatible with Python 3 only
     PYTHON_VER: 3
@@ -22,8 +24,10 @@ assign_test:
       - components/idf_test/*/TC.sqlite
      - $EXAMPLE_CONFIG_OUTPUT_PATH
      - $TEST_APP_CONFIG_OUTPUT_PATH
+     - $COMPONENT_UT_CONFIG_OUTPUT_PATH
      - build_examples/artifact_index.json
      - build_test_apps/artifact_index.json
+     - build_component_ut/artifact_index.json
      - tools/unit-test-app/builds/artifact_index.json
     expire_in: 1 week
   only:
@@ -37,16 +41,18 @@ assign_test:
     - $BOT_LABEL_WEEKEND_TEST
   script:
     # assign example tests
-    - python tools/ci/python_packages/ttfw_idf/IDFAssignTest.py example_test $IDF_PATH/examples $CI_TARGET_TEST_CONFIG_FILE $EXAMPLE_CONFIG_OUTPUT_PATH
+    - python tools/ci/python_packages/ttfw_idf/IDFAssignTest.py example_test $IDF_PATH/examples -c $CI_TARGET_TEST_CONFIG_FILE -o $EXAMPLE_CONFIG_OUTPUT_PATH
    # assign test apps
-    - python tools/ci/python_packages/ttfw_idf/IDFAssignTest.py custom_test $IDF_PATH/tools/test_apps $CI_TARGET_TEST_CONFIG_FILE $TEST_APP_CONFIG_OUTPUT_PATH
+    - python tools/ci/python_packages/ttfw_idf/IDFAssignTest.py custom_test $IDF_PATH/tools/test_apps -c $CI_TARGET_TEST_CONFIG_FILE -o $TEST_APP_CONFIG_OUTPUT_PATH
+    # assign component ut
+    - python tools/ci/python_packages/ttfw_idf/IDFAssignTest.py component_ut $COMPONENT_UT_DIRS -c $CI_TARGET_TEST_CONFIG_FILE -o $COMPONENT_UT_CONFIG_OUTPUT_PATH
    # assign unit test cases
-    - python tools/ci/python_packages/ttfw_idf/IDFAssignTest.py unit_test $UNIT_TEST_CASE_FILE $CI_TARGET_TEST_CONFIG_FILE $IDF_PATH/components/idf_test/unit_test/CIConfigs
+    - python tools/ci/python_packages/ttfw_idf/IDFAssignTest.py unit_test $UNIT_TEST_CASE_FILE -c $CI_TARGET_TEST_CONFIG_FILE -o $IDF_PATH/components/idf_test/unit_test/CIConfigs
    # clone test script to assign tests
     - ./tools/ci/retry_failed.sh git clone $TEST_SCRIPT_REPOSITORY
     - python $CHECKOUT_REF_SCRIPT auto_test_script auto_test_script
     - cd auto_test_script
-    # assgin integration test cases
+    # assign integration test cases
     - python CIAssignTestCases.py -t $IDF_PATH/components/idf_test/integration_test -c $CI_TARGET_TEST_CONFIG_FILE -b $IDF_PATH/SSC/ssc_bin

 update_test_cases:


@@ -85,12 +85,10 @@ build_esp_idf_tests_cmake_esp32s2:
 .build_examples_template:
   extends: .build_template
-  parallel: 8
   artifacts:
     when: always
     expire_in: 4 days
   only:
-    # Here both 'variables' and 'refs' conditions are given. They are combined with "AND" logic.
     variables:
       - $BOT_TRIGGER_WITH_LABEL == null
       - $BOT_LABEL_BUILD
@@ -98,8 +96,12 @@ build_esp_idf_tests_cmake_esp32s2:
       - $BOT_LABEL_REGULAR_TEST
       - $BOT_LABEL_WEEKEND_TEST
   variables:
-    SCAN_TEST_JSON: ${CI_PROJECT_DIR}/examples/test_configs/scan_${IDF_TARGET}_${BUILD_SYSTEM}.json
-    TEST_TYPE: "example_test"
+    TEST_PREFIX: examples
+    TEST_RELATIVE_DIR: examples
+    SCAN_TEST_JSON: ${CI_PROJECT_DIR}/${TEST_RELATIVE_DIR}/test_configs/scan_${IDF_TARGET}_${BUILD_SYSTEM}.json
+    TEST_TYPE: example_test
+    LOG_PATH: ${CI_PROJECT_DIR}/log_${TEST_PREFIX}
+    BUILD_PATH: ${CI_PROJECT_DIR}/build_${TEST_PREFIX}
   script:
     # RISC-V toolchain is optional but ULP may need it, so install:
     - $IDF_PATH/tools/idf_tools.py install riscv-none-embed-gcc
@@ -113,16 +115,15 @@ build_examples_make:
   # This is a workaround for a rarely encountered issue with building examples in CI.
   # Probably related to building of Kconfig in 'make clean' stage
   retry: 1
-  parallel: 8
   artifacts:
     paths:
       - $LOG_PATH
-      - build_examples/*/*/*/build/size.json
+      - build_${TEST_PREFIX}/*/*/*/build/size.json
       - $SIZE_INFO_LOCATION
   variables:
-    LOG_PATH: "${CI_PROJECT_DIR}/log_examples_make"
-    BUILD_PATH: "${CI_PROJECT_DIR}/build_examples_make"
-    BUILD_SYSTEM: "make"
-    IDF_TARGET: "esp32" # currently we only support esp32
+    BUILD_SYSTEM: make
+    IDF_TARGET: esp32 # currently we only support esp32
   only:
     refs:
       - master
@@ -140,22 +141,20 @@ build_examples_make:
     - scan_tests
   artifacts:
     paths:
-      - build_examples/list.json
-      - build_examples/list_job_*.json
-      - build_examples/*/*/*/sdkconfig
-      - build_examples/*/*/*/build/size.json
-      - build_examples/*/*/*/build/*.bin
-      - build_examples/*/*/*/build/*.elf
-      - build_examples/*/*/*/build/*.map
-      - build_examples/*/*/*/build/flasher_args.json
-      - build_examples/*/*/*/build/bootloader/*.bin
-      - build_examples/*/*/*/build/partition_table/*.bin
+      - build_${TEST_PREFIX}/list.json
+      - build_${TEST_PREFIX}/list_job_*.json
+      - build_${TEST_PREFIX}/*/*/*/sdkconfig
+      - build_${TEST_PREFIX}/*/*/*/build/size.json
+      - build_${TEST_PREFIX}/*/*/*/build/*.bin
+      - build_${TEST_PREFIX}/*/*/*/build/*.elf
+      - build_${TEST_PREFIX}/*/*/*/build/*.map
+      - build_${TEST_PREFIX}/*/*/*/build/flasher_args.json
+      - build_${TEST_PREFIX}/*/*/*/build/bootloader/*.bin
+      - build_${TEST_PREFIX}/*/*/*/build/partition_table/*.bin
       - $LOG_PATH
       - $SIZE_INFO_LOCATION
   variables:
-    LOG_PATH: "${CI_PROJECT_DIR}/log_examples"
-    BUILD_PATH: "${CI_PROJECT_DIR}/build_examples"
-    BUILD_SYSTEM: "cmake"
+    BUILD_SYSTEM: cmake

 build_examples_cmake_esp32:
   extends: .build_examples_cmake
@@ -165,35 +164,15 @@ build_examples_cmake_esp32:
 build_examples_cmake_esp32s2:
   extends: .build_examples_cmake
-  parallel: 8
   variables:
     IDF_TARGET: esp32s2

-.build_test_apps: &build_test_apps
-  extends: .build_template
-  stage: build
-  dependencies:
-    - scan_tests
-  artifacts:
-    when: always
-    paths:
-      - build_test_apps/list.json
-      - build_test_apps/list_job_*.json
-      - build_test_apps/*/*/*/sdkconfig
-      - build_test_apps/*/*/*/build/size.json
-      - build_test_apps/*/*/*/build/*.bin
-      - build_test_apps/*/*/*/build/*.elf
-      - build_test_apps/*/*/*/build/*.map
-      - build_test_apps/*/*/*/build/flasher_args.json
-      - build_test_apps/*/*/*/build/bootloader/*.bin
-      - build_test_apps/*/*/*/build/partition_table/*.bin
-      - $LOG_PATH
-      - $SIZE_INFO_LOCATION
-    expire_in: 3 days
+.build_test_apps:
+  extends: .build_examples_cmake
   variables:
-    LOG_PATH: "${CI_PROJECT_DIR}/log_test_apps"
-    BUILD_PATH: "${CI_PROJECT_DIR}/build_test_apps"
-    BUILD_SYSTEM: "cmake"
-    SCAN_TEST_JSON: ${CI_PROJECT_DIR}/tools/test_apps/test_configs/scan_${IDF_TARGET}_${BUILD_SYSTEM}.json
+    TEST_PREFIX: test_apps
+    TEST_RELATIVE_DIR: tools/test_apps
     TEST_TYPE: custom_test
   only:
     variables:
@@ -207,14 +186,38 @@ build_examples_cmake_esp32s2:
 build_test_apps_esp32:
   extends: .build_test_apps
+  parallel: 8
   variables:
     IDF_TARGET: esp32

 build_test_apps_esp32s2:
   extends: .build_test_apps
+  parallel: 8
   variables:
     IDF_TARGET: esp32s2

+.build_component_ut:
+  extends: .build_test_apps
+  variables:
+    TEST_PREFIX: component_ut
+    TEST_RELATIVE_DIR: component_ut
+  only:
+    variables:
+      - $BOT_TRIGGER_WITH_LABEL == null
+      - $BOT_LABEL_BUILD
+      - $BOT_LABEL_REGULAR_TEST
+      - $BOT_LABEL_UNIT_TEST
+      - $BOT_LABEL_UNIT_TEST_S2
+
+build_component_ut_esp32:
+  extends: .build_component_ut
+  variables:
+    IDF_TARGET: esp32
+
+build_component_ut_esp32s2:
+  extends: .build_component_ut
+  variables:
+    IDF_TARGET: esp32s2
+
 # If you want to add new build example jobs, please add it into dependencies of `.example_test_template`


@@ -139,26 +139,34 @@ check_public_headers:
     TEST_CONFIG_FILE: ${CI_PROJECT_DIR}/tools/ci/config/target-test.yml

 scan_tests:
-  extends: .scan_build_tests
+  extends:
+    - .before_script_for_component_ut
+    - .scan_build_tests
   only:
     variables:
       - $BOT_TRIGGER_WITH_LABEL == null
       - $BOT_LABEL_REGULAR_TEST
       - $BOT_LABEL_EXAMPLE_TEST
       - $BOT_LABEL_CUSTOM_TEST
+      - $BOT_LABEL_UNIT_TEST
+      - $BOT_LABEL_UNIT_TEST_S2
   artifacts:
     paths:
       - $EXAMPLE_TEST_OUTPUT_DIR
       - $TEST_APPS_OUTPUT_DIR
+      - $COMPONENT_UT_OUTPUT_DIR
   variables:
     EXAMPLE_TEST_DIR: ${CI_PROJECT_DIR}/examples
     EXAMPLE_TEST_OUTPUT_DIR: ${CI_PROJECT_DIR}/examples/test_configs
     TEST_APPS_TEST_DIR: ${CI_PROJECT_DIR}/tools/test_apps
     TEST_APPS_OUTPUT_DIR: ${CI_PROJECT_DIR}/tools/test_apps/test_configs
+    COMPONENT_UT_OUTPUT_DIR: ${CI_PROJECT_DIR}/component_ut/test_configs
+    PYTHON_VER: 3
   script:
-    - python $CI_SCAN_TESTS_PY example_test -b make $EXAMPLE_TEST_DIR --exclude examples/build_system/idf_as_lib -c $TEST_CONFIG_FILE -o $EXAMPLE_TEST_OUTPUT_DIR
-    - python $CI_SCAN_TESTS_PY example_test -b cmake $EXAMPLE_TEST_DIR --exclude examples/build_system/idf_as_lib -c $TEST_CONFIG_FILE -o $EXAMPLE_TEST_OUTPUT_DIR
+    - python $CI_SCAN_TESTS_PY example_test $EXAMPLE_TEST_DIR -b make --exclude examples/build_system/idf_as_lib -c $TEST_CONFIG_FILE -o $EXAMPLE_TEST_OUTPUT_DIR
+    - python $CI_SCAN_TESTS_PY example_test $EXAMPLE_TEST_DIR -b cmake --exclude examples/build_system/idf_as_lib -c $TEST_CONFIG_FILE -o $EXAMPLE_TEST_OUTPUT_DIR
     - python $CI_SCAN_TESTS_PY test_apps $TEST_APPS_TEST_DIR -c $TEST_CONFIG_FILE -o $TEST_APPS_OUTPUT_DIR
+    - python $CI_SCAN_TESTS_PY component_ut $COMPONENT_UT_DIRS --exclude $COMPONENT_UT_EXCLUDES -c $TEST_CONFIG_FILE -o $COMPONENT_UT_OUTPUT_DIR

 check_readme_links:
   extends: .check_job_template


@@ -84,16 +84,7 @@
 .test_app_template:
   extends: .example_test_template
-  stage: target_test
-  dependencies:
-    - assign_test
   only:
-    refs:
-      - master
-      - /^release\/v/
-      - /^v\d+\.\d+(\.\d+)?($|-)/
-      - triggers
-      - schedules
     variables:
       - $BOT_TRIGGER_WITH_LABEL == null
       - $BOT_LABEL_CUSTOM_TEST
@@ -104,6 +95,28 @@
     LOG_PATH: "$CI_PROJECT_DIR/TEST_LOGS"
     ENV_FILE: "$CI_PROJECT_DIR/ci-test-runner-configs/$CI_RUNNER_DESCRIPTION/EnvConfig.yml"

+.component_ut_template:
+  extends:
+    - .before_script_for_component_ut
+    - .example_test_template
+  only:
+    variables:
+      - $BOT_TRIGGER_WITH_LABEL == null
+      - $BOT_LABEL_UNIT_TEST
+  variables:
+    CONFIG_FILE_PATH: "${CI_PROJECT_DIR}/component_ut/test_configs"
+    PYTHON_VER: 3
+  script:
+    - *define_config_file_name
+    # first test if config file exists, if not exist, exit 0
+    - test -e $CONFIG_FILE || exit 0
+    # clone test env configs
+    - ./tools/ci/retry_failed.sh git clone $TEST_ENV_CONFIG_REPOSITORY
+    - python $CHECKOUT_REF_SCRIPT ci-test-runner-configs ci-test-runner-configs
+    - cd tools/ci/python_packages/tiny_test_fw/bin
+    # run test
+    - python Runner.py $COMPONENT_UT_DIRS -c $CONFIG_FILE -e $ENV_FILE
+
 .unit_test_template:
   extends: .example_test_template
   stage: target_test
@@ -396,6 +409,12 @@ test_app_test_003:
       - ESP32
       - Example_PPP

+component_ut_test_001:
+  extends: .component_ut_template
+  tags:
+    - ESP32
+    - COMPONENT_UT_GENERIC
+
 UT_001:
   extends: .unit_test_template
   parallel: 39


@@ -571,7 +571,7 @@ class BaseDUT(object):
         return self.__getattribute__(method)

     @_expect_lock
-    def expect(self, pattern, timeout=DEFAULT_EXPECT_TIMEOUT):
+    def expect(self, pattern, timeout=DEFAULT_EXPECT_TIMEOUT, full_stdout=False):
         """
         expect(pattern, timeout=DEFAULT_EXPECT_TIMEOUT)
         expect received data on DUT match the pattern. will raise exception when expect timeout.
@@ -581,9 +581,11 @@ class BaseDUT(object):
         :param pattern: string or compiled RegEx(string pattern)
         :param timeout: timeout for expect
+        :param full_stdout: return full stdout until meet expect string/pattern or just matched string
         :return: string if pattern is string; matched groups if pattern is RegEx
         """
         method = self._get_expect_method(pattern)
+        stdout = ''

         # non-blocking get data for first time
         data = self.data_cache.get_data(0)
@@ -598,12 +600,13 @@ class BaseDUT(object):
                 break
             # wait for new data from cache
             data = self.data_cache.get_data(time_remaining)
+            stdout = data

         if ret is None:
             pattern = _pattern_to_string(pattern)
             self._save_expect_failure(pattern, data, start_time)
             raise ExpectTimeout(self.name + ": " + pattern)
-        return ret
+        return stdout if full_stdout else ret

     def _expect_multi(self, expect_all, expect_item_list, timeout):
         """

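A usage sketch of the new `full_stdout` flag: with the default `False` the
caller still gets only the matched string or RegEx groups, while `True`
returns everything the DUT printed up to the point the pattern matched, which
is what lets the component UT runner hand the whole Unity report to
`ComponentUTResult.parse_result` (the DUT below is a placeholder):

    # `dut` stands for any started tiny-test-fw DUT instance
    report = dut.expect('Tests finished', full_stdout=True)
    for line in report.splitlines():
        if 'Failures' in line:   # e.g. Unity's "N Tests M Failures K Ignored" summary
            print(line)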

@@ -145,7 +145,7 @@ class AssignTest(object):
     """
     Auto assign tests to CI jobs.

-    :param test_case_path: path of test case file(s)
+    :param test_case_paths: path of test case file(s)
     :param ci_config_file: path of ``.gitlab-ci.yml``
     """
     # subclass need to rewrite CI test job pattern, to filter all test jobs
@@ -157,8 +157,8 @@ class AssignTest(object):
         "supported_in_ci": True,
     }

-    def __init__(self, test_case_path, ci_config_file, case_group=Group):
-        self.test_case_path = test_case_path
+    def __init__(self, test_case_paths, ci_config_file, case_group=Group):
+        self.test_case_paths = test_case_paths
         self.test_case_file_pattern = None
         self.test_cases = []
         self.jobs = self._parse_gitlab_ci_config(ci_config_file)
@@ -197,7 +197,7 @@ class AssignTest(object):
         _case_filter = self.DEFAULT_FILTER.copy()
         if case_filter:
             _case_filter.update(case_filter)
-        test_methods = SearchCases.Search.search_test_cases(self.test_case_path, self.test_case_file_pattern)
+        test_methods = SearchCases.Search.search_test_cases(self.test_case_paths, self.test_case_file_pattern)
         return CaseConfig.filter_test_cases(test_methods, _case_filter)

     def _group_cases(self):


@@ -120,15 +120,19 @@ class Search(object):
         return replicated_cases

     @classmethod
-    def search_test_cases(cls, test_case, test_case_file_pattern=None):
+    def search_test_cases(cls, test_case_paths, test_case_file_pattern=None):
         """
         search all test cases from a folder or file, and then do case replicate.

-        :param test_case: test case file(s) path
+        :param test_case_paths: test case file(s) paths
         :param test_case_file_pattern: unix filename pattern
         :return: a list of replicated test methods
         """
-        test_case_files = cls._search_test_case_files(test_case, test_case_file_pattern or cls.TEST_CASE_FILE_PATTERN)
+        if not isinstance(test_case_paths, list):
+            test_case_paths = [test_case_paths]
+        test_case_files = []
+        for path in test_case_paths:
+            test_case_files.extend(cls._search_test_case_files(path, test_case_file_pattern or cls.TEST_CASE_FILE_PATTERN))
         test_cases = []
         for test_case_file in test_case_files:
             test_cases += cls._search_cases_from_file(test_case_file)
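Because the input is now normalized, both call styles below should behave the
same (paths are illustrative):

    from tiny_test_fw.Utility import SearchCases

    # single path, as existing callers pass it (wrapped into a list internally)
    cases = SearchCases.Search.search_test_cases('components/esp_netif/test_apps')

    # several folders at once, as the component UT jobs now pass $COMPONENT_UT_DIRS
    cases = SearchCases.Search.search_test_cases([
        'components/esp_netif/test_apps',
        'components/another_component/test_apps',   # placeholder path
    ])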


@@ -32,12 +32,12 @@ from tiny_test_fw.Utility import SearchCases, CaseConfig

 class Runner(threading.Thread):
     """
-    :param test_case: test case file or folder
+    :param test_case_paths: test case file or folder
     :param case_config: case config file, allow to filter test cases and pass data to test case
     :param env_config_file: env config file
     """

-    def __init__(self, test_case, case_config, env_config_file=None):
+    def __init__(self, test_case_paths, case_config, env_config_file=None):
         super(Runner, self).__init__()
         self.setDaemon(True)
         if case_config:
@@ -45,7 +45,7 @@ class Runner(threading.Thread):
         else:
             test_suite_name = "TestRunner"
         TinyFW.set_default_config(env_config_file=env_config_file, test_suite_name=test_suite_name)
-        test_methods = SearchCases.Search.search_test_cases(test_case)
+        test_methods = SearchCases.Search.search_test_cases(test_case_paths)
         self.test_cases = CaseConfig.Parser.apply_config(test_methods, case_config)
         self.test_result = []

@@ -59,23 +59,23 @@ class Runner(threading.Thread):

 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
-    parser.add_argument("test_case",
-                        help="test case folder or file")
+    parser.add_argument("test_cases", nargs='+',
+                        help="test case folders or files")
     parser.add_argument("--case_config", "-c", default=None,
                         help="case filter/config file")
     parser.add_argument("--env_config_file", "-e", default=None,
                         help="test env config file")
     args = parser.parse_args()

-    runner = Runner(args.test_case, args.case_config, args.env_config_file)
+    test_cases = [os.path.join(os.getenv('IDF_PATH'), path) if not os.path.isabs(path) else path for path in args.test_cases]
+    runner = Runner(test_cases, args.case_config, args.env_config_file)
     runner.start()

     while True:
         try:
             runner.join(1)
-            if not runner.isAlive():
+            if not runner.is_alive():
                 break
         except KeyboardInterrupt:
             print("exit by Ctrl-C")


@@ -8,18 +8,16 @@ from collections import defaultdict
 from find_apps import find_apps
 from find_build_apps import BUILD_SYSTEMS, BUILD_SYSTEM_CMAKE
 from ttfw_idf.IDFAssignTest import ExampleAssignTest, TestAppsAssignTest
+from idf_py_actions.constants import SUPPORTED_TARGETS

-VALID_TARGETS = [
-    'esp32',
-    'esp32s2',
-]
-
 TEST_LABELS = {
     'example_test': 'BOT_LABEL_EXAMPLE_TEST',
     'test_apps': 'BOT_LABEL_CUSTOM_TEST',
+    'component_ut': ['BOT_LABEL_UNIT_TEST', 'BOT_LABEL_UNIT_TEST_S2'],
 }

 BUILD_ALL_LABELS = [
-    'BOT_LABEL_BUILD',
     'BOT_LABEL_BUILD_ALL_APPS',
     'BOT_LABEL_REGULAR_TEST',
 ]
@@ -40,7 +38,12 @@ def _judge_build_or_not(action, build_all):  # type: (str, bool) -> (bool, bool)
         logging.info('Build all apps')
         return True, True

-    if os.getenv(TEST_LABELS[action]):
-        logging.info('Build test cases apps')
-        return True, False
+    labels = TEST_LABELS[action]
+    if not isinstance(labels, list):
+        labels = [labels]
+
+    for label in labels:
+        if os.getenv(label):
+            logging.info('Build test cases apps')
+            return True, False
     else:
@@ -59,8 +62,7 @@ def main():
     parser.add_argument('test_type',
                         choices=TEST_LABELS.keys(),
                         help='Scan test type')
-    parser.add_argument('paths',
-                        nargs='+',
+    parser.add_argument('paths', nargs='+',
                         help='One or more app paths')
     parser.add_argument('-b', '--build-system',
                         choices=BUILD_SYSTEMS.keys(),
@@ -71,8 +73,7 @@ def main():
     parser.add_argument('-o', '--output-path',
                         required=True,
                         help="output path of the scan result")
-    parser.add_argument("--exclude",
-                        action="append",
+    parser.add_argument("--exclude", nargs="*",
                         help='Ignore specified directory. Can be used multiple times.')
     parser.add_argument('--preserve', action="store_true",
                         help='add this flag to preserve artifacts for all apps')
@@ -90,15 +91,17 @@ def main():
             raise e

     if (not build_standalone_apps) and (not build_test_case_apps):
-        for target in VALID_TARGETS:
+        for target in SUPPORTED_TARGETS:
             output_json([], target, args.build_system, args.output_path)
         SystemExit(0)

+    paths = set([os.path.join(os.getenv('IDF_PATH'), path) if not os.path.isabs(path) else path for path in args.paths])
+
     test_cases = []
-    for path in set(args.paths):
+    for path in paths:
         if args.test_type == 'example_test':
             assign = ExampleAssignTest(path, args.ci_config_file)
-        elif args.test_type == 'test_apps':
+        elif args.test_type in ['test_apps', 'component_ut']:
             assign = TestAppsAssignTest(path, args.ci_config_file)
         else:
             raise SystemExit(1)  # which is impossible
@@ -123,7 +126,7 @@ def main():
     build_system_class = BUILD_SYSTEMS[build_system]

     if build_test_case_apps:
-        for target in VALID_TARGETS:
+        for target in SUPPORTED_TARGETS:
             target_dict = scan_info_dict[target]
             test_case_apps = target_dict['test_case_apps'] = set()
             for case in test_cases:
@@ -134,21 +137,21 @@ def main():
                 test_case_apps.update(find_apps(build_system_class, app_dir, True, default_exclude, target.lower()))
                 exclude_apps.append(app_dir)
     else:
-        for target in VALID_TARGETS:
+        for target in SUPPORTED_TARGETS:
             scan_info_dict[target]['test_case_apps'] = set()

     if build_standalone_apps:
-        for target in VALID_TARGETS:
+        for target in SUPPORTED_TARGETS:
             target_dict = scan_info_dict[target]
             standalone_apps = target_dict['standalone_apps'] = set()
-            for path in args.paths:
+            for path in paths:
                 standalone_apps.update(find_apps(build_system_class, path, True, exclude_apps, target.lower()))
     else:
-        for target in VALID_TARGETS:
+        for target in SUPPORTED_TARGETS:
             scan_info_dict[target]['standalone_apps'] = set()

     test_case_apps_preserve_default = True if build_system == 'cmake' else False
-    for target in VALID_TARGETS:
+    for target in SUPPORTED_TARGETS:
         apps = []
         for app_dir in scan_info_dict[target]['test_case_apps']:
             apps.append({


@@ -22,7 +22,7 @@ import sys
 from abc import abstractmethod

 from tiny_test_fw import App
-from .IDFAssignTest import ExampleGroup, TestAppsGroup, UnitTestGroup, IDFCaseGroup
+from .IDFAssignTest import ExampleGroup, TestAppsGroup, UnitTestGroup, IDFCaseGroup, ComponentUTGroup

 try:
     import gitlab_api
@@ -202,9 +202,9 @@ class IDFApp(App.BaseApp):
     def __str__(self):
         parts = ['app<{}>'.format(self.app_path)]
         if self.config_name:
-            parts.extend('config<{}>'.format(self.config_name))
+            parts.append('config<{}>'.format(self.config_name))
         if self.target:
-            parts.extend('target<{}>'.format(self.target))
+            parts.append('target<{}>'.format(self.target))
         return ' '.join(parts)

     @classmethod
@@ -447,6 +447,11 @@ class TestApp(Example):
         super(TestApp, self).__init__(app_path, config_name, target, case_group, artifacts_cls)


+class ComponentUTApp(TestApp):
+    def __init__(self, app_path, config_name='default', target='esp32', case_group=ComponentUTGroup, artifacts_cls=Artifacts):
+        super(ComponentUTApp, self).__init__(app_path, config_name, target, case_group, artifacts_cls)
+
+
 class LoadableElfTestApp(TestApp):
     def __init__(self, app_path, app_files, config_name='default', target='esp32', case_group=TestAppsGroup, artifacts_cls=Artifacts):
         # add arg `app_files` for loadable elf test_app.


@@ -17,7 +17,9 @@ except ImportError:
 import gitlab_api
 from tiny_test_fw.Utility import CIAssignTest

-IDF_PATH_FROM_ENV = os.getenv("IDF_PATH")
+from idf_py_actions.constants import SUPPORTED_TARGETS
+
+IDF_PATH_FROM_ENV = os.getenv('IDF_PATH')
@@ -28,33 +30,36 @@ class IDFCaseGroup(CIAssignTest.Group):
     def get_artifact_index_file(cls):
         assert cls.LOCAL_BUILD_DIR
         if IDF_PATH_FROM_ENV:
-            artifact_index_file = os.path.join(IDF_PATH_FROM_ENV, cls.LOCAL_BUILD_DIR, "artifact_index.json")
+            artifact_index_file = os.path.join(IDF_PATH_FROM_ENV, cls.LOCAL_BUILD_DIR, 'artifact_index.json')
         else:
-            artifact_index_file = "artifact_index.json"
+            artifact_index_file = 'artifact_index.json'
         return artifact_index_file


 class IDFAssignTest(CIAssignTest.AssignTest):
+    def __init__(self, test_case_path, ci_config_file, case_group=IDFCaseGroup):
+        super(IDFAssignTest, self).__init__(test_case_path, ci_config_file, case_group)
+
     def format_build_log_path(self, parallel_num):
-        return "{}/list_job_{}.json".format(self.case_group.LOCAL_BUILD_DIR, parallel_num)
+        return '{}/list_job_{}.json'.format(self.case_group.LOCAL_BUILD_DIR, parallel_num)

     def create_artifact_index_file(self, project_id=None, pipeline_id=None):
         if project_id is None:
-            project_id = os.getenv("CI_PROJECT_ID")
+            project_id = os.getenv('CI_PROJECT_ID')
         if pipeline_id is None:
-            pipeline_id = os.getenv("CI_PIPELINE_ID")
+            pipeline_id = os.getenv('CI_PIPELINE_ID')
         gitlab_inst = gitlab_api.Gitlab(project_id)
         artifact_index_list = []
         for build_job_name in self.case_group.BUILD_JOB_NAMES:
             job_info_list = gitlab_inst.find_job_id(build_job_name, pipeline_id=pipeline_id)
             for job_info in job_info_list:
-                parallel_num = job_info["parallel_num"] or 1  # Could be None if "parallel_num" not defined for the job
-                raw_data = gitlab_inst.download_artifact(job_info["id"],
+                parallel_num = job_info['parallel_num'] or 1  # Could be None if "parallel_num" not defined for the job
+                raw_data = gitlab_inst.download_artifact(job_info['id'],
                                                          [self.format_build_log_path(parallel_num)])[0]
                 build_info_list = [json.loads(line) for line in raw_data.decode().splitlines()]
                 for build_info in build_info_list:
-                    build_info["ci_job_id"] = job_info["id"]
+                    build_info['ci_job_id'] = job_info['id']
                     artifact_index_list.append(build_info)
         artifact_index_file = self.case_group.get_artifact_index_file()
         try:
@@ -63,48 +68,47 @@ class IDFAssignTest(CIAssignTest.AssignTest):
             if e.errno != errno.EEXIST:
                 raise e

-        with open(artifact_index_file, "w") as f:
+        with open(artifact_index_file, 'w') as f:
             json.dump(artifact_index_list, f)


-SUPPORTED_TARGETS = [
-    'esp32',
-    'esp32s2',
-]
-
-
 class ExampleGroup(IDFCaseGroup):
-    SORT_KEYS = CI_JOB_MATCH_KEYS = ["env_tag", "target"]
+    SORT_KEYS = CI_JOB_MATCH_KEYS = ['env_tag', 'target']

-    LOCAL_BUILD_DIR = "build_examples"
-    BUILD_JOB_NAMES = ["build_examples_cmake_{}".format(target) for target in SUPPORTED_TARGETS]
+    LOCAL_BUILD_DIR = 'build_examples'
+    BUILD_JOB_NAMES = ['build_examples_cmake_{}'.format(target) for target in SUPPORTED_TARGETS]


 class TestAppsGroup(ExampleGroup):
-    LOCAL_BUILD_DIR = "build_test_apps"
-    BUILD_JOB_NAMES = ["build_test_apps_{}".format(target) for target in SUPPORTED_TARGETS]
+    LOCAL_BUILD_DIR = 'build_test_apps'
+    BUILD_JOB_NAMES = ['build_test_apps_{}'.format(target) for target in SUPPORTED_TARGETS]


+class ComponentUTGroup(TestAppsGroup):
+    LOCAL_BUILD_DIR = 'build_component_ut'
+    BUILD_JOB_NAMES = ['build_component_ut_{}'.format(target) for target in SUPPORTED_TARGETS]
+
+
 class UnitTestGroup(IDFCaseGroup):
-    SORT_KEYS = ["test environment", "tags", "chip_target"]
-    CI_JOB_MATCH_KEYS = ["test environment"]
+    SORT_KEYS = ['test environment', 'tags', 'chip_target']
+    CI_JOB_MATCH_KEYS = ['test environment']

-    LOCAL_BUILD_DIR = "tools/unit-test-app/builds"
-    BUILD_JOB_NAMES = ["build_esp_idf_tests_cmake_{}".format(target) for target in SUPPORTED_TARGETS]
+    LOCAL_BUILD_DIR = 'tools/unit-test-app/builds'
+    BUILD_JOB_NAMES = ['build_esp_idf_tests_cmake_{}'.format(target) for target in SUPPORTED_TARGETS]

     MAX_CASE = 50
     ATTR_CONVERT_TABLE = {
-        "execution_time": "execution time"
+        'execution_time': 'execution time'
     }
     DUT_CLS_NAME = {
-        "esp32": "ESP32DUT",
-        "esp32s2": "ESP32S2DUT",
-        "esp8266": "ESP8266DUT",
+        'esp32': 'ESP32DUT',
+        'esp32s2': 'ESP32S2DUT',
+        'esp8266': 'ESP8266DUT',
     }

     def __init__(self, case):
         super(UnitTestGroup, self).__init__(case)
-        for tag in self._get_case_attr(case, "tags"):
+        for tag in self._get_case_attr(case, 'tags'):
             self.ci_job_match_keys.add(tag)

     @staticmethod
@@ -119,7 +123,7 @@ class UnitTestGroup(IDFCaseGroup):
         if self.accept_new_case():
             for key in self.filters:
                 if self._get_case_attr(case, key) != self.filters[key]:
-                    if key == "tags":
+                    if key == 'tags':
                         if set(self._get_case_attr(case, key)).issubset(set(self.filters[key])):
                             continue
                     break
@@ -136,18 +140,18 @@ class UnitTestGroup(IDFCaseGroup):
         case_data = []
         for case in test_cases:
             one_case_data = {
-                "config": self._get_case_attr(case, "config"),
-                "name": self._get_case_attr(case, "summary"),
-                "reset": self._get_case_attr(case, "reset"),
-                "timeout": self._get_case_attr(case, "timeout"),
+                'config': self._get_case_attr(case, 'config'),
+                'name': self._get_case_attr(case, 'summary'),
+                'reset': self._get_case_attr(case, 'reset'),
+                'timeout': self._get_case_attr(case, 'timeout'),
             }

-            if test_function in ["run_multiple_devices_cases", "run_multiple_stage_cases"]:
+            if test_function in ['run_multiple_devices_cases', 'run_multiple_stage_cases']:
                 try:
-                    one_case_data["child case num"] = self._get_case_attr(case, "child case num")
+                    one_case_data['child case num'] = self._get_case_attr(case, 'child case num')
                 except KeyError as e:
-                    print("multiple devices/stages cases must contains at least two test functions")
-                    print("case name: {}".format(one_case_data["name"]))
+                    print('multiple devices/stages cases must contains at least two test functions')
+                    print('case name: {}'.format(one_case_data['name']))
                     raise e

             case_data.append(one_case_data)
@@ -160,18 +164,18 @@ class UnitTestGroup(IDFCaseGroup):
         :return: dict of list of cases for each test functions
         """
         case_by_test_function = {
-            "run_multiple_devices_cases": [],
-            "run_multiple_stage_cases": [],
-            "run_unit_test_cases": [],
+            'run_multiple_devices_cases': [],
+            'run_multiple_stage_cases': [],
+            'run_unit_test_cases': [],
         }

         for case in self.case_list:
-            if case["multi_device"] == "Yes":
-                case_by_test_function["run_multiple_devices_cases"].append(case)
-            elif case["multi_stage"] == "Yes":
-                case_by_test_function["run_multiple_stage_cases"].append(case)
+            if case['multi_device'] == 'Yes':
+                case_by_test_function['run_multiple_devices_cases'].append(case)
+            elif case['multi_stage'] == 'Yes':
+                case_by_test_function['run_multiple_stage_cases'].append(case)
             else:
-                case_by_test_function["run_unit_test_cases"].append(case)
+                case_by_test_function['run_unit_test_cases'].append(case)
         return case_by_test_function

     def output(self):
@@ -181,12 +185,12 @@ class UnitTestGroup(IDFCaseGroup):
         :return: {"Filter": case filter, "CaseConfig": list of case configs for cases in this group}
         """
-        target = self._get_case_attr(self.case_list[0], "chip_target")
+        target = self._get_case_attr(self.case_list[0], 'chip_target')
         if target:
             overwrite = {
-                "dut": {
-                    "package": "ttfw_idf",
-                    "class": self.DUT_CLS_NAME[target],
+                'dut': {
+                    'package': 'ttfw_idf',
+                    'class': self.DUT_CLS_NAME[target],
                 }
             }
         else:
@@ -196,11 +200,11 @@ class UnitTestGroup(IDFCaseGroup):
         output_data = {
             # we don't need filter for test function, as UT uses a few test functions for all cases
-            "CaseConfig": [
+            'CaseConfig': [
                 {
-                    "name": test_function,
-                    "extra_data": self._create_extra_data(test_cases, test_function),
-                    "overwrite": overwrite,
+                    'name': test_function,
+                    'extra_data': self._create_extra_data(test_cases, test_function),
+                    'overwrite': overwrite,
                 } for test_function, test_cases in case_by_test_function.items() if test_cases
             ],
         }
@@ -210,22 +214,29 @@ class UnitTestGroup(IDFCaseGroup):
 class ExampleAssignTest(IDFAssignTest):
     CI_TEST_JOB_PATTERN = re.compile(r'^example_test_.+')

-    def __init__(self, est_case_path, ci_config_file):
-        super(ExampleAssignTest, self).__init__(est_case_path, ci_config_file, case_group=ExampleGroup)
+    def __init__(self, test_case_path, ci_config_file):
+        super(ExampleAssignTest, self).__init__(test_case_path, ci_config_file, case_group=ExampleGroup)


 class TestAppsAssignTest(IDFAssignTest):
     CI_TEST_JOB_PATTERN = re.compile(r'^test_app_test_.+')

-    def __init__(self, est_case_path, ci_config_file):
-        super(TestAppsAssignTest, self).__init__(est_case_path, ci_config_file, case_group=TestAppsGroup)
+    def __init__(self, test_case_path, ci_config_file):
+        super(TestAppsAssignTest, self).__init__(test_case_path, ci_config_file, case_group=TestAppsGroup)


+class ComponentUTAssignTest(IDFAssignTest):
+    CI_TEST_JOB_PATTERN = re.compile(r'^component_ut_test_.+')
+
+    def __init__(self, test_case_path, ci_config_file):
+        super(ComponentUTAssignTest, self).__init__(test_case_path, ci_config_file, case_group=ComponentUTGroup)
+
+
 class UnitTestAssignTest(IDFAssignTest):
     CI_TEST_JOB_PATTERN = re.compile(r'^UT_.+')

-    def __init__(self, est_case_path, ci_config_file):
-        super(UnitTestAssignTest, self).__init__(est_case_path, ci_config_file, case_group=UnitTestGroup)
+    def __init__(self, test_case_path, ci_config_file):
+        super(UnitTestAssignTest, self).__init__(test_case_path, ci_config_file, case_group=UnitTestGroup)

     def search_cases(self, case_filter=None):
         """
@ -252,13 +263,14 @@ class UnitTestAssignTest(IDFAssignTest):
return test_cases return test_cases
test_cases = [] test_cases = []
if os.path.isdir(self.test_case_path): for path in self.test_case_paths:
for yml_file in find_by_suffix('.yml', self.test_case_path): if os.path.isdir(path):
for yml_file in find_by_suffix('.yml', path):
test_cases.extend(get_test_cases_from_yml(yml_file)) test_cases.extend(get_test_cases_from_yml(yml_file))
elif os.path.isfile(self.test_case_path): elif os.path.isfile(path) and path.endswith('.yml'):
test_cases.extend(get_test_cases_from_yml(self.test_case_path)) test_cases.extend(get_test_cases_from_yml(path))
else: else:
print("Test case path is invalid. Should only happen when use @bot to skip unit test.") print('Test case path is invalid. Should only happen when use @bot to skip unit test.')
# filter keys are lower case. Map the lower-case keys back to the original keys. # filter keys are lower case. Map the lower-case keys back to the original keys.
try: try:
@ -285,27 +297,30 @@ class UnitTestAssignTest(IDFAssignTest):
# sort cases with configs and test functions # sort cases with configs and test functions
# in later stage cases with similar attributes are more likely to be assigned to the same job # in later stage cases with similar attributes are more likely to be assigned to the same job
# it will reduce the count of flash DUT operations # it will reduce the count of flash DUT operations
test_cases.sort(key=lambda x: x["config"] + x["multi_stage"] + x["multi_device"]) test_cases.sort(key=lambda x: x['config'] + x['multi_stage'] + x['multi_device'])
return test_cases return test_cases
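The composite key above is plain string concatenation, so cases sharing a config and test type land next to each other in the sorted list. A minimal sketch of the effect, with invented case dicts (the real entries come from the YAML files):

cases = [
    {'config': 'psram', 'multi_stage': 'Y', 'multi_device': 'N'},
    {'config': 'default', 'multi_stage': 'N', 'multi_device': 'N'},
    {'config': 'psram', 'multi_stage': 'Y', 'multi_device': 'N'},
]
cases.sort(key=lambda x: x['config'] + x['multi_stage'] + x['multi_device'])
# the two 'psram' multi-stage cases are now adjacent, so the assigner is more
# likely to place them in one job and flash the DUT only once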
if __name__ == '__main__': if __name__ == '__main__':
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument("case_group", choices=["example_test", "custom_test", "unit_test"]) parser.add_argument('case_group', choices=['example_test', 'custom_test', 'unit_test', 'component_ut'])
parser.add_argument("test_case", help="test case folder or file") parser.add_argument('test_case_paths', nargs='+', help='test case folder or file')
parser.add_argument("ci_config_file", help="gitlab ci config file") parser.add_argument('-c', '--config', help='gitlab ci config file')
parser.add_argument("output_path", help="output path of config files") parser.add_argument('-o', '--output', help='output path of config files')
parser.add_argument("--pipeline_id", "-p", type=int, default=None, help="pipeline_id") parser.add_argument('--pipeline_id', '-p', type=int, default=None, help='pipeline_id')
parser.add_argument("--test-case-file-pattern", help="file name pattern used to find Python test case files") parser.add_argument('--test-case-file-pattern', help='file name pattern used to find Python test case files')
args = parser.parse_args() args = parser.parse_args()
args_list = [args.test_case, args.ci_config_file] test_case_paths = [os.path.join(IDF_PATH_FROM_ENV, path) if not os.path.isabs(path) else path for path in args.test_case_paths]
args_list = [test_case_paths, args.config]
if args.case_group == 'example_test': if args.case_group == 'example_test':
assigner = ExampleAssignTest(*args_list) assigner = ExampleAssignTest(*args_list)
elif args.case_group == 'custom_test': elif args.case_group == 'custom_test':
assigner = TestAppsAssignTest(*args_list) assigner = TestAppsAssignTest(*args_list)
elif args.case_group == 'unit_test': elif args.case_group == 'unit_test':
assigner = UnitTestAssignTest(*args_list) assigner = UnitTestAssignTest(*args_list)
elif args.case_group == 'component_ut':
assigner = ComponentUTAssignTest(*args_list)
else: else:
raise SystemExit(1) # which is impossible raise SystemExit(1) # which is impossible
@ -313,5 +328,5 @@ if __name__ == '__main__':
assigner.CI_TEST_JOB_PATTERN = re.compile(r'{}'.format(args.test_case_file_pattern)) assigner.CI_TEST_JOB_PATTERN = re.compile(r'{}'.format(args.test_case_file_pattern))
assigner.assign_cases() assigner.assign_cases()
assigner.output_configs(args.output_path) assigner.output_configs(args.output)
assigner.create_artifact_index_file() assigner.create_artifact_index_file()
View File
@ -17,10 +17,13 @@ import logging
import os import os
import re import re
import junit_xml
from tiny_test_fw import TinyFW, Utility from tiny_test_fw import TinyFW, Utility
from .IDFApp import IDFApp, Example, LoadableElfTestApp, UT, TestApp # noqa: export all Apps for users
from .IDFDUT import IDFDUT, ESP32DUT, ESP32S2DUT, ESP8266DUT, ESP32QEMUDUT # noqa: export DUTs for users
from .DebugUtils import OCDBackend, GDBBackend, CustomProcess # noqa: export DebugUtils for users from .DebugUtils import OCDBackend, GDBBackend, CustomProcess # noqa: export DebugUtils for users
from .IDFApp import IDFApp, Example, LoadableElfTestApp, UT, TestApp, ComponentUTApp # noqa: export all Apps for users
from .IDFDUT import IDFDUT, ESP32DUT, ESP32S2DUT, ESP8266DUT, ESP32QEMUDUT # noqa: export DUTs for users
from .unity_test_parser import TestResults, TestFormat
# pass TARGET_DUT_CLS_DICT to Env.py to avoid circular dependency issue. # pass TARGET_DUT_CLS_DICT to Env.py to avoid circular dependency issue.
TARGET_DUT_CLS_DICT = { TARGET_DUT_CLS_DICT = {
@ -108,6 +111,22 @@ def ci_target_check(func):
return wrapper return wrapper
def test_func_generator(func, app, target, ci_target, module, execution_time, level, erase_nvs, drop_kwargs_dut=False, **kwargs):
test_target = local_test_check(target)
dut = get_dut_class(test_target, erase_nvs)
if drop_kwargs_dut and 'dut' in kwargs: # panic_test() will inject dut, resolve conflicts here
dut = kwargs['dut']
del kwargs['dut']
original_method = TinyFW.test_method(
app=app, dut=dut, target=upper_list_or_str(target), ci_target=upper_list_or_str(ci_target),
module=module, execution_time=execution_time, level=level, erase_nvs=erase_nvs,
dut_dict=TARGET_DUT_CLS_DICT, **kwargs
)
test_func = original_method(func)
test_func.case_info["ID"] = format_case_id(target, test_func.case_info["name"])
return test_func
@ci_target_check @ci_target_check
def idf_example_test(app=Example, target="ESP32", ci_target=None, module="examples", execution_time=1, def idf_example_test(app=Example, target="ESP32", ci_target=None, module="examples", execution_time=1,
level="example", erase_nvs=True, config_name=None, **kwargs): level="example", erase_nvs=True, config_name=None, **kwargs):
@ -125,19 +144,8 @@ def idf_example_test(app=Example, target="ESP32", ci_target=None, module="exampl
:param kwargs: other keyword args :param kwargs: other keyword args
:return: test method :return: test method
""" """
def test(func): def test(func):
test_target = local_test_check(target) return test_func_generator(func, app, target, ci_target, module, execution_time, level, erase_nvs, **kwargs)
dut = get_dut_class(test_target, erase_nvs)
original_method = TinyFW.test_method(
app=app, dut=dut, target=upper_list_or_str(target), ci_target=upper_list_or_str(ci_target),
module=module, execution_time=execution_time, level=level, erase_nvs=erase_nvs,
dut_dict=TARGET_DUT_CLS_DICT, **kwargs
)
test_func = original_method(func)
test_func.case_info["ID"] = format_case_id(target, test_func.case_info["name"])
return test_func
return test return test
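For reference, a hedged sketch of how the slimmed-down decorator is consumed; the app path, env_tag and expected string below are invented for illustration:

@ttfw_idf.idf_example_test(env_tag='Example_GENERIC', target=['ESP32', 'ESP32S2'])
def test_examples_hello_world(env, extra_data):
    dut = env.get_dut('hello_world', 'examples/get-started/hello_world')
    dut.start_app()
    dut.expect('Hello world!')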
@ -157,25 +165,36 @@ def idf_unit_test(app=UT, target="ESP32", ci_target=None, module="unit-test", ex
:param kwargs: other keyword args :param kwargs: other keyword args
:return: test method :return: test method
""" """
def test(func): def test(func):
test_target = local_test_check(target) return test_func_generator(func, app, target, ci_target, module, execution_time, level, erase_nvs, **kwargs)
dut = get_dut_class(test_target, erase_nvs)
original_method = TinyFW.test_method(
app=app, dut=dut, target=upper_list_or_str(target), ci_target=upper_list_or_str(ci_target),
module=module, execution_time=execution_time, level=level, erase_nvs=erase_nvs,
dut_dict=TARGET_DUT_CLS_DICT, **kwargs
)
test_func = original_method(func)
test_func.case_info["ID"] = format_case_id(target, test_func.case_info["name"])
return test_func
return test return test
@ci_target_check @ci_target_check
def idf_custom_test(app=TestApp, target="ESP32", ci_target=None, module="misc", execution_time=1, def idf_custom_test(app=TestApp, target="ESP32", ci_target=None, module="misc", execution_time=1,
level="integration", erase_nvs=True, config_name=None, group="test-apps", **kwargs): level="integration", erase_nvs=True, config_name=None, **kwargs):
"""
decorator for idf custom tests (with default values for some keyword args).
:param app: test application class
:param target: target supported, string or list
:param ci_target: target auto run in CI; if None, then all targets will be tested; None, string or list
:param module: module, string
:param execution_time: execution time in minutes, int
:param level: test level, could be used to filter test cases, string
:param erase_nvs: if need to erase_nvs in DUT.start_app()
:param config_name: if specified, name of the app configuration
:param kwargs: other keyword args
:return: test method
"""
def test(func):
return test_func_generator(func, app, target, ci_target, module, execution_time, level, erase_nvs, drop_kwargs_dut=True, **kwargs)
return test
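Because idf_custom_test forwards drop_kwargs_dut=True, a caller such as the panic test can inject its own DUT class through the decorator kwargs, and test_func_generator will use it instead of the target-derived class. A hedged sketch (PanicTestDUT is a hypothetical class name):

@ttfw_idf.idf_custom_test(env_tag='Example_GENERIC', dut=PanicTestDUT)
def test_panic_handling(env, extra_data):
    dut = env.get_dut('panic', 'tools/test_apps/system/panic')
    dut.start_app()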
@ci_target_check
def idf_component_unit_test(app=ComponentUTApp, target="ESP32", ci_target=None, module="misc", execution_time=1,
level="integration", erase_nvs=True, config_name=None, **kwargs):
""" """
decorator for idf custom tests (with default values for some keyword args). decorator for idf custom tests (with default values for some keyword args).
@ -187,29 +206,41 @@ def idf_custom_test(app=TestApp, target="ESP32", ci_target=None, module="misc",
:param level: test level, could be used to filter test cases, string :param level: test level, could be used to filter test cases, string
:param erase_nvs: if need to erase_nvs in DUT.start_app() :param erase_nvs: if need to erase_nvs in DUT.start_app()
:param config_name: if specified, name of the app configuration :param config_name: if specified, name of the app configuration
:param group: identifier to group custom tests (unused for now, defaults to "test-apps")
:param kwargs: other keyword args :param kwargs: other keyword args
:return: test method :return: test method
""" """
def test(func): def test(func):
test_target = local_test_check(target) return test_func_generator(func, app, target, ci_target, module, execution_time, level, erase_nvs, **kwargs)
dut = get_dut_class(test_target, erase_nvs)
if 'dut' in kwargs: # panic_test() will inject dut, resolve conflicts here
dut = kwargs['dut']
del kwargs['dut']
original_method = TinyFW.test_method(
app=app, dut=dut, target=upper_list_or_str(target), ci_target=upper_list_or_str(ci_target),
module=module, execution_time=execution_time, level=level, erase_nvs=erase_nvs,
dut_dict=TARGET_DUT_CLS_DICT, **kwargs
)
test_func = original_method(func)
test_func.case_info["ID"] = format_case_id(target, test_func.case_info["name"])
return test_func
return test return test
class ComponentUTResult:
"""
Helper class that parses component unit test results
"""
@staticmethod
def parse_result(stdout):
try:
results = TestResults(stdout, TestFormat.UNITY_FIXTURE_VERBOSE)
except (ValueError, TypeError) as e:
raise ValueError('Error occurred while parsing the component unit test stdout into a JUnit report: ' + str(e))
group_name = results.tests()[0].group()
with open(os.path.join(os.getenv('LOG_PATH', ''), '{}_XUNIT_RESULT.xml'.format(group_name)), 'w') as fw:
junit_xml.to_xml_report_file(fw, [results.to_junit()])
if results.num_failed():
# raise exception if any case fails
err_msg = 'Failed Cases:\n'
for test_case in results.test_iter():
if test_case.result() == 'FAIL':
err_msg += '\t{}: {}\n'.format(test_case.name(), test_case.message())
raise AssertionError(err_msg)
def log_performance(item, value): def log_performance(item, value):
""" """
do print performance with pre-defined format to console do print performance with pre-defined format to console
View File
@ -0,0 +1,375 @@
"""
Modified version of https://github.com/ETCLabs/unity-test-parser/blob/develop/unity_test_parser.py,
since only Python 3.6 and higher have ``enum.auto()``.
unity_test_parser.py
Parse the output of the Unity Test Framework for C. Parsed results are held in the TestResults
object format, which can then be converted to various XML formats.
"""
import enum
import re
import junit_xml
_NORMAL_TEST_REGEX = re.compile(r"(?P<file>.+):(?P<line>\d+):(?P<test_name>[^\s:]+):(?P<result>PASS|FAIL|IGNORE)(?:: (?P<message>.+))?")
_UNITY_FIXTURE_VERBOSE_PREFIX_REGEX = re.compile(r"(?P<prefix>TEST\((?P<test_group>[^\s,]+), (?P<test_name>[^\s\)]+)\))(?P<remainder>.+)?$")
_UNITY_FIXTURE_REMAINDER_REGEX = re.compile(r"^(?P<file>.+):(?P<line>\d+)::(?P<result>PASS|FAIL|IGNORE)(?:: (?P<message>.+))?")
_TEST_SUMMARY_BLOCK_REGEX = re.compile(
r"^(?P<num_tests>\d+) Tests (?P<num_failures>\d+) Failures (?P<num_ignored>\d+) Ignored\s*\r?\n(?P<overall_result>OK|FAIL)(?:ED)?", re.MULTILINE
)
_TEST_RESULT_ENUM = ["PASS", "FAIL", "IGNORE"]
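# As a quick, invented illustration of what the first pattern captures:
#   m = _NORMAL_TEST_REGEX.match('main/test_calc.c:47:test_addition:FAIL: Expected 4 Was 5')
# yields m.group('file') == 'main/test_calc.c', m.group('line') == '47',
# m.group('test_name') == 'test_addition', m.group('result') == 'FAIL',
# and m.group('message') == 'Expected 4 Was 5'.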
class TestFormat(enum.Enum):
"""Represents the flavor of Unity used to produce a given output."""
UNITY_BASIC = 0
# UNITY_FIXTURE = enum.auto()
UNITY_FIXTURE_VERBOSE = 1
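# Make the enum members (UNITY_BASIC, UNITY_FIXTURE_VERBOSE) available as
# module-level names, preserving the upstream module's public interface.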
globals().update(TestFormat.__members__)
class TestStats:
"""Statistics about a test collection"""
def __init__(self):
self.total = 0
self.passed = 0
self.failed = 0
self.ignored = 0
def __eq__(self, other):
if isinstance(other, self.__class__):
return (self.total == other.total
and self.passed == other.passed
and self.failed == other.failed
and self.ignored == other.ignored)
return False
class TestResult:
"""
Class representing the result of a single test.
Contains the test name, its result (either PASS, FAIL or IGNORE), the file and line number if
the test result was not PASS, and an optional message.
"""
def __init__(
self,
test_name,
result,
group="default",
file="",
line=0,
message="",
full_line="",
):
if result not in _TEST_RESULT_ENUM:
raise ValueError("result must be one of {}.".format(_TEST_RESULT_ENUM))
self._test_name = test_name
self._result = result
self._group = group
self._message = message
self._full_line = full_line
if result != "PASS":
self._file = file
self._line = line
else:
self._file = ""
self._line = 0
def file(self):
"""The file name - returns empty string if the result is PASS."""
return self._file
def line(self):
"""The line number - returns 0 if the result is PASS."""
return self._line
def name(self):
"""The test name."""
return self._test_name
def result(self):
"""The test result, one of PASS, FAIL or IGNORED."""
return self._result
def group(self):
"""
The test group, if applicable.
For basic Unity output, this will always be "default".
"""
return self._group
def message(self):
"""The accompanying message - returns empty string if the result is PASS."""
return self._message
def full_line(self):
"""The original, full line of unit test output that this object was created from."""
return self._full_line
class TestResults:
"""
Class representing Unity test results.
After being initialized with raw test output, it parses the output and represents it as a list
of TestResult objects which can be inspected or converted to other types of output, e.g. JUnit
XML.
"""
def __init__(self, test_output, test_format=TestFormat.UNITY_BASIC):
"""
Create a new TestResults object from Unity test output.
Keyword arguments:
test_output -- The full test console output, must contain the overall result and summary
block at the bottom.
Optional arguments:
test_format -- TestFormat enum representing the flavor of Unity used to create the output.
Exceptions:
ValueError, if the test output is not formatted properly.
"""
self._tests = []
self._test_stats = self._find_summary_block(test_output)
if test_format is TestFormat.UNITY_BASIC:
self._parse_unity_basic(test_output)
elif test_format is TestFormat.UNITY_FIXTURE_VERBOSE:
self._parse_unity_fixture_verbose(test_output)
else:
raise ValueError(
"test_format must be one of UNITY_BASIC or UNITY_FIXTURE_VERBOSE."
)
def num_tests(self):
"""The total number of tests parsed."""
return self._test_stats.total
def num_passed(self):
"""The number of tests with result PASS."""
return self._test_stats.passed
def num_failed(self):
"""The number of tests with result FAIL."""
return self._test_stats.failed
def num_ignored(self):
"""The number of tests with result IGNORE."""
return self._test_stats.ignored
def test_iter(self):
"""Get an iterator for iterating over individual tests.
Returns an iterator over TestResult objects.
Example:
for test in unity_results.test_iter():
print(test.name())
"""
return iter(self._tests)
def tests(self):
"""Get a list of all the tests (TestResult objects)."""
return self._tests
def to_junit(
self, suite_name="all_tests",
):
"""
Convert the tests to JUnit XML.
Returns a junit_xml.TestSuite containing all of the test cases. One test suite will be
generated with the name given in suite_name. Unity Fixture test groups are mapped to the
classname attribute of test cases; for basic Unity output there will be one class named
"default".
Optional arguments:
suite_name -- The name to use for the "name" and "package" attributes of the testsuite element.
Sample output:
<testsuite disabled="0" errors="0" failures="1" name="[suite_name]" package="[suite_name]" skipped="0" tests="8" time="0">
<testcase classname="test_group_1" name="group_1_test" />
<testcase classname="test_group_2" name="group_2_test" />
</testsuite>
"""
test_case_list = []
for test in self._tests:
if test.result() == "PASS":
test_case_list.append(
junit_xml.TestCase(name=test.name(), classname=test.group())
)
else:
junit_tc = junit_xml.TestCase(
name=test.name(),
classname=test.group(),
file=test.file(),
line=test.line(),
)
if test.result() == "FAIL":
junit_tc.add_failure_info(
message=test.message(), output=test.full_line()
)
elif test.result() == "IGNORE":
junit_tc.add_skipped_info(
message=test.message(), output=test.full_line()
)
test_case_list.append(junit_tc)
return junit_xml.TestSuite(
name=suite_name, package=suite_name, test_cases=test_case_list
)
def _find_summary_block(self, unity_output):
"""
Find and parse the test summary block.
Unity prints a test summary block at the end of a test run of the form:
-----------------------
X Tests Y Failures Z Ignored
[PASS|FAIL]
Returns the contents of the test summary block as a TestStats object.
"""
match = _TEST_SUMMARY_BLOCK_REGEX.search(unity_output)
if not match:
raise ValueError("A Unity test summary block was not found.")
try:
stats = TestStats()
stats.total = int(match.group("num_tests"))
stats.failed = int(match.group("num_failures"))
stats.ignored = int(match.group("num_ignored"))
stats.passed = stats.total - stats.failed - stats.ignored
return stats
except ValueError:
raise ValueError("The Unity test summary block was not valid.")
def _parse_unity_basic(self, unity_output):
"""
Parse basic unity output.
This is of the form file:line:test_name:result[:optional_message]
"""
found_test_stats = TestStats()
for test in _NORMAL_TEST_REGEX.finditer(unity_output):
try:
new_test = TestResult(
test.group("test_name"),
test.group("result"),
file=test.group("file"),
line=int(test.group("line")),
message=test.group("message")
if test.group("message") is not None
else "",
full_line=test.group(0),
)
except ValueError:
continue
self._add_new_test(new_test, found_test_stats)
if len(self._tests) == 0:
raise ValueError("No tests were found.")
if found_test_stats != self._test_stats:
raise ValueError("Test output does not match summary block.")
def _parse_unity_fixture_verbose(self, unity_output):
"""
Parse the output of the unity_fixture add-in invoked with the -v flag.
This is a more complex operation than basic unity output, because the output for a single
test can span multiple lines. There is a prefix of the form "TEST(test_group, test_name)"
that always exists on the first line for a given test. Immediately following that can be a
pass or fail message, or some number of diagnostic messages followed by a pass or fail
message.
"""
found_test_stats = TestStats()
line_iter = iter(unity_output.splitlines())
try:
line = next(line_iter)
while True:
prefix_match = _UNITY_FIXTURE_VERBOSE_PREFIX_REGEX.search(line)
line = next(line_iter)
if prefix_match:
# Handle the remaining portion of a test case line after the unity_fixture
# prefix.
remainder = prefix_match.group("remainder")
if remainder:
self._parse_unity_fixture_remainder(
prefix_match, remainder, found_test_stats
)
# Handle any subsequent lines with more information on the same test case.
while not _UNITY_FIXTURE_VERBOSE_PREFIX_REGEX.search(line):
self._parse_unity_fixture_remainder(
prefix_match, line, found_test_stats
)
line = next(line_iter)
except StopIteration:
pass
if len(self._tests) == 0:
raise ValueError("No tests were found.")
if found_test_stats != self._test_stats:
raise ValueError("Test output does not match summary block.")
def _parse_unity_fixture_remainder(self, prefix_match, remainder, test_stats):
"""
Parse the remainder of a Unity Fixture test case.
Can be on the same line as the prefix or on subsequent lines.
"""
new_test = None
if remainder == " PASS":
new_test = TestResult(
prefix_match.group("test_name"),
"PASS",
group=prefix_match.group("test_group"),
full_line=prefix_match.group(0),
)
else:
remainder_match = _UNITY_FIXTURE_REMAINDER_REGEX.match(remainder)
if remainder_match:
new_test = TestResult(
prefix_match.group("test_name"),
remainder_match.group("result"),
group=prefix_match.group("test_group"),
file=remainder_match.group("file"),
line=int(remainder_match.group("line")),
message=remainder_match.group("message")
if remainder_match.group("message") is not None
else "",
full_line=prefix_match.group("prefix") + remainder_match.group(0),
)
if new_test is not None:
self._add_new_test(new_test, test_stats)
def _add_new_test(self, new_test, test_stats):
"""Add a new test and increment the proper members of test_stats."""
test_stats.total += 1
if new_test.result() == "PASS":
test_stats.passed += 1
elif new_test.result() == "FAIL":
test_stats.failed += 1
else:
test_stats.ignored += 1
self._tests.append(new_test)
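Taken together, a minimal end-to-end sketch of driving this parser; the captured output and report file name are invented here, and ComponentUTResult.parse_result above is the real caller:

raw_output = '''TEST(my_group, my_case) PASS

-----------------------
1 Tests 0 Failures 0 Ignored
OK
'''
results = TestResults(raw_output, TestFormat.UNITY_FIXTURE_VERBOSE)
with open('my_group_XUNIT_RESULT.xml', 'w') as report_file:
    junit_xml.to_xml_report_file(report_file, [results.to_junit()])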
View File
@ -18,6 +18,7 @@
#include "freertos/FreeRTOS.h" #include "freertos/FreeRTOS.h"
#include "freertos/task.h" #include "freertos/task.h"
#include "unity.h" #include "unity.h"
#include "unity_test_runner.h"
#include "test_utils.h" #include "test_utils.h"
#include "esp_newlib.h" #include "esp_newlib.h"