ci: dynamic pipeline

build:
- upgrade idf-build-apps to 2.x
- unify get_pytest_apps and get_cmake_apps into a single get_all_apps (see the sketch after this list)
  - returns a (test_apps, non_test_apps) tuple
- add tests for the new get_all_apps
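A minimal sketch of what the unified helper might look like, assuming the idf-build-apps 2.x `find_apps()` entry point; the parameter names and the `pytest_build_dirs` lookup are illustrative, only the `(test_apps, non_test_apps)` return shape comes from this commit:

```python
# Hypothetical sketch, not the actual tools/ci implementation.
import typing as t

from idf_build_apps import App, find_apps


def get_all_apps(
    paths: t.List[str],
    target: str = 'all',
    pytest_build_dirs: t.Optional[t.Set[str]] = None,
    modified_components: t.Optional[t.List[str]] = None,
    modified_files: t.Optional[t.List[str]] = None,
) -> t.Tuple[t.List[App], t.List[App]]:
    """Return (test_related_apps, non_test_related_apps)."""
    apps = find_apps(
        paths,
        target,
        modified_components=modified_components,
        modified_files=modified_files,
    )
    pytest_build_dirs = pytest_build_dirs or set()
    # An app counts as "test related" when some pytest case targets its build dir
    # (App.build_path is assumed here; adjust to the real attribute if it differs).
    test_apps = [app for app in apps if app.build_path in pytest_build_dirs]
    non_test_apps = [app for app in apps if app.build_path not in pytest_build_dirs]
    return test_apps, non_test_apps
```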

assign:
- generate a build report
- generate the target test pipeline based on the build report (sketched below)
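The generated pipeline is a regular GitLab child pipeline: the assign step reads the build report and writes a YAML file that the parent pipeline triggers. A rough, hypothetical sketch of that generator (job names, grouping key, and chunk size are illustrative, not the real tools/ci/dynamic_pipelines code):

```python
# Illustrative sketch: one target-test job per (target, runner-env) group.
import typing as t

import yaml


def generate_target_test_child_pipeline(
    apps_by_runner: t.Dict[t.Tuple[str, str], t.List[str]],
    output_path: str = 'target_test_child_pipeline.yml',
    cases_per_job: int = 30,
) -> None:
    jobs: t.Dict[str, t.Any] = {}
    for (target, env_marker), app_dirs in apps_by_runner.items():
        jobs[f'{target} - {env_marker}'] = {
            'extends': ['.dynamic_target_test_template'],
            'tags': [target, env_marker],
            'parallel': max(1, len(app_dirs) // cases_per_job),
        }
    with open(output_path, 'w') as fw:
        yaml.safe_dump(jobs, fw, sort_keys=False)
```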

target test:
- download artifacts from the internal MinIO server
- users can run `pytest --pipeline-id xxxxx` to download and flash the binaries from those artifacts (see the example below)
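On the pytest side the hook-up is roughly the following, condensed from the conftest.py changes shown later in this diff (the real `app_downloader` fixture additionally chooses between a MinIO client and presigned-URL downloads):

```python
# Condensed sketch of the new --pipeline-id option and session fixture.
import os
import typing as t

import pytest


def pytest_addoption(parser: pytest.Parser) -> None:
    parser.addoption(
        '--pipeline-id',
        help='main pipeline id; used to locate and download the built binaries',
    )


@pytest.fixture(scope='session')
def pipeline_id(request: pytest.FixtureRequest) -> t.Optional[str]:
    # Fall back to PARENT_PIPELINE_ID when running inside a CI child pipeline.
    return request.config.getoption('pipeline_id', None) or os.getenv('PARENT_PIPELINE_ID', None)
```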

.post:
- generate target test reports (a simplified sketch follows)
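The report generator itself is not visible in this excerpt; purely as an illustration, a .post step could fold the per-job junit files into one summary along these lines (the file pattern is taken from the artifact changes below, everything else is hypothetical):

```python
# Hypothetical sketch of aggregating target test results.
import glob
import xml.etree.ElementTree as ET


def summarize_junit_reports(pattern: str = 'XUNIT_RESULT*.xml') -> dict:
    totals = {'tests': 0, 'failures': 0, 'errors': 0, 'skipped': 0}
    for path in glob.glob(pattern):
        root = ET.parse(path).getroot()
        suites = [root] if root.tag == 'testsuite' else list(root.iter('testsuite'))
        for suite in suites:
            for key in totals:
                totals[key] += int(suite.get(key) or 0)
    return totals
```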
Author: Fu Hanxi
Date: 2023-12-18 15:29:58 +01:00
Parent: a98923bc85
Commit: fba96d58c2
44 changed files with 2184 additions and 3057 deletions

.gitignore

@ -24,18 +24,6 @@ GPATH
# cache dir
.cache/
# Components Unit Test Apps files
components/**/build/
components/**/build_*_*/
components/**/sdkconfig
components/**/sdkconfig.old
# Example project files
examples/**/build/
examples/**/build_esp*_*/
examples/**/sdkconfig
examples/**/sdkconfig.old
# Doc build artifacts
docs/_build/
docs/doxygen_sqlite3.db
@ -44,16 +32,23 @@ docs/doxygen_sqlite3.db
docs/_static/DejaVuSans.ttf
docs/_static/NotoSansSC-Regular.otf
# Components Unit Test Apps files
components/**/build/
components/**/build_*_*/
components/**/sdkconfig
components/**/sdkconfig.old
# Example project files
examples/**/build/
examples/**/build_*_*/
examples/**/sdkconfig
examples/**/sdkconfig.old
# Unit test app files
tools/unit-test-app/sdkconfig
tools/unit-test-app/sdkconfig.old
tools/unit-test-app/build
tools/unit-test-app/build_*_*/
tools/unit-test-app/output
tools/unit-test-app/sdkconfig
tools/unit-test-app/test_configs
tools/unit-test-app/sdkconfig.old
# Unit Test CMake compile log folder
log_ut_cmake
# test application build files
tools/test_apps/**/build/
@ -61,7 +56,8 @@ tools/test_apps/**/build_*_*/
tools/test_apps/**/sdkconfig
tools/test_apps/**/sdkconfig.old
TEST_LOGS
TEST_LOGS/
build_summary_*.xml
# gcov coverage reports
*.gcda
@ -101,8 +97,9 @@ managed_components
# pytest log
pytest_embedded_log/
list_job_*.txt
list_job*.txt
size_info.txt
size_info*.txt
XUNIT_RESULT*.xml
# clang config (for LSP)
.clangd


@ -28,5 +28,4 @@ include:
- '.gitlab/ci/build.yml'
- '.gitlab/ci/integration_test.yml'
- '.gitlab/ci/host-test.yml'
- '.gitlab/ci/target-test.yml'
- '.gitlab/ci/deploy.yml'


@ -1,7 +1,7 @@
.build_template:
stage: build
extends:
- .after_script:build:ccache
- .after_script:build:ccache:upload-when-fail
image: $ESP_ENV_IMAGE
tags:
- build
@ -43,8 +43,8 @@
examples/bluetooth/esp_ble_mesh/ble_mesh_console
examples/bluetooth/hci/controller_hci_uart_esp32
examples/wifi/iperf
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
# for detailed documents, please refer to .gitlab/ci/README.md#uploaddownload-artifacts-to-internal-minio-server
- python tools/ci/artifacts_handler.py upload
@ -62,307 +62,14 @@
--copy-sdkconfig
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
$TEST_BUILD_OPTS_EXTRA
- python tools/ci/artifacts_handler.py upload
######################
# build_template_app #
######################
.build_pytest_template:
extends:
- .build_cmake_template
script:
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
-t $IDF_TARGET
-m \"not host_test\"
--pytest-apps
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
- python tools/ci/artifacts_handler.py upload
.build_pytest_no_jtag_template:
extends:
- .build_cmake_template
script:
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
-t $IDF_TARGET
-m \"not host_test and not jtag\"
--pytest-apps
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
- python tools/ci/artifacts_handler.py upload
.build_pytest_jtag_template:
extends:
- .build_cmake_template
script:
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py $TEST_DIR -v
-t $IDF_TARGET
-m \"not host_test and jtag\"
--pytest-apps
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
- python tools/ci/artifacts_handler.py upload
build_pytest_examples_esp32:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32
parallel: 6
variables:
IDF_TARGET: esp32
TEST_DIR: examples
build_pytest_examples_esp32s2:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32s2
parallel: 3
variables:
IDF_TARGET: esp32s2
TEST_DIR: examples
build_pytest_examples_esp32s3:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32s3
parallel: 4
variables:
IDF_TARGET: esp32s3
TEST_DIR: examples
build_pytest_examples_esp32c3:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32c3
parallel: 4
variables:
IDF_TARGET: esp32c3
TEST_DIR: examples
build_pytest_examples_esp32c2:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32c2
parallel: 2
variables:
IDF_TARGET: esp32c2
TEST_DIR: examples
build_pytest_examples_esp32c6:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32c6
parallel: 2
variables:
IDF_TARGET: esp32c6
TEST_DIR: examples
build_pytest_examples_esp32h2:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32h2
parallel: 2
variables:
IDF_TARGET: esp32h2
TEST_DIR: examples
build_pytest_examples_esp32p4:
extends:
- .build_pytest_no_jtag_template
- .rules:build:example_test-esp32p4
parallel: 2
variables:
IDF_TARGET: esp32p4
TEST_DIR: examples
build_pytest_examples_jtag: # for all targets
extends:
- .build_pytest_jtag_template
- .rules:build:example_test
variables:
IDF_TARGET: all
TEST_DIR: examples
build_pytest_components_esp32:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32
parallel: 5
variables:
IDF_TARGET: esp32
TEST_DIR: components
build_pytest_components_esp32s2:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32s2
parallel: 4
variables:
IDF_TARGET: esp32s2
TEST_DIR: components
build_pytest_components_esp32s3:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32s3
parallel: 4
variables:
IDF_TARGET: esp32s3
TEST_DIR: components
build_pytest_components_esp32c3:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32c3
parallel: 4
variables:
IDF_TARGET: esp32c3
TEST_DIR: components
build_pytest_components_esp32c2:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32c2
parallel: 3
variables:
IDF_TARGET: esp32c2
TEST_DIR: components
build_pytest_components_esp32c6:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32c6
parallel: 3
variables:
IDF_TARGET: esp32c6
TEST_DIR: components
build_pytest_components_esp32h2:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32h2
parallel: 4
variables:
IDF_TARGET: esp32h2
TEST_DIR: components
build_pytest_components_esp32p4:
extends:
- .build_pytest_template
- .rules:build:component_ut-esp32p4
parallel: 4
variables:
IDF_TARGET: esp32p4
TEST_DIR: components
build_only_components_apps:
extends:
- .build_cmake_template
- .rules:build:component_ut
parallel: 5
script:
- set_component_ut_vars
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py $COMPONENT_UT_DIRS -v
-t all
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
- python tools/ci/artifacts_handler.py upload
build_pytest_test_apps_esp32:
extends:
- .build_pytest_template
- .rules:build:custom_test-esp32
variables:
IDF_TARGET: esp32
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32s2:
extends:
- .build_pytest_template
- .rules:build:custom_test-esp32s2
variables:
IDF_TARGET: esp32s2
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32s3:
extends:
- .build_pytest_template
- .rules:build:custom_test-esp32s3
parallel: 2
variables:
IDF_TARGET: esp32s3
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32c3:
extends:
- .build_pytest_template
- .rules:build:custom_test-esp32c3
variables:
IDF_TARGET: esp32c3
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32c2:
extends:
- .build_pytest_template
- .rules:build:custom_test-esp32c2
variables:
IDF_TARGET: esp32c2
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32c6:
extends:
- .build_pytest_template
- .rules:build:custom_test-esp32c6
variables:
IDF_TARGET: esp32c6
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32h2:
extends:
- .build_pytest_template
- .rules:build:custom_test-esp32h2
variables:
IDF_TARGET: esp32h2
TEST_DIR: tools/test_apps
build_pytest_test_apps_esp32p4:
extends:
- .build_pytest_template
- .rules:build:custom_test-esp32p4
variables:
IDF_TARGET: esp32p4
TEST_DIR: tools/test_apps
build_only_tools_test_apps:
extends:
- .build_cmake_template
- .rules:build:custom_test
parallel: 9
script:
# CI specific options start from "--parallel-count xxx". could ignore when running locally
- run_cmd python tools/ci/ci_build_apps.py tools/test_apps -v
-t all
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
- python tools/ci/artifacts_handler.py upload
.build_template_app_template:
extends:
- .build_template
@ -400,96 +107,27 @@ fast_template_app:
BUILD_COMMAND_ARGS: "-p"
#------------------------------------------------------------------------------
########################################
# Clang Build Apps Without Tests Cases #
########################################
build_examples_cmake_esp32:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32
parallel: 8
variables:
IDF_TARGET: esp32
TEST_DIR: examples
build_examples_cmake_esp32s2:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32s2
parallel: 7
variables:
IDF_TARGET: esp32s2
TEST_DIR: examples
build_examples_cmake_esp32s3:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32s3
parallel: 11
variables:
IDF_TARGET: esp32s3
TEST_DIR: examples
build_examples_cmake_esp32c2:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32c2
parallel: 7
variables:
IDF_TARGET: esp32c2
TEST_DIR: examples
build_examples_cmake_esp32c3:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32c3
parallel: 9
variables:
IDF_TARGET: esp32c3
TEST_DIR: examples
build_examples_cmake_esp32c6:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32c6
parallel: 11
variables:
IDF_TARGET: esp32c6
TEST_DIR: examples
build_examples_cmake_esp32h2:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32h2
parallel: 9
variables:
IDF_TARGET: esp32h2
TEST_DIR: examples
build_examples_cmake_esp32p4:
extends:
- .build_cmake_template
- .rules:build:example_test-esp32p4
parallel: 4
variables:
IDF_TARGET: esp32p4
TEST_DIR: examples
build_clang_test_apps_esp32:
extends:
- .build_cmake_clang_template
- .rules:build:custom_test-esp32
- .rules:build
variables:
IDF_TARGET: esp32
build_clang_test_apps_esp32s2:
extends:
- .build_cmake_clang_template
- .rules:build:custom_test-esp32s2
- .rules:build
variables:
IDF_TARGET: esp32s2
build_clang_test_apps_esp32s3:
extends:
- .build_cmake_clang_template
- .rules:build:custom_test-esp32s3
- .rules:build
variables:
IDF_TARGET: esp32s3
@ -506,26 +144,29 @@ build_clang_test_apps_esp32s3:
build_clang_test_apps_esp32c3:
extends:
- .build_clang_test_apps_riscv
- .rules:build:custom_test-esp32c3
- .rules:build
variables:
IDF_TARGET: esp32c3
build_clang_test_apps_esp32c2:
extends:
- .build_clang_test_apps_riscv
- .rules:build:custom_test-esp32c2
- .rules:build
variables:
IDF_TARGET: esp32c2
build_clang_test_apps_esp32c6:
extends:
- .build_clang_test_apps_riscv
- .rules:build:custom_test-esp32c6
- .rules:build
# TODO: c6 builds fail in master due to missing headers
allow_failure: true
variables:
IDF_TARGET: esp32c6
######################
# Build System Tests #
######################
.test_build_system_template:
stage: host_test
extends:
@ -634,3 +275,44 @@ build_template_app:
needs:
- job: fast_template_app
artifacts: false
####################
# Dynamic Pipeline #
####################
generate_build_child_pipeline:
extends:
- .build_template
dependencies: # set dependencies to null to avoid missing artifacts issue
needs:
- pipeline_variables
artifacts:
paths:
- build_child_pipeline.yml
- test_related_apps.txt
- non_test_related_apps.txt
script:
- run_cmd python tools/ci/dynamic_pipelines/scripts/generate_build_child_pipeline.py
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
build_child_pipeline:
stage: build
needs:
- job: fast_template_app
artifacts: false
- pipeline_variables
- generate_build_child_pipeline
variables:
IS_MR_PIPELINE: $IS_MR_PIPELINE
MR_MODIFIED_COMPONENTS: $MR_MODIFIED_COMPONENTS
MR_MODIFIED_FILES: $MR_MODIFIED_FILES
PARENT_PIPELINE_ID: $CI_PIPELINE_ID
BUILD_AND_TEST_ALL_APPS: $BUILD_AND_TEST_ALL_APPS
# https://gitlab.com/gitlab-org/gitlab/-/issues/214340
inherit:
variables: false
trigger:
include:
- artifact: build_child_pipeline.yml
job: generate_build_child_pipeline
strategy: depend


@ -85,6 +85,7 @@ variables:
################################################
.common_before_scripts: &common-before_scripts |
source tools/ci/utils.sh
is_based_on_commits $REQUIRED_ANCESTOR_COMMITS
if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
@ -208,6 +209,10 @@ variables:
- export EXTRA_CXXFLAGS=${PEDANTIC_CXXFLAGS}
.after_script:build:ccache:
after_script:
- *show_ccache_statistics
.after_script:build:ccache:upload-when-fail:
after_script:
- *show_ccache_statistics
- *upload_failed_job_log_artifacts


@ -66,43 +66,6 @@
included_in:
- build:check
# ---------------
# Build Test Jobs
# ---------------
"build:{0}-{1}":
matrix:
- *target_test
- *all_targets
labels:
- build
patterns:
- build_components
- build_system
- build_target_test
- downloadable-tools
included_in:
- "build:{0}"
- build:target_test
####################
# Target Test Jobs #
####################
"test:{0}-{1}":
matrix:
- *target_test
- *all_targets
labels: # For each rule, use labels <test_type> and <test_type>-<target>
- "{0}"
- "{0}_{1}"
- target_test
patterns: # For each rule, use patterns <test_type> and build-<test_type>
- "{0}"
- "build-{0}"
included_in: # Parent rules
- "build:{0}"
- "build:{0}-{1}"
- build:target_test
"test:host_test":
labels:
- host_test


@ -289,8 +289,8 @@ test_pytest_qemu:
--pytest-apps
-m qemu
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
- retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
- run_cmd pytest
--target $IDF_TARGET
@ -318,8 +318,8 @@ test_pytest_linux:
--pytest-apps
-m host_test
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
--modified-components ${MODIFIED_COMPONENTS}
--modified-files ${MODIFIED_FILES}
--modified-components ${MR_MODIFIED_COMPONENTS}
--modified-files ${MR_MODIFIED_FILES}
- retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
- run_cmd pytest
--target linux
@ -327,3 +327,16 @@ test_pytest_linux:
--junitxml=XUNIT_RESULT.xml
--ignore-result-files known_failure_cases/known_failure_cases.txt
--app-info-filepattern \"list_job_*.txt\"
test_idf_pytest_plugin:
extends:
- .host_test_template
- .rules:patterns:idf-pytest-plugin
variables:
SUBMODULES_TO_FETCH: "none"
artifacts:
reports:
junit: XUNIT_RESULT.xml
script:
- cd tools/ci/idf_pytest
- pytest --junitxml=${CI_PROJECT_DIR}/XUNIT_RESULT.xml


@ -145,9 +145,22 @@ pipeline_variables:
tags:
- build
script:
- MODIFIED_FILES=$(echo $GIT_DIFF_OUTPUT | xargs)
# MODIFIED_FILES is a list of files that changed, could be used everywhere
- MODIFIED_FILES=$(echo "$GIT_DIFF_OUTPUT" | xargs)
- echo "MODIFIED_FILES=$MODIFIED_FILES" >> pipeline.env
- echo "MODIFIED_COMPONENTS=$(run_cmd python tools/ci/ci_get_mr_info.py components --modified-files $MODIFIED_FILES | xargs)" >> pipeline.env
# MR_MODIFIED_FILES and MR_MODIFIED_COMPONENTS are semicolon separated lists that is used in MR only
# for non MR pipeline, these are empty lists
- |
if [ $IS_MR_PIPELINE == "0" ]; then
echo "MR_MODIFIED_FILES=" >> pipeline.env
echo "MR_MODIFIED_COMPONENTS=" >> pipeline.env
else
MR_MODIFIED_FILES=$(echo "$GIT_DIFF_OUTPUT" | tr '\n' ';')
echo "MR_MODIFIED_FILES=\"$MR_MODIFIED_FILES\"" >> pipeline.env
MR_MODIFIED_COMPONENTS=$(run_cmd python tools/ci/ci_get_mr_info.py components --modified-files $MODIFIED_FILES | tr '\n' ';')
echo "MR_MODIFIED_COMPONENTS=\"$MR_MODIFIED_COMPONENTS\"" >> pipeline.env
fi
- |
if echo "$CI_MERGE_REQUEST_LABELS" | egrep "(^|,)BUILD_AND_TEST_ALL_APPS(,|$)"; then
echo "BUILD_AND_TEST_ALL_APPS=1" >> pipeline.env

File diff suppressed because it is too large


@ -1,894 +0,0 @@
.target_test_template:
image: $TARGET_TEST_ENV_IMAGE
extends:
- .before_script:fetch:target_test
stage: target_test
timeout: 1 hour
dependencies: []
cache:
# Usually do not need submodule-cache in target_test
- key: pip-cache-${LATEST_GIT_TAG}
paths:
- .cache/pip
policy: pull
after_script:
- python tools/ci/artifacts_handler.py upload --type logs junit_reports
.pytest_template:
extends:
- .target_test_template
artifacts:
paths:
- XUNIT_RESULT.xml
- pytest_embedded_log/
reports:
junit: XUNIT_RESULT.xml
script:
- retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
# get runner env config file
- retry_failed git clone $TEST_ENV_CONFIG_REPO
- python $CHECKOUT_REF_SCRIPT ci-test-runner-configs ci-test-runner-configs
# using runner tags as markers to filter the test cases
# Runner tags are comma separated, replace the comma with " and " for markers
- job_tags=$(python tools/ci/python_packages/gitlab_api.py get_job_tags $CI_PROJECT_ID --job_id $CI_JOB_ID)
- markers=$(echo $job_tags | sed -e "s/,/ and /g")
- if [ -n "$BUILD_JOB_NAME" ]; then
job_name=$BUILD_JOB_NAME;
else
job_name=${BUILD_JOB_PREFIX}$(python tools/ci/ci_get_mr_info.py target_in_tags $job_tags);
fi
- run_cmd python tools/ci/artifacts_handler.py download --job-name "$job_name" --type build_dir_without_map_and_elf_files
- if [ -n "$REQUIRES_ELF_FILES" ]; then
run_cmd python tools/ci/artifacts_handler.py download --job-name "$job_name" --type map_and_elf_files;
fi
- run_cmd pytest $TEST_DIR
-m \"${markers}\"
--junitxml=XUNIT_RESULT.xml
--ignore-result-files known_failure_cases/known_failure_cases.txt
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
${PYTEST_EXTRA_FLAGS}
--app-info-filepattern \"list_job_*.txt\"
.pytest_examples_dir_template:
extends: .pytest_template
variables:
TEST_DIR: examples
BUILD_JOB_PREFIX: build_pytest_examples_
.pytest_examples_dir_jtag_template:
extends: .pytest_examples_dir_template
needs:
- job: build_pytest_examples_jtag
artifacts: false
variables:
BUILD_JOB_NAME: build_pytest_examples_jtag
REQUIRES_ELF_FILES: "1"
PYTEST_EXTRA_FLAGS: "--log-cli-level DEBUG"
pytest_examples_esp32_generic:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32
needs:
- job: build_pytest_examples_esp32
artifacts: false
tags: [ esp32, generic ]
parallel: 3
pytest_examples_esp32_esp32eco3:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32
needs:
- job: build_pytest_examples_esp32
artifacts: false
tags: [ esp32, esp32eco3 ]
pytest_examples_esp32_ir_transceiver:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32
needs:
- job: build_pytest_examples_esp32
artifacts: false
tags: [ esp32, ir_transceiver ]
pytest_examples_esp32_twai_transceiver:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32
needs:
- job: build_pytest_examples_esp32
artifacts: false
tags: [ esp32, twai_transceiver ]
pytest_examples_esp32_twai_network:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32
needs:
- job: build_pytest_examples_esp32
artifacts: false
tags: [ esp32, twai_network ]
pytest_examples_esp32_jtag:
extends:
- .pytest_examples_dir_jtag_template
- .rules:test:example_test-esp32
tags: [ esp32, jtag ]
pytest_examples_esp32_ccs811:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32
needs:
- job: build_pytest_examples_esp32
artifacts: false
tags: [ esp32, ccs811 ]
pytest_examples_esp32s2_generic:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32s2
needs:
- job: build_pytest_examples_esp32s2
artifacts: false
tags: [ esp32s2, generic ]
parallel: 3
pytest_examples_esp32s2_jtag:
extends:
- .pytest_examples_dir_jtag_template
- .rules:test:example_test-esp32s2
tags: [ esp32s2, jtag ]
pytest_examples_esp32s3_generic:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32s3
needs:
- job: build_pytest_examples_esp32s3
artifacts: false
tags: [ esp32s3, generic ]
parallel: 3
pytest_examples_esp32s3_usb_serial_jtag:
extends:
- .pytest_examples_dir_jtag_template
- .rules:test:example_test-esp32s3
tags: [ esp32s3, usb_serial_jtag ]
pytest_examples_esp32s3_f4r8:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32s3
needs:
- job: build_pytest_examples_esp32s3
artifacts: false
tags: [ esp32s3, MSPI_F4R8 ]
pytest_examples_esp32c2_generic:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32c2
needs:
- job: build_pytest_examples_esp32c2
artifacts: false
tags: [ esp32c2, generic, xtal_40mhz ]
parallel: 3
pytest_examples_esp32c2_jtag:
extends:
- .pytest_examples_dir_jtag_template
- .rules:test:example_test-esp32c2
tags: [ esp32c2, jtag ]
pytest_examples_esp32c3_generic:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32c3
needs:
- job: build_pytest_examples_esp32c3
artifacts: false
tags: [ esp32c3, generic ]
parallel: 3
pytest_examples_esp32c3_usb_serial_jtag:
extends:
- .pytest_examples_dir_jtag_template
- .rules:test:example_test-esp32c3
tags: [ esp32c3, usb_serial_jtag ]
pytest_examples_esp32c3_flash_suspend:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32c3
needs:
- job: build_pytest_examples_esp32c3
artifacts: false
tags: [ esp32c3, flash_suspend ]
pytest_examples_esp32c6_generic:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32c6
needs:
- job: build_pytest_examples_esp32c6
artifacts: false
tags: [ esp32c6, generic ]
pytest_examples_esp32c6_usj_device:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32c6
needs:
- job: build_pytest_examples_esp32c6
artifacts: false
tags: [ esp32c6, usj_device ]
pytest_examples_esp32h2_generic:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32h2
needs:
- job: build_pytest_examples_esp32h2
artifacts: false
tags: [ esp32h2, generic ]
pytest_examples_esp32p4_generic:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32p4
needs:
- job: build_pytest_examples_esp32p4
artifacts: false
tags: [ esp32p4, generic ]
pytest_examples_esp32_8mb_flash:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32
needs:
- job: build_pytest_examples_esp32
artifacts: false
tags: [ esp32, ethernet_flash_8m ]
pytest_examples_esp32_ethernet_bridge:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32
needs:
- job: build_pytest_examples_esp32
artifacts: false
tags: [ esp32, eth_w5500 ]
variables:
PYTEST_EXTRA_FLAGS: "--dev-passwd ${ETHERNET_TEST_PASSWORD} --dev-user ${ETHERNET_TEST_USER}"
pytest_examples_esp32_flash_encryption:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32
needs:
- job: build_pytest_examples_esp32
artifacts: false
tags: [ esp32, flash_encryption ]
pytest_examples_esp32c3_flash_encryption:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32c3
needs:
- job: build_pytest_examples_esp32c3
artifacts: false
tags: [ esp32c3, flash_encryption ]
pytest_examples_esp32_sdmmc:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32
needs:
- job: build_pytest_examples_esp32
artifacts: false
tags: [ esp32, sdcard_sdmode ]
pytest_examples_esp32_extflash:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32
needs:
- job: build_pytest_examples_esp32
artifacts: false
tags: [ esp32, external_flash ]
pytest_examples_esp32s3_emmc:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32s3
needs:
- job: build_pytest_examples_esp32s3
artifacts: false
tags: [ esp32s3, emmc ]
.pytest_components_dir_template:
extends: .pytest_template
variables:
TEST_DIR: components
BUILD_JOB_PREFIX: build_pytest_components_
pytest_components_esp32_generic:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, generic ]
parallel: 7
pytest_components_esp32_generic_multi_device:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, generic_multi_device ]
pytest_components_esp32_wifi_two_dut:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, wifi_two_dut ]
pytest_components_esp32_sdmmc:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, sdcard ]
pytest_components_esp32s3_sdmmc:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32s3
needs:
- job: build_pytest_components_esp32s3
artifacts: false
tags: [ esp32s3, sdcard ]
pytest_components_esp32_lan8720:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, eth_lan8720 ]
pytest_components_esp32_rtl8201:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, eth_rtl8201 ]
pytest_components_esp32_w5500:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, eth_w5500 ]
pytest_components_esp32_ksz8851snl:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, eth_ksz8851snl ]
pytest_components_esp32_dm9051:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, eth_dm9051 ]
pytest_components_esp32_ksz8041:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, eth_ksz8041 ]
pytest_components_esp32_dp83848:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, eth_dp83848 ]
pytest_components_esp32_ethernet:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, ethernet ]
pytest_components_esp32_flash_encryption:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, flash_encryption ]
pytest_components_esp32_xtal32k:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, xtal32k ]
pytest_components_esp32_no32kXtal:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, no32kXtal ]
pytest_components_esp32_rs485_multi:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, multi_dut_modbus_rs485 ]
pytest_components_esp32_psramv0:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32
needs:
- job: build_pytest_components_esp32
artifacts: false
tags: [ esp32, psramv0 ]
pytest_components_esp32s2_generic:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32s2
needs:
- job: build_pytest_components_esp32s2
artifacts: false
tags: [ esp32s2, generic ]
parallel: 5
pytest_components_esp32s2_generic_multi_device:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32s2
needs:
- job: build_pytest_components_esp32s2
artifacts: false
tags: [ esp32s2, generic_multi_device ]
pytest_components_esp32s3_generic:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32s3
needs:
- job: build_pytest_components_esp32s3
artifacts: false
tags: [ esp32s3, generic ]
parallel: 5
pytest_components_esp32s3_generic_multi_device:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32s3
needs:
- job: build_pytest_components_esp32s3
artifacts: false
tags: [ esp32s3, generic_multi_device ]
pytest_components_esp32s3_octal_psram:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32s3
needs:
- job: build_pytest_components_esp32s3
artifacts: false
tags: [ esp32s3, octal_psram ]
pytest_components_esp32s3_quad_psram:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32s3
needs:
- job: build_pytest_components_esp32s3
artifacts: false
tags: [ esp32s3, quad_psram ]
pytest_components_esp32s3_flash_encryption_f4r8:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32s3
needs:
- job: build_pytest_components_esp32s3
artifacts: false
tags: [ esp32s3, flash_encryption_f4r8 ]
pytest_components_esp32s3_flash_encryption_f8r8:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32s3
needs:
- job: build_pytest_components_esp32s3
artifacts: false
tags: [ esp32s3, flash_encryption_f8r8 ]
pytest_components_esp32s3_mspi_f4r4:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32s3
needs:
- job: build_pytest_components_esp32s3
artifacts: false
tags: [ esp32s3, MSPI_F4R4 ]
pytest_components_esp32s3_mspi_f4r8:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32s3
needs:
- job: build_pytest_components_esp32s3
artifacts: false
tags: [ esp32s3, MSPI_F4R8 ]
pytest_components_esp32s3_mspi_f8r8:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32s3
needs:
- job: build_pytest_components_esp32s3
artifacts: false
tags: [ esp32s3, MSPI_F8R8 ]
pytest_components_esp32s3_usb_serial_jtag:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32s3
needs:
- build_pytest_components_esp32s3
tags: [ esp32s3, usb_serial_jtag ]
pytest_components_esp32c2_generic:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32c2
needs:
- job: build_pytest_components_esp32c2
artifacts: false
tags: [ esp32c2, generic, xtal_40mhz ]
parallel: 3
pytest_components_esp32c2_generic_multi_device:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32c2
needs:
- job: build_pytest_components_esp32c2
artifacts: false
tags: [ esp32c2, generic_multi_device, xtal_40mhz ]
pytest_components_esp32c2_xtal_26mhz:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32c2
needs:
- job: build_pytest_components_esp32c2
artifacts: false
tags: [ esp32c2, generic, xtal_26mhz ]
pytest_components_esp32c3_generic:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32c3
needs:
- job: build_pytest_components_esp32c3
artifacts: false
tags: [ esp32c3, generic ]
parallel: 3
pytest_components_esp32c3_i2c_oled:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32c3
needs:
- job: build_pytest_components_esp32c3
artifacts: false
tags: [ esp32c3, i2c_oled ]
pytest_components_esp32c3_generic_multi_device:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32c3
needs:
- job: build_pytest_components_esp32c3
artifacts: false
tags: [ esp32c3, generic_multi_device ]
pytest_components_esp32c3_usb_serial_jtag:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32c3
needs:
- build_pytest_components_esp32c3
tags: [ esp32c3, usb_serial_jtag ]
pytest_components_esp32c3_flash_encryption:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32c3
needs:
- job: build_pytest_components_esp32c3
artifacts: false
tags: [ esp32c3, flash_encryption ]
pytest_components_esp32c6_generic:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32c6
needs:
- job: build_pytest_components_esp32c6
artifacts: false
tags: [ esp32c6, generic ]
parallel: 2
pytest_components_esp32c6_usb_serial_jtag:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32c6
needs:
- build_pytest_components_esp32c6
tags: [ esp32c6, usb_serial_jtag ]
pytest_components_esp32h2_generic:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32h2
needs:
- job: build_pytest_components_esp32h2
artifacts: false
tags: [ esp32h2, generic ]
parallel: 2
pytest_components_esp32p4_generic:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32p4
needs:
- job: build_pytest_components_esp32p4
artifacts: false
tags: [ esp32p4, generic ]
parallel: 2
pytest_components_esp32h2_generic_multi_device:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32h2
needs:
- job: build_pytest_components_esp32h2
artifacts: false
tags: [ esp32h2, generic_multi_device ]
pytest_components_esp32h2_usb_serial_jtag:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32h2
needs:
- build_pytest_components_esp32h2
tags: [ esp32h2, usb_serial_jtag ]
pytest_components_esp32c6_generic_multi_device:
extends:
- .pytest_components_dir_template
- .rules:test:component_ut-esp32c6
needs:
- job: build_pytest_components_esp32c6
artifacts: false
tags: [ esp32c6, generic_multi_device ]
pytest_examples_openthread_sleep:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32h2
needs:
- job: build_pytest_examples_esp32c6
artifacts: false
- job: build_pytest_examples_esp32h2
artifacts: false
tags: [ esp32c6, openthread_sleep ]
script:
- retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
# get runner env config file
- retry_failed git clone $TEST_ENV_CONFIG_REPO
- python $CHECKOUT_REF_SCRIPT ci-test-runner-configs ci-test-runner-configs
# using runner tags as markers to filter the test cases
# Runner tags are comma separated, replace the comma with " and " for markers
- job_tags=$(python tools/ci/python_packages/gitlab_api.py get_job_tags $CI_PROJECT_ID --job_id $CI_JOB_ID)
- markers=$(echo $job_tags | sed -e "s/,/ and /g")
# download the artifacts, requires c6, h2 chips
- run_cmd python tools/ci/artifacts_handler.py download --job-name "build_pytest_examples_esp32c6"
- run_cmd python tools/ci/artifacts_handler.py download --job-name "build_pytest_examples_esp32h2"
- run_cmd pytest $TEST_DIR
-m \"${markers}\"
--junitxml=XUNIT_RESULT.xml
--ignore-result-files known_failure_cases/known_failure_cases.txt
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
${PYTEST_EXTRA_FLAGS}
--app-info-filepattern \"list_job_*.txt\"
pytest_examples_esp32h2_zigbee:
extends:
- .pytest_examples_dir_template
- .rules:test:example_test-esp32h2
needs:
- job: build_pytest_examples_esp32h2
artifacts: false
tags: [ esp32h2, zigbee_multi_dut ]
.pytest_test_apps_dir_template:
extends: .pytest_template
variables:
TEST_DIR: tools/test_apps
BUILD_JOB_PREFIX: build_pytest_test_apps_
REQUIRES_ELF_FILES: "1"
pytest_test_apps_esp32_generic:
extends:
- .pytest_test_apps_dir_template
- .rules:test:custom_test-esp32
needs:
- job: build_pytest_test_apps_esp32
artifacts: false
tags: [ esp32, generic ]
pytest_test_apps_esp32_jtag:
extends:
- .pytest_test_apps_dir_template
- .rules:test:custom_test-esp32
needs:
- job: build_pytest_test_apps_esp32
artifacts: false
tags: [ esp32, jtag ]
variables:
PYTEST_EXTRA_FLAGS: "--log-cli-level DEBUG"
pytest_test_apps_esp32_ethernet:
extends:
- .pytest_test_apps_dir_template
- .rules:test:custom_test-esp32
needs:
- job: build_pytest_test_apps_esp32
artifacts: false
tags: [ esp32, ethernet ]
pytest_test_apps_esp32s2_generic:
extends:
- .pytest_test_apps_dir_template
- .rules:test:custom_test-esp32s2
needs:
- job: build_pytest_test_apps_esp32s2
artifacts: false
tags: [ esp32s2, generic ]
pytest_test_apps_esp32s3_generic:
extends:
- .pytest_test_apps_dir_template
- .rules:test:custom_test-esp32s3
needs:
- job: build_pytest_test_apps_esp32s3
artifacts: false
tags: [ esp32s3, generic ]
pytest_test_apps_esp32c2_generic:
extends:
- .pytest_test_apps_dir_template
- .rules:test:custom_test-esp32c2
needs:
- job: build_pytest_test_apps_esp32c2
artifacts: false
tags: [ esp32c2, generic, xtal_40mhz ]
pytest_test_apps_esp32c3_generic:
extends:
- .pytest_test_apps_dir_template
- .rules:test:custom_test-esp32c3
needs:
- job: build_pytest_test_apps_esp32c3
artifacts: false
tags: [ esp32c3, generic ]
pytest_test_apps_esp32c6_generic:
extends:
- .pytest_test_apps_dir_template
- .rules:test:custom_test-esp32c6
needs:
- job: build_pytest_test_apps_esp32c6
artifacts: false
tags: [ esp32c6, generic ]
pytest_test_apps_esp32h2_generic:
extends:
- .pytest_test_apps_dir_template
- .rules:test:custom_test-esp32h2
needs:
- job: build_pytest_test_apps_esp32h2
artifacts: false
tags: [ esp32h2, generic ]
pytest_test_apps_esp32p4_generic:
extends:
- .pytest_test_apps_dir_template
- .rules:test:custom_test-esp32p4
needs:
- job: build_pytest_test_apps_esp32p4
artifacts: false
tags: [ esp32p4, generic ]
pytest_test_apps_esp32s3_mspi_f8r8:
extends:
- .pytest_test_apps_dir_template
- .rules:test:custom_test-esp32s3
needs:
- job: build_pytest_test_apps_esp32s3
artifacts: false
tags: [ esp32s3, MSPI_F8R8 ]
pytest_test_apps_esp32s3_mspi_f4r8:
extends:
- .pytest_test_apps_dir_template
- .rules:test:custom_test-esp32s3
needs:
- job: build_pytest_test_apps_esp32s3
artifacts: false
tags: [ esp32s3, MSPI_F4R8 ]
pytest_test_apps_esp32s3_mspi_f4r4:
extends:
- .pytest_test_apps_dir_template
- .rules:test:custom_test-esp32s3
needs:
- job: build_pytest_test_apps_esp32s3
artifacts: false
tags: [ esp32s3, MSPI_F4R4 ]


@ -95,7 +95,7 @@ repos:
name: Check rules are generated (based on .gitlab/ci/dependencies/dependencies.yml)
entry: tools/ci/generate_rules.py
language: python
files: '\.gitlab/ci/dependencies/.+|\.gitlab/ci/.*\.yml'
files: '\.gitlab/ci/dependencies/.+|\.gitlab/ci/.*\.yml|.gitlab-ci.yml'
pass_filenames: false
require_serial: true
additional_dependencies:
@ -108,6 +108,7 @@ repos:
- 'mypy-extensions==0.4.3'
- 'types-setuptools==57.4.14'
- 'types-PyYAML==0.1.9'
- 'types-requests'
exclude: >
(?x)^(
.*_pb2.py
@ -154,7 +155,7 @@ repos:
require_serial: true
additional_dependencies:
- PyYAML == 5.3.1
- idf_build_apps~=1.0
- idf-build-apps~=2.0.0rc1
- id: sort-build-test-rules-ymls
name: sort .build-test-rules.yml files
entry: tools/ci/check_build_test_rules.py sort-yaml


@ -1,2 +1,2 @@
| Supported Targets | ESP32 | ESP32-C2 | ESP32-C3 | ESP32-C6 | ESP32-H2 | ESP32-P4 | ESP32-S2 | ESP32-S3 |
| ----------------- | ----- | -------- | -------- | -------- | -------- | -------- | -------- | -------- |
| Supported Targets | ESP32 | ESP32-C2 | ESP32-C3 | ESP32-C6 | ESP32-H2 | ESP32-S2 | ESP32-S3 |
| ----------------- | ----- | -------- | -------- | -------- | -------- | -------- | -------- |


@ -1,53 +1,54 @@
# SPDX-FileCopyrightText: 2021-2023 Espressif Systems (Shanghai) CO LTD
# SPDX-FileCopyrightText: 2021-2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
# pylint: disable=W0621 # redefined-outer-name
# This file is a pytest root configuration file and provide the following functionalities:
# 1. Defines a few fixtures that could be used under the whole project.
# 2. Defines a few hook functions.
#
# IDF is using [pytest](https://github.com/pytest-dev/pytest) and
# [pytest-embedded plugin](https://github.com/espressif/pytest-embedded) as its example test framework.
# [pytest-embedded plugin](https://github.com/espressif/pytest-embedded) as its test framework.
#
# This is an experimental feature, and if you found any bug or have any question, please report to
# https://github.com/espressif/pytest-embedded/issues
# if you found any bug or have any question,
# please report to https://github.com/espressif/pytest-embedded/issues
# or discuss at https://github.com/espressif/pytest-embedded/discussions
import os
import sys
import gitlab
if os.path.join(os.path.dirname(__file__), 'tools', 'ci') not in sys.path:
sys.path.append(os.path.join(os.path.dirname(__file__), 'tools', 'ci'))
if os.path.join(os.path.dirname(__file__), 'tools', 'ci', 'python_packages') not in sys.path:
sys.path.append(os.path.join(os.path.dirname(__file__), 'tools', 'ci', 'python_packages'))
import glob
import json
import io
import logging
import os
import re
import sys
import typing as t
import zipfile
from copy import deepcopy
from datetime import datetime
import common_test_methods # noqa: F401
import gitlab_api
import pytest
import requests
import yaml
from _pytest.config import Config
from _pytest.fixtures import FixtureRequest
from artifacts_handler import ArtifactType
from dynamic_pipelines.constants import TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME
from idf_ci.app import import_apps_from_txt
from idf_ci.uploader import AppUploader
from idf_ci_utils import IDF_PATH
from idf_pytest.constants import DEFAULT_SDKCONFIG, ENV_MARKERS, SPECIAL_MARKERS, TARGET_MARKERS, PytestCase
from idf_pytest.plugin import IDF_PYTEST_EMBEDDED_KEY, ITEM_PYTEST_CASE_KEY, IdfPytestEmbedded
from idf_pytest.utils import format_case_id
from pytest_embedded.plugin import multi_dut_argument, multi_dut_fixture
from pytest_embedded_idf.dut import IdfDut
from pytest_embedded_idf.unity_tester import CaseTester
try:
from idf_ci_utils import IDF_PATH
from idf_pytest.constants import DEFAULT_SDKCONFIG, ENV_MARKERS, SPECIAL_MARKERS, TARGET_MARKERS
from idf_pytest.plugin import IDF_PYTEST_EMBEDDED_KEY, IdfPytestEmbedded
from idf_pytest.utils import format_case_id
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), 'tools', 'ci'))
from idf_ci_utils import IDF_PATH
from idf_pytest.constants import DEFAULT_SDKCONFIG, ENV_MARKERS, SPECIAL_MARKERS, TARGET_MARKERS
from idf_pytest.plugin import IDF_PYTEST_EMBEDDED_KEY, IdfPytestEmbedded
from idf_pytest.utils import format_case_id
try:
import common_test_methods # noqa: F401
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), 'tools', 'ci', 'python_packages'))
import common_test_methods # noqa: F401
############
# Fixtures #
@ -100,9 +101,91 @@ def test_case_name(request: FixtureRequest, target: str, config: str) -> str:
return format_case_id(target, config, request.node.originalname, is_qemu=is_qemu, params=filtered_params) # type: ignore
@pytest.fixture(scope='session')
def pipeline_id(request: FixtureRequest) -> t.Optional[str]:
return request.config.getoption('pipeline_id', None) or os.getenv('PARENT_PIPELINE_ID', None) # type: ignore
class BuildReportDownloader:
def __init__(self, presigned_url_yaml: str) -> None:
self.app_presigned_urls_dict: t.Dict[str, t.Dict[str, str]] = yaml.safe_load(presigned_url_yaml)
def download_app(
self, app_build_path: str, artifact_type: ArtifactType = ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES
) -> None:
url = self.app_presigned_urls_dict[app_build_path][artifact_type.value]
logging.debug('Downloading app from %s', url)
with io.BytesIO() as f:
for chunk in requests.get(url).iter_content(chunk_size=1024 * 1024):
if chunk:
f.write(chunk)
f.seek(0)
with zipfile.ZipFile(f) as zip_ref:
zip_ref.extractall()
@pytest.fixture(scope='session')
def app_downloader(pipeline_id: t.Optional[str]) -> t.Union[AppUploader, BuildReportDownloader, None]:
if not pipeline_id:
return None
if (
'IDF_S3_BUCKET' in os.environ
and 'IDF_S3_ACCESS_KEY' in os.environ
and 'IDF_S3_SECRET_KEY' in os.environ
and 'IDF_S3_SERVER' in os.environ
and 'IDF_S3_BUCKET' in os.environ
):
return AppUploader(pipeline_id)
logging.info('Downloading build report from the build pipeline %s', pipeline_id)
test_app_presigned_urls_file = None
try:
gl = gitlab_api.Gitlab(os.getenv('CI_PROJECT_ID', 'espressif/esp-idf'))
except gitlab.exceptions.GitlabAuthenticationError:
msg = """To download artifacts from gitlab, please create ~/.python-gitlab.cfg with the following content:
[global]
default = internal
ssl_verify = true
timeout = 5
[internal]
url = <OUR INTERNAL HTTPS SERVER URL>
private_token = <YOUR PERSONAL ACCESS TOKEN>
api_version = 4
"""
raise SystemExit(msg)
for child_pipeline in gl.project.pipelines.get(pipeline_id, lazy=True).bridges.list(iterator=True):
if child_pipeline.name == 'build_child_pipeline':
for job in gl.project.pipelines.get(child_pipeline.downstream_pipeline['id'], lazy=True).jobs.list(
iterator=True
):
if job.name == 'generate_pytest_build_report':
test_app_presigned_urls_file = gl.download_artifact(
job.id, [TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME]
)[0]
break
if test_app_presigned_urls_file:
return BuildReportDownloader(test_app_presigned_urls_file)
return None
@pytest.fixture
@multi_dut_fixture
def build_dir(app_path: str, target: t.Optional[str], config: t.Optional[str]) -> str:
def build_dir(
request: FixtureRequest,
app_path: str,
target: t.Optional[str],
config: t.Optional[str],
app_downloader: t.Optional[AppUploader],
) -> str:
"""
Check local build dir with the following priority:
@ -114,14 +197,25 @@ def build_dir(app_path: str, target: t.Optional[str], config: t.Optional[str]) -
Returns:
valid build directory
"""
check_dirs = []
if target is not None and config is not None:
check_dirs.append(f'build_{target}_{config}')
if target is not None:
check_dirs.append(f'build_{target}')
if config is not None:
check_dirs.append(f'build_{config}')
check_dirs.append('build')
# download from minio on CI
case: PytestCase = request._pyfuncitem.stash[ITEM_PYTEST_CASE_KEY]
if app_downloader:
# somehow hardcoded...
app_build_path = os.path.join(os.path.relpath(app_path, IDF_PATH), f'build_{target}_{config}')
if case.requires_elf_or_map:
app_downloader.download_app(app_build_path)
else:
app_downloader.download_app(app_build_path, ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES)
check_dirs = [f'build_{target}_{config}']
else:
check_dirs = []
if target is not None and config is not None:
check_dirs.append(f'build_{target}_{config}')
if target is not None:
check_dirs.append(f'build_{target}')
if config is not None:
check_dirs.append(f'build_{config}')
check_dirs.append('build')
for check_dir in check_dirs:
binary_path = os.path.join(app_path, check_dir)
@ -145,6 +239,13 @@ def junit_properties(test_case_name: str, record_xml_attribute: t.Callable[[str,
record_xml_attribute('name', test_case_name)
@pytest.fixture(autouse=True)
@multi_dut_fixture
def ci_job_url(record_xml_attribute: t.Callable[[str, object], None]) -> None:
if ci_job_url := os.getenv('CI_JOB_URL'):
record_xml_attribute('ci_job_url', ci_job_url)
@pytest.fixture(autouse=True)
def set_test_case_name(request: FixtureRequest, test_case_name: str) -> None:
request.node.funcargs['test_case_name'] = test_case_name
@ -247,12 +348,12 @@ def log_minimum_free_heap_size(dut: IdfDut, config: str) -> t.Callable[..., None
return real_func
@pytest.fixture
@pytest.fixture(scope='session')
def dev_password(request: FixtureRequest) -> str:
return request.config.getoption('dev_passwd') or ''
@pytest.fixture
@pytest.fixture(scope='session')
def dev_user(request: FixtureRequest) -> str:
return request.config.getoption('dev_user') or ''
@ -274,18 +375,17 @@ def pytest_addoption(parser: pytest.Parser) -> None:
'--dev-passwd',
help='password associated with some specific device/service used during the test execution',
)
idf_group.addoption(
'--app-info-basedir',
default=IDF_PATH,
help='app info base directory. specify this value when you\'re building under a '
'different IDF_PATH. (Default: $IDF_PATH)',
)
idf_group.addoption(
'--app-info-filepattern',
help='glob pattern to specify the files that include built app info generated by '
'`idf-build-apps --collect-app-info ...`. will not raise ValueError when binary '
'paths not exist in local file system if not listed recorded in the app info.',
)
idf_group.addoption(
'--pipeline-id',
help='main pipeline id, not the child pipeline id. Specify this option to download the artifacts '
'from the minio server for debugging purpose.',
)
def pytest_configure(config: Config) -> None:
@ -325,32 +425,16 @@
"""
)
apps_list = None
apps = None
app_info_basedir = config.getoption('app_info_basedir')
app_info_filepattern = config.getoption('app_info_filepattern')
if app_info_filepattern:
apps_list = []
apps = []
for file in glob.glob(os.path.join(IDF_PATH, app_info_filepattern)):
for f in glob.glob(os.path.join(IDF_PATH, app_info_filepattern)):
apps.extend(import_apps_from_txt(f))
with open(file) as fr:
for line in fr.readlines():
if not line.strip():
continue
# each line is a valid json
app_info = json.loads(line.strip())
if app_info_basedir and app_info['app_dir'].startswith(app_info_basedir):
relative_app_dir = os.path.relpath(app_info['app_dir'], app_info_basedir)
apps_list.append(os.path.join(IDF_PATH, os.path.join(relative_app_dir, app_info['build_dir'])))
print('Detected app: ', apps_list[-1])
else:
print(
f'WARNING: app_info base dir {app_info_basedir} not recognizable in {app_info["app_dir"]}, skipping...'
)
continue
config.stash[IDF_PYTEST_EMBEDDED_KEY] = IdfPytestEmbedded(
target=target,
apps_list=apps_list,
apps=apps,
)
config.pluginmanager.register(config.stash[IDF_PYTEST_EMBEDDED_KEY])


@ -12,6 +12,7 @@ from pathlib import Path
from zipfile import ZipFile
import urllib3
from idf_pytest.constants import DEFAULT_BUILD_LOG_FILENAME
from minio import Minio
@ -33,7 +34,7 @@ TYPE_PATTERNS_DICT = {
'**/build*/*.elf',
],
ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES: [
'**/build*/build_log.txt',
f'**/build*/{DEFAULT_BUILD_LOG_FILENAME}',
'**/build*/*.bin',
'**/build*/bootloader/*.bin',
'**/build*/partition_table/*.bin',
@ -41,17 +42,17 @@
'**/build*/flash_project_args',
'**/build*/config/sdkconfig.json',
'**/build*/project_description.json',
'list_job_*.txt',
'list_job*.txt',
],
ArtifactType.LOGS: [
'**/build*/build_log.txt',
f'**/build*/{DEFAULT_BUILD_LOG_FILENAME}',
],
ArtifactType.SIZE_REPORTS: [
'**/build*/size.json',
'size_info.txt',
],
ArtifactType.JUNIT_REPORTS: [
'XUNIT_RESULT.xml',
'XUNIT_RESULT*.xml',
],
ArtifactType.MODIFIED_FILES_AND_COMPONENTS_REPORT: [
'pipeline.env',
@ -66,6 +67,23 @@ def getenv(env_var: str) -> str:
raise Exception(f'Environment variable {env_var} not set') from e raise Exception(f'Environment variable {env_var} not set') from e
def get_minio_client() -> Minio:
return Minio(
getenv('IDF_S3_SERVER').replace('https://', ''),
access_key=getenv('IDF_S3_ACCESS_KEY'),
secret_key=getenv('IDF_S3_SECRET_KEY'),
http_client=urllib3.PoolManager(
num_pools=10,
timeout=urllib3.Timeout.DEFAULT_TIMEOUT,
retries=urllib3.Retry(
total=5,
backoff_factor=0.2,
status_forcelist=[500, 502, 503, 504],
),
),
)
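# Usage sketch (illustration only, not part of this file): list the artifact objects
# stored for one main pipeline, assuming the IDF_S3_* environment variables are set
# and 123456 is a hypothetical pipeline id (object names are prefixed with the pipeline id).
from artifacts_handler import get_minio_client, getenv

client = get_minio_client()
for obj in client.list_objects(getenv('IDF_S3_BUCKET'), prefix='123456/', recursive=True):
    print(obj.object_name, obj.size)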
def _download_files( def _download_files(
pipeline_id: int, pipeline_id: int,
*, *,
@ -131,7 +149,7 @@ def _upload_files(
try: try:
if has_file: if has_file:
obj_name = f'{pipeline_id}/{artifact_type.value}/{job_name.split(" ")[0]}/{job_id}.zip' obj_name = f'{pipeline_id}/{artifact_type.value}/{job_name.rsplit(" ", maxsplit=1)[0]}/{job_id}.zip'
print(f'Created archive file: {job_id}.zip, uploading as {obj_name}') print(f'Created archive file: {job_id}.zip, uploading as {obj_name}')
client.fput_object(getenv('IDF_S3_BUCKET'), obj_name, f'{job_id}.zip') client.fput_object(getenv('IDF_S3_BUCKET'), obj_name, f'{job_id}.zip')
@ -168,19 +186,7 @@ if __name__ == '__main__':
args = parser.parse_args() args = parser.parse_args()
client = Minio( client = get_minio_client()
getenv('IDF_S3_SERVER').replace('https://', ''),
access_key=getenv('IDF_S3_ACCESS_KEY'),
secret_key=getenv('IDF_S3_SECRET_KEY'),
http_client=urllib3.PoolManager(
timeout=urllib3.Timeout.DEFAULT_TIMEOUT,
retries=urllib3.Retry(
total=5,
backoff_factor=0.2,
status_forcelist=[500, 502, 503, 504],
),
),
)
ci_pipeline_id = args.pipeline_id or getenv('CI_PIPELINE_ID') # required ci_pipeline_id = args.pipeline_id or getenv('CI_PIPELINE_ID') # required
if args.action == 'download': if args.action == 'download':

View File

@ -13,7 +13,7 @@ from pathlib import Path
from typing import Dict, List, Optional, Tuple from typing import Dict, List, Optional, Tuple
import yaml import yaml
from idf_ci_utils import IDF_PATH from idf_ci_utils import IDF_PATH, get_all_manifest_files
YES = u'\u2713' YES = u'\u2713'
NO = u'\u2717' NO = u'\u2717'
@ -148,9 +148,7 @@ def check_readme(
'all', 'all',
recursive=True, recursive=True,
exclude_list=exclude_dirs or [], exclude_list=exclude_dirs or [],
manifest_files=[ manifest_files=get_all_manifest_files(),
str(p) for p in Path(IDF_PATH).glob('**/.build-test-rules.yml')
],
default_build_targets=SUPPORTED_TARGETS + extra_default_build_targets, default_build_targets=SUPPORTED_TARGETS + extra_default_build_targets,
) )
) )
@ -304,9 +302,7 @@ def check_test_scripts(
'all', 'all',
recursive=True, recursive=True,
exclude_list=exclude_dirs or [], exclude_list=exclude_dirs or [],
manifest_files=[ manifest_files=get_all_manifest_files(),
str(p) for p in Path(IDF_PATH).glob('**/.build-test-rules.yml')
],
default_build_targets=SUPPORTED_TARGETS + extra_default_build_targets, default_build_targets=SUPPORTED_TARGETS + extra_default_build_targets,
) )
) )
@ -382,7 +378,7 @@ def sort_yaml(files: List[str]) -> None:
def check_exist() -> None: def check_exist() -> None:
exit_code = 0 exit_code = 0
config_files = [str(p) for p in Path(IDF_PATH).glob('**/.build-test-rules.yml')] config_files = get_all_manifest_files()
for file in config_files: for file in config_files:
if 'managed_components' in Path(file).parts: if 'managed_components' in Path(file).parts:
continue continue

View File

@ -39,7 +39,7 @@ def check(pattern_yml: str, exclude_list: str) -> Tuple[Set, Set]:
git_files = get_git_files(os.path.join(IDF_PATH, 'tools'), full_path=True) git_files = get_git_files(os.path.join(IDF_PATH, 'tools'), full_path=True)
for f in git_files: for f in git_files:
f = Path(f) f = Path(f)
if f in rules_files_set or f in exclude_files_set: if f in rules_files_set or f in exclude_files_set or str(f).startswith(os.path.join(IDF_PATH, 'tools', 'test_apps')):
continue continue
missing_files.add(os.path.relpath(f, IDF_PATH)) missing_files.add(os.path.relpath(f, IDF_PATH))

View File

@ -10,13 +10,15 @@ import os
import sys import sys
import typing as t import typing as t
import unittest import unittest
from collections import defaultdict
from pathlib import Path from pathlib import Path
import yaml import yaml
from idf_build_apps import LOGGER, App, build_apps, find_apps, setup_logging from dynamic_pipelines.constants import DEFAULT_TEST_PATHS
from idf_build_apps.constants import SUPPORTED_TARGETS from idf_build_apps import build_apps, setup_logging
from idf_ci_utils import IDF_PATH from idf_build_apps.utils import semicolon_separated_str_to_list
from idf_pytest.constants import (DEFAULT_BUILD_TEST_RULES_FILEPATH, DEFAULT_CONFIG_RULES_STR,
DEFAULT_FULL_BUILD_TEST_FILEPATTERNS, DEFAULT_IGNORE_WARNING_FILEPATH)
from idf_pytest.script import get_all_apps
CI_ENV_VARS = { CI_ENV_VARS = {
'EXTRA_CFLAGS': '-Werror -Werror=deprecated-declarations -Werror=unused-variable ' 'EXTRA_CFLAGS': '-Werror -Werror=deprecated-declarations -Werror=unused-variable '
@ -27,118 +29,6 @@ CI_ENV_VARS = {
} }
def get_pytest_apps(
paths: t.List[str],
target: str,
config_rules_str: t.List[str],
marker_expr: str,
filter_expr: str,
preserve_all: bool = False,
extra_default_build_targets: t.Optional[t.List[str]] = None,
modified_components: t.Optional[t.List[str]] = None,
modified_files: t.Optional[t.List[str]] = None,
ignore_app_dependencies_filepatterns: t.Optional[t.List[str]] = None,
) -> t.List[App]:
from idf_pytest.script import get_pytest_cases
pytest_cases = get_pytest_cases(paths, target, marker_expr, filter_expr)
_paths: t.Set[str] = set()
test_related_app_configs = defaultdict(set)
for case in pytest_cases:
for app in case.apps:
_paths.add(app.path)
test_related_app_configs[app.path].add(app.config)
if not extra_default_build_targets:
extra_default_build_targets = []
app_dirs = list(_paths)
if not app_dirs:
raise RuntimeError('No apps found')
LOGGER.info(f'Found {len(app_dirs)} apps')
app_dirs.sort()
apps = find_apps(
app_dirs,
target=target,
build_dir='build_@t_@w',
config_rules_str=config_rules_str,
build_log_path='build_log.txt',
size_json_path='size.json',
check_warnings=True,
manifest_rootpath=IDF_PATH,
manifest_files=[str(p) for p in Path(IDF_PATH).glob('**/.build-test-rules.yml')],
default_build_targets=SUPPORTED_TARGETS + extra_default_build_targets,
modified_components=modified_components,
modified_files=modified_files,
ignore_app_dependencies_filepatterns=ignore_app_dependencies_filepatterns,
)
for app in apps:
is_test_related = app.config_name in test_related_app_configs[app.app_dir]
if not preserve_all and not is_test_related:
app.preserve = False
if app.target == 'linux':
app._size_json_path = None # no esp_idf_size for linux target
return apps # type: ignore
def get_cmake_apps(
paths: t.List[str],
target: str,
config_rules_str: t.List[str],
preserve_all: bool = False,
extra_default_build_targets: t.Optional[t.List[str]] = None,
modified_components: t.Optional[t.List[str]] = None,
modified_files: t.Optional[t.List[str]] = None,
ignore_app_dependencies_filepatterns: t.Optional[t.List[str]] = None,
) -> t.List[App]:
from idf_pytest.constants import PytestApp
from idf_pytest.script import get_pytest_cases
apps = find_apps(
paths,
recursive=True,
target=target,
build_dir='build_@t_@w',
config_rules_str=config_rules_str,
build_log_path='build_log.txt',
size_json_path='size.json',
check_warnings=True,
preserve=False,
manifest_rootpath=IDF_PATH,
manifest_files=[str(p) for p in Path(IDF_PATH).glob('**/.build-test-rules.yml')],
default_build_targets=SUPPORTED_TARGETS + extra_default_build_targets,
modified_components=modified_components,
modified_files=modified_files,
ignore_app_dependencies_filepatterns=ignore_app_dependencies_filepatterns,
)
apps_for_build = []
pytest_cases_apps = [app for case in get_pytest_cases(paths, target) for app in case.apps]
for app in apps:
if preserve_all: # relpath
app.preserve = True
if PytestApp(os.path.realpath(app.app_dir), app.target, app.config_name) in pytest_cases_apps:
LOGGER.debug('Skipping build app with pytest scripts: %s', app)
continue
if app.target == 'linux':
app._size_json_path = None # no esp_idf_size for linux target
apps_for_build.append(app)
return apps_for_build
APPS_BUILD_PER_JOB = 30
def main(args: argparse.Namespace) -> None: def main(args: argparse.Namespace) -> None:
extra_default_build_targets: t.List[str] = [] extra_default_build_targets: t.List[str] = []
if args.default_build_test_rules: if args.default_build_test_rules:
@ -148,39 +38,24 @@ def main(args: argparse.Namespace) -> None:
if configs: if configs:
extra_default_build_targets = configs.get('extra_default_build_targets') or [] extra_default_build_targets = configs.get('extra_default_build_targets') or []
if args.pytest_apps: test_related_apps, non_test_related_apps = get_all_apps(
LOGGER.info('Only build apps with pytest scripts') args.paths,
apps = get_pytest_apps( args.target,
args.paths, config_rules_str=args.config,
args.target, marker_expr=args.marker_expr,
args.config, filter_expr=args.filter_expr,
args.marker_expr, preserve_all=args.preserve_all,
args.filter_expr, extra_default_build_targets=extra_default_build_targets,
args.preserve_all, modified_files=args.modified_files,
extra_default_build_targets, modified_components=args.modified_components,
args.modified_components, ignore_app_dependencies_filepatterns=args.ignore_app_dependencies_filepatterns,
args.modified_files,
args.ignore_app_dependencies_filepatterns,
)
else:
LOGGER.info('build apps. will skip pytest apps with pytest scripts')
apps = get_cmake_apps(
args.paths,
args.target,
args.config,
args.preserve_all,
extra_default_build_targets,
args.modified_components,
args.modified_files,
args.ignore_app_dependencies_filepatterns,
)
LOGGER.info('Found %d apps after filtering', len(apps))
LOGGER.info(
'Suggest setting the parallel count to %d for this build job',
len(apps) // APPS_BUILD_PER_JOB + 1,
) )
if args.pytest_apps:
apps = test_related_apps
else:
apps = non_test_related_apps
if args.extra_preserve_dirs: if args.extra_preserve_dirs:
for app in apps: for app in apps:
if app.preserve: if app.preserve:
@ -192,7 +67,7 @@ def main(args: argparse.Namespace) -> None:
app.preserve = True app.preserve = True
res = build_apps( res = build_apps(
apps, sorted(apps),
parallel_count=args.parallel_count, parallel_count=args.parallel_count,
parallel_index=args.parallel_index, parallel_index=args.parallel_index,
dry_run=False, dry_run=False,
@ -206,12 +81,10 @@ def main(args: argparse.Namespace) -> None:
modified_components=args.modified_components, modified_components=args.modified_components,
modified_files=args.modified_files, modified_files=args.modified_files,
ignore_app_dependencies_filepatterns=args.ignore_app_dependencies_filepatterns, ignore_app_dependencies_filepatterns=args.ignore_app_dependencies_filepatterns,
junitxml=args.junitxml,
) )
if isinstance(res, tuple): sys.exit(res)
sys.exit(res[0])
else:
sys.exit(res)
if __name__ == '__main__': if __name__ == '__main__':
@ -219,7 +92,7 @@ if __name__ == '__main__':
description='Build all the apps for different test types. Will auto remove those non-test apps binaries', description='Build all the apps for different test types. Will auto remove those non-test apps binaries',
formatter_class=argparse.ArgumentDefaultsHelpFormatter, formatter_class=argparse.ArgumentDefaultsHelpFormatter,
) )
parser.add_argument('paths', nargs='+', help='Paths to the apps to build.') parser.add_argument('paths', nargs='*', help='Paths to the apps to build.')
parser.add_argument( parser.add_argument(
'-t', '-t',
'--target', '--target',
@ -228,7 +101,7 @@ if __name__ == '__main__':
) )
parser.add_argument( parser.add_argument(
'--config', '--config',
default=['sdkconfig.ci=default', 'sdkconfig.ci.*=', '=default'], default=DEFAULT_CONFIG_RULES_STR,
nargs='+', nargs='+',
help='Adds configurations (sdkconfig file names) to build. This can either be ' help='Adds configurations (sdkconfig file names) to build. This can either be '
'FILENAME[=NAME] or FILEPATTERN. FILENAME is the name of the sdkconfig file, ' 'FILENAME[=NAME] or FILEPATTERN. FILENAME is the name of the sdkconfig file, '
@ -272,7 +145,7 @@ if __name__ == '__main__':
) )
parser.add_argument( parser.add_argument(
'--ignore-warning-file', '--ignore-warning-file',
default=os.path.join(IDF_PATH, 'tools', 'ci', 'ignore_build_warnings.txt'), default=DEFAULT_IGNORE_WARNING_FILEPATH,
type=argparse.FileType('r'), type=argparse.FileType('r'),
help='Ignore the warning strings in the specified file. Each line should be a regex string.', help='Ignore the warning strings in the specified file. Each line should be a regex string.',
) )
@ -290,7 +163,8 @@ if __name__ == '__main__':
parser.add_argument( parser.add_argument(
'--pytest-apps', '--pytest-apps',
action='store_true', action='store_true',
help='Only build apps with pytest scripts. Will build apps without pytest scripts if this flag is unspecified.', help='Only build apps required by pytest scripts. '
'Will build non-test-related apps if this flag is unspecified.',
) )
parser.add_argument( parser.add_argument(
'-m', '-m',
@ -307,7 +181,7 @@ if __name__ == '__main__':
) )
parser.add_argument( parser.add_argument(
'--default-build-test-rules', '--default-build-test-rules',
default=os.path.join(IDF_PATH, '.gitlab', 'ci', 'default-build-test-rules.yml'), default=DEFAULT_BUILD_TEST_RULES_FILEPATH,
help='default build test rules config file', help='default build test rules config file',
) )
parser.add_argument( parser.add_argument(
@ -318,69 +192,64 @@ if __name__ == '__main__':
) )
parser.add_argument( parser.add_argument(
'--modified-components', '--modified-components',
nargs='*', type=semicolon_separated_str_to_list,
default=None, help='semicolon-separated string which specifies the modified components. '
help='space-separated list which specifies the modified components. app with `depends_components` set in the ' 'an app with `depends_components` set in the corresponding manifest file will only be built '
'corresponding manifest files would only be built if depends on any of the specified components.', 'if it depends on any of the specified components. '
'If set to "", the value would be considered as None. '
'If set to ";", the value would be considered as an empty list',
) )
parser.add_argument( parser.add_argument(
'--modified-files', '--modified-files',
nargs='*', type=semicolon_separated_str_to_list,
default=None, help='semicolon-separated string which specifies the modified files. '
help='space-separated list which specifies the modified files. app with `depends_filepatterns` set in the ' 'app with `depends_filepatterns` set in the corresponding manifest files would only be built '
'corresponding manifest files would only be built if any of the specified file pattern matches any of the ' 'if any of the specified file pattern matches any of the specified modified files. '
'specified modified files.', 'If set to "", the value would be considered as None. '
'If set to ";", the value would be considered as an empty list',
) )
parser.add_argument( parser.add_argument(
'-if', '-if',
'--ignore-app-dependencies-filepatterns', '--ignore-app-dependencies-filepatterns',
nargs='*', type=semicolon_separated_str_to_list,
default=None, help='semicolon-separated string which specifies the file patterns used for '
help='space-separated list which specifies the file patterns used for ignoring checking the app dependencies. ' 'skipping the app dependency checks. '
'The `depends_components` and `depends_filepatterns` set in the manifest files will be ignored when any of the ' 'The `depends_components` and `depends_filepatterns` set in the manifest files '
'specified file patterns matches any of the modified files. Must be used together with --modified-files', 'will be ignored when any of the specified file patterns matches any of the modified files. '
'Must be used together with --modified-files. '
'If set to "", the value would be considered as None. '
'If set to ";", the value would be considered as an empty list',
)
parser.add_argument(
'--junitxml',
default='build_summary_@p.xml',
help='Path to the junitxml file. If specified, the junitxml file will be generated',
) )
arguments = parser.parse_args() arguments = parser.parse_args()
setup_logging(arguments.verbose) setup_logging(arguments.verbose)
# set default paths
if not arguments.paths:
arguments.paths = DEFAULT_TEST_PATHS
# skip setting flags in CI # skip setting flags in CI
if not arguments.skip_setting_flags and not os.getenv('CI_JOB_ID'): if not arguments.skip_setting_flags and not os.getenv('CI_JOB_ID'):
for _k, _v in CI_ENV_VARS.items(): for _k, _v in CI_ENV_VARS.items():
os.environ[_k] = _v os.environ[_k] = _v
LOGGER.info(f'env var {_k} set to "{_v}"') print(f'env var {_k} set to "{_v}"')
if os.getenv('IS_MR_PIPELINE') == '0' or os.getenv('BUILD_AND_TEST_ALL_APPS') == '1': if os.getenv('IS_MR_PIPELINE') == '0' or os.getenv('BUILD_AND_TEST_ALL_APPS') == '1':
# if it's not MR pipeline or env var BUILD_AND_TEST_ALL_APPS=1, # if it's not MR pipeline or env var BUILD_AND_TEST_ALL_APPS=1,
# remove component dependency related arguments # remove component dependency related arguments
if 'modified_components' in arguments: arguments.modified_components = None
arguments.modified_components = None arguments.modified_files = None
if 'modified_files' in arguments: arguments.ignore_app_dependencies_filepatterns = None
arguments.modified_files = None
# file patterns to trigger full build # default file patterns to trigger full build
if 'modified_components' in arguments and not arguments.ignore_app_dependencies_filepatterns: if arguments.modified_files is not None and arguments.ignore_app_dependencies_filepatterns is None:
arguments.ignore_app_dependencies_filepatterns = [ arguments.ignore_app_dependencies_filepatterns = DEFAULT_FULL_BUILD_TEST_FILEPATTERNS
# tools
'tools/cmake/**/*',
'tools/tools.json',
# components
'components/cxx/**/*',
'components/esp_common/**/*',
'components/esp_hw_support/**/*',
'components/esp_rom/**/*',
'components/esp_system/**/*',
'components/esp_timer/**/*',
'components/freertos/**/*',
'components/hal/**/*',
'components/heap/**/*',
'components/log/**/*',
'components/newlib/**/*',
'components/riscv/**/*',
'components/soc/**/*',
'components/xtensa/**/*',
]
main(arguments) main(arguments)
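Since several of the arguments above now go through `semicolon_separated_str_to_list`, a small illustration of the documented semantics may help (a sketch only; the real helper ships with idf-build-apps 2.x and its behavior is authoritative):

from idf_build_apps.utils import semicolon_separated_str_to_list

print(semicolon_separated_str_to_list('esp_wifi;lwip'))  # expected: ['esp_wifi', 'lwip']
print(semicolon_separated_str_to_list(''))               # expected: None, per the help text
print(semicolon_separated_str_to_list(';'))              # expected: [] (an empty list)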

View File

@ -0,0 +1,9 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import os
import sys
tools_dir = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', '..'))
if tools_dir not in sys.path:
sys.path.append(tools_dir)

View File

@ -0,0 +1,31 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import os
from idf_ci_utils import IDF_PATH
# use relative paths to avoid absolute paths in the pipeline
DEFAULT_TEST_PATHS = [
'examples',
os.path.join('tools', 'test_apps'),
'components',
]
DEFAULT_APPS_BUILD_PER_JOB = 60
DEFAULT_CASES_TEST_PER_JOB = 60
DEFAULT_BUILD_CHILD_PIPELINE_FILEPATH = os.path.join(IDF_PATH, 'build_child_pipeline.yml')
DEFAULT_TARGET_TEST_CHILD_PIPELINE_FILEPATH = os.path.join(IDF_PATH, 'target_test_child_pipeline.yml')
TEST_RELATED_BUILD_JOB_NAME = 'build_test_related_apps'
NON_TEST_RELATED_BUILD_JOB_NAME = 'build_non_test_related_apps'
COMMENT_START_MARKER = '### Dynamic Pipeline Report'
TEST_RELATED_APPS_FILENAME = 'test_related_apps.txt'
NON_TEST_RELATED_APPS_FILENAME = 'non_test_related_apps.txt'
TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME = 'test_related_apps_download_urls.yml'
REPORT_TEMPLATE_FILEPATH = os.path.join(
IDF_PATH, 'tools', 'ci', 'dynamic_pipelines', 'templates', 'report.template.html'
)

View File

@ -0,0 +1,169 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import inspect
import typing as t
from dataclasses import dataclass
from xml.etree.ElementTree import Element
import yaml
class Job:
def __init__(
self,
*,
name: str,
extends: t.Optional[t.List[str]] = None,
tags: t.Optional[t.List[str]] = None,
stage: t.Optional[str] = None,
parallel: int = 1,
variables: t.Optional[t.Dict[str, str]] = None,
script: t.Optional[t.List[str]] = None,
before_script: t.Optional[t.List[str]] = None,
after_script: t.Optional[t.List[str]] = None,
needs: t.Optional[t.List[str]] = None,
**kwargs: t.Any,
) -> None:
self.name = name
self.extends = extends
self.tags = tags
self.stage = stage
self.parallel = parallel
self.variables = variables or {}
self.script = script
self.before_script = before_script
self.after_script = after_script
self.needs = needs
for k, v in kwargs.items():
setattr(self, k, v)
def __str__(self) -> str:
return yaml.dump(self.to_dict()) # type: ignore
def set_variable(self, key: str, value: str) -> None:
self.variables[key] = value
def to_dict(self) -> t.Dict[str, t.Any]:
res = {}
for k, v in inspect.getmembers(self):
if k.startswith('_'):
continue
# name is the dict key
if k == 'name':
continue
# parallel 1 is not allowed
if k == 'parallel' and v == 1:
continue
if v is None:
continue
if inspect.ismethod(v) or inspect.isfunction(v):
continue
res[k] = v
return {self.name: res}
class EmptyJob(Job):
def __init__(
self,
*,
name: t.Optional[str] = None,
tags: t.Optional[t.List[str]] = None,
stage: t.Optional[str] = None,
before_script: t.Optional[t.List[str]] = None,
after_script: t.Optional[t.List[str]] = None,
**kwargs: t.Any,
) -> None:
super().__init__(
name=name or 'fake_pass_job',
tags=tags or ['build', 'shiny'],
stage=stage or 'build',
script=['echo "This is a fake job to pass the pipeline"'],
before_script=before_script or [],
after_script=after_script or [],
**kwargs,
)
class BuildJob(Job):
def __init__(
self,
*,
extends: t.Optional[t.List[str]] = None,
tags: t.Optional[t.List[str]] = None,
stage: t.Optional[str] = None,
**kwargs: t.Any,
) -> None:
super().__init__(
extends=extends or ['.dynamic_build_template'],
tags=tags or ['build', 'shiny'],
stage=stage or 'build',
**kwargs,
)
class TargetTestJob(Job):
def __init__(
self,
*,
extends: t.Optional[t.List[str]] = None,
stage: t.Optional[str] = None,
**kwargs: t.Any,
) -> None:
super().__init__(
extends=extends or ['.dynamic_target_test_template'],
stage=stage or 'target_test',
**kwargs,
)
@dataclass
class TestCase:
name: str
file: str
time: float
failure: t.Optional[str] = None
skipped: t.Optional[str] = None
ci_job_url: t.Optional[str] = None
@property
def is_failure(self) -> bool:
return self.failure is not None
@property
def is_skipped(self) -> bool:
return self.skipped is not None
@property
def is_success(self) -> bool:
return not self.is_failure and not self.is_skipped
@classmethod
def from_test_case_node(cls, node: Element) -> t.Optional['TestCase']:
if 'name' not in node.attrib:
print('WARNING: Node Invalid: ', node)
return None
kwargs = {
'name': node.attrib['name'],
'file': node.attrib.get('file'),
'time': float(node.attrib.get('time') or 0),
'ci_job_url': node.attrib.get('ci_job_url') or '',
}
failure_node = node.find('failure')
if failure_node is not None:
kwargs['failure'] = failure_node.attrib['message']
skipped_node = node.find('skipped')
if skipped_node is not None:
kwargs['skipped'] = skipped_node.attrib['message']
return cls(**kwargs) # type: ignore
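A short usage sketch for the parser above; the testcase attributes (including the custom `ci_job_url`) are hypothetical values in the shape the junit reports are expected to carry:

import xml.etree.ElementTree as ET

from dynamic_pipelines.models import TestCase

node = ET.fromstring(
    '<testcase name="test_hello_world" file="examples/get-started/hello_world/pytest_hello_world.py" '
    'time="12.3" ci_job_url="https://gitlab.example.com/-/jobs/1"/>'
)
tc = TestCase.from_test_case_node(node)
assert tc is not None and tc.is_success  # no <failure> or <skipped> child element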

View File

@ -0,0 +1,276 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import abc
import html
import os
import re
import typing as t
import yaml
from artifacts_handler import ArtifactType
from gitlab_api import Gitlab
from idf_build_apps import App
from idf_build_apps.constants import BuildStatus
from idf_ci.uploader import AppUploader
from prettytable import PrettyTable
from .constants import COMMENT_START_MARKER, REPORT_TEMPLATE_FILEPATH, TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME
from .models import TestCase
class ReportGenerator:
REGEX_PATTERN = '#### {}[^####]+'
def __init__(self, project_id: int, mr_iid: int, pipeline_id: int, *, title: str):
gl_project = Gitlab(project_id).project
if mr_iid is not None:
self.mr = gl_project.mergerequests.get(mr_iid)
else:
self.mr = None
self.pipeline_id = pipeline_id
self.title = title
self.output_filepath = self.title.lower().replace(' ', '_') + '.html'
@staticmethod
def get_download_link_for_url(url: str) -> str:
if url:
return f'<a href="{url}">Download</a>'
return ''
def generate_html_report(self, table_str: str) -> str:
# we're using bootstrap table
table_str = table_str.replace('<table>', '<table data-toggle="table" data-search="true">')
with open(REPORT_TEMPLATE_FILEPATH) as fr:
template = fr.read()
return template.replace('{{title}}', self.title).replace('{{table}}', table_str)
@staticmethod
def table_to_html_str(table: PrettyTable) -> str:
return html.unescape(table.get_html_string()) # type: ignore
@abc.abstractmethod
def _get_report_str(self) -> str:
raise NotImplementedError
def post_report(self, job_id: int, commit_id: str) -> None:
# report in html format, otherwise will exceed the limit
with open(self.output_filepath, 'w') as fw:
fw.write(self._get_report_str())
# for example, {URL}/-/esp-idf/-/jobs/{id}/artifacts/list_job_84.txt
# CI_PAGES_URL is {URL}/esp-idf, which is missing one `-`
url = os.getenv('CI_PAGES_URL', '').replace('esp-idf', '-/esp-idf')
comment = f'''#### {self.title}
Full {self.title} here: {url}/-/jobs/{job_id}/artifacts/{self.output_filepath} (with commit {commit_id})
'''
if self.mr is None:
print('No MR found, skip posting comment')
return
for note in self.mr.notes.list(iterator=True):
if note.body.startswith(COMMENT_START_MARKER):
updated_str = re.sub(self.REGEX_PATTERN.format(self.title), comment, note.body)
if updated_str == note.body: # not updated
updated_str = f'{note.body.strip()}\n\n{comment}'
note.body = updated_str
note.save()
break
else:
new_comment = f'''{COMMENT_START_MARKER}
{comment}'''
self.mr.notes.create({'body': new_comment})
class BuildReportGenerator(ReportGenerator):
def __init__(
self,
project_id: int,
mr_iid: int,
pipeline_id: int,
*,
title: str = 'Build Report',
apps: t.List[App],
):
super().__init__(project_id, mr_iid, pipeline_id, title=title)
self.apps = apps
self.apps_presigned_url_filepath = TEST_RELATED_APPS_DOWNLOAD_URLS_FILENAME
def _get_report_str(self) -> str:
if not self.apps:
print('No apps found, skip generating build report')
return 'No Apps Built'
uploader = AppUploader(self.pipeline_id)
table_str = ''
failed_apps = [app for app in self.apps if app.build_status == BuildStatus.FAILED]
if failed_apps:
table_str += '<h2>Failed Apps</h2>'
failed_apps_table = PrettyTable()
failed_apps_table.field_names = [
'App Dir',
'Build Dir',
'Failed Reason',
'Build Log',
]
for app in failed_apps:
failed_apps_table.add_row(
[
app.app_dir,
app.build_dir,
app.build_comment or '',
self.get_download_link_for_url(uploader.get_app_presigned_url(app, ArtifactType.LOGS)),
]
)
table_str += self.table_to_html_str(failed_apps_table)
built_test_related_apps = [app for app in self.apps if app.build_status == BuildStatus.SUCCESS and app.preserve]
if built_test_related_apps:
table_str += '<h2>Built Apps (Test Related)</h2>'
built_apps_table = PrettyTable()
built_apps_table.field_names = [
'App Dir',
'Build Dir',
'Bin Files with Build Log (without map and elf)',
'Map and Elf Files',
]
app_presigned_urls_dict: t.Dict[str, t.Dict[str, str]] = {}
for app in built_test_related_apps:
_d = {
ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES.value: uploader.get_app_presigned_url(
app, ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES
),
ArtifactType.MAP_AND_ELF_FILES.value: uploader.get_app_presigned_url(
app, ArtifactType.MAP_AND_ELF_FILES
),
}
built_apps_table.add_row(
[
app.app_dir,
app.build_dir,
self.get_download_link_for_url(_d[ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES]),
self.get_download_link_for_url(_d[ArtifactType.MAP_AND_ELF_FILES]),
]
)
app_presigned_urls_dict[app.build_path] = _d
# also generate a yaml file that includes the apps and the presigned urls
# to help with debugging locally
with open(self.apps_presigned_url_filepath, 'w') as fw:
yaml.dump(app_presigned_urls_dict, fw)
table_str += self.table_to_html_str(built_apps_table)
built_non_test_related_apps = [
app for app in self.apps if app.build_status == BuildStatus.SUCCESS and not app.preserve
]
if built_non_test_related_apps:
table_str += '<h2>Built Apps (Non Test Related)</h2>'
built_apps_table = PrettyTable()
built_apps_table.field_names = [
'App Dir',
'Build Dir',
'Build Log',
]
for app in built_non_test_related_apps:
built_apps_table.add_row(
[
app.app_dir,
app.build_dir,
self.get_download_link_for_url(uploader.get_app_presigned_url(app, ArtifactType.LOGS)),
]
)
table_str += self.table_to_html_str(built_apps_table)
skipped_apps = [app for app in self.apps if app.build_status == BuildStatus.SKIPPED]
if skipped_apps:
table_str += '<h2>Skipped Apps</h2>'
skipped_apps_table = PrettyTable()
skipped_apps_table.field_names = ['App Dir', 'Build Dir', 'Skipped Reason', 'Build Log']
for app in skipped_apps:
skipped_apps_table.add_row(
[
app.app_dir,
app.build_dir,
app.build_comment or '',
self.get_download_link_for_url(uploader.get_app_presigned_url(app, ArtifactType.LOGS)),
]
)
table_str += self.table_to_html_str(skipped_apps_table)
return self.generate_html_report(table_str)
class TargetTestReportGenerator(ReportGenerator):
def __init__(
self,
project_id: int,
mr_iid: int,
pipeline_id: int,
*,
title: str = 'Target Test Report',
test_cases: t.List[TestCase],
):
super().__init__(project_id, mr_iid, pipeline_id, title=title)
self.test_cases = test_cases
def _get_report_str(self) -> str:
table_str = ''
failed_test_cases = [tc for tc in self.test_cases if tc.is_failure]
if failed_test_cases:
table_str += '<h2>Failed Test Cases</h2>'
failed_test_cases_table = PrettyTable()
failed_test_cases_table.field_names = ['Test Case', 'Test Script File Path', 'Failure Reason', 'Job URL']
for tc in failed_test_cases:
failed_test_cases_table.add_row([tc.name, tc.file, tc.failure, tc.ci_job_url])
table_str += self.table_to_html_str(failed_test_cases_table)
skipped_test_cases = [tc for tc in self.test_cases if tc.is_skipped]
if skipped_test_cases:
table_str += '<h2>Skipped Test Cases</h2>'
skipped_test_cases_table = PrettyTable()
skipped_test_cases_table.field_names = ['Test Case', 'Test Script File Path', 'Skipped Reason']
for tc in skipped_test_cases:
skipped_test_cases_table.add_row([tc.name, tc.file, tc.skipped])
table_str += self.table_to_html_str(skipped_test_cases_table)
successful_test_cases = [tc for tc in self.test_cases if tc.is_success]
if successful_test_cases:
table_str += '<h2>Succeeded Test Cases</h2>'
successful_test_cases_table = PrettyTable()
successful_test_cases_table.field_names = ['Test Case', 'Test Script File Path', 'Job URL']
for tc in successful_test_cases:
successful_test_cases_table.add_row([tc.name, tc.file, tc.ci_job_url])
table_str += self.table_to_html_str(successful_test_cases_table)
return self.generate_html_report(table_str)

View File

@ -0,0 +1,25 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import os
import sys
IDF_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..', '..'))
# run these scripts only under IDF_PATH, so that relative paths stay consistent
os.chdir(IDF_PATH)
if 'IDF_PATH' not in os.environ:
os.environ['IDF_PATH'] = IDF_PATH
tools_path = os.path.join(os.path.dirname(__file__), '..', '..', '..')
if tools_path not in sys.path:
sys.path.append(tools_path)
tools_ci_path = os.path.join(os.path.dirname(__file__), '..', '..')
if tools_ci_path not in sys.path:
sys.path.append(tools_ci_path)
tools_ci_python_packages_path = os.path.join(os.path.dirname(__file__), '..', '..', 'python_packages')
if tools_ci_python_packages_path not in sys.path:
sys.path.append(tools_ci_python_packages_path)

View File

@ -0,0 +1,73 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import argparse
import sys
import __init__ # noqa: F401 # inject the system path
from dynamic_pipelines.constants import TEST_RELATED_APPS_FILENAME
from idf_build_apps import build_apps, setup_logging
from idf_build_apps.utils import semicolon_separated_str_to_list
from idf_ci.app import import_apps_from_txt
from idf_pytest.constants import DEFAULT_IGNORE_WARNING_FILEPATH
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Build Apps for Dynamic Pipeline')
parser.add_argument('app_list_file', default=TEST_RELATED_APPS_FILENAME, help='List of apps to build')
parser.add_argument(
'--build-verbose',
action='store_true',
help='Enable verbose output from build system.',
)
parser.add_argument('--parallel-count', default=1, type=int, help='Number of parallel build jobs.')
parser.add_argument(
'--parallel-index',
default=1,
type=int,
help='Index (1-based) of the job, out of the number specified by --parallel-count.',
)
parser.add_argument(
'--ignore-warning-file',
default=DEFAULT_IGNORE_WARNING_FILEPATH,
type=argparse.FileType('r'),
help='Ignore the warning strings in the specified file. Each line should be a regex string.',
)
parser.add_argument(
'--modified-components',
type=semicolon_separated_str_to_list,
help='semicolon-separated string which specifies the modified components. '
'an app with `depends_components` set in the corresponding manifest file will only be built '
'if it depends on any of the specified components. '
'If set to "", the value would be considered as None. '
'If set to ";", the value would be considered as an empty list',
)
parser.add_argument(
'--collect-app-info',
default='list_job_@p.txt',
help='If specified, the test case name and app info json will be written to this file',
)
parser.add_argument(
'--junitxml',
default='build_summary_@p.xml',
help='Path to the junitxml file. If specified, the junitxml file will be generated',
)
args = parser.parse_args()
setup_logging(verbose=1)
sys.exit(
build_apps(
import_apps_from_txt(args.app_list_file),
build_verbose=args.build_verbose,
keep_going=True,
ignore_warning_file=args.ignore_warning_file,
modified_components=args.modified_components,
check_app_dependencies=True,
parallel_count=args.parallel_count,
parallel_index=args.parallel_index,
collect_size_info='size_info_@p.txt',
collect_app_info=args.collect_app_info,
junitxml=args.junitxml,
)
)
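For local debugging, the same build can be reproduced with just the core call, assuming a `test_related_apps.txt` generated by the pipeline-generation step is available in the current directory (a sketch that drops the CI-only options):

from idf_build_apps import build_apps, setup_logging
from idf_ci.app import import_apps_from_txt

setup_logging(verbose=1)
ret = build_apps(
    import_apps_from_txt('test_related_apps.txt'),
    keep_going=True,
    parallel_count=1,
    parallel_index=1,
)
print(f'build_apps returned exit code {ret}')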

View File

@ -0,0 +1,193 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""This file is used for generating the child pipeline for build jobs."""
import argparse
import os
import typing as t
import __init__ # noqa: F401 # inject the system path
import yaml
from dynamic_pipelines.constants import (DEFAULT_APPS_BUILD_PER_JOB, DEFAULT_BUILD_CHILD_PIPELINE_FILEPATH,
DEFAULT_TEST_PATHS, NON_TEST_RELATED_APPS_FILENAME,
NON_TEST_RELATED_BUILD_JOB_NAME, TEST_RELATED_APPS_FILENAME,
TEST_RELATED_BUILD_JOB_NAME)
from dynamic_pipelines.models import BuildJob, EmptyJob
from dynamic_pipelines.utils import dump_jobs_to_yaml
from idf_build_apps.utils import semicolon_separated_str_to_list
from idf_ci.app import dump_apps_to_txt
from idf_ci_utils import IDF_PATH
from idf_pytest.constants import DEFAULT_CONFIG_RULES_STR, DEFAULT_FULL_BUILD_TEST_FILEPATTERNS, CollectMode
from idf_pytest.script import get_all_apps
def main(arguments: argparse.Namespace) -> None:
# load from default build test rules config file
extra_default_build_targets: t.List[str] = []
if arguments.default_build_test_rules:
with open(arguments.default_build_test_rules) as fr:
configs = yaml.safe_load(fr)
if configs:
extra_default_build_targets = configs.get('extra_default_build_targets') or []
build_jobs = []
###########################################
# special case with -k, ignore other args #
###########################################
if arguments.filter_expr:
# build only test related apps
test_related_apps, _ = get_all_apps(
arguments.paths,
target=CollectMode.ALL,
config_rules_str=DEFAULT_CONFIG_RULES_STR,
filter_expr=arguments.filter_expr,
marker_expr='not host_test',
extra_default_build_targets=extra_default_build_targets,
)
dump_apps_to_txt(sorted(test_related_apps), TEST_RELATED_APPS_FILENAME)
print(f'Generate test related apps file {TEST_RELATED_APPS_FILENAME} with {len(test_related_apps)} apps')
test_apps_build_job = BuildJob(
name=TEST_RELATED_BUILD_JOB_NAME,
parallel=len(test_related_apps) // DEFAULT_APPS_BUILD_PER_JOB + 1,
variables={
'APP_LIST_FILE': TEST_RELATED_APPS_FILENAME,
},
)
build_jobs.append(test_apps_build_job)
else:
#############
# all cases #
#############
test_related_apps, non_test_related_apps = get_all_apps(
arguments.paths,
CollectMode.ALL,
marker_expr='not host_test',
config_rules_str=DEFAULT_CONFIG_RULES_STR,
extra_default_build_targets=extra_default_build_targets,
modified_components=arguments.modified_components,
modified_files=arguments.modified_files,
ignore_app_dependencies_filepatterns=arguments.ignore_app_dependencies_filepatterns,
)
dump_apps_to_txt(sorted(test_related_apps), TEST_RELATED_APPS_FILENAME)
print(f'Generate test related apps file {TEST_RELATED_APPS_FILENAME} with {len(test_related_apps)} apps')
dump_apps_to_txt(sorted(non_test_related_apps), NON_TEST_RELATED_APPS_FILENAME)
print(
f'Generate non-test related apps file {NON_TEST_RELATED_APPS_FILENAME} with {len(non_test_related_apps)} apps'
)
if test_related_apps:
test_apps_build_job = BuildJob(
name=TEST_RELATED_BUILD_JOB_NAME,
parallel=len(test_related_apps) // DEFAULT_APPS_BUILD_PER_JOB + 1,
variables={
'APP_LIST_FILE': TEST_RELATED_APPS_FILENAME,
},
)
build_jobs.append(test_apps_build_job)
if non_test_related_apps:
non_test_apps_build_job = BuildJob(
name=NON_TEST_RELATED_BUILD_JOB_NAME,
parallel=len(non_test_related_apps) // DEFAULT_APPS_BUILD_PER_JOB + 1,
variables={
'APP_LIST_FILE': NON_TEST_RELATED_APPS_FILENAME,
},
)
build_jobs.append(non_test_apps_build_job)
# check if there's no jobs
if not build_jobs:
print('No apps need to be built. Create one empty job instead')
build_jobs.append(EmptyJob())
extra_include_yml = []
else:
extra_include_yml = ['tools/ci/dynamic_pipelines/templates/test_child_pipeline.yml']
dump_jobs_to_yaml(build_jobs, arguments.yaml_output, extra_include_yml)
print(f'Generate child pipeline yaml file {arguments.yaml_output} with {sum(j.parallel for j in build_jobs)} jobs')
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Generate build child pipeline',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
'-o',
'--yaml-output',
default=DEFAULT_BUILD_CHILD_PIPELINE_FILEPATH,
help='Output YAML path',
)
parser.add_argument(
'-p',
'--paths',
nargs='+',
default=DEFAULT_TEST_PATHS,
help='Paths to the apps to build.',
)
parser.add_argument(
'-k',
'--filter-expr',
help='only build tests matching the given filter expression. For example: -k "test_hello_world". Works only '
'for pytest',
)
parser.add_argument(
'--default-build-test-rules',
default=os.path.join(IDF_PATH, '.gitlab', 'ci', 'default-build-test-rules.yml'),
help='default build test rules config file',
)
parser.add_argument(
'--modified-components',
type=semicolon_separated_str_to_list,
help='semicolon-separated string which specifies the modified components. '
'an app with `depends_components` set in the corresponding manifest file will only be built '
'if it depends on any of the specified components. '
'If set to "", the value would be considered as None. '
'If set to ";", the value would be considered as an empty list',
)
parser.add_argument(
'--modified-files',
type=semicolon_separated_str_to_list,
help='semicolon-separated string which specifies the modified files. '
'an app with `depends_filepatterns` set in the corresponding manifest file will only be built '
'if any of the specified file patterns matches any of the specified modified files. '
'If set to "", the value would be considered as None. '
'If set to ";", the value would be considered as an empty list',
)
parser.add_argument(
'-if',
'--ignore-app-dependencies-filepatterns',
type=semicolon_separated_str_to_list,
help='semicolon-separated string which specifies the file patterns used for '
'skipping the app dependency checks. '
'The `depends_components` and `depends_filepatterns` set in the manifest files will be ignored '
'when any of the specified file patterns matches any of the modified files. '
'Must be used together with --modified-files. '
'If set to "", the value would be considered as None. '
'If set to ";", the value would be considered as an empty list',
)
args = parser.parse_args()
if os.getenv('IS_MR_PIPELINE') == '0' or os.getenv('BUILD_AND_TEST_ALL_APPS') == '1':
print('Build and run all test cases, and compile all cmake apps')
args.modified_components = None
args.modified_files = None
args.ignore_app_dependencies_filepatterns = None
elif args.filter_expr is not None:
print('Build and run only test cases matching "%s"' % args.filter_expr)
args.modified_components = None
args.modified_files = None
args.ignore_app_dependencies_filepatterns = None
else:
print('Build and run only test cases matching the modified components and files')
if args.modified_files and not args.ignore_app_dependencies_filepatterns:
# setting default values
args.ignore_app_dependencies_filepatterns = DEFAULT_FULL_BUILD_TEST_FILEPATTERNS
main(args)

View File

@ -0,0 +1,59 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import argparse
import glob
import os
import __init__ # noqa: F401 # inject the system path
from dynamic_pipelines.report import BuildReportGenerator
from idf_ci.app import import_apps_from_txt
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Update Build Report in MR pipelines',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
'--project-id',
type=int,
default=os.getenv('CI_PROJECT_ID'),
help='Project ID',
)
parser.add_argument(
'--mr-iid',
type=int,
default=os.getenv('CI_MERGE_REQUEST_IID'),
help='Merge Request IID',
)
parser.add_argument(
'--pipeline-id',
type=int,
default=os.getenv('PARENT_PIPELINE_ID'),
help='Pipeline ID',
)
parser.add_argument(
'--job-id',
type=int,
default=os.getenv('CI_JOB_ID'),
help='Job ID',
)
parser.add_argument(
'--commit-id',
default=os.getenv('CI_COMMIT_SHORT_SHA'),
help='MR commit ID',
)
parser.add_argument(
'--app-list-filepattern',
default='list_job_*.txt',
help='App list file pattern',
)
args = parser.parse_args()
apps = []
for f in glob.glob(args.app_list_filepattern):
apps.extend(import_apps_from_txt(f))
report_generator = BuildReportGenerator(args.project_id, args.mr_iid, args.pipeline_id, apps=apps)
report_generator.post_report(args.job_id, args.commit_id)

View File

@ -0,0 +1,131 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""This file is used for generating the child pipeline for target test jobs.
1. Check the build jobs' artifacts to get the built apps' information.
2. Post the Build Report if it's running in an MR pipeline.
3. Generate the child pipeline for target test jobs.
"""
import argparse
import glob
import os
import typing as t
from collections import Counter, defaultdict
import __init__ # noqa: F401 # inject the system path
from dynamic_pipelines.constants import (DEFAULT_CASES_TEST_PER_JOB, DEFAULT_TARGET_TEST_CHILD_PIPELINE_FILEPATH,
DEFAULT_TEST_PATHS)
from dynamic_pipelines.models import EmptyJob, Job, TargetTestJob
from dynamic_pipelines.utils import dump_jobs_to_yaml
from gitlab.v4.objects import Project
from gitlab_api import Gitlab
from idf_build_apps import App
from idf_ci.app import import_apps_from_txt
from idf_pytest.script import get_pytest_cases
def get_tags_with_amount(s: str) -> t.List[str]:
c: Counter = Counter()
for _t in s.split(','):
c[_t] += 1
res = set()
for target, amount in c.items():
if amount > 1:
res.add(f'{target}_{amount}')
else:
res.add(target)
return sorted(res)
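# Usage sketch (illustration only, not part of this file): a hypothetical selector that
# needs two esp32 DUTs and one esp32s3 collapses into per-target runner tags with the
# required amount appended.
assert get_tags_with_amount('esp32,esp32,esp32s3') == ['esp32_2', 'esp32s3']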
def generate_target_test_child_pipeline(project: Project, paths: str, apps: t.List[App], output_filepath: str) -> None:
pytest_cases = get_pytest_cases(
paths,
apps=apps,
marker_expr='not host_test', # since it's generating target-test child pipeline
)
res = defaultdict(list)
for case in pytest_cases:
if not case.env_markers:
print(f'No env markers found for {case.item.originalname} in {case.path}. Ignoring...')
continue
res[(case.target_selector, tuple(sorted(case.env_markers)))].append(case)
target_test_jobs: t.List[Job] = []
for (target_selector, env_markers), cases in res.items():
runner_tags = get_tags_with_amount(target_selector) + list(env_markers)
# we don't need to list all runners; as long as we find one matching runner, it's fine
runner_list = project.runners.list(status='online', tag_list=','.join(runner_tags), get_all=False)
if not runner_list:
print(f'WARNING: No runner found with tag {",".join(runner_tags)}, ignoring the following test cases:')
for case in cases:
print(f' - {case.name}')
continue
target_test_job = TargetTestJob(
name=f'{target_selector} - {",".join(env_markers)}',
tags=runner_tags,
parallel=len(cases) // DEFAULT_CASES_TEST_PER_JOB + 1,
)
target_test_job.set_variable('TARGET_SELECTOR', f"'{target_selector}'")
target_test_job.set_variable('ENV_MARKERS', "'" + ' and '.join(env_markers) + "'")
target_test_job.set_variable('PYTEST_NODES', ' '.join([f"'{case.item.nodeid}'" for case in cases]))
target_test_jobs.append(target_test_job)
if not target_test_jobs:
print('No target test cases required, create one empty job instead')
target_test_jobs.append(EmptyJob())
extra_include_yml = []
else:
extra_include_yml = ['tools/ci/dynamic_pipelines/templates/generate_target_test_report.yml']
dump_jobs_to_yaml(target_test_jobs, output_filepath, extra_include_yml)
print(f'Generate child pipeline yaml file {output_filepath} with {sum(j.parallel for j in target_test_jobs)} jobs')
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Generate Target Test Child Pipeline. Update Build Report in MR pipelines',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
'-p',
'--paths',
nargs='+',
default=DEFAULT_TEST_PATHS,
help='Paths to the apps to build.',
)
parser.add_argument(
'--project-id',
type=int,
default=os.getenv('CI_PROJECT_ID'),
help='Project ID',
)
parser.add_argument(
'--pipeline-id',
type=int,
default=os.getenv('PARENT_PIPELINE_ID'),
help='Pipeline ID',
)
parser.add_argument(
'-o',
'--output',
default=DEFAULT_TARGET_TEST_CHILD_PIPELINE_FILEPATH,
help='Output child pipeline file path',
)
args = parser.parse_args()
app_list_filepattern = 'list_job_*.txt'
apps = []
for f in glob.glob(app_list_filepattern):
apps.extend(import_apps_from_txt(f))
gl_project = Gitlab(args.project_id).project
generate_target_test_child_pipeline(gl_project, args.paths, apps, args.output)
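For reference, one generated target-test job serializes roughly as follows (a sketch with hypothetical values, reusing the `TargetTestJob` model added by this commit):

from dynamic_pipelines.models import TargetTestJob

job = TargetTestJob(name='esp32 - generic', tags=['esp32', 'generic'], parallel=2)
job.set_variable('TARGET_SELECTOR', "'esp32'")
job.set_variable('ENV_MARKERS', "'generic'")
print(job)  # YAML mapping keyed by the job name, extending .dynamic_target_test_template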

View File

@ -0,0 +1,62 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import argparse
import glob
import os
import xml.etree.ElementTree as ET
import __init__ # noqa: F401 # inject the system path
from dynamic_pipelines.models import TestCase
from dynamic_pipelines.report import TargetTestReportGenerator
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Update Target Test Report in MR pipelines',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
'--project-id',
type=int,
default=os.getenv('CI_PROJECT_ID'),
help='Project ID',
)
parser.add_argument(
'--mr-iid',
type=int,
default=os.getenv('CI_MERGE_REQUEST_IID'),
help='Merge Request IID',
)
parser.add_argument(
'--pipeline-id',
type=int,
default=os.getenv('PARENT_PIPELINE_ID'),
help='Pipeline ID',
)
parser.add_argument(
'--job-id',
type=int,
default=os.getenv('CI_JOB_ID'),
help='Job ID',
)
parser.add_argument(
'--commit-id',
default=os.getenv('CI_COMMIT_SHORT_SHA'),
help='MR commit ID',
)
parser.add_argument(
'--junit-report-filepattern',
default='XUNIT_RESULT*.xml',
help='Junit Report file pattern',
)
args = parser.parse_args()
test_cases = []
for f in glob.glob(args.junit_report_filepattern):
root = ET.parse(f).getroot()
for tc in root.findall('.//testcase'):
test_cases.append(TestCase.from_test_case_node(tc))
report_generator = TargetTestReportGenerator(args.project_id, args.mr_iid, args.pipeline_id, test_cases=test_cases)
report_generator.post_report(args.job_id, args.commit_id)

View File

@ -0,0 +1,85 @@
# This file is used to generate build jobs for pytest case dynamic pipeline
# don't add real jobs in this file
########################
# Build Jobs Templates #
########################
.dynamic_build_template:
extends:
- .before_script:build
- .after_script:build:ccache:upload-when-fail
image: $ESP_ENV_IMAGE
stage: build
variables:
# Enable ccache for all build jobs. See configure_ci_environment.sh for more ccache related settings.
IDF_CCACHE_ENABLE: "1"
needs:
- pipeline: $PARENT_PIPELINE_ID
job: generate_build_child_pipeline
artifacts:
paths:
# The other artifacts patterns are defined under tools/ci/artifacts_handler.py
# Now we're uploading/downloading the binary files from our internal storage server
#
# keep the log file to help debug
- "**/build*/build_log.txt"
# build spec files
- build_summary_*.xml
# list of built apps
- list_job_*.txt
when: always
expire_in: 1 week
script:
# CI-specific options start from "--parallel-count xxx"; they can be omitted when running locally
- run_cmd python tools/ci/dynamic_pipelines/scripts/child_pipeline_build_apps.py $APP_LIST_FILE
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
--collect-app-info "list_job_${CI_JOB_NAME_SLUG}.txt"
--modified-components ${MR_MODIFIED_COMPONENTS}
--junitxml "build_summary_${CI_JOB_NAME_SLUG}.xml"
.dynamic_target_test_template:
extends:
- .before_script:fetch:target_test
image: $TARGET_TEST_ENV_IMAGE
stage: target_test
timeout: 1 hour
variables:
SUBMODULES_TO_FETCH: "none"
# set while generating the pipeline
PYTEST_NODES: ""
TARGET_SELECTOR: ""
ENV_MARKERS: ""
cache:
# Usually do not need submodule-cache in target_test
- key: pip-cache-${LATEST_GIT_TAG}
paths:
- .cache/pip
policy: pull
artifacts:
paths:
- XUNIT_RESULT*.xml
- pytest_embedded_log/
# Child pipeline reports won't be collected in the main one
# https://gitlab.com/groups/gitlab-org/-/epics/8205
# reports:
# junit: XUNIT_RESULT.xml
script:
# get known failure cases
- retry_failed git clone $KNOWN_FAILURE_CASES_REPO known_failure_cases
# get runner env config file
- retry_failed git clone $TEST_ENV_CONFIG_REPO
- python $CHECKOUT_REF_SCRIPT ci-test-runner-configs ci-test-runner-configs
# CI-specific options start from "--known-failure-cases-file xxx"; they can be omitted when running locally
- run_cmd pytest ${PYTEST_NODES}
--target ${TARGET_SELECTOR}
-m ${ENV_MARKERS}
--pipeline-id $PARENT_PIPELINE_ID
--junitxml=XUNIT_RESULT_${CI_JOB_NAME_SLUG}.xml
--ignore-result-files known_failure_cases/known_failure_cases.txt
--parallel-count ${CI_NODE_TOTAL:-1}
--parallel-index ${CI_NODE_INDEX:-1}
${PYTEST_EXTRA_FLAGS}
--app-info-filepattern "list_job_*.txt"
after_script:
- python tools/ci/artifacts_handler.py upload --type logs junit_reports

View File

@ -0,0 +1,10 @@
generate_pytest_report:
stage: .post
tags: [build, shiny]
image: $ESP_ENV_IMAGE
when: always
artifacts:
paths:
- target_test_report.html
script:
- python tools/ci/dynamic_pipelines/scripts/generate_target_test_report.py

View File

@ -0,0 +1,23 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8" />
<title>{{title}}</title>
<link
href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/css/bootstrap.min.css"
rel="stylesheet"
/>
<link
href="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.css"
rel="stylesheet"
/>
</head>
<body>
<div class="container-fluid">{{table}}</div>
<script src="https://cdn.jsdelivr.net/npm/jquery/dist/jquery.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.2/dist/js/bootstrap.bundle.min.js"></script>
<script src="https://unpkg.com/bootstrap-table@1.22.1/dist/bootstrap-table.min.js"></script>
</body>
</html>

View File

@ -0,0 +1,41 @@
generate_pytest_build_report:
stage: assign_test
image: $ESP_ENV_IMAGE
tags:
- build
- shiny
when: always
artifacts:
paths:
- build_report.html
- test_related_apps_download_urls.yml
script:
- python tools/ci/dynamic_pipelines/scripts/generate_build_report.py
generate_pytest_child_pipeline:
# finally, we can get some use out of the default behavior that downloads all artifacts from the previous stage
stage: assign_test
image: $ESP_ENV_IMAGE
tags:
- build
- shiny
artifacts:
paths:
- target_test_child_pipeline.yml
script:
- python tools/ci/dynamic_pipelines/scripts/generate_target_test_child_pipeline.py
Pytest Target Test Jobs:
stage: target_test
needs:
- generate_pytest_child_pipeline
variables:
PARENT_PIPELINE_ID: $PARENT_PIPELINE_ID
# https://gitlab.com/gitlab-org/gitlab/-/issues/214340
inherit:
variables: false
trigger:
include:
- artifact: target_test_child_pipeline.yml
job: generate_pytest_child_pipeline
strategy: depend

View File

@ -0,0 +1,37 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import typing as t
import yaml
from .models import Job
def dump_jobs_to_yaml(
jobs: t.List[Job], output_filepath: str, extra_include_yml: t.Optional[t.List[str]] = None
) -> None:
yaml_dict = {}
for job in jobs:
yaml_dict.update(job.to_dict())
# global settings
yaml_dict.update(
{
'include': [
'tools/ci/dynamic_pipelines/templates/.dynamic_jobs.yml',
'.gitlab/ci/common.yml',
],
# https://gitlab.com/gitlab-org/gitlab/-/issues/222370#note_662695503
'workflow': {
'rules': [
{'if': '$CI_MERGE_REQUEST_IID'},
{'if': '$CI_COMMIT_BRANCH'},
],
},
}
)
yaml_dict['include'].extend(extra_include_yml or [])
with open(output_filepath, 'w') as fw:
yaml.dump(yaml_dict, fw, indent=2)
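A short usage sketch: writing a child pipeline that contains only the placeholder job; the output file carries the shared `include` and `workflow` blocks added above and can be consumed by a `trigger: include: artifact:` job:

from dynamic_pipelines.models import EmptyJob
from dynamic_pipelines.utils import dump_jobs_to_yaml

dump_jobs_to_yaml([EmptyJob()], 'build_child_pipeline.yml')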

View File

@ -41,3 +41,11 @@ tools/ci/cleanup_ignore_lists.py
tools/ci/artifacts_handler.py tools/ci/artifacts_handler.py
tools/unit-test-app/**/* tools/unit-test-app/**/*
tools/ci/gitlab_yaml_linter.py tools/ci/gitlab_yaml_linter.py
tools/ci/dynamic_pipelines/**/*
tools/ci/idf_ci/**/*
tools/ci/get_supported_examples.sh
tools/ci/python_packages/common_test_methods.py
tools/ci/python_packages/gitlab_api.py
tools/ci/python_packages/idf_http_server_test/**/*
tools/ci/python_packages/idf_iperf_test_util/**/*
tools/esp_prov/**/*

View File

40
tools/ci/idf_ci/app.py Normal file
View File

@ -0,0 +1,40 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import sys
import typing as t
from typing import Literal
from idf_build_apps import App, CMakeApp, json_to_app
from idf_ci.uploader import AppUploader, get_app_uploader
class IdfCMakeApp(CMakeApp):
uploader: t.ClassVar[t.Optional['AppUploader']] = get_app_uploader()
build_system: Literal['idf_cmake'] = 'idf_cmake'
def _post_build(self) -> None:
super()._post_build()
if self.uploader:
self.uploader.upload_app(self.build_path)
def dump_apps_to_txt(apps: t.List[App], output_filepath: str) -> None:
with open(output_filepath, 'w') as fw:
for app in apps:
fw.write(app.model_dump_json() + '\n')
def import_apps_from_txt(input_filepath: str) -> t.List[App]:
apps: t.List[App] = []
with open(input_filepath) as fr:
for line in fr:
if line := line.strip():
try:
apps.append(json_to_app(line, extra_classes=[IdfCMakeApp]))
except Exception: # noqa
print('Failed to deserialize app from line: %s' % line)
sys.exit(1)
return apps
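A round-trip sketch for the two helpers above, assuming a `list_job_*.txt` file collected from a build job is available locally (the file name here is hypothetical):

from idf_ci.app import dump_apps_to_txt, import_apps_from_txt

apps = import_apps_from_txt('list_job_build_test_related_apps.txt')
dump_apps_to_txt(apps, 'apps_copy.txt')
assert len(import_apps_from_txt('apps_copy.txt')) == len(apps)  # one JSON-serialized app per line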

150
tools/ci/idf_ci/uploader.py Normal file
View File

@@ -0,0 +1,150 @@
# SPDX-FileCopyrightText: 2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import glob
import os
import typing as t
from datetime import timedelta
from zipfile import ZIP_DEFLATED, ZipFile

import minio
from artifacts_handler import ArtifactType, get_minio_client, getenv
from idf_build_apps import App
from idf_build_apps.utils import rmdir
from idf_ci_utils import IDF_PATH
from idf_pytest.constants import DEFAULT_BUILD_LOG_FILENAME


class AppUploader:
    TYPE_PATTERNS_DICT = {
        ArtifactType.MAP_AND_ELF_FILES: [
            'bootloader/*.map',
            'bootloader/*.elf',
            '*.map',
            '*.elf',
        ],
        ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES: [
            '*.bin',
            'bootloader/*.bin',
            'partition_table/*.bin',
            'flasher_args.json',
            'flash_project_args',
            'config/sdkconfig.json',
            'project_description.json',
        ],
        ArtifactType.LOGS: [
            DEFAULT_BUILD_LOG_FILENAME,
        ],
    }

    def __init__(self, pipeline_id: t.Union[str, int, None] = None) -> None:
        self.pipeline_id = str(pipeline_id or '1')

        self._client = get_minio_client()

    def get_app_object_name(self, app_path: str, zip_name: str, artifact_type: ArtifactType) -> str:
        return f'{self.pipeline_id}/{artifact_type.value}/{app_path}/{zip_name}'

    def _upload_app(self, app_build_path: str, artifact_type: ArtifactType) -> bool:
        app_path, build_dir = os.path.split(app_build_path)
        zip_filename = f'{build_dir}.zip'

        has_file = False
        with ZipFile(
            zip_filename,
            'w',
            compression=ZIP_DEFLATED,
            # 1 is the fastest compression level
            # the size doesn't differ much between 1 and 9
            compresslevel=1,
        ) as zw:
            for pattern in self.TYPE_PATTERNS_DICT[artifact_type]:
                for file in glob.glob(os.path.join(app_build_path, pattern), recursive=True):
                    zw.write(file)
                    has_file = True

        uploaded = False
        try:
            if has_file:
                obj_name = self.get_app_object_name(app_path, zip_filename, artifact_type)
                print(f'Created archive file: {zip_filename}, uploading as {obj_name}')
                self._client.fput_object(getenv('IDF_S3_BUCKET'), obj_name, zip_filename)
                uploaded = True
        finally:
            os.remove(zip_filename)

        return uploaded

    def upload_app(self, app_build_path: str, artifact_type: t.Optional[ArtifactType] = None) -> None:
        uploaded = False
        if not artifact_type:
            for _artifact_type in [
                ArtifactType.MAP_AND_ELF_FILES,
                ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES,
                ArtifactType.LOGS,
            ]:
                uploaded |= self._upload_app(app_build_path, _artifact_type)
        else:
            uploaded = self._upload_app(app_build_path, artifact_type)

        if uploaded:
            rmdir(app_build_path, exclude_file_patterns=DEFAULT_BUILD_LOG_FILENAME)

    def _download_app(self, app_build_path: str, artifact_type: ArtifactType) -> None:
        app_path, build_dir = os.path.split(app_build_path)
        zip_filename = f'{build_dir}.zip'

        # paths are relative to IDF_PATH
        current_dir = os.getcwd()
        os.chdir(IDF_PATH)
        try:
            obj_name = self.get_app_object_name(app_path, zip_filename, artifact_type)
            print(f'Downloading {obj_name}')
            try:
                try:
                    self._client.stat_object(getenv('IDF_S3_BUCKET'), obj_name)
                except minio.error.S3Error as e:
                    raise SystemExit(
                        f'No such file on minio server: {obj_name}. '
                        f'Probably the build failed or the artifacts have expired. '
                        f'Full error message: {str(e)}'
                    )
                else:
                    self._client.fget_object(getenv('IDF_S3_BUCKET'), obj_name, zip_filename)
                    print(f'Downloaded to {zip_filename}')
            except minio.error.S3Error as e:
                raise SystemExit('Shouldn\'t happen, please report this bug in the CI channel: ' + str(e))

            with ZipFile(zip_filename, 'r') as zr:
                zr.extractall()

            os.remove(zip_filename)
        finally:
            os.chdir(current_dir)

    def download_app(self, app_build_path: str, artifact_type: t.Optional[ArtifactType] = None) -> None:
        if not artifact_type:
            for _artifact_type in [ArtifactType.MAP_AND_ELF_FILES, ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES]:
                self._download_app(app_build_path, _artifact_type)
        else:
            self._download_app(app_build_path, artifact_type)

    def get_app_presigned_url(self, app: App, artifact_type: ArtifactType) -> str:
        obj_name = self.get_app_object_name(app.app_dir, f'{app.build_dir}.zip', artifact_type)
        try:
            self._client.stat_object(
                getenv('IDF_S3_BUCKET'),
                obj_name,
            )
        except minio.error.S3Error:
            return ''
        else:
            return self._client.get_presigned_url(  # type: ignore
                'GET', getenv('IDF_S3_BUCKET'), obj_name, expires=timedelta(days=4)
            )


def get_app_uploader() -> t.Optional['AppUploader']:
    if parent_pipeline_id := os.getenv('PARENT_PIPELINE_ID'):
        return AppUploader(parent_pipeline_id)

    return None
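
A hedged sketch of downloading one app's binaries locally with the class above (assumes the minio-related environment variables used by artifacts_handler are set; the pipeline id and build path are placeholders):

import os

from artifacts_handler import ArtifactType
from idf_ci.uploader import AppUploader

uploader = AppUploader(pipeline_id=os.getenv('PARENT_PIPELINE_ID', '123456'))
# fetch only the flashable binaries; the build path is relative to IDF_PATH
uploader.download_app('examples/get-started/hello_world/build_esp32_default',
                      ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES)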


@@ -10,6 +10,7 @@ import subprocess
 import sys
 import typing as t
 from functools import cached_property
+from pathlib import Path
 
 IDF_PATH = os.path.abspath(os.getenv('IDF_PATH', os.path.join(os.path.dirname(__file__), '..', '..')))
 
@@ -99,10 +100,6 @@ def get_git_files(path: str = IDF_PATH, full_path: bool = False) -> t.List[str]:
     return [os.path.join(path, f) for f in files] if full_path else files
 
 
-def is_in_directory(file_path: str, folder: str) -> bool:
-    return os.path.realpath(file_path).startswith(os.path.realpath(folder) + os.sep)
-
-
 def to_list(s: t.Any) -> t.List[t.Any]:
     if not s:
         return []
 
@@ -178,3 +175,19 @@ class GitlabYmlConfig:
     @staticmethod
     def _is_rule_key(key: str) -> bool:
         return key.startswith('.rules:') or key.endswith('template')
+
+
+def get_all_manifest_files() -> t.List[str]:
+    """
+    Get all ``.build-test-rules.yml`` manifest files under IDF_PATH, skipping managed components.
+    """
+    paths: t.List[str] = []
+
+    for p in Path(IDF_PATH).glob('**/.build-test-rules.yml'):
+        if 'managed_components' in p.parts:
+            continue
+
+        paths.append(str(p))
+
+    return paths


@@ -1,4 +1,4 @@
-# SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
+# SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD
 # SPDX-License-Identifier: Apache-2.0
 
 """
@@ -6,12 +6,13 @@ Pytest Related Constants. Don't import third-party packages here.
 """
 import os
 import typing as t
-from collections import Counter
 from dataclasses import dataclass
 from enum import Enum
 from functools import cached_property
+from pathlib import Path
 
 from _pytest.python import Function
+from idf_ci_utils import IDF_PATH
 from pytest_embedded.utils import to_list
 
 SUPPORTED_TARGETS = ['esp32', 'esp32s2', 'esp32c3', 'esp32s3', 'esp32c2', 'esp32c6', 'esp32h2', 'esp32p4']
@@ -113,9 +114,34 @@ ENV_MARKERS = {
     'sdio_master_slave': 'Test sdio multi board, esp32+esp32',
     'sdio_multidev_32_c6': 'Test sdio multi board, esp32+esp32c6',
     'usj_device': 'Test usb_serial_jtag and usb_serial_jtag is used as serial only (not console)',
-    'twai_std': 'twai runner with all twai supported targets connect to usb-can adapter'
+    'twai_std': 'twai runner with all twai supported targets connect to usb-can adapter',
 }
 
+DEFAULT_CONFIG_RULES_STR = ['sdkconfig.ci=default', 'sdkconfig.ci.*=', '=default']
+DEFAULT_IGNORE_WARNING_FILEPATH = os.path.join(IDF_PATH, 'tools', 'ci', 'ignore_build_warnings.txt')
+DEFAULT_BUILD_TEST_RULES_FILEPATH = os.path.join(IDF_PATH, '.gitlab', 'ci', 'default-build-test-rules.yml')
+DEFAULT_FULL_BUILD_TEST_FILEPATTERNS = [
+    # tools
+    'tools/cmake/**/*',
+    'tools/tools.json',
+    # components
+    'components/cxx/**/*',
+    'components/esp_common/**/*',
+    'components/esp_hw_support/**/*',
+    'components/esp_rom/**/*',
+    'components/esp_system/**/*',
+    'components/esp_timer/**/*',
+    'components/freertos/**/*',
+    'components/hal/**/*',
+    'components/heap/**/*',
+    'components/log/**/*',
+    'components/newlib/**/*',
+    'components/riscv/**/*',
+    'components/soc/**/*',
+    'components/xtensa/**/*',
+]
+DEFAULT_BUILD_LOG_FILENAME = 'build_log.txt'
+
 
 class CollectMode(str, Enum):
     SINGLE_SPECIFIC = 'single_specific'
@@ -163,6 +189,10 @@ class PytestCase:
     def is_single_dut_test_case(self) -> bool:
        return True if len(self.apps) == 1 else False
 
+    @cached_property
+    def is_host_test(self) -> bool:
+        return 'host_test' in self.all_markers or 'linux' in self.targets
+
     # the following markers could be changed dynamically, don't use cached_property
     @property
     def all_markers(self) -> t.Set[str]:
@@ -202,24 +232,35 @@ class PytestCase:
         return {marker for marker in self.all_markers if marker in ENV_MARKERS}
 
     @property
-    def target_with_amount_markers(self) -> t.Set[str]:
-        c: Counter = Counter()
-        for app in self.apps:
-            c[app.target] += 1
-
-        res = set()
-        for target, amount in c.items():
-            if amount > 1:
-                res.add(f'{target}_{amount}')
-            else:
-                res.add(target)
-
-        return res
-
-    def all_built_in_app_lists(self, app_lists: t.Optional[t.List[str]] = None) -> bool:
+    def target_selector(self) -> str:
+        return ','.join(app.target for app in self.apps)
+
+    @property
+    def requires_elf_or_map(self) -> bool:
+        """
+        This property determines whether the test case requires elf or map file. By default, one app in the test case
+        only requires .bin files.
+
+        :return: True if the test case requires elf or map file, False otherwise
+        """
+        if 'jtag' in self.env_markers or 'usb_serial_jtag' in self.env_markers:
+            return True
+
+        if any('panic' in Path(app.path).parts for app in self.apps):
+            return True
+
+        return False
+
+    def all_built_in_app_lists(self, app_lists: t.Optional[t.List[str]] = None) -> t.Optional[str]:
+        """
+        Check if all binaries of the test case are built in the app lists.
+
+        :param app_lists: app lists to check
+        :return: debug string if not all binaries are built in the app lists, None otherwise
+        """
         if app_lists is None:
             # ignore this feature
-            return True
+            return None
 
         bin_found = [0] * len(self.apps)
         for i, app in enumerate(self.apps):
@@ -232,10 +273,10 @@ class PytestCase:
                 msg += f'\n - {app.build_dir}'
 
             print(msg)
-            return False
+            return msg
 
         if sum(bin_found) == len(self.apps):
-            return True
+            return None
 
         # some found, some not, looks suspicious
         msg = f'Found some binaries of test case {self.name} are not listed in the app lists.'
@@ -244,4 +285,5 @@ class PytestCase:
             msg += f'\n - {app.build_dir}'
 
         msg += '\nMight be an issue of .build-test-rules.yml files'
-        return False
+        print(msg)
+        return msg
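
One way `requires_elf_or_map` could be consumed when fetching artifacts (an illustrative sketch under assumptions, not code from this commit; `case` stands for a collected PytestCase and the helper name is hypothetical):

from artifacts_handler import ArtifactType

def artifact_types_for(case):  # hypothetical helper
    # every case needs the flashable binaries; elf/map only when the case requires them
    types = [ArtifactType.BUILD_DIR_WITHOUT_MAP_AND_ELF_FILES]
    if case.requires_elf_or_map:
        types.append(ArtifactType.MAP_AND_ELF_FILES)
    return types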


@@ -1,8 +1,9 @@
-# SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
+# SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD
 # SPDX-License-Identifier: Apache-2.0
 
 import os
 import typing as t
+from collections import defaultdict
 from functools import cached_property
 from xml.etree import ElementTree as ET
 
@@ -11,6 +12,8 @@ from _pytest.config import ExitCode
 from _pytest.main import Session
 from _pytest.python import Function
 from _pytest.runner import CallInfo
+from idf_build_apps import App
+from idf_build_apps.constants import BuildStatus
 from pytest_embedded import Dut
 from pytest_embedded.plugin import parse_multi_dut_args
 from pytest_embedded.utils import find_by_suffix, to_list
 
@@ -37,7 +40,7 @@ class IdfPytestEmbedded:
         target: t.Union[t.List[str], str],
         *,
         single_target_duplicate_mode: bool = False,
-        apps_list: t.Optional[t.List[str]] = None,
+        apps: t.Optional[t.List[App]] = None,
     ):
         if isinstance(target, str):
             self.target = sorted(comma_sep_str_to_list(target))
 
@@ -60,10 +63,18 @@ class IdfPytestEmbedded:
         # otherwise, it should be collected when running `pytest --target esp32,esp32`
         self._single_target_duplicate_mode = single_target_duplicate_mode
 
-        self.apps_list = apps_list
+        self.apps_list = (
+            [os.path.join(app.app_dir, app.build_dir) for app in apps if app.build_status == BuildStatus.SUCCESS]
+            if apps
+            else None
+        )
 
         self.cases: t.List[PytestCase] = []
+        # record the additional info
+        # test case id: {key: value}
+        self.additional_info: t.Dict[str, t.Dict[str, t.Any]] = defaultdict(dict)
 
     @cached_property
     def collect_mode(self) -> CollectMode:
         if len(self.target) == 1:
@@ -90,13 +101,19 @@ class IdfPytestEmbedded:
         count = self.get_param(item, 'count', 1)
         # default app_path is where the test script locates
-        app_paths = to_list(
-            parse_multi_dut_args(count, os.path.relpath(self.get_param(item, 'app_path', os.path.dirname(item.path))))
-        )
+        app_paths = to_list(parse_multi_dut_args(count, self.get_param(item, 'app_path', os.path.dirname(item.path))))
         configs = to_list(parse_multi_dut_args(count, self.get_param(item, 'config', DEFAULT_SDKCONFIG)))
         targets = to_list(parse_multi_dut_args(count, self.get_param(item, 'target', self.target[0])))
 
-        return PytestCase([PytestApp(app_paths[i], targets[i], configs[i]) for i in range(count)], item)
+        def abspath_or_relpath(s: str) -> str:
+            if os.path.abspath(s) and s.startswith(os.getcwd()):
+                return os.path.relpath(s)
+
+            return s
+
+        return PytestCase(
+            [PytestApp(abspath_or_relpath(app_paths[i]), targets[i], configs[i]) for i in range(count)], item
+        )
 
     @pytest.hookimpl(tryfirst=True)
     def pytest_collection_modifyitems(self, items: t.List[Function]) -> None:
@@ -189,11 +206,17 @@ class IdfPytestEmbedded:
         # 4. filter by `self.apps_list`, skip the test case if not listed
         # should only be used in CI
-        items[:] = [_item for _item in items if item_to_case_dict[_item].all_built_in_app_lists(self.apps_list)]
+        _items = []
+        for item in items:
+            case = item_to_case_dict[item]
+            if msg := case.all_built_in_app_lists(self.apps_list):
+                self.additional_info[case.name]['skip_reason'] = msg
+            else:
+                _items.append(item)
 
         # OKAY!!! All left ones will be executed, sort it and add more markers
         items[:] = sorted(
-            items, key=lambda x: (os.path.dirname(x.path), self.get_param(x, 'config', DEFAULT_SDKCONFIG))
+            _items, key=lambda x: (os.path.dirname(x.path), self.get_param(x, 'config', DEFAULT_SDKCONFIG))
         )
 
         for item in items:
             case = item_to_case_dict[item]
@@ -207,8 +230,7 @@ class IdfPytestEmbedded:
                 item.add_marker('xtal_40mhz')
 
     def pytest_report_collectionfinish(self, items: t.List[Function]) -> None:
-        for item in items:
-            self.cases.append(self.item_to_pytest_case(item))
+        self.cases = [item.stash[ITEM_PYTEST_CASE_KEY] for item in items]
 
     def pytest_custom_test_case_name(self, item: Function) -> str:
         return item.funcargs.get('test_case_name', item.nodeid)  # type: ignore
@@ -275,6 +297,9 @@ class IdfPytestEmbedded:
             if 'file' in case.attrib:
                 case.attrib['file'] = case.attrib['file'].replace('/IDF/', '')  # our unity test framework
 
+            if ci_job_url := os.getenv('CI_JOB_URL'):
+                case.attrib['ci_job_url'] = ci_job_url
+
         xml.write(junit)
 
     def pytest_sessionfinish(self, session: Session, exitstatus: int) -> None:
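
For context, a rough sketch of feeding the built-app list into the plugin so that cases whose binaries were not built are skipped with a recorded `skip_reason` (the file name and CLI options are placeholders, not from this commit):

import pytest

from idf_ci.app import import_apps_from_txt
from idf_pytest.plugin import IdfPytestEmbedded

collector = IdfPytestEmbedded('esp32', apps=import_apps_from_txt('app_info.txt'))
# register the plugin so it can filter and sort the collected items
pytest.main(['--collect-only', '-q'], plugins=[collector])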


@@ -1,18 +1,24 @@
 # SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
 # SPDX-License-Identifier: Apache-2.0
 
+import fnmatch
 import io
+import logging
+import os.path
 import typing as t
 from contextlib import redirect_stdout
 from pathlib import Path
 
 import pytest
 from _pytest.config import ExitCode
+from idf_build_apps import App, find_apps
+from idf_build_apps.constants import SUPPORTED_TARGETS, BuildStatus
+from idf_ci.app import IdfCMakeApp
+from idf_ci_utils import IDF_PATH, get_all_manifest_files, to_list
 from idf_py_actions.constants import PREVIEW_TARGETS as TOOLS_PREVIEW_TARGETS
 from idf_py_actions.constants import SUPPORTED_TARGETS as TOOLS_SUPPORTED_TARGETS
-from pytest_embedded.utils import to_list
 
-from .constants import CollectMode, PytestCase
+from .constants import DEFAULT_BUILD_LOG_FILENAME, DEFAULT_CONFIG_RULES_STR, CollectMode, PytestCase
 from .plugin import IdfPytestEmbedded
@@ -36,8 +42,10 @@ def get_pytest_files(paths: t.List[str]) -> t.List[str]:
 def get_pytest_cases(
     paths: t.Union[str, t.List[str]],
     target: str = CollectMode.ALL,
+    *,
     marker_expr: t.Optional[str] = None,
     filter_expr: t.Optional[str] = None,
+    apps: t.Optional[t.List[App]] = None,
 ) -> t.List[PytestCase]:
     """
     For single-dut test cases, `target` could be
@@ -49,9 +57,10 @@ def get_pytest_cases(
     - or `multi_all`, to get all multi-dut test cases
 
     :param paths: paths to search for pytest scripts
-    :param target: target to get test cases for, detailed above
+    :param target: target or keywords to get test cases for, detailed above
     :param marker_expr: pytest marker expression, `-m`
     :param filter_expr: pytest filter expression, `-k`
+    :param apps: built app list, skip the tests required by apps not in the list
     :return: list of test cases
     """
     paths = to_list(paths)
@@ -63,7 +72,7 @@ def get_pytest_cases(
         return cases
 
     def _get_pytest_cases(_target: str, _single_target_duplicate_mode: bool = False) -> t.List[PytestCase]:
-        collector = IdfPytestEmbedded(_target, single_target_duplicate_mode=_single_target_duplicate_mode)
+        collector = IdfPytestEmbedded(_target, single_target_duplicate_mode=_single_target_duplicate_mode, apps=apps)
 
         with io.StringIO() as buf:
             with redirect_stdout(buf):
@@ -97,3 +106,108 @@ def get_pytest_cases(
             cases.extend(_get_pytest_cases(_target))
 
     return sorted(cases, key=lambda x: (x.path, x.name, str(x.targets)))
def get_all_apps(
    paths: t.List[str],
    target: str = CollectMode.ALL,
    *,
    marker_expr: t.Optional[str] = None,
    filter_expr: t.Optional[str] = None,
    config_rules_str: t.Optional[t.List[str]] = None,
    preserve_all: bool = False,
    extra_default_build_targets: t.Optional[t.List[str]] = None,
    modified_components: t.Optional[t.List[str]] = None,
    modified_files: t.Optional[t.List[str]] = None,
    ignore_app_dependencies_filepatterns: t.Optional[t.List[str]] = None,
) -> t.Tuple[t.Set[App], t.Set[App]]:
    """
    Return the tuple of test-required apps and non-test-related apps

    :param paths: paths to search for pytest scripts
    :param target: target or keywords to get test cases for, explained in `get_pytest_cases`
    :param marker_expr: pytest marker expression, `-m`
    :param filter_expr: pytest filter expression, `-k`
    :param config_rules_str: config rules string
    :param preserve_all: preserve all apps
    :param extra_default_build_targets: extra default build targets
    :param modified_components: modified components
    :param modified_files: modified files
    :param ignore_app_dependencies_filepatterns: ignore app dependencies filepatterns
    :return: tuple of test-required apps and non-test-related apps
    """
    all_apps = find_apps(
        paths,
        target,
        build_system=IdfCMakeApp,
        recursive=True,
        build_dir='build_@t_@w',
        config_rules_str=config_rules_str or DEFAULT_CONFIG_RULES_STR,
        build_log_filename=DEFAULT_BUILD_LOG_FILENAME,
        size_json_filename='size.json',
        check_warnings=True,
        manifest_rootpath=IDF_PATH,
        manifest_files=get_all_manifest_files(),
        default_build_targets=SUPPORTED_TARGETS + (extra_default_build_targets or []),
        modified_components=modified_components,
        modified_files=modified_files,
        ignore_app_dependencies_filepatterns=ignore_app_dependencies_filepatterns,
        include_skipped_apps=True,
    )

    pytest_cases = get_pytest_cases(
        paths,
        target,
        marker_expr=marker_expr,
        filter_expr=filter_expr,
    )

    modified_pytest_cases = []
    if modified_files:
        modified_pytest_scripts = [
            os.path.dirname(f) for f in modified_files if fnmatch.fnmatch(os.path.basename(f), 'pytest_*.py')
        ]
        if modified_pytest_scripts:
            modified_pytest_cases = get_pytest_cases(
                modified_pytest_scripts,
                target,
                marker_expr=marker_expr,
                filter_expr=filter_expr,
            )

    # app_path, target, config
    pytest_app_path_tuple_dict: t.Dict[t.Tuple[Path, str, str], PytestCase] = {}
    for case in pytest_cases:
        for app in case.apps:
            pytest_app_path_tuple_dict[(Path(app.path), app.target, app.config)] = case

    modified_pytest_app_path_tuple_dict: t.Dict[t.Tuple[Path, str, str], PytestCase] = {}
    for case in modified_pytest_cases:
        for app in case.apps:
            modified_pytest_app_path_tuple_dict[(Path(app.path), app.target, app.config)] = case

    test_related_apps: t.Set[App] = set()
    non_test_related_apps: t.Set[App] = set()

    for app in all_apps:
        # override build_status if test script got modified
        if case := modified_pytest_app_path_tuple_dict.get((Path(app.app_dir), app.target, app.config_name)):
            test_related_apps.add(app)
            app.build_status = BuildStatus.SHOULD_BE_BUILT
            app.preserve = True
            logging.debug('Found app: %s - required by modified test case %s', app, case.path)
        elif app.build_status != BuildStatus.SKIPPED:
            if case := pytest_app_path_tuple_dict.get((Path(app.app_dir), app.target, app.config_name)):
                test_related_apps.add(app)
                # required by a test case, should be built
                app.build_status = BuildStatus.SHOULD_BE_BUILT
                app.preserve = True
                logging.debug('Found test-related app: %s - required by %s', app, case.path)
            else:
                non_test_related_apps.add(app)
                app.preserve = preserve_all
                logging.debug('Found non-test-related app: %s', app)

    print(f'Found {len(test_related_apps)} test-related apps')
    print(f'Found {len(non_test_related_apps)} non-test-related apps')

    return test_related_apps, non_test_related_apps
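
A usage sketch of the new entry point (illustrative only; the search paths, modified-file list, and output file names are placeholders, not from this commit):

from idf_ci.app import dump_apps_to_txt
from idf_pytest.constants import CollectMode
from idf_pytest.script import get_all_apps

test_apps, other_apps = get_all_apps(
    ['examples/get-started'],
    CollectMode.ALL,
    modified_components=['esp_wifi'],
    modified_files=['components/esp_wifi/CMakeLists.txt'],
)
# persist both sets so later pipeline stages can re-import them
dump_apps_to_txt(list(test_apps), 'test_related_apps.txt')
dump_apps_to_txt(list(other_apps), 'non_test_related_apps.txt')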


@@ -0,0 +1,48 @@
# SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import os
import sys
from pathlib import Path

tools_ci_dir = os.path.join(os.path.dirname(__file__), '..', '..')
if tools_ci_dir not in sys.path:
    sys.path.append(tools_ci_dir)

tools_dir = os.path.join(os.path.dirname(__file__), '..', '..', '..')
if tools_dir not in sys.path:
    sys.path.append(tools_dir)


def create_project(name: str, folder: Path) -> Path:
    p = folder / name
    p.mkdir(parents=True, exist_ok=True)
    (p / 'main').mkdir(parents=True, exist_ok=True)

    with open(p / 'CMakeLists.txt', 'w') as fw:
        fw.write(
            """cmake_minimum_required(VERSION 3.16)
include($ENV{{IDF_PATH}}/tools/cmake/project.cmake)
project({})
""".format(
                name
            )
        )

    with open(p / 'main' / 'CMakeLists.txt', 'w') as fw:
        fw.write(
            """idf_component_register(SRCS "{}.c"
INCLUDE_DIRS ".")
""".format(
                name
            )
        )

    with open(p / 'main' / f'{name}.c', 'w') as fw:
        fw.write(
            """#include <stdio.h>
void app_main(void) {}
"""
        )

    return p


@@ -0,0 +1,100 @@
# SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
from pathlib import Path

from idf_pytest.script import SUPPORTED_TARGETS, get_all_apps

from conftest import create_project


def test_get_all_apps_non(tmp_path: Path) -> None:
    create_project('foo', tmp_path)
    create_project('bar', tmp_path)

    test_related_apps, non_test_related_apps = get_all_apps([str(tmp_path)])

    assert test_related_apps == set()
    assert len(non_test_related_apps) == 2 * len(SUPPORTED_TARGETS)


def test_get_all_apps_single_dut_test_script(tmp_path: Path) -> None:
    create_project('foo', tmp_path)
    with open(tmp_path / 'foo' / 'pytest_get_all_apps_single_dut_test_script.py', 'w') as fw:
        fw.write(
            """import pytest

@pytest.mark.esp32
@pytest.mark.esp32s2
def test_foo(dut):
    pass
"""
        )
    create_project('bar', tmp_path)

    test_related_apps, non_test_related_apps = get_all_apps([str(tmp_path)], target='all')

    assert len(test_related_apps) == 2
    assert len(non_test_related_apps) == 2 * len(SUPPORTED_TARGETS) - 2


def test_get_all_apps_multi_dut_test_script(tmp_path: Path) -> None:
    create_project('foo', tmp_path)
    with open(tmp_path / 'foo' / 'pytest_get_all_apps_multi_dut_test_script.py', 'w') as fw:
        fw.write(
            """import pytest

@pytest.mark.parametrize(
    'count, target', [
        (2, 'esp32s2|esp32s3'),
        (3, 'esp32|esp32s3|esp32'),
    ], indirect=True
)
def test_foo(dut):
    pass
"""
        )

    test_related_apps, non_test_related_apps = get_all_apps([str(tmp_path)], target='all')

    assert len(test_related_apps) == 3  # 32, s2, s3
    assert len(non_test_related_apps) == len(SUPPORTED_TARGETS) - 3


def test_get_all_apps_modified_pytest_script(tmp_path: Path) -> None:
    create_project('foo', tmp_path)
    create_project('bar', tmp_path)

    (tmp_path / 'pytest_get_all_apps_modified_pytest_script.py').write_text(
        """import pytest
import os

@pytest.mark.parametrize('count, target', [(2, 'esp32')], indirect=True)
@pytest.mark.parametrize('app_path', [
    '{}|{}'.format(os.path.join(os.path.dirname(__file__), 'foo'), os.path.join(os.path.dirname(__file__), 'bar')),
], indirect=True
)
def test_multi_foo_bar(dut):
    pass
""",
        encoding='utf-8',
    )

    test_related_apps, non_test_related_apps = get_all_apps([str(tmp_path)], target='all')
    assert len(test_related_apps) == 2  # foo-esp32, bar-esp32
    assert len(non_test_related_apps) == 2 * len(SUPPORTED_TARGETS) - 2

    test_related_apps, non_test_related_apps = get_all_apps(
        [str(tmp_path)], target='all', modified_files=[], modified_components=[]
    )
    assert len(test_related_apps) == 0
    assert len(non_test_related_apps) == 0

    test_related_apps, non_test_related_apps = get_all_apps(
        [str(tmp_path)],
        target='all',
        modified_files=[str(tmp_path / 'pytest_get_all_apps_modified_pytest_script.py')],
        modified_components=[],
    )
    assert len(test_related_apps) == 2
    assert len(non_test_related_apps) == 0


@@ -1,18 +1,10 @@
 # SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD
 # SPDX-License-Identifier: Apache-2.0
-import os
-import sys
 from pathlib import Path
 
 from idf_pytest.constants import CollectMode
+from idf_pytest.script import get_pytest_cases
 
-try:
-    from idf_pytest.script import get_pytest_cases
-except ImportError:
-    sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
-
-    from idf_pytest.script import get_pytest_cases
 
 TEMPLATE_SCRIPT = '''
 import pytest


@@ -3,7 +3,7 @@
 # ci
 coverage
-idf-build-apps
+idf-build-apps~=2.0.0rc1
 jsonschema
 junit_xml
 python-gitlab


@@ -10,7 +10,7 @@ pytest-timeout
 pytest-ignore-test-results
 
 # build
-idf-build-apps
+idf-build-apps~=2.0.0rc1
 
 # dependencies in pytest test scripts
 scapy