mirror of
https://github.com/espressif/esp-idf.git
synced 2024-10-05 20:47:46 -04:00
Merge branch 'ci/detect_file_changes_to_assign_jobs' into 'master'
CI: Detect file changes to assign jobs See merge request espressif/esp-idf!10652
This commit is contained in:
commit
22cc1121fb
@ -38,6 +38,6 @@ indent_style = space
|
||||
indent_size = 4
|
||||
max_line_length = 120
|
||||
|
||||
[{*.sh,*.yml}]
|
||||
[{*.sh,*.yml,*.yaml}]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
@ -2,23 +2,24 @@ stages:
|
||||
- pre_check
|
||||
- build
|
||||
- assign_test
|
||||
- build_doc
|
||||
- host_test
|
||||
- target_test
|
||||
- test_deploy
|
||||
- post_check
|
||||
- deploy
|
||||
- post_deploy
|
||||
|
||||
# pipelines will not be created in such two cases:
|
||||
# 1. MR push
|
||||
# 2. push not on "master/release" branches, and not tagged
|
||||
# This behavior could be changed after the `rules: changes` feature is implemented
|
||||
workflow:
|
||||
rules:
|
||||
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
|
||||
when: never
|
||||
# Disable those non-protected push triggered pipelines
|
||||
- if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^v\d+\.\d+(\.\d+)?($|-)/ && $CI_PIPELINE_SOURCE == "push"'
|
||||
when: never
|
||||
- if: '$CI_COMMIT_TITLE =~ /^test[ _]ci:/'
|
||||
when: always
|
||||
- if: '$CI_MERGE_REQUEST_IID && $CI_MERGE_REQUEST_TITLE =~ /^wip|draft:/i'
|
||||
when: never
|
||||
- if: '$CI_COMMIT_TITLE =~ /^wip|draft/i'
|
||||
when: never
|
||||
- when: always
|
||||
|
||||
variables:
|
||||
@ -55,7 +56,7 @@ variables:
|
||||
BOT_DOCKER_IMAGE_TAG: ":latest"
|
||||
|
||||
# target test config file, used by assign test job
|
||||
CI_TARGET_TEST_CONFIG_FILE: "$CI_PROJECT_DIR/tools/ci/config/target-test.yml"
|
||||
CI_TARGET_TEST_CONFIG_FILE: "$CI_PROJECT_DIR/.gitlab/ci/target-test.yml"
|
||||
|
||||
# target test repo parameters
|
||||
TEST_ENV_CONFIG_REPO: "https://gitlab-ci-token:${BOT_TOKEN}@${CI_SERVER_HOST}:${CI_SERVER_PORT}/qa/ci-test-runner-configs.git"
|
||||
@ -76,38 +77,27 @@ variables:
|
||||
before_script:
|
||||
- source tools/ci/utils.sh
|
||||
- source tools/ci/setup_python.sh
|
||||
- apply_bot_filter
|
||||
- add_gitlab_ssh_keys
|
||||
- source tools/ci/configure_ci_environment.sh
|
||||
- *setup_tools_unless_target_test
|
||||
- fetch_submodules
|
||||
|
||||
# used for check scripts which we want to run unconditionally
|
||||
.before_script_lesser_nofilter:
|
||||
before_script:
|
||||
- echo "Not setting up GitLab key, not fetching submodules, not applying bot filter"
|
||||
- source tools/ci/utils.sh
|
||||
- source tools/ci/setup_python.sh
|
||||
- source tools/ci/configure_ci_environment.sh
|
||||
|
||||
# used for everything else where we want to do no prep, except for bot filter
|
||||
.before_script_lesser:
|
||||
.before_script_no_sync_submodule:
|
||||
before_script:
|
||||
- echo "Not setting up GitLab key, not fetching submodules"
|
||||
- source tools/ci/utils.sh
|
||||
- source tools/ci/setup_python.sh
|
||||
- apply_bot_filter
|
||||
- source tools/ci/configure_ci_environment.sh
|
||||
|
||||
.before_script_slim:
|
||||
.before_script_minimal:
|
||||
before_script:
|
||||
- echo "Only load utils.sh inside"
|
||||
- echo "Only load utils.sh"
|
||||
- source tools/ci/utils.sh
|
||||
|
||||
.before_script_macos:
|
||||
before_script:
|
||||
- source tools/ci/utils.sh
|
||||
- apply_bot_filter
|
||||
- $IDF_PATH/tools/idf_tools.py install-python-env
|
||||
# On macOS, these tools need to be installed
|
||||
- $IDF_PATH/tools/idf_tools.py --non-interactive install cmake ninja
|
||||
@ -121,12 +111,12 @@ before_script:
|
||||
- fetch_submodules
|
||||
|
||||
include:
|
||||
- '/tools/ci/config/rules.yml'
|
||||
- '/tools/ci/config/pre_check.yml'
|
||||
- '/tools/ci/config/build.yml'
|
||||
- '/tools/ci/config/assign-test.yml'
|
||||
- '/tools/ci/config/host-test.yml'
|
||||
- '/tools/ci/config/target-test.yml'
|
||||
- '/tools/ci/config/post_check.yml'
|
||||
- '/tools/ci/config/deploy.yml'
|
||||
- '/tools/ci/config/post_deploy.yml'
|
||||
- '.gitlab/ci/rules.yml'
|
||||
- '.gitlab/ci/docs.yml'
|
||||
- '.gitlab/ci/static-code-analysis.yml'
|
||||
- '.gitlab/ci/pre_check.yml'
|
||||
- '.gitlab/ci/build.yml'
|
||||
- '.gitlab/ci/assign-test.yml'
|
||||
- '.gitlab/ci/host-test.yml'
|
||||
- '.gitlab/ci/target-test.yml'
|
||||
- '.gitlab/ci/deploy.yml'
|
||||
|
251
.gitlab/ci/README.md
Normal file
251
.gitlab/ci/README.md
Normal file
@ -0,0 +1,251 @@
|
||||
# IDF CI
|
||||
|
||||
- [IDF CI](#idf-ci)
|
||||
- [General Workflow](#general-workflow)
|
||||
- [What if Expected Jobs ARE NOT Created?](#what-if-expected-jobs-are-not-created)
|
||||
- [MR labels for additional jobs](#mr-labels-for-additional-jobs)
|
||||
- [Supported MR Labels](#supported-mr-labels)
|
||||
- [Usages](#usages)
|
||||
- [How to trigger a `detached` pipeline without pushing new commits?](#how-to-trigger-a-detached-pipeline-without-pushing-new-commits)
|
||||
- [How to Develop With `rules.yml`?](#how-to-develop-with-rulesyml)
|
||||
- [General Concepts](#general-concepts)
|
||||
- [How to Add a New `Job`?](#how-to-add-a-new-job)
|
||||
- [How to Add a New `Rules` Template?](#how-to-add-a-new-rules-template)
|
||||
- [How to Add a New `if` Anchor?](#how-to-add-a-new-if-anchor)
|
||||
- [Naming Rules](#naming-rules)
|
||||
- [Common Naming Rules](#common-naming-rules)
|
||||
- [`if` Anchors Naming Rules](#if-anchors-naming-rules)
|
||||
- [`rules` Template Naming Rules](#rules-template-naming-rules)
|
||||
- [Reusable Shell Script `tools/ci/utils.sh`](#reusable-shell-script-toolsciutilssh)
|
||||
- [Functions](#functions)
|
||||
- [CI Job Related](#ci-job-related)
|
||||
- [Shell Script Related](#shell-script-related)
|
||||
|
||||
## General Workflow
|
||||
|
||||
1. Push to a remote branch
|
||||
2. Create an MR, choose related labels (not required)
|
||||
3. A `detached` pipeline will be created.
|
||||
4. if you push a new commit, a new pipeline will be created automatically.
|
||||
|
||||
**Details:**
|
||||
|
||||
1. If an MR title starts with `WIP:` or `Draft:`, pushing a commit will NOT trigger a merge-request pipeline
|
||||
2. If a commit message starts with `test ci:`, pushing a commit will trigger a merge-request pipeline even when the MR title starts with `WIP:` or `Draft:`.
|
||||
3. If a commit message starts with `WIP:` or `Draft:`, pushing a commit will NOT trigger a pipeline
|
||||
|
||||
## What if Expected Jobs ARE NOT Created?
|
||||
|
||||
1. check the file patterns
|
||||
|
||||
If you found a job that is not running as expected with some file changes, a git commit to improve the `pattern` will be appreciated.
|
||||
|
||||
2. please add MR labels to run additional tests
|
||||
|
||||
## MR labels for additional jobs
|
||||
|
||||
### Supported MR Labels
|
||||
|
||||
- `build`
|
||||
- `build_docs`
|
||||
- `component_ut[_esp32/esp32s2/...]`
|
||||
- `custom_test[_esp32/esp32s2/...]`
|
||||
- `docker`
|
||||
- `docs`
|
||||
- `example_test[_esp32/esp32s2/...]`
|
||||
- `fuzzer_test`
|
||||
- `host_test`
|
||||
- `integration_test`
|
||||
- `iperf_stress_test`
|
||||
- `macos`
|
||||
- `macos_test`
|
||||
- `nvs_coverage`
|
||||
- `unit_test[_esp32/esp32s2/...]`
|
||||
- `weekend_test`
|
||||
- `windows`
|
||||
|
||||
There are two general labels (not recommended since these two labels will trigger a lot of jobs)
|
||||
|
||||
- `target_test`: includes all targets for `example_test`, `custom_test`, `component_ut`, `unit_test`, `integration_test`
|
||||
- `all_test`: includes all test labels
|
||||
|
||||
### Usages
|
||||
|
||||
We have two ways to run additional jobs
|
||||
|
||||
- Add these labels in the MR `labels`
|
||||
- Add these labels in the commit message (not the first line). For example:
|
||||
|
||||
```
|
||||
ci: detect file changes to assign jobs
|
||||
|
||||
test labels: example_test_esp32, custom_test_esp32
|
||||
```
|
||||
|
||||
The additional test labels line should start with `test label(s):` and the labels should be separated by space or comma.
|
||||
|
||||
### How to trigger a `detached` pipeline without pushing new commits?
|
||||
|
||||
Go to MR web page -> `Pipelines` tab -> click `Run pipeline` button
|
||||
|
||||
## How to Develop With `rules.yml`?
|
||||
|
||||
### General Concepts
|
||||
|
||||
- `pattern`: Defined in an array. A GitLab job will be created if the changed files in this MR matched one of the patterns. For example:
|
||||
|
||||
```yaml
|
||||
.patterns-python-files: &patterns-python-files
|
||||
- "**/*.py"
|
||||
```
|
||||
|
||||
- `label`: (deprecated). Defined in an if clause, similar as the previous bot command. A GitLab job will be created if the pipeline variables contains variables in `BOT_LABEL_xxx` format. For example:
|
||||
|
||||
```yaml
|
||||
.if-label-build_docs: &if-label-build_docs
|
||||
if: '$BOT_LABEL_BUILD_DOCS'
|
||||
```
|
||||
|
||||
- `title`: Defined in an if clause. A GitLab job will be created if this title included in the MR labels or in the commit message title. For example:
|
||||
|
||||
```yaml
|
||||
.if-title-docs: &if-title-docs
|
||||
if: '$CI_MERGE_REQUEST_LABELS =~ /^(?:\w+,)*docs(?:,\w+)*$/i || $CI_COMMIT_TITLE =~ /\((?:\w+\s+)*docs(?:\s+\w+)*\)$/i'
|
||||
```
|
||||
|
||||
- `rule`: A combination of various patterns, labels, and titles. It will be used by GitLab YAML `extends` keyword to tell GitLab in what conditions will this job be created. For example:
|
||||
|
||||
```yaml
|
||||
.rules:build:docs:
|
||||
rules:
|
||||
- <<: *if-protected
|
||||
- <<: *if-label-build
|
||||
- <<: *if-title-build
|
||||
- <<: *if-label-build_docs
|
||||
- <<: *if-title-build_docs
|
||||
- <<: *if-label-docs
|
||||
- <<: *if-title-docs
|
||||
- <<: *if-dev-push
|
||||
changes: *patterns-docs
|
||||
```
|
||||
|
||||
An example for GitLab job on how to use extends:
|
||||
|
||||
```yaml
|
||||
check_docs_lang_sync:
|
||||
extends:
|
||||
- .pre_check_job_template
|
||||
- .rules:build:docs
|
||||
script:
|
||||
- cd docs
|
||||
- ./check_lang_folder_sync.sh
|
||||
```
|
||||
|
||||
### How to Add a New `Job`?
|
||||
|
||||
check if there's a suitable `.rules:<rules-you-need>` template
|
||||
|
||||
1. if there is, put this in the job `extends`. All done, now you can close this window. (`extends` could be array or string)
|
||||
2. if there isn't
|
||||
1. check [How to Add a New `Rules` Template?](#how-to-add-a-new-rules-template), create a suitable one
|
||||
2. follow step 1
|
||||
|
||||
### How to Add a New `Rules` Template?
|
||||
|
||||
check if this rule is related to `labels`, `patterns`
|
||||
|
||||
1. if it is, please refer to [dependencies/README.md](./dependencies/README.md) and add new rules by auto-generating
|
||||
2. if it isn't, please continue reading
|
||||
|
||||
check if there's a suitable `.if-<if-anchor-you-need>` anchor
|
||||
|
||||
1. if there is, create a rule following [`rules` Template Naming Rules](#rules-template-naming-rules). For detailed information, please refer to [GitLab Documentation `rules-if`](https://docs.gitlab.com/ee/ci/yaml/README.html#rulesif). Here's an example.
|
||||
|
||||
```yaml
|
||||
.rules:dev:
|
||||
rules:
|
||||
- <<: *if-trigger
|
||||
- <<: *if-dev-push
|
||||
```
|
||||
|
||||
2. if there isn't
|
||||
|
||||
1. check [How to Add a New `if` Anchor?](#how-to-add-a-new-if-anchor), create a suitable one
|
||||
2. follow step 1
|
||||
|
||||
### How to Add a New `if` Anchor?
|
||||
|
||||
Create an `if` anchor following [`if` Anchors Naming Rules](#if-anchors-naming-rules). For detailed information about how to write the condition clause, please refer to [GitLab Documentation `only/except (advanced)`](https://docs.gitlab.com/ee/ci/yaml/README.html#onlyexcept-advanced). Here's an example.
|
||||
|
||||
```yaml
|
||||
.if-schedule: &if-schedule
|
||||
if: '$CI_PIPELINE_SOURCE == "schedule"'
|
||||
```
|
||||
|
||||
### Naming Rules
|
||||
|
||||
#### Common Naming Rules
|
||||
|
||||
if a phrase has multiple words, use `_` to concatenate them.
|
||||
|
||||
> e.g. `regular_test`
|
||||
|
||||
if a name has multiple phrases, use `-` to concatenate them.
|
||||
|
||||
> e.g. `regular_test-example_test`
|
||||
|
||||
#### `if` Anchors Naming Rules
|
||||
|
||||
- if it's a label: `.if-label-<label_name>`
|
||||
- if it's a ref: `.if-ref-<ref_name>`
|
||||
- if it's a branch: `.if-branch-<branch_name>`
|
||||
- if it's a tag: `.if-tag-<tag_name>`
|
||||
- if it's multi-type combination: `.if-ref-<release_name>-branch-<branch_name>`
|
||||
|
||||
**Common Phrases/Abbreviations**
|
||||
|
||||
- `no_label`
|
||||
|
||||
`$BOT_TRIGGER_WITH_LABEL == null`
|
||||
|
||||
- `protected`
|
||||
|
||||
`($CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_BRANCH =~ /^release\/v/ || $CI_COMMIT_TAG =~ /^v\d+\.\d+(\.\d+)?($|-)/)`
|
||||
|
||||
- `target_test`
|
||||
|
||||
a combination of `example_test`, `custom_test`, `unit_test`, `component_ut`, `integration_test` and all targets
|
||||
|
||||
#### `rules` Template Naming Rules
|
||||
|
||||
- if it's tag related: `.rules:tag:<tag_1>-<tag_2>`
|
||||
- if it's label related: `.rules:labels:<label_1>-<label_2>`
|
||||
- if it's test related: `.rules:test:<test_type>`
|
||||
- if it's build related: `.rules:build:<build_type>`
|
||||
- if it's pattern related: `.rules:patterns:<patterns>`
|
||||
|
||||
## Reusable Shell Script `tools/ci/utils.sh`
|
||||
|
||||
It is used to put all the reusable shell scripts as small functions. If you want to set `before_script: []` for your job, you can now set `extends: .before_script_minimal` instead. It will only run `source tools/ci/utils.sh`
|
||||
|
||||
If you're developing CI shell scripts, you can use these functions without `source`-ing them. They're already included in all `before_script`
|
||||
|
||||
To run these commands in shell script locally, place `source tools/ci/utils.sh` at the very beginning.
|
||||
|
||||
### Functions
|
||||
|
||||
#### CI Job Related
|
||||
|
||||
- `add_gitlab_ssh_keys`
|
||||
- `add_github_ssh_keys`
|
||||
- `add_doc_server_ssh_keys`
|
||||
- `fetch_submodules`
|
||||
- `get_all_submodules`
|
||||
|
||||
#### Shell Script Related
|
||||
|
||||
- `error`: log in red color
|
||||
- `warning`: log in orange color
|
||||
- `info`: log in green color
|
||||
- `run_cmd`: run the command with duration seconds info
|
||||
- `retry_failed`: run the command with duration seconds info, retry when failed
|
@ -1,6 +1,5 @@
|
||||
assign_test:
|
||||
extends:
|
||||
- .rules:assign_test:target_test-integration_test-weekend_test
|
||||
extends: .rules:test:any_test
|
||||
tags:
|
||||
- assign_test
|
||||
image: $CI_DOCKER_REGISTRY/ubuntu-test-env$BOT_DOCKER_IMAGE_TAG
|
@ -8,9 +8,7 @@
|
||||
dependencies: []
|
||||
|
||||
.build_template_app_template:
|
||||
extends:
|
||||
- .build_template
|
||||
- .rules:labels:build
|
||||
extends: .build_template
|
||||
variables:
|
||||
LOG_PATH: "${CI_PROJECT_DIR}/log_template_app"
|
||||
BUILD_PATH: "${CI_PROJECT_DIR}/build_template_app"
|
||||
@ -46,26 +44,16 @@
|
||||
fast_template_app:
|
||||
extends:
|
||||
- .build_template_app_template
|
||||
- .rules:build_tests:target_test-weekend_test
|
||||
- .rules:test:target_test
|
||||
stage: pre_check
|
||||
variables:
|
||||
BUILD_COMMAND_ARGS: "-p"
|
||||
|
||||
check_docs_gh_links:
|
||||
extends: .build_docs_template
|
||||
stage: pre_check
|
||||
variables:
|
||||
SUBMODULES_TO_FETCH: "none"
|
||||
script:
|
||||
- cd docs
|
||||
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 pip install -r requirements.txt
|
||||
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 ./build_docs.py gh-linkcheck
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
.build_ssc_template:
|
||||
extends:
|
||||
- .build_template
|
||||
- .rules:build_tests:integration_test
|
||||
- .rules:build:integration_test
|
||||
artifacts:
|
||||
paths:
|
||||
- SSC/ssc_bin
|
||||
@ -94,10 +82,8 @@ build_ssc_esp32c3:
|
||||
variables:
|
||||
TARGET_NAME: "ESP32C3"
|
||||
|
||||
.build_esp_idf_tests_cmake:
|
||||
extends:
|
||||
- .build_template
|
||||
- .rules:build_tests:unit_test
|
||||
.build_esp_idf_tests_cmake_template:
|
||||
extends: .build_template
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
@ -127,29 +113,35 @@ build_ssc_esp32c3:
|
||||
- python tools/UnitTestParser.py ${BUILD_PATH}
|
||||
|
||||
build_esp_idf_tests_cmake_esp32:
|
||||
extends: .build_esp_idf_tests_cmake
|
||||
extends:
|
||||
- .build_esp_idf_tests_cmake_template
|
||||
- .rules:build:unit_test-esp32
|
||||
variables:
|
||||
IDF_TARGET: esp32
|
||||
|
||||
build_esp_idf_tests_cmake_esp32s2:
|
||||
extends: .build_esp_idf_tests_cmake
|
||||
extends:
|
||||
- .build_esp_idf_tests_cmake_template
|
||||
- .rules:build:unit_test-esp32s2
|
||||
variables:
|
||||
IDF_TARGET: esp32s2
|
||||
|
||||
build_esp_idf_tests_cmake_esp32s3:
|
||||
extends: .build_esp_idf_tests_cmake
|
||||
extends:
|
||||
- .build_esp_idf_tests_cmake_template
|
||||
- .rules:build:unit_test-esp32s3
|
||||
variables:
|
||||
IDF_TARGET: esp32s3
|
||||
|
||||
build_esp_idf_tests_cmake_esp32c3:
|
||||
extends: .build_esp_idf_tests_cmake
|
||||
extends:
|
||||
- .build_esp_idf_tests_cmake_template
|
||||
- .rules:build:unit_test-esp32c3
|
||||
variables:
|
||||
IDF_TARGET: esp32c3
|
||||
|
||||
.build_examples_template:
|
||||
extends:
|
||||
- .build_template
|
||||
- .rules:build_tests:example_test-weekend_test
|
||||
extends: .build_template
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
@ -172,6 +164,7 @@ build_esp_idf_tests_cmake_esp32c3:
|
||||
build_examples_make:
|
||||
extends:
|
||||
- .build_examples_template
|
||||
- .rules:build:example_test-esp32
|
||||
# This is a workaround for a rarely encountered issue with building examples in CI.
|
||||
# Probably related to building of Kconfig in 'make clean' stage
|
||||
retry: 1
|
||||
@ -188,7 +181,7 @@ build_examples_make:
|
||||
IDF_TARGET: esp32 # currently we only support esp32
|
||||
|
||||
# same as above, but for CMake
|
||||
.build_examples_cmake:
|
||||
.build_examples_cmake_template:
|
||||
extends: .build_examples_template
|
||||
artifacts:
|
||||
paths:
|
||||
@ -210,27 +203,31 @@ build_examples_make:
|
||||
BUILD_SYSTEM: cmake
|
||||
|
||||
build_examples_cmake_esp32:
|
||||
extends: .build_examples_cmake
|
||||
extends:
|
||||
- .build_examples_cmake_template
|
||||
- .rules:build:example_test-esp32
|
||||
parallel: 10
|
||||
variables:
|
||||
IDF_TARGET: esp32
|
||||
|
||||
build_examples_cmake_esp32s2:
|
||||
extends: .build_examples_cmake
|
||||
extends:
|
||||
- .build_examples_cmake_template
|
||||
- .rules:build:example_test-esp32s2
|
||||
parallel: 8
|
||||
variables:
|
||||
IDF_TARGET: esp32s2
|
||||
|
||||
build_examples_cmake_esp32c3:
|
||||
extends: .build_examples_cmake
|
||||
extends:
|
||||
- .build_examples_cmake_template
|
||||
- .rules:build:example_test-esp32c3
|
||||
parallel: 8
|
||||
variables:
|
||||
IDF_TARGET: esp32c3
|
||||
|
||||
.build_test_apps:
|
||||
extends:
|
||||
- .build_examples_cmake
|
||||
- .rules:build_tests:custom_test-weekend_test
|
||||
.build_test_apps_template:
|
||||
extends: .build_examples_cmake_template
|
||||
variables:
|
||||
TEST_PREFIX: test_apps
|
||||
TEST_RELATIVE_DIR: tools/test_apps
|
||||
@ -239,96 +236,68 @@ build_examples_cmake_esp32c3:
|
||||
- ${IDF_PATH}/tools/ci/find_apps_build_apps.sh
|
||||
|
||||
build_test_apps_esp32:
|
||||
extends: .build_test_apps
|
||||
extends:
|
||||
- .build_test_apps_template
|
||||
- .rules:build:custom_test-esp32
|
||||
parallel: 8
|
||||
variables:
|
||||
IDF_TARGET: esp32
|
||||
|
||||
build_test_apps_esp32s2:
|
||||
extends: .build_test_apps
|
||||
extends:
|
||||
- .build_test_apps_template
|
||||
- .rules:build:custom_test-esp32s2
|
||||
parallel: 8
|
||||
variables:
|
||||
IDF_TARGET: esp32s2
|
||||
|
||||
build_test_apps_esp32s3:
|
||||
extends: .build_test_apps
|
||||
extends:
|
||||
- .build_test_apps_template
|
||||
- .rules:build:custom_test-esp32s3
|
||||
parallel: 8
|
||||
variables:
|
||||
IDF_TARGET: esp32s3
|
||||
|
||||
build_test_apps_esp32c3:
|
||||
extends: .build_test_apps
|
||||
extends:
|
||||
- .build_test_apps_template
|
||||
- .rules:build:custom_test-esp32c3
|
||||
parallel: 8
|
||||
variables:
|
||||
IDF_TARGET: esp32c3
|
||||
|
||||
.build_component_ut:
|
||||
extends:
|
||||
- .build_test_apps
|
||||
- .rules:build_tests:unit_test
|
||||
.build_component_ut_template:
|
||||
extends: .build_test_apps_template
|
||||
variables:
|
||||
TEST_PREFIX: component_ut
|
||||
TEST_RELATIVE_DIR: component_ut
|
||||
|
||||
build_component_ut_esp32:
|
||||
extends: .build_component_ut
|
||||
extends:
|
||||
- .build_component_ut_template
|
||||
- .rules:build:component_ut-esp32
|
||||
variables:
|
||||
IDF_TARGET: esp32
|
||||
|
||||
build_component_ut_esp32s2:
|
||||
extends: .build_component_ut
|
||||
extends:
|
||||
- .build_component_ut_template
|
||||
- .rules:build:component_ut-esp32s2
|
||||
variables:
|
||||
IDF_TARGET: esp32s2
|
||||
|
||||
build_component_ut_esp32c3:
|
||||
extends: .build_component_ut
|
||||
extends:
|
||||
- .build_component_ut_template
|
||||
- .rules:build:component_ut-esp32c3
|
||||
variables:
|
||||
IDF_TARGET: esp32c3
|
||||
|
||||
.build_docs_template:
|
||||
stage: build
|
||||
image: $ESP_IDF_DOC_ENV_IMAGE
|
||||
tags:
|
||||
- build_docs
|
||||
script:
|
||||
- cd docs
|
||||
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 pip install -r requirements.txt
|
||||
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 ./build_docs.py -bs $DOC_BUILDERS -l $DOCLANG -t $DOCTGT build
|
||||
parallel:
|
||||
matrix:
|
||||
- DOCLANG: [ "en", "zh_CN" ]
|
||||
DOCTGT: [ "esp32", "esp32s2", "esp32c3"]
|
||||
|
||||
build_docs_html:
|
||||
extends:
|
||||
- .build_docs_template
|
||||
- .rules:labels:build_docs
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- docs/_build/*/*/*.txt
|
||||
- docs/_build/*/*/html/*
|
||||
expire_in: 4 days
|
||||
variables:
|
||||
DOC_BUILDERS: "html"
|
||||
|
||||
build_docs_pdf:
|
||||
extends:
|
||||
- .build_docs_template
|
||||
- .rules:labels:build_docs-slim
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- docs/_build/*/*/latex/*
|
||||
expire_in: 4 days
|
||||
variables:
|
||||
DOC_BUILDERS: "latex"
|
||||
|
||||
.test_build_system_template:
|
||||
extends:
|
||||
- .build_template
|
||||
- .rules:build_tests:weekend_test
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
- .rules:build
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
@ -353,7 +322,7 @@ test_build_system_cmake_macos:
|
||||
extends:
|
||||
- .test_build_system_template
|
||||
- .before_script_macos
|
||||
- .rules:os:mac_os
|
||||
- .rules:build:macos
|
||||
tags:
|
||||
- macos_shell
|
||||
variables:
|
||||
@ -361,8 +330,8 @@ test_build_system_cmake_macos:
|
||||
|
||||
build_docker:
|
||||
extends:
|
||||
- .before_script_slim
|
||||
- .rules:protected-schedule
|
||||
- .before_script_minimal
|
||||
- .rules:build:docker
|
||||
stage: build
|
||||
image: espressif/docker-builder:1
|
||||
tags:
|
||||
@ -383,8 +352,8 @@ build_docker:
|
||||
|
||||
.test-on-windows:
|
||||
extends:
|
||||
- .before_script_slim
|
||||
- .rules:protected-schedule
|
||||
- .before_script_minimal
|
||||
- .rules:build:windows
|
||||
stage: build
|
||||
image: $CI_DOCKER_REGISTRY/esp32-toolchain-win-cross
|
||||
tags:
|
||||
@ -415,14 +384,10 @@ build_cmdlinerunner:
|
||||
TEST_DIR: tools/windows/tool_setup/cmdlinerunner
|
||||
|
||||
build_installer:
|
||||
extends:
|
||||
- .before_script_slim
|
||||
- .rules:protected-schedule
|
||||
extends: .test-on-windows
|
||||
# using a different stage here to be able to use artifacts from build_cmdlinerunner job
|
||||
stage: host_test
|
||||
image: $CI_DOCKER_REGISTRY/wine-innosetup:1
|
||||
tags:
|
||||
- build
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
needs:
|
||||
- build_cmdlinerunner
|
||||
@ -432,96 +397,9 @@ build_installer:
|
||||
|
||||
# This job builds template app with permutations of targets and optimization levels
|
||||
build_template_app:
|
||||
extends:
|
||||
- .build_template_app_template
|
||||
- .rules:build
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
extends: .build_template_app_template
|
||||
|
||||
# Sonarqube related jobs put here for this reason:
|
||||
# Here we have two jobs. code_quality_check and code_quality_report.
|
||||
#
|
||||
# code_quality_check will analyze the code changes between your MR and
|
||||
# code repo stored in sonarqube server. The analysis result is only shown in
|
||||
# the comments under this MR and won't be transferred to the server.
|
||||
#
|
||||
# code_quality_report will analyze and transfer both of the newly added code
|
||||
# and the analysis result to the server.
|
||||
#
|
||||
# Put in the front to ensure that the newly merged code can be stored in
|
||||
# sonarqube server ASAP, in order to avoid reporting unrelated code issues
|
||||
.sonar_scan_template:
|
||||
stage: build
|
||||
image:
|
||||
name: $CI_DOCKER_REGISTRY/sonarqube-scanner:2
|
||||
before_script:
|
||||
- source tools/ci/utils.sh
|
||||
- export PYTHONPATH="$CI_PROJECT_DIR/tools:$CI_PROJECT_DIR/tools/ci/python_packages:$PYTHONPATH"
|
||||
- fetch_submodules
|
||||
# Exclude the submodules, all paths ends with /**
|
||||
- export SUBMODULES=$(get_all_submodules)
|
||||
# get all exclude paths specified in tools/ci/sonar_exclude_list.txt | ignore lines start with # | xargs | replace all <space> to <comma>
|
||||
- export CUSTOM_EXCLUDES=$(cat $CI_PROJECT_DIR/tools/ci/sonar_exclude_list.txt | grep -v '^#' | xargs | sed -e 's/ /,/g')
|
||||
# Exclude the report dir
|
||||
- export EXCLUSIONS="$SUBMODULES,$REPORT_DIR/**,docs/_static/**,**/*.png,**/*.jpg"
|
||||
- python $NORMALIZE_CLANGTIDY_PY $CI_PROJECT_DIR/$REPORT_DIR/warnings.txt $CI_PROJECT_DIR/$REPORT_DIR/clang_tidy_report.txt $CI_PROJECT_DIR
|
||||
variables:
|
||||
GIT_DEPTH: 0
|
||||
NORMALIZE_CLANGTIDY_PY: $CI_PROJECT_DIR/tools/ci/normalize_clangtidy_path.py
|
||||
REPORT_DIR: examples/get-started/hello_world/tidybuild/report
|
||||
tags:
|
||||
- host_test
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
needs:
|
||||
- clang_tidy_check_regular
|
||||
|
||||
code_quality_check:
|
||||
extends:
|
||||
- .sonar_scan_template
|
||||
- .rules:trigger
|
||||
allow_failure: true
|
||||
script:
|
||||
- export CI_MR_IID=$(python ${CI_PROJECT_DIR}/tools/ci/ci_get_mr_info.py id ${CI_COMMIT_BRANCH})
|
||||
- export CI_MR_COMMITS=$(python ${CI_PROJECT_DIR}/tools/ci/ci_get_mr_info.py commits ${CI_COMMIT_BRANCH} | tr '\n' ',')
|
||||
# test if this branch have merge request, if not, exit 0
|
||||
- test -n "$CI_MR_IID" || exit 0
|
||||
- test -n "$CI_MR_COMMITS" || exit 0
|
||||
- sonar-scanner
|
||||
-Dsonar.analysis.mode=preview
|
||||
-Dsonar.host.url=$SONAR_HOST_URL
|
||||
-Dsonar.login=$SONAR_LOGIN
|
||||
-Dsonar.sources=$CI_PROJECT_DIR
|
||||
-Dsonar.sourceEncoding=UTF-8
|
||||
-Dsonar.projectKey=esp-idf
|
||||
-Dsonar.projectBaseDir=$CI_PROJECT_DIR
|
||||
-Dsonar.exclusions=$EXCLUSIONS
|
||||
-Dsonar.gitlab.project_id=$CI_PROJECT_ID
|
||||
-Dsonar.gitlab.commit_sha=$CI_MR_COMMITS
|
||||
-Dsonar.gitlab.ref_name=$CI_COMMIT_REF_NAME
|
||||
-Dsonar.gitlab.failure_notification_mode=exit-code
|
||||
-Dsonar.cxx.clangtidy.reportPath=$REPORT_DIR/clang_tidy_report.txt
|
||||
-Dsonar.cxx.includeDirectories=components,/usr/include
|
||||
-Dsonar.python.pylint_config=.pylintrc
|
||||
-Dsonar.gitlab.ci_merge_request_iid=$CI_MR_IID
|
||||
-Dsonar.gitlab.merge_request_discussion=true
|
||||
-Dsonar.branch.name=$CI_COMMIT_REF_NAME
|
||||
|
||||
code_quality_report:
|
||||
extends:
|
||||
- .sonar_scan_template
|
||||
- .rules:protected-schedule
|
||||
script:
|
||||
- sonar-scanner
|
||||
-Dsonar.host.url=$SONAR_HOST_URL
|
||||
-Dsonar.login=$SONAR_LOGIN
|
||||
-Dsonar.sources=$CI_PROJECT_DIR
|
||||
-Dsonar.sourceEncoding=UTF-8
|
||||
-Dsonar.projectKey=esp-idf
|
||||
-Dsonar.projectBaseDir=$CI_PROJECT_DIR
|
||||
-Dsonar.exclusions=$EXCLUSIONS
|
||||
-Dsonar.gitlab.project_id=$CI_PROJECT_ID
|
||||
-Dsonar.gitlab.commit_sha=$CI_COMMIT_SHA
|
||||
-Dsonar.gitlab.ref_name=$CI_COMMIT_REF_NAME
|
||||
-Dsonar.cxx.clangtidy.reportPath=$REPORT_DIR/clang_tidy_report.txt
|
||||
-Dsonar.cxx.includeDirectories=components,/usr/include
|
||||
-Dsonar.python.pylint_config=.pylintrc
|
||||
-Dsonar.branch.name=$CI_COMMIT_REF_NAME
|
100
.gitlab/ci/dependencies/README.md
Normal file
100
.gitlab/ci/dependencies/README.md
Normal file
@ -0,0 +1,100 @@
|
||||
# How the `generate_rules.py` works
|
||||
|
||||
## Functionalities
|
||||
|
||||
This script can do only two things:
|
||||
|
||||
1. Auto-generate some labels/titles/rules we need and update them in `rules.yml`
|
||||
2. Generate a dependency tree graph
|
||||
|
||||
## Schema
|
||||
|
||||
This file only uses basic YAML grammar and has nothing to do with the GitLab version YAML file.
|
||||
|
||||
It has five custom keywords:
|
||||
|
||||
- `matrix`: An array of sub-arrays, used to replicate rules by formatting strings. You can use the format string everywhere, it will be formatted recursively
|
||||
- `labels`: An array of `labels`. Also indicates `titles` with the same names
|
||||
- `patterns`: An array of `patterns`. Patterns that not included
|
||||
- `included_in`: An array of other `rule` names. It indicates the `labels` and `patterns` will be included in all specified `rules` as well
|
||||
- `deploy`: An array of strings, used to replicate rules by adding postfix `-<item in deploy array>`. It indicates the extra `label` used in `rules`, which will explain later.
|
||||
|
||||
## How to use this file to generate `rules.yml`
|
||||
|
||||
Let's take a complicated example to help understand the process
|
||||
|
||||
```yaml
|
||||
"test-{0}-{1}":
|
||||
matrix:
|
||||
- [a, b]
|
||||
- [c, d]
|
||||
labels:
|
||||
- "{0}-{1}"
|
||||
patterns:
|
||||
- "{0}"
|
||||
- pattern-not-exist
|
||||
included_in:
|
||||
- build-{0}
|
||||
```
|
||||
|
||||
1. expand the mapping dicts defined by `matrix`
|
||||
|
||||
After this step, it will turn into 4 dicts:
|
||||
|
||||
| key | labels | patterns | included_in |
|
||||
| -------- | ------ | -------- | ----------- |
|
||||
| test-a-c | a-c | a | build-a |
|
||||
| test-a-d | a-d | a | build-a |
|
||||
| test-b-c | b-c | b | build-b |
|
||||
| test-b-d | b-d | b | build-b |
|
||||
|
||||
**Advanced Usage: You can overwrite a mapping by declaring it again later**, For example:
|
||||
|
||||
If we concatenate this part to the previous example,
|
||||
|
||||
```yaml
|
||||
# ... The same as the previous example
|
||||
|
||||
test-a-c:
|
||||
labels:
|
||||
- overwrite
|
||||
```
|
||||
|
||||
`rule` `test-a-c` will be turned into:
|
||||
|
||||
| key | labels |
|
||||
| -------- | --------- |
|
||||
| test-a-c | overwrite |
|
||||
|
||||
**Mappings with the keyword `deploy` will also replicate by adding a postfix `-<item in deploy array>` to the mapping key**
|
||||
|
||||
2. create rules by `included_in`
|
||||
|
||||
After this step, it will turn into 6 mapping dicts:
|
||||
|
||||
| key | labels | patterns |
|
||||
| -------- | -------- | -------- |
|
||||
| test-a-c | a-c | a |
|
||||
| test-a-d | a-d | a |
|
||||
| test-b-c | b-c | b |
|
||||
| test-b-d | b-d | b |
|
||||
| build-a | a-c, a-d | a |
|
||||
| build-b | b-c, b-d | b |
|
||||
|
||||
3. replace the auto-generated region in `rules.yml` with `labels`, `titles`, and `rules`. Each mapping will generate a `rule` and all the required labels/titles. `patterns` are pre-defined in `rules.yml` and could not be generated automatically. If a mapping is using a `pattern` undefined, the `pattern` will be ignored.
|
||||
|
||||
- If a mapping key has postfix '-preview', no `if-protected-xxx` clause will be added
|
||||
- else if a mapping key has postfix '-production', `if-protected-no_label` clause will be added
|
||||
- else: an `if-protected` clause will be added
|
||||
|
||||
## Graph
|
||||
|
||||
All `label` nodes are in green, `pattern` nodes are in cyan, `rule` nodes are in blue
|
||||
|
||||
### Requirements
|
||||
|
||||
There are a few extra dependencies while generating the dependency tree graph, please refer to [pygraphviz](https://github.com/pygraphviz/pygraphviz/blob/master/INSTALL.txt) documentation to install both `graphviz` and `pygraphviz`
|
||||
|
||||
### CLI usage
|
||||
|
||||
`python generate_rules.py --graph OUTPUT_PATH`
|
142
.gitlab/ci/dependencies/dependencies.yml
Normal file
142
.gitlab/ci/dependencies/dependencies.yml
Normal file
@ -0,0 +1,142 @@
|
||||
.all_targets: &all_targets
|
||||
- esp32
|
||||
- esp32s2
|
||||
- esp32s3
|
||||
- esp32c3
|
||||
|
||||
.target_test: &target_test
|
||||
- example_test
|
||||
- custom_test
|
||||
- unit_test
|
||||
- component_ut
|
||||
|
||||
"build:{0}-{1}":
|
||||
matrix:
|
||||
- *target_test
|
||||
- *all_targets
|
||||
labels:
|
||||
- build
|
||||
patterns:
|
||||
- build
|
||||
|
||||
"build:example_test-esp32":
|
||||
labels:
|
||||
- build
|
||||
- weekend_test # only have esp32 jobs
|
||||
- iperf_stress_test # only have esp32 jobs
|
||||
patterns:
|
||||
- build
|
||||
- example_test
|
||||
|
||||
"build:{0}":
|
||||
matrix:
|
||||
- [windows, docker]
|
||||
labels:
|
||||
- build
|
||||
- "{0}"
|
||||
patterns:
|
||||
- build
|
||||
- "{0}"
|
||||
|
||||
"build:macos":
|
||||
labels:
|
||||
- build
|
||||
- macos
|
||||
- macos_test # for backward compatibility
|
||||
patterns:
|
||||
- build
|
||||
- macos
|
||||
|
||||
"build:docs":
|
||||
labels:
|
||||
- build
|
||||
- docs
|
||||
- build_docs # for backward compatibility
|
||||
patterns:
|
||||
- docs
|
||||
deploy:
|
||||
- preview
|
||||
- production
|
||||
|
||||
"build":
|
||||
labels:
|
||||
- build
|
||||
patterns:
|
||||
- build
|
||||
|
||||
"test:{0}-{1}":
|
||||
matrix:
|
||||
- *target_test
|
||||
- *all_targets
|
||||
labels:
|
||||
- "{0}"
|
||||
- "{0}_{1}"
|
||||
patterns:
|
||||
- "{0}"
|
||||
- build
|
||||
- "build-{0}"
|
||||
included_in:
|
||||
- "build:{0}-{1}"
|
||||
- test:target_test
|
||||
- test:any_test
|
||||
|
||||
"test:component_ut-{0}":
|
||||
matrix:
|
||||
- *all_targets
|
||||
labels:
|
||||
- component_ut
|
||||
- "component_ut_{0}"
|
||||
- unit_test
|
||||
- "unit_test_{0}"
|
||||
patterns:
|
||||
- component_ut
|
||||
- build
|
||||
- "build-component_ut-{0}"
|
||||
included_in:
|
||||
- "build:component_ut-{0}"
|
||||
- test:target_test
|
||||
- test:any_test
|
||||
|
||||
# due to the lack of runners, c3 tests will only be triggered by label
|
||||
"test:unit_test-esp32c3":
|
||||
labels:
|
||||
- unit_test_esp32c3
|
||||
patterns:
|
||||
- unit_test
|
||||
- build
|
||||
- "build-unit_test-esp32c3"
|
||||
included_in:
|
||||
- "build:unit_test-esp32c3"
|
||||
|
||||
"test:integration_test":
|
||||
labels:
|
||||
- "integration_test"
|
||||
patterns:
|
||||
- "integration_test"
|
||||
included_in:
|
||||
- "build:integration_test"
|
||||
- test:target_test
|
||||
- test:any_test
|
||||
|
||||
"test:host_test":
|
||||
labels:
|
||||
- host_test
|
||||
patterns:
|
||||
- host_test
|
||||
included_in:
|
||||
- test:any_test
|
||||
|
||||
"labels:{0}":
|
||||
matrix:
|
||||
- [weekend_test, iperf_stress_test, nvs_coverage]
|
||||
labels:
|
||||
- "{0}"
|
||||
included_in:
|
||||
- test:any_test
|
||||
|
||||
"labels:fuzzer_test-weekend_test":
|
||||
labels:
|
||||
- fuzzer_test
|
||||
- weekend_test
|
||||
included_in:
|
||||
- test:any_test
|
303
.gitlab/ci/dependencies/generate_rules.py
Executable file
303
.gitlab/ci/dependencies/generate_rules.py
Executable file
@ -0,0 +1,303 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright 2021 Espressif Systems (Shanghai) CO LTD
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import argparse
|
||||
import inspect
|
||||
import os
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from itertools import product
|
||||
|
||||
try:
|
||||
import pygraphviz as pgv
|
||||
except ImportError: # used when pre-commit, skip generating image
|
||||
pass
|
||||
|
||||
import yaml
|
||||
|
||||
IDF_PATH = os.path.abspath(os.getenv('IDF_PATH', os.path.join(os.path.dirname(__file__), '..', '..', '..')))
|
||||
|
||||
|
||||
def _list(str_or_list):
|
||||
if isinstance(str_or_list, str):
|
||||
return [str_or_list]
|
||||
elif isinstance(str_or_list, list):
|
||||
return str_or_list
|
||||
else:
|
||||
raise ValueError('Wrong type: {}. Only supports str or list.'.format(type(str_or_list)))
|
||||
|
||||
|
||||
def _format_nested_dict(_dict, f_tuple):
|
||||
res = {}
|
||||
for k, v in _dict.items():
|
||||
k = k.split('__')[0]
|
||||
if isinstance(v, dict):
|
||||
v = _format_nested_dict(v, f_tuple)
|
||||
elif isinstance(v, list):
|
||||
v = _format_nested_list(v, f_tuple)
|
||||
elif isinstance(v, str):
|
||||
v = v.format(*f_tuple)
|
||||
res[k.format(*f_tuple)] = v
|
||||
return res
|
||||
|
||||
|
||||
def _format_nested_list(_list, f_tuple):
|
||||
res = []
|
||||
for item in _list:
|
||||
if isinstance(item, list):
|
||||
item = _format_nested_list(item, f_tuple)
|
||||
elif isinstance(item, dict):
|
||||
item = _format_nested_dict(item, f_tuple)
|
||||
elif isinstance(item, str):
|
||||
item = item.format(*f_tuple)
|
||||
res.append(item)
|
||||
return res
|
||||
|
||||
|
||||
class RulesWriter:
    """Rewrites the auto-generated section of ``rules.yml``.

    Rule definitions come from ``dependencies.yml``: each top-level mapping may
    declare ``matrix`` (expanded into multiple rules), ``labels``, ``patterns``,
    ``included_in`` (propagates this rule's labels/patterns to other rules) and
    ``deploy`` (replicates the rule once per item with a ``-<item>`` postfix).
    """

    # Everything after this marker in rules.yml is regenerated on each run.
    AUTO_GENERATE_MARKER = inspect.cleandoc(r'''
        ##################
        # Auto Generated #
        ##################
    ''')

    # One ".if-label-X" anchor per label; {1} is the label in env-var form
    # (see bot_label_str).
    LABEL_TEMPLATE = inspect.cleandoc(r'''
        .if-label-{0}: &if-label-{0}
          if: '$BOT_LABEL_{1}'
    ''')
    # One ".if-title-X" anchor per label: matches the MR label list or a
    # "test labels: ..." line in the commit description, case-insensitively.
    TITLE_TEMPLATE = inspect.cleandoc(r'''
        .if-title-{0}: &if-title-{0}
          if: '$CI_MERGE_REQUEST_LABELS =~ /^(?:\w+,)*{0}(?:,\w+)*$/i || $CI_COMMIT_DESCRIPTION =~ /test labels?: (?:\w+[, ]+)*{0}(?:[, ]+\w+)*/i'
    ''')

    # Pre-rendered clause lines for the generated rules.
    # NOTE(review): leading indentation reconstructed from the generated
    # rules.yml layout — confirm against the committed file.
    RULE_NORM = '    - <<: *if-protected'
    RULE_PROD = '    - <<: *if-protected-no_label'
    RULE_LABEL_TEMPLATE = '    - <<: *if-label-{0}'
    RULE_TITLE_TEMPLATE = '    - <<: *if-title-{0}'
    RULE_PATTERN_TEMPLATE = '    - <<: *if-dev-push\n' \
                            '      changes: *patterns-{0}'
    RULES_TEMPLATE = inspect.cleandoc(r"""
        .rules:{0}:
          rules:
        {1}
    """)

    # Fields merged as sets when rules reference each other via "included_in".
    KEYWORDS = ['labels', 'patterns']

    def __init__(self, rules_yml, depend_yml):  # type: (str, str) -> None
        self.rules_yml = rules_yml
        self.rules_cfg = yaml.load(open(rules_yml), Loader=yaml.FullLoader)

        self.full_cfg = yaml.load(open(depend_yml), Loader=yaml.FullLoader)
        # Keys starting with '.' are YAML anchors (e.g. ".all_targets"), not rules.
        self.cfg = {k: v for k, v in self.full_cfg.items() if not k.startswith('.')}
        self.cfg = self.expand_matrices()
        self.rules = self.expand_rules()

        self.graph = None

    def expand_matrices(self):  # type: () -> dict
        """
        Expand the matrix into different rules
        """
        res = {}
        for k, v in self.cfg.items():
            res.update(self._expand_matrix(k, v))

        # "deploy" replicates a rule once per deploy item, postfixed "-<item>".
        for k, v in self.cfg.items():
            deploy = v.get('deploy')
            if deploy:
                for item in _list(deploy):
                    res['{}-{}'.format(k, item)] = v
        return res

    @staticmethod
    def _expand_matrix(name, cfg):  # type: (str, dict) -> dict
        """
        Expand matrix into multi keys
        :param cfg: single rule dict
        :return:
        """
        default = {name: cfg}
        if not cfg:
            return default
        matrices = cfg.pop('matrix', None)
        if not matrices:
            return default

        # Every combination of matrix values yields one rule; "{0}", "{1}", ...
        # placeholders in keys/values are filled from the combination.
        res = {}
        for comb in product(*_list(matrices)):
            res.update(_format_nested_dict(default, comb))
        return res

    def expand_rules(self):  # type: () -> dict[str, dict[str, list]]
        """Merge labels/patterns along ``included_in`` links; return sorted lists."""
        res = defaultdict(lambda: defaultdict(set))  # type: dict[str, dict[str, set]]
        for k, v in self.cfg.items():
            for vk, vv in v.items():
                if vk in self.KEYWORDS:
                    res[k][vk] = set(_list(vv))
                else:
                    res[k][vk] = vv
            for key in self.KEYWORDS:  # provide empty set for missing field
                if key not in res[k]:
                    res[k][key] = set()

        # Propagate labels/patterns of each rule to every rule that it is
        # "included_in".
        for k, v in self.cfg.items():
            if not v:
                continue
            if 'included_in' in v:
                for item in _list(v['included_in']):
                    if 'labels' in v:
                        res[item]['labels'].update(_list(v['labels']))
                    if 'patterns' in v:
                        for _pat in _list(v['patterns']):
                            # Patterns must be pre-defined
                            if '.patterns-{}'.format(_pat) not in self.rules_cfg:
                                print('WARNING: pattern {} not exists'.format(_pat))
                                continue
                            res[item]['patterns'].add(_pat)

        # Sort every field for a deterministic rules.yml (stable diffs).
        sorted_res = defaultdict(lambda: defaultdict(list))  # type: dict[str, dict[str, list]]
        for k, v in res.items():
            for vk, vv in v.items():
                sorted_res[k][vk] = sorted(vv)
        return sorted_res

    def new_labels_titles_str(self):  # type: () -> str
        """Render the ".if-label-*" and ".if-title-*" anchors for all used labels."""
        _labels = set([])
        for k, v in self.cfg.items():
            if not v:
                continue  # shouldn't be possible
            labels = v.get('labels')
            if not labels:
                continue
            _labels.update(_list(labels))
        labels = sorted(_labels)

        res = ''
        res += '\n\n'.join([self._format_label(_label) for _label in labels])
        res += '\n\n'
        res += '\n\n'.join([self._format_title(_label) for _label in labels])
        return res

    @classmethod
    def _format_label(cls, label):  # type: (str) -> str
        """Render one ".if-label-*" anchor for *label*."""
        return cls.LABEL_TEMPLATE.format(label, cls.bot_label_str(label))

    @staticmethod
    def bot_label_str(label):  # type: (str) -> str
        """Map a label name to its BOT_LABEL_* environment variable suffix."""
        return label.upper().replace('-', '_')

    @classmethod
    def _format_title(cls, title):  # type: (str) -> str
        """Render one ".if-title-*" anchor for *title*."""
        return cls.TITLE_TEMPLATE.format(title)

    def new_rules_str(self):  # type: () -> str
        """Render all ".rules:*" mappings, sorted by rule name."""
        res = []
        for k, v in sorted(self.rules.items()):
            res.append(self.RULES_TEMPLATE.format(k, self._format_rule(k, v)))
        return '\n\n'.join(res)

    def _format_rule(self, name, cfg):  # type: (str, dict) -> str
        """Render the clause list for one rule.

        Postfix conventions: "-preview" gets no protected clause,
        "-production" gets only the "protected, no label" clause, everything
        else gets the plain protected clause plus label/title/pattern clauses.
        """
        _rules = []
        if name.endswith('-production'):
            _rules.append(self.RULE_PROD)
        else:
            if not name.endswith('-preview'):
                _rules.append(self.RULE_NORM)
            for label in cfg['labels']:
                _rules.append(self.RULE_LABEL_TEMPLATE.format(label))
                _rules.append(self.RULE_TITLE_TEMPLATE.format(label))
            for pattern in cfg['patterns']:
                if '.patterns-{}'.format(pattern) in self.rules_cfg:
                    _rules.append(self.RULE_PATTERN_TEMPLATE.format(pattern))
                else:
                    print('WARNING: pattern {} not exists'.format(pattern))
        return '\n'.join(_rules)

    def update_rules_yml(self):  # type: () -> bool
        """Rewrite the auto-generated section of rules.yml if it changed.

        Returns True (after rewriting the file) when the freshly generated
        content differs from what follows AUTO_GENERATE_MARKER in the file,
        False otherwise. The boolean doubles as the process exit status so
        pre-commit fails when the file was modified.
        """
        with open(self.rules_yml) as fr:
            file_str = fr.read()

        auto_generate_str = '\n{}\n\n{}\n'.format(self.new_labels_titles_str(), self.new_rules_str())
        rest, marker, old = file_str.partition(self.AUTO_GENERATE_MARKER)
        if old == auto_generate_str:
            return False
        else:
            print(self.rules_yml, 'has been modified. Please check')
            with open(self.rules_yml, 'w') as fw:
                fw.write(rest + marker + auto_generate_str)
            return True
|
||||
|
||||
|
||||
# Node colors used in the generated dependency graph (see build_graph).
LABEL_COLOR = 'green'
PATTERN_COLOR = 'cyan'
RULE_COLOR = 'blue'
|
||||
|
||||
|
||||
def build_graph(rules_dict):  # type: (dict[str, dict[str, list]]) -> pgv.AGraph
    """Build a left-to-right dependency graph of the expanded rules.

    Rule nodes are blue; label nodes (green) and pattern nodes (cyan) point at
    the rules they trigger; "included_in" links connect rule to rule.
    """
    graph = pgv.AGraph(directed=True, rankdir='LR', concentrate=True)

    for rule_name, cfg in rules_dict.items():
        if not cfg:
            continue
        included_in = cfg.get('included_in')
        if included_in:
            for parent in _list(included_in):
                graph.add_node(rule_name, color=RULE_COLOR)
                graph.add_node(parent, color=RULE_COLOR)
                graph.add_edge(rule_name, parent, color=RULE_COLOR)
        for tag in (cfg.get('labels') or ()):
            label_node = 'label:{}'.format(tag)
            graph.add_node(label_node, color=LABEL_COLOR)
            graph.add_edge(label_node, rule_name, color=LABEL_COLOR)
        for pat in (cfg.get('patterns') or ()):
            pattern_node = 'pattern:{}'.format(pat)
            graph.add_node(pattern_node, color=PATTERN_COLOR)
            graph.add_edge(pattern_node, rule_name, color=PATTERN_COLOR)

    return graph
|
||||
|
||||
|
||||
def output_graph(graph, output_path='output.png'):  # type: (pgv.AGraph, str) -> None
    """Lay out *graph* with dot and render it as a PNG.

    *output_path* is used as-is when it ends with '.png'; otherwise it is
    treated as a directory and the image is written to 'output.png' inside it.
    """
    graph.layout('dot')
    target = output_path if output_path.endswith('.png') else os.path.join(output_path, 'output.png')
    graph.draw(target)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('rules_yml', nargs='?', default=os.path.join(IDF_PATH, '.gitlab', 'ci', 'rules.yml'),
                        help='rules.yml file path')
    parser.add_argument('dependencies_yml', nargs='?', default=os.path.join(IDF_PATH, '.gitlab', 'ci', 'dependencies',
                                                                            'dependencies.yml'),
                        help='dependencies.yml file path')
    parser.add_argument('--graph',
                        help='Specify PNG image output path. Use this argument to generate dependency graph')
    args = parser.parse_args()

    writer = RulesWriter(args.rules_yml, args.dependencies_yml)
    file_modified = writer.update_rules_yml()

    if args.graph:
        dep_tree_graph = build_graph(writer.rules)
        # Fix: pass the user-supplied path through. Previously args.graph was
        # ignored and the image always landed in ./output.png, contradicting
        # the --graph help text.
        output_graph(dep_tree_graph, args.graph)

    # Non-zero exit when rules.yml was rewritten, so pre-commit flags it.
    sys.exit(file_modified)
|
73
.gitlab/ci/deploy.yml
Normal file
73
.gitlab/ci/deploy.yml
Normal file
@ -0,0 +1,73 @@
|
||||
.deploy_job_template:
|
||||
extends: .before_script_no_sync_submodule
|
||||
stage: deploy
|
||||
image: $CI_DOCKER_REGISTRY/esp32-ci-env$BOT_DOCKER_IMAGE_TAG
|
||||
tags:
|
||||
- deploy
|
||||
dependencies: []
|
||||
|
||||
push_to_github:
|
||||
extends:
|
||||
- .deploy_job_template
|
||||
- .rules:protected-no_label
|
||||
script:
|
||||
- add_github_ssh_keys
|
||||
- git remote remove github &>/dev/null || true
|
||||
- git remote add github git@github.com:espressif/esp-idf.git
|
||||
- tools/ci/push_to_github.sh
|
||||
|
||||
deploy_test_result:
|
||||
extends:
|
||||
- .deploy_job_template
|
||||
- .rules:ref:master-schedule-always
|
||||
image: $CI_DOCKER_REGISTRY/bot-env
|
||||
tags:
|
||||
- deploy_test
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- ${CI_PROJECT_DIR}/test-management/*.log
|
||||
# save all test logs as artifacts, make it easier to track errors
|
||||
- ${CI_PROJECT_DIR}/TEST_LOGS
|
||||
- $CI_PROJECT_DIR/$CI_COMMIT_SHA
|
||||
expire_in: 1 mos
|
||||
variables:
|
||||
UNIT_TEST_CASE_FILE: "${CI_PROJECT_DIR}/components/idf_test/unit_test/TestCaseAll.yml"
|
||||
BOT_ACCOUNT_CONFIG_FILE: "${CI_PROJECT_DIR}/test-management/Config/Account.local.yml"
|
||||
TEST_FW_PATH: "$CI_PROJECT_DIR/tools/tiny-test-fw"
|
||||
AUTO_TEST_SCRIPT_PATH: "${CI_PROJECT_DIR}/auto_test_script"
|
||||
script:
|
||||
- add_gitlab_ssh_keys
|
||||
- export GIT_SHA=$(echo ${CI_COMMIT_SHA} | cut -c 1-8)
|
||||
- export REV_COUNT=$(git rev-list --count HEAD)
|
||||
- export SUMMARY="IDF CI test result for $GIT_SHA (r${REV_COUNT})"
|
||||
# artifacts of job update_test_cases creates test-management folder
|
||||
# we need to remove it so we can clone test-management folder again
|
||||
- rm -rf test-management
|
||||
- retry_failed git clone $TEST_MANAGEMENT_REPO
|
||||
- python3 $CHECKOUT_REF_SCRIPT test-management test-management
|
||||
- cd test-management
|
||||
- echo $BOT_JIRA_ACCOUNT > ${BOT_ACCOUNT_CONFIG_FILE}
|
||||
# update test results
|
||||
- python3 ImportTestResult.py -r "$GIT_SHA (r${REV_COUNT})" -j $JIRA_TEST_MANAGEMENT_PROJECT -s "$SUMMARY" -l CI -p ${CI_PROJECT_DIR}/TEST_LOGS ${CI_PROJECT_DIR}/${CI_COMMIT_SHA} --pipeline_url ${CI_PIPELINE_URL}
|
||||
|
||||
check_submodule_sync:
|
||||
extends:
|
||||
- .deploy_job_template
|
||||
- .rules:protected
|
||||
tags:
|
||||
- github_sync
|
||||
retry: 2
|
||||
variables:
|
||||
GIT_STRATEGY: clone
|
||||
SUBMODULES_TO_FETCH: "none"
|
||||
PUBLIC_IDF_URL: "https://github.com/espressif/esp-idf.git"
|
||||
script:
|
||||
- git submodule deinit --force .
|
||||
# setting the default remote URL to the public one, to resolve relative location URLs
|
||||
- git config remote.origin.url ${PUBLIC_IDF_URL}
|
||||
# check if all submodules are correctly synced to public repository
|
||||
- git submodule init
|
||||
- git config --get-regexp '^submodule\..*\.url$' || true
|
||||
- git submodule update --recursive
|
||||
- echo "IDF was cloned from ${PUBLIC_IDF_URL} completely"
|
151
.gitlab/ci/docs.yml
Normal file
151
.gitlab/ci/docs.yml
Normal file
@ -0,0 +1,151 @@
|
||||
# stage: pre_check
|
||||
check_readme_links:
|
||||
extends:
|
||||
- .pre_check_job_template
|
||||
- .rules:build:docs
|
||||
tags: ["build", "amd64", "internet"]
|
||||
allow_failure: true
|
||||
script:
|
||||
- python ${IDF_PATH}/tools/ci/check_readme_links.py
|
||||
|
||||
check_docs_lang_sync:
|
||||
extends:
|
||||
- .pre_check_job_template
|
||||
- .rules:build:docs
|
||||
script:
|
||||
- cd docs
|
||||
- ./check_lang_folder_sync.sh
|
||||
|
||||
.build_docs_template:
|
||||
image: $ESP_IDF_DOC_ENV_IMAGE
|
||||
tags:
|
||||
- build_docs
|
||||
dependencies: []
|
||||
script:
|
||||
- cd docs
|
||||
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 pip install -r requirements.txt
|
||||
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 ./build_docs.py -bs $DOC_BUILDERS -l $DOCLANG -t $DOCTGT build
|
||||
parallel:
|
||||
matrix:
|
||||
- DOCLANG: ["en", "zh_CN"]
|
||||
DOCTGT: ["esp32", "esp32s2"]
|
||||
|
||||
check_docs_gh_links:
|
||||
extends:
|
||||
- .pre_check_job_template
|
||||
- .build_docs_template
|
||||
- .rules:build:docs
|
||||
script:
|
||||
- cd docs
|
||||
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 pip install -r requirements.txt
|
||||
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 ./build_docs.py gh-linkcheck
|
||||
|
||||
# stage: build_doc
|
||||
# Add this stage to let the build_docs job run in parallel with build
|
||||
.build_docs_build_stage_template:
|
||||
extends:
|
||||
- .build_docs_template
|
||||
- .rules:build:docs
|
||||
stage: build_doc
|
||||
needs:
|
||||
- job: check_docs_lang_sync
|
||||
artifacts: false
|
||||
- job: check_docs_gh_links
|
||||
artifacts: false
|
||||
|
||||
build_docs_html:
|
||||
extends:
|
||||
- .build_docs_build_stage_template
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- docs/_build/*/*/*.txt
|
||||
- docs/_build/*/*/html/*
|
||||
expire_in: 4 days
|
||||
variables:
|
||||
DOC_BUILDERS: "html"
|
||||
|
||||
build_docs_pdf:
|
||||
extends:
|
||||
- .build_docs_build_stage_template
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- docs/_build/*/*/latex/*
|
||||
expire_in: 4 days
|
||||
variables:
|
||||
DOC_BUILDERS: "latex"
|
||||
|
||||
.deploy_docs_template:
|
||||
extends:
|
||||
- .before_script_no_sync_submodule
|
||||
image: $ESP_IDF_DOC_ENV_IMAGE
|
||||
stage: test_deploy
|
||||
tags:
|
||||
- deploy
|
||||
- shiny
|
||||
variables:
|
||||
DOCS_BUILD_DIR: "${IDF_PATH}/docs/_build/"
|
||||
PYTHONUNBUFFERED: 1
|
||||
dependencies: []
|
||||
script:
|
||||
- add_doc_server_ssh_keys $DOCS_DEPLOY_PRIVATEKEY $DOCS_DEPLOY_SERVER $DOCS_DEPLOY_SERVER_USER
|
||||
- export GIT_VER=$(git describe --always)
|
||||
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 ${IDF_PATH}/tools/ci/deploy_docs.py
|
||||
|
||||
# stage: test_deploy
|
||||
deploy_docs_preview:
|
||||
extends:
|
||||
- .deploy_docs_template
|
||||
- .rules:build:docs-preview
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
needs:
|
||||
- build_docs_html
|
||||
- build_docs_pdf
|
||||
variables:
|
||||
TYPE: "preview"
|
||||
# older branches use DOCS_DEPLOY_KEY, DOCS_SERVER, DOCS_SERVER_USER, DOCS_PATH for preview server so we keep these names for 'preview'
|
||||
DOCS_DEPLOY_PRIVATEKEY: "$DOCS_DEPLOY_KEY"
|
||||
DOCS_DEPLOY_SERVER: "$DOCS_SERVER"
|
||||
DOCS_DEPLOY_SERVER_USER: "$DOCS_SERVER_USER"
|
||||
DOCS_DEPLOY_PATH: "$DOCS_PATH"
|
||||
DOCS_DEPLOY_URL_BASE: "https://$CI_DOCKER_REGISTRY/docs/esp-idf"
|
||||
|
||||
# stage: post_deploy
|
||||
deploy_docs_production:
|
||||
# The DOCS_PROD_* variables used by this job are "Protected" so these branches must all be marked "Protected" in Gitlab settings
|
||||
extends:
|
||||
- .deploy_docs_template
|
||||
- .rules:build:docs-production
|
||||
stage: post_deploy
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
needs: # ensure runs after push_to_github succeeded
|
||||
- build_docs_html
|
||||
- build_docs_pdf
|
||||
- job: push_to_github
|
||||
artifacts: false
|
||||
variables:
|
||||
TYPE: "preview"
|
||||
DOCS_DEPLOY_PRIVATEKEY: "$DOCS_PROD_DEPLOY_KEY"
|
||||
DOCS_DEPLOY_SERVER: "$DOCS_PROD_SERVER"
|
||||
DOCS_DEPLOY_SERVER_USER: "$DOCS_PROD_SERVER_USER"
|
||||
DOCS_DEPLOY_PATH: "$DOCS_PROD_PATH"
|
||||
DOCS_DEPLOY_URL_BASE: "https://docs.espressif.com/projects/esp-idf"
|
||||
|
||||
check_doc_links:
|
||||
extends:
|
||||
- .build_docs_template
|
||||
- .rules:protected
|
||||
stage: post_deploy
|
||||
tags: ["build", "amd64", "internet"]
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- docs/_build/*/*/*.txt
|
||||
- docs/_build/*/*/linkcheck/*.txt
|
||||
expire_in: 1 week
|
||||
allow_failure: true
|
||||
script:
|
||||
- cd docs
|
||||
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 pip install -r requirements.txt
|
||||
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 ./build_docs.py -l $DOCLANG -t $DOCTGT linkcheck
|
@ -1,30 +1,11 @@
|
||||
.host_test_template:
|
||||
extends: .rules:labels:host_test
|
||||
extends: .rules:test:host_test
|
||||
stage: host_test
|
||||
image: $CI_DOCKER_REGISTRY/esp32-ci-env$BOT_DOCKER_IMAGE_TAG
|
||||
tags:
|
||||
- host_test
|
||||
dependencies: []
|
||||
|
||||
.host_fuzzer_test_template:
|
||||
extends:
|
||||
- .host_test_template
|
||||
- .rules:labels:fuzzer_test-weekend_test-only
|
||||
image: $CI_DOCKER_REGISTRY/afl-fuzzer-test
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- ${FUZZER_TEST_DIR}/out/crashes
|
||||
- ${FUZZER_TEST_DIR}/fuzz_output.log
|
||||
expire_in: 1 week
|
||||
script:
|
||||
- export AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES=1 && export AFL_SKIP_CPUFREQ=1
|
||||
- cd ${FUZZER_TEST_DIR}
|
||||
# run AFL fuzzer for one hour
|
||||
- ( ( make ${FUZZER_PARAMS} fuzz | tee fuzz_output.log | grep -v '\(Fuzzing test case\|Entering queue cycle\)' ) || pkill sleep ) &
|
||||
- ( sleep 3600 || mkdir -p out/crashes/env_failed ) && pkill afl-fuz
|
||||
# check no crashes found
|
||||
- test -z "$(ls out/crashes/)" || exit 1
|
||||
needs: [] # run host_test jobs immediately
|
||||
|
||||
test_nvs_on_host:
|
||||
extends: .host_test_template
|
||||
@ -35,7 +16,7 @@ test_nvs_on_host:
|
||||
test_nvs_coverage:
|
||||
extends:
|
||||
- .host_test_template
|
||||
- .rules:labels:nvs_coverage-only
|
||||
- .rules:labels:nvs_coverage
|
||||
artifacts:
|
||||
paths:
|
||||
- components/nvs_flash/test_nvs_host/coverage_report
|
||||
@ -79,6 +60,26 @@ test_ldgen_on_host:
|
||||
variables:
|
||||
LC_ALL: C.UTF-8
|
||||
|
||||
.host_fuzzer_test_template:
|
||||
extends:
|
||||
- .host_test_template
|
||||
- .rules:labels:fuzzer_test-weekend_test
|
||||
image: $CI_DOCKER_REGISTRY/afl-fuzzer-test
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- ${FUZZER_TEST_DIR}/out/crashes
|
||||
- ${FUZZER_TEST_DIR}/fuzz_output.log
|
||||
expire_in: 1 week
|
||||
script:
|
||||
- export AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES=1 && export AFL_SKIP_CPUFREQ=1
|
||||
- cd ${FUZZER_TEST_DIR}
|
||||
# run AFL fuzzer for one hour
|
||||
- ( ( make ${FUZZER_PARAMS} fuzz | tee fuzz_output.log | grep -v '\(Fuzzing test case\|Entering queue cycle\)' ) || pkill sleep ) &
|
||||
- ( sleep 3600 || mkdir -p out/crashes/env_failed ) && pkill afl-fuz
|
||||
# check no crashes found
|
||||
- test -z "$(ls out/crashes/)" || exit 1
|
||||
|
||||
test_mdns_fuzzer_on_host:
|
||||
extends: .host_fuzzer_test_template
|
||||
variables:
|
@ -8,16 +8,10 @@
|
||||
.pre_check_job_template:
|
||||
extends:
|
||||
- .pre_check_base_template
|
||||
- .before_script_lesser_nofilter
|
||||
|
||||
.pre_check_job_template_with_filter:
|
||||
extends:
|
||||
- .pre_check_base_template
|
||||
- .before_script_lesser
|
||||
- .before_script_no_sync_submodule
|
||||
|
||||
.check_pre_commit_template:
|
||||
extends: .pre_check_job_template
|
||||
stage: pre_check
|
||||
image: "$CI_DOCKER_REGISTRY/esp-idf-pre-commit:1"
|
||||
before_script:
|
||||
- source tools/ci/utils.sh
|
||||
@ -37,14 +31,6 @@ check_pre_commit_MR:
|
||||
script:
|
||||
- python ${CI_PROJECT_DIR}/tools/ci/ci_get_mr_info.py files ${CI_COMMIT_BRANCH} | xargs pre-commit run --files
|
||||
|
||||
check_docs_lang_sync:
|
||||
extends: .pre_check_job_template
|
||||
variables:
|
||||
SUBMODULES_TO_FETCH: "none"
|
||||
script:
|
||||
- cd docs
|
||||
- ./check_lang_folder_sync.sh
|
||||
|
||||
check_version:
|
||||
# Don't run this for feature/bugfix branches, so that it is possible to modify
|
||||
# esp_idf_version.h in a branch before tagging the next version.
|
||||
@ -56,20 +42,20 @@ check_version:
|
||||
- tools/ci/check_idf_version.sh
|
||||
|
||||
check_examples_cmake_make:
|
||||
extends:
|
||||
- .pre_check_job_template_with_filter
|
||||
- .rules:dev
|
||||
extends: .pre_check_job_template
|
||||
script:
|
||||
- python ${IDF_PATH}/tools/ci/check_examples_cmake_make.py
|
||||
- python ${IDF_PATH}/tools/ci/check_examples_cmake_make.py
|
||||
|
||||
check_rom_api_header:
|
||||
extends: .pre_check_job_template_with_filter
|
||||
extends: .pre_check_job_template
|
||||
script:
|
||||
- tools/ci/check_examples_rom_header.sh
|
||||
- tools/ci/check_rom_apis.sh
|
||||
|
||||
check_python_style:
|
||||
extends: .pre_check_base_template
|
||||
extends:
|
||||
- .pre_check_base_template
|
||||
- .rules:patterns:python-files
|
||||
artifacts:
|
||||
when: on_failure
|
||||
paths:
|
||||
@ -78,8 +64,8 @@ check_python_style:
|
||||
script:
|
||||
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh python -m flake8 --config=$IDF_PATH/.flake8 --output-file=flake8_output.txt --tee --benchmark $IDF_PATH
|
||||
|
||||
check_kconfigs:
|
||||
extends: .pre_check_job_template_with_filter
|
||||
test_check_kconfigs:
|
||||
extends: .pre_check_job_template
|
||||
artifacts:
|
||||
when: on_failure
|
||||
paths:
|
||||
@ -92,7 +78,6 @@ check_kconfigs:
|
||||
expire_in: 1 week
|
||||
script:
|
||||
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh ${IDF_PATH}/tools/ci/test_check_kconfigs.py
|
||||
- ${IDF_PATH}/tools/ci/check_kconfigs.py
|
||||
|
||||
check_wifi_lib_md5:
|
||||
extends: .pre_check_base_template
|
||||
@ -107,25 +92,19 @@ check_wifi_lib_md5:
|
||||
check_public_headers:
|
||||
extends:
|
||||
- .pre_check_base_template
|
||||
- .rules:labels:build
|
||||
- .rules:build
|
||||
tags:
|
||||
- build
|
||||
script:
|
||||
- python tools/ci/check_public_headers.py --jobs 4 --prefix xtensa-esp32-elf-
|
||||
|
||||
.scan_build_tests:
|
||||
stage: pre_check
|
||||
scan_tests:
|
||||
extends:
|
||||
- .pre_check_base_template
|
||||
- .rules:test:target_test
|
||||
image: $CI_DOCKER_REGISTRY/ubuntu-test-env$BOT_DOCKER_IMAGE_TAG
|
||||
tags:
|
||||
- assign_test
|
||||
variables:
|
||||
CI_SCAN_TESTS_PY: ${CI_PROJECT_DIR}/tools/ci/python_packages/ttfw_idf/CIScanTests.py
|
||||
TEST_CONFIG_FILE: ${CI_PROJECT_DIR}/tools/ci/config/target-test.yml
|
||||
|
||||
scan_tests:
|
||||
extends:
|
||||
- .scan_build_tests
|
||||
- .rules:build_tests:target_test-weekend_test
|
||||
artifacts:
|
||||
paths:
|
||||
- $EXAMPLE_TEST_OUTPUT_DIR
|
||||
@ -137,59 +116,49 @@ scan_tests:
|
||||
TEST_APPS_TEST_DIR: ${CI_PROJECT_DIR}/tools/test_apps
|
||||
TEST_APPS_OUTPUT_DIR: ${CI_PROJECT_DIR}/tools/test_apps/test_configs
|
||||
COMPONENT_UT_OUTPUT_DIR: ${CI_PROJECT_DIR}/component_ut/test_configs
|
||||
PYTHON_VER: 3
|
||||
CI_SCAN_TESTS_PY: ${CI_PROJECT_DIR}/tools/ci/python_packages/ttfw_idf/CIScanTests.py
|
||||
script:
|
||||
- set_component_ut_vars
|
||||
- python $CI_SCAN_TESTS_PY example_test $EXAMPLE_TEST_DIR -b make --exclude examples/build_system/idf_as_lib -c $TEST_CONFIG_FILE -o $EXAMPLE_TEST_OUTPUT_DIR
|
||||
- python $CI_SCAN_TESTS_PY example_test $EXAMPLE_TEST_DIR -b cmake --exclude examples/build_system/idf_as_lib -c $TEST_CONFIG_FILE -o $EXAMPLE_TEST_OUTPUT_DIR
|
||||
- python $CI_SCAN_TESTS_PY test_apps $TEST_APPS_TEST_DIR -c $TEST_CONFIG_FILE -o $TEST_APPS_OUTPUT_DIR
|
||||
- python $CI_SCAN_TESTS_PY component_ut $COMPONENT_UT_DIRS --exclude $COMPONENT_UT_EXCLUDES -c $TEST_CONFIG_FILE -o $COMPONENT_UT_OUTPUT_DIR
|
||||
|
||||
check_readme_links:
|
||||
extends: .pre_check_job_template
|
||||
tags: [ "amd64", "deploy", "internet" ]
|
||||
allow_failure: true
|
||||
variables:
|
||||
PYTHON_VER: 3
|
||||
script:
|
||||
- python ${IDF_PATH}/tools/ci/check_readme_links.py
|
||||
|
||||
.clang_tidy_check_template:
|
||||
extends: .pre_check_base_template
|
||||
image: ${CI_DOCKER_REGISTRY}/clang-static-analysis
|
||||
artifacts:
|
||||
reports:
|
||||
junit: $IDF_PATH/output.xml
|
||||
when: always
|
||||
paths:
|
||||
- $IDF_PATH/examples/get-started/hello_world/tidybuild/report/*
|
||||
expire_in: 1 day
|
||||
script:
|
||||
- retry_failed git clone $IDF_ANALYSIS_UTILS static_analysis_utils && cd static_analysis_utils
|
||||
# Setup parameters of triggered/regular job
|
||||
- export TRIGGERED_RELATIVE=${BOT_LABEL_STATIC_ANALYSIS-} && export TRIGGERED_ABSOLUTE=${BOT_LABEL_STATIC_ANALYSIS_ALL-} && export TARGET_BRANCH=${BOT_CUSTOMIZED_REVISION-}
|
||||
- ./analyze.sh $IDF_PATH/examples/get-started/hello_world/ $IDF_PATH/tools/ci/static-analysis-rules.yml $IDF_PATH/output.xml
|
||||
|
||||
clang_tidy_check:
|
||||
extends: .clang_tidy_check_template
|
||||
variables:
|
||||
BOT_NEEDS_TRIGGER_BY_NAME: 1
|
||||
BOT_LABEL_STATIC_ANALYSIS: 1
|
||||
|
||||
clang_tidy_check_regular:
|
||||
extends: .clang_tidy_check_template
|
||||
|
||||
clang_tidy_check_all:
|
||||
extends: .clang_tidy_check_template
|
||||
variables:
|
||||
BOT_NEEDS_TRIGGER_BY_NAME: 1
|
||||
BOT_LABEL_STATIC_ANALYSIS_ALL: 1
|
||||
- run_cmd python $CI_SCAN_TESTS_PY example_test $EXAMPLE_TEST_DIR -b make --exclude examples/build_system/idf_as_lib -c $CI_TARGET_TEST_CONFIG_FILE -o $EXAMPLE_TEST_OUTPUT_DIR
|
||||
- run_cmd python $CI_SCAN_TESTS_PY example_test $EXAMPLE_TEST_DIR -b cmake --exclude examples/build_system/idf_as_lib -c $CI_TARGET_TEST_CONFIG_FILE -o $EXAMPLE_TEST_OUTPUT_DIR
|
||||
- run_cmd python $CI_SCAN_TESTS_PY test_apps $TEST_APPS_TEST_DIR -c $CI_TARGET_TEST_CONFIG_FILE -o $TEST_APPS_OUTPUT_DIR
|
||||
- run_cmd python $CI_SCAN_TESTS_PY component_ut $COMPONENT_UT_DIRS --exclude $COMPONENT_UT_EXCLUDES -c $CI_TARGET_TEST_CONFIG_FILE -o $COMPONENT_UT_OUTPUT_DIR
|
||||
|
||||
# For release tag pipelines only, make sure the tag was created with 'git tag -a' so it will update
|
||||
# the version returned by 'git describe'
|
||||
check_version_tag:
|
||||
extends:
|
||||
- .pre_check_job_template
|
||||
- .rules:tag:release-no_label
|
||||
- .rules:tag:release
|
||||
script:
|
||||
- (git cat-file -t $CI_COMMIT_REF_NAME | grep tag) || (echo "ESP-IDF versions must be annotated tags." && exit 1)
|
||||
|
||||
check_ut_cmake_make:
|
||||
extends: .pre_check_job_template
|
||||
tags:
|
||||
- build
|
||||
script:
|
||||
- tools/ci/check_ut_cmake_make.sh
|
||||
|
||||
check_artifacts_expire_time:
|
||||
extends: .pre_check_job_template
|
||||
script:
|
||||
# check if we have set expire time for all artifacts
|
||||
- python tools/ci/check_artifacts_expire_time.py
|
||||
|
||||
check_commit_msg:
|
||||
extends: .pre_check_job_template
|
||||
script:
|
||||
- git status
|
||||
- git log -n10 --oneline
|
||||
# commit start with "WIP: " need to be squashed before merge
|
||||
- 'git log --pretty=%s master.. -- | grep "^WIP: " && exit 1 || exit 0'
|
||||
|
||||
check_tools_file_patterns:
|
||||
extends: .pre_check_job_template
|
||||
image: $CI_DOCKER_REGISTRY/ubuntu-test-env$BOT_DOCKER_IMAGE_TAG
|
||||
variables:
|
||||
PYTHON_VER: 3.7.7
|
||||
script:
|
||||
- python tools/ci/check_tools_files_patterns.py
|
||||
allow_failure: true
|
1131
.gitlab/ci/rules.yml
Normal file
1131
.gitlab/ci/rules.yml
Normal file
File diff suppressed because it is too large
Load Diff
131
.gitlab/ci/static-code-analysis.yml
Normal file
131
.gitlab/ci/static-code-analysis.yml
Normal file
@ -0,0 +1,131 @@
|
||||
# pre_check stage
|
||||
clang_tidy_check:
|
||||
extends:
|
||||
- .pre_check_base_template
|
||||
- .rules:patterns:clang_tidy
|
||||
image: ${CI_DOCKER_REGISTRY}/clang-static-analysis
|
||||
artifacts:
|
||||
reports:
|
||||
junit: $IDF_PATH/output.xml
|
||||
when: always
|
||||
paths:
|
||||
- $IDF_PATH/examples/get-started/hello_world/tidybuild/report/*
|
||||
expire_in: 1 day
|
||||
script:
|
||||
- retry_failed git clone $IDF_ANALYSIS_UTILS static_analysis_utils && cd static_analysis_utils
|
||||
# Setup parameters of triggered/regular job
|
||||
- export TARGET_BRANCH=${BOT_CUSTOMIZED_REVISION-}
|
||||
- ./analyze.sh $IDF_PATH/examples/get-started/hello_world/ $IDF_PATH/tools/ci/static-analysis-rules.yml $IDF_PATH/output.xml
|
||||
|
||||
# build stage
|
||||
# Sonarqube related jobs put here for this reason:
|
||||
# Here we have two jobs. code_quality_check and code_quality_report.
|
||||
#
|
||||
# code_quality_check will analyze the code changes between your MR and
|
||||
# code repo stored in sonarqube server. The analysis result is only shown in
|
||||
# the comments under this MR and won't be transferred to the server.
|
||||
#
|
||||
# code_quality_report will analyze and transfer both of the newly added code
|
||||
# and the analysis result to the server.
|
||||
#
|
||||
# Put in the front to ensure that the newly merged code can be stored in
|
||||
# sonarqube server ASAP, in order to avoid reporting unrelated code issues
|
||||
.sonar_scan_template:
|
||||
stage: build
|
||||
image:
|
||||
name: $CI_DOCKER_REGISTRY/sonarqube-scanner:2
|
||||
before_script:
|
||||
- source tools/ci/utils.sh
|
||||
- export PYTHONPATH="$CI_PROJECT_DIR/tools:$CI_PROJECT_DIR/tools/ci/python_packages:$PYTHONPATH"
|
||||
- fetch_submodules
|
||||
# Exclude the submodules, all paths ends with /**
|
||||
- export SUBMODULES=$(get_all_submodules)
|
||||
# get all exclude paths specified in tools/ci/sonar_exclude_list.txt | ignore lines start with # | xargs | replace all <space> to <comma>
|
||||
- export CUSTOM_EXCLUDES=$(cat $CI_PROJECT_DIR/tools/ci/sonar_exclude_list.txt | grep -v '^#' | xargs | sed -e 's/ /,/g')
|
||||
# Exclude the report dir
|
||||
- export EXCLUSIONS="$SUBMODULES,$REPORT_DIR/**,docs/_static/**,**/*.png,**/*.jpg"
|
||||
- python $NORMALIZE_CLANGTIDY_PY $CI_PROJECT_DIR/$REPORT_DIR/warnings.txt $CI_PROJECT_DIR/$REPORT_DIR/clang_tidy_report.txt $CI_PROJECT_DIR
|
||||
variables:
|
||||
GIT_DEPTH: 0
|
||||
NORMALIZE_CLANGTIDY_PY: $CI_PROJECT_DIR/tools/ci/normalize_clangtidy_path.py
|
||||
REPORT_DIR: examples/get-started/hello_world/tidybuild/report
|
||||
tags:
|
||||
- host_test
|
||||
dependencies: # Here is not a hard dependency relationship, could be skipped when only python files changed. so we do not use "needs" here.
|
||||
- clang_tidy_check
|
||||
|
||||
code_quality_check:
|
||||
extends:
|
||||
- .sonar_scan_template
|
||||
- .rules:patterns:static-code-analysis-preview
|
||||
allow_failure: true
|
||||
script:
|
||||
- export CI_MERGE_REQUEST_COMMITS=$(python ${CI_PROJECT_DIR}/tools/ci/ci_get_mr_info.py commits ${CI_COMMIT_REF_NAME} | tr '\n' ',')
|
||||
# test if this branch have merge request, if not, exit 0
|
||||
- test -n "$CI_MERGE_REQUEST_IID" || exit 0
|
||||
- test -n "$CI_MERGE_REQUEST_COMMITS" || exit 0
|
||||
- sonar-scanner
|
||||
-Dsonar.analysis.mode=preview
|
||||
-Dsonar.branch.name=$CI_COMMIT_REF_NAME
|
||||
-Dsonar.cxx.clangtidy.reportPath=$REPORT_DIR/clang_tidy_report.txt
|
||||
-Dsonar.cxx.includeDirectories=components,/usr/include
|
||||
-Dsonar.exclusions=$EXCLUSIONS
|
||||
-Dsonar.gitlab.ci_merge_request_iid=$CI_MERGE_REQUEST_IID
|
||||
-Dsonar.gitlab.commit_sha=$CI_MERGE_REQUEST_COMMITS
|
||||
-Dsonar.gitlab.failure_notification_mode=exit-code
|
||||
-Dsonar.gitlab.merge_request_discussion=true
|
||||
-Dsonar.gitlab.project_id=$CI_PROJECT_ID
|
||||
-Dsonar.gitlab.ref_name=$CI_COMMIT_REF_NAME
|
||||
-Dsonar.host.url=$SONAR_HOST_URL
|
||||
-Dsonar.login=$SONAR_LOGIN
|
||||
-Dsonar.projectBaseDir=$CI_PROJECT_DIR
|
||||
-Dsonar.projectKey=esp-idf
|
||||
-Dsonar.python.pylint_config=.pylintrc
|
||||
-Dsonar.sourceEncoding=UTF-8
|
||||
-Dsonar.sources=$CI_PROJECT_DIR
|
||||
|
||||
code_quality_report:
|
||||
extends:
|
||||
- .sonar_scan_template
|
||||
- .rules:protected
|
||||
script:
|
||||
- sonar-scanner
|
||||
-Dsonar.branch.name=$CI_COMMIT_REF_NAME
|
||||
-Dsonar.cxx.clangtidy.reportPath=$REPORT_DIR/clang_tidy_report.txt
|
||||
-Dsonar.cxx.includeDirectories=components,/usr/include
|
||||
-Dsonar.exclusions=$EXCLUSIONS
|
||||
-Dsonar.gitlab.commit_sha=$CI_COMMIT_SHA
|
||||
-Dsonar.gitlab.failure_notification_mode=exit-code
|
||||
-Dsonar.gitlab.project_id=$CI_PROJECT_ID
|
||||
-Dsonar.gitlab.ref_name=$CI_COMMIT_REF_NAME
|
||||
-Dsonar.host.url=$SONAR_HOST_URL
|
||||
-Dsonar.login=$SONAR_LOGIN
|
||||
-Dsonar.projectBaseDir=$CI_PROJECT_DIR
|
||||
-Dsonar.projectKey=esp-idf
|
||||
-Dsonar.python.pylint_config=.pylintrc
|
||||
-Dsonar.sourceEncoding=UTF-8
|
||||
-Dsonar.sources=$CI_PROJECT_DIR
|
||||
|
||||
# deploy stage
|
||||
clang_tidy_deploy:
|
||||
extends:
|
||||
- .deploy_job_template
|
||||
- .rules:patterns:clang_tidy
|
||||
needs:
|
||||
- clang_tidy_check
|
||||
tags:
|
||||
- deploy
|
||||
- shiny
|
||||
script:
|
||||
- add_doc_server_ssh_keys $DOCS_DEPLOY_KEY $DOCS_SERVER $DOCS_SERVER_USER
|
||||
- export GIT_VER=$(git describe --always)
|
||||
- cd $IDF_PATH/examples/get-started/hello_world/tidybuild
|
||||
- mv report $GIT_VER
|
||||
- tar czvf $GIT_VER.tar.gz $GIT_VER
|
||||
- export STATIC_REPORT_PATH="web/static_analysis/esp-idf/"
|
||||
- ssh $DOCS_SERVER -x "mkdir -p $STATIC_REPORT_PATH/clang-tidy"
|
||||
- scp $GIT_VER.tar.gz $DOCS_SERVER:$STATIC_REPORT_PATH/clang-tidy
|
||||
- ssh $DOCS_SERVER -x "cd $STATIC_REPORT_PATH/clang-tidy && tar xzvf $GIT_VER.tar.gz && rm -f latest && ln -s $GIT_VER latest"
|
||||
# add link to view the report
|
||||
- echo "[static analysis][clang tidy] $CI_DOCKER_REGISTRY/static_analysis/esp-idf/clang-tidy/${GIT_VER}/index.html"
|
||||
- test ! -e ${GIT_VER}/FAILED_RULES || { echo 'Failed static analysis rules!'; cat ${GIT_VER}/FAILED_RULES; exit 1; }
|
File diff suppressed because it is too large
Load Diff
@ -13,12 +13,12 @@ repos:
|
||||
# 3 - any directory named 'testdata'
|
||||
# 4 - IDF monitor test data
|
||||
exclude: &whitespace_excludes |
|
||||
(?x)^(
|
||||
.+\.(md|rst|map|bin)|
|
||||
.+test.*\/.*expected.*|
|
||||
.+\/testdata\/.+|
|
||||
.+test_idf_monitor\/tests\/.+
|
||||
)$
|
||||
(?x)^(
|
||||
.+\.(md|rst|map|bin)|
|
||||
.+test.*\/.*expected.*|
|
||||
.+\/testdata\/.+|
|
||||
.+test_idf_monitor\/tests\/.+
|
||||
)$
|
||||
- id: end-of-file-fixer
|
||||
exclude: *whitespace_excludes
|
||||
- id: check-executables-have-shebangs
|
||||
@ -75,3 +75,19 @@ repos:
|
||||
language: python
|
||||
files: '\.gitlab/CODEOWNERS'
|
||||
pass_filenames: false
|
||||
- id: check-rules-yml
|
||||
name: Check rules.yml all rules have at least one job applied, all rules needed exist
|
||||
entry: tools/ci/check_rules_yml.py
|
||||
language: python
|
||||
files: '\.gitlab/ci/.+\.yml|\.gitlab-ci.yml'
|
||||
pass_filenames: false
|
||||
additional_dependencies:
|
||||
- PyYAML == 5.3.1
|
||||
- id: check-generated-rules
|
||||
name: Check rules are generated (based on .gitlab/ci/dependencies/dependencies.yml)
|
||||
entry: .gitlab/ci/dependencies/generate_rules.py
|
||||
language: python
|
||||
files: '\.gitlab/ci/dependencies/.+'
|
||||
pass_filenames: false
|
||||
additional_dependencies:
|
||||
- PyYAML == 5.3.1
|
||||
|
@ -147,6 +147,9 @@ disable=print-statement,
|
||||
too-few-public-methods,
|
||||
too-many-locals,
|
||||
bad-super-call, # since we still haven't drop python2 support
|
||||
too-many-nested-blocks,
|
||||
too-many-branches,
|
||||
too-many-statements,
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
|
@ -1,63 +0,0 @@
|
||||
#!/usr/bin/env python

# Internal use only.
#
# Called by CI jobs to decide whether they need to run: exits 0 when the job
# matches the bot-provided stage/job filters, non-zero otherwise.

import json
import os
import re
import sys

# A filter entry written as r"<regex>" denotes a regular-expression filter.
RE_FILTER_PATTERN = re.compile(r'^r"(.+)?"$')

# Type of a compiled regex object, kept for isinstance checks below.
RE_TYPE = type(re.compile('', 0))


def parse_filter(filter_name):
    """Read the environment variable *filter_name* as a JSON list of filters.

    Plain strings stay as-is; entries of the form r"<pattern>" are compiled
    into regex objects. Returns an empty list when the variable is unset or
    empty.
    """
    raw_value = os.getenv(filter_name)
    if not raw_value:
        return []

    parsed = []
    for entry in json.loads(raw_value):
        regex_entry = RE_FILTER_PATTERN.search(entry)
        parsed.append(re.compile(regex_entry.group(1)) if regex_entry else entry)
    return parsed


def process_filter(execute_by_default, filter_name, ci_name):
    """Decide whether *ci_name* passes the filter stored in *filter_name*.

    With no filters configured, fall back to *execute_by_default*. Otherwise
    the name must match at least one filter (string equality or regex search).
    """
    # Bot messages are processed lower-cased, so compare lower-cased here too.
    normalized_name = ci_name.lower()

    active_filters = parse_filter(filter_name)
    if not active_filters:
        return execute_by_default

    for candidate in active_filters:
        if isinstance(candidate, RE_TYPE):
            if candidate.search(normalized_name) is not None:
                return True
        elif candidate == normalized_name:
            return True
    return False


if __name__ == '__main__':
    # When BOT_NEEDS_TRIGGER_BY_NAME is set, a job runs only if the job
    # filter explicitly names it.
    job_default = os.getenv('BOT_NEEDS_TRIGGER_BY_NAME', '0') != '1'

    if process_filter(True, 'BOT_STAGE_FILTER', os.getenv('CI_JOB_STAGE')) \
            and process_filter(job_default, 'BOT_JOB_FILTER', os.getenv('CI_JOB_NAME')):
        sys.exit(0)

    print("Skip this job as it doesn't fit @bot's filter")
    sys.exit(-1)
|
113
tools/ci/check_rules_yml.py
Executable file
113
tools/ci/check_rules_yml.py
Executable file
@ -0,0 +1,113 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright 2021 Espressif Systems (Shanghai) CO LTD
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
Check if all rules in rules.yml used or not in CI yaml files.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
from copy import deepcopy
|
||||
|
||||
import yaml
|
||||
from idf_ci_utils import IDF_PATH
|
||||
|
||||
ROOT_YML_FP = os.path.join(IDF_PATH, '.gitlab-ci.yml')
|
||||
|
||||
|
||||
def load_yaml(file_path):
    """Parse *file_path* as YAML and return its content.

    Uses FullLoader so anchors/aliases in the CI config resolve correctly.
    The file handle is closed deterministically (the previous version passed
    a bare ``open(...)`` to ``yaml.load`` and leaked the handle until GC).
    """
    with open(file_path) as fr:
        return yaml.load(fr, Loader=yaml.FullLoader)
|
||||
|
||||
|
||||
class YMLConfig:
    """Aggregated view of the root .gitlab-ci.yml plus every CI file it includes."""

    def __init__(self, root_yml_file_path):
        # Lazily-populated caches for the `config` / `all_extends` properties.
        self._config = None
        self._all_extends = None

        self.root_yml = load_yaml(root_yml_file_path)
        assert self.root_yml

    @staticmethod
    def _list(str_or_list):
        """Normalize a scalar-or-list YAML value into a list."""
        if isinstance(str_or_list, str):
            return [str_or_list]
        if isinstance(str_or_list, list):
            return str_or_list
        raise ValueError('Wrong type: {}. Only supports str or list.'.format(type(str_or_list)))

    @property
    def config(self):
        """Merged mapping of all included CI yaml files (rules.yml excluded)."""
        if self._config:
            return self._config

        merged = {}
        for included in self.root_yml['include']:
            # rules.yml only defines rule templates, not jobs — skip it here.
            if not included.endswith('rules.yml'):
                merged.update(load_yaml(os.path.join(IDF_PATH, included)))
        self._config = merged
        return self._config

    @property
    def all_extends(self):
        """Set of every '.rules:*' template referenced via 'extends' by any job."""
        if self._all_extends:
            return self._all_extends

        referenced = {
            rule
            for job in self.config.values()
            if 'extends' in job
            for rule in self._list(job['extends'])
            if rule.startswith('.rules:')
        }
        self._all_extends = referenced
        return self._all_extends

    def exists(self, key):
        """Return True when *key* is extended by at least one job."""
        return key in self.all_extends
|
||||
|
||||
|
||||
def validate(rules_yml):
    """Check rules.yml against the jobs defined in the CI config.

    Warns about rules defined in *rules_yml* that no job extends, and errors
    on rules that jobs extend but *rules_yml* does not define.

    :param rules_yml: path of the rules.yml file to check
    :return: 0 on success, 1 if any referenced rule is missing
    """
    yml_config = YMLConfig(ROOT_YML_FP)
    res = 0
    needed_rules = deepcopy(yml_config.all_extends)
    with open(rules_yml) as fr:
        # start=1 so the printed location matches 1-based editor line numbers
        # (previously enumerate() started at 0 and warnings were off by one).
        for index, line in enumerate(fr, start=1):
            if line.startswith('.rules:'):
                # '.rules:foo:' -> '.rules:foo'
                key = line.strip().rsplit(':', 1)[0]
                if not yml_config.exists(key):
                    print('{}:{}:WARNING:rule "{}" unused'.format(rules_yml, index, key))
                else:
                    # discard() tolerates the same rule being defined twice
                    # (remove() would raise KeyError on the second occurrence).
                    needed_rules.discard(key)

    if needed_rules:
        for item in needed_rules:
            print('ERROR: missing rule: "{}"'.format(item))
        res = 1

    if res == 0:
        print('Pass')
    return res
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # CLI entry point: optionally takes the rules.yml path, defaulting to the
    # in-tree .gitlab/ci/rules.yml. Exit status comes from validate().
    arg_parser = argparse.ArgumentParser(description=__doc__)
    arg_parser.add_argument(
        'rules_yml',
        nargs='?',
        default=os.path.join(IDF_PATH, '.gitlab', 'ci', 'rules.yml'),
        help='rules.yml file path',
    )
    cli_args = arg_parser.parse_args()

    sys.exit(validate(cli_args.rules_yml))
|
94
tools/ci/check_tools_files_patterns.py
Executable file
94
tools/ci/check_tools_files_patterns.py
Executable file
@ -0,0 +1,94 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright 2021 Espressif Systems (Shanghai) CO LTD
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import argparse
|
||||
import fnmatch
|
||||
import glob
|
||||
import os
|
||||
import sys
|
||||
|
||||
import yaml
|
||||
from idf_ci_utils import IDF_PATH, get_git_files, magic_check, magic_check_bytes, translate
|
||||
|
||||
# Monkey patch starts
|
||||
# glob.glob will ignore all files starts with ``.``
|
||||
# don't ignore them here
|
||||
# need to keep the same argument as glob._ishidden
|
||||
|
||||
|
||||
def _ishidden(path):  # pylint: disable=W0613
    """Replacement for glob._ishidden: never treat a path as hidden.

    Keeps the same signature as the stdlib helper so it can be patched in;
    the argument is deliberately unused.
    """
    return False
|
||||
|
||||
|
||||
# Install the idf_ci_utils variants into fnmatch/glob so that glob.glob()
# also matches hidden (dot-prefixed) files, which the stock implementation
# silently skips (see the note above _ishidden).
fnmatch.translate = translate

glob.magic_check = magic_check
glob.magic_check_bytes = magic_check_bytes
glob._ishidden = _ishidden  # pylint: disable=W0212  (patching a private member on purpose)
# Monkey patching ends here
|
||||
|
||||
|
||||
def check(pattern_yml, exclude_list):
    """Compare the CI '.pattern*' globs against git-tracked files under tools/.

    :param pattern_yml: yaml file whose ``.pattern*`` keys hold glob lists
    :param exclude_list: text file of glob patterns to ignore; ``#`` starts a
        comment and blank lines are skipped
    :return: tuple ``(missing_files, duplicate_patterns)`` — files tracked by
        git under tools/ that no pattern covers (relative to IDF_PATH), and
        patterns present in both the rules and the exclude list
    """
    with open(pattern_yml) as fr:
        rules_dict = yaml.load(fr, Loader=yaml.FullLoader)
    rules_patterns_set = set()
    for k, v in rules_dict.items():
        if k.startswith('.pattern') and isinstance(v, list):
            rules_patterns_set.update(v)
    rules_files_set = set()
    for pat in rules_patterns_set:
        rules_files_set.update(glob.glob(os.path.join(IDF_PATH, pat), recursive=True))

    exclude_patterns_set = set()
    with open(exclude_list) as fr:
        for line in fr:
            # Strip trailing comments; drop lines that end up empty.
            # (The old list-comprehension's "if path" never filtered anything,
            # since every raw line contains at least a newline and is truthy,
            # so empty patterns leaked into the set and globbed IDF_PATH itself.)
            pattern = line.split('#')[0].strip()
            if pattern:
                exclude_patterns_set.add(pattern)
    exclude_files_set = set()
    for pat in exclude_patterns_set:
        exclude_files_set.update(glob.glob(os.path.join(IDF_PATH, pat), recursive=True))

    missing_files = set()
    for f in get_git_files(os.path.join(IDF_PATH, 'tools'), full_path=True):
        if f not in rules_files_set and f not in exclude_files_set:
            missing_files.add(os.path.relpath(f, IDF_PATH))

    return missing_files, rules_patterns_set.intersection(exclude_patterns_set)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # CLI entry point: report tools/ files covered by neither the CI rule
    # patterns nor the exclude list, and patterns present in both.
    parser = argparse.ArgumentParser(description='check if all tools files are in rules patterns or exclude list')
    parser.add_argument('-c', '--pattern-yml',
                        default=os.path.join(IDF_PATH, '.gitlab', 'ci', 'rules.yml'),
                        help='yml file path included file patterns')
    parser.add_argument('-e', '--exclude-list',
                        default=os.path.join(IDF_PATH, 'tools', 'ci', 'exclude_check_tools_files.txt'),
                        help='exclude list path')

    args = parser.parse_args()

    res = 0
    not_included_files, dup_patterns = check(args.pattern_yml, args.exclude_list)
    if not_included_files:
        # (fixed the previously unbalanced parenthesis in this message)
        print('Missing Files (please add to tools/ci/exclude_check_tools_files.txt):')
        for file in not_included_files:
            print(file)
        res = 1
    if dup_patterns:
        print('Duplicated Patterns (please check .gitlab/ci/rules.yml and tools/ci/exclude_check_tools_files.txt):')
        for pattern in dup_patterns:
            print(pattern)
        res = 1

    sys.exit(res)
|
@ -1,135 +0,0 @@
|
||||
# Rules for `rules.yml`
|
||||
|
||||
- [Rules for `rules.yml`](#rules-for-rulesyml)
|
||||
- [How to Develop With `rules.yml`?](#how-to-develop-with-rulesyml)
|
||||
- [How to Add a New `Job`?](#how-to-add-a-new-job)
|
||||
- [How to Add a New `Rules` Template?](#how-to-add-a-new-rules-template)
|
||||
- [How to Add a New `if` Anchor?](#how-to-add-a-new-if-anchor)
|
||||
- [Naming Rules](#naming-rules)
|
||||
- [Common Naming Rules](#common-naming-rules)
|
||||
- [`if` Anchors Naming Rules](#if-anchors-naming-rules)
|
||||
- [Common Phrases/Abbreviations](#common-phrasesabbreviations)
|
||||
- [`rules` Template Naming Rules](#rules-template-naming-rules)
|
||||
- [Reusable Shell Script `tools/ci/utils.sh`](#reusable-shell-script-toolsciutilssh)
|
||||
- [Functions](#functions)
|
||||
- [CI Job Related](#ci-job-related)
|
||||
- [Shell Script Related](#shell-script-related)
|
||||
|
||||
## How to Develop With `rules.yml`?
|
||||
|
||||
### How to Add a New `Job`?
|
||||
|
||||
check if there's a suitable `.rules:<rules-you-need>` template
|
||||
|
||||
1. if there is, put this in the job `extends`. All done, now you can close this window. (`extends` could be array or string)
|
||||
2. if there isn't
|
||||
1. check [How to Add a New `Rules` Template?](#how-to-add-a-new-rules-template), create a suitable one
|
||||
2. follow step 1
|
||||
|
||||
### How to Add a New `Rules` Template?
|
||||
|
||||
check if there's a suitable `.if-<if-anchor-you-need>` anchor
|
||||
|
||||
1. if there is, create a rule following [`rules` Template Naming Rules](#rules-template-naming-rules). For detailed information, please refer to [GitLab Documentation `rules-if`](https://docs.gitlab.com/ee/ci/yaml/README.html#rulesif). Here's an example.
|
||||
|
||||
```yaml
|
||||
.rules:dev:
|
||||
rules:
|
||||
- <<: *if-trigger
|
||||
- <<: *if-dev-push
|
||||
```
|
||||
|
||||
2. if there isn't
|
||||
|
||||
1. check [How to Add a New `if` Anchor?](#how-to-add-a-new-if-anchor), create a suitable one
|
||||
2. follow step 1
|
||||
|
||||
### How to Add a New `if` Anchor?
|
||||
|
||||
Create an `if` anchor following [`if` Anchors Naming Rules](#if-anchors-naming-rules). For detailed information about how to write the condition clause, please refer to [GitLab Documentation `only/except (advanced)`](https://docs.gitlab.com/ee/ci/yaml/README.html#onlyexcept-advanced). Here's an example.
|
||||
|
||||
```yaml
|
||||
.if-schedule: &if-schedule
|
||||
if: '$CI_PIPELINE_SOURCE == "schedule"'
|
||||
```
|
||||
|
||||
## Naming Rules
|
||||
|
||||
### Common Naming Rules
|
||||
|
||||
if a phrase has multi words, use `_` to concat them.
|
||||
|
||||
> e.g. `regular_test`
|
||||
|
||||
if a name have multi phrase, use `-` to concat them.
|
||||
|
||||
> e.g. `regular_test-example_test`
|
||||
|
||||
### `if` Anchors Naming Rules
|
||||
|
||||
- if it's a label: `.if-label-<label_name>`
|
||||
- if it's a ref: `.if-ref-<ref_name>`
|
||||
- if it's a branch: `.if-branch-<branch_name>`
|
||||
- if it's a tag: `.if-tag-<tag_name>`
|
||||
- if it's a operating system: `.if-os-mac`
|
||||
- if it's multi-type combination: `.if-ref-<release_name>-branch-<branch_name>`
|
||||
|
||||
#### Common Phrases/Abbreviations
|
||||
|
||||
- `no_label`
|
||||
|
||||
`$BOT_TRIGGER_WITH_LABEL == null`
|
||||
|
||||
- `protected`
|
||||
|
||||
`($CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_BRANCH =~ /^release\/v/ || $CI_COMMIT_TAG =~ /^v\d+\.\d+(\.\d+)?($|-)/)`
|
||||
|
||||
- `target_test`
|
||||
|
||||
`example_test` or `custom_test` or `unit_test-all_targets`
|
||||
|
||||
### `rules` Template Naming Rules
|
||||
|
||||
- if it's os related: `.rules:os:<os_name>`
|
||||
- if it's tag related: `.rules:tag:<tag_1>-<tag_2>`
|
||||
- if it's label related: `.rules:labels:<label_1>-<label_2>`
|
||||
|
||||
By default, all `.rules:labels` should include both `if-label-regular_test` and `if-protected-no_label` implicitly, unless they have special postfixes:
|
||||
|
||||
- slim: `regular_test` not included
|
||||
- only: only have the phrases listed
|
||||
|
||||
- if it's target test related: `.rules:tests:<test_type_1>-<test_type_2>`
|
||||
|
||||
By default, all `.rules:tests` should include `if-protected-no_label` implicitly, unless they have special postfixes (same as above)
|
||||
|
||||
- if it needs to build at first, then do some target test: `.rules:build_tests:<test_type_1>-<test_type_2>`
|
||||
|
||||
By default, all `.rules:build_tests` should include `if-protected-no_label`, `if-label-build`, `if-label-regular_test` implicitly, unless they have special postfixes (same as above)
|
||||
|
||||
- if a job supports all targets, use `-all_targets` as postfix
|
||||
|
||||
## Reusable Shell Script `tools/ci/utils.sh`
|
||||
|
||||
It is used to put all the reusable shell script as small functions. If you want to set `before_script: []` for you job, now you can set `extends: .before_script_slim` instead. it will only run `source tools/ci/utils.sh`
|
||||
|
||||
If you're developing CI shell scripts, you can use these functions without source. They're already included in all `before_script`
|
||||
|
||||
To run these commands in shell script locally, place `source tools/ci/utils.sh` at the very beginning.
|
||||
|
||||
### Functions
|
||||
|
||||
#### CI Job Related
|
||||
- `apply_bot_filter`
|
||||
- `add_gitlab_ssh_keys`
|
||||
- `add_github_ssh_keys`
|
||||
- `add_doc_server_ssh_keys`
|
||||
- `fetch_submodules`
|
||||
- `get_all_submodules`
|
||||
|
||||
#### Shell Script Related
|
||||
- `error`: log in red color
|
||||
- `warning`: log in orange color
|
||||
- `info`: log in green color
|
||||
- `run_cmd`: run the command with duration seconds info
|
||||
- `retry_failed`: run the command with duration seconds info, retry when failed
|
@ -1,143 +0,0 @@
|
||||
.deploy_job_template:
|
||||
stage: deploy
|
||||
image: $CI_DOCKER_REGISTRY/esp32-ci-env$BOT_DOCKER_IMAGE_TAG
|
||||
tags:
|
||||
- deploy
|
||||
|
||||
.clang_tidy_deploy_template:
|
||||
extends: .deploy_job_template
|
||||
tags:
|
||||
- deploy
|
||||
- shiny
|
||||
script:
|
||||
- add_doc_server_ssh_keys $DOCS_DEPLOY_KEY $DOCS_SERVER $DOCS_SERVER_USER
|
||||
- export GIT_VER=$(git describe --always)
|
||||
- cd $IDF_PATH/examples/get-started/hello_world/tidybuild
|
||||
- mv report $GIT_VER
|
||||
- tar czvf $GIT_VER.tar.gz $GIT_VER
|
||||
- export STATIC_REPORT_PATH="web/static_analysis/esp-idf/"
|
||||
- ssh $DOCS_SERVER -x "mkdir -p $STATIC_REPORT_PATH/clang-tidy"
|
||||
- scp $GIT_VER.tar.gz $DOCS_SERVER:$STATIC_REPORT_PATH/clang-tidy
|
||||
- ssh $DOCS_SERVER -x "cd $STATIC_REPORT_PATH/clang-tidy && tar xzvf $GIT_VER.tar.gz && rm -f latest && ln -s $GIT_VER latest"
|
||||
# add link to view the report
|
||||
- echo "[static analysis][clang tidy] $CI_DOCKER_REGISTRY/static_analysis/esp-idf/clang-tidy/${GIT_VER}/index.html"
|
||||
- test ! -e ${GIT_VER}/FAILED_RULES || { echo 'Failed static analysis rules!'; cat ${GIT_VER}/FAILED_RULES; exit 1; }
|
||||
|
||||
clang_tidy_deploy:
|
||||
extends: .clang_tidy_deploy_template
|
||||
# Override default stage to happen before the post_check
|
||||
stage: test_deploy
|
||||
needs:
|
||||
- clang_tidy_check
|
||||
- clang_tidy_check_all
|
||||
variables:
|
||||
BOT_NEEDS_TRIGGER_BY_NAME: 1
|
||||
|
||||
clang_tidy_deploy_regular:
|
||||
extends:
|
||||
- .clang_tidy_deploy_template
|
||||
- .rules:labels:static_analysis-only
|
||||
needs:
|
||||
- clang_tidy_check_regular
|
||||
|
||||
push_to_github:
|
||||
extends:
|
||||
- .deploy_job_template
|
||||
- .before_script_lesser
|
||||
- .rules:protected-no_label
|
||||
dependencies: []
|
||||
script:
|
||||
- add_github_ssh_keys
|
||||
- git remote remove github &>/dev/null || true
|
||||
- git remote add github git@github.com:espressif/esp-idf.git
|
||||
- tools/ci/push_to_github.sh
|
||||
|
||||
.deploy_docs_template:
|
||||
extends:
|
||||
- .deploy_job_template
|
||||
- .before_script_lesser
|
||||
image: $ESP_IDF_DOC_ENV_IMAGE
|
||||
tags:
|
||||
- deploy
|
||||
- shiny
|
||||
variables:
|
||||
DOCS_BUILD_DIR: "${IDF_PATH}/docs/_build/"
|
||||
PYTHONUNBUFFERED: 1
|
||||
script:
|
||||
- add_doc_server_ssh_keys $DOCS_DEPLOY_PRIVATEKEY $DOCS_DEPLOY_SERVER $DOCS_DEPLOY_SERVER_USER
|
||||
- export GIT_VER=$(git describe --always)
|
||||
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 ${IDF_PATH}/tools/ci/deploy_docs.py
|
||||
|
||||
# deploys docs to CI_DOCKER_REGISTRY webserver, for internal review
|
||||
deploy_docs_preview:
|
||||
extends:
|
||||
- .deploy_docs_template
|
||||
- .rules:labels:build_docs-preview
|
||||
# Override default stage to happen before the post_check
|
||||
stage: test_deploy
|
||||
needs:
|
||||
- build_docs_html
|
||||
- build_docs_pdf
|
||||
variables:
|
||||
TYPE: "preview"
|
||||
# older branches use DOCS_DEPLOY_KEY, DOCS_SERVER, DOCS_SERVER_USER, DOCS_PATH for preview server so we keep these names for 'preview'
|
||||
DOCS_DEPLOY_PRIVATEKEY: "$DOCS_DEPLOY_KEY"
|
||||
DOCS_DEPLOY_SERVER: "$DOCS_SERVER"
|
||||
DOCS_DEPLOY_SERVER_USER: "$DOCS_SERVER_USER"
|
||||
DOCS_DEPLOY_PATH: "$DOCS_PATH"
|
||||
DOCS_DEPLOY_URL_BASE: "https://$CI_DOCKER_REGISTRY/docs/esp-idf"
|
||||
|
||||
# deploy docs to production webserver
|
||||
deploy_docs_production:
|
||||
# The DOCS_PROD_* variables used by this job are "Protected" so these branches must all be marked "Protected" in Gitlab settings
|
||||
extends:
|
||||
- .deploy_docs_template
|
||||
- .rules:protected-no_label
|
||||
stage: post_deploy
|
||||
needs: # ensure runs after push_to_github succeeded
|
||||
- build_docs_html
|
||||
- build_docs_pdf
|
||||
- push_to_github
|
||||
variables:
|
||||
TYPE: "preview"
|
||||
DOCS_DEPLOY_PRIVATEKEY: "$DOCS_PROD_DEPLOY_KEY"
|
||||
DOCS_DEPLOY_SERVER: "$DOCS_PROD_SERVER"
|
||||
DOCS_DEPLOY_SERVER_USER: "$DOCS_PROD_SERVER_USER"
|
||||
DOCS_DEPLOY_PATH: "$DOCS_PROD_PATH"
|
||||
DOCS_DEPLOY_URL_BASE: "https://docs.espressif.com/projects/esp-idf"
|
||||
|
||||
deploy_test_result:
|
||||
extends:
|
||||
- .deploy_job_template
|
||||
- .before_script_slim
|
||||
- .rules:ref:master-schedule-always
|
||||
image: $CI_DOCKER_REGISTRY/bot-env
|
||||
tags:
|
||||
- deploy_test
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- ${CI_PROJECT_DIR}/test-management/*.log
|
||||
# save all test logs as artifacts, make it easier to track errors
|
||||
- ${CI_PROJECT_DIR}/TEST_LOGS
|
||||
- $CI_PROJECT_DIR/$CI_COMMIT_SHA
|
||||
expire_in: 1 mos
|
||||
variables:
|
||||
UNIT_TEST_CASE_FILE: "${CI_PROJECT_DIR}/components/idf_test/unit_test/TestCaseAll.yml"
|
||||
BOT_ACCOUNT_CONFIG_FILE: "${CI_PROJECT_DIR}/test-management/Config/Account.local.yml"
|
||||
TEST_FW_PATH: "$CI_PROJECT_DIR/tools/tiny-test-fw"
|
||||
AUTO_TEST_SCRIPT_PATH: "${CI_PROJECT_DIR}/auto_test_script"
|
||||
script:
|
||||
- add_gitlab_ssh_keys
|
||||
- export GIT_SHA=$(echo ${CI_COMMIT_SHA} | cut -c 1-8)
|
||||
- export REV_COUNT=$(git rev-list --count HEAD)
|
||||
- export SUMMARY="IDF CI test result for $GIT_SHA (r${REV_COUNT})"
|
||||
# artifacts of job update_test_cases creates test-management folder
|
||||
# we need to remove it so we can clone test-management folder again
|
||||
- rm -rf test-management
|
||||
- retry_failed git clone $TEST_MANAGEMENT_REPO
|
||||
- python3 $CHECKOUT_REF_SCRIPT test-management test-management
|
||||
- cd test-management
|
||||
- echo $BOT_JIRA_ACCOUNT > ${BOT_ACCOUNT_CONFIG_FILE}
|
||||
# update test results
|
||||
- python3 ImportTestResult.py -r "$GIT_SHA (r${REV_COUNT})" -j $JIRA_TEST_MANAGEMENT_PROJECT -s "$SUMMARY" -l CI -p ${CI_PROJECT_DIR}/TEST_LOGS ${CI_PROJECT_DIR}/${CI_COMMIT_SHA} --pipeline_url ${CI_PIPELINE_URL}
|
@ -1,73 +0,0 @@
|
||||
# copy from .gitlab-ci.yml as anchor is not global
|
||||
.show_submodule_urls: &show_submodule_urls |
|
||||
git config --get-regexp '^submodule\..*\.url$' || true
|
||||
|
||||
.post_check_base_template:
|
||||
stage: post_check
|
||||
image: $CI_DOCKER_REGISTRY/esp32-ci-env$BOT_DOCKER_IMAGE_TAG
|
||||
tags:
|
||||
- host_test
|
||||
dependencies: []
|
||||
|
||||
.post_check_job_template:
|
||||
extends:
|
||||
- .post_check_base_template
|
||||
- .before_script_lesser_nofilter
|
||||
|
||||
.post_check_job_template_with_filter:
|
||||
extends:
|
||||
- .post_check_base_template
|
||||
- .before_script_lesser
|
||||
|
||||
check_submodule_sync:
|
||||
extends:
|
||||
- .before_script_slim
|
||||
- .post_check_job_template
|
||||
tags:
|
||||
- github_sync
|
||||
retry: 2
|
||||
variables:
|
||||
GIT_STRATEGY: clone
|
||||
SUBMODULES_TO_FETCH: "none"
|
||||
PUBLIC_IDF_URL: "https://github.com/espressif/esp-idf.git"
|
||||
script:
|
||||
- git submodule deinit --force .
|
||||
# setting the default remote URL to the public one, to resolve relative location URLs
|
||||
- git config remote.origin.url ${PUBLIC_IDF_URL}
|
||||
# check if all submodules are correctly synced to public repository
|
||||
- git submodule init
|
||||
- *show_submodule_urls
|
||||
- git submodule update --recursive
|
||||
- echo "IDF was cloned from ${PUBLIC_IDF_URL} completely"
|
||||
|
||||
check_ut_cmake_make:
|
||||
extends:
|
||||
- .post_check_job_template_with_filter
|
||||
- .rules:dev
|
||||
tags:
|
||||
- build
|
||||
script:
|
||||
- tools/ci/check_ut_cmake_make.sh
|
||||
|
||||
check_artifacts_expire_time:
|
||||
extends: .post_check_job_template
|
||||
script:
|
||||
# check if we have set expire time for all artifacts
|
||||
- python tools/ci/check_artifacts_expire_time.py
|
||||
|
||||
check_pipeline_triggered_by_label:
|
||||
extends:
|
||||
- .post_check_job_template
|
||||
- .rules:dev
|
||||
script:
|
||||
# If the pipeline is triggered with a label, it will only succeed if the "regular_test" label is added.
|
||||
# We want to make sure some jobs are always executed to detect regression.
|
||||
- test "$BOT_LABEL_REGULAR_TEST" = "true" || { echo "CI can only pass if 'regular_test' label is included"; exit -1; }
|
||||
|
||||
check_commit_msg:
|
||||
extends: .post_check_job_template
|
||||
script:
|
||||
- git status
|
||||
- git log -n10 --oneline
|
||||
# commit start with "WIP: " need to be squashed before merge
|
||||
- 'git log --pretty=%s master.. -- | grep "^WIP: " && exit 1 || exit 0'
|
@ -1,45 +0,0 @@
|
||||
.check_doc_links_template:
|
||||
extends: .rules:protected-no_label
|
||||
stage: post_deploy
|
||||
image: $ESP_IDF_DOC_ENV_IMAGE
|
||||
tags: [ "build", "amd64", "internet" ]
|
||||
needs: # ensure runs after push_to_github succeeded
|
||||
- build_docs_html
|
||||
- build_docs_pdf
|
||||
- push_to_github
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- docs/_build/*/*/*.txt
|
||||
- docs/_build/*/*/linkcheck/*.txt
|
||||
expire_in: 1 week
|
||||
allow_failure: true
|
||||
dependencies: []
|
||||
script:
|
||||
- cd docs
|
||||
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 pip install -r requirements.txt
|
||||
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 ./build_docs.py -l $DOCLANG -t $DOCTGT linkcheck
|
||||
|
||||
check_doc_links_en_esp32:
|
||||
extends: .check_doc_links_template
|
||||
variables:
|
||||
DOCLANG: "en"
|
||||
DOCTGT: "esp32"
|
||||
|
||||
check_doc_links_en_esp32s2:
|
||||
extends: .check_doc_links_template
|
||||
variables:
|
||||
DOCLANG: "en"
|
||||
DOCTGT: "esp32s2"
|
||||
|
||||
check_doc_links_zh_CN_esp32:
|
||||
extends: .check_doc_links_template
|
||||
variables:
|
||||
DOCLANG: "zh_CN"
|
||||
DOCTGT: "esp32"
|
||||
|
||||
check_doc_links_zh_CN_esp32s2:
|
||||
extends: .check_doc_links_template
|
||||
variables:
|
||||
DOCLANG: "zh_CN"
|
||||
DOCTGT: "esp32s2"
|
@ -1,273 +0,0 @@
|
||||
# if anchors
|
||||
.if-ref-master: &if-ref-master
|
||||
if: '$CI_COMMIT_REF_NAME == "master"'
|
||||
|
||||
.if-tag-release-no_label: &if-tag-release-no_label
|
||||
if: '$CI_COMMIT_TAG =~ /^v\d+\.\d+(\.\d+)?($|-)/ && $BOT_TRIGGER_WITH_LABEL == null'
|
||||
|
||||
.if-protected: &if-protected
|
||||
if: '($CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_BRANCH =~ /^release\/v/ || $CI_COMMIT_TAG =~ /^v\d+\.\d+(\.\d+)?($|-)/)'
|
||||
|
||||
.if-protected-no_label: &if-protected-no_label
|
||||
if: '($CI_COMMIT_REF_NAME == "master" || $CI_COMMIT_BRANCH =~ /^release\/v/ || $CI_COMMIT_TAG =~ /^v\d+\.\d+(\.\d+)?($|-)/) && $BOT_TRIGGER_WITH_LABEL == null'
|
||||
|
||||
.if-dev-push: &if-dev-push
|
||||
if: '$CI_COMMIT_REF_NAME != "master" && $CI_COMMIT_BRANCH !~ /^release\/v/ && $CI_COMMIT_TAG !~ /^v\d+\.\d+(\.\d+)?($|-)/ && ($CI_PIPELINE_SOURCE == "push" || $CI_PIPELINE_SOURCE == "merge_request_event")'
|
||||
|
||||
.if-schedule: &if-schedule
|
||||
if: '$CI_PIPELINE_SOURCE == "schedule"'
|
||||
|
||||
.if-trigger: &if-trigger
|
||||
if: '$CI_PIPELINE_SOURCE == "trigger"'
|
||||
|
||||
.if-label-regular_test: &if-label-regular_test
|
||||
if: '$BOT_LABEL_REGULAR_TEST'
|
||||
|
||||
.if-label-build: &if-label-build
|
||||
if: '$BOT_LABEL_BUILD'
|
||||
|
||||
.if-label-build_docs: &if-label-build_docs
|
||||
if: '$BOT_LABEL_BUILD_DOCS'
|
||||
|
||||
.if-label-integration_test: &if-label-integration_test
|
||||
if: '$BOT_LABEL_INTEGRATION_TEST'
|
||||
|
||||
.if-label-unit_test: &if-label-unit_test
|
||||
if: '$BOT_LABEL_UNIT_TEST'
|
||||
|
||||
.if-label-unit_test-32: &if-label-unit_test-32
|
||||
if: '$BOT_LABEL_UNIT_TEST_32'
|
||||
|
||||
.if-label-unit_test-s2: &if-label-unit_test-s2
|
||||
if: '$BOT_LABEL_UNIT_TEST_S2'
|
||||
|
||||
.if-label-unit_test-c3: &if-label-unit_test-c3
|
||||
if: '$BOT_LABEL_UNIT_TEST_C3'
|
||||
|
||||
.if-label-unit_test-all_labels: &if-label-unit_test-all_labels
|
||||
if: '$BOT_LABEL_UNIT_TEST || $BOT_LABEL_UNIT_TEST_32 || $BOT_LABEL_UNIT_TEST_S2 || $BOT_LABEL_UNIT_TEST_C3'
|
||||
|
||||
.if-label-weekend_test: &if-label-weekend_test
|
||||
if: '$BOT_LABEL_WEEKEND_TEST'
|
||||
|
||||
.if-label-example_test: &if-label-example_test
|
||||
if: '$BOT_LABEL_EXAMPLE_TEST'
|
||||
|
||||
.if-label-custom_test: &if-label-custom_test
|
||||
if: '$BOT_LABEL_CUSTOM_TEST'
|
||||
|
||||
.if-label-host_test: &if-label-host_test
|
||||
if: '$BOT_LABEL_HOST_TEST'
|
||||
|
||||
.if-label-fuzzer_test: &if-label-fuzzer_test
|
||||
if: '$BOT_LABEL_FUZZER_TEST'
|
||||
|
||||
.if-label-nvs_coverage: &if-label-nvs_coverage
|
||||
if: '$BOT_LABEL_NVS_COVERAGE'
|
||||
|
||||
.if-label-static_analysis: &if-label-static_analysis
|
||||
if: '$BOT_LABEL_STATIC_ANALYSIS || $BOT_LABEL_STATIC_ANALYSIS_ALL'
|
||||
|
||||
.if-label-iperf_stress_test: &if-label-iperf_stress_test
|
||||
if: '$BOT_LABEL_IPERF_STRESS_TEST'
|
||||
|
||||
.if-os-mac: &if-os-mac
|
||||
if: '$BOT_LABEL_MACOS_TEST'
|
||||
|
||||
# Rules templates
|
||||
.rules:protected:
|
||||
rules:
|
||||
- <<: *if-protected
|
||||
|
||||
.rules:protected-no_label:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
|
||||
.rules:protected-schedule:
|
||||
rules:
|
||||
- <<: *if-protected
|
||||
- <<: *if-schedule
|
||||
|
||||
.rules:trigger:
|
||||
rules:
|
||||
- <<: *if-trigger
|
||||
|
||||
.rules:dev:
|
||||
rules:
|
||||
- <<: *if-trigger
|
||||
- <<: *if-dev-push
|
||||
|
||||
.rules:os:mac_os:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-os-mac
|
||||
|
||||
.rules:tag:release-no_label:
|
||||
rules:
|
||||
- <<: *if-tag-release-no_label
|
||||
|
||||
.rules:ref:master-schedule:
|
||||
rules:
|
||||
- <<: *if-ref-master
|
||||
- <<: *if-schedule
|
||||
|
||||
.rules:ref:master-schedule-always:
|
||||
rules:
|
||||
- <<: *if-ref-master
|
||||
when: always
|
||||
- <<: *if-schedule
|
||||
when: always
|
||||
|
||||
.rules:labels:static_analysis-only:
|
||||
rules:
|
||||
- <<: *if-label-static_analysis
|
||||
|
||||
.rules:labels:build:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-regular_test
|
||||
- <<: *if-label-build
|
||||
|
||||
.rules:labels:build_docs:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-build
|
||||
- <<: *if-label-regular_test
|
||||
- <<: *if-label-build_docs
|
||||
|
||||
.rules:labels:build_docs-slim:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-build_docs
|
||||
|
||||
.rules:labels:build_docs-preview:
|
||||
rules:
|
||||
- <<: *if-label-build_docs
|
||||
|
||||
.rules:labels:weekend_test-only:
|
||||
rules:
|
||||
- <<: *if-label-weekend_test
|
||||
|
||||
.rules:labels:iperf_stress_test-only:
|
||||
rules:
|
||||
- <<: *if-label-iperf_stress_test
|
||||
|
||||
.rules:labels:fuzzer_test-weekend_test-only:
|
||||
rules:
|
||||
- <<: *if-label-fuzzer_test
|
||||
- <<: *if-label-weekend_test
|
||||
|
||||
.rules:labels:nvs_coverage-only:
|
||||
rules:
|
||||
- <<: *if-label-nvs_coverage
|
||||
|
||||
.rules:labels:host_test:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-regular_test
|
||||
- <<: *if-label-host_test
|
||||
|
||||
.rules:tests:example_test-schedule:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-example_test
|
||||
- <<: *if-schedule
|
||||
|
||||
.rules:tests:custom_test-schedule:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-custom_test
|
||||
- <<: *if-schedule
|
||||
|
||||
.rules:tests:unit_test:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-unit_test-all_labels
|
||||
|
||||
.rules:tests:unit_test_32:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-unit_test
|
||||
- <<: *if-label-unit_test-32
|
||||
|
||||
.rules:tests:unit_test_s2:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-unit_test
|
||||
- <<: *if-label-unit_test-s2
|
||||
|
||||
.rules:tests:unit_test_c3:
|
||||
rules:
|
||||
- <<: *if-label-unit_test
|
||||
- <<: *if-label-unit_test-c3
|
||||
|
||||
.rules:tests:integration_test:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-integration_test
|
||||
|
||||
.rules:assign_test:target_test-integration_test-weekend_test:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-regular_test
|
||||
- <<: *if-label-example_test
|
||||
- <<: *if-label-custom_test
|
||||
- <<: *if-label-unit_test-all_labels
|
||||
- <<: *if-label-integration_test
|
||||
- <<: *if-label-weekend_test
|
||||
|
||||
.rules:build_tests:integration_test:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-build
|
||||
- <<: *if-label-regular_test
|
||||
- <<: *if-label-integration_test
|
||||
|
||||
.rules:build_tests:weekend_test:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-build
|
||||
- <<: *if-label-regular_test
|
||||
- <<: *if-label-weekend_test
|
||||
|
||||
.rules:build_tests:unit_test:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-build
|
||||
- <<: *if-label-regular_test
|
||||
- <<: *if-label-unit_test-all_labels
|
||||
|
||||
.rules:build_tests:example_test-weekend_test:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-build
|
||||
- <<: *if-label-regular_test
|
||||
- <<: *if-label-example_test
|
||||
- <<: *if-label-weekend_test
|
||||
|
||||
.rules:build_tests:custom_test-weekend_test:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-build
|
||||
- <<: *if-label-regular_test
|
||||
- <<: *if-label-custom_test
|
||||
- <<: *if-label-weekend_test
|
||||
|
||||
.rules:build_tests:target_test:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-build
|
||||
- <<: *if-label-regular_test
|
||||
- <<: *if-label-example_test
|
||||
- <<: *if-label-custom_test
|
||||
- <<: *if-label-unit_test-all_labels
|
||||
|
||||
.rules:build_tests:target_test-weekend_test:
|
||||
rules:
|
||||
- <<: *if-protected-no_label
|
||||
- <<: *if-label-build
|
||||
- <<: *if-label-regular_test
|
||||
- <<: *if-label-example_test
|
||||
- <<: *if-label-custom_test
|
||||
- <<: *if-label-unit_test-all_labels
|
||||
- <<: *if-label-weekend_test
|
32
tools/ci/exclude_check_tools_files.txt
Normal file
32
tools/ci/exclude_check_tools_files.txt
Normal file
@ -0,0 +1,32 @@
|
||||
tools/ble/**/*
|
||||
tools/catch/**/*
|
||||
tools/ci/build_template_app.sh
|
||||
tools/ci/check_*.{py,txt,sh} # excluded because run in default pipeline pre-check stage
|
||||
tools/ci/checkout_project_ref.py
|
||||
tools/ci/ci_fetch_submodule.py
|
||||
tools/ci/ci_get_mr_info.py
|
||||
tools/ci/configure_ci_environment.sh
|
||||
tools/ci/deploy_docs.py
|
||||
tools/ci/envsubst.py
|
||||
tools/ci/*exclude*.txt
|
||||
tools/ci/executable-list.txt
|
||||
tools/ci/fix_empty_prototypes.sh
|
||||
tools/ci/get-full-sources.sh
|
||||
tools/ci/idf_ci_utils.py
|
||||
tools/ci/mirror-submodule-update.sh
|
||||
tools/ci/multirun_with_pyenv.sh
|
||||
tools/ci/normalize_clangtidy_path.py
|
||||
tools/ci/push_to_github.sh
|
||||
tools/ci/python_packages/wifi_tools.py
|
||||
tools/ci/setup_python.sh
|
||||
tools/ci/utils.sh
|
||||
tools/eclipse-code-style.xml
|
||||
tools/format-minimal.sh
|
||||
tools/format.sh
|
||||
tools/gen_esp_err_to_name.py
|
||||
tools/kconfig/**/*
|
||||
tools/set-submodules-to-github.sh
|
||||
tools/templates/sample_component/CMakeLists.txt
|
||||
tools/templates/sample_component/include/main.h
|
||||
tools/templates/sample_component/main.c
|
||||
tools/toolchain_versions.mk
|
@ -1,3 +1,4 @@
|
||||
.gitlab/ci/dependencies/generate_rules.py
|
||||
components/app_update/otatool.py
|
||||
components/efuse/efuse_table_gen.py
|
||||
components/efuse/test_efuse_host/efuse_tests.py
|
||||
@ -33,7 +34,6 @@ install.fish
|
||||
install.sh
|
||||
tools/build_apps.py
|
||||
tools/check_python_dependencies.py
|
||||
tools/ci/apply_bot_filter.py
|
||||
tools/ci/build_template_app.sh
|
||||
tools/ci/check_build_warnings.py
|
||||
tools/ci/check_callgraph.py
|
||||
@ -46,6 +46,8 @@ tools/ci/check_idf_version.sh
|
||||
tools/ci/check_kconfigs.py
|
||||
tools/ci/check_readme_links.py
|
||||
tools/ci/check_rom_apis.sh
|
||||
tools/ci/check_rules_yml.py
|
||||
tools/ci/check_tools_files_patterns.py
|
||||
tools/ci/check_ut_cmake_make.sh
|
||||
tools/ci/checkout_project_ref.py
|
||||
tools/ci/deploy_docs.py
|
||||
|
@ -15,13 +15,14 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
import functools
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
IDF_PATH = os.getenv('IDF_PATH', os.path.join(os.path.dirname(__file__), '..', '..'))
|
||||
IDF_PATH = os.path.abspath(os.getenv('IDF_PATH', os.path.join(os.path.dirname(__file__), '..', '..')))
|
||||
|
||||
|
||||
def get_submodule_dirs(full_path=False): # type: (bool) -> list
|
||||
@ -41,7 +42,7 @@ def get_submodule_dirs(full_path=False): # type: (bool) -> list
|
||||
dirs.append(os.path.join(IDF_PATH, path))
|
||||
else:
|
||||
dirs.append(path)
|
||||
except Exception as e:
|
||||
except Exception as e: # pylint: disable=W0703
|
||||
logging.warning(str(e))
|
||||
|
||||
return dirs
|
||||
@ -67,5 +68,132 @@ def is_executable(full_path): # type: (str) -> bool
|
||||
"""
|
||||
if sys.platform == 'win32':
|
||||
return _check_git_filemode(full_path)
|
||||
else:
|
||||
return os.access(full_path, os.X_OK)
|
||||
return os.access(full_path, os.X_OK)
|
||||
|
||||
|
||||
def get_git_files(path=IDF_PATH, full_path=False): # type: (str, bool) -> list[str]
|
||||
"""
|
||||
Get the result of git ls-files
|
||||
:param path: path to run git ls-files
|
||||
:param full_path: return full path if set to True
|
||||
:return: list of file paths
|
||||
"""
|
||||
try:
|
||||
files = subprocess.check_output(['git', 'ls-files'], cwd=path).decode('utf8').strip().split('\n')
|
||||
except Exception as e: # pylint: disable=W0703
|
||||
logging.warning(str(e))
|
||||
files = []
|
||||
return [os.path.join(path, f) for f in files] if full_path else files
|
||||
|
||||
|
||||
# this function is a commit from
|
||||
# https://github.com/python/cpython/pull/6299/commits/bfd63120c18bd055defb338c075550f975e3bec1
|
||||
# In order to solve python https://bugs.python.org/issue9584
|
||||
# glob pattern does not support brace expansion issue
|
||||
def _translate(pat):
|
||||
"""Translate a shell PATTERN to a regular expression.
|
||||
There is no way to quote meta-characters.
|
||||
"""
|
||||
i, n = 0, len(pat)
|
||||
res = ''
|
||||
while i < n:
|
||||
c = pat[i]
|
||||
i = i + 1
|
||||
if c == '*':
|
||||
res = res + '.*'
|
||||
elif c == '?':
|
||||
res = res + '.'
|
||||
elif c == '[':
|
||||
j = i
|
||||
if j < n and pat[j] == '!':
|
||||
j = j + 1
|
||||
if j < n and pat[j] == ']':
|
||||
j = j + 1
|
||||
while j < n and pat[j] != ']':
|
||||
j = j + 1
|
||||
if j >= n:
|
||||
res = res + '\\['
|
||||
else:
|
||||
stuff = pat[i:j]
|
||||
if '--' not in stuff:
|
||||
stuff = stuff.replace('\\', r'\\')
|
||||
else:
|
||||
chunks = []
|
||||
k = i + 2 if pat[i] == '!' else i + 1
|
||||
while True:
|
||||
k = pat.find('-', k, j)
|
||||
if k < 0:
|
||||
break
|
||||
chunks.append(pat[i:k])
|
||||
i = k + 1
|
||||
k = k + 3
|
||||
chunks.append(pat[i:j])
|
||||
# Escape backslashes and hyphens for set difference (--).
|
||||
# Hyphens that create ranges shouldn't be escaped.
|
||||
stuff = '-'.join(s.replace('\\', r'\\').replace('-', r'\-')
|
||||
for s in chunks)
|
||||
# Escape set operations (&&, ~~ and ||).
|
||||
stuff = re.sub(r'([&~|])', r'\\\1', stuff)
|
||||
i = j + 1
|
||||
if stuff[0] == '!':
|
||||
stuff = '^' + stuff[1:]
|
||||
elif stuff[0] in ('^', '['):
|
||||
stuff = '\\' + stuff
|
||||
res = '%s[%s]' % (res, stuff)
|
||||
elif c == '{':
|
||||
# Handling of brace expression: '{PATTERN,PATTERN,...}'
|
||||
j = 1
|
||||
while j < n and pat[j] != '}':
|
||||
j = j + 1
|
||||
if j >= n:
|
||||
res = res + '\\{'
|
||||
else:
|
||||
stuff = pat[i:j]
|
||||
i = j + 1
|
||||
|
||||
# Find indices of ',' in pattern excluding r'\,'.
|
||||
# E.g. for r'a\,a,b\b,c' it will be [4, 8]
|
||||
indices = [m.end() for m in re.finditer(r'[^\\],', stuff)]
|
||||
|
||||
# Splitting pattern string based on ',' character.
|
||||
# Also '\,' is translated to ','. E.g. for r'a\,a,b\b,c':
|
||||
# * first_part = 'a,a'
|
||||
# * last_part = 'c'
|
||||
# * middle_part = ['b,b']
|
||||
first_part = stuff[:indices[0] - 1].replace(r'\,', ',')
|
||||
last_part = stuff[indices[-1]:].replace(r'\,', ',')
|
||||
middle_parts = [
|
||||
stuff[st:en - 1].replace(r'\,', ',')
|
||||
for st, en in zip(indices, indices[1:])
|
||||
]
|
||||
|
||||
# creating the regex from splitted pattern. Each part is
|
||||
# recursivelly evaluated.
|
||||
expanded = functools.reduce(
|
||||
lambda a, b: '|'.join((a, b)),
|
||||
(_translate(elem) for elem in [first_part] + middle_parts + [last_part])
|
||||
)
|
||||
res = '%s(%s)' % (res, expanded)
|
||||
else:
|
||||
res = res + re.escape(c)
|
||||
return res
|
||||
|
||||
|
||||
def translate(pat):
|
||||
res = _translate(pat)
|
||||
return r'(?s:%s)\Z' % res
|
||||
|
||||
|
||||
magic_check = re.compile('([*?[{])')
|
||||
magic_check_bytes = re.compile(b'([*?[{])')
|
||||
# cpython github PR 6299 ends here
|
||||
|
||||
# Here's the code block we're going to use to monkey patch ``glob`` module and ``fnmatch`` modules
|
||||
# DO NOT monkey patch here, only patch where you really needs
|
||||
#
|
||||
# import glob
|
||||
# import fnmatch
|
||||
# from idf_ci_utils import magic_check, magic_check_bytes, translate
|
||||
# glob.magic_check = magic_check
|
||||
# glob.magic_check_bytes = magic_check_bytes
|
||||
# fnmatch.translate = translate
|
||||
|
@ -1,12 +1,17 @@
|
||||
#!/usr/bin/env python
|
||||
import argparse
|
||||
import re
|
||||
from os.path import dirname, join, normpath, relpath
|
||||
from os.path import dirname, exists, join, normpath, relpath
|
||||
|
||||
CLANG_TIDY_REGEX = re.compile(r'(.+|[a-zA-Z]:\\\\.+):([0-9]+):([0-9]+): ([^:]+): (.+)')
|
||||
|
||||
|
||||
def normalize_clang_tidy_path(file_path, output_path, base_dir):
|
||||
if not exists(file_path):
|
||||
print('Skipping normalizing. This could only happen when skipping clang-tidy check '
|
||||
'because of no c file modified. Please double check')
|
||||
return
|
||||
|
||||
with open(output_path, 'w') as fw:
|
||||
for line in open(file_path):
|
||||
result = CLANG_TIDY_REGEX.match(line)
|
||||
|
@ -1,10 +1,5 @@
|
||||
# Modified from https://gitlab.com/gitlab-org/gitlab/-/blob/master/scripts/utils.sh
|
||||
|
||||
# before each job, we need to check if this job is filtered by bot stage/job filter
|
||||
function apply_bot_filter() {
|
||||
python "${IDF_PATH}"/tools/ci/apply_bot_filter.py || exit 0
|
||||
}
|
||||
|
||||
function add_ssh_keys() {
|
||||
local key_string="${1}"
|
||||
mkdir -p ~/.ssh
|
||||
|
@ -1,3 +1,3 @@
|
||||
TEST_COMPONENTS=fatfs
|
||||
CONFIG_FATFS_USE_FASTSEEK=y
|
||||
CONFIG_FATFS_FAST_SEEK_BUFFER_SIZE=64
|
||||
CONFIG_FATFS_FAST_SEEK_BUFFER_SIZE=64
|
||||
|
Loading…
x
Reference in New Issue
Block a user