Now scan_tests will generate `scan_<target>.json`

Also updated the CI YAML and shell scripts.
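
For reference, CIScanTests.py (added below) writes one JSON object per line into each scan_<target>.json. A minimal sketch of how a consumer could read such a file; the path and the app_dir value are illustrative, not part of this commit:

import json

with open('examples/test_configs/scan_esp32.json') as f:
    apps = [json.loads(line) for line in f if line.strip()]

# Each entry has the shape written by CIScanTests.py, e.g.:
# {"app_dir": "examples/get-started/hello_world", "build": true, "preserve": true}
for app in apps:
    if app['build']:
        print(app['app_dir'])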
Fu Hanxi 2020-04-20 14:30:31 +08:00
parent 59f5e9af37
commit b26d42afe3
15 changed files with 244 additions and 31 deletions

View File

@ -7,7 +7,7 @@
import argparse
import sys
import logging
from find_build_apps import BuildItem, BuildError, setup_logging, BUILD_SYSTEMS
from find_build_apps import BuildItem, BuildError, setup_logging, BUILD_SYSTEMS, safe_exit_if_file_is_empty
def main():
@ -71,11 +71,10 @@ def main():
help="Name of the file to read the list of builds from. If not specified, read from stdin.",
)
args = parser.parse_args()
setup_logging(args)
safe_exit_if_file_is_empty(args.build_list.name)
build_items = [BuildItem.from_json(line) for line in args.build_list]
if not build_items:
logging.error("Empty build list!")
raise SystemExit(1)

View File

@ -31,6 +31,7 @@ die() {
[ -z ${BUILD_PATH} ] && die "BUILD_PATH is not set"
[ -z ${IDF_TARGET} ] && die "IDF_TARGET is not set"
[ -z ${EXAMPLE_TEST_BUILD_SYSTEM} ] && die "EXAMPLE_TEST_BUILD_SYSTEM is not set"
[ -z ${SCAN_EXAMPLE_TEST_JSON} ] && die "SCAN_EXAMPLE_TEST_JSON is not set"
[ -d ${LOG_PATH} ] || mkdir -p ${LOG_PATH}
[ -d ${BUILD_PATH} ] || mkdir -p ${BUILD_PATH}
@ -85,6 +86,7 @@ ${IDF_PATH}/tools/find_apps.py examples \
--config 'sdkconfig.ci=default' \
--config 'sdkconfig.ci.*=' \
--config '=default' \
--scan-tests-json ${SCAN_EXAMPLE_TEST_JSON}
# --config rules above explained:
# 1. If sdkconfig.ci exists, use it to build the example with configuration name "default"
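
For clarity, a rough sketch of how such a '--config' rule string could be split into a file pattern and a configuration name; this is only an illustration of the rule syntax, not the actual config_rules_from_str() implementation from find_build_apps:

def split_config_rule(rule):
    # "sdkconfig.ci=default" -> ("sdkconfig.ci", "default")
    # "sdkconfig.ci.*="      -> ("sdkconfig.ci.*", "")  (config name taken from the wildcard match)
    # "=default"             -> ("", "default")         (no sdkconfig file, plain default build)
    file_pattern, _, config_name = rule.partition('=')
    return file_pattern, config_name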

View File

@ -29,6 +29,7 @@ die() {
[ -z ${LOG_PATH} ] && die "LOG_PATH is not set"
[ -z ${BUILD_PATH} ] && die "BUILD_PATH is not set"
[ -z ${IDF_TARGET} ] && die "IDF_TARGET is not set"
[ -z ${SCAN_CUSTOM_TEST_JSON} ] && die "SCAN_CUSTOM_TEST_JSON is not set"
[ -d ${LOG_PATH} ] || mkdir -p ${LOG_PATH}
[ -d ${BUILD_PATH} ] || mkdir -p ${BUILD_PATH}
@ -74,6 +75,7 @@ ${IDF_PATH}/tools/find_apps.py tools/test_apps \
--config 'sdkconfig.ci=default' \
--config 'sdkconfig.ci.*=' \
--config '=default' \
--scan-tests-json ${SCAN_CUSTOM_TEST_JSON}
# --config rules above explained:
# 1. If sdkconfig.ci exists, use it to build the example with configuration name "default"

View File

@ -13,10 +13,10 @@ import logging
import re
try:
from find_build_apps import BuildItem, setup_logging
from find_build_apps import BuildItem, setup_logging, safe_exit_if_file_is_empty
except ImportError:
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
from find_build_apps import BuildItem, setup_logging
from find_build_apps import BuildItem, setup_logging, safe_exit_if_file_is_empty
WARNING_REGEX = re.compile(r"(?:error|warning)[^\w]", re.MULTILINE | re.IGNORECASE)
@ -69,6 +69,7 @@ def main():
args = parser.parse_args()
setup_logging(args)
safe_exit_if_file_is_empty(args.build_list.name)
build_items = [BuildItem.from_json(line) for line in args.build_list]
if not build_items:

View File

@ -126,6 +126,8 @@ build_examples_make:
# same as above, but for CMake
.build_examples_cmake: &build_examples_cmake
extends: .build_examples_template
dependencies:
- scan_tests
artifacts:
paths:
- build_examples/list.json
@ -142,6 +144,8 @@ build_examples_make:
LOG_PATH: "${CI_PROJECT_DIR}/log_examples"
BUILD_PATH: "${CI_PROJECT_DIR}/build_examples"
EXAMPLE_TEST_BUILD_SYSTEM: "cmake"
SCAN_EXAMPLE_TEST_JSON: ${CI_PROJECT_DIR}/examples/test_configs/scan_${IDF_TARGET}.json
build_examples_cmake_esp32:
extends: .build_examples_cmake
@ -156,6 +160,8 @@ build_examples_cmake_esp32s2:
.build_test_apps: &build_test_apps
extends: .build_template
stage: build
dependencies:
- scan_tests
artifacts:
when: always
paths:
@ -171,8 +177,9 @@ build_examples_cmake_esp32s2:
- $LOG_PATH
expire_in: 3 days
variables:
LOG_PATH: "$CI_PROJECT_DIR/log_test_apps"
BUILD_PATH: "$CI_PROJECT_DIR/build_test_apps"
LOG_PATH: "${CI_PROJECT_DIR}/log_test_apps"
BUILD_PATH: "${CI_PROJECT_DIR}/build_test_apps"
SCAN_CUSTOM_TEST_JSON: ${CI_PROJECT_DIR}/tools/test_apps/test_configs/scan_${IDF_TARGET}.json
only:
variables:
- $BOT_TRIGGER_WITH_LABEL == null

View File

@ -190,3 +190,30 @@ check_public_headers:
script:
- python tools/ci/check_public_headers.py --jobs 4 --prefix xtensa-esp32-elf-
.scan_build_tests:
stage: pre_check
image: $CI_DOCKER_REGISTRY/ubuntu-test-env$BOT_DOCKER_IMAGE_TAG
tags:
- assign_test
variables:
CI_SCAN_TESTS_PY: ${CI_PROJECT_DIR}/tools/ci/python_packages/ttfw_idf/CIScanTests.py
TEST_CONFIG_FILE: ${CI_PROJECT_DIR}/tools/ci/config/target-test.yml
scan_tests:
extends: .scan_build_tests
only:
variables:
- $BOT_LABEL_EXAMPLE_TEST
- $BOT_LABEL_CUSTOM_TEST
artifacts:
paths:
- $EXAMPLE_TEST_OUTPUT_DIR
- $TEST_APPS_OUTPUT_DIR
variables:
EXAMPLE_TEST_DIR: ${CI_PROJECT_DIR}/examples
EXAMPLE_TEST_OUTPUT_DIR: ${CI_PROJECT_DIR}/examples/test_configs
TEST_APPS_TEST_DIR: ${CI_PROJECT_DIR}/tools/test_apps
TEST_APPS_OUTPUT_DIR: ${CI_PROJECT_DIR}/tools/test_apps/test_configs
script:
- python $CI_SCAN_TESTS_PY example_test $EXAMPLE_TEST_DIR -c $TEST_CONFIG_FILE -o $EXAMPLE_TEST_OUTPUT_DIR
- python $CI_SCAN_TESTS_PY test_apps $TEST_APPS_TEST_DIR -c $TEST_CONFIG_FILE -o $TEST_APPS_OUTPUT_DIR

View File

@ -124,7 +124,7 @@ class Gitlab(object):
return raw_data_list
def find_job_id(self, job_name, pipeline_id=None, job_status="success"):
def find_job_id(self, job_name, pipeline_id=None, job_status="success", suffix=None):
"""
Get Job ID from job name of specific pipeline
@ -132,6 +132,7 @@ class Gitlab(object):
:param pipeline_id: If None, will get pipeline id from CI pre-defined variable.
:param job_status: status of job. One pipeline could have multiple jobs with same name after retry.
job_status is used to filter these jobs.
:param suffix: suffix of the job name, e.g. 'limit' for jobs that build only the needed apps.
:return: a list of job IDs (parallel job will generate multiple jobs)
"""
job_id_list = []
@ -144,6 +145,9 @@ class Gitlab(object):
if match:
if match.group(1) == job_name and job.status == job_status:
job_id_list.append({"id": job.id, "parallel_num": match.group(3)})
elif suffix:
if match.group(1) == "{}_{}".format(job_name, suffix) and job.status == job_status:
job_id_list.append({"id": job.id, "parallel_num": match.group(3)})
return job_id_list
@retry_download
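
A hypothetical call showing the new suffix argument in use (the job name is illustrative; gitlab_inst is an instance of this Gitlab wrapper, as in unit_test.py below):

job_info_list = gitlab_inst.find_job_id('build_examples_cmake_esp32', suffix='limit')
# Matches jobs named 'build_examples_cmake_esp32' as well as
# 'build_examples_cmake_esp32_limit', returning their IDs and parallel numbers.
for job_info in job_info_list:
    print(job_info['id'], job_info['parallel_num'])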

View File

@ -189,16 +189,15 @@ class AssignTest(object):
job_list.sort(key=lambda x: x["name"])
return job_list
def _search_cases(self, test_case_path, case_filter=None, test_case_file_pattern=None):
def search_cases(self, case_filter=None):
"""
:param test_case_path: path contains test case folder
:param case_filter: filter for test cases. The filter to use is the default filter updated with the case_filter param.
:return: filtered test case list
"""
_case_filter = self.DEFAULT_FILTER.copy()
if case_filter:
_case_filter.update(case_filter)
test_methods = SearchCases.Search.search_test_cases(test_case_path, test_case_file_pattern)
test_methods = SearchCases.Search.search_test_cases(self.test_case_path, self.test_case_file_pattern)
return CaseConfig.filter_test_cases(test_methods, _case_filter)
def _group_cases(self):
@ -287,7 +286,7 @@ class AssignTest(object):
failed_to_assign = []
assigned_groups = []
case_filter = self._apply_bot_filter()
self.test_cases = self._search_cases(self.test_case_path, case_filter, self.test_case_file_pattern)
self.test_cases = self.search_cases(case_filter)
self._apply_bot_test_count()
test_groups = self._group_cases()

View File

@ -50,6 +50,7 @@ class Search(object):
for i, test_function in enumerate(test_functions_out):
print("\t{}. ".format(i + 1) + test_function.case_info["name"])
test_function.case_info['app_dir'] = os.path.dirname(file_name)
return test_functions_out
@classmethod

View File

@ -65,7 +65,7 @@ def create_artifact_index_file(project_id=None, pipeline_id=None, case_group=Exa
return "{}/list_job_{}.json".format(case_group.BUILD_LOCAL_DIR, parallel or 1)
for build_job_name in case_group.BUILD_JOB_NAMES:
job_info_list = gitlab_inst.find_job_id(build_job_name, pipeline_id=pipeline_id)
job_info_list = gitlab_inst.find_job_id(build_job_name, pipeline_id=pipeline_id, suffix='limit')
for job_info in job_info_list:
raw_data = gitlab_inst.download_artifact(job_info["id"], [format_build_log_path()])[0]
build_info_list = [json.loads(line) for line in raw_data.splitlines()]

View File

@ -139,7 +139,7 @@ class UnitTestAssignTest(CIAssignTest.AssignTest):
def __init__(self, test_case_path, ci_config_file):
CIAssignTest.AssignTest.__init__(self, test_case_path, ci_config_file, case_group=Group)
def _search_cases(self, test_case_path, case_filter=None, test_case_file_pattern=None):
def search_cases(self, case_filter=None):
"""
For unit test cases, we don't search for test functions.
The unit test cases are stored in a YAML file created by the build-idf-test job.

View File

@ -0,0 +1,134 @@
import argparse
import json
import os
import re
from collections import defaultdict
from find_apps import find_apps
from find_build_apps import CMakeBuildSystem
from ttfw_idf.CIAssignExampleTest import CIExampleAssignTest, TestAppsGroup, ExampleGroup
VALID_TARGETS = [
'esp32',
'esp32s2',
]
SPECIAL_REFS = [
'master',
re.compile(r'^release/v'),
re.compile(r'^v\d+\.\d+'),
]
def _judge_build_all():
ref = os.getenv('CI_COMMIT_REF_NAME')
pipeline_src = os.getenv('CI_PIPELINE_SOURCE')
if not ref or not pipeline_src:
return False
# scheduled pipeline will build all
if pipeline_src == 'schedule':
return True
# master, release/v..., and v1.2.3... refs will build all
for special_ref in SPECIAL_REFS:
if isinstance(special_ref, re._pattern_type):
if special_ref.match(ref):
return True
else:
if ref == special_ref:
return True
return False
def main():
parser = argparse.ArgumentParser(description='Scan the required build tests')
actions = parser.add_subparsers(dest='action')
common = argparse.ArgumentParser(add_help=False)
common.add_argument('paths', type=str, nargs='+',
help="One or more app paths")
common.add_argument('-c', '--ci_config_file', type=str, required=True,
help="gitlab ci config file")
common.add_argument('-o', '--output_path', type=str, required=True,
help="output path of the scan result")
common.add_argument('-p', '--preserve-all', action="store_true",
help='add this flag to preserve artifacts for all apps')
actions.add_parser('example_test', parents=[common])
actions.add_parser('test_apps', parents=[common])
# actions.add_parser('unit_test', parents=[common])
args = parser.parse_args()
test_cases = []
for path in args.paths:
if args.action == 'example_test':
assign = CIExampleAssignTest(path, args.ci_config_file, ExampleGroup)
elif args.action == 'test_apps':
CIExampleAssignTest.CI_TEST_JOB_PATTERN = re.compile(r'^test_app_test_.+')
assign = CIExampleAssignTest(path, args.ci_config_file, TestAppsGroup)
# elif args.action == 'unit_test':
# assign = UnitTestAssignTest(args.test_case, args.ci_config_file)
else:
raise SystemExit(1) # should not happen: argparse restricts 'action' to the sub-parsers above
test_cases.extend(assign.search_cases())
try:
os.makedirs(args.output_path)
except Exception:
pass
'''
{
<target>: {
'test_case_apps': [<app_dir>],
'standalone_apps': [<app_dir>],
},
...
}
'''
scan_info_dict = defaultdict(dict)
# store the test case app dirs; these folders are excluded when scanning for standalone apps
exclude_apps = []
# TODO change this chip to target after feat/add_multi_target_for_example_test is merged
for target in VALID_TARGETS:
target_dict = scan_info_dict[target]
test_case_apps = target_dict['test_case_apps'] = set()
for case in test_cases:
app_dir = case.case_info['app_dir']
app_target = case.case_info['chip']
if app_target.lower() != target.lower():
continue
test_case_apps.update(find_apps(CMakeBuildSystem, app_dir, True, [], target.lower()))
exclude_apps.append(app_dir)
for target in VALID_TARGETS:
target_dict = scan_info_dict[target]
standalone_apps = target_dict['standalone_apps'] = set()
for path in args.paths:
standalone_apps.update(find_apps(CMakeBuildSystem, path, True, exclude_apps, target.lower()))
build_all = _judge_build_all()
for target in VALID_TARGETS:
apps = []
for app_dir in scan_info_dict[target]['test_case_apps']:
apps.append({
'app_dir': app_dir,
'build': True,
'preserve': True,
})
for app_dir in scan_info_dict[target]['standalone_apps']:
apps.append({
'app_dir': app_dir,
'build': build_all,
'preserve': args.preserve_all and build_all, # artifacts can't be preserved for apps that aren't built
})
with open(os.path.join(args.output_path, 'scan_{}.json'.format(target.lower())), 'w') as fw:
fw.writelines([json.dumps(app) + '\n' for app in apps])
if __name__ == '__main__':
main()
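
A few hypothetical refs and the decision _judge_build_all() would make for them under the rules above, assuming a non-scheduled pipeline (ref names are illustrative):

# CI_COMMIT_REF_NAME = 'master'        -> True   (exact match in SPECIAL_REFS)
# CI_COMMIT_REF_NAME = 'release/v4.2'  -> True   (matches r'^release/v')
# CI_COMMIT_REF_NAME = 'v4.2-beta1'    -> True   (matches r'^v\d+\.\d+')
# CI_COMMIT_REF_NAME = 'feature/foo'   -> False  (only apps with test cases are built)
# A scheduled pipeline (CI_PIPELINE_SOURCE == 'schedule') always returns True.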

View File

@ -11,6 +11,8 @@ import re
import glob
import logging
import typing
from typing import Optional
from find_build_apps import (
BUILD_SYSTEMS,
BUILD_SYSTEM_CMAKE,
@ -22,8 +24,8 @@ from find_build_apps import (
DEFAULT_TARGET,
)
# Helper functions
# Helper functions
def dict_from_sdkconfig(path):
"""
@ -124,8 +126,8 @@ def find_builds_for_app(
return build_items
def find_apps(build_system_class, path, recursive, exclude_list,
target): # type: (typing.Type[BuildSystem], str, bool, typing.List[str], str) -> typing.List[str]
def find_apps(build_system_class, path, recursive, exclude_list, target,
build_apps_list_file=None): # type: (typing.Type[BuildSystem], str, bool, typing.List[str], str, Optional[str]) -> typing.List[str]
"""
Find app directories in path (possibly recursively), which contain apps for the given build system, compatible
with the given target.
@ -134,6 +136,8 @@ def find_apps(build_system_class, path, recursive, exclude_list,
:param recursive: whether to recursively descend into nested directories if no app is found
:param exclude_list: list of paths to be excluded from the recursive search
:param target: desired value of IDF_TARGET; apps incompatible with the given target are skipped.
:param build_apps_list_file: file listing the apps that need to be built; apps not in the list will be skipped.
If None, or the file is empty, all apps are built.
:return: list of paths of the apps found
"""
build_system_name = build_system_class.NAME
@ -147,6 +151,18 @@ def find_apps(build_system_class, path, recursive, exclude_list,
return []
return [path]
# if this argument is empty, treat it as build_all.
if not build_apps_list_file:
build_all = True
else:
# if this argument is not empty but the file doesn't exist, treat it as having no supported apps.
if not os.path.exists(build_apps_list_file):
return []
else:
build_all = False
with open(build_apps_list_file) as fr:
apps_need_be_built = set([line.strip() for line in fr.readlines() if line])
# The remaining part is for recursive == True
apps_found = [] # type: typing.List[str]
for root, dirs, _ in os.walk(path, topdown=True):
@ -156,6 +172,11 @@ def find_apps(build_system_class, path, recursive, exclude_list,
del dirs[:]
continue
if not build_all:
if os.path.abspath(root) not in apps_need_be_built:
logging.debug("Skipping, app not listed in {}".format(build_apps_list_file))
continue
if build_system_class.is_app(root):
logging.debug("Found {} app in {}".format(build_system_name, root))
# Don't recurse into app subdirectories
@ -232,6 +253,13 @@ def main():
type=argparse.FileType("w"),
help="Output the list of builds to the specified file",
)
parser.add_argument(
'-s',
'--scan-tests-json',
default=None,
help="Scan tests result. Restrict the build/architect behavior to apps need to be built.\n"
"If it's None or the file does not exist, will build all apps and upload all artifacts."
)
parser.add_argument("paths", nargs="+", help="One or more app paths.")
args = parser.parse_args()
setup_logging(args)
@ -251,17 +279,17 @@ def main():
# Prepare the list of app paths
app_paths = [] # type: typing.List[str]
for path in args.paths:
app_paths += find_apps(build_system_class, path, args.recursive, args.exclude or [], args.target)
app_paths += find_apps(build_system_class, path, args.recursive, args.exclude or [], args.target, args.scan_tests_json)
build_items = [] # type: typing.List[BuildItem]
if not app_paths:
logging.critical("No {} apps found".format(build_system_class.NAME))
raise SystemExit(1)
logging.info("Found {} apps".format(len(app_paths)))
logging.warning("No {} apps found, skipping...".format(build_system_class.NAME))
sys.exit(0)
logging.info("Found {} apps".format(len(app_paths)))
app_paths = sorted(app_paths)
# Find compatible configurations of each app, collect them as BuildItems
build_items = [] # type: typing.List[BuildItem]
config_rules = config_rules_from_str(args.config or [])
for app_path in app_paths:
build_items += find_builds_for_app(
@ -278,6 +306,7 @@ def main():
# Write out the BuildItems. Only JSON supported now (will add YAML later).
if args.format != "json":
raise NotImplementedError()
out = args.output or sys.stdout
out.writelines([item.to_json() + "\n" for item in build_items])
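
A minimal sketch of calling find_apps() with the new parameter (paths and the list file name are illustrative); passing None keeps the previous build-everything behaviour:

from find_apps import find_apps
from find_build_apps import CMakeBuildSystem

# Without a list file: every compatible app under 'examples' is returned.
all_apps = find_apps(CMakeBuildSystem, 'examples', True, [], 'esp32')

# With a list file: only apps named in the file are returned.
limited_apps = find_apps(CMakeBuildSystem, 'examples', True, [], 'esp32',
                         'examples/test_configs/scan_esp32.json')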

View File

@ -6,6 +6,7 @@ from .common import (
config_rules_from_str,
setup_logging,
DEFAULT_TARGET,
safe_exit_if_file_is_empty,
)
from .cmake import CMakeBuildSystem, BUILD_SYSTEM_CMAKE
from .make import MakeBuildSystem, BUILD_SYSTEM_MAKE
@ -28,4 +29,5 @@ __all__ = [
"MakeBuildSystem",
"BUILD_SYSTEM_MAKE",
"BUILD_SYSTEMS",
"safe_exit_if_file_is_empty",
]

View File

@ -383,3 +383,9 @@ def setup_logging(args):
stream=args.log_file or sys.stderr,
level=log_level,
)
def safe_exit_if_file_is_empty(file_name):
if os.stat(file_name).st_size == 0:
logging.warning('Skipping all...')
sys.exit(0)