"""
Command line tool to assign tests to CI test jobs.
"""
import argparse
import errno
import json
import os
import re
from copy import deepcopy

import yaml

try:
    from yaml import CLoader as Loader
except ImportError:
    from yaml import Loader as Loader  # type: ignore

import gitlab_api
from tiny_test_fw.Utility import CIAssignTest

try:
    from idf_py_actions.constants import PREVIEW_TARGETS, SUPPORTED_TARGETS
except ImportError:
    SUPPORTED_TARGETS = []
    PREVIEW_TARGETS = []

IDF_PATH_FROM_ENV = os.getenv('IDF_PATH', '')
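
# Illustrative invocation (script name, paths, and case group here are hypothetical;
# the real CI pipeline supplies its own arguments -- see the argparse definitions at
# the bottom of this file):
#
#   python assign_test.py unit_test tools/unit-test-app -c .gitlab/ci/target-test.yml -o test_configs
#
# The positional case_group argument selects which *AssignTest class below is used.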


class IDFCaseGroup(CIAssignTest.Group):
    LOCAL_BUILD_DIR = None
    BUILD_JOB_NAMES = None

    @classmethod
    def get_artifact_index_file(cls):
        assert cls.LOCAL_BUILD_DIR
        if IDF_PATH_FROM_ENV:
            artifact_index_file = os.path.join(IDF_PATH_FROM_ENV, cls.LOCAL_BUILD_DIR, 'artifact_index.json')
        else:
            artifact_index_file = 'artifact_index.json'
        return artifact_index_file


class IDFAssignTest(CIAssignTest.AssignTest):
    DEFAULT_FILTER = {
        'category': 'function',
        'ignore': False,
        'supported_in_ci': True,
        'nightly_run': False,
    }

    def __init__(self, test_case_path, ci_config_file, case_group=IDFCaseGroup):
        super(IDFAssignTest, self).__init__(test_case_path, ci_config_file, case_group)

    def format_build_log_path(self, parallel_num):
        return '{}/list_job_{}.json'.format(self.case_group.LOCAL_BUILD_DIR, parallel_num)

    def create_artifact_index_file(self, project_id=None, pipeline_id=None):
        if project_id is None:
            project_id = os.getenv('CI_PROJECT_ID')
        if pipeline_id is None:
            pipeline_id = os.getenv('CI_PIPELINE_ID')
        gitlab_inst = gitlab_api.Gitlab(project_id)

        artifact_index_list = []
        for build_job_name in self.case_group.BUILD_JOB_NAMES:
            job_info_list = gitlab_inst.find_job_id(build_job_name, pipeline_id=pipeline_id)
            for job_info in job_info_list:
                parallel_num = job_info['parallel_num'] or 1  # Could be None if "parallel_num" not defined for the job
                raw_data = gitlab_inst.download_artifact(job_info['id'],
                                                         [self.format_build_log_path(parallel_num)])[0]
                build_info_list = [json.loads(line) for line in raw_data.decode().splitlines()]
                for build_info in build_info_list:
                    build_info['ci_job_id'] = job_info['id']
                    artifact_index_list.append(build_info)
        artifact_index_file = self.case_group.get_artifact_index_file()
        try:
            os.makedirs(os.path.dirname(artifact_index_file))
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise e

        with open(artifact_index_file, 'w') as f:
            json.dump(artifact_index_list, f)
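
    # The artifact index written above is a flat JSON list: one entry per build record
    # taken from a build job's list_job_*.json file, annotated with the 'ci_job_id' of
    # the job that produced it.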

    def search_cases(self, case_filter=None):
        _filter = deepcopy(case_filter) if case_filter else {}
        if 'NIGHTLY_RUN' in os.environ or 'BOT_LABEL_NIGHTLY_RUN' in os.environ:
            _filter.update({'nightly_run': True})
        return super().search_cases(_filter)
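
# The concrete case groups below each define where their build artifacts live
# (LOCAL_BUILD_DIR) and which CI build jobs produce them (BUILD_JOB_NAMES).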


class ExampleGroup(IDFCaseGroup):
    SORT_KEYS = CI_JOB_MATCH_KEYS = ['env_tag', 'target']

    LOCAL_BUILD_DIR = 'build_examples'  # type: ignore
    EXAMPLE_TARGETS = SUPPORTED_TARGETS + PREVIEW_TARGETS
    BUILD_JOB_NAMES = ['build_examples_cmake_{}'.format(target) for target in EXAMPLE_TARGETS]  # type: ignore


class TestAppsGroup(ExampleGroup):
    LOCAL_BUILD_DIR = 'build_test_apps'
    TEST_APP_TARGETS = SUPPORTED_TARGETS + PREVIEW_TARGETS
    BUILD_JOB_NAMES = ['build_test_apps_{}'.format(target) for target in TEST_APP_TARGETS]  # type: ignore


class ComponentUTGroup(TestAppsGroup):
    LOCAL_BUILD_DIR = 'build_component_ut'
    UNIT_TEST_TARGETS = SUPPORTED_TARGETS + PREVIEW_TARGETS
    BUILD_JOB_NAMES = ['build_component_ut_{}'.format(target) for target in UNIT_TEST_TARGETS]  # type: ignore


class UnitTestGroup(IDFCaseGroup):
    SORT_KEYS = ['test environment', 'tags', 'chip_target']
    CI_JOB_MATCH_KEYS = ['test environment']

    LOCAL_BUILD_DIR = 'tools/unit-test-app/builds'  # type: ignore
    UNIT_TEST_TARGETS = SUPPORTED_TARGETS + PREVIEW_TARGETS
    BUILD_JOB_NAMES = ['build_esp_idf_tests_cmake_{}'.format(target) for target in UNIT_TEST_TARGETS]  # type: ignore

    MAX_CASE = 50
    ATTR_CONVERT_TABLE = {
        'execution_time': 'execution time'
    }
    DUT_CLS_NAME = {
        'esp32': 'ESP32DUT',
        'esp32s2': 'ESP32S2DUT',
        'esp32s3': 'ESP32S3DUT',
        'esp32c2': 'ESP32C2DUT',
        'esp32c3': 'ESP32C3DUT',
        'esp8266': 'ESP8266DUT',
    }
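
    # DUT_CLS_NAME maps a chip target to the ttfw_idf DUT class name used when
    # generating the 'overwrite' section of the job config (see output() below).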

    def __init__(self, case):
        super(UnitTestGroup, self).__init__(case)
        for tag in self._get_case_attr(case, 'tags'):
            self.ci_job_match_keys.add(tag)

    @staticmethod
    def _get_case_attr(case, attr):
        if attr in UnitTestGroup.ATTR_CONVERT_TABLE:
            attr = UnitTestGroup.ATTR_CONVERT_TABLE[attr]
        return case[attr]

    def add_extra_case(self, case):
        """ Add the case if the current group contains all tags required by it; return whether it was added """
        added = False
        if self.accept_new_case():
            for key in self.filters:
                if self._get_case_attr(case, key) != self.filters[key]:
                    if key == 'tags':
                        if set(self._get_case_attr(case, key)).issubset(set(self.filters[key])):
                            continue
                    break
            else:
                self.case_list.append(case)
                added = True
        return added

    def _create_extra_data(self, test_cases, test_function):
        """
        For unit test cases, we need to copy some attributes of the test cases into the config file,
        so the unit test function knows how to run the case.
        """
        case_data = []
        for case in test_cases:
            one_case_data = {
                'config': self._get_case_attr(case, 'config'),
                'name': self._get_case_attr(case, 'summary'),
                'reset': self._get_case_attr(case, 'reset'),
                'timeout': self._get_case_attr(case, 'timeout'),
            }

            if test_function in ['run_multiple_devices_cases', 'run_multiple_stage_cases']:
                try:
                    one_case_data['child case num'] = self._get_case_attr(case, 'child case num')
                except KeyError as e:
                    print('multiple devices/stages cases must contain at least two test functions')
                    print('case name: {}'.format(one_case_data['name']))
                    raise e

            case_data.append(one_case_data)
        return case_data

    def _divide_case_by_test_function(self):
        """
        Divide the cases of the current test group by the test function they need to use.

        :return: dict mapping each test function to its list of cases
        """
        case_by_test_function = {
            'run_multiple_devices_cases': [],
            'run_multiple_stage_cases': [],
            'run_unit_test_cases': [],
        }

        for case in self.case_list:
            if case['multi_device'] == 'Yes':
                case_by_test_function['run_multiple_devices_cases'].append(case)
            elif case['multi_stage'] == 'Yes':
                case_by_test_function['run_multiple_stage_cases'].append(case)
            else:
                case_by_test_function['run_unit_test_cases'].append(case)
        return case_by_test_function

    def output(self):
        """
        Output data for job configs.

        :return: {"Filter": case filter, "CaseConfig": list of case configs for cases in this group}
        """

        target = self._get_case_attr(self.case_list[0], 'chip_target')
        if target:
            overwrite = {
                'dut': {
                    'package': 'ttfw_idf',
                    'class': self.DUT_CLS_NAME[target],
                }
            }
        else:
            overwrite = dict()

        case_by_test_function = self._divide_case_by_test_function()

        output_data = {
            # we don't need a filter for the test function, as UT uses a few test functions for all cases
            'CaseConfig': [
                {
                    'name': test_function,
                    'extra_data': self._create_extra_data(test_cases, test_function),
                    'overwrite': overwrite,
                } for test_function, test_cases in case_by_test_function.items() if test_cases
            ],
        }
        return output_data
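
    # Illustrative shape of the returned dict (values are hypothetical):
    #   {'CaseConfig': [{'name': 'run_unit_test_cases',
    #                    'extra_data': [{'config': 'default', 'name': '...', 'reset': '...', 'timeout': 30}],
    #                    'overwrite': {'dut': {'package': 'ttfw_idf', 'class': 'ESP32DUT'}}}]}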


class ExampleAssignTest(IDFAssignTest):
    CI_TEST_JOB_PATTERN = re.compile(r'^example_test_.+')

    def __init__(self, test_case_path, ci_config_file):
        super(ExampleAssignTest, self).__init__(test_case_path, ci_config_file, case_group=ExampleGroup)


class TestAppsAssignTest(IDFAssignTest):
    CI_TEST_JOB_PATTERN = re.compile(r'^test_app_test_.+')

    def __init__(self, test_case_path, ci_config_file):
        super(TestAppsAssignTest, self).__init__(test_case_path, ci_config_file, case_group=TestAppsGroup)


class ComponentUTAssignTest(IDFAssignTest):
    CI_TEST_JOB_PATTERN = re.compile(r'^component_ut_test_.+')

    def __init__(self, test_case_path, ci_config_file):
        super(ComponentUTAssignTest, self).__init__(test_case_path, ci_config_file, case_group=ComponentUTGroup)


class UnitTestAssignTest(IDFAssignTest):
    CI_TEST_JOB_PATTERN = re.compile(r'^UT_.+')

    def __init__(self, test_case_path, ci_config_file):
        super(UnitTestAssignTest, self).__init__(test_case_path, ci_config_file, case_group=UnitTestGroup)

    def search_cases(self, case_filter=None):
        """
        For unit test cases, we don't search for test functions.
        The unit test cases are stored in a yaml file which is created by the build-idf-test job.
        """

        def find_by_suffix(suffix, path):
            res = []
            for root, _, files in os.walk(path):
                for file in files:
                    if file.endswith(suffix):
                        res.append(os.path.join(root, file))
            return res

        def get_test_cases_from_yml(yml_file):
            try:
                with open(yml_file) as fr:
                    raw_data = yaml.load(fr, Loader=Loader)
                test_cases = raw_data['test cases']
            except (IOError, KeyError):
                return []
            else:
                return test_cases

        test_cases = []
        for path in self.test_case_paths:
            if os.path.isdir(path):
                for yml_file in find_by_suffix('.yml', path):
                    test_cases.extend(get_test_cases_from_yml(yml_file))
            elif os.path.isfile(path) and path.endswith('.yml'):
                test_cases.extend(get_test_cases_from_yml(path))
            else:
                print('Test case path is invalid. Should only happen when using @bot to skip unit tests.')

        # filter keys are lower case; map the lower-case keys back to the original keys
        try:
            key_mapping = {x.lower(): x for x in test_cases[0].keys()}
        except IndexError:
            key_mapping = dict()
        if case_filter:
            for key in case_filter:
                filtered_cases = []
                for case in test_cases:
                    try:
                        mapped_key = key_mapping[key]
                        # the bot converts strings to lower case
                        if isinstance(case[mapped_key], str):
                            _value = case[mapped_key].lower()
                        else:
                            _value = case[mapped_key]
                        if _value in case_filter[key]:
                            filtered_cases.append(case)
                    except KeyError:
                        # the case doesn't have this key; treat it as passing the filter
                        filtered_cases.append(case)
                test_cases = filtered_cases
        # Sort cases by config and test function so that cases with similar attributes are more
        # likely to be assigned to the same job in a later stage; this reduces the number of
        # DUT flash operations.
        test_cases.sort(key=lambda x: x['config'] + x['multi_stage'] + x['multi_device'])
        return test_cases


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('case_group', choices=['example_test', 'custom_test', 'unit_test', 'component_ut'])
    parser.add_argument('test_case_paths', nargs='+', help='test case folder or file')
    parser.add_argument('-c', '--config', default=os.path.join(IDF_PATH_FROM_ENV, '.gitlab', 'ci', 'target-test.yml'),
                        help='gitlab ci config file')
    parser.add_argument('-o', '--output', help='output path of config files')
    parser.add_argument('--pipeline_id', '-p', type=int, default=None, help='pipeline_id')
    parser.add_argument('--test-case-file-pattern', help='file name pattern used to find Python test case files')
    args = parser.parse_args()

    SUPPORTED_TARGETS.extend(PREVIEW_TARGETS)

    test_case_paths = [os.path.join(IDF_PATH_FROM_ENV, path) if not os.path.isabs(path) else path for path in
                       args.test_case_paths]  # type: ignore
    args_list = [test_case_paths, args.config]
    if args.case_group == 'example_test':
        assigner = ExampleAssignTest(*args_list)
    elif args.case_group == 'custom_test':
        assigner = TestAppsAssignTest(*args_list)
    elif args.case_group == 'unit_test':
        assigner = UnitTestAssignTest(*args_list)
    elif args.case_group == 'component_ut':
        assigner = ComponentUTAssignTest(*args_list)
    else:
        raise SystemExit(1)  # unreachable: argparse restricts case_group to the choices above

    if args.test_case_file_pattern:
        assigner.CI_TEST_JOB_PATTERN = re.compile(r'{}'.format(args.test_case_file_pattern))

    assigner.assign_cases()
    assigner.output_configs(args.output)
    assigner.create_artifact_index_file()