mirror of
https://github.com/espressif/esp-idf.git
synced 2024-10-05 20:47:46 -04:00
ci: add pytest-embedded CI jobs
This commit is contained in:
parent
ef685456ab
commit
94578f4658
@ -69,6 +69,7 @@ variables:
|
||||
TEST_ENV_CONFIG_REPO: "https://gitlab-ci-token:${BOT_TOKEN}@${CI_SERVER_HOST}:${CI_SERVER_PORT}/qa/ci-test-runner-configs.git"
|
||||
CI_AUTO_TEST_SCRIPT_REPO_URL: "https://gitlab-ci-token:${BOT_TOKEN}@${CI_SERVER_HOST}:${CI_SERVER_PORT}/qa/auto_test_script.git"
|
||||
CI_AUTO_TEST_SCRIPT_REPO_BRANCH: "ci/v3.1"
|
||||
PYTEST_EMBEDDED_TAG: "v0.4.5"
|
||||
|
||||
.setup_tools_unless_target_test: &setup_tools_unless_target_test |
|
||||
if [[ -n "$IDF_DONT_USE_MIRRORS" ]]; then
|
||||
@ -123,6 +124,19 @@ before_script:
|
||||
- export PYTHONPATH="$IDF_PATH/tools:$IDF_PATH/tools/ci/python_packages:$PYTHONPATH"
|
||||
- fetch_submodules
|
||||
|
||||
.before_script_pytest:
|
||||
before_script:
|
||||
- source tools/ci/utils.sh
|
||||
- source tools/ci/setup_python.sh
|
||||
- add_gitlab_ssh_keys
|
||||
- source tools/ci/configure_ci_environment.sh
|
||||
- *setup_tools_unless_target_test
|
||||
- fetch_submodules
|
||||
- cd /tmp
|
||||
- retry_failed git clone --depth 1 --branch $PYTEST_EMBEDDED_TAG https://gitlab-ci-token:${BOT_TOKEN}@${CI_SERVER_HOST}:${CI_SERVER_PORT}/idf/pytest-embedded.git
|
||||
- cd pytest-embedded && bash foreach.sh install
|
||||
- cd $IDF_PATH
|
||||
|
||||
default:
|
||||
retry:
|
||||
max: 2
|
||||
|
@ -7,6 +7,45 @@
|
||||
SIZE_INFO_LOCATION: "$CI_PROJECT_DIR/size_info.txt"
|
||||
dependencies: []
|
||||
|
||||
.build_pytest_template:
|
||||
extends:
|
||||
- .build_template
|
||||
- .before_script_pytest
|
||||
dependencies: # set dependencies to null to avoid missing artifacts issue
|
||||
needs:
|
||||
- job: fast_template_app
|
||||
artifacts: false
|
||||
variables:
|
||||
PYTHON_VER: 3.6.13
|
||||
artifacts:
|
||||
paths:
|
||||
- "**/build*/size.json"
|
||||
- "**/build*/build.log"
|
||||
- "**/build*/*.bin"
|
||||
- "**/build*/*.elf"
|
||||
- "**/build*/*.map"
|
||||
- "**/build*/flasher_args.json"
|
||||
- "**/build*/config/sdkconfig.json"
|
||||
- "**/build*/bootloader/*.bin"
|
||||
- "**/build*/partition_table/*.bin"
|
||||
- $SIZE_INFO_LOCATION
|
||||
when: always
|
||||
expire_in: 3 days
|
||||
|
||||
build_examples_pytest_esp32:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .rules:build:example_test-esp32
|
||||
script:
|
||||
- python tools/ci/build_pytest_apps.py --all-pytest-apps --under-dir examples --target esp32 --size-info $SIZE_INFO_LOCATION -vv
|
||||
|
||||
build_examples_pytest_esp32c3:
|
||||
extends:
|
||||
- .build_pytest_template
|
||||
- .rules:build:example_test-esp32c3
|
||||
script:
|
||||
- python tools/ci/build_pytest_apps.py --all-pytest-apps --under-dir examples --target esp32c3 --size-info $SIZE_INFO_LOCATION -vv
|
||||
|
||||
.build_template_app_template:
|
||||
extends: .build_template
|
||||
variables:
|
||||
|
@ -1,3 +1,46 @@
|
||||
.pytest_template:
|
||||
stage: target_test
|
||||
extends: .before_script_pytest
|
||||
artifacts:
|
||||
when: always
|
||||
paths:
|
||||
- XUNIT_RESULT.xml
|
||||
reports:
|
||||
junit: XUNIT_RESULT.xml
|
||||
script:
|
||||
- pytest $TEST_DIR -m $TARGET_MARKER -m $ENV_MARKER --junitxml=XUNIT_RESULT.xml
|
||||
|
||||
.pytest_examples_dir_template:
|
||||
extends: .pytest_template
|
||||
variables:
|
||||
TEST_DIR: examples
|
||||
|
||||
example_test_pytest_esp32_generic:
|
||||
extends:
|
||||
- .pytest_examples_dir_template
|
||||
- .rules:test:example_test-esp32
|
||||
needs:
|
||||
- build_examples_pytest_esp32
|
||||
variables:
|
||||
TARGET_MARKER: esp32
|
||||
ENV_MARKER: generic
|
||||
tags: # in gitlab 14.1 or later, we can use `parallel: matrix` with the `tags` keyword. https://docs.gitlab.com/ee/ci/jobs/job_control.html#run-a-matrix-of-parallel-trigger-jobs
|
||||
- ESP32
|
||||
- Example_GENERIC
|
||||
|
||||
example_test_pytest_esp32c3_generic:
|
||||
extends:
|
||||
- .pytest_examples_dir_template
|
||||
- .rules:test:example_test-esp32c3
|
||||
needs:
|
||||
- build_examples_pytest_esp32c3
|
||||
variables:
|
||||
TARGET_MARKER: esp32c3
|
||||
ENV_MARKER: generic
|
||||
tags:
|
||||
- ESP32C3
|
||||
- Example_GENERIC
|
||||
|
||||
# for parallel jobs, CI_JOB_NAME will be "job_name index/total" (for example, "IT_001 1/2")
|
||||
# we need to convert to pattern "job_name_index.yml"
|
||||
.define_config_file_name: &define_config_file_name |
|
||||
|
@ -150,6 +150,7 @@ disable=print-statement,
|
||||
too-many-nested-blocks,
|
||||
too-many-branches,
|
||||
too-many-statements,
|
||||
ungrouped-imports, # since we have isort in pre-commit
|
||||
|
||||
# Enable the message, report, category or checker with the given id(s). You can
|
||||
# either give multiple identifier separated by comma (,) or put this option
|
||||
|
@ -10,6 +10,7 @@ import logging
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
from typing import List, Optional, TextIO
|
||||
|
||||
from find_build_apps import BUILD_SYSTEMS, BuildError, BuildItem, setup_logging
|
||||
from find_build_apps.common import SIZE_JSON_FN, rmdir
|
||||
@ -21,7 +22,84 @@ LOG_ERROR_WARNING = re.compile(r'(error|warning):', re.IGNORECASE)
|
||||
LOG_DEBUG_LINES = 25
|
||||
|
||||
|
||||
def main(): # type: () -> None
|
||||
def build_apps(
    build_items: List[BuildItem],
    parallel_count: int = 1,
    parallel_index: int = 1,
    dry_run: bool = False,
    build_verbose: bool = False,
    keep_going: bool = False,
    output_build_list: Optional[TextIO] = None,
    size_info: Optional[TextIO] = None
) -> None:
    """Build a list of apps, optionally sharded across parallel CI jobs.

    The build list is split into ``parallel_count`` contiguous shards and only the
    shard selected by ``parallel_index`` (1-based) is built by this invocation.

    :param build_items: list of builds to run
    :param parallel_count: total number of parallel jobs
    :param parallel_index: 1-based index of this job
    :param dry_run: pass the dry-run flag through to each build
    :param build_verbose: enable verbose output of the underlying build system
    :param keep_going: collect failures and continue instead of exiting on the
        first failed build
    :param output_build_list: if given, write each build item as expanded JSON,
        one per line
    :param size_info: if given, each successful build appends its size info here
    :raises SystemExit: with code 0 when there is nothing to build for this job,
        with code 1 when any build failed
    """
    if not build_items:
        logging.warning('Empty build list')
        sys.exit(0)

    num_builds = len(build_items)
    num_jobs = parallel_count
    job_index = parallel_index - 1  # convert to 0-based index
    num_builds_per_job = (num_builds + num_jobs - 1) // num_jobs  # ceiling division
    min_job_index = num_builds_per_job * job_index
    if min_job_index >= num_builds:
        # More jobs than shards: trailing jobs legitimately have nothing to do.
        logging.warning(
            f'Nothing to do for job {job_index + 1} (build total: {num_builds}, per job: {num_builds_per_job})')
        sys.exit(0)

    max_job_index = min(num_builds_per_job * (job_index + 1) - 1, num_builds - 1)
    logging.info('Total {} builds, max. {} builds per job, running builds {}-{}'.format(
        num_builds, num_builds_per_job, min_job_index + 1, max_job_index + 1))

    builds_for_current_job = build_items[min_job_index:max_job_index + 1]
    for i, build_item in enumerate(builds_for_current_job):
        index = i + min_job_index + 1  # 1-based index within the full build list
        build_item.index = index
        build_item.dry_run = dry_run
        build_item.verbose = build_verbose
        build_item.keep_going = keep_going
        logging.debug('\tBuild {}: {}'.format(index, repr(build_item)))
        if output_build_list:
            output_build_list.write(build_item.to_json_expanded() + '\n')

    failed_builds = []
    for build_item in builds_for_current_job:
        logging.info('Running build {}: {}'.format(build_item.index, repr(build_item)))
        build_system_class = BUILD_SYSTEMS[build_item.build_system]
        try:
            build_system_class.build(build_item)
        except BuildError as e:
            logging.error(str(e))
            _report_build_failure(build_item)
            if keep_going:
                failed_builds.append(build_item)
            else:
                sys.exit(1)
        else:
            if size_info:
                build_item.write_size_info(size_info)
            if not build_item.preserve:
                logging.info(f'Removing build directory {build_item.build_path}')
                # we only remove binaries here, log files are still needed by check_build_warnings.py
                rmdir(build_item.build_path, exclude_file_pattern=SIZE_JSON_FN)

    if failed_builds:
        # fixed message grammar: "build have" -> "builds have"
        logging.error('The following builds have failed:')
        for build in failed_builds:
            logging.error('\t{}'.format(build))
        sys.exit(1)


def _report_build_failure(build_item: BuildItem) -> None:
    """Log error/warning lines and the tail of a failed build's log file, if any."""
    if not build_item.build_log_path:
        return
    log_filename = os.path.basename(build_item.build_log_path)
    # The log file may be missing if the build failed before producing any output.
    if not os.path.exists(build_item.build_log_path):
        logging.debug('Build log {} not found'.format(log_filename))
        return
    with open(build_item.build_log_path, 'r') as f:
        lines = [line.rstrip() for line in f.readlines() if line.rstrip()]  # non-empty lines
    logging.debug('Error and warning lines from {}:'.format(log_filename))
    for line in lines:
        if LOG_ERROR_WARNING.search(line):
            logging.warning('>>> {}'.format(line))
    logging.debug('Last {} lines of {}:'.format(LOG_DEBUG_LINES, log_filename))
    for line in lines[-LOG_DEBUG_LINES:]:
        logging.debug('>>> {}'.format(line))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser(description='ESP-IDF app builder')
|
||||
parser.add_argument(
|
||||
'-v',
|
||||
@ -87,76 +165,7 @@ def main(): # type: () -> None
|
||||
help='Name of the file to read the list of builds from. If not specified, read from stdin.',
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
setup_logging(args)
|
||||
|
||||
build_items = [BuildItem.from_json(line) for line in args.build_list]
|
||||
if not build_items:
|
||||
logging.warning('Empty build list')
|
||||
SystemExit(0)
|
||||
|
||||
num_builds = len(build_items)
|
||||
num_jobs = args.parallel_count
|
||||
job_index = args.parallel_index - 1 # convert to 0-based index
|
||||
num_builds_per_job = (num_builds + num_jobs - 1) // num_jobs
|
||||
min_job_index = num_builds_per_job * job_index
|
||||
if min_job_index >= num_builds:
|
||||
logging.warn('Nothing to do for job {} (build total: {}, per job: {})'.format(
|
||||
job_index + 1, num_builds, num_builds_per_job))
|
||||
raise SystemExit(0)
|
||||
|
||||
max_job_index = min(num_builds_per_job * (job_index + 1) - 1, num_builds - 1)
|
||||
logging.info('Total {} builds, max. {} builds per job, running builds {}-{}'.format(
|
||||
num_builds, num_builds_per_job, min_job_index + 1, max_job_index + 1))
|
||||
|
||||
builds_for_current_job = build_items[min_job_index:max_job_index + 1]
|
||||
for i, build_info in enumerate(builds_for_current_job):
|
||||
index = i + min_job_index + 1
|
||||
build_info.index = index
|
||||
build_info.dry_run = args.dry_run
|
||||
build_info.verbose = args.build_verbose
|
||||
build_info.keep_going = args.keep_going
|
||||
logging.debug(' Build {}: {}'.format(index, repr(build_info)))
|
||||
if args.output_build_list:
|
||||
args.output_build_list.write(build_info.to_json_expanded() + '\n')
|
||||
|
||||
failed_builds = []
|
||||
for build_info in builds_for_current_job:
|
||||
logging.info('Running build {}: {}'.format(build_info.index, repr(build_info)))
|
||||
build_system_class = BUILD_SYSTEMS[build_info.build_system]
|
||||
try:
|
||||
build_system_class.build(build_info)
|
||||
except BuildError as e:
|
||||
logging.error(str(e))
|
||||
if build_info.build_log_path:
|
||||
log_filename = os.path.basename(build_info.build_log_path)
|
||||
with open(build_info.build_log_path, 'r') as f:
|
||||
lines = [line.rstrip() for line in f.readlines() if line.rstrip()] # non-empty lines
|
||||
logging.debug('Error and warning lines from {}:'.format(log_filename))
|
||||
for line in lines:
|
||||
if LOG_ERROR_WARNING.search(line):
|
||||
logging.warning('>>> {}'.format(line))
|
||||
logging.debug('Last {} lines of {}:'.format(LOG_DEBUG_LINES, log_filename))
|
||||
for line in lines[-LOG_DEBUG_LINES:]:
|
||||
logging.debug('>>> {}'.format(line))
|
||||
if args.keep_going:
|
||||
failed_builds.append(build_info)
|
||||
else:
|
||||
raise SystemExit(1)
|
||||
else:
|
||||
if args.size_info:
|
||||
build_info.write_size_info(args.size_info)
|
||||
if not build_info.preserve:
|
||||
logging.info('Removing build directory {}'.format(build_info.build_path))
|
||||
# we only remove binaries here, log files are still needed by check_build_warnings.py
|
||||
rmdir(build_info.build_path, exclude_file_pattern=SIZE_JSON_FN)
|
||||
|
||||
if failed_builds:
|
||||
logging.error('The following build have failed:')
|
||||
for build in failed_builds:
|
||||
logging.error(' {}'.format(build))
|
||||
raise SystemExit(1)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
items = [BuildItem.from_json(line) for line in args.build_list]
|
||||
build_apps(items, args.parallel_count, args.parallel_index, args.dry_run, args.build_verbose,
|
||||
args.keep_going, args.output_build_list, args.size_info)
|
||||
|
133
tools/ci/build_pytest_apps.py
Normal file
133
tools/ci/build_pytest_apps.py
Normal file
@ -0,0 +1,133 @@
|
||||
# SPDX-FileCopyrightText: 2021 Espressif Systems (Shanghai) CO LTD
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
"""
|
||||
This file is used to generate binary files for the given path.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from typing import List
|
||||
|
||||
from idf_ci_utils import IDF_PATH, get_pytest_dirs
|
||||
|
||||
try:
|
||||
from build_apps import build_apps
|
||||
from find_apps import find_apps, find_builds_for_app
|
||||
from find_build_apps import BuildItem, CMakeBuildSystem, config_rules_from_str, setup_logging
|
||||
except ImportError:
|
||||
sys.path.append(os.path.join(IDF_PATH, 'tools'))
|
||||
|
||||
from build_apps import build_apps
|
||||
from find_apps import find_apps, find_builds_for_app
|
||||
from find_build_apps import BuildItem, CMakeBuildSystem, config_rules_from_str, setup_logging
|
||||
|
||||
|
||||
def main(args: argparse.Namespace) -> None:
    """Find the app directories to build, expand them into build items, and build them.

    When ``--all-pytest-apps`` is set, the app paths come from the pytest
    collector (optionally restricted to ``--under-dir``); otherwise the
    explicitly given paths (or the current directory) are used.
    """
    if args.all_pytest_apps:
        search_paths = get_pytest_dirs(IDF_PATH, args.under_dir)
        args.recursive = True
    else:
        search_paths = args.paths if args.paths is not None else [os.getcwd()]

    app_dirs = []
    for search_path in search_paths:
        app_dirs.extend(find_apps(CMakeBuildSystem, search_path, args.recursive, [], args.target))
    if not app_dirs:
        logging.error('No apps found')
        sys.exit(1)

    logging.info(f'Found {len(app_dirs)} apps')
    app_dirs.sort()

    # Find compatible configurations of each app, collect them as BuildItems
    config_rules = config_rules_from_str(args.config or [])
    build_items: List[BuildItem] = []
    for app_dir in app_dirs:
        app_dir = os.path.realpath(app_dir)
        build_items.extend(
            find_builds_for_app(
                app_dir,
                app_dir,
                'build_@t_@w',
                f'{app_dir}/build_@t_@w/build.log',
                args.target,
                'cmake',
                config_rules,
                True,
            )
        )
    logging.info(f'Found {len(build_items)} builds')
    build_items.sort(key=lambda item: item.build_path)  # type: ignore

    build_apps(build_items, args.parallel_count, args.parallel_index, False, args.build_verbose, True, None,
               args.size_info)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # CLI entry point: parse arguments, configure logging, then build the apps.
    parser = argparse.ArgumentParser(description='Tool to generate build steps for IDF apps')
    parser.add_argument(
        '--recursive',
        action='store_true',
        help='Look for apps in the specified directories recursively.',
    )
    parser.add_argument('--target', required=True, help='Build apps for given target.')
    # NOTE(review): 'action=append' together with a non-empty 'default' means
    # user-supplied --config values are appended to the defaults rather than
    # replacing them — confirm this is the intended behavior.
    parser.add_argument(
        '--config',
        default=['sdkconfig.ci=default', 'sdkconfig.ci.*=', '=default'],
        action='append',
        help=(
            'Adds configurations (sdkconfig file names) to build. This can either be '
            'FILENAME[=NAME] or FILEPATTERN. FILENAME is the name of the sdkconfig file, '
            'relative to the project directory, to be used. Optional NAME can be specified, '
            'which can be used as a name of this configuration. FILEPATTERN is the name of '
            'the sdkconfig file, relative to the project directory, with at most one wildcard. '
            'The part captured by the wildcard is used as the name of the configuration.'
        ),
    )
    parser.add_argument(
        '-p', '--paths',
        nargs='*',
        help='One or more app paths. Will use the current path if not specified.'
    )
    parser.add_argument(
        '--all-pytest-apps',
        action='store_true',
        help='Look for all pytest apps. "--paths" would be ignored if specify this flag.'
    )
    parser.add_argument(
        '--under-dir',
        help='Build only the pytest apps under this directory if specified. '
             'Would be ignored if "--all-pytest-apps" is unflagged.'
    )
    parser.add_argument(
        '--parallel-count',
        default=1,
        type=int,
        help='Number of parallel build jobs.'
    )
    parser.add_argument(
        '--parallel-index',
        default=1,
        type=int,
        help='Index (1-based) of the job, out of the number specified by --parallel-count.',
    )
    parser.add_argument(
        '--size-info',
        type=argparse.FileType('a'),
        help='If specified, the test case name and size info json will be written to this file'
    )
    parser.add_argument(
        '-v',
        '--verbose',
        action='count',
        help='Increase the logging level of the script. Can be specified multiple times.',
    )
    parser.add_argument(
        '--build-verbose',
        action='store_true',
        help='Enable verbose output from build system.',
    )

    cli_args = parser.parse_args()
    setup_logging(cli_args)
    main(cli_args)
|
@ -10,11 +10,12 @@ import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import List, Optional
|
||||
|
||||
IDF_PATH = os.path.abspath(os.getenv('IDF_PATH', os.path.join(os.path.dirname(__file__), '..', '..')))
|
||||
|
||||
|
||||
def get_submodule_dirs(full_path=False): # type: (bool) -> list
|
||||
def get_submodule_dirs(full_path: bool = False) -> List:
|
||||
"""
|
||||
To avoid issue could be introduced by multi-os or additional dependency,
|
||||
we use python and git to get this output
|
||||
@ -49,7 +50,7 @@ def _check_git_filemode(full_path): # type: (str) -> bool
|
||||
return False
|
||||
|
||||
|
||||
def is_executable(full_path): # type: (str) -> bool
|
||||
def is_executable(full_path: str) -> bool:
|
||||
"""
|
||||
os.X_OK will always return true on windows. Use git to check file mode.
|
||||
:param full_path: file full path
|
||||
@ -60,7 +61,7 @@ def is_executable(full_path): # type: (str) -> bool
|
||||
return os.access(full_path, os.X_OK)
|
||||
|
||||
|
||||
def get_git_files(path=IDF_PATH, full_path=False): # type: (str, bool) -> list[str]
|
||||
def get_git_files(path: str = IDF_PATH, full_path: bool = False) -> List[str]:
|
||||
"""
|
||||
Get the result of git ls-files
|
||||
:param path: path to run git ls-files
|
||||
@ -87,7 +88,7 @@ def get_git_files(path=IDF_PATH, full_path=False): # type: (str, bool) -> list[
|
||||
# https://github.com/python/cpython/pull/6299/commits/bfd63120c18bd055defb338c075550f975e3bec1
|
||||
# In order to solve python https://bugs.python.org/issue9584
|
||||
# glob pattern does not support brace expansion issue
|
||||
def _translate(pat): # type: (str) -> str
|
||||
def _translate(pat: str) -> str:
|
||||
"""Translate a shell PATTERN to a regular expression.
|
||||
There is no way to quote meta-characters.
|
||||
"""
|
||||
@ -176,7 +177,7 @@ def _translate(pat): # type: (str) -> str
|
||||
return res
|
||||
|
||||
|
||||
def translate(pat): # type: (str) -> str
|
||||
def translate(pat: str) -> str:
|
||||
res = _translate(pat)
|
||||
return r'(?s:%s)\Z' % res
|
||||
|
||||
@ -194,3 +195,36 @@ magic_check_bytes = re.compile(b'([*?[{])')
|
||||
# glob.magic_check = magic_check
|
||||
# glob.magic_check_bytes = magic_check_bytes
|
||||
# fnmatch.translate = translate
|
||||
|
||||
|
||||
def is_in_directory(file_path: str, folder: str) -> bool:
    """Return True if ``file_path`` lies inside ``folder`` (symlinks resolved).

    A path equal to ``folder`` itself is NOT considered inside it.
    """
    resolved_parent = os.path.realpath(folder) + os.sep
    return os.path.realpath(file_path).startswith(resolved_parent)
|
||||
|
||||
|
||||
def get_pytest_dirs(folder: str, under_dir: Optional[str] = None) -> List[str]:
    """Return directories that contain pytest test files under ``folder``.

    Runs a pytest collect-only pass and maps each collected test file to its
    directory. If ``under_dir`` is given, only directories inside it are kept.

    :param folder: root folder to collect pytest test cases from
    :param under_dir: optional folder used to filter the result
    :return: list of directories containing collected test files (unordered,
        duplicates removed)
    """
    from io import StringIO

    import pytest
    from _pytest.nodes import Item

    class CollectPlugin:
        # Minimal pytest plugin: records every collected test item.
        def __init__(self) -> None:
            self.nodes: List[Item] = []

        def pytest_collection_modifyitems(self, items: List[Item]) -> None:
            for item in items:
                self.nodes.append(item)

    collector = CollectPlugin()

    sys_stdout = sys.stdout
    sys.stdout = StringIO()  # swallow the collection output
    try:
        pytest.main(['--collect-only', folder], plugins=[collector])
    finally:
        # Always restore sys.stdout, even if pytest.main raises; otherwise the
        # rest of the process would keep writing into the discarded buffer.
        sys.stdout = sys_stdout

    test_file_paths = set(node.fspath for node in collector.nodes)

    if under_dir:
        return [os.path.dirname(file) for file in test_file_paths if is_in_directory(file, under_dir)]

    return [os.path.dirname(file) for file in test_file_paths]
|
||||
|
@ -102,7 +102,7 @@ def find_builds_for_app(app_path, work_dir, build_dir, build_log, target_arg,
|
||||
))
|
||||
|
||||
if not build_items:
|
||||
logging.debug('Adding build: app {}, default sdkconfig, config name "{}"'.format(app_path, default_config_name))
|
||||
logging.debug('\tAdding build: app {}, default sdkconfig, config name "{}"'.format(app_path, default_config_name))
|
||||
return [
|
||||
BuildItem(
|
||||
app_path,
|
||||
|
@ -462,6 +462,6 @@ def setup_logging(args):
|
||||
|
||||
logging.basicConfig(
|
||||
format='%(levelname)s: %(message)s',
|
||||
stream=args.log_file or sys.stderr,
|
||||
stream=getattr(args, 'log_file', None) or sys.stderr,
|
||||
level=log_level,
|
||||
)
|
||||
|
Loading…
Reference in New Issue
Block a user