# SPDX-FileCopyrightText: 2022 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import asyncio
import os
import re
import subprocess
import sys
from asyncio.subprocess import Process
from io import open
from types import FunctionType
from typing import Any, Dict, List, Optional, TextIO, Tuple, Union

import click
import yaml
from idf_monitor_base.output_helpers import yellow_print

from .constants import GENERATORS
from .errors import FatalError


def executable_exists(args: List) -> bool:
    try:
        subprocess.check_output(args)
        return True

    except Exception:
        return False
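
# Illustrative usage (a sketch, mirroring how _detect_cmake_generator() below probes the
# build tools listed in GENERATORS):
#
#   executable_exists(['ninja', '--version'])   # True only if `ninja --version` runs successfully
#   executable_exists(['no-such-tool'])         # False (the failed call raises and is swallowed)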


def realpath(path: str) -> str:
    """
    Return the canonical path with normalized case.

    It is useful on Windows to compare paths in a case-insensitive manner.
    On Unix and Mac OS X it works as `os.path.realpath()` only.
    """
    return os.path.normcase(os.path.realpath(path))
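
# Illustrative sketch (hypothetical paths): on Windows, normalizing the case makes
# comparisons effectively case-insensitive, e.g.
#
#   realpath('C:\\Espressif\\Project') == realpath('c:\\espressif\\project')   # True on Windows
#
# On Unix-like systems os.path.normcase() is a no-op, so this behaves like os.path.realpath().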


def _idf_version_from_cmake() -> Optional[str]:
    version_path = os.path.join(os.environ['IDF_PATH'], 'tools/cmake/version.cmake')
    regex = re.compile(r'^\s*set\s*\(\s*IDF_VERSION_([A-Z]{5})\s+(\d+)')
    ver = {}
    try:
        with open(version_path) as f:
            for line in f:
                m = regex.match(line)

                if m:
                    ver[m.group(1)] = m.group(2)

        return 'v%s.%s.%s' % (ver['MAJOR'], ver['MINOR'], ver['PATCH'])
    except (KeyError, OSError):
        sys.stderr.write('WARNING: Cannot find ESP-IDF version in version.cmake\n')
        return None


def get_target(path: str, sdkconfig_filename: str='sdkconfig') -> Optional[str]:
    path = os.path.join(path, sdkconfig_filename)
    return get_sdkconfig_value(path, 'CONFIG_IDF_TARGET')


def idf_version() -> Optional[str]:
    """Return the version of ESP-IDF."""
    # Try to get the version from git:
    try:
        version: Optional[str] = subprocess.check_output([
            'git',
            '--git-dir=%s' % os.path.join(os.environ['IDF_PATH'], '.git'),
            '--work-tree=%s' % os.environ['IDF_PATH'],
            'describe', '--tags', '--dirty', '--match', 'v*.*',
        ]).decode('utf-8', 'ignore').strip()
    except (subprocess.CalledProcessError, UnicodeError):
        # if that fails, fall back to parsing version.cmake
        sys.stderr.write('WARNING: Git version unavailable, reading from source\n')
        version = _idf_version_from_cmake()

    return version


def print_hints(*filenames: str) -> None:
    """Read the given output files and print hints on how to resolve errors based on the output."""
    with open(os.path.join(os.path.dirname(__file__), 'hints.yml'), 'r') as file:
        hints = yaml.safe_load(file)
    for file_name in filenames:
        with open(file_name, 'r') as file:
            output = ' '.join(line.strip() for line in file if line.strip())
        for hint in hints:
            try:
                match = re.compile(hint['re']).findall(output)
            except KeyError:
                raise KeyError("Argument 're' missing in {}. Check hints.yml file.".format(hint))
            except re.error as e:
                raise re.error("Regex '{}' from hints.yml has a problem: {}. Check hints.yml file.".format(hint['re'], e))
            if match:
                extra_info = ', '.join(match) if hint.get('match_to_output', '') else ''
                try:
                    yellow_print(' '.join(['HINT:', hint['hint'].format(extra_info)]))
                except KeyError:
                    raise KeyError("Argument 'hint' missing in {}. Check hints.yml file.".format(hint))


def fit_text_in_terminal(out: str) -> str:
    """Fit text into the terminal; if the string does not fit, replace its center with `...`."""
    space_for_dots = 3  # Space for "..."
    terminal_width, _ = os.get_terminal_size()
    if terminal_width <= space_for_dots:
        # if the terminal is too narrow, just print dots
        return '.' * terminal_width
    if len(out) >= terminal_width:
        elide_size = (terminal_width - space_for_dots) // 2
        # cut out the middle part of the output if it does not fit in the terminal
        return '...'.join([out[:elide_size], out[len(out) - elide_size:]])
    return out
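
# Illustrative sketch (assumes an 11-column terminal, narrower than any real one but short
# enough to show the eliding): with elide_size = (11 - 3) // 2 = 4,
#
#   fit_text_in_terminal('12345678901234567890')   # -> '1234...7890'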


class RunTool:
    def __init__(self, tool_name: str, args: List, cwd: str, env: Dict=None, custom_error_handler: FunctionType=None, build_dir: str=None,
                 hints: bool=False, force_progression: bool=False, interactive: bool=False) -> None:
        self.tool_name = tool_name
        self.args = args
        self.cwd = cwd
        self.env = env
        self.custom_error_handler = custom_error_handler
        # build_dir is set by tools that do not use the build directory as cwd
        self.build_dir = build_dir or cwd
        self.hints = hints
        self.force_progression = force_progression
        self.interactive = interactive

    def __call__(self) -> None:
        def quote_arg(arg: str) -> str:
            """Quote `arg` if it contains whitespace, because whitespace can cause problems when the command is passed to a subprocess."""
            if re.match(r"^(?![\'\"]).*\s.*", arg):
                return ''.join(["'", arg, "'"])
            return arg

        self.args = [str(arg) for arg in self.args]
        display_args = ' '.join(quote_arg(arg) for arg in self.args)
        print('Running %s in directory %s' % (self.tool_name, quote_arg(self.cwd)))
        print('Executing "%s"...' % str(display_args))

        env_copy = dict(os.environ)
        env_copy.update(self.env or {})

        process: Union[Process, subprocess.CompletedProcess[bytes]]
        if self.hints:
            process, stderr_output_file, stdout_output_file = asyncio.run(self.run_command(self.args, env_copy))
        else:
            process = subprocess.run(self.args, env=env_copy, cwd=self.cwd)
            stderr_output_file, stdout_output_file = None, None
        if process.returncode == 0:
            return

        if self.custom_error_handler:
            self.custom_error_handler(process.returncode, stderr_output_file, stdout_output_file)
            return

        if stderr_output_file and stdout_output_file:
            print_hints(stderr_output_file, stdout_output_file)
            raise FatalError('{} failed with exit code {}, output of the command is in the {} and {}'.format(self.tool_name, process.returncode,
                                                                                                             stderr_output_file, stdout_output_file))

        raise FatalError('{} failed with exit code {}'.format(self.tool_name, process.returncode))

    async def run_command(self, cmd: List, env_copy: Dict) -> Tuple[Process, Optional[str], Optional[str]]:
        """Run the `cmd` command while capturing its stderr and stdout, and return the process object
        (which carries the return code and the process id) together with the paths to the captured output files."""
        log_dir_name = 'log'
        try:
            os.mkdir(os.path.join(self.build_dir, log_dir_name))
        except FileExistsError:
            pass
        # Note: we explicitly pass in os.environ here, as we may have set IDF_PATH there during startup
        # the limit argument was added to avoid an error in idf.py confserver
        try:
            p = await asyncio.create_subprocess_exec(*cmd, env=env_copy, limit=1024 * 256, cwd=self.cwd, stdout=asyncio.subprocess.PIPE,
                                                     stderr=asyncio.subprocess.PIPE)
        except NotImplementedError:
            sys.exit(f'ERROR: {sys.executable} doesn\'t support asyncio. The issue can be worked around by re-running idf.py with the "--no-hints" argument.')

        stderr_output_file = os.path.join(self.build_dir, log_dir_name, f'idf_py_stderr_output_{p.pid}')
        stdout_output_file = os.path.join(self.build_dir, log_dir_name, f'idf_py_stdout_output_{p.pid}')
        if p.stderr and p.stdout:  # this check only exists so that p.stderr/p.stdout are known not to be None
            await asyncio.gather(
                self.read_and_write_stream(p.stderr, stderr_output_file, sys.stderr),
                self.read_and_write_stream(p.stdout, stdout_output_file))
        await p.wait()  # wait for the process to finish so that returncode is not None
        return p, stderr_output_file, stdout_output_file

    async def read_and_write_stream(self, input_stream: asyncio.StreamReader, output_filename: str,
                                    output_stream: TextIO=sys.stdout) -> None:
        """Read the output of `input_stream` and write it into `output_filename` and `output_stream`."""
        def delete_ansi_escape(text: str) -> str:
            ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
            return ansi_escape.sub('', text)

        def prepare_for_print(out: bytes) -> str:
            # errors='ignore' is here because some chips produce some garbage bytes
            result = out.decode(errors='ignore')
            if not output_stream.isatty():
                # delete escape sequences when printing in environments where ANSI coloring is disabled
                return delete_ansi_escape(result)
            return result

        def print_progression(output: str) -> None:
            # Erase the previous line and print the new output in its place
            sys.stdout.write('\x1b[K')
            print('\r', end='')
            print(fit_text_in_terminal(output.strip('\n\r')), end='', file=output_stream)

        try:
            with open(output_filename, 'w') as output_file:
                while True:
                    if self.interactive:
                        out = await input_stream.read(1)
                    else:
                        out = await input_stream.readline()
                    if not out:
                        break
                    output = prepare_for_print(out)
                    output_file.write(output)

                    # print the output as an updating progress line, but only for progress-related output
                    # (lines starting with '[') and only if the verbose flag is not set
                    if self.force_progression and output[0] == '[' and '-v' not in self.args and output_stream.isatty():
                        print_progression(output)
                    else:
                        output_stream.write(output)
                        output_stream.flush()
        except (RuntimeError, EnvironmentError) as e:
            yellow_print('WARNING: The exception {} was raised and we can\'t capture all your {}; '
                         'hints on how to resolve errors may not be accurate.'.format(e, output_stream.name.strip('<>')))


def run_tool(*args: Any, **kwargs: Any) -> None:
    # kept as a thin wrapper in case someone uses run_tool externally in idf.py extensions
    return RunTool(*args, **kwargs)()


def run_target(target_name: str, args: 'PropertyDict', env: Optional[Dict]=None,
               custom_error_handler: FunctionType=None, force_progression: bool=False, hints: bool=False, interactive: bool=False) -> None:
    """Run target in build directory."""
    if env is None:
        env = {}

    generator_cmd = GENERATORS[args.generator]['command']
    env.update(GENERATORS[args.generator]['envvar'])

    if args.verbose:
        generator_cmd += [GENERATORS[args.generator]['verbose_flag']]

    RunTool(generator_cmd[0], generator_cmd + [target_name], args.build_dir, env, custom_error_handler, hints=hints,
            force_progression=force_progression, interactive=interactive)()


def _strip_quotes(value: str, regexp: re.Pattern=re.compile(r"^\"(.*)\"$|^'(.*)'$|^(.*)$")) -> Optional[str]:
    """
    Strip quotes like CMake does when parsing cache entries.
    """
    matching_values = regexp.match(value)
    return [x for x in matching_values.groups() if x is not None][0].rstrip() if matching_values is not None else None
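
# Illustrative examples (hypothetical values):
#
#   _strip_quotes('"hello"')   # -> 'hello'
#   _strip_quotes("'world'")   # -> 'world'
#   _strip_quotes('plain ')    # -> 'plain'  (trailing whitespace is stripped as well)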


def _parse_cmakecache(path: str) -> Dict:
    """
    Parse the CMakeCache file at 'path'.

    Returns a dict of name:value.

    CMakeCache entries also each have a "type", but this is currently ignored.
    """
    result = {}
    with open(path, encoding='utf-8') as f:
        for line in f:
            # cmake cache lines look like: CMAKE_CXX_FLAGS_DEBUG:STRING=-g
            # groups are name, type, value
            m = re.match(r'^([^#/:=]+):([^:=]+)=(.*)\n$', line)
            if m:
                result[m.group(1)] = m.group(3)
    return result
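
# Illustrative sketch: for a CMakeCache.txt containing the (hypothetical) entries
#
#   IDF_TARGET:STRING=esp32
#   CMAKE_GENERATOR:INTERNAL=Ninja
#
# the function returns {'IDF_TARGET': 'esp32', 'CMAKE_GENERATOR': 'Ninja'}; the
# ':STRING'/':INTERNAL' type fields are matched by the regex but discarded.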


def _new_cmakecache_entries(cache_path: str, new_cache_entries: List) -> bool:
    if not os.path.exists(cache_path):
        return True

    if new_cache_entries:
        current_cache = _parse_cmakecache(cache_path)

        for entry in new_cache_entries:
            key, value = entry.split('=', 1)
            current_value = current_cache.get(key, None)
            if current_value is None or _strip_quotes(value) != current_value:
                return True

    return False
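
# Illustrative sketch (hypothetical cache contents): if CMakeCache.txt already stores
# IDF_TARGET=esp32, then
#
#   _new_cmakecache_entries(cache_path, ['IDF_TARGET=esp32'])     # -> False, cache is up to date
#   _new_cmakecache_entries(cache_path, ['IDF_TARGET=esp32s2'])   # -> True, cmake must run again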


def _detect_cmake_generator(prog_name: str) -> Any:
    """
    Find the default cmake generator, if none was specified. Raises an exception if no valid generator is found.
    """
    for (generator_name, generator) in GENERATORS.items():
        if executable_exists(generator['version']):
            return generator_name
    raise FatalError("To use %s, either the 'ninja' or 'GNU make' build tool must be available in the PATH" % prog_name)


def ensure_build_directory(args: 'PropertyDict', prog_name: str, always_run_cmake: bool=False) -> None:
    """Check that the build directory exists and that cmake has been run there.

    If this isn't the case, create the build directory (if necessary) and
    do an initial cmake run to configure it.

    This function will also check the args.generator parameter. If the parameter is incompatible with
    the build directory, an error is raised. If the parameter is None, this function will set it to
    an auto-detected default generator or to the value already configured in the build directory.
    """
    project_dir = args.project_dir
    # Verify the project directory
    if not os.path.isdir(project_dir):
        if not os.path.exists(project_dir):
            raise FatalError('Project directory %s does not exist' % project_dir)
        else:
            raise FatalError('%s must be a project directory' % project_dir)
    if not os.path.exists(os.path.join(project_dir, 'CMakeLists.txt')):
        raise FatalError('CMakeLists.txt not found in project directory %s' % project_dir)

    # Verify/create the build directory
    build_dir = args.build_dir
    if not os.path.isdir(build_dir):
        os.makedirs(build_dir)

    # Parse CMakeCache, if it exists
    cache_path = os.path.join(build_dir, 'CMakeCache.txt')
    cache = _parse_cmakecache(cache_path) if os.path.exists(cache_path) else {}

    # Validate or set IDF_TARGET
    _guess_or_check_idf_target(args, prog_name, cache)

    args.define_cache_entry.append('CCACHE_ENABLE=%d' % args.ccache)

    if always_run_cmake or _new_cmakecache_entries(cache_path, args.define_cache_entry):
        if args.generator is None:
            args.generator = _detect_cmake_generator(prog_name)
        try:
            cmake_args = [
                'cmake',
                '-G',
                args.generator,
                '-DPYTHON_DEPS_CHECKED=1',
                '-DESP_PLATFORM=1',
            ]
            if args.cmake_warn_uninitialized:
                cmake_args += ['--warn-uninitialized']

            if args.define_cache_entry:
                cmake_args += ['-D' + d for d in args.define_cache_entry]
            cmake_args += [project_dir]

            hints = not args.no_hints
            RunTool('cmake', cmake_args, cwd=args.build_dir, hints=hints)()
        except Exception:
            # don't allow partially valid CMakeCache.txt files,
            # to keep the "should I run cmake?" logic simple
            if os.path.exists(cache_path):
                os.remove(cache_path)
            raise

    # re-parse the cache so that subsequent accesses in this method reflect the result of the cmake run above
    cache = _parse_cmakecache(cache_path) if os.path.exists(cache_path) else {}

    try:
        generator = cache['CMAKE_GENERATOR']
    except KeyError:
        generator = _detect_cmake_generator(prog_name)
    if args.generator is None:
        args.generator = generator  # reuse the previously configured generator, if none was given
    if generator != args.generator:
        raise FatalError("Build is configured for generator '%s' not '%s'. Run '%s fullclean' to start again." %
                         (generator, args.generator, prog_name))

    try:
        home_dir = cache['CMAKE_HOME_DIRECTORY']
        if realpath(home_dir) != realpath(project_dir):
            raise FatalError(
                "Build directory '%s' configured for project '%s' not '%s'. Run '%s fullclean' to start again." %
                (build_dir, realpath(home_dir), realpath(project_dir), prog_name))
    except KeyError:
        pass  # if cmake failed part way, CMAKE_HOME_DIRECTORY may not be set yet


def merge_action_lists(*action_lists: Dict) -> Dict:
    merged_actions: Dict = {
        'global_options': [],
        'actions': {},
        'global_action_callbacks': [],
    }
    for action_list in action_lists:
        merged_actions['global_options'].extend(action_list.get('global_options', []))
        merged_actions['actions'].update(action_list.get('actions', {}))
        merged_actions['global_action_callbacks'].extend(action_list.get('global_action_callbacks', []))
    return merged_actions


def get_sdkconfig_value(sdkconfig_file: str, key: str) -> Optional[str]:
    """
    Return the value of the given key from sdkconfig_file.
    If sdkconfig_file does not exist or the option is not present, returns None.
    """
    assert key.startswith('CONFIG_')
    if not os.path.exists(sdkconfig_file):
        return None
    # keep track of the last seen value for the given key
    value = None
    # if the value is quoted, this excludes the quotes from the value
    pattern = re.compile(r"^{}=\"?([^\"]*)\"?$".format(key))
    with open(sdkconfig_file, 'r') as f:
        for line in f:
            match = re.match(pattern, line)
            if match:
                value = match.group(1)
    return value
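
# Illustrative sketch (hypothetical sdkconfig contents): for a file containing
#
#   CONFIG_IDF_TARGET="esp32s3"
#   CONFIG_FREERTOS_HZ=1000
#
# get_sdkconfig_value(path, 'CONFIG_IDF_TARGET') returns 'esp32s3' (quotes stripped) and
# get_sdkconfig_value(path, 'CONFIG_FREERTOS_HZ') returns '1000' (values are always strings).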


def is_target_supported(project_path: str, supported_targets: List) -> bool:
    """
    Returns True if the active target is supported, or False otherwise.
    """
    return get_target(project_path) in supported_targets


def _guess_or_check_idf_target(args: 'PropertyDict', prog_name: str, cache: Dict) -> None:
    """
    If CMakeCache.txt doesn't exist, and IDF_TARGET is not set in the environment, guess the value from
    sdkconfig or sdkconfig.defaults, and pass it to CMake in the IDF_TARGET variable.

    Otherwise, cross-check the three settings (sdkconfig, CMakeCache, environment) and if there is
    a mismatch, fail with instructions on how to fix it.
    """
    # Default locations of sdkconfig files.
    # FIXME: they may be overridden in the project or by a CMake variable (IDF-1369).
    # These are used to guess the target from sdkconfig, or set the default target by sdkconfig.defaults.
    idf_target_from_sdkconfig = get_target(args.project_dir)
    idf_target_from_sdkconfig_defaults = get_target(args.project_dir, 'sdkconfig.defaults')
    idf_target_from_env = os.environ.get('IDF_TARGET')
    idf_target_from_cache = cache.get('IDF_TARGET')

    if not cache and not idf_target_from_env:
        # CMakeCache.txt does not exist yet, and IDF_TARGET is not set in the environment.
        guessed_target = idf_target_from_sdkconfig or idf_target_from_sdkconfig_defaults
        if guessed_target:
            if args.verbose:
                print("IDF_TARGET is not set, guessed '%s' from sdkconfig" % (guessed_target))
            args.define_cache_entry.append('IDF_TARGET=' + guessed_target)

    elif idf_target_from_env:
        # Let's check that IDF_TARGET values are consistent
        if idf_target_from_sdkconfig and idf_target_from_sdkconfig != idf_target_from_env:
            raise FatalError("Project sdkconfig was generated for target '{t_conf}', but environment variable IDF_TARGET "
                             "is set to '{t_env}'. Run '{prog} set-target {t_env}' to generate new sdkconfig file for target {t_env}."
                             .format(t_conf=idf_target_from_sdkconfig, t_env=idf_target_from_env, prog=prog_name))

        if idf_target_from_cache and idf_target_from_cache != idf_target_from_env:
            raise FatalError("Target settings are not consistent: '{t_env}' in the environment, '{t_cache}' in CMakeCache.txt. "
                             "Run '{prog} fullclean' to start again."
                             .format(t_env=idf_target_from_env, t_cache=idf_target_from_cache, prog=prog_name))

    elif idf_target_from_cache and idf_target_from_sdkconfig and idf_target_from_cache != idf_target_from_sdkconfig:
        # This shouldn't happen, unless the user manually edits CMakeCache.txt or sdkconfig, but let's check anyway.
        raise FatalError("Project sdkconfig was generated for target '{t_conf}', but CMakeCache.txt contains '{t_cache}'. "
                         "To keep the setting in sdkconfig ({t_conf}) and re-generate CMakeCache.txt, run '{prog} fullclean'. "
                         "To re-generate sdkconfig for '{t_cache}' target, run '{prog} set-target {t_cache}'."
                         .format(t_conf=idf_target_from_sdkconfig, t_cache=idf_target_from_cache, prog=prog_name))


class TargetChoice(click.Choice):
    """
    A version of click.Choice with two special features:
    - ignores hyphens
    - not case sensitive
    """
    def __init__(self, choices: List) -> None:
        super(TargetChoice, self).__init__(choices, case_sensitive=False)

    def convert(self, value: Any, param: click.Parameter, ctx: click.Context) -> Any:
        def normalize(string: str) -> str:
            return string.lower().replace('-', '')

        saved_token_normalize_func = ctx.token_normalize_func
        ctx.token_normalize_func = normalize

        try:
            return super(TargetChoice, self).convert(value, param, ctx)
        finally:
            ctx.token_normalize_func = saved_token_normalize_func


class PropertyDict(dict):
    def __getattr__(self, name: str) -> Any:
        if name in self:
            return self[name]
        else:
            raise AttributeError("'PropertyDict' object has no attribute '%s'" % name)

    def __setattr__(self, name: str, value: Any) -> None:
        self[name] = value

    def __delattr__(self, name: str) -> None:
        if name in self:
            del self[name]
        else:
            raise AttributeError("'PropertyDict' object has no attribute '%s'" % name)
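
# Illustrative usage (hypothetical values): PropertyDict exposes dictionary keys as attributes,
# which is how the parsed idf.py arguments are accessed throughout this module:
#
#   args = PropertyDict(build_dir='build', verbose=False)
#   args.build_dir        # -> 'build'
#   del args.missing      # -> raises AttributeError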