Merge branch 'feature/idfpy_extensions' into 'master'
idf.py: Support extensions for idf.py and move commands to separate files

Closes IDF-1041

See merge request espressif/esp-idf!6416
commit c0c75478fa
1105  tools/idf.py
File diff suppressed because it is too large
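Since the tools/idf.py diff is suppressed here, the following is only a rough, hypothetical sketch of the loading step it implements, based on the behaviour described in the README below (modules named `*_ext`, extra directories from `IDF_EXTRA_ACTIONS_PATH`, alphanumeric order); the actual code in tools/idf.py may well differ:

```python
# Hypothetical illustration only -- not the code from tools/idf.py.
import importlib
import os
import sys


def load_extensions(builtin_dir, project_path):
    """Call action_extensions() in every *_ext module found in the search directories."""
    actions = {}
    search_dirs = [builtin_dir] + os.environ.get("IDF_EXTRA_ACTIONS_PATH", "").split(";")
    for directory in filter(None, search_dirs):
        sys.path.append(directory)
        for name in sorted(os.listdir(directory)):  # alphanumeric order, per the README
            module_name = name[:-3] if name.endswith(".py") else name
            if module_name.endswith("_ext"):
                module = importlib.import_module(module_name)
                # Each extension contributes global_options, global_action_callbacks and actions.
                actions[module_name] = module.action_extensions(actions, project_path)
    return actions
```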
42  tools/idf_py_actions/README.md  Normal file
@@ -0,0 +1,42 @@
# idf.py extensions

Python modules (subdirectories and files) in this directory named `[your_extension]_ext` will be loaded as idf.py extensions.

If you want to provide extra extensions, set `IDF_EXTRA_ACTIONS_PATH` to a `;`-separated list of directories containing them. Extensions will be loaded in alphanumeric order.

Command-line argument parsing and the extension mechanism are implemented on top of [Click](https://click.palletsprojects.com/en/5.x/) (versions >=5.0 are supported).

Extension modules should define a function `action_extensions(base_actions, project_path)` where:

- base_actions - dictionary with actions that are already available for idf.py
- project_path - working directory, may default to `os.getcwd()`

This function has to return a dict with 3 possible keys:

```python
{
    # Additional options that will be available from idf.py
    "global_options": [{
        "names": ["--option-name"],
        "help": "Help for option --option-name.",
    }],
    # List of functions that will have access to the full app context and can manipulate the arguments
    "global_action_callbacks": [global_callback],
    # Additional subcommands for idf.py
    "actions": {
        "subcommand_name": {
            "callback": subcommand_callback,
            "help": "Help for subcommand.",
        },
    },
}
```

Here the function `global_callback(ctx, global_args, tasks)` accepts 3 arguments:

- ctx - [Click context](https://click.palletsprojects.com/en/5.x/api/#context)
- global_args - dictionary of all available global arguments
- tasks - list of Task objects

And `subcommand_callback(subcommand_name, ctx, args)` accepts 3 arguments:

- subcommand_name - name of the subcommand
- ctx - [Click context](https://click.palletsprojects.com/en/5.x/api/#context)
- args - list of the command's arguments
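For a concrete picture of the API the README describes, here is a minimal sketch of such an extension module (the file name, option and subcommand are invented for illustration; compare the real test extensions further down in this diff):

```python
# hello_ext.py -- a minimal, illustrative idf.py extension (names are made up).
def action_extensions(base_actions, project_path):
    def hello_callback(subcommand_name, ctx, args):
        print("Hello from %s (project at %s)" % (subcommand_name, project_path))

    def hello_global_callback(ctx, global_args, tasks):
        # Runs with the full app context before the queued tasks are executed.
        print("Greeting: %s" % global_args.greeting)

    return {
        "global_options": [{
            "names": ["--greeting"],
            "help": "Greeting text used by the hello subcommand.",
            "default": "hi",
        }],
        "global_action_callbacks": [hello_global_callback],
        "actions": {
            "hello": {
                "callback": hello_callback,
                "help": "Print a greeting.",
            },
        },
    }
```

Dropping a file like this into a directory listed in `IDF_EXTRA_ACTIONS_PATH` should then make `idf.py --greeting=hello hello` available, mirroring what the test extensions at the bottom of this diff do.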
0  tools/idf_py_actions/__init__.py  Normal file
32  tools/idf_py_actions/constants.py  Normal file
@@ -0,0 +1,32 @@
import multiprocessing
import os
import platform

# Make flavors, across the various kinds of Windows environments & POSIX...
if "MSYSTEM" in os.environ:  # MSYS
    MAKE_CMD = "make"
    MAKE_GENERATOR = "MSYS Makefiles"
elif os.name == "nt":  # other Windows
    MAKE_CMD = "mingw32-make"
    MAKE_GENERATOR = "MinGW Makefiles"
elif platform.system() == "FreeBSD":
    MAKE_CMD = "gmake"
    MAKE_GENERATOR = "Unix Makefiles"
else:
    MAKE_CMD = "make"
    MAKE_GENERATOR = "Unix Makefiles"

GENERATORS = [
    # ('generator name', 'build command line', 'version command line', 'verbose flag')
    ("Ninja", ["ninja"], ["ninja", "--version"], "-v"),
    (
        MAKE_GENERATOR,
        [MAKE_CMD, "-j", str(multiprocessing.cpu_count() + 2)],
        [MAKE_CMD, "--version"],
        "VERBOSE=1",
    ),
]
GENERATOR_CMDS = dict((a[0], a[1]) for a in GENERATORS)
GENERATOR_VERBOSE = dict((a[0], a[3]) for a in GENERATORS)

SUPPORTED_TARGETS = ["esp32", "esp32s2beta"]
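As an aside, a small sketch of how these lookup tables are meant to be consumed, essentially what build_target() in core_ext.py below does (the standalone wrapper function here is only illustrative):

```python
# Illustrative only: resolve the build command for a chosen generator,
# mirroring the lookups done by build_target() in core_ext.py.
from idf_py_actions.constants import GENERATOR_CMDS, GENERATOR_VERBOSE


def build_command(generator, verbose=False):
    cmd = list(GENERATOR_CMDS[generator])  # e.g. ["ninja"] or ["make", "-j", "10"]
    if verbose:
        cmd.append(GENERATOR_VERBOSE[generator])  # "-v" for Ninja, "VERBOSE=1" for make
    return cmd


print(build_command("Ninja", verbose=True))  # ['ninja', '-v']
```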
342  tools/idf_py_actions/core_ext.py  Normal file
@@ -0,0 +1,342 @@
import os
import shutil
import sys

import click

from idf_py_actions.constants import GENERATOR_CMDS, GENERATOR_VERBOSE, SUPPORTED_TARGETS
from idf_py_actions.errors import FatalError
from idf_py_actions.global_options import global_options
from idf_py_actions.tools import ensure_build_directory, idf_version, merge_action_lists, realpath, run_tool


def action_extensions(base_actions, project_path):
    def build_target(target_name, ctx, args):
        """
        Execute the target build system to build target 'target_name'

        Calls ensure_build_directory() which will run cmake to generate a build
        directory (with the specified generator) as needed.
        """
        ensure_build_directory(args, ctx.info_name)
        generator_cmd = GENERATOR_CMDS[args.generator]

        if args.verbose:
            generator_cmd += [GENERATOR_VERBOSE[args.generator]]

        run_tool(generator_cmd[0], generator_cmd + [target_name], args.build_dir)

    def verbose_callback(ctx, param, value):
        if not value or ctx.resilient_parsing:
            return

        for line in ctx.command.verbose_output:
            print(line)

    def clean(action, ctx, args):
        if not os.path.isdir(args.build_dir):
            print("Build directory '%s' not found. Nothing to clean." % args.build_dir)
            return
        build_target("clean", ctx, args)

    def _delete_windows_symlinks(directory):
        """
        Delete symlinks recursively on Windows. Useful for Python 2, which doesn't detect symlinks on Windows.
        """
        deleted_paths = []
        if os.name == "nt":
            import ctypes

            for root, dirnames, _filenames in os.walk(directory):
                for d in dirnames:
                    full_path = os.path.join(root, d)
                    try:
                        full_path = full_path.decode("utf-8")
                    except Exception:
                        pass
                    if ctypes.windll.kernel32.GetFileAttributesW(full_path) & 0x0400:
                        os.rmdir(full_path)
                        deleted_paths.append(full_path)
        return deleted_paths

    def fullclean(action, ctx, args):
        build_dir = args.build_dir
        if not os.path.isdir(build_dir):
            print("Build directory '%s' not found. Nothing to clean." % build_dir)
            return
        if len(os.listdir(build_dir)) == 0:
            print("Build directory '%s' is empty. Nothing to clean." % build_dir)
            return

        if not os.path.exists(os.path.join(build_dir, "CMakeCache.txt")):
            raise FatalError("Directory '%s' doesn't seem to be a CMake build directory. Refusing to automatically "
                             "delete files in this directory. Delete the directory manually to 'clean' it." % build_dir)
        red_flags = ["CMakeLists.txt", ".git", ".svn"]
        for red in red_flags:
            red = os.path.join(build_dir, red)
            if os.path.exists(red):
                raise FatalError(
                    "Refusing to automatically delete files in directory containing '%s'. Delete files manually if you're sure."
                    % red)
        # OK, delete everything in the build directory...
        # Note: Python 2.7 doesn't detect symlinks on Windows (it is supported from 3.2). Tools promising not to
        # follow symlinks will actually follow them. Deleting the build directory with symlinks also deletes items
        # outside of this directory.
        deleted_symlinks = _delete_windows_symlinks(build_dir)
        if args.verbose and len(deleted_symlinks) > 1:
            print("The following symlinks were identified and removed:\n%s" % "\n".join(deleted_symlinks))
        for f in os.listdir(build_dir):  # TODO: once we are Python 3 only, this can be os.scandir()
            f = os.path.join(build_dir, f)
            if args.verbose:
                print("Removing: %s" % f)
            if os.path.isdir(f):
                shutil.rmtree(f)
            else:
                os.remove(f)

    def set_target(action, ctx, args, idf_target):
        args.define_cache_entry.append("IDF_TARGET=" + idf_target)
        sdkconfig_path = os.path.join(args.project_dir, 'sdkconfig')
        sdkconfig_old = sdkconfig_path + ".old"
        if os.path.exists(sdkconfig_old):
            os.remove(sdkconfig_old)
        if os.path.exists(sdkconfig_path):
            os.rename(sdkconfig_path, sdkconfig_old)
        print("Set Target to: %s, new sdkconfig created. Existing sdkconfig renamed to sdkconfig.old." % idf_target)
        ensure_build_directory(args, ctx.info_name, True)

    def reconfigure(action, ctx, args):
        ensure_build_directory(args, ctx.info_name, True)

    def validate_root_options(ctx, args, tasks):
        args.project_dir = realpath(args.project_dir)
        if args.build_dir is not None and args.project_dir == realpath(args.build_dir):
            raise FatalError("Setting the build directory to the project directory is not supported. Suggest dropping "
                             "--build-dir option, the default is a 'build' subdirectory inside the project directory.")
        if args.build_dir is None:
            args.build_dir = os.path.join(args.project_dir, "build")
        args.build_dir = realpath(args.build_dir)

    def idf_version_callback(ctx, param, value):
        if not value or ctx.resilient_parsing:
            return

        version = idf_version()

        if not version:
            raise FatalError("ESP-IDF version cannot be determined")

        print("ESP-IDF %s" % version)
        sys.exit(0)

    root_options = {
        "global_options": [
            {
                "names": ["--version"],
                "help": "Show IDF version and exit.",
                "is_flag": True,
                "callback": idf_version_callback
            },
            {
                "names": ["-C", "--project-dir"],
                "help": "Project directory.",
                "type": click.Path(),
                "default": os.getcwd(),
            },
            {
                "names": ["-B", "--build-dir"],
                "help": "Build directory.",
                "type": click.Path(),
                "default": None,
            },
            {
                "names": ["-n", "--no-warnings"],
                "help": "Disable CMake warnings.",
                "is_flag": True,
                "default": False,
            },
            {
                "names": ["-v", "--verbose"],
                "help": "Verbose build output.",
                "is_flag": True,
                "is_eager": True,
                "default": False,
                "callback": verbose_callback
            },
            {
                "names": ["--ccache/--no-ccache"],
                "help": ("Use ccache in build. Disabled by default, unless "
                         "IDF_CCACHE_ENABLE environment variable is set to a non-zero value."),
                "is_flag": True,
                "default": os.getenv("IDF_CCACHE_ENABLE") not in [None, "", "0"],
            },
            {
                "names": ["-G", "--generator"],
                "help": "CMake generator.",
                "type": click.Choice(GENERATOR_CMDS.keys()),
            },
            {
                "names": ["--dry-run"],
                "help": "Only process arguments, but don't execute actions.",
                "is_flag": True,
                "hidden": True,
                "default": False
            },
        ],
        "global_action_callbacks": [validate_root_options],
    }

    build_actions = {
        "actions": {
            "all": {
                "aliases": ["build"],
                "callback": build_target,
                "short_help": "Build the project.",
                "help": ("Build the project. This can involve multiple steps:\n\n"
                         "1. Create the build directory if needed. "
                         "The sub-directory 'build' is used to hold build output, "
                         "although this can be changed with the -B option.\n\n"
                         "2. Run CMake as necessary to configure the project "
                         "and generate build files for the main build tool.\n\n"
                         "3. Run the main build tool (Ninja or GNU Make). "
                         "By default, the build tool is automatically detected "
                         "but it can be explicitly set by passing the -G option to idf.py.\n\n"),
                "options": global_options,
                "order_dependencies": [
                    "reconfigure",
                    "menuconfig",
                    "clean",
                    "fullclean",
                ],
            },
            "menuconfig": {
                "callback": build_target,
                "help": 'Run "menuconfig" project configuration tool.',
                "options": global_options,
            },
            "confserver": {
                "callback": build_target,
                "help": "Run JSON configuration server.",
                "options": global_options,
            },
            "size": {
                "callback": build_target,
                "help": "Print basic size information about the app.",
                "options": global_options,
                "dependencies": ["app"],
            },
            "size-components": {
                "callback": build_target,
                "help": "Print per-component size information.",
                "options": global_options,
                "dependencies": ["app"],
            },
            "size-files": {
                "callback": build_target,
                "help": "Print per-source-file size information.",
                "options": global_options,
                "dependencies": ["app"],
            },
            "bootloader": {
                "callback": build_target,
                "help": "Build only bootloader.",
                "options": global_options,
            },
            "app": {
                "callback": build_target,
                "help": "Build only the app.",
                "order_dependencies": ["clean", "fullclean", "reconfigure"],
                "options": global_options,
            },
            "efuse_common_table": {
                "callback": build_target,
                "help": "Generate C-source for IDF's eFuse fields.",
                "order_dependencies": ["reconfigure"],
                "options": global_options,
            },
            "efuse_custom_table": {
                "callback": build_target,
                "help": "Generate C-source for user's eFuse fields.",
                "order_dependencies": ["reconfigure"],
                "options": global_options,
            },
            "show_efuse_table": {
                "callback": build_target,
                "help": "Print eFuse table.",
                "order_dependencies": ["reconfigure"],
                "options": global_options,
            },
            "partition_table": {
                "callback": build_target,
                "help": "Build only partition table.",
                "order_dependencies": ["reconfigure"],
                "options": global_options,
            },
            "erase_otadata": {
                "callback": build_target,
                "help": "Erase otadata partition.",
                "options": global_options,
            },
            "read_otadata": {
                "callback": build_target,
                "help": "Read otadata partition.",
                "options": global_options,
            },
        }
    }

    clean_actions = {
        "actions": {
            "reconfigure": {
                "callback": reconfigure,
                "short_help": "Re-run CMake.",
                "help": ("Re-run CMake even if it doesn't seem to need re-running. "
                         "This isn't necessary during normal usage, "
                         "but can be useful after adding/removing files from the source tree, "
                         "or when modifying CMake cache variables. "
                         "For example, \"idf.py -DNAME='VALUE' reconfigure\" "
                         'can be used to set variable "NAME" in CMake cache to value "VALUE".'),
                "options": global_options,
                "order_dependencies": ["menuconfig", "fullclean"],
            },
            "set-target": {
                "callback": set_target,
                "short_help": "Set the chip target to build.",
                "help": ("Set the chip target to build. This will remove the "
                         "existing sdkconfig file and corresponding CMakeCache and "
                         "create new ones according to the new target.\nFor example, "
                         "\"idf.py set-target esp32\" will select esp32 as the new chip "
                         "target."),
                "arguments": [
                    {
                        "names": ["idf-target"],
                        "nargs": 1,
                        "type": click.Choice(SUPPORTED_TARGETS),
                    },
                ],
                "dependencies": ["fullclean"],
            },
            "clean": {
                "callback": clean,
                "short_help": "Delete build output files from the build directory.",
                "help": ("Delete build output files from the build directory, "
                         "forcing a 'full rebuild' the next time "
                         "the project is built. Cleaning doesn't delete "
                         "CMake configuration output and some other files."),
                "order_dependencies": ["fullclean"],
            },
            "fullclean": {
                "callback": fullclean,
                "short_help": "Delete the entire build directory contents.",
                "help": ("Delete the entire build directory contents. "
                         "This includes all CMake configuration output. "
                         "The next time the project is built, "
                         "CMake will configure it from scratch. "
                         "Note that this option recursively deletes all files "
                         "in the build directory, so use with care. "
                         "Project configuration is not deleted.")
            },
        }
    }

    return merge_action_lists(root_options, build_actions, clean_actions)
6  tools/idf_py_actions/errors.py  Normal file
@@ -0,0 +1,6 @@
class FatalError(RuntimeError):
    """
    Wrapper class for runtime errors that aren't caused by bugs in idf.py or the build process.
    """

    pass
6  tools/idf_py_actions/global_options.py  Normal file
@@ -0,0 +1,6 @@
global_options = [{
    "names": ["-D", "--define-cache-entry"],
    "help": "Create a CMake cache entry.",
    "scope": "global",
    "multiple": True,
}]
209  tools/idf_py_actions/serial_ext.py  Normal file
@@ -0,0 +1,209 @@
import json
import os
import sys

from idf_py_actions.errors import FatalError
from idf_py_actions.global_options import global_options
from idf_py_actions.tools import ensure_build_directory, run_tool

PYTHON = sys.executable


def action_extensions(base_actions, project_path):
    def _get_default_serial_port():
        """ Return a default serial port. esptool can do this (smarter), but it can create
        inconsistencies where esptool.py uses one port and idf_monitor uses another.

        Same logic as esptool.py's search order: reverse sort by name and choose the first port.
        """
        # Import is done here in order to move it after check_environment() has ensured that pyserial is installed
        import serial.tools.list_ports

        ports = list(reversed(sorted(p.device for p in serial.tools.list_ports.comports())))
        try:
            print("Choosing default port %s (use '-p PORT' option to set a specific serial port)" %
                  ports[0].encode("ascii", "ignore"))
            return ports[0]
        except IndexError:
            raise RuntimeError(
                "No serial ports found. Connect a device, or use '-p PORT' option to set a specific port.")

    def _get_esptool_args(args):
        esptool_path = os.path.join(os.environ["IDF_PATH"], "components/esptool_py/esptool/esptool.py")
        if args.port is None:
            args.port = _get_default_serial_port()
        result = [PYTHON, esptool_path]
        result += ["-p", args.port]
        result += ["-b", str(args.baud)]

        with open(os.path.join(args.build_dir, "flasher_args.json")) as f:
            flasher_args = json.load(f)

        extra_esptool_args = flasher_args["extra_esptool_args"]
        result += ["--after", extra_esptool_args["after"]]
        return result

    def _get_commandline_options(ctx):
        """ Return all the command line options up to the first action """
        # This approach ignores the argument parsing done by Click
        result = []

        for arg in sys.argv:
            if arg in ctx.command.commands_with_aliases:
                break

            result.append(arg)

        return result

    def monitor(action, ctx, args, print_filter):
        """
        Run idf_monitor.py to watch build output
        """
        if args.port is None:
            args.port = _get_default_serial_port()
        desc_path = os.path.join(args.build_dir, "project_description.json")
        if not os.path.exists(desc_path):
            ensure_build_directory(args, ctx.info_name)
        with open(desc_path, "r") as f:
            project_desc = json.load(f)

        elf_file = os.path.join(args.build_dir, project_desc["app_elf"])
        if not os.path.exists(elf_file):
            raise FatalError("ELF file '%s' not found. You need to build & flash the project before running 'monitor', "
                             "and the binary on the device must match the one in the build directory exactly. "
                             "Try '%s flash monitor'." % (elf_file, ctx.info_name))
        idf_monitor = os.path.join(os.environ["IDF_PATH"], "tools/idf_monitor.py")
        monitor_args = [PYTHON, idf_monitor]
        if args.port is not None:
            monitor_args += ["-p", args.port]
        monitor_args += ["-b", project_desc["monitor_baud"]]
        monitor_args += ["--toolchain-prefix", project_desc["monitor_toolprefix"]]

        if print_filter is not None:
            monitor_args += ["--print_filter", print_filter]
        monitor_args += [elf_file]

        idf_py = [PYTHON] + _get_commandline_options(ctx)  # commands to re-run idf.py
        monitor_args += ["-m", " ".join("'%s'" % a for a in idf_py)]

        if "MSYSTEM" in os.environ:
            monitor_args = ["winpty"] + monitor_args
        run_tool("idf_monitor", monitor_args, args.project_dir)

    def flash(action, ctx, args):
        """
        Run esptool to flash the entire project, from an argfile generated by the build system
        """
        flasher_args_path = {
            # action -> name of flasher args file generated by build system
            "bootloader-flash": "flash_bootloader_args",
            "partition_table-flash": "flash_partition_table_args",
            "app-flash": "flash_app_args",
            "flash": "flash_project_args",
            "encrypted-app-flash": "flash_encrypted_app_args",
            "encrypted-flash": "flash_encrypted_project_args",
        }[action]
        esptool_args = _get_esptool_args(args)
        esptool_args += ["write_flash", "@" + flasher_args_path]
        run_tool("esptool.py", esptool_args, args.build_dir)

    def erase_flash(action, ctx, args):
        esptool_args = _get_esptool_args(args)
        esptool_args += ["erase_flash"]
        run_tool("esptool.py", esptool_args, args.build_dir)

    baud_rate = {
        "names": ["-b", "--baud"],
        "help": "Baud rate.",
        "scope": "global",
        "envvar": "ESPBAUD",
        "default": 460800,
    }

    port = {
        "names": ["-p", "--port"],
        "help": "Serial port.",
        "scope": "global",
        "envvar": "ESPPORT",
        "default": None,
    }

    serial_actions = {
        "actions": {
            "flash": {
                "callback": flash,
                "help": "Flash the project.",
                "options": global_options + [baud_rate, port],
                "dependencies": ["all"],
                "order_dependencies": ["erase_flash"],
            },
            "erase_flash": {
                "callback": erase_flash,
                "help": "Erase entire flash chip.",
                "options": [baud_rate, port],
            },
            "monitor": {
                "callback": monitor,
                "help": "Display serial output.",
                "options": [
                    port,
                    {
                        "names": ["--print-filter", "--print_filter"],
                        "help": (
                            "Filter monitor output.\n"
                            "Restrictions on what to print can be specified as a series of <tag>:<log_level> items "
                            "where <tag> is the tag string and <log_level> is a character from the set "
                            "{N, E, W, I, D, V, *} referring to a level. "
                            'For example, "tag1:W" matches and prints only the outputs written with '
                            'ESP_LOGW("tag1", ...) or at lower verbosity level, i.e. ESP_LOGE("tag1", ...). '
                            'Not specifying a <log_level> or using "*" defaults to Verbose level.\n'
                            'Please see the IDF Monitor section of the ESP-IDF documentation '
                            'for a more detailed description and further examples.'),
                        "default": None,
                    },
                ],
                "order_dependencies": [
                    "flash",
                    "partition_table-flash",
                    "bootloader-flash",
                    "app-flash",
                ],
            },
            "partition_table-flash": {
                "callback": flash,
                "help": "Flash partition table only.",
                "options": [baud_rate, port],
                "dependencies": ["partition_table"],
                "order_dependencies": ["erase_flash"],
            },
            "bootloader-flash": {
                "callback": flash,
                "help": "Flash bootloader only.",
                "options": [baud_rate, port],
                "dependencies": ["bootloader"],
                "order_dependencies": ["erase_flash"],
            },
            "app-flash": {
                "callback": flash,
                "help": "Flash the app only.",
                "options": [baud_rate, port],
                "dependencies": ["app"],
                "order_dependencies": ["erase_flash"],
            },
            "encrypted-app-flash": {
                "callback": flash,
                "help": "Flash the encrypted app only.",
                "dependencies": ["app"],
                "order_dependencies": ["erase_flash"],
            },
            "encrypted-flash": {
                "callback": flash,
                "help": "Flash the encrypted project.",
                "dependencies": ["all"],
                "order_dependencies": ["erase_flash"],
            },
        },
    }

    return serial_actions
221  tools/idf_py_actions/tools.py  Normal file
@@ -0,0 +1,221 @@
import os
import re
import subprocess
import sys

from .constants import GENERATORS
from .errors import FatalError


def executable_exists(args):
    try:
        subprocess.check_output(args)
        return True

    except Exception:
        return False


def realpath(path):
    """
    Return the canonical path with normalized case.

    It is useful on Windows to compare paths in a case-insensitive manner.
    On Unix and Mac OS X it works as `os.path.realpath()` only.
    """
    return os.path.normcase(os.path.realpath(path))


def _idf_version_from_cmake():
    version_path = os.path.join(os.environ["IDF_PATH"], "tools/cmake/version.cmake")
    regex = re.compile(r"^\s*set\s*\(\s*IDF_VERSION_([A-Z]{5})\s+(\d+)")
    ver = {}
    try:
        with open(version_path) as f:
            for line in f:
                m = regex.match(line)

                if m:
                    ver[m.group(1)] = m.group(2)

        return "v%s.%s.%s" % (ver["MAJOR"], ver["MINOR"], ver["PATCH"])
    except (KeyError, OSError):
        sys.stderr.write("WARNING: Cannot find ESP-IDF version in version.cmake\n")
        return None


def idf_version():
    """Return the version of ESP-IDF"""

    # Try to get version from git:
    try:
        version = subprocess.check_output([
            "git",
            "--git-dir=%s" % os.path.join(os.environ["IDF_PATH"], '.git'),
            "--work-tree=%s" % os.environ["IDF_PATH"], "describe", "--tags", "--dirty"
        ]).decode('utf-8', 'ignore').strip()
    except (subprocess.CalledProcessError, UnicodeError):
        # if that failed, try to parse the version.cmake file
        sys.stderr.write("WARNING: Git version unavailable, reading from source\n")
        version = _idf_version_from_cmake()

    return version


def run_tool(tool_name, args, cwd):
    def quote_arg(arg):
        " Quote 'arg' if necessary "
        if " " in arg and not (arg.startswith('"') or arg.startswith("'")):
            return "'" + arg + "'"
        return arg

    display_args = " ".join(quote_arg(arg) for arg in args)
    print("Running %s in directory %s" % (tool_name, quote_arg(cwd)))
    print('Executing "%s"...' % str(display_args))
    try:
        # Note: we explicitly pass in os.environ here, as we may have set IDF_PATH there during startup
        subprocess.check_call(args, env=os.environ, cwd=cwd)
    except subprocess.CalledProcessError as e:
        raise FatalError("%s failed with exit code %d" % (tool_name, e.returncode))


def _strip_quotes(value, regexp=re.compile(r"^\"(.*)\"$|^'(.*)'$|^(.*)$")):
    """
    Strip quotes like CMake does during parsing cache entries
    """

    return [x for x in regexp.match(value).groups() if x is not None][0].rstrip()


def _parse_cmakecache(path):
    """
    Parse the CMakeCache file at 'path'.

    Returns a dict of name:value.

    CMakeCache entries also each have a "type", but this is currently ignored.
    """
    result = {}
    with open(path) as f:
        for line in f:
            # cmake cache lines look like: CMAKE_CXX_FLAGS_DEBUG:STRING=-g
            # groups are name, type, value
            m = re.match(r"^([^#/:=]+):([^:=]+)=(.*)\n$", line)
            if m:
                result[m.group(1)] = m.group(3)
    return result


def _new_cmakecache_entries(cache_path, new_cache_entries):
    if not os.path.exists(cache_path):
        return True

    if new_cache_entries:
        current_cache = _parse_cmakecache(cache_path)

        for entry in new_cache_entries:
            key, value = entry.split("=", 1)
            current_value = current_cache.get(key, None)
            if current_value is None or _strip_quotes(value) != current_value:
                return True

    return False


def _detect_cmake_generator(prog_name):
    """
    Find the default cmake generator, if none was specified. Raises an exception if no valid generator is found.
    """
    for (generator, _, version_check, _) in GENERATORS:
        if executable_exists(version_check):
            return generator
    raise FatalError("To use %s, either the 'ninja' or 'GNU make' build tool must be available in the PATH" % prog_name)


def ensure_build_directory(args, prog_name, always_run_cmake=False):
    """Check the build directory exists and that cmake has been run there.

    If this isn't the case, create the build directory (if necessary) and
    do an initial cmake run to configure it.

    This function will also check the args.generator parameter. If the parameter is incompatible with
    the build directory, an error is raised. If the parameter is None, this function will set it to
    an auto-detected default generator or to the value already configured in the build directory.
    """
    project_dir = args.project_dir
    # Verify the project directory
    if not os.path.isdir(project_dir):
        if not os.path.exists(project_dir):
            raise FatalError("Project directory %s does not exist" % project_dir)
        else:
            raise FatalError("%s must be a project directory" % project_dir)
    if not os.path.exists(os.path.join(project_dir, "CMakeLists.txt")):
        raise FatalError("CMakeLists.txt not found in project directory %s" % project_dir)

    # Verify/create the build directory
    build_dir = args.build_dir
    if not os.path.isdir(build_dir):
        os.makedirs(build_dir)
    cache_path = os.path.join(build_dir, "CMakeCache.txt")

    args.define_cache_entry.append("CCACHE_ENABLE=%d" % args.ccache)

    if always_run_cmake or _new_cmakecache_entries(cache_path, args.define_cache_entry):
        if args.generator is None:
            args.generator = _detect_cmake_generator(prog_name)
        try:
            cmake_args = [
                "cmake",
                "-G",
                args.generator,
                "-DPYTHON_DEPS_CHECKED=1",
                "-DESP_PLATFORM=1",
            ]
            if not args.no_warnings:
                cmake_args += ["--warn-uninitialized"]

            if args.define_cache_entry:
                cmake_args += ["-D" + d for d in args.define_cache_entry]
            cmake_args += [project_dir]

            run_tool("cmake", cmake_args, cwd=args.build_dir)
        except Exception:
            # don't allow partially valid CMakeCache.txt files,
            # to keep the "should I run cmake?" logic simple
            if os.path.exists(cache_path):
                os.remove(cache_path)
            raise

    # Learn some things from the CMakeCache.txt file in the build directory
    cache = _parse_cmakecache(cache_path)
    try:
        generator = cache["CMAKE_GENERATOR"]
    except KeyError:
        generator = _detect_cmake_generator(prog_name)
    if args.generator is None:
        args.generator = generator  # reuse the previously configured generator, if none was given
    if generator != args.generator:
        raise FatalError("Build is configured for generator '%s' not '%s'. Run '%s fullclean' to start again." %
                         (generator, args.generator, prog_name))

    try:
        home_dir = cache["CMAKE_HOME_DIRECTORY"]
        if realpath(home_dir) != realpath(project_dir):
            raise FatalError(
                "Build directory '%s' configured for project '%s' not '%s'. Run '%s fullclean' to start again." %
                (build_dir, realpath(home_dir), realpath(project_dir), prog_name))
    except KeyError:
        pass  # if cmake failed part way, CMAKE_HOME_DIRECTORY may not be set yet


def merge_action_lists(*action_lists):
    merged_actions = {
        "global_options": [],
        "actions": {},
        "global_action_callbacks": [],
    }
    for action_list in action_lists:
        merged_actions["global_options"].extend(action_list.get("global_options", []))
        merged_actions["actions"].update(action_list.get("actions", {}))
        merged_actions["global_action_callbacks"].extend(action_list.get("global_action_callbacks", []))
    return merged_actions
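To illustrate the cache handling above, here is a small self-contained sketch of what _parse_cmakecache() and _new_cmakecache_entries() do; the cache contents below are made up for illustration only:

```python
import re

# Hypothetical CMakeCache.txt content, for illustration only.
sample_cache = (
    "CMAKE_GENERATOR:INTERNAL=Ninja\n"
    "IDF_TARGET:STRING=esp32\n"
    "# a comment line that the regex ignores\n"
)

# Same pattern as _parse_cmakecache(): groups are name, type, value.
entry_re = re.compile(r"^([^#/:=]+):([^:=]+)=(.*)\n$")

cache = {}
for line in sample_cache.splitlines(keepends=True):
    m = entry_re.match(line)
    if m:
        cache[m.group(1)] = m.group(3)

print(cache)  # {'CMAKE_GENERATOR': 'Ninja', 'IDF_TARGET': 'esp32'}

# A new "-D" entry only forces a cmake re-run when it differs from the cached value,
# which is the check _new_cmakecache_entries() performs.
print(cache.get("IDF_TARGET") != "esp32s2beta")  # True -> cmake would be re-run
```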
21  tools/test_idf_py/extra_path/some_ext.py  Normal file
@@ -0,0 +1,21 @@
def action_extensions(base_actions, project_path):
    def some_callback(ut_apply_config_name, ctx, args):
        print("!!! From some subcommand")

    def some_global_callback(ctx, global_args, tasks):
        print("!!! From some global callback: %s" % global_args.some_extension_option)

    return {
        "global_options": [{
            "names": ["--some-extension-option"],
            "help": "Help for option --some-extension-option",
            "default": "test",
        }],
        "global_action_callbacks": [some_global_callback],
        "actions": {
            "extra_subcommand": {
                "callback": some_callback,
                "help": "Help for some subcommand.",
            },
        },
    }
@@ -0,0 +1 @@
from .test_extension import action_extensions  # noqa: F401
@@ -0,0 +1,24 @@
import os


def action_extensions(base_actions, project_path=os.getcwd()):
    def test_callback(ut_apply_config_name, ctx, args):
        print("!!! From test_subcommand")

    def test_global_callback(ctx, global_args, tasks):
        print("!!! From test global callback: %s" % global_args.test_extension_option)

    return {
        "global_options": [{
            "names": ["--test-extension-option"],
            "help": "Help for option --test-extension-option",
            "default": "test",
        }],
        "global_action_callbacks": [test_global_callback],
        "actions": {
            "test_subcommand": {
                "callback": test_callback,
                "help": "Help for test subcommand.",
            },
        },
    }
@@ -14,8 +14,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys
import unittest
import subprocess

try:
    from StringIO import StringIO
@@ -28,32 +30,67 @@ except ImportError:
sys.path.append('..')
import idf

current_dir = os.path.dirname(os.path.realpath(__file__))
idf_py_path = os.path.join(current_dir, '..', 'idf.py')
extension_path = os.path.join(current_dir, 'test_idf_extensions', 'test_ext')
link_path = os.path.join(current_dir, '..', 'idf_py_actions', 'test_ext')


class TestExtensions(unittest.TestCase):
    def test_extension_loading(self):
        try:
            os.symlink(extension_path, link_path)
            os.environ["IDF_EXTRA_ACTIONS_PATH"] = os.path.join(current_dir, 'extra_path')
            output = subprocess.check_output([sys.executable, idf_py_path, "--help"],
                                             env=os.environ).decode('utf-8', 'ignore')

            self.assertIn('--test-extension-option', output)
            self.assertIn('test_subcommand', output)
            self.assertIn('--some-extension-option', output)
            self.assertIn('extra_subcommand', output)
        finally:
            os.remove(link_path)

    def test_extension_execution(self):
        try:
            os.symlink(extension_path, link_path)
            os.environ["IDF_EXTRA_ACTIONS_PATH"] = ";".join([os.path.join(current_dir, 'extra_path')])
            output = subprocess.check_output(
                [sys.executable, idf_py_path, "--some-extension-option=awesome", 'test_subcommand', "extra_subcommand"],
                env=os.environ).decode('utf-8', 'ignore')
            self.assertIn('!!! From some global callback: awesome', output)
            self.assertIn('!!! From some subcommand', output)
            self.assertIn('!!! From test global callback: test', output)
            self.assertIn('!!! From some subcommand', output)
        finally:
            os.remove(link_path)


class TestDependencyManagement(unittest.TestCase):
    def test_dependencies(self):
        result = idf.init_cli()(
            args=['--no-run', 'flash'],
            args=['--dry-run', 'flash'],
            standalone_mode=False,
        )
        self.assertEqual(['all', 'flash'], list(result.keys()))

    def test_order_only_dependencies(self):
        result = idf.init_cli()(
            args=['--no-run', 'build', 'fullclean', 'all'],
            args=['--dry-run', 'build', 'fullclean', 'all'],
            standalone_mode=False,
        )
        self.assertEqual(['fullclean', 'all'], list(result.keys()))

    def test_repeated_dependencies(self):
        result = idf.init_cli()(
            args=['--no-run', 'fullclean', 'app', 'fullclean', 'fullclean'],
            args=['--dry-run', 'fullclean', 'app', 'fullclean', 'fullclean'],
            standalone_mode=False,
        )
        self.assertEqual(['fullclean', 'app'], list(result.keys()))

    def test_complex_case(self):
        result = idf.init_cli()(
            args=['--no-run', 'clean', 'monitor', 'clean', 'fullclean', 'flash'],
            args=['--dry-run', 'clean', 'monitor', 'clean', 'fullclean', 'flash'],
            standalone_mode=False,
        )
        self.assertEqual(['fullclean', 'clean', 'all', 'flash', 'monitor'], list(result.keys()))
@@ -62,7 +99,7 @@ class TestDependencyManagement(unittest.TestCase):
        capturedOutput = StringIO()
        sys.stdout = capturedOutput
        idf.init_cli()(
            args=['--no-run', 'clean', 'monitor', 'build', 'clean', 'fullclean', 'all'],
            args=['--dry-run', 'clean', 'monitor', 'build', 'clean', 'fullclean', 'all'],
            standalone_mode=False,
        )
        sys.stdout = sys.__stdout__
@@ -71,7 +108,7 @@ class TestDependencyManagement(unittest.TestCase):

        sys.stdout = capturedOutput
        idf.init_cli()(
            args=['--no-run', 'clean', 'clean'],
            args=['--dry-run', 'clean', 'clean'],
            standalone_mode=False,
        )
        sys.stdout = sys.__stdout__
@@ -84,7 +121,7 @@ class TestGlobalAndSubcommandParameters(unittest.TestCase):
        """Can set -D twice: globally and for subcommand if values are the same"""

        idf.init_cli()(
            args=['--no-run', '-DAAA=BBB', '-DCCC=EEE', 'build', '-DAAA=BBB', '-DCCC=EEE'],
            args=['--dry-run', '-DAAA=BBB', '-DCCC=EEE', 'build', '-DAAA=BBB', '-DCCC=EEE'],
            standalone_mode=False,
        )

@@ -93,7 +130,7 @@ class TestGlobalAndSubcommandParameters(unittest.TestCase):

        with self.assertRaises(idf.FatalError):
            idf.init_cli()(
                args=['--no-run', '-DAAA=BBB', 'build', '-DAAA=EEE', '-DCCC=EEE'],
                args=['--dry-run', '-DAAA=BBB', 'build', '-DAAA=EEE', '-DCCC=EEE'],
                standalone_mode=False,
            )