feat: add python script to activate ESP-IDF environment
Signed-off-by: Frantisek Hrbata <frantisek.hrbata@espressif.com>
parent 0c388cf576
commit 1c22f6c4e8
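The overall flow introduced here: export.sh and export.fish now delegate environment setup to activate.py, which re-executes activate_venv.py inside the ESP-IDF Python virtual environment and, in --export mode, prints a single line telling the calling shell which temporary activation script to source. A minimal sketch, not part of this commit, of how a caller could consume that output (the path in the comment is illustrative):

    # Sketch: capture the "source" line emitted by "activate.py --export".
    # Assumes the working directory is the ESP-IDF root where activate.py lives.
    import subprocess
    import sys

    out = subprocess.run([sys.executable, 'activate.py', '--export'],
                         capture_output=True, text=True, check=True)
    print(out.stdout.strip())  # e.g. ". /tmp/esp_idf_activate/activate_abc123" (illustrative path)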
43
activate.py
Executable file
@@ -0,0 +1,43 @@
#!/usr/bin/env python
# SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
"""
Ensure that the Python version used to initiate this script is appropriate for
running the ESP-IDF shell activation. The primary goal is to perform the minimum
necessary checks to identify the virtual environment with the default user Python
and then launch activate_venv.py using the ESP-IDF Python virtual environment.
"""
import os
import sys
from subprocess import run
from subprocess import SubprocessError


def die(msg: str) -> None:
    sys.exit(f'error: {msg}')


idf_path = os.path.realpath(os.path.dirname(__file__))
idf_tools_path = os.path.join(idf_path, 'tools')
sys.path.insert(0, idf_tools_path)

try:
    # The idf_tools module checks for Python version compatibility.
    import idf_tools
except ImportError as e:
    die(f'Unable to import the idf_tools module: {e}')

# Get ESP-IDF venv python path
idf_tools.g.idf_path = idf_path
os.environ['IDF_PYTHON_ENV_PATH'] = ''  # let idf_tools get the pyenv path
idf_tools.g.idf_tools_path = os.environ.get('IDF_TOOLS_PATH') or os.path.expanduser(idf_tools.IDF_TOOLS_PATH_DEFAULT)
idf_python_env_path, idf_python_export_path, virtualenv_python, idf_version = idf_tools.get_python_env_path()

os.environ['IDF_PATH'] = idf_path
os.environ['IDF_PYTHON_ENV_PATH'] = idf_python_env_path
os.environ['ESP_IDF_VERSION'] = idf_version

try:
    run([virtualenv_python, os.path.join(idf_path, 'activate_venv.py')] + sys.argv[1:], check=True)
except (OSError, SubprocessError):
    die('Activation script failed')
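For readers unfamiliar with idf_tools.get_python_env_path(), it returns the 4-tuple that the script unpacks above. A rough, hypothetical illustration of the shape of those values (none of the paths or the version string below are produced by this commit):

    # Illustrative only: the kind of values activate.py expects back from idf_tools.
    idf_python_env_path = '/home/user/.espressif/python_env/idf5.3_py3.11_env'          # venv root
    idf_python_export_path = '/home/user/.espressif/python_env/idf5.3_py3.11_env/bin'   # venv bin dir
    virtualenv_python = '/home/user/.espressif/python_env/idf5.3_py3.11_env/bin/python' # venv interpreter
    idf_version = '5.3'
    # activate.py then re-runs activate_venv.py with this interpreter, forwarding sys.argv[1:].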
466
activate_venv.py
Normal file
@@ -0,0 +1,466 @@
# SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0
import argparse
import os
import shutil
import sys
from pathlib import Path
from subprocess import run
from subprocess import SubprocessError
from tempfile import gettempdir
from tempfile import NamedTemporaryFile
from tempfile import TemporaryDirectory
from textwrap import dedent
from typing import Any
from typing import Callable
from typing import Dict
from typing import List
from typing import Optional
from typing import TextIO

try:
    # The ESP-IDF virtual environment hasn't been verified yet, so see if the rich library
    # can be imported to display error and status messages nicely.
    from rich.console import Console
except ImportError as e:
    sys.exit(f'error: Unable to import the rich module: {e}. Please execute the install script.')


def status_message(msg: str, rv_on_ok: bool=False, die_on_err: bool=True) -> Callable:
    def inner(func: Callable) -> Callable:
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            eprint(f'[dark_orange]*[/dark_orange] {msg} ... ', end='')

            try:
                rv = func(*args, **kwargs)
            except Exception as e:
                eprint('[red]FAILED[/red]')
                if ARGS.debug:
                    raise
                if not die_on_err:
                    return None
                die(str(e))

            if rv_on_ok:
                eprint(f'[green]{rv}[/green]')
            else:
                eprint('[green]OK[/green]')

            return rv
        return wrapper
    return inner


class Shell():
    def __init__(self, shell: str, deactivate_cmd: str, new_esp_idf_env: Dict[str,str]):
        self.shell = shell
        self.deactivate_cmd = deactivate_cmd
        self.new_esp_idf_env = new_esp_idf_env

        # TODO We are not removing the temporary activation scripts.
        self.tmp_dir_path = Path(gettempdir()) / 'esp_idf_activate'
        self.tmp_dir_path.mkdir(parents=True, exist_ok=True)
        with NamedTemporaryFile(dir=self.tmp_dir_path, delete=False, prefix='activate_') as fd:
            self.script_file_path = Path(fd.name)
        debug(f'Temporary script file path: {self.script_file_path}')

    def expanded_env(self) -> Dict[str, str]:
        expanded_env = self.new_esp_idf_env.copy()

        if 'PATH' not in expanded_env:
            return expanded_env

        # The PATH returned by idf_tools.py export is not expanded.
        # Note that for the export script, the PATH should remain unexpanded
        # to ensure proper deactivation. In the export script,
        # the expansion should occur after deactivation, when the PATH is adjusted.
        # But it has to be expanded for processes started with the new PATH.
        expanded_env['PATH'] = os.path.expandvars(expanded_env['PATH'])
        return expanded_env

    def spawn(self) -> None:
        # This method should likely work for all shells because we are delegating the initialization
        # purely to Python os.environ.
        new_env = os.environ.copy()
        new_env.update(self.expanded_env())
        run([self.shell], env=new_env)


class UnixShell(Shell):
    def __init__(self, shell: str, deactivate_cmd: str, new_esp_idf_env: Dict[str,str]):
        super().__init__(shell, deactivate_cmd, new_esp_idf_env)
        self.new_esp_idf_env['IDF_TOOLS_INSTALL_CMD'] = os.path.join(IDF_PATH, 'install.sh')
        self.new_esp_idf_env['IDF_TOOLS_EXPORT_CMD'] = os.path.join(IDF_PATH, 'export.sh')

    def export_file(self, fd: TextIO) -> None:
        fd.write(f'{self.deactivate_cmd}\n')
        for var, value in self.new_esp_idf_env.items():
            fd.write(f'export {var}="{value}"\n')
        prompt = self.get_prompt()
        fd.write(f'{prompt}\n')

    def get_prompt(self) -> str:
        return f'PS1="(ESP-IDF {IDF_VERSION}) $PS1"'

    def export(self) -> None:
        with open(self.script_file_path, 'w') as fd:
            self.export_file(fd)
            fd.write((f'echo "\nDone! You can now compile ESP-IDF projects.\n'
                      'Go to the project directory and run:\n\n  idf.py build\n"'))

        print(f'. {self.script_file_path}')

    def click_ver(self) -> int:
        return int(click.__version__.split('.')[0])


class BashShell(UnixShell):
    def get_bash_major(self) -> int:
        env = self.expanded_env()
        stdout = run_cmd(['bash', '-c', 'echo ${BASH_VERSINFO[0]}'], env=env)
        bash_maj = int(stdout)
        return bash_maj

    @status_message('Shell completion', die_on_err=False)
    def autocompletion(self) -> str:
        bash_maj = self.get_bash_major()
        if bash_maj < 4:
            raise RuntimeError('Autocompletion not supported')

        env = self.expanded_env()
        env['LANG'] = 'en'
        env['_IDF.PY_COMPLETE'] = 'bash_source' if self.click_ver() >= 8 else 'source_bash'
        stdout = run_cmd([IDF_PY], env=env)
        return stdout

    def export_file(self, fd: TextIO) -> None:
        super().export_file(fd)
        stdout = self.autocompletion()
        if stdout is not None:
            fd.write(f'{stdout}\n')

    def init_file(self) -> None:
        with open(self.script_file_path, 'w') as fd:
            # We will use the --init-file option to pass a custom rc file, which will ignore .bashrc,
            # so we need to source .bashrc first.
            fd.write(f'source ~/.bashrc\n')

            stdout = self.autocompletion()
            if stdout is not None:
                fd.write(f'{stdout}\n')

            prompt = self.get_prompt()
            fd.write(f'{prompt}\n')

    def spawn(self) -> None:
        self.init_file()
        new_env = os.environ.copy()
        new_env.update(self.expanded_env())
        run([self.shell, '--init-file', str(self.script_file_path)], env=new_env)


class ZshShell(UnixShell):
    @status_message('Shell completion', die_on_err=False)
    def autocompletion(self) -> str:
        env = self.expanded_env()
        env['LANG'] = 'en'
        env['_IDF.PY_COMPLETE'] = 'zsh_source' if self.click_ver() >= 8 else 'source_zsh'
        stdout = run_cmd([IDF_PY], env=env)
        return f'autoload -Uz compinit && compinit -u\n{stdout}'

    def export_file(self, fd: TextIO) -> None:
        super().export_file(fd)
        stdout = self.autocompletion()
        # Add autocompletion
        if stdout is not None:
            fd.write(f'{stdout}\n')

    def init_file(self) -> None:
        # If ZDOTDIR is unset, HOME is used instead.
        # https://zsh.sourceforge.io/Doc/Release/Files.html#Startup_002fShutdown-Files
        zdotdir = os.environ.get('ZDOTDIR', str(Path.home()))
        with open(self.script_file_path, 'w') as fd:
            # We will use the ZDOTDIR env variable to load our custom script in the newly spawned shell
            # so we need to source .zshrc first.
            zshrc_path = Path(zdotdir) / '.zshrc'
            if zshrc_path.is_file():
                fd.write(f'source {zshrc_path}\n')

            # Add autocompletion
            stdout = self.autocompletion()
            if stdout is not None:
                fd.write(f'{stdout}\n')

            prompt = self.get_prompt()
            fd.write(f'{prompt}\n')

            # TODO This might not be needed, or consider resetting it to the original value
            fd.write('unset ZDOTDIR\n')

    def spawn(self) -> None:
        self.init_file()

        # Create a temporary directory to use as ZDOTDIR
        tmpdir = TemporaryDirectory()
        tmpdir_path = Path(tmpdir.name)
        debug(f'Temporary ZDOTDIR {tmpdir_path} with .zshrc file')

        # Copy init script to the custom ZDOTDIR
        zshrc_path = tmpdir_path / '.zshrc'
        shutil.copy(str(self.script_file_path), str(zshrc_path))

        new_env = os.environ.copy()
        new_env.update(self.expanded_env())
        # Set new ZDOTDIR in the new environment
        new_env['ZDOTDIR'] = str(tmpdir_path)

        run([self.shell], env=new_env)


class FishShell(UnixShell):
    def __init__(self, shell: str, deactivate_cmd: str, new_esp_idf_env: Dict[str,str]):
        super().__init__(shell, deactivate_cmd, new_esp_idf_env)
        self.new_esp_idf_env['IDF_TOOLS_INSTALL_CMD'] = os.path.join(IDF_PATH, 'install.fish')
        self.new_esp_idf_env['IDF_TOOLS_EXPORT_CMD'] = os.path.join(IDF_PATH, 'export.fish')

    @status_message('Shell completion', die_on_err=False)
    def autocompletion(self) -> str:
        env = self.expanded_env()
        env['LANG'] = 'en'
        env['_IDF.PY_COMPLETE'] = 'fish_source' if self.click_ver() >= 8 else 'source_fish'
        stdout = run_cmd([IDF_PY], env=env)
        return stdout

    def get_prompt(self) -> str:
        prompt = dedent(f'''
            functions -c fish_prompt _old_fish_prompt
            function fish_prompt
                printf "(ESP-IDF {IDF_VERSION}) "
                _old_fish_prompt
            end
            ''')
        return prompt

    def export_file(self, fd: TextIO) -> None:
        fd.write(f'{self.deactivate_cmd}\n')
        for var, value in self.new_esp_idf_env.items():
            fd.write(f'export {var}="{value}"\n')

        # Add autocompletion
        stdout = self.autocompletion()
        if stdout is not None:
            fd.write(f'{stdout}\n')

        # Adjust fish prompt
        prompt = self.get_prompt()
        fd.write(f'{prompt}\n')

    def init_file(self) -> None:
        with open(self.script_file_path, 'w') as fd:
            # Add autocompletion
            stdout = self.autocompletion()
            if stdout is not None:
                fd.write(f'{stdout}\n')
            # Adjust fish prompt
            prompt = self.get_prompt()
            fd.write(f'{prompt}\n')

    def spawn(self) -> None:
        self.init_file()
        new_env = os.environ.copy()
        new_env.update(self.expanded_env())
        run([self.shell, f'--init-command=source {self.script_file_path}'], env=new_env)


SHELL_CLASSES = {
    'bash': BashShell,
    'zsh': ZshShell,
    'fish': FishShell,
    'sh': UnixShell,
    'ksh': UnixShell,
    'dash': UnixShell,
    'nu': UnixShell,
}

SUPPORTED_SHELLS = ' '.join(SHELL_CLASSES.keys())

CONSOLE_STDERR = None
CONSOLE_STDOUT = None


def err(*args: Any, **kwargs: Any) -> None:
    CONSOLE_STDERR.print('[red]error[/red]: ', *args, **kwargs)  # type: ignore


def warn(*args: Any, **kwargs: Any) -> None:
    CONSOLE_STDERR.print('[yellow]warning[/yellow]: ', *args, **kwargs)  # type: ignore


def debug(*args: Any, **kwargs: Any) -> None:
    if not ARGS.debug:
        return
    CONSOLE_STDERR.print('[green_yellow]debug[/green_yellow]: ', *args, **kwargs)  # type: ignore


def die(*args: Any, **kwargs: Any) -> None:
    err(*args, **kwargs)
    sys.exit(1)


def eprint(*args: Any, **kwargs: Any) -> None:
    CONSOLE_STDERR.print(*args, **kwargs)  # type: ignore


def oprint(*args: Any, **kwargs: Any) -> None:
    CONSOLE_STDOUT.print(*args, **kwargs)  # type: ignore


def run_cmd(cmd: List[str], env: Optional[Dict[str, Any]]=None) -> str:
    new_env = os.environ.copy()
    if env is not None:
        new_env.update(env)

    cmd_str = '"{}"'.format(' '.join(cmd))
    try:
        p = run(cmd, env=new_env, text=True, capture_output=True)
    except (OSError, SubprocessError) as e:
        raise RuntimeError(f'Command {cmd_str} failed: {e}')

    stdout = p.stdout.strip()
    stderr = p.stderr.strip()
    if p.returncode:
        raise RuntimeError(f'Command {cmd_str} failed with error code {p.returncode}\n{stdout}\n{stderr}')

    return stdout


parser = argparse.ArgumentParser(prog='activate',
                                 description='Activate ESP-IDF environment')
parser.add_argument('-s', '--shell',
                    metavar='SHELL',
                    default=os.environ.get('ESP_IDF_SHELL', None),
                    help='Explicitly specify shell to start. For example bash, zsh, powershell.exe, cmd.exe')
parser.add_argument('-l', '--list',
                    action='store_true',
                    help=('List supported shells.'))
parser.add_argument('-e', '--export',
                    action='store_true',
                    help=('Generate commands to run in the terminal.'))
parser.add_argument('-n', '--no-color',
                    action='store_true',
                    help=('Disable ANSI color escape sequences.'))
parser.add_argument('-d', '--debug',
                    action='store_true',
                    help=('Enable debug information.'))
parser.add_argument('-q', '--quiet',
                    action='store_true',
                    help=('Suppress all output.'))

ARGS = parser.parse_args()

CONSOLE_STDERR = Console(stderr=True, quiet=ARGS.quiet, no_color=ARGS.no_color)
CONSOLE_STDOUT = Console(quiet=ARGS.quiet, no_color=ARGS.no_color)

if ARGS.list:
    oprint(SUPPORTED_SHELLS)
    sys.exit()

# The activate.py script sets the following environment variables
IDF_PATH = os.environ['IDF_PATH']
IDF_VERSION = os.environ['ESP_IDF_VERSION']
IDF_PYTHON_ENV_PATH = os.environ['IDF_PYTHON_ENV_PATH']
IDF_TOOLS_PY = os.path.join(IDF_PATH, 'tools', 'idf_tools.py')
IDF_PY = os.path.join(IDF_PATH, 'tools', 'idf.py')

eprint(f'[dark_orange]Activating ESP-IDF {IDF_VERSION}')
debug(f'IDF_PATH {IDF_PATH}')
debug(f'IDF_PYTHON_ENV_PATH {IDF_PYTHON_ENV_PATH}')


@status_message('Checking python version', rv_on_ok=True)
def check_python_version() -> str:
    # Check the Python version within a virtual environment
    python_version_checker = os.path.join(IDF_PATH, 'tools', 'python_version_checker.py')
    run_cmd([sys.executable, python_version_checker])
    ver = sys.version_info[:3]
    return f'{ver[0]}.{ver[1]}.{ver[2]}'


@status_message('Checking python dependencies')
def check_python_dependencies() -> None:
    # Check Python dependencies within the virtual environment
    run_cmd([sys.executable, IDF_TOOLS_PY, 'check-python-dependencies'])


check_python_version()
check_python_dependencies()
# TODO Report installed tools that are not currently used by active ESP-IDF version

# From this point forward, we are functioning within a fully validated ESP-IDF environment.

# TODO Verify the architectures supported by psutil. We might need to create a wheel for it or
# substitute it with ps and tasklist commands.
import psutil  # noqa: E402
import click  # noqa: E402


@status_message('Deactivating the current ESP-IDF environment')
def get_deactivate_cmd() -> str:
    # Get previous ESP-IDF system environment variables
    cmd = [sys.executable, IDF_TOOLS_PY, 'export', '--deactivate']
    stdout = run_cmd(cmd)
    return stdout


@status_message('Establishing a new ESP-IDF environment')
def get_idf_env() -> Dict[str,str]:
    # Get ESP-IDF system environment variables
    extra_paths_list = [os.path.join('components', 'espcoredump'),
                        os.path.join('components', 'partition_table'),
                        os.path.join('components', 'app_update')]
    extra_paths = ':'.join([os.path.join(IDF_PATH, path) for path in extra_paths_list])
    cmd = [sys.executable, IDF_TOOLS_PY, 'export', '--format', 'key-value', '--add_paths_extras', extra_paths]
    stdout = run_cmd(cmd)

    # idf_tools.py might not export certain environment variables if they are already set
    idf_env: Dict[str, Any] = {
        'IDF_PATH': os.environ['IDF_PATH'],
        'ESP_IDF_VERSION': os.environ['ESP_IDF_VERSION'],
        'IDF_PYTHON_ENV_PATH': os.environ['IDF_PYTHON_ENV_PATH'],
    }

    for line in stdout.splitlines():
        var, val = line.split('=')
        idf_env[var] = val

    if 'PATH' in idf_env:
        idf_env['PATH'] = ':'.join([extra_paths, idf_env['PATH']])

    return idf_env


@status_message('Identifying shell', rv_on_ok=True)
def detect_shell() -> str:
    if ARGS.shell is not None:
        return str(ARGS.shell)
    ppid = psutil.Process(os.getpid()).ppid()
    # Look for grandparent, because we started from activate.py.
    pppid = psutil.Process(ppid).ppid()
    return str(psutil.Process(pppid).name())


deactivate_cmd = get_deactivate_cmd()
new_esp_idf_env = get_idf_env()
detected_shell = detect_shell()

if detected_shell not in SHELL_CLASSES:
    die(f'"{detected_shell}" shell is not among the supported options: "{SUPPORTED_SHELLS}"')

shell = SHELL_CLASSES[detected_shell](detected_shell, deactivate_cmd, new_esp_idf_env)

if ARGS.export:
    shell.export()
    sys.exit()

shell.spawn()
eprint(f'[dark_orange]ESP-IDF environment exited.')
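Two details of activate_venv.py above that are easy to miss: get_idf_env() parses the line-per-variable output of "idf_tools.py export --format key-value" into a dict, and the PATH that export produces typically still contains an unexpanded "$PATH" reference which expanded_env() resolves with os.path.expandvars() only for processes the script spawns itself. A small self-contained sketch of both steps (the sample export lines and paths are made up for illustration):

    import os

    # Made-up sample of the key-value export format parsed by get_idf_env().
    sample = ('OPENOCD_SCRIPTS=/home/user/.espressif/tools/openocd-esp32/share/openocd/scripts\n'
              'PATH=/home/user/.espressif/tools/xtensa-esp-elf/bin:$PATH')

    env = {}
    for line in sample.splitlines():
        var, val = line.split('=', 1)  # split only on the first '=' in this sketch
        env[var] = val

    # expanded_env() relies on os.path.expandvars() to resolve the embedded "$PATH"
    # before a sub-shell is spawned with the merged environment.
    os.environ['PATH'] = '/usr/bin:/bin'
    print(os.path.expandvars(env['PATH']))
    # -> /home/user/.espressif/tools/xtensa-esp-elf/bin:/usr/bin:/bin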
102
export.fish
@@ -5,105 +5,5 @@ function unset
    set --erase $argv
end

function __main
    set script_dir (dirname (realpath (status -f)))
    if not set -q IDF_PATH
        set -gx IDF_PATH $script_dir
        echo "Setting IDF_PATH to '$IDF_PATH'"
    end

    if test "$IDF_PATH" != "$script_dir"
        # Changing IDF_PATH is important when there are 2 ESP-IDF versions in different directories.
        # Sourcing this script without the change would cause the wrong export script to be sourced.
        echo "Resetting IDF_PATH from '$IDF_PATH' to '$script_dir'"
        set IDF_PATH "$script_dir"
    end

    set oldpath = $PATH

    echo "Detecting the Python interpreter"
    source "$IDF_PATH"/tools/detect_python.fish

    echo "Checking Python compatibility"
    "$ESP_PYTHON" "$IDF_PATH"/tools/python_version_checker.py

    echo "Checking other ESP-IDF version."
    set idf_deactivate ("$ESP_PYTHON" "$IDF_PATH"/tools/idf_tools.py export --deactivate) || return 1
    eval "$idf_deactivate"

    echo "Adding ESP-IDF tools to PATH..."
    # Call idf_tools.py to export tool paths
    set -gx IDF_TOOLS_EXPORT_CMD "$IDF_PATH"/export.fish
    set -gx IDF_TOOLS_INSTALL_CMD "$IDF_PATH"/install.fish
    # Allow calling some IDF python tools without specifying the full path
    # "$IDF_PATH"/tools is already added by 'idf_tools.py export'
    set IDF_ADD_PATHS_EXTRAS "$IDF_PATH"/components/espcoredump
    set IDF_ADD_PATHS_EXTRAS "$IDF_ADD_PATHS_EXTRAS":"$IDF_PATH"/components/partition_table
    set IDF_ADD_PATHS_EXTRAS "$IDF_ADD_PATHS_EXTRAS":"$IDF_PATH"/components/app_update

    set idf_exports ("$ESP_PYTHON" "$IDF_PATH"/tools/idf_tools.py export --add_paths_extras="$IDF_ADD_PATHS_EXTRAS") || return 1
    eval "$idf_exports"
    set -x PATH "$IDF_ADD_PATHS_EXTRAS":"$PATH"

    echo "Checking if Python packages are up to date..."
    "$ESP_PYTHON" "$IDF_PATH"/tools/idf_tools.py check-python-dependencies || return 1

    set added_path_variables
    for entry in $PATH;
        if not contains $entry $oldpath
            set -a added_path_variables $entry
        end
    end
    if set -q added_path_variables[1]
        echo "Added the following directories to PATH:"
        for entry in $added_path_variables;
            echo $entry
        end
    else
        echo "All paths are already set."
    end

    set uninstall ("$ESP_PYTHON" "$IDF_PATH"/tools/idf_tools.py uninstall --dry-run) || return 1
    if test -n "$uninstall"
        echo ""
        echo "Detected installed tools that are not currently used by active ESP-IDF version."
        echo "$uninstall"
        echo "To free up even more space, remove installation packages of those tools. Use option '$ESP_PYTHON $IDF_PATH/tools/idf_tools.py uninstall --remove-archives'."
        echo ""
    end

    # Clean up
    set -e added_path_variables
    set -e cmd
    set -e old_path
    set -e paths
    set -e path_prefix
    set -e path_entry
    set -e IDF_ADD_PATHS_EXTRAS
    set -e idf_exports
    set -e ESP_PYTHON
    set -e uninstall
    set -e script_dir
    set -e idf_deactivate


    # Not unsetting IDF_PYTHON_ENV_PATH, it can be used by IDF build system
    # to check whether we are using a private Python environment

    echo "Done! You can now compile ESP-IDF projects."
    echo "Go to the project directory and run:"
    echo ""
    echo "  idf.py build"
    echo ""
end

__main

set click_version (python -c 'import click; print(click.__version__.split(".")[0])')
if test $click_version -lt 8
    eval (env _IDF.PY_COMPLETE=source_fish idf.py)
else
    eval (env _IDF.PY_COMPLETE=fish_source idf.py)
end

functions -e __main
eval ("$script_dir"/activate.py --export)
234
export.sh
@@ -1,233 +1,47 @@
# This script should be sourced, not executed.

__realpath() {
    wdir="$PWD"; [ "$PWD" = "/" ] && wdir=""
    arg=$1
    case "$arg" in
        /*) scriptdir="${arg}";;
        *) scriptdir="$wdir/${arg#./}";;
    esac
    scriptdir="${scriptdir%/*}"
    echo "$scriptdir"
}


__verbose() {
    [ -n "${IDF_EXPORT_QUIET-}" ] && return
    echo "$@"
}

__script_dir(){
    # shellcheck disable=SC2169,SC2169,SC2039,SC3010,SC3028 # unreachable with 'dash'
    if [ "$(uname -s)" = "Darwin" ]; then
        # convert possibly relative path to absolute
        script_dir="$(__realpath "${self_path}")"
        # resolve any ../ references to make the path shorter
        script_dir="$(cd "${script_dir}" || exit 1; pwd)"
    else
        # convert to full path and get the directory name of that
        script_name="$(readlink -f "${self_path}")"
        script_dir="$(dirname "${script_name}")"
    fi
    if [ "$script_dir" = '.' ]
    then
        script_dir="$(pwd)"
    fi
    echo "$script_dir"
}

__is_dir_esp_idf(){
    if [ ! -f "$1/tools/idf.py" ] || [ ! -f "$1/tools/idf_tools.py" ]
    then
        # Echo command here is not used for printing to the terminal, but as non-empty return value from function.
        echo "THIS DIRECTORY IS NOT ESP-IDF"
    fi
}

__main() {
    # The file doesn't have executable permissions, so this shouldn't really happen.
    # Doing this in case someone tries to chmod +x it and execute...

    # shellcheck disable=SC2128,SC2169,SC2039,SC3054 # ignore array expansion warning
    if [ -n "${BASH_SOURCE-}" ] && [ "${BASH_SOURCE[0]}" = "${0}" ]
    then
        echo "This script should be sourced, not executed:"
        # shellcheck disable=SC2039,SC3054 # reachable only with bash
        echo ". ${BASH_SOURCE[0]}"
        return 1
        exit 1
    fi

    # If using bash or zsh, try to guess IDF_PATH from script location.
    self_path=""
    # shellcheck disable=SC2128 # ignore array expansion warning
    if [ -n "${BASH_SOURCE-}" ]
    # Attempt to identify the ESP-IDF directory
    idf_path="."

    # shellcheck disable=SC2128,SC2169,SC2039,SC3054,SC3028 # ignore array expansion warning
    if test -n "${BASH_SOURCE-}"
    then
        self_path="${BASH_SOURCE}"
    elif [ -n "${ZSH_VERSION-}" ]
        # shellcheck disable=SC3028,SC3054 # unreachable with 'dash'
        idf_path=$(dirname "${BASH_SOURCE[0]}")
    elif test -n "${ZSH_VERSION-}"
    then
        # shellcheck disable=SC2296 # ignore parameter starts with '{' because it's zsh
        self_path="${(%):-%x}"
        idf_path=$(dirname "${(%):-%x}")
    elif test -n "${IDF_PATH-}"
    then
        idf_path=$IDF_PATH
    fi

    script_dir="$(__script_dir)"
    # Since sh or dash shells can't detect script_dir correctly, check if script_dir looks like an IDF directory
    is_script_dir_esp_idf=$(__is_dir_esp_idf "${script_dir}")

    if [ -z "${IDF_PATH-}" ]
    then
        # IDF_PATH not set in the environment.

        if [ -n "${is_script_dir_esp_idf}" ]
        if [ ! -f "${idf_path}/tools/idf.py" ] ||
           [ ! -f "${idf_path}/tools/idf_tools.py" ] ||
           [ ! -f "${idf_path}/activate.py" ]
        then
            # Echo command here is not used for printing to the terminal, but as non-empty return value from function.
            echo "Could not detect IDF_PATH. Please set it before sourcing this script:"
            echo "  export IDF_PATH=(add path here)"
            return 1
        fi
        export IDF_PATH="${script_dir}"
        echo "Setting IDF_PATH to '${IDF_PATH}'"
    else
        # IDF_PATH came from the environment, check if the path is valid
        # Set IDF_PATH to script_dir, if script_dir looks like an IDF directory
        if [ ! "${IDF_PATH}" = "${script_dir}" ] && [ -z "${is_script_dir_esp_idf}" ]
        then
            # Changing IDF_PATH is important when there are 2 ESP-IDF versions in different directories.
            # Sourcing this script without the change would cause the wrong export script to be sourced.
            echo "Resetting IDF_PATH from '${IDF_PATH}' to '${script_dir}' "
            export IDF_PATH="${script_dir}"
        fi
        # Check if this path looks like an IDF directory
        is_idf_path_esp_idf=$(__is_dir_esp_idf "${IDF_PATH}")
        if [ -n "${is_idf_path_esp_idf}" ]
        then
            echo "IDF_PATH is set to '${IDF_PATH}', but it doesn't look like an ESP-IDF directory."
            echo "If you have set IDF_PATH manually, check if the path is correct."
            unset idf_path
            return 1
        fi

        # The variable might have been set (rather than exported), re-export it to be sure
        export IDF_PATH="${IDF_PATH}"
    fi
    # TODO Maybe we can use "command -v" to check just for python and python3
    . "${idf_path}/tools/detect_python.sh"

    old_path="$PATH"

    echo "Detecting the Python interpreter"
    . "${IDF_PATH}/tools/detect_python.sh"

    echo "Checking Python compatibility"
    "$ESP_PYTHON" "${IDF_PATH}/tools/python_version_checker.py"

    __verbose "Checking other ESP-IDF version."
    idf_deactivate=$("$ESP_PYTHON" "${IDF_PATH}/tools/idf_tools.py" export --deactivate) || return 1
    eval "${idf_deactivate}"

    __verbose "Adding ESP-IDF tools to PATH..."
    # Call idf_tools.py to export tool paths
    export IDF_TOOLS_EXPORT_CMD=${IDF_PATH}/export.sh
    export IDF_TOOLS_INSTALL_CMD=${IDF_PATH}/install.sh
    # Allow calling some IDF python tools without specifying the full path
    # ${IDF_PATH}/tools is already added by 'idf_tools.py export'
    IDF_ADD_PATHS_EXTRAS="${IDF_PATH}/components/espcoredump"
    IDF_ADD_PATHS_EXTRAS="${IDF_ADD_PATHS_EXTRAS}:${IDF_PATH}/components/partition_table"
    IDF_ADD_PATHS_EXTRAS="${IDF_ADD_PATHS_EXTRAS}:${IDF_PATH}/components/app_update"

    idf_exports=$("$ESP_PYTHON" "${IDF_PATH}/tools/idf_tools.py" export "--add_paths_extras=${IDF_ADD_PATHS_EXTRAS}") || return 1
    # Evaluate the ESP-IDF environment set up by the activate.py script.
    idf_exports=$("$ESP_PYTHON" "${idf_path}/activate.py" --export)
    eval "${idf_exports}"
    export PATH="${IDF_ADD_PATHS_EXTRAS}:${PATH}"

    __verbose "Checking if Python packages are up to date..."
    "$ESP_PYTHON" "${IDF_PATH}/tools/idf_tools.py" check-python-dependencies || return 1

    if [ -n "$BASH" ]
    then
        path_prefix="${PATH%%"${old_path}"}"
        # shellcheck disable=SC2169,SC2039 # unreachable with 'dash'
        if [ -n "${path_prefix}" ]; then
            __verbose "Added the following directories to PATH:"
        else
            __verbose "All paths are already set."
        fi
        old_ifs="$IFS"
        IFS=":"
        for path_entry in ${path_prefix}
        do
            __verbose "  ${path_entry}"
        done
        IFS="$old_ifs"
        unset old_ifs
    else
        __verbose "Updated PATH variable:"
        __verbose "  ${PATH}"
    fi

    uninstall=$("$ESP_PYTHON" "${IDF_PATH}/tools/idf_tools.py" uninstall --dry-run) || return 1
    if [ -n "$uninstall" ]
    then
        __verbose ""
        __verbose "Detected installed tools that are not currently used by active ESP-IDF version."
        __verbose "${uninstall}"
        __verbose "To free up even more space, remove installation packages of those tools. Use option '${ESP_PYTHON} ${IDF_PATH}/tools/idf_tools.py uninstall --remove-archives'."
        __verbose ""
    fi

    __verbose "Done! You can now compile ESP-IDF projects."
    __verbose "Go to the project directory and run:"
    __verbose ""
    __verbose "  idf.py build"
    __verbose ""
}

__cleanup() {
    unset old_path
    unset paths
    unset path_prefix
    unset path_entry
    unset IDF_ADD_PATHS_EXTRAS
    unset idf_exports
    unset idf_deactivate
    unset ESP_PYTHON
    unset SOURCE_ZSH
    unset SOURCE_BASH
    unset WARNING_MSG
    unset uninstall
    unset is_idf_path_esp_idf
    unset is_script_dir_esp_idf

    unset __realpath
    unset __main
    unset __verbose
    unset __enable_autocomplete
    unset __cleanup
    unset __is_dir_esp_idf

    # Not unsetting IDF_PYTHON_ENV_PATH, it can be used by IDF build system
    # to check whether we are using a private Python environment

    return "$1"
}


__enable_autocomplete() {
    click_version="$(python -c 'import click; print(click.__version__.split(".")[0])')"
    if [ "${click_version}" -lt 8 ]
    then
        SOURCE_ZSH=source_zsh
        SOURCE_BASH=source_bash
    else
        SOURCE_ZSH=zsh_source
        SOURCE_BASH=bash_source
    fi
    if [ -n "${ZSH_VERSION-}" ]
    then
        autoload -Uz compinit && compinit -u
        eval "$(env _IDF.PY_COMPLETE=$SOURCE_ZSH idf.py)" || echo "WARNING: Failed to load shell autocompletion for zsh version: $ZSH_VERSION!"
    elif [ -n "${BASH_SOURCE-}" ]
    then
        WARNING_MSG="WARNING: Failed to load shell autocompletion for bash version: $BASH_VERSION!"
        # shellcheck disable=SC3028,SC3054,SC2086,SC2169 # code block for 'bash' only
        [ ${BASH_VERSINFO[0]} -lt 4 ] && { echo "$WARNING_MSG"; return; }
        eval "$(env LANG=en _IDF.PY_COMPLETE=$SOURCE_BASH idf.py)" || echo "$WARNING_MSG"
    fi
}

__main && __enable_autocomplete
__cleanup $?
unset idf_path
return 0
@@ -1,3 +1,4 @@
activate.py
components/app_update/otatool.py
components/efuse/efuse_table_gen.py
components/efuse/test_efuse_host/efuse_tests.py
@ -1851,7 +1851,7 @@ def add_variables_to_deactivate_file(args: List[str], new_idf_vars:Dict[str, Any
|
||||
return deactivate_file_path
|
||||
|
||||
|
||||
def deactivate_statement(args: List[str]) -> None:
|
||||
def print_deactivate_statement(args: List[str]) -> None:
|
||||
"""
|
||||
Deactivate statement is sequence of commands, that remove IDF global variables from environment,
|
||||
so the environment gets to the state it was before calling export.{sh/fish} script.
|
||||
@ -2152,8 +2152,9 @@ def action_export(args: Any) -> None:
|
||||
"""
|
||||
Exports all necessary environment variables and paths needed for tools used.
|
||||
"""
|
||||
if args.deactivate and different_idf_detected():
|
||||
deactivate_statement(args)
|
||||
if args.deactivate:
|
||||
if different_idf_detected():
|
||||
print_deactivate_statement(args)
|
||||
return
|
||||
|
||||
tools_info = load_tools_info()
|
||||
|
@ -19,6 +19,8 @@ esp-idf-size
|
||||
esp-idf-panic-decoder
|
||||
pyclang
|
||||
construct
|
||||
rich
|
||||
psutil
|
||||
|
||||
# gdb extensions dependencies
|
||||
freertos_gdb
|
||||
|
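The two new entries, rich and psutil, are the runtime dependencies activate_venv.py relies on: rich for colored status output on stderr and psutil for walking up the process tree to detect the invoking shell. A minimal sketch of both, illustrative only and not part of the commit:

    import os

    import psutil
    from rich.console import Console

    console = Console(stderr=True)
    console.print('[dark_orange]*[/dark_orange] Identifying shell ... ', end='')

    # Walk two ancestors: shell -> activate.py -> activate_venv.py,
    # so the grandparent is normally the interactive shell that started activation.
    parent = psutil.Process(os.getpid()).parent()
    grandparent = parent.parent() if parent else None
    console.print(f'[green]{grandparent.name() if grandparent else "unknown"}[/green]')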