2019-04-17 03:30:30 -04:00
|
|
|
#!/usr/bin/env python
|
|
|
|
# coding=utf-8
|
|
|
|
#
|
2024-01-19 08:19:12 -05:00
|
|
|
# SPDX-FileCopyrightText: 2019-2024 Espressif Systems (Shanghai) CO LTD
|
2021-05-26 04:44:20 -04:00
|
|
|
#
|
|
|
|
# SPDX-License-Identifier: Apache-2.0
|
|
|
|
#
|
2019-04-17 03:30:30 -04:00
|
|
|
# This script helps installing tools required to use the ESP-IDF, and updating PATH
|
|
|
|
# to use the installed tools. It can also create a Python virtual environment,
|
|
|
|
# and install Python requirements into it.
|
|
|
|
# It does not install OS dependencies. It does install tools such as the Xtensa
|
|
|
|
# GCC toolchain and ESP32 ULP coprocessor toolchain.
|
|
|
|
#
|
|
|
|
# By default, downloaded tools will be installed under $HOME/.espressif directory
|
|
|
|
# (%USERPROFILE%/.espressif on Windows). This path can be modified by setting
|
|
|
|
# IDF_TOOLS_PATH variable prior to running this tool.
|
|
|
|
#
|
|
|
|
# Users do not need to interact with this script directly. In IDF root directory,
|
|
|
|
# install.sh (.bat) and export.sh (.bat) scripts are provided to invoke this script.
|
|
|
|
#
|
|
|
|
# Usage:
|
|
|
|
#
|
|
|
|
# * To install the tools, run `idf_tools.py install`.
|
|
|
|
#
|
|
|
|
# * To install the Python environment, run `idf_tools.py install-python-env`.
|
|
|
|
#
|
|
|
|
# * To start using the tools, run `eval "$(idf_tools.py export)"` — this will update
|
|
|
|
# the PATH to point to the installed tools and set up other environment variables
|
|
|
|
# needed by the tools.
|
2021-01-25 21:49:01 -05:00
|
|
|
import argparse
|
|
|
|
import contextlib
|
|
|
|
import copy
|
2022-01-18 08:49:53 -05:00
|
|
|
import datetime
|
2021-01-25 21:49:01 -05:00
|
|
|
import errno
|
2023-09-14 01:55:24 -04:00
|
|
|
import fnmatch
|
2021-01-25 21:49:01 -05:00
|
|
|
import functools
|
|
|
|
import hashlib
|
2019-04-17 03:30:30 -04:00
|
|
|
import json
|
|
|
|
import os
|
2021-01-25 21:49:01 -05:00
|
|
|
import platform
|
|
|
|
import re
|
|
|
|
import shutil
|
|
|
|
import ssl
|
2019-04-17 03:30:30 -04:00
|
|
|
import subprocess
|
|
|
|
import sys
|
|
|
|
import tarfile
|
2022-08-09 12:13:29 -04:00
|
|
|
import tempfile
|
2022-02-07 09:18:14 -05:00
|
|
|
import time
|
2024-01-19 08:19:12 -05:00
|
|
|
from collections import namedtuple
|
|
|
|
from collections import OrderedDict
|
2022-03-19 16:37:16 -04:00
|
|
|
from json import JSONEncoder
|
2024-01-19 08:19:12 -05:00
|
|
|
from ssl import SSLContext
|
|
|
|
from tarfile import TarFile
|
2021-03-04 03:53:47 -05:00
|
|
|
from zipfile import ZipFile
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2021-05-26 04:44:20 -04:00
|
|
|
# Important notice: Please keep the lines above compatible with old Pythons so it won't fail with ImportError but with
|
|
|
|
# a nice message printed by python_version_checker.check()
|
2020-01-17 11:38:50 -05:00
|
|
|
# Check the interpreter version as early as possible, so that an unsupported
# Python fails with a friendly message instead of a syntax/import error
# further down in the file.
try:
    import python_version_checker

    # check the Python version before it will fail with an exception on syntax or package incompatibility.
    python_version_checker.check()
except RuntimeError as e:
    print(e)
    raise SystemExit(1)
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
from typing import IO, Any, Callable, Dict, Iterator, List, Optional, Set, Tuple, Union
|
2021-05-26 04:44:20 -04:00
|
|
|
from urllib.error import ContentTooShortError
|
2022-03-15 15:02:37 -04:00
|
|
|
from urllib.parse import urljoin, urlparse
|
2021-05-26 04:44:20 -04:00
|
|
|
from urllib.request import urlopen
|
2024-01-19 08:19:12 -05:00
|
|
|
from urllib.response import addinfourl
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2019-09-23 00:01:09 -04:00
|
|
|
# WindowsError only exists on Windows (Python 2 exposed it via the
# `exceptions` module); on other platforms define a stand-in subclass of
# OSError so later `except WindowsError` clauses remain valid.
try:
    from exceptions import WindowsError
except ImportError:
    # Unix
    class WindowsError(OSError):  # type: ignore
        pass
|
|
|
|
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
# Locations of the tool metadata files, relative to the IDF root directory.
TOOLS_FILE = 'tools/tools.json'
TOOLS_SCHEMA_FILE = 'tools/tools_schema.json'
TOOLS_FILE_NEW = 'tools/tools.new.json'
IDF_ENV_FILE = 'idf-env.json'
TOOLS_FILE_VERSION = 2
# Default installation root; overridable via the IDF_TOOLS_PATH environment variable.
IDF_TOOLS_PATH_DEFAULT = os.path.join('~', '.espressif')
UNKNOWN_VERSION = 'unknown'
SUBST_TOOL_PATH_REGEX = re.compile(r'\${TOOL_PATH}')
VERSION_REGEX_REPLACE_DEFAULT = r'\1'
IDF_MAINTAINER = os.environ.get('IDF_MAINTAINER') or False
TODO_MESSAGE = 'TODO'
DOWNLOAD_RETRY_COUNT = 3
URL_PREFIX_MAP_SEPARATOR = ','
IDF_TOOLS_INSTALL_CMD = os.environ.get('IDF_TOOLS_INSTALL_CMD')
# BUGFIX: this previously read the IDF_TOOLS_INSTALL_CMD variable (copy-paste
# error), so a user-provided export command override was silently ignored.
IDF_TOOLS_EXPORT_CMD = os.environ.get('IDF_TOOLS_EXPORT_CMD')
IDF_DL_URL = 'https://dl.espressif.com/dl/esp-idf'
IDF_PIP_WHEELS_URL = os.environ.get('IDF_PIP_WHEELS_URL', 'https://dl.espressif.com/pypi')
# Template for the Python virtual environment directory name, filled with the
# IDF version and the Python major.minor version.
PYTHON_VENV_DIR_TEMPLATE = 'idf{}_py{}_env'
PYTHON_VER_MAJOR_MINOR = f'{sys.version_info.major}.{sys.version_info.minor}'
VENV_VER_FILE = 'idf_version.txt'
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2024-02-09 09:03:58 -05:00
|
|
|
|
|
|
|
class GlobalVarsStore:
    """
    Pythonic way how to handle global variables.
    One global instance of this class is initialized and used as an entrypoint (store)
    It handles string and boolean properties.

    Boolean properties default to False; string properties raise ReferenceError
    when read before they have been assigned.
    """
    _instance: Optional['GlobalVarsStore'] = None
    _bool_properties = ['quiet', 'non_interactive']
    _string_properties = ['idf_path', 'idf_tools_path', 'tools_json']

    def __new__(cls, *args: Any, **kwargs: Any) -> 'GlobalVarsStore':
        if not cls._instance:
            # BUGFIX: do not forward *args/**kwargs to object.__new__() —
            # it accepts no extra arguments and would raise TypeError.
            cls._instance = super(GlobalVarsStore, cls).__new__(cls)
            cls._instance._initialize_properties()
        return cls._instance

    def _initialize_properties(self) -> None:
        """Set the backing attributes: booleans to False, strings to None."""
        # Initialize boolean properties to False
        for prop in self._bool_properties:
            setattr(self, f'_{prop}', False)
        # Initialize string properties to None
        for prop in self._string_properties:
            setattr(self, f'_{prop}', None)

    def __getattr__(self, name: str) -> Any:
        """Read a declared property from its underscore-prefixed backing slot."""
        if name in self._bool_properties + self._string_properties:
            value: Union[str, bool] = getattr(self, f'_{name}')
            # String properties must be explicitly set before first use.
            if value is None and name in self._string_properties:
                raise ReferenceError(f'Variable {name} accessed before initialization.')
            return value
        raise AttributeError(f'{name} is not a valid attribute')

    def __setattr__(self, name: str, value: Any) -> None:
        """Route declared properties into their backing slots; pass others through."""
        if name in self._bool_properties + self._string_properties:
            super().__setattr__(f'_{name}', value)
        else:
            super().__setattr__(name, value)
|
|
|
|
|
|
|
|
|
|
|
|
# The single process-wide settings store; helper functions below consult `g`
# (e.g. g.quiet, g.non_interactive).
g = GlobalVarsStore()
|
|
|
|
|
|
|
|
|
|
|
|
def fatal(text: str, *args: str) -> None:
    """
    Writes ERROR: + text to sys.stderr.

    Optional *args are applied to text with %-formatting. Suppressed when
    g.quiet is set.
    """
    if not g.quiet:
        if args:
            # BUGFIX: extra args were previously passed straight through to
            # sys.stderr.write(), which only accepts a single string and
            # would raise TypeError; treat them as %-format arguments instead.
            text = text % args
        sys.stderr.write(f'ERROR: {text}\n')
|
|
|
|
|
|
|
|
|
|
|
|
def warn(text: str, *args: str) -> None:
    """
    Writes WARNING: + text to sys.stderr.

    Optional *args are applied to text with %-formatting. Suppressed when
    g.quiet is set.
    """
    if not g.quiet:
        if args:
            # BUGFIX: extra args were previously passed straight through to
            # sys.stderr.write(), which only accepts a single string and
            # would raise TypeError; treat them as %-format arguments instead.
            text = text % args
        sys.stderr.write(f'WARNING: {text}\n')
|
|
|
|
|
|
|
|
|
|
|
|
def info(text: str, f: Optional[IO[str]]=None, *args: str) -> None:
    """
    Writes text to a stream specified by second arg, sys.stdout by default.

    Optional *args are applied to text with %-formatting. Suppressed when
    g.quiet is set.
    """
    if not g.quiet:
        if args:
            # BUGFIX: extra args were previously passed straight through to
            # f.write(), which only accepts a single string and would raise
            # TypeError; treat them as %-format arguments instead.
            text = text % args
        if f is None:
            f = sys.stdout
        f.write(f'{text}\n')
|
|
|
|
|
|
|
|
|
|
|
|
def print_hints_on_download_error(err: str) -> None:
    """
    Prints hint on download error. Tries to specify the message depending on the error.
    """
    # Collect the applicable hints first, then emit them in order.
    messages = ['Please make sure you have a working Internet connection.']

    if 'CERTIFICATE' in err:
        messages.append('Certificate issues are usually caused by an outdated certificate database on your computer.')
        messages.append('Please check the documentation of your operating system for how to upgrade it.')

        if sys.platform == 'darwin':
            messages.append('Running "./Install\\ Certificates.command" might be able to fix this issue.')

        messages.append(f'Running "{sys.executable} -m pip install --upgrade certifi" can also resolve this issue in some cases.')

    # Certificate issue on Windows can be hidden under different errors which might be even translated,
    # e.g. "[WinError -2146881269] ASN1 valor de tag inválido encontrado"
    if sys.platform == 'win32':
        messages.append('By downloading and using the offline installer from https://dl.espressif.com/dl/esp-idf '
                        'you might be able to work around this issue.')

    for message in messages:
        info(message)
|
|
|
|
|
|
|
|
|
2024-02-16 10:39:38 -05:00
|
|
|
# Platform string of the running interpreter, e.g. 'Linux-x86_64' or 'Windows-AMD64'.
PYTHON_PLATFORM = f'{platform.system()}-{platform.machine()}'

# Identifiers used in tools.json for different platforms.
PLATFORM_WIN32 = 'win32'
PLATFORM_WIN64 = 'win64'
PLATFORM_MACOS = 'macos'
PLATFORM_MACOS_ARM64 = 'macos-arm64'
PLATFORM_LINUX32 = 'linux-i686'
PLATFORM_LINUX64 = 'linux-amd64'
PLATFORM_LINUX_ARM32 = 'linux-armel'  # soft-float ARM EABI
PLATFORM_LINUX_ARMHF = 'linux-armhf'  # hard-float ARM EABI
PLATFORM_LINUX_ARM64 = 'linux-arm64'
|
|
|
|
|
|
|
|
|
2022-05-16 16:11:01 -04:00
|
|
|
class Platforms:
    """
    Mappings from various other names these platforms are known as, to the identifiers above.
    This includes strings produced from "platform.system() + '-' + platform.machine()", see PYTHON_PLATFORM
    definition above.
    The mapping also includes various strings used in release archives of xtensa-esp32-elf-gcc, OpenOCD, etc.
    """
    # NOTE: the docstring above used to be duplicated verbatim as comments here;
    # the duplication has been removed.
    PLATFORM_FROM_NAME = {
        # Windows
        PLATFORM_WIN32: PLATFORM_WIN32,
        'Windows-i686': PLATFORM_WIN32,
        'Windows-x86': PLATFORM_WIN32,
        'i686-w64-mingw32': PLATFORM_WIN32,
        PLATFORM_WIN64: PLATFORM_WIN64,
        'Windows-x86_64': PLATFORM_WIN64,
        'Windows-AMD64': PLATFORM_WIN64,
        'x86_64-w64-mingw32': PLATFORM_WIN64,
        'Windows-ARM64': PLATFORM_WIN64,
        # macOS
        PLATFORM_MACOS: PLATFORM_MACOS,
        'osx': PLATFORM_MACOS,
        'darwin': PLATFORM_MACOS,
        'Darwin-x86_64': PLATFORM_MACOS,
        'x86_64-apple-darwin': PLATFORM_MACOS,
        PLATFORM_MACOS_ARM64: PLATFORM_MACOS_ARM64,
        'Darwin-arm64': PLATFORM_MACOS_ARM64,
        'aarch64-apple-darwin': PLATFORM_MACOS_ARM64,
        'arm64-apple-darwin': PLATFORM_MACOS_ARM64,
        # Linux
        PLATFORM_LINUX64: PLATFORM_LINUX64,
        'linux64': PLATFORM_LINUX64,
        'Linux-x86_64': PLATFORM_LINUX64,
        'FreeBSD-amd64': PLATFORM_LINUX64,
        'x86_64-linux-gnu': PLATFORM_LINUX64,
        PLATFORM_LINUX32: PLATFORM_LINUX32,
        'linux32': PLATFORM_LINUX32,
        'Linux-i686': PLATFORM_LINUX32,
        'FreeBSD-i386': PLATFORM_LINUX32,
        'i586-linux-gnu': PLATFORM_LINUX32,
        'i686-linux-gnu': PLATFORM_LINUX32,
        PLATFORM_LINUX_ARM64: PLATFORM_LINUX_ARM64,
        'Linux-arm64': PLATFORM_LINUX_ARM64,
        'Linux-aarch64': PLATFORM_LINUX_ARM64,
        'Linux-armv8l': PLATFORM_LINUX_ARM64,
        'aarch64': PLATFORM_LINUX_ARM64,
        PLATFORM_LINUX_ARMHF: PLATFORM_LINUX_ARMHF,
        'arm-linux-gnueabihf': PLATFORM_LINUX_ARMHF,
        PLATFORM_LINUX_ARM32: PLATFORM_LINUX_ARM32,
        'arm-linux-gnueabi': PLATFORM_LINUX_ARM32,
        'Linux-armv7l': PLATFORM_LINUX_ARM32,
        'Linux-arm': PLATFORM_LINUX_ARM32,
    }

    # List of platforms that are not supported by ESP-IDF
    UNSUPPORTED_PLATFORMS = [
        'Linux-armv6l'
    ]

    @staticmethod
    def detect_linux_arm_platform(supposed_platform: Optional[str]) -> Optional[str]:
        """
        We probe the python binary to check exactly what environment the script is running in.

        ARM platform may run on armhf hardware but having armel installed packages.
        To avoid possible armel/armhf libraries mixing need to define user's
        packages architecture to use the same
        See note section in https://gcc.gnu.org/onlinedocs/gcc/ARM-Options.html#index-mfloat-abi

        ARM platform may run on aarch64 hardware but having armhf installed packages
        (it happens if a docker container is running on arm64 hardware, but using an armhf image).
        """
        if supposed_platform not in (PLATFORM_LINUX_ARM32, PLATFORM_LINUX_ARMHF, PLATFORM_LINUX_ARM64):
            return supposed_platform

        # suppose that installed python was built with the right ABI
        with open(sys.executable, 'rb') as f:
            # see ELF header description in https://man7.org/linux/man-pages/man5/elf.5.html, offsets depend on ElfN size
            if int.from_bytes(f.read(4), sys.byteorder) != int.from_bytes(b'\x7fELF', sys.byteorder):
                return supposed_platform  # ELF magic not found. Use the default platform name from PLATFORM_FROM_NAME
            f.seek(18)  # seek to e_machine
            e_machine = int.from_bytes(f.read(2), sys.byteorder)
            if e_machine == 183:  # EM_AARCH64, https://github.com/ARM-software/abi-aa/blob/main/aaelf64/aaelf64.rst
                supposed_platform = PLATFORM_LINUX_ARM64
            elif e_machine == 40:  # EM_ARM, https://github.com/ARM-software/abi-aa/blob/main/aaelf32/aaelf32.rst
                f.seek(36)  # seek to e_flags
                e_flags = int.from_bytes(f.read(4), sys.byteorder)
                if e_flags & 0x400:
                    supposed_platform = PLATFORM_LINUX_ARMHF
                else:
                    supposed_platform = PLATFORM_LINUX_ARM32

        return supposed_platform

    @staticmethod
    def get(platform_alias: Optional[str]) -> str:
        """
        Get a proper platform name based on PLATFORM_FROM_NAME dict.
        Raises ValueError for empty, unsupported, or unknown aliases.
        """
        if not platform_alias:
            raise ValueError('System platform could not be identified.')

        if platform_alias in Platforms.UNSUPPORTED_PLATFORMS:
            raise ValueError(f'Platform \'{platform_alias}\' is not supported by ESP-IDF.')

        if platform_alias == 'any' and CURRENT_PLATFORM:
            platform_alias = CURRENT_PLATFORM
        platform_name = Platforms.PLATFORM_FROM_NAME.get(platform_alias, None)
        if sys.platform == 'linux':
            # On Linux, double-check ARM variants against the interpreter's ELF ABI.
            platform_name = Platforms.detect_linux_arm_platform(platform_name)

        if not platform_name:
            raise ValueError(f'Support for platform \'{platform_alias}\' hasn\'t been added yet.')

        return platform_name

    @staticmethod
    def get_by_filename(file_name: str) -> str:
        """
        Guess the right platform based on the file name.
        """
        found_alias = ''
        for platform_alias in Platforms.PLATFORM_FROM_NAME:
            # Find the longest alias which matches with file name to avoid mismatching
            if platform_alias in file_name and len(found_alias) < len(platform_alias):
                found_alias = platform_alias
        return Platforms.get(found_alias)
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def parse_platform_arg(platform_str: str) -> str:
    """
    Parses platform from input string and checks whether it is a valid platform.
    If not, raises SystemExit exception with error message.
    """
    try:
        return Platforms.get(platform_str)
    except ValueError as e:
        # Report the reason and terminate with a non-zero exit status.
        fatal(str(e))
        raise SystemExit(1)
|
|
|
|
|
|
|
|
|
|
|
|
# Canonical identifier of the platform this script is running on;
# exits with an error (via parse_platform_arg) when the platform is unsupported.
CURRENT_PLATFORM = parse_platform_arg(PYTHON_PLATFORM)
|
|
|
|
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
# Identifiers of the output formats for exported environment variables
# (used elsewhere in this file to select how the export list is printed).
EXPORT_SHELL = 'shell'
EXPORT_KEY_VALUE = 'key-value'
|
|
|
|
|
2023-05-18 08:46:17 -04:00
|
|
|
# the older "DigiCert Global Root CA" certificate used with github.com
DIGICERT_ROOT_CA_CERT = """
-----BEGIN CERTIFICATE-----
MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
-----END CERTIFICATE-----
"""

# the newer "DigiCert Global Root G2" certificate used with dl.espressif.com
DIGICERT_ROOT_G2_CERT = """
-----BEGIN CERTIFICATE-----
MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
MrY=
-----END CERTIFICATE-----
"""

# Root certificates pinned per download host; download() loads the matching
# one into the SSL context to work around outdated system certificate stores.
DL_CERT_DICT = {'dl.espressif.com': DIGICERT_ROOT_G2_CERT,
                'github.com': DIGICERT_ROOT_CA_CERT}
|
|
|
|
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def run_cmd_check_output(cmd: List[str], input_text: Optional[str]=None, extra_paths: Optional[List[str]]=None) -> bytes:
    """
    Runs command and checks output for exceptions. If AttributeError or TypeError occurs, function re-runs the process.
    If return code was not 0, subprocess.CalledProcessError is raised, otherwise, the original error is masked.
    Returns both stdout and stderr of the run command.
    """
    # If extra_paths is given, locate the executable in one of these directories.
    # Note: it would seem logical to add extra_paths to env[PATH], instead, and let OS do the job of finding the
    # executable for us. However this does not work on Windows: https://bugs.python.org/issue8557.
    if extra_paths:
        found = False
        extensions = ['']
        if sys.platform == 'win32':
            # On Windows also try the executable name with an .exe suffix.
            extensions.append('.exe')
        for path in extra_paths:
            for ext in extensions:
                fullpath = os.path.join(path, cmd[0] + ext)
                if os.path.exists(fullpath):
                    # NOTE: this rewrites cmd[0] in place, i.e. mutates the caller's list.
                    cmd[0] = fullpath
                    found = True
                    break
            if found:
                break

    try:
        input_bytes = None
        if input_text:
            input_bytes = input_text.encode()
        result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True, input=input_bytes)
        return result.stdout + result.stderr
    except (AttributeError, TypeError):
        # Fallback for old Python versions whose subprocess.run() does not
        # accept the keyword arguments used above (historically seen as
        # "TypeError: __init__() got an unexpected keyword argument" on 3.5):
        # re-run the command through subprocess.Popen instead.
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = p.communicate(input_bytes)
        if p.returncode != 0:
            try:
                raise subprocess.CalledProcessError(p.returncode, cmd, stdout, stderr)
            except TypeError:
                # Very old CalledProcessError signatures do not accept the stderr argument.
                raise subprocess.CalledProcessError(p.returncode, cmd, stdout)
        return stdout + stderr
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def to_shell_specific_paths(paths_list: List[str]) -> List[str]:
    """
    Converts / (linux) to \\ (Windows) if called under win32 platform.
    On other platforms the list is returned unchanged.
    """
    if sys.platform != 'win32':
        return paths_list

    # Only rewrite entries that already contain the native separator,
    # matching the original behavior.
    return [entry.replace('/', os.path.sep) if os.path.sep in entry else entry
            for entry in paths_list]
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_env_for_extra_paths(extra_paths: List[str]) -> Dict[str, str]:
    """
    Return a copy of environment variables dict, prepending paths listed in extra_paths
    to the PATH environment variable.
    """
    env_copy = os.environ.copy()
    prepended_path = os.pathsep.join(extra_paths) + os.pathsep + env_copy['PATH']
    if sys.version_info.major == 2:
        # Python 2 requires byte strings in the environment mapping.
        env_copy['PATH'] = prepended_path.encode('utf8')  # type: ignore
    else:
        env_copy['PATH'] = prepended_path
    return env_copy
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_file_size_sha256(filename: str, block_size: int=65536) -> Tuple[int, str]:
    """
    Gets file size and its sha256.
    Reads the file in chunks of block_size bytes so large files do not
    need to fit in memory.
    """
    digest = hashlib.sha256()
    total_size = 0
    with open(filename, 'rb') as stream:
        while True:
            chunk = stream.read(block_size)
            if not chunk:
                break
            digest.update(chunk)
            total_size += len(chunk)
    return total_size, digest.hexdigest()
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def report_progress(count: int, block_size: int, total_size: int) -> None:
    """
    Prints progress (count * block_size * 100 / total_size) to stdout.
    The percentage is capped at 100 and rewritten on the same line ('\\r').
    """
    percent = min(100, int(count * block_size * 100 / total_size))
    sys.stdout.write('\r%d%%' % percent)
    sys.stdout.flush()
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def mkdir_p(path: str) -> None:
    """
    Makes directory in given path.
    Suppresses error when directory is already created or path is a path to file.
    """
    try:
        os.makedirs(path)
    except OSError as exc:
        # Tolerate only "already exists as a directory"; anything else propagates.
        already_a_dir = exc.errno == errno.EEXIST and os.path.isdir(path)
        if not already_a_dir:
            raise
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def unpack(filename: str, destination: str) -> None:
    """
    Extracts file specified by filename into destination depending on its type.
    Supports .tar.gz/.tgz, .tar.xz, .tar.bz2 and .zip archives; raises
    NotImplementedError for anything else.
    """
    # BUGFIX: the log line previously printed the literal text '(unknown)'
    # instead of the archive file name.
    info(f'Extracting {filename} to {destination}')
    if filename.endswith(('.tar.gz', '.tgz')):
        archive_obj: Union[TarFile, ZipFile] = tarfile.open(filename, 'r:gz')
    elif filename.endswith(('.tar.xz')):
        archive_obj = tarfile.open(filename, 'r:xz')
    elif filename.endswith(('.tar.bz2')):
        archive_obj = tarfile.open(filename, 'r:bz2')
    elif filename.endswith('zip'):
        archive_obj = ZipFile(filename)
    else:
        raise NotImplementedError('Unsupported archive type')
    if sys.version_info.major == 2:
        # This is a workaround for the issue that unicode destination is not handled:
        # https://bugs.python.org/issue17153
        destination = str(destination)
    archive_obj.extractall(destination)
    # ZipFile on Unix systems does not preserve file permissions while extracting it
    # We need to reset the permissions afterward
    if sys.platform != 'win32' and filename.endswith('zip') and isinstance(archive_obj, ZipFile):
        for file_info in archive_obj.infolist():
            extracted_file = os.path.join(destination, file_info.filename)
            extracted_permissions = file_info.external_attr >> 16 & 0o777  # Extract Unix permissions
            if os.path.exists(extracted_file):
                os.chmod(extracted_file, extracted_permissions)
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def splittype(url: str) -> Tuple[Optional[str], str]:
    """
    Splits given url into its type (e.g. https, file) and the rest.
    The scheme is returned lowercased; (None, url) is returned when
    no scheme prefix is present.
    """
    parsed = re.match('([^/:]+):(.*)', url, re.DOTALL)
    if parsed is None:
        return None, url
    scheme, remainder = parsed.groups()
    return scheme.lower(), remainder
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def urlretrieve_ctx(url: str,
                    filename: str,
                    reporthook: Optional[Callable[[int, int, int], None]]=None,
                    data: Optional[bytes]=None,
                    context: Optional[SSLContext]=None) -> Tuple[str, addinfourl]:
    """
    Retrieve data from given URL. An alternative version of urlretrieve which takes SSL context as an argument.

    Returns a (local_filename, headers) tuple. reporthook, when given, is
    called as reporthook(block_number, block_size, total_size) before the
    transfer and after every block. Raises ContentTooShortError when fewer
    bytes than advertised by Content-Length were received.
    """
    url_type, path = splittype(url)

    # urlopen doesn't have context argument in Python <=2.7.9
    extra_urlopen_args = {}
    if context:
        extra_urlopen_args['context'] = context
    with contextlib.closing(urlopen(url, data, **extra_urlopen_args)) as fp:  # type: ignore
        headers = fp.info()

        # Just return the local path and the "headers" for file://
        # URLs. No sense in performing a copy unless requested.
        if url_type == 'file' and not filename:
            return os.path.normpath(path), headers

        # Handle temporary file setup.
        tfp = open(filename, 'wb')

        with tfp:
            result = filename, headers
            bs = 1024 * 8
            # Content-Length of -1 means "size unknown"; the size check below is skipped.
            size = int(headers.get('content-length', -1))
            read = 0
            blocknum = 0

            if reporthook:
                reporthook(blocknum, bs, size)

            while True:
                block = fp.read(bs)
                if not block:
                    break
                read += len(block)
                tfp.write(block)
                blocknum += 1
                if reporthook:
                    reporthook(blocknum, bs, size)

            if size >= 0 and read < size:
                raise ContentTooShortError(
                    'retrieval incomplete: got only %i out of %i bytes'
                    % (read, size), result)

        return result
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def download(url: str, destination: str) -> Union[None, Exception]:
    """
    Download from given url and save into given destination.

    Returns None on success, or the caught exception object on failure
    (the caller is responsible for inspecting/reporting it).
    """
    info(f'Downloading {url}')
    info(f'Destination: {destination}')
    try:
        for site, cert in DL_CERT_DICT.items():
            # For dl.espressif.com and github.com, add the DigiCert root certificate.
            # This works around the issue with outdated certificate stores in some installations.
            if site in url:
                ctx = ssl.create_default_context()
                ctx.load_verify_locations(cadata=cert)
                break
        else:
            # No pinned certificate for this host: use the default SSL behavior.
            ctx = None

        # Progress reporting is suppressed in non-interactive mode.
        urlretrieve_ctx(url, destination, report_progress if not g.non_interactive else None, context=ctx)
        sys.stdout.write('\rDone\n')
        return None
    except Exception as e:
        # urlretrieve could throw different exceptions, e.g. IOError when the server is down
        return e
    finally:
        sys.stdout.flush()
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def rename_with_retry(path_from: str, path_to: str) -> None:
    """
    Rename a path, retrying on Windows where a PermissionError can occur
    (seemingly at random) while renaming a directory.
    This is confirmed to be a workaround:
    https://github.com/espressif/esp-idf/issues/3819#issuecomment-515167118
    https://github.com/espressif/esp-idf/issues/4063#issuecomment-531490140
    https://stackoverflow.com/a/43046729
    """
    # Only Windows needs the retry dance; elsewhere a single attempt suffices.
    max_attempts = 20 if sys.platform.startswith('win') else 1
    for attempt in range(max_attempts):
        try:
            os.rename(path_from, path_to)
            return
        except OSError:
            msg = f'Rename {path_from} to {path_to} failed'
            if attempt == max_attempts - 1:
                fatal(f'{msg}. Antivirus software might be causing this. Disabling it temporarily could solve the issue.')
                raise
            warn(f'{msg}, retrying...')
            # Sleep before the next try in order to pass the antivirus check on Windows
            time.sleep(0.5)
|
2019-09-23 00:01:09 -04:00
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def do_strip_container_dirs(path: str, levels: int) -> None:
    """
    The number of top directory levels specified by levels argument will be removed when extracting.
    E.g. if levels=2, archive path a/b/c/d.txt will be extracted as c/d.txt.

    Raises RuntimeError if any stripped level does not contain exactly one directory.
    """
    assert levels > 0
    # move the original directory out of the way (add a .tmp suffix)
    tmp_path = f'{path}.tmp'
    if os.path.exists(tmp_path):
        shutil.rmtree(tmp_path)
    rename_with_retry(path, tmp_path)
    os.mkdir(path)
    base_path = tmp_path
    # walk given number of levels down
    for level in range(levels):
        contents = os.listdir(base_path)
        # Fix: the original check was `len(contents) > 1`, which let an empty
        # directory fall through to an opaque IndexError on `contents[0]`.
        # Require exactly one entry so both cases produce a clear error.
        if len(contents) != 1:
            raise RuntimeError(f'at level {level}, expected 1 entry, got {contents}')
        base_path = os.path.join(base_path, contents[0])
        if not os.path.isdir(base_path):
            raise RuntimeError(f'at level {level}, {contents[0]} is not a directory')
    # get the list of directories/files to move
    contents = os.listdir(base_path)
    for name in contents:
        move_from = os.path.join(base_path, name)
        move_to = os.path.join(path, name)
        rename_with_retry(move_from, move_to)
    shutil.rmtree(tmp_path)
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
class ToolNotFoundError(RuntimeError):
    """
    Raised when the tool cannot be located (e.g. it is not present in the searched paths).
    """
|
|
|
|
|
|
|
|
|
|
|
|
class ToolExecError(RuntimeError):
    """
    Raised when the tool exits with a non-zero return code.
    """
|
|
|
|
|
|
|
|
|
2023-12-05 22:27:18 -05:00
|
|
|
class ToolBinaryError(RuntimeError):
    """
    Raise when an error occurred when running any version of the tool.
    """
    # Fix: the docstring previously opened with four quotes (""""), which made
    # the docstring text itself start with a stray '"' character.
    pass
|
|
|
|
|
|
|
|
|
2019-04-17 03:30:30 -04:00
|
|
|
class IDFToolDownload(object):
    """
    Plain record describing one downloadable archive of a tool:
    target platform, URL, archive size, sha256 checksum, and an optional
    name to rename the downloaded file to.
    """

    def __init__(self, platform_name: str, url: str, size: int, sha256: str, rename_dist: str) -> None:
        self.platform_name = platform_name  # platform this archive is built for
        self.url = url                      # where to fetch the archive from
        self.size = size                    # expected size in bytes
        self.sha256 = sha256                # expected sha256 hex digest
        self.rename_dist = rename_dist      # local filename override ('' = keep basename of url)
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
|
|
|
|
@functools.total_ordering
class IDFToolVersion(object):
    """
    Describes one version of a tool: its status (recommended, supported,
    deprecated) and the per-platform download records. Provides ordering
    between versions and platform-compatibility queries.
    """
    STATUS_RECOMMENDED = 'recommended'
    STATUS_SUPPORTED = 'supported'
    STATUS_DEPRECATED = 'deprecated'

    STATUS_VALUES = [STATUS_RECOMMENDED, STATUS_SUPPORTED, STATUS_DEPRECATED]

    def __init__(self, version: str, status: str) -> None:
        self.version = version
        self.status = status
        self.downloads: OrderedDict[str, IDFToolDownload] = OrderedDict()
        self.latest = False

    def __lt__(self, other: 'IDFToolVersion') -> bool:
        # Equal statuses compare by version string; otherwise the status
        # strings themselves are compared (inverted).
        if self.status == other.status:
            assert not (self.status == IDFToolVersion.STATUS_RECOMMENDED
                        and other.status == IDFToolVersion.STATUS_RECOMMENDED)
            return self.version < other.version
        return self.status > other.status

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, IDFToolVersion):
            return NotImplemented
        return (self.status, self.version) == (other.status, other.version)

    def add_download(self, platform_name: str, url: str, size: int, sha256: str, rename_dist: str = '') -> None:
        """
        Add download entry of type IDFToolDownload into self.downloads.
        """
        self.downloads[platform_name] = IDFToolDownload(platform_name, url, size, sha256, rename_dist)

    def get_download_for_platform(self, platform_name: Optional[str]) -> Optional[IDFToolDownload]:
        """
        Return the download for the given platform if one exists, falling back
        to the 'any' platform entry; None when neither is available.
        """
        try:
            canonical = Platforms.get(platform_name)
            if canonical in self.downloads:
                return self.downloads[canonical]
        except ValueError:
            # unknown platform names are handled silently; the 'any'
            # fallback below may still apply
            pass
        return self.downloads.get('any')

    def compatible_with_platform(self, platform_name: Optional[str] = PYTHON_PLATFORM) -> bool:
        """
        Check whether this version is compatible with given platform name.
        """
        return self.get_download_for_platform(platform_name) is not None

    def get_supported_platforms(self) -> Set[str]:
        """
        Get all platforms for which this version has a valid download record.
        """
        return set(self.downloads.keys())
|
|
|
|
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2021-06-21 04:22:54 -04:00
|
|
|
# Per-tool options as parsed from tools.json (possibly adjusted by
# platform-specific overrides).
IDFToolOptions = namedtuple('IDFToolOptions', [
    'version_cmd', 'version_regex', 'version_regex_replace', 'is_executable',
    'export_paths', 'export_vars', 'install', 'info_url', 'license',
    'strip_container_dirs', 'supported_targets'])
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
|
|
|
|
class IDFTool(object):
    """
    Used to store info about IDF tools from tools.json file in a Python-accessible form.
    The term "IDF tool" is used for e.g. CMake, ninja, QEMU and toolchains.
    """
    # possible values of 'install' field
    INSTALL_ALWAYS = 'always'
    INSTALL_ON_REQUEST = 'on_request'
    INSTALL_NEVER = 'never'
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def __init__(self, name: str,
|
|
|
|
description: str,
|
|
|
|
install: str,
|
|
|
|
info_url: str,
|
|
|
|
license: str,
|
|
|
|
version_cmd: List[str],
|
|
|
|
version_regex: str,
|
|
|
|
supported_targets: List[str],
|
|
|
|
version_regex_replace: Optional[str] = None,
|
|
|
|
strip_container_dirs: int = 0,
|
|
|
|
is_executable: bool = True) -> None:
|
2019-04-17 03:30:30 -04:00
|
|
|
self.name = name
|
|
|
|
self.description = description
|
2022-03-15 15:02:37 -04:00
|
|
|
self.drop_versions()
|
2024-01-19 08:19:12 -05:00
|
|
|
self.version_in_path: Optional[str] = None
|
|
|
|
self.versions_installed: List[str] = []
|
2019-04-17 03:30:30 -04:00
|
|
|
if version_regex_replace is None:
|
|
|
|
version_regex_replace = VERSION_REGEX_REPLACE_DEFAULT
|
2022-08-15 10:47:24 -04:00
|
|
|
self.options = IDFToolOptions(version_cmd, version_regex, version_regex_replace, is_executable,
|
2021-04-26 15:34:48 -04:00
|
|
|
[], OrderedDict(), install, info_url, license, strip_container_dirs, supported_targets) # type: ignore
|
2024-01-19 08:19:12 -05:00
|
|
|
self.platform_overrides: List[Dict[str, str]] = []
|
2019-07-18 11:20:40 -04:00
|
|
|
self._platform = CURRENT_PLATFORM
|
2019-04-17 03:30:30 -04:00
|
|
|
self._update_current_options()
|
2022-08-15 10:47:24 -04:00
|
|
|
self.is_executable = is_executable
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def copy_for_platform(self, platform: str) -> 'IDFTool':
|
|
|
|
"""
|
|
|
|
Copy the IDFTool record in respect to given platform (e.g. apply platform overrides).
|
|
|
|
"""
|
2019-07-18 11:20:40 -04:00
|
|
|
result = copy.deepcopy(self)
|
|
|
|
result._platform = platform
|
|
|
|
result._update_current_options()
|
|
|
|
return result
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def _update_current_options(self) -> None:
|
|
|
|
"""
|
|
|
|
Update current options by platform overrides, if applicable for current platform.
|
|
|
|
"""
|
2019-04-17 03:30:30 -04:00
|
|
|
self._current_options = IDFToolOptions(*self.options)
|
|
|
|
for override in self.platform_overrides:
|
2022-05-16 16:11:01 -04:00
|
|
|
if self._platform and self._platform not in override['platforms']:
|
2019-04-17 03:30:30 -04:00
|
|
|
continue
|
|
|
|
override_dict = override.copy()
|
|
|
|
del override_dict['platforms']
|
2021-03-04 03:53:47 -05:00
|
|
|
self._current_options = self._current_options._replace(**override_dict) # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
    def drop_versions(self) -> None:
        """
        Clear self.versions dictionary.
        """
        # OrderedDict preserves the order in which versions are later added
        # via add_version().
        self.versions: Dict[str, IDFToolVersion] = OrderedDict()
|
2022-03-15 15:02:37 -04:00
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def add_version(self, version: IDFToolVersion) -> None:
|
|
|
|
"""
|
|
|
|
Add new IDFVersion to self.versions.
|
|
|
|
"""
|
2022-08-10 03:01:57 -04:00
|
|
|
assert type(version) is IDFToolVersion
|
2019-04-17 03:30:30 -04:00
|
|
|
self.versions[version.version] = version
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_path(self) -> str:
|
|
|
|
"""
|
|
|
|
Returns path where the tool is installed.
|
|
|
|
"""
|
2024-02-06 05:16:24 -05:00
|
|
|
return os.path.join(g.idf_tools_path, 'tools', self.name)
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_path_for_version(self, version: str) -> str:
|
|
|
|
"""
|
|
|
|
Returns path for the tool of given version.
|
|
|
|
"""
|
2022-08-10 03:01:57 -04:00
|
|
|
assert version in self.versions
|
2019-04-17 03:30:30 -04:00
|
|
|
return os.path.join(self.get_path(), version)
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_export_paths(self, version: str) -> List[str]:
|
|
|
|
"""
|
|
|
|
Returns a list of paths that need to be exported.
|
|
|
|
"""
|
2019-04-17 03:30:30 -04:00
|
|
|
tool_path = self.get_path_for_version(version)
|
2021-03-04 03:53:47 -05:00
|
|
|
return [os.path.join(tool_path, *p) for p in self._current_options.export_paths] # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_export_vars(self, version: str) -> Dict[str, str]:
|
2019-04-17 03:30:30 -04:00
|
|
|
"""
|
|
|
|
Get the dictionary of environment variables to be exported, for the given version.
|
|
|
|
Expands:
|
2024-01-19 08:19:12 -05:00
|
|
|
- ${TOOL_PATH} => the actual path where the version is installed.
|
2019-04-17 03:30:30 -04:00
|
|
|
"""
|
|
|
|
result = {}
|
2021-03-04 03:53:47 -05:00
|
|
|
for k, v in self._current_options.export_vars.items(): # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
replace_path = self.get_path_for_version(version).replace('\\', '\\\\')
|
|
|
|
v_repl = re.sub(SUBST_TOOL_PATH_REGEX, replace_path, v)
|
|
|
|
if v_repl != v:
|
|
|
|
v_repl = to_shell_specific_paths([v_repl])[0]
|
2022-08-15 10:47:24 -04:00
|
|
|
old_v = os.environ.get(k)
|
|
|
|
if old_v is None or old_v != v_repl:
|
|
|
|
result[k] = v_repl
|
2019-04-17 03:30:30 -04:00
|
|
|
return result
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_version(self, extra_paths: Optional[List[str]] = None, executable_path: Optional[str] = None) -> str:
|
2019-04-17 03:30:30 -04:00
|
|
|
"""
|
|
|
|
Execute the tool, optionally prepending extra_paths to PATH,
|
|
|
|
extract the version string and return it as a result.
|
2024-01-19 08:19:12 -05:00
|
|
|
Raises ToolNotFoundError if the tool is not found (not present in the paths).
|
2019-04-17 03:30:30 -04:00
|
|
|
Raises ToolExecError if the tool returns with a non-zero exit code.
|
|
|
|
Returns 'unknown' if tool returns something from which version string
|
|
|
|
can not be extracted.
|
|
|
|
"""
|
2019-07-18 11:20:40 -04:00
|
|
|
# this function can not be called for a different platform
|
|
|
|
assert self._platform == CURRENT_PLATFORM
|
2021-03-04 03:53:47 -05:00
|
|
|
cmd = self._current_options.version_cmd # type: ignore
|
2022-08-31 16:59:15 -04:00
|
|
|
if executable_path:
|
|
|
|
cmd[0] = executable_path
|
2023-07-24 10:19:30 -04:00
|
|
|
|
|
|
|
if not cmd[0]:
|
|
|
|
# There is no command available, so return early. It seems that
|
|
|
|
# within some very strange context empty [''] may actually execute
|
|
|
|
# something https://github.com/espressif/esp-idf/issues/11880
|
2024-02-16 10:39:38 -05:00
|
|
|
raise ToolNotFoundError(f'Tool {self.name} not found')
|
2023-07-24 10:19:30 -04:00
|
|
|
|
2019-04-17 03:30:30 -04:00
|
|
|
try:
|
|
|
|
version_cmd_result = run_cmd_check_output(cmd, None, extra_paths)
|
2024-01-16 00:56:49 -05:00
|
|
|
except OSError as e:
|
2019-04-17 03:30:30 -04:00
|
|
|
# tool is not on the path
|
2024-01-16 00:56:49 -05:00
|
|
|
raise ToolNotFoundError(f'Tool {self.name} not found with error: {e}')
|
2019-04-17 03:30:30 -04:00
|
|
|
except subprocess.CalledProcessError as e:
|
2024-01-16 00:56:49 -05:00
|
|
|
raise ToolExecError(f'non-zero exit code ({e.returncode}) with message: {e.stderr.decode("utf-8",errors="ignore")}') # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2021-01-25 21:49:01 -05:00
|
|
|
in_str = version_cmd_result.decode('utf-8')
|
2021-03-04 03:53:47 -05:00
|
|
|
match = re.search(self._current_options.version_regex, in_str) # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
if not match:
|
|
|
|
return UNKNOWN_VERSION
|
2021-03-04 03:53:47 -05:00
|
|
|
return re.sub(self._current_options.version_regex, self._current_options.version_regex_replace, match.group(0)) # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2024-01-16 00:56:49 -05:00
|
|
|
def check_binary_valid(self, version: str) -> bool:
|
|
|
|
if not self.is_executable:
|
|
|
|
return True
|
|
|
|
try:
|
|
|
|
ver_str = self.get_version(self.get_export_paths(version))
|
|
|
|
except (ToolNotFoundError, ToolExecError) as e:
|
|
|
|
fatal(f'tool {self.name} version {version} is installed, but getting error: {e}')
|
|
|
|
return False
|
|
|
|
if ver_str != version:
|
|
|
|
# just print, state is still valid
|
|
|
|
warn(f'tool {self.name} version {version} is installed, but reporting version {ver_str}')
|
|
|
|
return True
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def check_version(self, executable_path: Optional[str]) -> bool:
|
|
|
|
"""
|
|
|
|
Check if tool's version from executable path is in self.version dictionary.
|
|
|
|
"""
|
2022-08-31 16:59:15 -04:00
|
|
|
version = self.get_version(executable_path=executable_path)
|
|
|
|
return version in self.versions
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
    def get_install_type(self) -> str:
        """
        Return the current 'install' option: one of INSTALL_ALWAYS,
        INSTALL_ON_REQUEST or INSTALL_NEVER (i.e. whether the tool is
        installed always, on request or never).
        """
        # NOTE(review): the annotation previously claimed Callable[[str], None],
        # but the 'install' field holds one of the string constants above.
        return self._current_options.install  # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_supported_targets(self) -> List[str]:
|
|
|
|
"""
|
|
|
|
Returns list of supported targets with current options.
|
|
|
|
"""
|
2021-04-26 15:34:48 -04:00
|
|
|
return self._current_options.supported_targets # type: ignore
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def is_supported_for_any_of_targets(self, targets: List[str]) -> bool:
|
2023-09-14 01:55:24 -04:00
|
|
|
"""
|
|
|
|
Checks whether the tool is suitable for at least one of the specified targets.
|
|
|
|
"""
|
|
|
|
supported_targets = self.get_supported_targets()
|
|
|
|
return (any(item in targets for item in supported_targets) or supported_targets == ['all'])
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def compatible_with_platform(self) -> bool:
|
|
|
|
"""
|
|
|
|
Checks whether this tool (any version) is compatible with the platform.
|
|
|
|
"""
|
2019-07-23 00:55:30 -04:00
|
|
|
return any([v.compatible_with_platform() for v in self.versions.values()])
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_supported_platforms(self) -> Set[str]:
|
|
|
|
"""
|
|
|
|
Return set of platforms that are supported by at least one version of the tool.
|
|
|
|
"""
|
2020-01-17 11:38:50 -05:00
|
|
|
result = set()
|
|
|
|
for v in self.versions.values():
|
|
|
|
result.update(v.get_supported_platforms())
|
|
|
|
return result
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_recommended_version(self) -> Optional[str]:
|
|
|
|
"""
|
2024-01-16 00:56:49 -05:00
|
|
|
Get all recommended versions of the tool. If more versions are recommended, highest version is returned.
|
2024-01-19 08:19:12 -05:00
|
|
|
"""
|
2019-04-17 03:30:30 -04:00
|
|
|
recommended_versions = [k for k, v in self.versions.items()
|
|
|
|
if v.status == IDFToolVersion.STATUS_RECOMMENDED
|
2019-07-18 11:20:40 -04:00
|
|
|
and v.compatible_with_platform(self._platform)]
|
2019-04-17 03:30:30 -04:00
|
|
|
assert len(recommended_versions) <= 1
|
|
|
|
if recommended_versions:
|
|
|
|
return recommended_versions[0]
|
|
|
|
return None
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_preferred_installed_version(self) -> Optional[str]:
|
|
|
|
"""
|
|
|
|
Get the preferred installed version of the tool. If more versions installed, return the highest.
|
|
|
|
"""
|
2019-04-17 03:30:30 -04:00
|
|
|
recommended_versions = [k for k in self.versions_installed
|
|
|
|
if self.versions[k].status == IDFToolVersion.STATUS_RECOMMENDED
|
2019-07-18 11:20:40 -04:00
|
|
|
and self.versions[k].compatible_with_platform(self._platform)]
|
2019-04-17 03:30:30 -04:00
|
|
|
assert len(recommended_versions) <= 1
|
|
|
|
if recommended_versions:
|
|
|
|
return recommended_versions[0]
|
|
|
|
return None
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
    def find_installed_versions(self) -> None:
        """
        Checks whether the tool can be found in PATH and in GlobalVarsStore.idf_tools_path.
        Writes results to self.version_in_path and self.versions_installed.

        Raises ToolBinaryError if an error occurred when running any version of the tool.
        """
        # this function can not be called for a different platform
        assert self._platform == CURRENT_PLATFORM
        # Execution errors are collected in this flag and raised once at the
        # end, so every installed version still gets checked.
        tool_error = False
        # First check if the tool is in system PATH
        try:
            ver_str = self.get_version()
        except ToolNotFoundError:
            # not in PATH
            pass
        except ToolExecError as e:
            fatal(f'tool {self.name} is found in PATH, but has failed: {e}')
            tool_error = True
        else:
            self.version_in_path = ver_str

        # Now check all the versions installed in GlobalVarsStore.idf_tools_path
        self.versions_installed = []
        for version, version_obj in self.versions.items():
            if not version_obj.compatible_with_platform():
                continue
            tool_path = self.get_path_for_version(version)
            if not os.path.exists(tool_path):
                # version not installed
                continue
            if not self.is_executable:
                # non-executable tools cannot be probed; presence of the
                # directory is enough to count them as installed
                self.versions_installed.append(version)
                continue
            try:
                ver_str = self.get_version(self.get_export_paths(version))
            except ToolNotFoundError as e:
                warn(f'directory for tool {self.name} version {version} is present, but the tool has not been found: {e}')
            except ToolExecError as e:
                fatal(f'tool {self.name} version {version} is installed, but cannot be run: {e}')
                tool_error = True
            else:
                # only count the version as installed when the binary reports
                # exactly the expected version string
                if ver_str != version:
                    warn(f'tool {self.name} version {version} is installed, but has reported version {ver_str}')
                else:
                    self.versions_installed.append(version)
        if tool_error:
            raise ToolBinaryError
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
    def latest_installed_version(self) -> Optional[str]:
        """
        Get the latest installed tool version by directly checking the
        tool's version directories.
        """
        tool_path = self.get_path()
        if not os.path.exists(tool_path):
            return None
        dentries = os.listdir(tool_path)
        dirs = [d for d in dentries if os.path.isdir(os.path.join(tool_path, d))]
        # NOTE(review): sorted() compares version directory names
        # lexicographically (e.g. '10.x' sorts before '9.x') — confirm this
        # ordering is acceptable for the version schemes used here.
        for version in sorted(dirs, reverse=True):
            # get_path_for_version() has assert to check if version is in versions
            # dict, so get_export_paths() cannot be used. Let's just create the
            # export paths list directly here.
            paths = [os.path.join(tool_path, version, *p) for p in self._current_options.export_paths]
            try:
                ver_str = self.get_version(paths)
            except (ToolNotFoundError, ToolExecError):
                continue
            if ver_str != version:
                continue
            return version

        return None
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
    def download(self, version: str) -> None:
        """
        Download archive of the tool for platform given by self._platform.

        Reuses an already-downloaded archive when its size/sha256 check passes.
        Exits via SystemExit(1) when no package exists for the platform or
        when all download retries fail.
        """
        assert version in self.versions
        download_obj = self.versions[version].get_download_for_platform(self._platform)
        if not download_obj:
            fatal(f'No packages for tool {self.name} platform {self._platform}!')
            raise SystemExit(1)

        url = download_obj.url
        archive_name = download_obj.rename_dist if download_obj.rename_dist else os.path.basename(url)
        local_path = os.path.join(g.idf_tools_path, 'dist', archive_name)
        mkdir_p(os.path.dirname(local_path))

        # A previously downloaded archive is reused only if it still passes
        # the size/checksum verification; otherwise it is re-fetched.
        if os.path.isfile(local_path):
            if not self.check_download_file(download_obj, local_path):
                warn(f'removing downloaded file {archive_name} and downloading again')
                os.unlink(local_path)
            else:
                info(f'file {archive_name} is already downloaded')
                return

        downloaded = False
        # Download into a .tmp file and rename only after verification, so a
        # partial download never masquerades as a finished archive.
        local_temp_path = f'{local_path}.tmp'
        for retry in range(DOWNLOAD_RETRY_COUNT):
            # module-level download() returns None on success, or the exception
            err = download(url, local_temp_path)
            if not os.path.isfile(local_temp_path) or not self.check_download_file(download_obj, local_temp_path):
                warn(f'Download failure: {err}')
                warn(f'Failed to download {url} to {local_temp_path}')
                continue
            rename_with_retry(local_temp_path, local_path)
            downloaded = True
            break
        if not downloaded:
            fatal('Failed to download, and retry count has expired')
            # err holds the result of the last attempt in the loop above
            print_hints_on_download_error(str(err))
            raise SystemExit(1)
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def install(self, version: str) -> None:
|
|
|
|
"""
|
|
|
|
Unpack archive to destination directory and remove given number of top-level folder if specified.
|
|
|
|
Should always be called after IDFTool.download().
|
|
|
|
"""
|
2019-04-17 03:30:30 -04:00
|
|
|
# Currently this is called after calling 'download' method, so here are a few asserts
|
|
|
|
# for the conditions which should be true once that method is done.
|
2022-08-10 03:01:57 -04:00
|
|
|
assert version in self.versions
|
2019-07-18 11:20:40 -04:00
|
|
|
download_obj = self.versions[version].get_download_for_platform(self._platform)
|
2022-08-10 03:01:57 -04:00
|
|
|
assert download_obj is not None
|
2023-09-01 11:07:41 -04:00
|
|
|
archive_name = download_obj.rename_dist if download_obj.rename_dist else os.path.basename(download_obj.url)
|
2024-02-06 05:16:24 -05:00
|
|
|
archive_path = os.path.join(g.idf_tools_path, 'dist', archive_name)
|
2022-08-10 03:01:57 -04:00
|
|
|
assert os.path.isfile(archive_path)
|
2019-04-17 03:30:30 -04:00
|
|
|
dest_dir = self.get_path_for_version(version)
|
|
|
|
if os.path.exists(dest_dir):
|
|
|
|
warn('destination path already exists, removing')
|
|
|
|
shutil.rmtree(dest_dir)
|
|
|
|
mkdir_p(dest_dir)
|
|
|
|
unpack(archive_path, dest_dir)
|
2021-03-04 03:53:47 -05:00
|
|
|
if self._current_options.strip_container_dirs: # type: ignore
|
2022-08-15 10:47:24 -04:00
|
|
|
do_strip_container_dirs(dest_dir, self._current_options.strip_container_dirs) # type: ignore
|
2024-01-16 00:56:49 -05:00
|
|
|
if not self.check_binary_valid(version):
|
|
|
|
fatal(f'Failed to check the tool while installed. Removing directory {dest_dir}')
|
|
|
|
shutil.rmtree(dest_dir)
|
|
|
|
raise SystemExit(1)
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
@staticmethod
|
2024-01-19 08:19:12 -05:00
|
|
|
def check_download_file(download_obj: IDFToolDownload, local_path: str) -> bool:
|
|
|
|
"""
|
|
|
|
Compare the computed sha256 to sha256 specified by downloaded archive.
|
|
|
|
"""
|
2019-04-17 03:30:30 -04:00
|
|
|
expected_sha256 = download_obj.sha256
|
|
|
|
expected_size = download_obj.size
|
|
|
|
file_size, file_sha256 = get_file_size_sha256(local_path)
|
|
|
|
if file_size != expected_size:
|
2024-02-16 10:39:38 -05:00
|
|
|
warn(f'file size mismatch for {local_path}, expected {expected_size}, got {file_size}')
|
2019-04-17 03:30:30 -04:00
|
|
|
return False
|
|
|
|
if file_sha256 != expected_sha256:
|
2024-02-16 10:39:38 -05:00
|
|
|
warn(f'hash mismatch for {local_path}, expected {expected_sha256}, got {file_sha256}')
|
2019-04-17 03:30:30 -04:00
|
|
|
return False
|
|
|
|
return True
|
|
|
|
|
|
|
|
@classmethod
|
2024-01-19 08:19:12 -05:00
|
|
|
def from_json(cls, tool_dict: Dict[str, Union[str, List[str], Dict[str, str]]]) -> 'IDFTool':
|
|
|
|
"""
|
|
|
|
Create IDFTool class instance form its JSON dump.
|
|
|
|
"""
|
2019-04-17 03:30:30 -04:00
|
|
|
# Validate json fields
|
2021-03-04 03:53:47 -05:00
|
|
|
tool_name = tool_dict.get('name') # type: ignore
|
2022-08-15 10:47:24 -04:00
|
|
|
if not isinstance(tool_name, str):
|
2019-04-17 03:30:30 -04:00
|
|
|
raise RuntimeError('tool_name is not a string')
|
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
description = tool_dict.get('description') # type: ignore
|
2022-08-15 10:47:24 -04:00
|
|
|
if not isinstance(description, str):
|
2019-04-17 03:30:30 -04:00
|
|
|
raise RuntimeError('description is not a string')
|
|
|
|
|
2022-08-15 10:47:24 -04:00
|
|
|
is_executable = tool_dict.get('is_executable', True) # type: ignore
|
|
|
|
if not isinstance(is_executable, bool):
|
2024-02-16 10:39:38 -05:00
|
|
|
raise RuntimeError(f'is_executable for tool {tool_name} is not a bool')
|
2022-08-15 10:47:24 -04:00
|
|
|
|
2019-04-17 03:30:30 -04:00
|
|
|
version_cmd = tool_dict.get('version_cmd')
|
|
|
|
if type(version_cmd) is not list:
|
2024-02-16 10:39:38 -05:00
|
|
|
raise RuntimeError(f'version_cmd for tool {tool_name} is not a list of strings')
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
version_regex = tool_dict.get('version_regex')
|
2022-08-15 10:47:24 -04:00
|
|
|
if not isinstance(version_regex, str) or (not version_regex and is_executable):
|
2024-02-16 10:39:38 -05:00
|
|
|
raise RuntimeError(f'version_regex for tool {tool_name} is not a non-empty string')
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
version_regex_replace = tool_dict.get('version_regex_replace')
|
2022-08-15 10:47:24 -04:00
|
|
|
if version_regex_replace and not isinstance(version_regex_replace, str):
|
2024-02-16 10:39:38 -05:00
|
|
|
raise RuntimeError(f'version_regex_replace for tool {tool_name} is not a string')
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
export_paths = tool_dict.get('export_paths')
|
|
|
|
if type(export_paths) is not list:
|
2024-02-16 10:39:38 -05:00
|
|
|
raise RuntimeError(f'export_paths for tool {tool_name} is not a list')
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
export_vars = tool_dict.get('export_vars', {}) # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
if type(export_vars) is not dict:
|
2024-02-16 10:39:38 -05:00
|
|
|
raise RuntimeError(f'export_vars for tool {tool_name} is not a mapping')
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
versions = tool_dict.get('versions')
|
|
|
|
if type(versions) is not list:
|
2024-02-16 10:39:38 -05:00
|
|
|
raise RuntimeError(f'versions for tool {tool_name} is not an array')
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
install = tool_dict.get('install', False) # type: ignore
|
2022-08-15 10:47:24 -04:00
|
|
|
if not isinstance(install, str):
|
2024-02-16 10:39:38 -05:00
|
|
|
raise RuntimeError(f'install for tool {tool_name} is not a string')
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
info_url = tool_dict.get('info_url', False) # type: ignore
|
2022-08-15 10:47:24 -04:00
|
|
|
if not isinstance(info_url, str):
|
2024-02-16 10:39:38 -05:00
|
|
|
raise RuntimeError(f'info_url for tool {tool_name} is not a string')
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
license = tool_dict.get('license', False) # type: ignore
|
2022-08-15 10:47:24 -04:00
|
|
|
if not isinstance(license, str):
|
2024-02-16 10:39:38 -05:00
|
|
|
raise RuntimeError(f'license for tool {tool_name} is not a string')
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
strip_container_dirs = tool_dict.get('strip_container_dirs', 0)
|
|
|
|
if strip_container_dirs and type(strip_container_dirs) is not int:
|
2024-02-16 10:39:38 -05:00
|
|
|
raise RuntimeError(f'strip_container_dirs for tool {tool_name} is not an int')
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
overrides_list = tool_dict.get('platform_overrides', []) # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
if type(overrides_list) is not list:
|
2024-02-16 10:39:38 -05:00
|
|
|
raise RuntimeError(f'platform_overrides for tool {tool_name} is not a list')
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2021-04-26 15:34:48 -04:00
|
|
|
supported_targets = tool_dict.get('supported_targets')
|
|
|
|
if not isinstance(supported_targets, list):
|
2024-02-16 10:39:38 -05:00
|
|
|
raise RuntimeError(f'supported_targets for tool {tool_name} is not a list of strings')
|
2021-04-26 15:34:48 -04:00
|
|
|
|
2019-04-17 03:30:30 -04:00
|
|
|
# Create the object
|
2024-01-19 08:19:12 -05:00
|
|
|
tool_obj: 'IDFTool' = cls(tool_name, description, install, info_url, license, # type: ignore
|
|
|
|
version_cmd, version_regex, supported_targets, version_regex_replace, # type: ignore
|
|
|
|
strip_container_dirs, is_executable) # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
for path in export_paths: # type: ignore
|
|
|
|
tool_obj.options.export_paths.append(path) # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
for name, value in export_vars.items(): # type: ignore
|
|
|
|
tool_obj.options.export_vars[name] = value # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
for index, override in enumerate(overrides_list):
|
2021-03-04 03:53:47 -05:00
|
|
|
platforms_list = override.get('platforms') # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
if type(platforms_list) is not list:
|
|
|
|
raise RuntimeError('platforms for override %d of tool %s is not a list' % (index, tool_name))
|
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
install = override.get('install') # type: ignore
|
2022-08-15 10:47:24 -04:00
|
|
|
if install is not None and not isinstance(install, str):
|
2019-04-17 03:30:30 -04:00
|
|
|
raise RuntimeError('install for override %d of tool %s is not a string' % (index, tool_name))
|
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
version_cmd = override.get('version_cmd') # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
if version_cmd is not None and type(version_cmd) is not list:
|
|
|
|
raise RuntimeError('version_cmd for override %d of tool %s is not a list of strings' %
|
|
|
|
(index, tool_name))
|
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
version_regex = override.get('version_regex') # type: ignore
|
2022-08-15 10:47:24 -04:00
|
|
|
if version_regex is not None and (not isinstance(version_regex, str) or not version_regex):
|
2019-04-17 03:30:30 -04:00
|
|
|
raise RuntimeError('version_regex for override %d of tool %s is not a non-empty string' %
|
|
|
|
(index, tool_name))
|
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
version_regex_replace = override.get('version_regex_replace') # type: ignore
|
2022-08-15 10:47:24 -04:00
|
|
|
if version_regex_replace is not None and not isinstance(version_regex_replace, str):
|
2019-04-17 03:30:30 -04:00
|
|
|
raise RuntimeError('version_regex_replace for override %d of tool %s is not a string' %
|
|
|
|
(index, tool_name))
|
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
export_paths = override.get('export_paths') # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
if export_paths is not None and type(export_paths) is not list:
|
|
|
|
raise RuntimeError('export_paths for override %d of tool %s is not a list' % (index, tool_name))
|
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
export_vars = override.get('export_vars') # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
if export_vars is not None and type(export_vars) is not dict:
|
|
|
|
raise RuntimeError('export_vars for override %d of tool %s is not a mapping' % (index, tool_name))
|
2021-03-04 03:53:47 -05:00
|
|
|
tool_obj.platform_overrides.append(override) # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
recommended_versions:Dict[str, List[str]] = {}
|
2021-03-04 03:53:47 -05:00
|
|
|
for version_dict in versions: # type: ignore
|
|
|
|
version = version_dict.get('name') # type: ignore
|
2022-08-15 10:47:24 -04:00
|
|
|
if not isinstance(version, str):
|
2024-02-16 10:39:38 -05:00
|
|
|
raise RuntimeError(f'version name for tool {tool_name} is not a string')
|
2019-04-17 03:30:30 -04:00
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
version_status = version_dict.get('status') # type: ignore
|
2022-08-15 10:47:24 -04:00
|
|
|
if not isinstance(version_status, str) and version_status not in IDFToolVersion.STATUS_VALUES:
|
2019-04-17 03:30:30 -04:00
|
|
|
raise RuntimeError('tool {} version {} status is not one of {}', tool_name, version,
|
|
|
|
IDFToolVersion.STATUS_VALUES)
|
|
|
|
|
|
|
|
version_obj = IDFToolVersion(version, version_status)
|
2021-03-04 03:53:47 -05:00
|
|
|
for platform_id, platform_dict in version_dict.items(): # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
if platform_id in ['name', 'status']:
|
|
|
|
continue
|
2024-02-09 09:03:58 -05:00
|
|
|
try:
|
|
|
|
Platforms.get(platform_id)
|
|
|
|
except ValueError:
|
2019-04-17 03:30:30 -04:00
|
|
|
raise RuntimeError('invalid platform %s for tool %s version %s' %
|
|
|
|
(platform_id, tool_name, version))
|
|
|
|
|
|
|
|
version_obj.add_download(platform_id,
|
2023-09-01 11:07:41 -04:00
|
|
|
platform_dict['url'], platform_dict['size'],
|
|
|
|
platform_dict['sha256'], platform_dict.get('rename_dist', ''))
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
if version_status == IDFToolVersion.STATUS_RECOMMENDED:
|
|
|
|
if platform_id not in recommended_versions:
|
|
|
|
recommended_versions[platform_id] = []
|
|
|
|
recommended_versions[platform_id].append(version)
|
|
|
|
|
|
|
|
tool_obj.add_version(version_obj)
|
|
|
|
for platform_id, version_list in recommended_versions.items():
|
|
|
|
if len(version_list) > 1:
|
2024-02-16 10:39:38 -05:00
|
|
|
raise RuntimeError(f'tool {tool_name} for platform {platform_id} has {len(recommended_versions)} recommended versions')
|
2019-04-17 03:30:30 -04:00
|
|
|
if install != IDFTool.INSTALL_NEVER and len(recommended_versions) == 0:
|
2024-02-16 10:39:38 -05:00
|
|
|
raise RuntimeError(f'required/optional tool {tool_name} for platform {platform_id} has no recommended versions')
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
tool_obj._update_current_options()
|
|
|
|
return tool_obj
|
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
def to_json(self): # type: ignore
|
2024-01-19 08:19:12 -05:00
|
|
|
"""
|
|
|
|
Make a JSON dump of self.
|
|
|
|
"""
|
2019-04-17 03:30:30 -04:00
|
|
|
versions_array = []
|
|
|
|
for version, version_obj in self.versions.items():
|
|
|
|
version_json = {
|
|
|
|
'name': version,
|
|
|
|
'status': version_obj.status
|
|
|
|
}
|
|
|
|
for platform_id, download in version_obj.downloads.items():
|
2023-09-01 11:07:41 -04:00
|
|
|
if download.rename_dist:
|
|
|
|
version_json[platform_id] = {
|
|
|
|
'url': download.url,
|
|
|
|
'size': download.size,
|
|
|
|
'sha256': download.sha256,
|
|
|
|
'rename_dist': download.rename_dist
|
|
|
|
}
|
|
|
|
else:
|
|
|
|
version_json[platform_id] = {
|
|
|
|
'url': download.url,
|
|
|
|
'size': download.size,
|
|
|
|
'sha256': download.sha256
|
|
|
|
}
|
2019-04-17 03:30:30 -04:00
|
|
|
versions_array.append(version_json)
|
|
|
|
overrides_array = self.platform_overrides
|
|
|
|
|
|
|
|
tool_json = {
|
|
|
|
'name': self.name,
|
|
|
|
'description': self.description,
|
|
|
|
'export_paths': self.options.export_paths,
|
|
|
|
'export_vars': self.options.export_vars,
|
|
|
|
'install': self.options.install,
|
|
|
|
'info_url': self.options.info_url,
|
|
|
|
'license': self.options.license,
|
|
|
|
'version_cmd': self.options.version_cmd,
|
|
|
|
'version_regex': self.options.version_regex,
|
2021-04-26 15:34:48 -04:00
|
|
|
'supported_targets': self.options.supported_targets,
|
2019-04-17 03:30:30 -04:00
|
|
|
'versions': versions_array,
|
|
|
|
}
|
|
|
|
if self.options.version_regex_replace != VERSION_REGEX_REPLACE_DEFAULT:
|
|
|
|
tool_json['version_regex_replace'] = self.options.version_regex_replace
|
|
|
|
if overrides_array:
|
|
|
|
tool_json['platform_overrides'] = overrides_array
|
|
|
|
if self.options.strip_container_dirs:
|
|
|
|
tool_json['strip_container_dirs'] = self.options.strip_container_dirs
|
2022-08-15 10:47:24 -04:00
|
|
|
if self.options.is_executable is False:
|
|
|
|
tool_json['is_executable'] = self.options.is_executable
|
2019-04-17 03:30:30 -04:00
|
|
|
return tool_json
|
|
|
|
|
|
|
|
|
2022-03-19 16:37:16 -04:00
|
|
|
class IDFEnvEncoder(JSONEncoder):
    """
    IDFEnvEncoder is used for encoding IDFEnv, IDFRecord, SelectedIDFRecord classes to JSON in readable format. Not as (__main__.IDFRecord object at '0x7fcxx').
    Additionally remove first underscore with private properties when processing.
    """
    def default(self, obj):  # type: ignore
        # Serialize an arbitrary object from its attribute dict, exposing
        # private attributes ('_name') under their public name ('name').
        attributes = vars(obj)
        return {key.lstrip('_'): value for key, value in attributes.items()}
|
|
|
|
|
|
|
|
|
|
|
|
class IDFRecord:
    """
    IDFRecord represents one record of installed ESP-IDF on system.
    Contains:
        * version - actual version of ESP-IDF (example '5.0')
        * path - absolute path to the ESP-IDF
        * features - features using ESP-IDF
        * targets - ESP chips for which are installed needed toolchains (example ['esp32' , 'esp32s2'])
            - Default value is [], since user didn't define any targets yet
    """
    def __init__(self) -> None:
        self.version: str = ''
        self.path: str = ''
        # 'core' is always present; update_features() maintains this invariant
        self._features: List[str] = ['core']
        self._targets: List[str] = []

    def __iter__(self):  # type: ignore
        yield from {
            'version': self.version,
            'path': self.path,
            'features': self._features,
            'targets': self._targets
        }.items()

    def __str__(self) -> str:
        return json.dumps(dict(self), ensure_ascii=False, indent=4)  # type: ignore

    def __repr__(self) -> str:
        return self.__str__()

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, IDFRecord):
            return False
        return all(getattr(self, x) == getattr(other, x) for x in ('version', 'path', 'features', 'targets'))

    def __ne__(self, other: object) -> bool:
        # Bugfix: the original returned False for non-IDFRecord operands, so
        # "record != <non-record>" evaluated to False. Deriving the result from
        # __eq__ keeps the two operators consistent for every operand type.
        return not self.__eq__(other)

    @property
    def features(self) -> List[str]:
        return self._features

    def update_features(self, add: Tuple[str, ...] = (), remove: Tuple[str, ...] = ()) -> None:
        """
        Update features, but maintain required feature 'core'.
        If the same feature is present in both argument's tuples, do not update this feature.
        """
        add_set = set(add)
        remove_set = set(remove)
        # A feature listed on both sides is left untouched
        features_to_add = add_set.difference(remove_set)
        features_to_remove = remove_set.difference(add_set)

        features = set(self._features)
        features.update(features_to_add)
        features.difference_update(features_to_remove)
        features.add('core')
        self._features = list(features)

    @property
    def targets(self) -> List[str]:
        return self._targets

    def extend_targets(self, targets: List[str]) -> None:
        # Targets can be only updated, but always maintain existing targets.
        self._targets = list(set(targets + self._targets))

    @classmethod
    def get_active_idf_record(cls) -> 'IDFRecord':
        """Build a record describing the currently active ESP-IDF copy."""
        idf_record_obj = cls()
        idf_record_obj.version = get_idf_version()
        idf_record_obj.path = g.idf_path
        return idf_record_obj

    @classmethod
    def get_idf_record_from_dict(cls, record_dict: Dict[str, Any]) -> 'IDFRecord':
        """Deserialize a record from a dict; raises ValueError on missing mandatory keys."""
        idf_record_obj = cls()
        try:
            idf_record_obj.version = record_dict['version']
            idf_record_obj.path = record_dict['path']
        except KeyError:
            # When some of these key attributes, which are irreplaceable with default values, are not found, raise ValueError
            raise ValueError('Inconsistent record')

        idf_record_obj.update_features(record_dict.get('features', []))
        idf_record_obj.extend_targets(record_dict.get('targets', []))

        return idf_record_obj
|
|
|
|
|
|
|
|
|
|
|
|
class IDFEnv:
    """
    IDFEnv represents ESP-IDF Environments installed on system and is responsible for loading and saving structured data.
    All information is saved and loaded from IDF_ENV_FILE.
    Contains:
        * idf_installed - all installed environments of ESP-IDF on system.
    """

    def __init__(self) -> None:
        # Seed the mapping with a record for the currently active ESP-IDF copy,
        # keyed by the active repo id.
        active_idf_id = active_repo_id()
        self.idf_installed: Dict[str, IDFRecord] = {active_idf_id: IDFRecord.get_active_idf_record()}

    def __iter__(self):  # type: ignore
        # Iterating yields the (key, value) pairs used for JSON serialization.
        yield from {
            'idfInstalled': self.idf_installed,
        }.items()

    def __str__(self) -> str:
        return json.dumps(dict(self), cls=IDFEnvEncoder, ensure_ascii=False, indent=4)  # type: ignore

    def __repr__(self) -> str:
        return self.__str__()

    def save(self) -> None:
        """
        Diff current class instance with instance loaded from IDF_ENV_FILE and save only if are different.

        Raises OSError when IDF_TOOLS_PATH or the env file itself is not writable.
        """
        # It is enough to compare just active records because others can't be touched by the running script
        if self.get_active_idf_record() != self.get_idf_env().get_active_idf_record():
            idf_env_file_path = os.path.join(g.idf_tools_path, IDF_ENV_FILE)
            try:
                if g.idf_tools_path:  # mypy fix for Optional[str] in the next call
                    # the directory doesn't exist if this is run on a clean system the first time
                    mkdir_p(g.idf_tools_path)
                with open(idf_env_file_path, 'w', encoding='utf-8') as w:
                    info(f'Updating {idf_env_file_path}')
                    json.dump(dict(self), w, cls=IDFEnvEncoder, ensure_ascii=False, indent=4)  # type: ignore
            except (IOError, OSError):
                # Distinguish an unwritable tools dir from an unwritable/corrupted file
                if not os.access(g.idf_tools_path, os.W_OK):
                    raise OSError(f'IDF_TOOLS_PATH {g.idf_tools_path} is not accessible to write. '
                                  'Required changes have not been saved')
                raise OSError(f'File {idf_env_file_path} is not accessible to write or corrupted. Required changes have not been saved')

    def get_active_idf_record(self) -> IDFRecord:
        # The record for the ESP-IDF copy this script is running from.
        return self.idf_installed[active_repo_id()]

    @classmethod
    def get_idf_env(cls) -> 'IDFEnv':
        """
        IDFEnv class is used to process IDF_ENV_FILE file. The constructor is therefore called only in this method that loads the file and checks its contents.
        """
        idf_env_obj = cls()
        try:
            idf_env_file_path = os.path.join(g.idf_tools_path, IDF_ENV_FILE)
            with open(idf_env_file_path, 'r', encoding='utf-8') as idf_env_file:
                idf_env_json = json.load(idf_env_file)

            try:
                idf_installed = idf_env_json['idfInstalled']
            except KeyError:
                # If no ESP-IDF record is found in loaded file, do not update and keep default value from constructor
                pass
            else:
                # Load and verify ESP-IDF records found in IDF_ENV_FILE
                idf_installed.pop('sha', None)
                idf_installed_verified:Dict[str, IDFRecord] = {}
                for idf in idf_installed:
                    try:
                        idf_installed_verified[idf] = IDFRecord.get_idf_record_from_dict(idf_installed[idf])
                    except ValueError as err:
                        # Malformed record: warn and drop it rather than abort
                        warn(f'{err} "{idf}" found in {idf_env_file_path}, removing this record.')
                # Combine ESP-IDF loaded records with the one in constructor, to be sure that there is an active ESP-IDF record in the idf_installed
                # If the active record is already in idf_installed, it is not overwritten
                idf_env_obj.idf_installed = dict(idf_env_obj.idf_installed, **idf_installed_verified)

        except (IOError, OSError, ValueError):
            # If no, empty or not-accessible to read IDF_ENV_FILE found, use default values from constructor
            pass

        return idf_env_obj
|
|
|
|
|
|
|
|
|
2022-08-09 12:13:29 -04:00
|
|
|
class ENVState:
    """
    ENVState is used to handle IDF global variables that are set in environment and need to be removed when switching between ESP-IDF versions in opened shell.
    Every opened shell/terminal has its own temporary file to store these variables.
    The temporary file's name is generated automatically with suffix 'idf_ + opened shell ID'. Path to this tmp file is stored as env global variable (env_key).
    The shell ID is crucial, since in one terminal can be opened more shells.
    * env_key - global variable name/key
    * deactivate_file_path - global variable value (generated tmp file name)
    * idf_variables - loaded IDF variables from file
    """
    env_key = 'IDF_DEACTIVATE_FILE_PATH'
    # Read once at class creation time from the surrounding environment
    deactivate_file_path = os.environ.get(env_key, '')

    def __init__(self) -> None:
        self.idf_variables: Dict[str, Any] = {}

    @classmethod
    def get_env_state(cls) -> 'ENVState':
        """Load the stored IDF variables for this shell, if a state file exists."""
        env_state_obj = cls()

        if cls.deactivate_file_path:
            try:
                with open(cls.deactivate_file_path, 'r') as fp:
                    env_state_obj.idf_variables = json.load(fp)
            except (IOError, OSError, ValueError):
                # Missing/unreadable/corrupted state file: fall back to empty state
                pass
        return env_state_obj

    def save(self) -> str:
        """Persist idf_variables to the shell's state file and return its path."""
        try:
            if self.deactivate_file_path and os.path.basename(self.deactivate_file_path).endswith(f'idf_{str(os.getppid())}'):
                # If exported file path/name exists and belongs to actual opened shell
                with open(self.deactivate_file_path, 'w') as w:
                    json.dump(self.idf_variables, w, ensure_ascii=False, indent=4)  # type: ignore
            else:
                # No file for this shell yet (or it belongs to another shell):
                # create a fresh one suffixed with the parent shell's PID
                with tempfile.NamedTemporaryFile(delete=False, suffix=f'idf_{str(os.getppid())}') as fp:
                    self.deactivate_file_path = fp.name
                    fp.write(json.dumps(self.idf_variables, ensure_ascii=False, indent=4).encode('utf-8'))
        except (IOError, OSError):
            # Bugfix: corrected the 'Potentional' typo in the user-facing warning
            warn(f'File storing IDF env variables {self.deactivate_file_path} is not accessible to write. '
                 'Potential switching ESP-IDF versions may cause problems')
        return self.deactivate_file_path
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def load_tools_info() -> Dict[str, IDFTool]:
    """
    Load tools metadata from tools.json, return a dictionary: tool name - tool info.
    """
    tool_versions_file_name = g.tools_json

    # Consistency fix: open with an explicit UTF-8 encoding, like the other
    # file accesses in this script, instead of relying on the locale default.
    with open(tool_versions_file_name, 'r', encoding='utf-8') as f:  # type: ignore
        tools_info = json.load(f)

    return parse_tools_info_json(tools_info)  # type: ignore
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
def parse_tools_info_json(tools_info):  # type: ignore
    """
    Parse and validate the dictionary obtained by loading the tools.json file.
    Returns a dictionary of tools (key: tool name, value: IDFTool object).
    Raises RuntimeError when the 'tools' property is missing or malformed.
    """
    tools_array = tools_info.get('tools')
    if type(tools_array) is not list:
        raise RuntimeError('tools property is missing or not an array')

    tools_dict = OrderedDict()
    for tool_dict in tools_array:
        tool = IDFTool.from_json(tool_dict)
        tools_dict[tool.name] = tool
    return tools_dict
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def dump_tools_json(tools_info: Optional[Dict[str, IDFTool]]):  # type: ignore
    """
    Dump all the tools into JSON.

    Returns a sorted, indented JSON string; an empty or missing tools_info
    produces an empty JSON object.
    """
    dump_kwargs = {'indent': 2, 'separators': (',', ': '), 'sort_keys': True}
    if not tools_info:
        return json.dumps({}, **dump_kwargs)

    tools_array = [tool_obj.to_json() for tool_obj in tools_info.values()]
    file_json = {'version': TOOLS_FILE_VERSION, 'tools': tools_array}
    return json.dumps(file_json, **dump_kwargs)
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
|
2021-12-13 10:45:11 -05:00
|
|
|
def get_python_exe_and_subdir() -> Tuple[str, str]:
    """
    Return the platform-specific virtualenv layout.

    Returns:
        * python executable name (python on Unix, python.exe on Win)
        * subdir in which the executable is (bin for Unix, Scripts for Win)
    """
    is_windows = sys.platform == 'win32'
    python_exe = 'python.exe' if is_windows else 'python'
    subdir = 'Scripts' if is_windows else 'bin'
    return python_exe, subdir
|
|
|
|
|
|
|
|
|
2021-11-08 11:55:03 -05:00
|
|
|
def get_idf_version() -> str:
    """
    Return ESP-IDF version.

    The version is taken from IDF_PATH/version.txt when present; otherwise it
    is parsed from the esp_idf_version.h C header. When neither source yields
    a version, the script terminates via SystemExit(1).
    """
    idf_version: Optional[str] = None

    version_file_path = os.path.join(g.idf_path, 'version.txt')
    if os.path.exists(version_file_path):
        with open(version_file_path, 'r') as version_file:
            idf_version_str = version_file.read()

        # Accept strings like 'v5.2', 'v5.2-dev...'; keep only MAJOR.MINOR
        match = re.match(r'^v([0-9]+\.[0-9]+).*', idf_version_str)
        if match:
            idf_version = match.group(1)

    if idf_version is None:
        # Fallback: read the version from the ESP_IDF_VERSION_* defines in the C header
        try:
            with open(os.path.join(g.idf_path, 'components', 'esp_common', 'include', 'esp_idf_version.h')) as f:
                m = re.search(r'^#define\s+ESP_IDF_VERSION_MAJOR\s+(\d+).+?^#define\s+ESP_IDF_VERSION_MINOR\s+(\d+)',
                              f.read(), re.DOTALL | re.MULTILINE)
            if m:
                idf_version = '.'.join((m.group(1), m.group(2)))
            else:
                fatal('Reading IDF version from C header file failed!')
                raise SystemExit(1)
        except Exception as e:
            # Any failure (missing header, bad permissions, ...) is fatal
            fatal(f'It is not possible to determine the IDF version: {e}')
            raise SystemExit(1)
    return idf_version
|
|
|
|
|
|
|
|
|
|
|
|
def get_python_env_path() -> Tuple[str, str, str, str]:
    """
    Returns tuple of Python environment path, Python env. path with subdir and full path from Python (i.e. with executable).
    """
    idf_version = get_idf_version()

    # IDF_PYTHON_ENV_PATH overrides the default location under the tools directory
    default_env_path = os.path.join(
        g.idf_tools_path, 'python_env',
        PYTHON_VENV_DIR_TEMPLATE.format(idf_version, PYTHON_VER_MAJOR_MINOR))
    idf_python_env_path = os.getenv('IDF_PYTHON_ENV_PATH') or default_env_path

    python_exe, subdir = get_python_exe_and_subdir()
    idf_python_export_path = os.path.join(idf_python_env_path, subdir)
    virtualenv_python = os.path.join(idf_python_export_path, python_exe)

    return idf_python_env_path, idf_python_export_path, virtualenv_python, idf_version
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def parse_tools_arg(tools_str: List[str]) -> List[str]:
    """
    Base parsing "tools" arguments: all, required, etc.

    An empty selection defaults to the 'required' metapackage.
    """
    return tools_str if tools_str else ['required']
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def expand_tools_arg(tools_spec: List[str], overall_tools: OrderedDict, targets: List[str]) -> List[str]:
    """
    Expand list of tools 'tools_spec' in according:
    - a tool is in the 'overall_tools' list
    - consider metapackages like "required" and "all"
    - process wildcards in tool names
    - a tool supports chips from 'targets'
    """
    selected: List[str] = []

    def _add_unique(names):  # type: ignore
        # Append names preserving order, skipping ones already selected
        for name in names:
            if name not in selected:
                selected.append(name)

    # Resolve explicit names and wildcard patterns against the known tools
    for pattern in tools_spec:
        _add_unique(name for name in overall_tools if fnmatch.fnmatch(name, pattern))

    # Expand the metapackages
    if 'required' in tools_spec:
        _add_unique(name for name, tool in overall_tools.items()
                    if tool.get_install_type() == IDFTool.INSTALL_ALWAYS)
    elif 'all' in tools_spec:
        _add_unique(name for name, tool in overall_tools.items()
                    if tool.get_install_type() != IDFTool.INSTALL_NEVER)

    # Keep only tools relevant for the requested chip targets
    return [name for name in selected if overall_tools[name].is_supported_for_any_of_targets(targets)]
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def parse_targets_arg(targets_str: str) -> List[str]:
    """
    Parse and check if targets_str is a valid list of targets and return a target list.

    Exits with status 1 when any requested target is unknown to tools.json.
    """
    known_targets = get_all_targets_from_tools_json()

    # Normalize: lowercase, drop dashes ('esp32-s2' -> 'esp32s2'), split on commas
    requested = targets_str.lower().replace('-', '').split(',')
    if requested == ['all']:
        return known_targets

    invalid_targets = [t for t in requested if t not in known_targets]
    if invalid_targets:
        warn(f'Targets: \"{", ".join(invalid_targets)}\" are not supported. Only allowed options are: {", ".join(known_targets)}.')
        raise SystemExit(1)
    return requested
|
|
|
|
|
2021-11-08 11:55:03 -05:00
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def add_and_check_targets(idf_env_obj: IDFEnv, targets_str: str) -> List[str]:
    """
    Define targets from targets_str, check that the target names are valid and add them to idf_env_obj.

    Returns the resulting target list of the active IDF record.
    """
    parsed_targets = parse_targets_arg(targets_str)
    # Extend the active record, then re-read it to return the merged target list.
    idf_env_obj.get_active_idf_record().extend_targets(parsed_targets)
    return idf_env_obj.get_active_idf_record().targets
|
2021-04-26 15:34:48 -04:00
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def feature_to_requirements_path(feature: str) -> str:
    """
    Convert feature (ci, core, docs, gdbgui, pytest, ...) to the path to its requirements.txt.
    """
    requirements_name = f'requirements.{feature}.txt'
    return os.path.join(g.idf_path, 'tools', 'requirements', requirements_name)
|
2021-09-16 10:48:03 -04:00
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def process_and_check_features(idf_env_obj: IDFEnv, features_str: str) -> List[str]:
    """
    Check whether new feature is valid. If yes, update features in active IDF record.

    features_str is a comma-separated list; a leading '-' marks a feature for removal,
    an optional leading '+' marks one for addition. Returns the resulting feature list.
    """
    features_to_add: List[str] = []
    features_to_remove: List[str] = []
    for candidate in features_str.split(','):
        if candidate.startswith('-'):
            features_to_remove.append(candidate.lstrip('-'))
            continue
        candidate = candidate.lstrip('+')
        # A feature is only accepted when its requirements file actually exists.
        if os.path.isfile(feature_to_requirements_path(candidate)):
            features_to_add.append(candidate)
    idf_env_obj.get_active_idf_record().update_features(tuple(features_to_add), tuple(features_to_remove))
    return idf_env_obj.get_active_idf_record().features
|
2021-04-26 15:34:48 -04:00
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_all_targets_from_tools_json() -> List[str]:
    """
    Returns sorted list of all targets from tools.json file.
    """
    # Collect the supported targets of every tool into a set to remove duplicates.
    supported_targets = set()
    for tool_obj in load_tools_info().values():
        supported_targets.update(tool_obj.get_supported_targets())
    # 'all' is a wildcard marker, not a real chip target.
    supported_targets.discard('all')
    return sorted(supported_targets)
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def filter_tools_info(idf_env_obj: IDFEnv, tools_info: Dict[str, IDFTool]) -> Dict[str, IDFTool]:
    """
    Filter tools info; return only those tools which:

    * are installable (install type is INSTALL_ALWAYS or INSTALL_ON_REQUEST)
    * support at least one target from the active IDF record
    """
    active_targets = idf_env_obj.get_active_idf_record().targets
    # Without a target selection there is nothing to filter on.
    if not active_targets:
        return tools_info

    def _keep(tool: IDFTool) -> bool:
        # Installable tools only ...
        if tool.get_install_type() not in (IDFTool.INSTALL_ALWAYS, IDFTool.INSTALL_ON_REQUEST):
            return False
        # ... which support one of the active targets, or everything ('all').
        supported = tool.get_supported_targets()
        return supported == ['all'] or any(t in active_targets for t in supported)

    return OrderedDict((name, tool) for name, tool in tools_info.items() if _keep(tool))
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def add_variables_to_deactivate_file(args: List[str], new_idf_vars:Dict[str, Any]) -> str:
    """
    Add IDF global variables that need to be removed when the active esp-idf environment is deactivated.

    The variables are merged into the persisted ENVState and written to the
    deactivation file; the path of that file is returned.
    NOTE(review): despite the annotation, args is used as an argparse.Namespace
    (vars(args) below) — confirm callers.
    """
    # Normalize PATH into a list of IDF-added sub-paths, dropping the trailing
    # '$PATH'/'%PATH%' placeholder appended by action_export.
    if 'PATH' in new_idf_vars:
        new_idf_vars['PATH'] = new_idf_vars['PATH'].split(':')[:-1]  # PATH is stored as list of sub-paths without '$PATH'

    # Make sure the key exists even when no PATH entries were exported.
    new_idf_vars['PATH'] = new_idf_vars.get('PATH', [])
    args_add_paths_extras = vars(args).get('add_paths_extras')  # remove mypy error with args
    new_idf_vars['PATH'] = new_idf_vars['PATH'] + args_add_paths_extras.split(':') if args_add_paths_extras else new_idf_vars['PATH']

    env_state_obj = ENVState.get_env_state()

    if env_state_obj.idf_variables:
        # Merge with variables recorded by a previous export of this shell session.
        exported_idf_vars = env_state_obj.idf_variables
        new_idf_vars['PATH'] = list(set(new_idf_vars['PATH'] + exported_idf_vars.get('PATH', [])))  # remove duplicates
        env_state_obj.idf_variables = dict(exported_idf_vars, **new_idf_vars)  # merge two dicts
    else:
        env_state_obj.idf_variables = new_idf_vars
    deactivate_file_path = env_state_obj.save()

    return deactivate_file_path
|
2021-11-08 11:55:03 -05:00
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def deactivate_statement(args: List[str]) -> None:
    """
    Print the deactivate statement: a sequence of shell commands that remove IDF global
    variables from the environment, so the environment gets to the state it was in
    before calling the export.{sh/fish} script.

    After printing, the stored variables are cleared so the state file is reset.
    """
    env_state_obj = ENVState.get_env_state()
    if not env_state_obj.idf_variables:
        warn('No IDF variables to remove from environment found. Deactivation of previous esp-idf version was not successful.')
        return
    unset_vars = env_state_obj.idf_variables
    env_path: Optional[str] = os.getenv('PATH')
    # Sub-paths previously added by export; .get() guards against state files
    # saved without a 'PATH' entry.
    idf_added_paths = unset_vars.get('PATH', [])
    # Remove the IDF-added sub-paths from the current PATH. If PATH is unset or
    # empty, fall back to exporting an empty PATH instead of failing with a
    # NameError on the undefined variable below.
    cleared_env_path = ''
    if env_path:
        cleared_env_path = ':'.join([k for k in env_path.split(':') if k not in idf_added_paths])

    # All recorded variables except PATH are simply unset.
    unset_list = [k for k in unset_vars.keys() if k != 'PATH']
    unset_format, sep = get_unset_format_and_separator(args)
    unset_statement = sep.join([unset_format.format(k) for k in unset_list])

    # PATH itself is not unset, but re-exported with the IDF sub-paths removed.
    export_format, sep = get_export_format_and_separator(args)
    export_statement = export_format.format('PATH', cleared_env_path)

    deactivate_statement_str = sep.join([unset_statement, export_statement])

    print(deactivate_statement_str)
    # After deactivation clear old variables
    env_state_obj.idf_variables.clear()
    env_state_obj.save()
    return
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_export_format_and_separator(args: List[str]) -> Tuple[str, str]:
    """
    Returns export pattern (formatted string) either for exporting in shell or as a key-value pair.
    """
    format_map = {
        EXPORT_SHELL: ('export {}="{}"', ';'),
        EXPORT_KEY_VALUE: ('{}={}', '\n'),
    }
    return format_map[args.format]  # type: ignore
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_unset_format_and_separator(args: List[str]) -> Tuple[str, str]:
    """
    Returns pattern to unset a variable (formatted string) either for shell or for key-value pair.
    """
    format_map = {
        EXPORT_SHELL: ('unset {}', ';'),
        EXPORT_KEY_VALUE: ('{}', '\n'),
    }
    return format_map[args.format]  # type: ignore
|
|
|
|
|
|
|
|
|
2022-03-19 16:37:16 -04:00
|
|
|
def different_idf_detected() -> bool:
    """
    Checks if new IDF detected (the environment was exported for a different
    ESP-IDF directory or version than the currently running one).
    """
    # A previous export recorded its command path; a different directory means a different IDF.
    export_cmd = os.environ.get('IDF_TOOLS_EXPORT_CMD')
    if export_cmd is not None and g.idf_path != os.path.dirname(export_cmd):
        return True

    # None of the IDF marker variables present -> no previous export, nothing to be unset.
    marker_vars = ('IDF_PYTHON_ENV_PATH', 'OPENOCD_SCRIPTS', 'ESP_IDF_VERSION')
    if not any(marker in os.environ for marker in marker_vars):
        return False

    # User is exporting the same version as is already in the environment.
    if os.getenv('ESP_IDF_VERSION') == get_idf_version():
        return False

    # Different version detected.
    return True
|
|
|
|
|
|
|
|
|
|
|
|
def active_repo_id() -> str:
    """
    Function returns unique id of running ESP-IDF combining current idfpath with version.
    The id is unique with same version & different path or same path & different version.
    """
    try:
        # g.idf_path is forcefully casted to str just to make type linters happy
        path_part = str(g.idf_path)
    except ReferenceError:
        path_part = 'UNKNOWN_PATH'
    return f'{path_part}-v{get_idf_version()}'
|
2021-11-08 11:55:03 -05:00
|
|
|
|
|
|
|
|
2023-02-05 10:29:03 -05:00
|
|
|
def list_default(args):  # type: ignore
    """
    Prints currently installed versions of all tools compatible with current platform.

    For each installable tool: its description, then every platform-compatible
    version (newest first), marking the installed ones.
    """
    tools_info = load_tools_info()
    for name, tool in tools_info.items():
        # Tools marked INSTALL_NEVER are not applicable and are skipped entirely.
        if tool.get_install_type() == IDFTool.INSTALL_NEVER:
            continue
        optional_str = ' (optional)' if tool.get_install_type() == IDFTool.INSTALL_ON_REQUEST else ''
        info(f'* {name}: {tool.description}{optional_str}')
        try:
            tool.find_installed_versions()
        except ToolBinaryError:
            # Best effort: a broken tool binary only affects the 'installed' marks.
            pass
        versions_for_platform = {k: v for k, v in tool.versions.items() if v.compatible_with_platform()}
        if not versions_for_platform:
            info(f' (no versions compatible with platform {PYTHON_PLATFORM})')
            continue
        # Sort newest first; key is the IDFToolVersion object for each version name.
        versions_sorted = sorted(versions_for_platform.keys(), key=tool.versions.get, reverse=True)  # type: ignore
        for version in versions_sorted:
            version_obj = tool.versions[version]
            info(' - {} ({}{})'.format(version, version_obj.status,
                                       ', installed' if version in tool.versions_installed else ''))
|
|
|
|
|
|
|
|
|
2023-02-05 10:29:03 -05:00
|
|
|
def list_outdated(args):  # type: ignore
    """
    Prints info if currently installed version can be replaced by newer one for every tool.
    """
    for name, tool in load_tools_info().items():
        if tool.get_install_type() == IDFTool.INSTALL_NEVER:
            continue
        platform_versions = {ver: obj for ver, obj in tool.versions.items() if obj.compatible_with_platform()}
        if not platform_versions:
            continue
        installed = tool.latest_installed_version()
        if not installed:
            continue
        # Newest version available for this platform: descending sort, take the head.
        newest = sorted(platform_versions.keys(), key=tool.versions.get, reverse=True)[0]
        if installed < newest:
            info(f'{name}: version {installed} is outdated by {newest}')
|
|
|
|
|
|
|
|
|
|
|
|
def action_list(args):  # type: ignore
    """
    If args.outdated flag is set, prints if currently installed tools can be replaced by their newer version.
    If not, prints currently installed tools with their version.
    """
    handler = list_outdated if args.outdated else list_default
    handler(args)
|
|
|
|
|
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
def action_check(args):  # type: ignore
    """
    Checks what tools are installed and if some mandatory tool is missing, exits with return code 1.

    Also exits with 1 if probing any tool binary raised ToolBinaryError.
    """
    tools_info = load_tools_info()
    # Only consider tools relevant for the active IDF record (installable + matching targets).
    tools_info = filter_tools_info(IDFEnv.get_idf_env(), tools_info)
    # Names of mandatory (INSTALL_ALWAYS) tools not found anywhere.
    not_found_list = []
    # Set when probing a tool binary failed; checking still continues.
    tool_error = False
    info('Checking for installed tools...')
    for name, tool in tools_info.items():
        if tool.get_install_type() == IDFTool.INSTALL_NEVER:
            continue
        tool_found_somewhere = False
        info(f'Checking tool {name}')
        try:
            tool.find_installed_versions()
        except ToolBinaryError:
            tool_error = True
        if tool.version_in_path:
            info(f' version found in PATH: {tool.version_in_path}')
            tool_found_somewhere = True
        else:
            info(' no version found in PATH')

        for version in tool.versions_installed:
            info(f' version installed in tools directory: {version}')
            tool_found_somewhere = True
        # Only tools that must always be installed count as "missing".
        if not tool_found_somewhere and tool.get_install_type() == IDFTool.INSTALL_ALWAYS:
            not_found_list.append(name)
    if not_found_list:
        fatal(f'The following required tools were not found: {" ".join(not_found_list)}')
        raise SystemExit(1)
    if tool_error:
        raise SystemExit(1)
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
# The following function is used in process_tool which is a part of the action_export.
|
2023-12-02 00:38:51 -05:00
|
|
|
# The following function is used in process_tool which is a part of the action_export.
def handle_recommended_version_to_use(
    tool: IDFTool,
    tool_name: str,
    version_to_use: str,
    prefer_system_hint: str,
) -> Tuple[list, dict]:
    """
    Collect export paths and env vars for version_to_use.
    If there is unsupported tools version in PATH, prints info about that.
    """
    export_paths = tool.get_export_paths(version_to_use)
    export_vars = tool.get_export_vars(version_to_use)
    unsupported_in_path = tool.version_in_path and tool.version_in_path not in tool.versions
    if unsupported_in_path:
        info(f'Not using an unsupported version of tool {tool.name} found in PATH: {tool.version_in_path}.' + prefer_system_hint, f=sys.stderr)
    return export_paths, export_vars
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
# The following function is used in process_tool which is a part of the action_export.
|
|
|
|
# The following function is used in process_tool which is a part of the action_export.
def handle_supported_or_deprecated_version(tool: IDFTool, tool_name: str) -> None:
    """
    Prints info if supported, but not recommended or deprecated version of the tool is used.
    """
    status = tool.versions[tool.version_in_path].status  # type: ignore
    if status == IDFToolVersion.STATUS_SUPPORTED:
        info(f'Using a supported version of tool {tool_name} found in PATH: {tool.version_in_path}.', f=sys.stderr)
        info(f'However the recommended version is {tool.get_recommended_version()}.', f=sys.stderr)
    elif status == IDFToolVersion.STATUS_DEPRECATED:
        warn(f'using a deprecated version of tool {tool_name} found in PATH: {tool.version_in_path}')
|
2023-12-02 00:38:51 -05:00
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
# The following function is used in process_tool which is a part of the action_export.
|
2023-12-02 00:38:51 -05:00
|
|
|
# The following function is used in process_tool which is a part of the action_export.
def handle_missing_versions(
    tool: IDFTool,
    tool_name: str,
    install_cmd: str,
    prefer_system_hint: str
) -> None:
    """
    Prints the info about missing tool to stderr if tool has no supported versions installed,
    including the command to install it and a hint about any unsupported version in PATH.
    """
    fatal(f'tool {tool.name} has no installed versions. Please run \'{install_cmd}\' to install it.')
    unsupported_in_path = tool.version_in_path and tool.version_in_path not in tool.versions
    if unsupported_in_path:
        info(f'An unsupported version of tool {tool_name} was found in PATH: {tool.version_in_path}. ' +
             prefer_system_hint, f=sys.stderr)
|
|
|
|
|
|
|
|
|
|
|
|
def process_tool(
    tool: IDFTool,
    tool_name: str,
    args: argparse.Namespace,
    install_cmd: str,
    prefer_system_hint: str
) -> Tuple[list, dict, bool]:
    """
    Helper function used only in action export.
    Returns:
        * Paths that need to be exported.
        * Dictionary of environment variables that need to be exported for the tool.
        * Flag if any tool was found.
    """
    tool_found: bool = True
    tool_export_paths: List[str] = []
    tool_export_vars: Dict[str, str] = {}

    try:
        tool.find_installed_versions()
    except ToolBinaryError:
        # Best effort: a broken binary just means no installed version is detected.
        pass
    recommended_version_to_use = tool.get_preferred_installed_version()

    # Non-executable tools only contribute environment variables, no PATH entries.
    if not tool.is_executable and recommended_version_to_use:
        tool_export_vars = tool.get_export_vars(recommended_version_to_use)
        return tool_export_paths, tool_export_vars, tool_found

    # Installed recommended version wins unless the user prefers the system copy.
    if recommended_version_to_use and not args.prefer_system:
        tool_export_paths, tool_export_vars = handle_recommended_version_to_use(
            tool, tool_name, recommended_version_to_use, prefer_system_hint
        )
        return tool_export_paths, tool_export_vars, tool_found

    if tool.version_in_path:
        if tool.version_in_path not in tool.versions:
            # unsupported version
            if args.prefer_system:  # type: ignore
                warn(f'using an unsupported version of tool {tool.name} found in PATH: {tool.version_in_path}')
                return tool_export_paths, tool_export_vars, tool_found
            else:
                # unsupported version in path
                pass
        else:
            # supported/deprecated version in PATH, use it
            handle_supported_or_deprecated_version(tool, tool_name)
            return tool_export_paths, tool_export_vars, tool_found

    if not tool.versions_installed:
        # Only mandatory tools are reported as missing (and fail the export).
        if tool.get_install_type() == IDFTool.INSTALL_ALWAYS:
            handle_missing_versions(tool, tool_name, install_cmd, prefer_system_hint)
            tool_found = False
        # If a tool found, but it is optional and does not have versions installed, use whatever is in PATH.
        return tool_export_paths, tool_export_vars, tool_found

    return tool_export_paths, tool_export_vars, tool_found
|
|
|
|
|
|
|
|
|
2024-02-21 06:46:39 -05:00
|
|
|
def check_python_venv_compatibility(idf_python_env_path: str, idf_version: str) -> None:
    """
    Check that the Python virtual environment at idf_python_env_path was generated
    for the given ESP-IDF version; print a fatal message and exit with code 1 when
    it belongs to a different version.

    A missing or unreadable version file is only reported as a warning, since
    environments created before VENV_VER_FILE support do not contain the file.
    """
    try:
        with open(os.path.join(idf_python_env_path, VENV_VER_FILE), 'r') as f:
            read_idf_version = f.read().strip()
            if read_idf_version != idf_version:
                # Fixed typo in the user-facing message: 're-runing' -> 're-running'.
                fatal(f'Python environment is set to {idf_python_env_path} which was generated for '
                      f'ESP-IDF {read_idf_version} instead of the current {idf_version}. '
                      'The issue can be solved by (1) removing the directory and re-running the install script, '
                      'or (2) unsetting the IDF_PYTHON_ENV_PATH environment variable, or (3) '
                      're-running the install script from a clean shell where an ESP-IDF environment is '
                      'not active.')
                raise SystemExit(1)
    except OSError as e:
        # perhaps the environment was generated before the support for VENV_VER_FILE was added
        warn(f'The following issue occurred while accessing the ESP-IDF version file in the Python environment: {e}. '
             '(Diagnostic information. It can be ignored.)')
|
2024-02-21 06:46:39 -05:00
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def action_export(args: Any) -> None:
    """
    Exports all necessary environment variables and paths needed for tools used.

    Prints the export statements (format chosen by args.format) to stdout; exits
    with code 1 if a mandatory tool is missing.
    """
    # When a different IDF copy/version is active, emit deactivation commands instead.
    if args.deactivate and different_idf_detected():
        deactivate_statement(args)
        return

    tools_info = load_tools_info()
    tools_info = filter_tools_info(IDFEnv.get_idf_env(), tools_info)
    all_tools_found = True
    export_vars: Dict[str, str] = {}
    paths_to_export = []

    # BUGFIX: the previous nested string literal '{args.tools_json}' inside the
    # f-string replacement field was emitted literally (nested literals are not
    # interpolated); concatenate the actual value instead.
    self_restart_cmd = f'{sys.executable} {__file__}{(" --tools-json " + args.tools_json) if args.tools_json else ""}'
    self_restart_cmd = to_shell_specific_paths([self_restart_cmd])[0]
    prefer_system_hint = '' if IDF_TOOLS_EXPORT_CMD else f' To use it, run \'{self_restart_cmd} export --prefer-system\''
    install_cmd = to_shell_specific_paths([IDF_TOOLS_INSTALL_CMD])[0] if IDF_TOOLS_INSTALL_CMD else f'{self_restart_cmd} install'

    for name, tool in tools_info.items():
        if tool.get_install_type() == IDFTool.INSTALL_NEVER:
            continue
        tool_export_paths, tool_export_vars, tool_found = process_tool(tool, name, args, install_cmd, prefer_system_hint)
        if not tool_found:
            all_tools_found = False
        paths_to_export += tool_export_paths
        export_vars = {**export_vars, **tool_export_vars}

    if not all_tools_found:
        raise SystemExit(1)

    current_path = os.getenv('PATH')
    idf_python_env_path, idf_python_export_path, virtualenv_python, _ = get_python_env_path()
    if os.path.exists(virtualenv_python):
        idf_python_env_path = to_shell_specific_paths([idf_python_env_path])[0]
        if os.getenv('IDF_PYTHON_ENV_PATH') != idf_python_env_path:
            export_vars['IDF_PYTHON_ENV_PATH'] = to_shell_specific_paths([idf_python_env_path])[0]
        if current_path and idf_python_export_path not in current_path:  # getenv can return None
            paths_to_export.append(idf_python_export_path)

    idf_version = get_idf_version()
    if os.getenv('ESP_IDF_VERSION') != idf_version:
        export_vars['ESP_IDF_VERSION'] = idf_version

    check_python_venv_compatibility(idf_python_env_path, idf_version)

    idf_tools_dir = os.path.join(g.idf_path, 'tools')  # type: ignore
    idf_tools_dir = to_shell_specific_paths([idf_tools_dir])[0]
    if current_path and idf_tools_dir not in current_path:
        paths_to_export.append(idf_tools_dir)

    if sys.platform == 'win32':
        old_path = '%PATH%'
        path_sep = ';'
    else:
        old_path = '$PATH'
        path_sep = ':'

    export_format, export_sep = get_export_format_and_separator(args)

    if paths_to_export:
        export_vars['PATH'] = path_sep.join(to_shell_specific_paths(paths_to_export) + [old_path])
        # Correct PATH order check for Windows platform
        # idf-exe has to be before \tools in PATH
        if sys.platform == 'win32':
            paths_to_check = rf'{export_vars["PATH"]}{os.environ["PATH"]}'
            try:
                if paths_to_check.index(r'\tools;') < paths_to_check.index(r'\idf-exe'):
                    warn('The PATH is not in correct order (idf-exe should be before esp-idf\\tools)')
            except ValueError:
                fatal(f'Both of the directories (..\\idf-exe\\.. and ..\\tools) has to be in the PATH:\n\n{paths_to_check}\n')

    if export_vars:
        # if not a copy of export_vars is given to the function, it breaks the formatting string for 'export_statements'
        deactivate_file_path = add_variables_to_deactivate_file(args, export_vars.copy())
        export_vars[ENVState.env_key] = deactivate_file_path
        export_statements = export_sep.join([export_format.format(k, v) for k, v in export_vars.items()])
        print(export_statements)
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_idf_download_url_apply_mirrors(args: Any = None, download_url: str = IDF_DL_URL) -> str:
    """
    Returns URL for ESP-IDF download with applied mirrors if available.
    If original URL pointed to Github and IDF_GITHUB_ASSETS is set, change the source to Espressif's download servers.
    """
    # Apply rewrites in order: explicit prefix map first, then the GitHub assets mirror.
    return apply_github_assets_option(apply_mirror_prefix_map(args, download_url))
|
2020-09-24 19:58:49 -04:00
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def apply_mirror_prefix_map(args: Any, idf_download_url: str) -> str:
    """
    Rewrite URL for given idf_download_url.
    If --mirror-prefix-map flag or IDF_MIRROR_PREFIX_MAP environment variable is given.

    Each map entry has the form '<search><separator><replace>' where <search> is
    treated as a regular expression; the first entry that changes the URL wins.
    """
    new_url = idf_download_url
    mirror_prefix_map = None
    # Environment variable provides a ';'-separated list of mappings.
    mirror_prefix_map_env = os.getenv('IDF_MIRROR_PREFIX_MAP')
    if mirror_prefix_map_env:
        mirror_prefix_map = mirror_prefix_map_env.split(';')
    # The command-line flag (maintainer-only) takes precedence over the env var.
    if IDF_MAINTAINER and args and args.mirror_prefix_map:
        if mirror_prefix_map:
            warn('Both IDF_MIRROR_PREFIX_MAP environment variable and --mirror-prefix-map flag are specified, '
                 'will use the value from the command line.')
        mirror_prefix_map = args.mirror_prefix_map
    if mirror_prefix_map:
        for item in mirror_prefix_map:
            if URL_PREFIX_MAP_SEPARATOR not in item:
                warn(f'invalid mirror-prefix-map item (missing \'{URL_PREFIX_MAP_SEPARATOR}\') {item}')
                continue
            search, replace = item.split(URL_PREFIX_MAP_SEPARATOR, 1)
            replace = replace.replace('\\', '\\\\')  # On windows replace single \ with double \\
            # Always substitute against the original URL; stop at the first mapping
            # that actually changes it.
            new_url = re.sub(search, replace, idf_download_url)
            if new_url != idf_download_url:
                info(f'Changed download URL: {idf_download_url} => {new_url}')
                break
    return new_url
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def apply_github_assets_option(idf_download_url: str) -> str:
    """
    Rewrite URL for given idf_download_url if the download URL is an https://github.com/ URL and the variable
    IDF_GITHUB_ASSETS is set. The github.com part of the URL will be replaced.
    """
    mirror_host = os.environ.get('IDF_GITHUB_ASSETS', '').strip()
    if not mirror_host:
        # no IDF_GITHUB_ASSETS or variable exists but is empty
        return idf_download_url

    # check no URL qualifier in the mirror URL
    if '://' in mirror_host:
        fatal("IDF_GITHUB_ASSETS shouldn't include any URL qualifier, https:// is assumed")
        raise SystemExit(1)

    # Strip any trailing / from the mirror URL
    mirror_host = mirror_host.rstrip('/')

    rewritten = re.sub(r'^https://github.com/', f'https://{mirror_host}/', idf_download_url)
    if rewritten != idf_download_url:
        info(f'Using GitHub assets mirror for URL: {idf_download_url} => {rewritten}')
    return rewritten
|
2020-09-24 19:58:49 -04:00
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_tools_spec_and_platform_info(selected_platform: str, targets: List[str], tools_spec: List[str],
                                     quiet: bool = False) -> Tuple[List[str], Dict[str, IDFTool]]:
    """
    Returns tools_spec list and dict of tools for selected platform in form tool_name : IDFTool object.

    NOTE: If this function is not called from action_download, but is used just for detecting active tools,
    info about downloading is unwanted; the ``quiet`` parameter temporarily overrides g.quiet for that.
    """
    # Save the previous quiet setting *before* entering the try block, so the
    # finally clause can always restore it and can never raise NameError itself.
    # (The stale 'global global_quiet' declaration was removed: the flag lives
    # in g.quiet, not in a module-level variable of that name.)
    old_global_quiet = g.quiet
    try:
        g.quiet = quiet
        tools_info = load_tools_info()
        tools_info_for_platform = OrderedDict()
        for name, tool_obj in tools_info.items():
            # Re-target each tool description to the requested platform.
            tools_info_for_platform[name] = tool_obj.copy_for_platform(selected_platform)

        tools_spec = expand_tools_arg(tools_spec, tools_info_for_platform, targets)
        info(f'Downloading tools for {selected_platform}: {", ".join(tools_spec)}')
    finally:
        # Restore the caller's verbosity even if loading/expanding failed.
        g.quiet = old_global_quiet

    return tools_spec, tools_info_for_platform
|
|
|
|
|
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
def action_download(args):  # type: ignore
    """
    Saves current IDF environment and for every tools in tools_spec, downloads the right archive for tools version and target platform, if possible.
    If not, prints appropriate message to stderr and raise SystemExit() exception.
    """
    tools_spec = parse_tools_arg(args.tools)

    targets: List[str] = []
    # Saving IDFEnv::targets for selected ESP_targets if all tools have been specified
    if 'required' in tools_spec or 'all' in tools_spec:
        idf_env_obj = IDFEnv.get_idf_env()
        targets = add_and_check_targets(idf_env_obj, args.targets)
        try:
            idf_env_obj.save()
        except OSError as err:
            # Roll back the target that could not be persisted.
            # NOTE(review): args.targets looks like the raw CLI string while `targets`
            # is a parsed list — confirm this membership test ever matches.
            if args.targets in targets:
                targets.remove(args.targets)
            warn(f'Downloading tools for targets was not successful with error: {err}')
    # Taking into account ESP_targets but not saving them for individual tools (specified list of tools)
    else:
        targets = parse_targets_arg(args.targets)

    platform = parse_platform_arg(args.platform)

    # Expand 'required'/'all' specs into concrete tool names for the chosen platform.
    tools_spec, tools_info_for_platform = get_tools_spec_and_platform_info(platform, targets, tools_spec)
    for tool_spec in tools_spec:
        # A spec is either 'name' (use recommended version) or 'name@version'.
        if '@' not in tool_spec:
            tool_name = tool_spec
            tool_version = None
        else:
            tool_name, tool_version = tool_spec.split('@', 1)
        if tool_name not in tools_info_for_platform:
            fatal(f'unknown tool name: {tool_name}')
            raise SystemExit(1)
        tool_obj = tools_info_for_platform[tool_name]
        if tool_version is not None and tool_version not in tool_obj.versions:
            fatal(f'unknown version for tool {tool_name}: {tool_version}')
            raise SystemExit(1)
        if tool_version is None:
            tool_version = tool_obj.get_recommended_version()
        if tool_version is None:
            # No recommended version exists for this platform at all.
            fatal(f'tool {tool_name} not found for {platform} platform')
            raise SystemExit(1)
        tool_spec = f'{tool_name}@{tool_version}'

        info(f'Downloading {tool_spec}')
        # Rewrite the download URL through any configured mirror before fetching.
        _idf_tool_obj = tool_obj.versions[tool_version].get_download_for_platform(platform)
        _idf_tool_obj.url = get_idf_download_url_apply_mirrors(args, _idf_tool_obj.url)

        tool_obj.download(tool_version)
|
|
|
|
|
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
def action_install(args):  # type: ignore
    """
    Saves current IDF environment and for every tool in tools_spec, installs the tool from the archive downloaded before, if possible.
    If not, raises SystemExit.
    """
    tools_spec = parse_tools_arg(args.tools)

    targets: List[str] = []
    # Saving IDFEnv::targets for selected ESP_targets if all tools have been specified
    if 'required' in tools_spec or 'all' in tools_spec:
        idf_env_obj = IDFEnv.get_idf_env()
        targets = add_and_check_targets(idf_env_obj, args.targets)
        try:
            idf_env_obj.save()
        except OSError as err:
            # NOTE(review): args.targets looks like the raw CLI string while `targets`
            # is a parsed list — confirm this membership test ever matches.
            if args.targets in targets:
                targets.remove(args.targets)
            warn(f'Installing targets was not successful with error: {err}')
        info(f'Selected targets are: {", ".join(targets)}')
    # Taking into account ESP_targets but not saving them for individual tools (specified list of tools)
    else:
        targets = parse_targets_arg(args.targets)

    info(f'Current system platform: {CURRENT_PLATFORM}')
    tools_info = load_tools_info()
    # Expand 'required'/'all' specs into concrete tool names for the selected targets.
    tools_spec = expand_tools_arg(tools_spec, tools_info, targets)
    info(f'Installing tools: {", ".join(tools_spec)}')
    for tool_spec in tools_spec:
        # A spec is either 'name' (use recommended version) or 'name@version'.
        if '@' not in tool_spec:
            tool_name = tool_spec
            tool_version = None
        else:
            tool_name, tool_version = tool_spec.split('@', 1)
        if tool_name not in tools_info:
            fatal(f'unknown tool name: {tool_name}')
            raise SystemExit(1)
        tool_obj = tools_info[tool_name]
        if not tool_obj.compatible_with_platform():
            fatal(f'tool {tool_name} does not have versions compatible with platform {CURRENT_PLATFORM}')
            raise SystemExit(1)
        if tool_version is not None and tool_version not in tool_obj.versions:
            fatal(f'unknown version for tool {tool_name}: {tool_version}')
            raise SystemExit(1)
        if tool_version is None:
            tool_version = tool_obj.get_recommended_version()
        assert tool_version is not None
        # Detecting an already-installed but broken binary must not abort the install;
        # the tool will simply be (re)installed below.
        try:
            tool_obj.find_installed_versions()
        except ToolBinaryError:
            pass
        tool_spec = f'{tool_name}@{tool_version}'
        if tool_version in tool_obj.versions_installed:
            info(f'Skipping {tool_spec} (already installed)')
            continue

        info(f'Installing {tool_spec}')
        # Rewrite the download URL through any configured mirror before fetching.
        _idf_tool_obj = tool_obj.versions[tool_version].get_download_for_platform(PYTHON_PLATFORM)
        _idf_tool_obj.url = get_idf_download_url_apply_mirrors(args, _idf_tool_obj.url)

        tool_obj.download(tool_version)
        tool_obj.install(tool_version)
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_wheels_dir() -> Optional[str]:
    """
    Return the on-disk directory of the 'idf-python-wheels' package, or None
    when the package is unknown, has no recommended version, or the directory
    does not exist.
    """
    package_name = 'idf-python-wheels'
    all_tools = load_tools_info()
    package = all_tools.get(package_name)
    if package is None:
        return None
    version = package.get_recommended_version()
    if version is None:
        return None
    candidate = package.get_path_for_version(version)
    return candidate if os.path.exists(candidate) else None
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_requirements(new_features: str) -> List[str]:
    """
    Returns list of path for requirements.txt for given feature list.
    """
    idf_env_obj = IDFEnv.get_idf_env()
    # Merge the requested features into the stored environment and validate them.
    features = process_and_check_features(idf_env_obj, new_features)
    try:
        idf_env_obj.save()
    except OSError as err:
        # Persisting failed: drop the just-added feature again (best effort).
        # NOTE(review): new_features is the raw argument string while `features` holds
        # individual feature names — confirm this membership test behaves as intended.
        if new_features in features:
            features.remove(new_features)
        warn(f'Updating features was not successful with error: {err}')
    return [feature_to_requirements_path(feature) for feature in features]
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def get_constraints(idf_version: str, online: bool = True) -> str:
    """
    Download constraints file for specified IDF version if it was not downloaded recently (1 day),
    check success and place it in constraints file location.

    When ``online`` is False, only a previously downloaded file is accepted;
    its absence is fatal (SystemExit).
    """
    idf_download_url = get_idf_download_url_apply_mirrors()
    constraint_file = f'espidf.constraints.v{idf_version}.txt'
    constraint_path = os.path.join(g.idf_tools_path, constraint_file)
    constraint_url = '/'.join([idf_download_url, constraint_file])
    # Download into a temp file first so a partial download never clobbers a good copy.
    temp_path = f'{constraint_path}.tmp'

    if not online:
        if os.path.isfile(constraint_path):
            return constraint_path
        else:
            fatal(f'{constraint_path} doesn\'t exist. Perhaps you\'ve forgotten to run the install scripts. '
                  f'Please check the installation guide for more information.')
            raise SystemExit(1)

    mkdir_p(os.path.dirname(temp_path))

    try:
        # Skip the download entirely if the cached copy is younger than one day.
        age = datetime.date.today() - datetime.date.fromtimestamp(os.path.getmtime(constraint_path))
        if age < datetime.timedelta(days=1):
            info(f'Skipping the download of {constraint_path} because it was downloaded recently.')
            return constraint_path
    except OSError:
        # doesn't exist or inaccessible
        pass

    for _ in range(DOWNLOAD_RETRY_COUNT):
        err = download(constraint_url, temp_path)
        if not os.path.isfile(temp_path):
            warn(f'Download failure: {err}')
            warn(f'Failed to download {constraint_url} to {temp_path}')
            continue
        if os.path.isfile(constraint_path):
            # Windows cannot rename to existing file. It needs to be deleted.
            os.remove(constraint_path)
        rename_with_retry(temp_path, constraint_path)
        return constraint_path

    # All retries exhausted; fall back to a stale copy if one exists.
    if os.path.isfile(constraint_path):
        warn('Failed to download, retry count has expired, using a previously downloaded version')
        return constraint_path
    else:
        fatal('Failed to download, and retry count has expired')
        print_hints_on_download_error(str(err))
        info('See the help on how to disable constraints in order to work around this issue.')
        raise SystemExit(1)
|
2021-09-16 10:48:03 -04:00
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def install_legacy_python_virtualenv(path: str) -> None:
    """
    Checks if pip is installed (exits if not), checks whether virtualenv is already installed (and in which version),
    and finally creates virtual environment with python -m virtualenv <virtualenv_options>.

    Used as a fallback when the built-in venv module is unavailable (e.g. embeddable
    Python on Windows).
    """
    # Before creating the virtual environment, check if pip is installed.
    try:
        subprocess.check_call([sys.executable, '-m', 'pip', '--version'])
    except subprocess.CalledProcessError:
        fatal(f'Python interpreter at {sys.executable} doesn\'t have pip installed. '
              'Please check the Getting Started Guides for the steps to install prerequisites for your OS.')
        raise SystemExit(1)

    virtualenv_installed_via_pip = False
    try:
        import virtualenv  # noqa: F401
    except ImportError:
        info('Installing virtualenv')
        subprocess.check_call([sys.executable, '-m', 'pip', 'install', '--user', 'virtualenv'],
                              stdout=sys.stdout, stderr=sys.stderr)
        virtualenv_installed_via_pip = True
        # since we just installed virtualenv via pip, we know that version is recent enough
        # so the version check below is not necessary.

    with_seeder_option = True
    if not virtualenv_installed_via_pip:
        # virtualenv is already present in the system and may have been installed via OS package manager
        # check the version to determine if we should add --seeder option
        try:
            major_ver = int(virtualenv.__version__.split('.')[0])
            if major_ver < 20:
                warn(f'Virtualenv version {virtualenv.__version__} is old, please consider upgrading it')
                with_seeder_option = False
        except (ValueError, NameError, AttributeError, IndexError):
            # Unparsable or missing version info: keep the default (--seeder enabled).
            pass

    info(f'Creating a new Python environment using virtualenv in {path}')
    virtualenv_options = ['--python', sys.executable]
    if with_seeder_option:
        virtualenv_options += ['--seeder', 'pip']

    subprocess.check_call([sys.executable, '-m', 'virtualenv',
                           *virtualenv_options,
                           path],
                          stdout=sys.stdout, stderr=sys.stderr)
|
|
|
|
|
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
def action_install_python_env(args):  # type: ignore
    """
    (Re)installs python virtual environment.
    If Python virtualenv is already installed, checks for errors (missing/corrupted python interpreter, pip...)
    and reinstalls if needed. Removes current virtualenv before reinstalling.
    """
    use_constraints = not args.no_constraints
    reinstall = args.reinstall
    idf_python_env_path, _, virtualenv_python, idf_version = get_python_env_path()

    # Refuse to create a venv from inside another venv (real_prefix covers old
    # virtualenv; base_prefix != prefix covers the stdlib venv module).
    is_virtualenv = hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix)
    if is_virtualenv and (not os.path.exists(idf_python_env_path) or reinstall):
        fatal('This script was called from a virtual environment, can not create a virtual environment again')
        raise SystemExit(1)

    # Sanity-check an existing environment; any failure below forces a reinstall.
    if os.path.exists(virtualenv_python):
        try:
            subprocess.check_call([virtualenv_python, '--version'], stdout=sys.stdout, stderr=sys.stderr)
        except (OSError, subprocess.CalledProcessError):
            # At this point we can reinstall the virtual environment if it is non-functional. This can happen at least
            # when the Python interpreter which was used to create the virtual environment was removed.
            reinstall = True

        try:
            subprocess.check_call([virtualenv_python, '-m', 'pip', '--version'], stdout=sys.stdout, stderr=sys.stderr)
        except subprocess.CalledProcessError:
            warn('pip is not available in the existing virtual environment, new virtual environment will be created.')
            # Reinstallation of the virtual environment could help if pip was installed for the main Python
            reinstall = True

        if sys.platform != 'win32':
            try:
                subprocess.check_call([virtualenv_python, '-c', 'import curses'], stdout=sys.stdout, stderr=sys.stderr)
            except subprocess.CalledProcessError:
                warn('curses can not be imported, new virtual environment will be created.')
                reinstall = True

    if reinstall and os.path.exists(idf_python_env_path):
        warn(f'Removing the existing Python environment in {idf_python_env_path}')
        shutil.rmtree(idf_python_env_path)

    if os.path.exists(virtualenv_python):
        # Keep the existing environment, but verify it matches this IDF version.
        check_python_venv_compatibility(idf_python_env_path, idf_version)
    else:
        if subprocess.run([sys.executable, '-m', 'venv', '-h'], check=False, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL).returncode == 0:
            # venv available
            virtualenv_options = ['--clear']  # delete environment if already exists

            info(f'Creating a new Python environment in {idf_python_env_path}')

            try:
                # Warn if IDF_PYTHON_ENV_PATH points at an environment for a different
                # IDF/Python version (a common leftover from another active shell).
                environ_idf_python_env_path = os.environ['IDF_PYTHON_ENV_PATH']
                correct_env_path = environ_idf_python_env_path.endswith(PYTHON_VENV_DIR_TEMPLATE.format(idf_version,
                                                                                                        PYTHON_VER_MAJOR_MINOR))
                if not correct_env_path and re.search(PYTHON_VENV_DIR_TEMPLATE.format(r'\d+\.\d+', r'\d+\.\d+'),
                                                      environ_idf_python_env_path):
                    warn(f'IDF_PYTHON_ENV_PATH is set to {environ_idf_python_env_path} but it does not match '
                         f'the detected {idf_version} ESP-IDF version and/or the used {PYTHON_VER_MAJOR_MINOR} '
                         'version of Python. If you have not set IDF_PYTHON_ENV_PATH intentionally then it is '
                         'recommended to re-run this script from a clean shell where an ESP-IDF environment is '
                         'not active.')

            except KeyError:
                # if IDF_PYTHON_ENV_PATH not defined then the above checks can be skipped
                pass

            subprocess.check_call([sys.executable, '-m', 'venv',
                                   *virtualenv_options,
                                   idf_python_env_path],
                                  stdout=sys.stdout, stderr=sys.stderr)

            # Record which IDF version this venv was created for (diagnostic only).
            try:
                with open(os.path.join(idf_python_env_path, VENV_VER_FILE), 'w') as f:
                    f.write(idf_version)
            except OSError as e:
                warn(f'The following issue occurred while generating the ESP-IDF version file in the Python environment: {e}. '
                     '(Diagnostic information. It can be ignored.)')

        else:
            # The embeddable Python for Windows doesn't have the built-in venv module
            install_legacy_python_virtualenv(idf_python_env_path)

    env_copy = os.environ.copy()
    if env_copy.get('PIP_USER') == 'yes':
        warn('Found PIP_USER="yes" in the environment. Disabling PIP_USER in this shell to install packages into a virtual environment.')
        env_copy['PIP_USER'] = 'no'

    constraint_file = get_constraints(idf_version) if use_constraints else None

    info('Upgrading pip and setuptools...')
    run_args = [virtualenv_python, '-m', 'pip', 'install', '--upgrade', 'pip', 'setuptools']
    if constraint_file:
        run_args += ['--constraint', constraint_file]
    subprocess.check_call(run_args, stdout=sys.stdout, stderr=sys.stderr, env=env_copy)

    # Build the main pip invocation: requirements, constraints, extra wheel sources.
    run_args = [virtualenv_python, '-m', 'pip', 'install', '--no-warn-script-location']
    requirements_file_list = get_requirements(args.features)
    for requirement_file in requirements_file_list:
        run_args += ['-r', requirement_file]
    if constraint_file:
        run_args += ['--upgrade', '--constraint', constraint_file]
    if args.extra_wheels_dir:
        run_args += ['--find-links', args.extra_wheels_dir]
    if args.no_index:
        run_args += ['--no-index']
    if args.extra_wheels_url:
        run_args += ['--extra-index-url', args.extra_wheels_url]

    wheels_dir = get_wheels_dir()
    if wheels_dir is not None:
        run_args += ['--find-links', wheels_dir]

    info('Installing Python packages')
    if constraint_file:
        info(f' Constraint file: {constraint_file}')
    info(' Requirement files:')
    info(os.linesep.join(f' - {path}' for path in requirements_file_list))
    subprocess.check_call(run_args, stdout=sys.stdout, stderr=sys.stderr, env=env_copy)
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
|
2021-09-16 10:48:03 -04:00
|
|
|
def action_check_python_dependencies(args):  # type: ignore
    """
    Checks if all the dependencies (from requirements, constraints...) are installed properly.
    Raises SystemExit if not.
    """
    use_constraints = not args.no_constraints
    req_paths = get_requirements('')  # no new features -> just detect the existing ones

    _, _, virtualenv_python, idf_version = get_python_env_path()

    if not os.path.isfile(virtualenv_python):
        fatal(f'{virtualenv_python} doesn\'t exist! Please run the install script or "idf_tools.py install-python-env" in order to create it')
        raise SystemExit(1)

    if use_constraints:
        constr_path = get_constraints(idf_version, online=False)  # keep offline for checking
        info(f'Constraint file: {constr_path}')

    info('Requirement files:')
    info(os.linesep.join(f' - {path}' for path in req_paths))

    info(f'Python being checked: {virtualenv_python}')

    # The dependency checker will be invoked with virtualenv_python. idf_tools.py could have been invoked with a
    # different one, therefore, importing is not a suitable option.
    dep_check_cmd = [virtualenv_python,
                     os.path.join(g.idf_path,
                                  'tools',
                                  'check_python_dependencies.py')]

    if use_constraints:
        dep_check_cmd += ['-c', constr_path]

    for req_path in req_paths:
        dep_check_cmd += ['-r', req_path]

    try:
        ret = subprocess.run(dep_check_cmd)
        if ret and ret.returncode:
            # returncode is a negative number and system exit output is usually expected be positive.
            # NOTE(review): returncode is negative only when the child was killed by a
            # signal; on a plain non-zero exit this negation yields a negative exit
            # status — confirm the intended behavior.
            raise SystemExit(-ret.returncode)
    except FileNotFoundError:
        # Python environment not yet created
        fatal('Requirements are not satisfied!')
        raise SystemExit(1)
|
|
|
|
|
|
|
|
|
2022-03-15 15:02:37 -04:00
|
|
|
class ChecksumCalculator():
    """
    Iterable over local artifact files, yielding a (size, sha256, basename)
    triple for each file.
    """
    def __init__(self, files: List[str]) -> None:
        self.files = files

    def __iter__(self) -> Iterator[Tuple[int, str, str]]:
        for path in self.files:
            size, digest = get_file_size_sha256(path)
            yield size, digest, os.path.basename(path)
|
|
|
|
|
|
|
|
|
|
|
|
class ChecksumParsingError(RuntimeError):
    """Raised when checksum cannot be parsed."""
|
|
|
|
|
|
|
|
|
|
|
|
class ChecksumFileParser():
    """
    A class used to get size/sha256/filename of artifact using checksum-file with format:
    # <artifact-filename>: <size> bytes
    <sha256sum-string> *<artifact-filename>
    ... (2 lines for every artifact) ...
    """
    def __init__(self, tool_name: str, url: str) -> None:
        # tool_name filters which artifact entries this parser yields.
        self.tool_name = tool_name

        sha256_file_tmp = os.path.join(g.idf_tools_path, 'tools', 'add-version.sha256.tmp')
        # Default: treat `url` as a local file path.
        sha256_file = os.path.abspath(url)

        # download sha256 file if URL presented
        if urlparse(url).scheme:
            sha256_file = sha256_file_tmp
            download(url, sha256_file)

        # self.checksum holds the raw checksum file lines (bytes-line/hash-line pairs).
        with open(sha256_file, 'r') as f:
            self.checksum = f.read().splitlines()

        # remove temp file
        if os.path.isfile(sha256_file_tmp):
            os.remove(sha256_file_tmp)

    def parseLine(self, regex: str, line: str) -> str:
        """Return the first capture group of `regex` in `line`; raise ChecksumParsingError on no match."""
        match = re.search(regex, line)
        if not match:
            raise ChecksumParsingError(f'Can not parse line "{line}" with regex "{regex}"')
        return match.group(1)

    # parse checksum file with formatting used by crosstool-ng, gdb, ... releases
    # e.g. https://github.com/espressif/crosstool-NG/releases/download/esp-2021r2/crosstool-NG-esp-2021r2-checksum.sha256
    def __iter__(self) -> Iterator[Tuple[int, str, str]]:
        try:
            # Lines come in pairs: even index = "# <file>: <size> bytes",
            # odd index = "<sha256> *<file>".
            for bytes_str, hash_str in zip(self.checksum[0::2], self.checksum[1::2]):
                bytes_filename = self.parseLine(r'^# (\S*):', bytes_str)
                hash_filename = self.parseLine(r'^\S* [\* ](\S*)', hash_str)
                if hash_filename != bytes_filename:
                    fatal('filename in hash-line and in bytes-line are not the same')
                    raise SystemExit(1)
                # crosstool-ng checksum file contains info about few tools
                # e.g.: "xtensa-esp32-elf", "xtensa-esp32s2-elf"
                # filter records for file by tool_name to avoid mismatch
                if not hash_filename.startswith(self.tool_name):
                    continue
                size = self.parseLine(r'^# \S*: (\d*) bytes', bytes_str)
                sha256 = self.parseLine(r'^(\S*) ', hash_str)
                yield int(size), sha256, hash_filename
        except (TypeError, AttributeError) as err:
            fatal(f'Error while parsing, check checksum file ({err})')
            raise SystemExit(1)
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def action_add_version(args: Any) -> None:
    """
    Adds new version of the tool to IDFTool entry together with download entry and updating json dump.
    """
    tools_info = load_tools_info()
    tool_name = args.tool
    tool_obj = tools_info.get(tool_name)
    if not tool_obj:
        # Unknown tool: create a placeholder entry; TODO_MESSAGE fields must be
        # filled in by the maintainer afterwards.
        info(f'Creating new tool entry for {tool_name}')
        tool_obj = IDFTool(tool_name, TODO_MESSAGE, IDFTool.INSTALL_ALWAYS,
                           TODO_MESSAGE, TODO_MESSAGE, [TODO_MESSAGE], TODO_MESSAGE,
                           [TODO_MESSAGE])
        tools_info[tool_name] = tool_obj
    version = args.version
    version_status = IDFToolVersion.STATUS_SUPPORTED
    if args.override and len(tool_obj.versions):
        # --override replaces all existing versions; the new one becomes recommended.
        tool_obj.drop_versions()
        version_status = IDFToolVersion.STATUS_RECOMMENDED
    version_obj = tool_obj.versions.get(version)
    if not version_obj:
        info(f'Creating new version {version}')
        version_obj = IDFToolVersion(version, version_status)
        tool_obj.versions[version] = version_obj
    url_prefix = args.url_prefix or f'https://{TODO_MESSAGE}/'
    # Checksums come either from a checksum file (possibly downloaded) or are
    # computed locally from the artifact files themselves.
    checksum_info: ChecksumFileParser = (ChecksumFileParser(tool_name, args.checksum_file)
                                         if args.checksum_file
                                         else ChecksumCalculator(args.artifact_file))  # type: ignore
    for file_size, file_sha256, file_name in checksum_info:
        # Guess which platform this file is for
        try:
            found_platform = Platforms.get_by_filename(file_name)
        except ValueError:
            info(f'Could not guess platform for file {file_name}')
            found_platform = TODO_MESSAGE
        url = urljoin(url_prefix, file_name)
        info(f'Adding download for platform {found_platform}')
        info(f'  size: {file_size}')
        info(f'  SHA256: {file_sha256}')
        info(f'  URL: {url}')
        version_obj.add_download(found_platform, url, file_size, file_sha256)
    json_str = dump_tools_json(tools_info)
    if not args.output:
        args.output = os.path.join(g.idf_path, TOOLS_FILE_NEW)  # type: ignore
    with open(args.output, 'w') as f:
        f.write(json_str)
        f.write('\n')
    info(f'Wrote output to {args.output}')
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
def action_rewrite(args):  # type: ignore
    """
    Re-serialize the known tools metadata and write the JSON dump into the file
    given by args.output (defaults to TOOLS_FILE_NEW under the IDF path).
    """
    serialized = dump_tools_json(load_tools_info())
    if not args.output:
        args.output = os.path.join(g.idf_path, TOOLS_FILE_NEW)
    with open(args.output, 'w') as out:
        out.write(serialized + '\n')
    info(f'Wrote output to {args.output}')
|
2019-04-17 03:30:30 -04:00
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def action_uninstall(args: Any) -> None:
    """
    Print or remove installed tools versions, that are not used by active ESP-IDF version anymore.
    Additionally remove all older versions of previously downloaded archives.

    With --dry-run, only print the command hint for removing the unused tools.
    With --remove-archives, also delete downloaded archives in the dist
    directory that the active tool set no longer references.
    """
    tools_info = load_tools_info()
    tools_path = os.path.join(g.idf_tools_path, 'tools')
    dist_path = os.path.join(g.idf_tools_path, 'dist')
    installed_tools = os.listdir(tools_path) if os.path.isdir(tools_path) else []

    # Map: tool name -> list of installed versions that are not the recommended one.
    # A list of [''] marks the whole tool directory for removal.
    unused_tools_versions = {}
    for tool in installed_tools:
        tool_versions = os.listdir(os.path.join(tools_path, tool)) if os.path.isdir(os.path.join(tools_path, tool)) else []
        try:
            unused_versions = ([x for x in tool_versions if x != tools_info[tool].get_recommended_version()])
        except KeyError:  # When tool that is not supported by tools_info (tools.json) anymore, remove the whole tool file
            unused_versions = ['']
        if unused_versions:
            unused_tools_versions[tool] = unused_versions

    # Keeping tools added by windows installer
    KEEP_WIN_TOOLS = ['idf-git', 'idf-python']
    for tool in KEEP_WIN_TOOLS:
        if tool in unused_tools_versions:
            unused_tools_versions.pop(tool)

    # Print unused tools.
    if args.dry_run:
        if unused_tools_versions:
            print('For removing old versions of {} use command \'{} {} {}\''.format(
                ', '.join(unused_tools_versions), get_python_exe_and_subdir()[0],
                os.path.join(g.idf_path, 'tools', 'idf_tools.py'), 'uninstall'))
        return

    # Remove installed tools that are not used by current ESP-IDF version.
    for tool in unused_tools_versions:
        for version in unused_tools_versions[tool]:
            try:
                if version:
                    path_to_remove = os.path.join(tools_path, tool, version)
                else:
                    path_to_remove = os.path.join(tools_path, tool)
                shutil.rmtree(path_to_remove)
                info(f'{path_to_remove} was removed.')
            except OSError as error:
                warn(f'{error.filename} can not be removed because {error.strerror}.')

    # Remove old archives versions and archives that are not used by the current ESP-IDF version.
    if args.remove_archives:
        tools_spec, tools_info_for_platform = get_tools_spec_and_platform_info(CURRENT_PLATFORM, ['all'], ['all'], quiet=True)
        used_archives = []

        # Detect used active archives
        for tool_spec in tools_spec:
            if '@' not in tool_spec:
                tool_name = tool_spec
                tool_version = None
            else:
                tool_name, tool_version = tool_spec.split('@', 1)
            tool_obj = tools_info_for_platform[tool_name]
            if tool_version is None:
                tool_version = tool_obj.get_recommended_version()
            # mypy-checks
            if tool_version is not None:
                archive_version = tool_obj.versions[tool_version].get_download_for_platform(CURRENT_PLATFORM)
                if archive_version is not None:
                    # Fix: record the archive only when a download exists for this
                    # platform. Previously os.path.basename() was applied outside
                    # this branch, which could raise NameError (first iteration with
                    # no download) or append a stale URL from a previous iteration.
                    used_archives.append(os.path.basename(archive_version.url))

        # Fix: the dist directory may not exist (no archive was ever downloaded,
        # or it was already cleaned) -- guard the listdir like tools_path above
        # instead of crashing with FileNotFoundError.
        downloaded_archives = os.listdir(dist_path) if os.path.isdir(dist_path) else []
        for archive in downloaded_archives:
            if archive not in used_archives:
                os.remove(os.path.join(dist_path, archive))
                info(f'{os.path.join(dist_path, archive)} was removed.')
|
2021-12-13 10:45:11 -05:00
|
|
|
|
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
def action_validate(args):  # type: ignore
    """
    Validate whether everything is installed correctly.

    Loads tools.json and its schema from the IDF directory and runs jsonschema
    validation; requires the optional jsonschema package to be installed.
    """
    try:
        import jsonschema
    except ImportError:
        fatal('You need to install jsonschema package to use validate command')
        raise SystemExit(1)

    def _load_json_from_idf(file_name):  # type: ignore
        # Read and parse a JSON document located relative to the IDF root.
        with open(os.path.join(g.idf_path, file_name), 'r') as handle:
            return json.load(handle)

    tools_document = _load_json_from_idf(TOOLS_FILE)
    schema_document = _load_json_from_idf(TOOLS_SCHEMA_FILE)
    # on failure, this will raise an exception with a fairly verbose diagnostic message
    jsonschema.validate(tools_document, schema_document)
|
|
|
|
|
|
|
|
|
2021-03-04 03:53:47 -05:00
|
|
|
def action_gen_doc(args):  # type: ignore
    """
    Prints basic documentation and information about every tool from tool_info.

    Emits reStructuredText to args.output: one section per tool with license,
    info link, and a per-platform table of recommended-version downloads.
    """
    f = args.output
    tools_info = load_tools_info()

    def print_out(text: str) -> None:
        # Write one line of RST output to the selected output stream.
        f.write(f'{text}\n')

    # Define a zero-width-space substitution used by the generated RST page.
    print_out('.. |zwsp| unicode:: U+200B')
    print_out('   :trim:')
    print_out('')

    idf_gh_url = 'https://github.com/espressif/esp-idf'
    for tool_name, tool_obj in tools_info.items():
        info_url = tool_obj.options.info_url
        # Rewrite links pointing into the IDF GitHub tree as :idf:`...` roles,
        # so the docs reference the in-repo path instead of a fixed branch URL.
        if f'{idf_gh_url}/tree' in info_url:
            info_url = re.sub(f'{idf_gh_url}/tree/\\w+/(.*)', r':idf:`\1`', info_url)

        # SPDX license identifiers resolve to their canonical description pages.
        license_url = f'https://spdx.org/licenses/{tool_obj.options.license}'

        # Section header, tool description, per-tool notes include, license and
        # info links, followed by the header of the platform/download table.
        print_out("""
.. _tool-{name}:

{name}
{underline}

{description}

.. include:: idf-tools-notes.inc
   :start-after: tool-{name}-notes
   :end-before: ---

License: `{license} <{license_url}>`_

More info: {info_url}

.. list-table::
   :widths: 10 10 80
   :header-rows: 1

   * - Platform
     - Required
     - Download
""".rstrip().format(name=tool_name,
                    underline=args.heading_underline_char * len(tool_name),
                    description=tool_obj.description,
                    license=tool_obj.options.license,
                    license_url=license_url,
                    info_url=info_url))

        for platform_name in sorted(tool_obj.get_supported_platforms()):
            platform_tool = tool_obj.copy_for_platform(platform_name)
            install_type = platform_tool.get_install_type()
            # Tools never installed on this platform are left out of the table.
            if install_type == IDFTool.INSTALL_NEVER:
                continue
            elif install_type == IDFTool.INSTALL_ALWAYS:
                install_type_str = 'required'
            elif install_type == IDFTool.INSTALL_ON_REQUEST:
                install_type_str = 'optional'
            else:
                raise NotImplementedError()

            # Document the recommended version's download for this platform.
            version = platform_tool.get_recommended_version()
            version_obj = platform_tool.versions[version]
            download_obj = version_obj.get_download_for_platform(platform_name)

            # Note: keep the list entries indented to the same number of columns
            # as the list header above.
            print_out("""
   * - {}
     - {}
     - {}

       .. rst-class:: tool-sha256

          SHA256: {}
""".strip('\n').format(platform_name, install_type_str, download_obj.url, download_obj.sha256))

        print_out('')
    print_out('')
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def action_check_tool_supported(args: Any) -> None:
    """
    Print "True"/"False" to stdout as a result that tool is supported in IDF.
    Print error message to stderr otherwise and set exit code to 1.
    """
    try:
        for tool in load_tools_info().values():
            if tool.name != args.tool_name:
                continue
            # check_version() reports whether the executable's version is accepted.
            print(tool.check_version(args.exec_path))
            break
    except (RuntimeError, ToolNotFoundError, ToolExecError) as err:
        fatal(f'Failed to check tool support: (name: {args.tool_name}, exec: {args.exec_path})')
        fatal(f'{err}')
        raise SystemExit(1)
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def action_get_tool_supported_versions(args: Any) -> None:
    """
    Print supported versions of a tool to stdout.
    Print error message to stderr otherwise and set exit code to 1.
    """
    try:
        for tool in load_tools_info().values():
            if tool.name != args.tool_name:
                continue
            # Print all version identifiers known for this tool in tools.json.
            print(list(tool.versions.keys()))
            break
    except RuntimeError as err:
        fatal(f'Failed to get tool supported versions. (tool: {args.tool_name})')
        fatal(f'{err}')
        raise SystemExit(1)
|
|
|
|
|
|
|
|
|
2024-01-19 08:19:12 -05:00
|
|
|
def main(argv: List[str]) -> None:
    """
    Parse command-line arguments, set up globals, and dispatch to the action.

    Builds the argparse command tree (list/check/export/install/download/
    uninstall/install-python-env/rewrite/validate/gen-doc/...), fills the
    global state object ``g`` from arguments and environment variables, and
    invokes the matching ``action_<name>`` function.
    """
    parser = argparse.ArgumentParser()

    parser.add_argument('--quiet', help='Don\'t output diagnostic messages to stdout/stderr', action='store_true')
    parser.add_argument('--non-interactive', help='Don\'t output interactive messages and questions', action='store_true')
    parser.add_argument('--tools-json', help='Path to the tools.json file to use')
    parser.add_argument('--idf-path', help='ESP-IDF path to use')

    # Each subcommand maps to an action_* function selected at the bottom.
    subparsers = parser.add_subparsers(dest='action')
    list_parser = subparsers.add_parser('list', help='List tools and versions available')
    list_parser.add_argument('--outdated', help='Print only outdated installed tools', action='store_true')
    subparsers.add_parser('check', help='Print summary of tools installed or found in PATH')
    export = subparsers.add_parser('export', help='Output command for setting tool paths, suitable for shell')
    export.add_argument('--format', choices=[EXPORT_SHELL, EXPORT_KEY_VALUE], default=EXPORT_SHELL,
                        help=('Format of the output: shell (suitable for printing into shell), '
                              'or key-value (suitable for parsing by other tools'))
    export.add_argument('--prefer-system', help=('Normally, if the tool is already present in PATH, '
                                                 'but has an unsupported version, a version from the tools directory '
                                                 'will be used instead. If this flag is given, the version in PATH '
                                                 'will be used.'), action='store_true')
    export.add_argument('--deactivate', help='Output command for deactivate different ESP-IDF version, previously set with export', action='store_true')
    # --unset is a hidden alias of --deactivate (see the args.unset handling below).
    export.add_argument('--unset', help=argparse.SUPPRESS, action='store_true')
    export.add_argument('--add_paths_extras', help='Add idf-related path extras for deactivate option')
    install = subparsers.add_parser('install', help='Download and install tools into the tools directory')
    install.add_argument('tools', metavar='TOOL', nargs='*', default=['required'],
                         help=('Tools to install.\n'
                               'To install a specific version use <tool_name>@<version> syntax. '
                               'To install tools by pattern use wildcards in <tool_name_pattern>. '
                               'Use empty or \'required\' to install required tools, not optional ones. '
                               'Use \'all\' to install all tools, including the optional ones.'))
    install.add_argument('--targets', default='all', help=('A comma separated list of desired chip targets for installing. '
                                                           'It defaults to installing all supported targets.'))

    download = subparsers.add_parser('download', help='Download the tools into the dist directory')
    download.add_argument('--platform', default=CURRENT_PLATFORM, help='Platform to download the tools for')
    download.add_argument('tools', metavar='TOOL', nargs='*', default=['required'],
                          help=('Tools to download. '
                                'To download a specific version use <tool_name>@<version> syntax. '
                                'To download tools by pattern use wildcards in <tool_name_pattern> . '
                                'Use empty or \'required\' to download required tools, not optional ones. '
                                'Use \'all\' to download all tools, including the optional ones.'))
    download.add_argument('--targets', default='all', help=('A comma separated list of desired chip targets for installing. '
                                                            ' It defaults to installing all supported targets.'))

    uninstall = subparsers.add_parser('uninstall', help='Remove installed tools, that are not used by current version of ESP-IDF.')
    uninstall.add_argument('--dry-run', help='Print unused tools.', action='store_true')
    uninstall.add_argument('--remove-archives', help='Remove old archive versions and archives from unused tools.', action='store_true')

    # Constraint checking can be disabled via the environment as well as via flags.
    no_constraints_default = os.environ.get('IDF_PYTHON_CHECK_CONSTRAINTS', '').lower() in ['0', 'n', 'no']

    # Maintainer-only option for rewriting download URLs to mirrors.
    if IDF_MAINTAINER:
        for subparser in [download, install]:
            subparser.add_argument('--mirror-prefix-map', nargs='*',
                                   help=('Pattern to rewrite download URLs, with source and replacement separated by comma. '
                                         'E.g. http://foo.com,http://test.foo.com'))

    install_python_env = subparsers.add_parser('install-python-env',
                                               help=('Create Python virtual environment and install the '
                                                     'required Python packages'))
    install_python_env.add_argument('--reinstall', help='Discard the previously installed environment',
                                    action='store_true')
    install_python_env.add_argument('--extra-wheels-dir', help=('Additional directories with wheels '
                                                                'to use during installation'))
    install_python_env.add_argument('--extra-wheels-url', help='Additional URL with wheels', default=IDF_PIP_WHEELS_URL)
    install_python_env.add_argument('--no-index', help='Work offline without retrieving wheels index')
    install_python_env.add_argument('--features', default='core', help=('A comma separated list of desired features for installing. '
                                                                        'It defaults to installing just the core functionality.'))
    install_python_env.add_argument('--no-constraints', action='store_true', default=no_constraints_default,
                                    help=('Disable constraint settings. Use with care and only when you want to manage '
                                          'package versions by yourself. It can be set with the IDF_PYTHON_CHECK_CONSTRAINTS '
                                          'environment variable.'))

    # Maintainer-only subcommand for updating tools.json with new versions.
    if IDF_MAINTAINER:
        add_version = subparsers.add_parser('add-version', help='Add or update download info for a version')
        add_version.add_argument('--output', help='Save new tools.json into this file')
        add_version.add_argument('--tool', help='Tool name to set add a version for', required=True)
        add_version.add_argument('--version', help='Version identifier', required=True)
        add_version.add_argument('--url-prefix', help='String to prepend to file names to obtain download URLs')
        add_version.add_argument('--override', action='store_true', help='Override tool versions with new data')
        add_version_files_group = add_version.add_mutually_exclusive_group(required=True)
        add_version_files_group.add_argument('--checksum-file', help='URL or path to local file with checksum/size for artifacts')
        add_version_files_group.add_argument('--artifact-file', help='File names of the download artifacts', nargs='*')

    rewrite = subparsers.add_parser('rewrite', help='Load tools.json, validate, and save the result back into JSON')
    rewrite.add_argument('--output', help='Save new tools.json into this file')

    subparsers.add_parser('validate', help='Validate tools.json against schema file')

    gen_doc = subparsers.add_parser('gen-doc', help='Write the list of tools as a documentation page')
    gen_doc.add_argument('--output', type=argparse.FileType('w'), default=sys.stdout,
                         help='Output file name')
    gen_doc.add_argument('--heading-underline-char', help='Character to use when generating RST sections', default='~')

    check_python_dependencies = subparsers.add_parser('check-python-dependencies',
                                                      help='Check that all required Python packages are installed.')
    check_python_dependencies.add_argument('--no-constraints', action='store_true', default=no_constraints_default,
                                           help='Disable constraint settings. Use with care and only when you want '
                                                'to manage package versions by yourself. It can be set with the IDF_PYTHON_CHECK_CONSTRAINTS '
                                                'environment variable.')

    # Helper subcommands only exposed when the version-helper env variable is set.
    if os.environ.get('IDF_TOOLS_VERSION_HELPER'):
        check_tool_supported = subparsers.add_parser('check-tool-supported',
                                                     help='Check that selected tool is compatible with IDF. Writes "True"/"False" to stdout in success.')
        check_tool_supported.add_argument('--tool-name', required=True, help='Tool name (from tools.json)')
        check_tool_supported.add_argument('--exec-path', required=True, help='Full path to executable under the test')

        get_tool_supported_versions = subparsers.add_parser('get-tool-supported-versions', help='Prints a list of tool\'s supported versions')
        get_tool_supported_versions.add_argument('--tool-name', required=True, help='Tool name (from tools.json)')

    args = parser.parse_args(argv)

    if args.action is None:
        parser.print_help()
        parser.exit(1)

    # Propagate output-verbosity flags into the global state object.
    g.quiet = args.quiet

    g.non_interactive = args.non_interactive

    # --unset (hidden) behaves exactly like --deactivate.
    if 'unset' in args and args.unset:
        args.deactivate = True

    # Resolve the IDF path: CLI flag > IDF_PATH env var > parent of this script.
    g.idf_path = args.idf_path or os.environ.get('IDF_PATH') or os.path.realpath(os.path.join(os.path.dirname(__file__), '..'))
    os.environ['IDF_PATH'] = g.idf_path

    g.idf_tools_path = os.environ.get('IDF_TOOLS_PATH') or os.path.expanduser(IDF_TOOLS_PATH_DEFAULT)

    # On macOS, unset __PYVENV_LAUNCHER__ variable if it is set.
    # Otherwise sys.executable keeps pointing to the system Python, even when a python binary from a virtualenv is invoked.
    # See https://bugs.python.org/issue22490#msg283859.
    os.environ.pop('__PYVENV_LAUNCHER__', None)

    # Python 2 cannot handle non-ASCII tool paths; fail early with a clear message.
    if sys.version_info.major == 2:
        try:
            g.idf_tools_path.decode('ascii')  # type: ignore
        except UnicodeDecodeError:
            fatal(f'IDF_TOOLS_PATH contains non-ASCII characters: {g.idf_tools_path}'
                  '\nThis is not supported yet with Python 2. '
                  'Please set IDF_TOOLS_PATH to a directory with an ASCII name, or switch to Python 3.')
            raise SystemExit(1)

    if CURRENT_PLATFORM is None:
        fatal(f'Platform {PYTHON_PLATFORM} appears to be unsupported')
        raise SystemExit(1)

    if args.tools_json:
        g.tools_json = args.tools_json
    else:
        g.tools_json = os.path.join(g.idf_path, TOOLS_FILE)

    # Dispatch: subcommand 'foo-bar' is handled by module-level action_foo_bar().
    action_func_name = f'action_{args.action.replace("-", "_")}'
    action_func = globals()[action_func_name]

    action_func(args)
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    # MSys/Mingw Python builds mishandle paths and subprocesses for this tool;
    # refuse to run there instead of failing in obscure ways later.
    if 'MSYSTEM' in os.environ:
        fatal('MSys/Mingw is not supported. Please follow the getting started guide of the documentation to set up '
              'a supported environment')
        raise SystemExit(1)
    main(sys.argv[1:])
|