# SPDX-FileCopyrightText: 2020-2024 Espressif Systems (Shanghai) CO LTD
# SPDX-License-Identifier: Apache-2.0

# internal use only for CI
# some CI related util functions
import logging
|
|
|
|
import os
|
2024-01-24 04:49:40 -05:00
|
|
|
import re
|
2020-10-21 07:30:49 -04:00
|
|
|
import subprocess
|
2020-11-12 01:11:54 -05:00
|
|
|
import sys
|
2023-12-18 09:42:56 -05:00
|
|
|
import typing as t
|
|
|
|
from functools import cached_property
|
2023-12-18 09:29:58 -05:00
|
|
|
from pathlib import Path
|
2022-04-29 00:19:32 -04:00
|
|
|
|
2022-11-29 03:14:26 -05:00
|
|
|
IDF_PATH = os.path.abspath(os.getenv('IDF_PATH', os.path.join(os.path.dirname(__file__), '..', '..')))
|
2020-10-21 07:30:49 -04:00
|
|
|
|
|
|
|
|
2023-12-18 09:42:56 -05:00
|
|
|
def get_submodule_dirs(full_path: bool = False) -> t.List[str]:
|
2020-10-21 07:30:49 -04:00
|
|
|
"""
|
|
|
|
To avoid issue could be introduced by multi-os or additional dependency,
|
|
|
|
we use python and git to get this output
|
|
|
|
:return: List of submodule dirs
|
|
|
|
"""
|
|
|
|
dirs = []
|
|
|
|
try:
|
2022-01-28 02:21:12 -05:00
|
|
|
lines = (
|
|
|
|
subprocess.check_output(
|
|
|
|
[
|
|
|
|
'git',
|
|
|
|
'config',
|
|
|
|
'--file',
|
|
|
|
os.path.realpath(os.path.join(IDF_PATH, '.gitmodules')),
|
|
|
|
'--get-regexp',
|
|
|
|
'path',
|
|
|
|
]
|
|
|
|
)
|
|
|
|
.decode('utf8')
|
|
|
|
.strip()
|
|
|
|
.split('\n')
|
|
|
|
)
|
2020-10-21 07:30:49 -04:00
|
|
|
for line in lines:
|
|
|
|
_, path = line.split(' ')
|
2020-10-30 05:28:24 -04:00
|
|
|
if full_path:
|
|
|
|
dirs.append(os.path.join(IDF_PATH, path))
|
|
|
|
else:
|
|
|
|
dirs.append(path)
|
2021-02-01 21:53:40 -05:00
|
|
|
except Exception as e: # pylint: disable=W0703
|
2020-10-21 07:30:49 -04:00
|
|
|
logging.warning(str(e))
|
|
|
|
|
|
|
|
return dirs
|
2020-11-12 01:11:54 -05:00
|
|
|
|
|
|
|
|
2022-11-29 04:36:56 -05:00
|
|
|
def _check_git_filemode(full_path: str) -> bool:
|
2020-11-12 01:11:54 -05:00
|
|
|
try:
|
2022-11-29 03:14:26 -05:00
|
|
|
stdout = subprocess.check_output(['git', 'ls-files', '--stage', full_path]).strip().decode('utf-8')
|
2020-11-12 01:11:54 -05:00
|
|
|
except subprocess.CalledProcessError:
|
|
|
|
return True
|
|
|
|
|
|
|
|
mode = stdout.split(' ', 1)[0] # e.g. 100644 for a rw-r--r--
|
|
|
|
if any([int(i, 8) & 1 for i in mode[-3:]]):
|
2021-04-26 03:46:44 -04:00
|
|
|
return True
|
|
|
|
return False
|
2020-11-12 01:11:54 -05:00
|
|
|
|
|
|
|
|
2021-11-19 03:11:47 -05:00
|
|
|
def is_executable(full_path: str) -> bool:
|
2020-11-12 01:11:54 -05:00
|
|
|
"""
|
|
|
|
os.X_OK will always return true on windows. Use git to check file mode.
|
|
|
|
:param full_path: file full path
|
|
|
|
:return: True is it's an executable file
|
|
|
|
"""
|
|
|
|
if sys.platform == 'win32':
|
|
|
|
return _check_git_filemode(full_path)
|
2021-02-01 21:53:40 -05:00
|
|
|
return os.access(full_path, os.X_OK)
|
|
|
|
|
|
|
|
|
2023-12-18 09:42:56 -05:00
|
|
|
def get_git_files(path: str = IDF_PATH, full_path: bool = False) -> t.List[str]:
|
2021-02-01 21:53:40 -05:00
|
|
|
"""
|
|
|
|
Get the result of git ls-files
|
|
|
|
:param path: path to run git ls-files
|
|
|
|
:param full_path: return full path if set to True
|
|
|
|
:return: list of file paths
|
|
|
|
"""
|
|
|
|
try:
|
2021-10-27 02:20:49 -04:00
|
|
|
# this is a workaround when using under worktree
|
|
|
|
# if you're using worktree, when running git commit a new environment variable GIT_DIR would be declared,
|
|
|
|
# the value should be <origin_repo_path>/.git/worktrees/<worktree name>
|
2022-07-17 22:17:54 -04:00
|
|
|
# This would affect the return value of `git ls-files`, unset this would use the `cwd`value or its parent
|
2021-10-27 02:20:49 -04:00
|
|
|
# folder if no `.git` folder found in `cwd`.
|
|
|
|
workaround_env = os.environ.copy()
|
|
|
|
workaround_env.pop('GIT_DIR', None)
|
2022-01-28 02:21:12 -05:00
|
|
|
files = (
|
|
|
|
subprocess.check_output(['git', 'ls-files'], cwd=path, env=workaround_env)
|
|
|
|
.decode('utf8')
|
|
|
|
.strip()
|
|
|
|
.split('\n')
|
|
|
|
)
|
2021-02-01 21:53:40 -05:00
|
|
|
except Exception as e: # pylint: disable=W0703
|
|
|
|
logging.warning(str(e))
|
|
|
|
files = []
|
|
|
|
return [os.path.join(path, f) for f in files] if full_path else files
|
|
|
|
|
|
|
|
|
2023-12-18 09:42:56 -05:00
|
|
|
def to_list(s: t.Any) -> t.List[t.Any]:
|
|
|
|
if not s:
|
|
|
|
return []
|
|
|
|
|
2022-05-07 00:18:56 -04:00
|
|
|
if isinstance(s, (set, tuple)):
|
2022-04-29 00:19:32 -04:00
|
|
|
return list(s)
|
2022-05-07 00:18:56 -04:00
|
|
|
|
|
|
|
if isinstance(s, list):
|
2022-04-29 00:19:32 -04:00
|
|
|
return s
|
2022-05-07 00:18:56 -04:00
|
|
|
|
|
|
|
return [s]
|
2023-12-18 09:42:56 -05:00
|
|
|
|
|
|
|
|
|
|
|
class GitlabYmlConfig:
|
|
|
|
def __init__(self, root_yml_filepath: str = os.path.join(IDF_PATH, '.gitlab-ci.yml')) -> None:
|
|
|
|
self._config: t.Dict[str, t.Any] = {}
|
|
|
|
self._defaults: t.Dict[str, t.Any] = {}
|
|
|
|
|
|
|
|
self._load(root_yml_filepath)
|
|
|
|
|
|
|
|
def _load(self, root_yml_filepath: str) -> None:
|
|
|
|
# avoid unused import in other pre-commit hooks
|
|
|
|
import yaml
|
|
|
|
|
|
|
|
all_config = dict()
|
|
|
|
root_yml = yaml.load(open(root_yml_filepath), Loader=yaml.FullLoader)
|
2024-01-25 06:45:13 -05:00
|
|
|
|
|
|
|
# expanding "include"
|
|
|
|
for item in root_yml.pop('include', []) or []:
|
2023-12-18 09:42:56 -05:00
|
|
|
all_config.update(yaml.load(open(os.path.join(IDF_PATH, item)), Loader=yaml.FullLoader))
|
|
|
|
|
|
|
|
if 'default' in all_config:
|
|
|
|
self._defaults = all_config.pop('default')
|
|
|
|
|
|
|
|
self._config = all_config
|
|
|
|
|
2024-01-25 06:45:13 -05:00
|
|
|
# anchor is the string that will be reused in templates
|
|
|
|
self._anchor_keys: t.Set[str] = set()
|
|
|
|
# template is a dict that will be extended
|
|
|
|
self._template_keys: t.Set[str] = set()
|
|
|
|
self._used_template_keys: t.Set[str] = set() # tracing the used templates
|
|
|
|
# job is a dict that will be executed
|
|
|
|
self._job_keys: t.Set[str] = set()
|
|
|
|
|
|
|
|
self.expand_extends()
|
|
|
|
|
2023-12-18 09:42:56 -05:00
|
|
|
@property
|
|
|
|
def default(self) -> t.Dict[str, t.Any]:
|
|
|
|
return self._defaults
|
|
|
|
|
|
|
|
@property
|
|
|
|
def config(self) -> t.Dict[str, t.Any]:
|
|
|
|
return self._config
|
|
|
|
|
|
|
|
@cached_property
|
|
|
|
def global_keys(self) -> t.List[str]:
|
|
|
|
return ['default', 'include', 'workflow', 'variables', 'stages']
|
|
|
|
|
|
|
|
@cached_property
|
|
|
|
def anchors(self) -> t.Dict[str, t.Any]:
|
2024-01-25 06:45:13 -05:00
|
|
|
return {k: v for k, v in self.config.items() if k in self._anchor_keys}
|
2023-12-18 09:42:56 -05:00
|
|
|
|
|
|
|
@cached_property
|
|
|
|
def jobs(self) -> t.Dict[str, t.Any]:
|
2024-01-25 06:45:13 -05:00
|
|
|
return {k: v for k, v in self.config.items() if k in self._job_keys}
|
2023-12-18 09:42:56 -05:00
|
|
|
|
|
|
|
@cached_property
|
2024-01-25 06:45:13 -05:00
|
|
|
def templates(self) -> t.Dict[str, t.Any]:
|
|
|
|
return {k: v for k, v in self.config.items() if k in self._template_keys}
|
2023-12-18 09:42:56 -05:00
|
|
|
|
|
|
|
@cached_property
|
2024-01-25 06:45:13 -05:00
|
|
|
def used_templates(self) -> t.Set[str]:
|
|
|
|
return self._used_template_keys
|
|
|
|
|
|
|
|
def expand_extends(self) -> None:
|
|
|
|
"""
|
|
|
|
expand the `extends` key in-place.
|
|
|
|
"""
|
|
|
|
for k, v in self.config.items():
|
|
|
|
if k in self.global_keys:
|
2023-12-18 09:42:56 -05:00
|
|
|
continue
|
|
|
|
|
2024-01-25 06:45:13 -05:00
|
|
|
if isinstance(v, (str, list)):
|
|
|
|
self._anchor_keys.add(k)
|
|
|
|
elif k.startswith('.if-'):
|
|
|
|
self._anchor_keys.add(k)
|
|
|
|
elif k.startswith('.'):
|
|
|
|
self._template_keys.add(k)
|
|
|
|
elif isinstance(v, dict):
|
|
|
|
self._job_keys.add(k)
|
|
|
|
else:
|
|
|
|
raise ValueError(f'Unknown type for key {k} with value {v}')
|
|
|
|
|
|
|
|
# no need to expand anchor
|
|
|
|
|
|
|
|
# expand template first
|
|
|
|
for k in self._template_keys:
|
|
|
|
self._expand_extends(k)
|
|
|
|
|
|
|
|
# expand job
|
|
|
|
for k in self._job_keys:
|
|
|
|
self._expand_extends(k)
|
|
|
|
|
2024-02-16 07:43:53 -05:00
|
|
|
def _merge_dict(self, d1: t.Dict[str, t.Any], d2: t.Dict[str, t.Any]) -> t.Any:
|
|
|
|
for k, v in d2.items():
|
|
|
|
if k in d1:
|
|
|
|
if isinstance(v, dict) and isinstance(d1[k], dict):
|
|
|
|
d1[k] = self._merge_dict(d1[k], v)
|
|
|
|
else:
|
|
|
|
d1[k] = v
|
|
|
|
else:
|
|
|
|
d1[k] = v
|
|
|
|
|
|
|
|
return d1
|
|
|
|
|
2024-01-25 06:45:13 -05:00
|
|
|
def _expand_extends(self, name: str) -> t.Dict[str, t.Any]:
|
|
|
|
extends = to_list(self.config[name].pop('extends', None))
|
|
|
|
if not extends:
|
|
|
|
return self.config[name] # type: ignore
|
|
|
|
|
2024-02-16 07:43:53 -05:00
|
|
|
original_d = self.config[name].copy()
|
2024-01-25 06:45:13 -05:00
|
|
|
d = {}
|
|
|
|
while extends:
|
2024-02-16 07:43:53 -05:00
|
|
|
self._used_template_keys.update(extends) # for tracking
|
2024-01-25 06:45:13 -05:00
|
|
|
|
|
|
|
for i in extends:
|
|
|
|
d.update(self._expand_extends(i))
|
2023-12-18 09:42:56 -05:00
|
|
|
|
2024-01-25 06:45:13 -05:00
|
|
|
extends = to_list(self.config[name].pop('extends', None))
|
2023-12-18 09:42:56 -05:00
|
|
|
|
2024-02-16 07:43:53 -05:00
|
|
|
self.config[name] = self._merge_dict(d, original_d)
|
2024-01-25 06:45:13 -05:00
|
|
|
return self.config[name] # type: ignore
|
2023-12-18 09:29:58 -05:00
|
|
|
|
|
|
|
|
|
|
|
def get_all_manifest_files() -> t.List[str]:
|
|
|
|
paths: t.List[str] = []
|
|
|
|
|
|
|
|
for p in Path(IDF_PATH).glob('**/.build-test-rules.yml'):
|
|
|
|
if 'managed_components' in p.parts:
|
|
|
|
continue
|
|
|
|
|
|
|
|
paths.append(str(p))
|
|
|
|
|
|
|
|
return paths
|
2024-01-24 04:49:40 -05:00
|
|
|
|
|
|
|
|
|
|
|
def sanitize_job_name(name: str) -> str:
|
|
|
|
"""
|
|
|
|
Sanitize the job name from CI_JOB_NAME
|
|
|
|
|
|
|
|
- for job with `parallel: int` set, the `CI_JOB_NAME` would be `job_name index/total`, like `foo 1/3`
|
|
|
|
- for job with `parallel: matrix` set, the `CI_JOB_NAME` would be `job_name: [var1, var2]`, like `foo: [a, b]`
|
|
|
|
|
|
|
|
We consider
|
|
|
|
- the jobs generated by `parallel: int` as the same job, i.e., we remove the index/total part.
|
|
|
|
- the jobs generated by `parallel: matrix` as different jobs, so we keep the matrix part.
|
|
|
|
|
|
|
|
:param name: job name
|
|
|
|
:return: sanitized job name
|
|
|
|
"""
|
|
|
|
return re.sub(r' \d+/\d+', '', name)
|
2024-01-23 04:32:14 -05:00
|
|
|
|
|
|
|
|
|
|
|
def idf_relpath(p: str) -> str:
|
|
|
|
"""
|
|
|
|
Turn all paths under IDF_PATH to relative paths
|
|
|
|
:param p: path
|
|
|
|
:return: relpath to IDF_PATH, or absolute path if not under IDF_PATH
|
|
|
|
"""
|
|
|
|
abs_path = os.path.abspath(p)
|
|
|
|
if abs_path.startswith(IDF_PATH):
|
|
|
|
return os.path.relpath(abs_path, IDF_PATH)
|
|
|
|
else:
|
|
|
|
return abs_path
|